Schema (one row per source file; ⌀ in the original viewer marks nullable columns):

| column | dtype | stats |
|---|---|---|
| blob_id | string | length 40 to 40 |
| directory_id | string | length 40 to 40 |
| path | string | length 3 to 616 |
| content_id | string | length 40 to 40 |
| detected_licenses | list | length 0 to 112 |
| license_type | string | 2 classes |
| repo_name | string | length 5 to 115 |
| snapshot_id | string | length 40 to 40 |
| revision_id | string | length 40 to 40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 to 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 to 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 to 2023-09-06 01:08:06 |
| github_id | int64, nullable | 4.92k to 681M |
| star_events_count | int64 | 0 to 209k |
| fork_events_count | int64 | 0 to 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us], nullable | 2012-06-04 01:52:49 to 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us], nullable | 2008-05-22 07:58:19 to 2023-08-21 12:35:19 |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 to 10.2M |
| extension | string | 188 classes |
| content | string | length 3 to 10.2M |
| authors | list | length 1 to 1 |
| author_id | string | length 1 to 132 |
blob_id: 3767e7c945abe719f74b7ea747b807c21211ab0e | directory_id: 2f89231a207b89acda1c46aba0b03572fb1da8aa | path: /main.py | content_id: b97b41680283a8d71d2b5d7c127bf9028765e375
detected_licenses: [] | license_type: no_license | repo_name: BentleyJOakes/wordpress_extractor | snapshot_id: 8649487562a7113effced06cd06a50ffcf53dff9 | revision_id: 02891890b801515e2c7873ccb95f62692a354fa3 | branch_name: refs/heads/master
visit_date: 2023-01-30T01:50:43.290318 | revision_date: 2020-12-09T22:18:13 | committer_date: 2020-12-09T22:18:13 | github_id: 297,141,840 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 788 | extension: py
content:
import argparse

from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings

from WordpressSpider import WordpressSpider


class WordPressExtractor:
    def __init__(self):
        # One shared crawler process, configured from the Scrapy project settings.
        self.process = CrawlerProcess(get_project_settings())

    def parse(self, start_urls):
        # Schedule one crawl per start URL, then run them; start() blocks until all finish.
        for surl in start_urls:
            WordpressSpider.start_urls = [surl]
            self.process.crawl(WordpressSpider, domain=surl)
        self.process.start()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        usage="%(prog)s [OPTION] URL",
        description="Extract a WordPress site to HTML."
    )
    parser.add_argument('files', nargs='*')
    args = parser.parse_args()

    wpe = WordPressExtractor()
    wpe.parse(list(args.files))
authors: ["[email protected]"] | author_id: null

blob_id: 8e7da99e05dbef88db6e0a0a7373afb00d7ae04c | directory_id: 409a3d53b08f55b6b42643dc68b1eb6daeb54ced | path: /mars/tensor/fetch/__init__.py | content_id: 9fe01c973ada2f630855b79aa187e44692abfcc5
detected_licenses: ["BSD-3-Clause", "MIT", "OFL-1.1", "LicenseRef-scancode-unknown-license-reference", "Python-2.0", "CC0-1.0", "Apache-2.0", "BSD-2-Clause"] | license_type: permissive | repo_name: sighingnow/mars | snapshot_id: 06cbcd8f4aa132a85cdb6c7215c61139636e316b | revision_id: c7897fbd144d230fff5edabc1494fb3ff44aa0d2 | branch_name: refs/heads/master
visit_date: 2023-01-21T12:11:12.469853 | revision_date: 2019-09-26T09:34:13 | committer_date: 2019-09-26T09:34:13 | github_id: 189,408,668 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: Apache-2.0 | gha_event_created_at: 2021-01-12T06:19:58 | gha_created_at: 2019-05-30T12:19:28 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 647 | extension: py
content:
# Copyright 1999-2018 Alibaba Group Holding Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .core import TensorFetch, TensorFetchShuffle
authors: ["[email protected]"] | author_id: null

blob_id: 49aeb0cb38e74c85e659bb3dc53c57a0ff8bf0bd | directory_id: c90674d955fe1399c0e99cf34437e583d1cf9fb9 | path: /application6-webcam motion detector/facedetection/script_face_detector.py | content_id: 9e1d75fde667176ccd297abb50c5542b6827f488
detected_licenses: [] | license_type: no_license | repo_name: TrellixVulnTeam/My_python_code_QQZ2 | snapshot_id: 556878cbe4f8d6d92e71f48285a6d2439b10ca81 | revision_id: 8cd8b697d92e1a79cce109baf560eeff27717ce8 | branch_name: refs/heads/master
visit_date: 2023-03-19T15:26:35.836114 | revision_date: 2018-06-29T14:09:06 | committer_date: 2018-06-29T14:09:06 | github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 496 | extension: py
content:
import cv2

# Load the Haar cascade for frontal faces and the test image.
face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
img = cv2.imread("people-6.jpg")

# Detection runs on grayscale; scaleFactor and minNeighbors trade recall against false positives.
gray_img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray_img, scaleFactor=1.05, minNeighbors=5)
print(type(faces))
print(faces)

# Draw a green rectangle around each detected face.
for x, y, w, h in faces:
    img = cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 3)

# Display the annotated image at half size.
resized = cv2.resize(img, (int(img.shape[1] / 2), int(img.shape[0] / 2)))
cv2.imshow("Faces", resized)
cv2.waitKey(0)
cv2.destroyAllWindows()
authors: ["[email protected]"] | author_id: null

blob_id: 6b676d38806dcbf551a398d20280036d0eda2c59 | directory_id: 4631798b64f2118b7d8e64483a14d7485163358b | path: /pizzaim.py | content_id: 899a6b086debd422f3812a74737846fb8e184845
detected_licenses: [] | license_type: no_license | repo_name: royrowe/python | snapshot_id: 288680aba27b8c2d46368250b45fb1672427fe6a | revision_id: dc7cebd56aa1bee7b2afd91e3a2a4b03f1775ba5 | branch_name: refs/heads/master
visit_date: 2020-04-15T20:05:02.587794 | revision_date: 2019-01-10T02:53:08 | committer_date: 2019-01-10T02:53:08 | github_id: 164,978,105 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 305 | extension: py
content:
#!/usr/bin/env python
'''
@File :8.6.1_pizza.py
@Copyright :luoming
@Date :
@Desc :
'''
def make_pizza(size, *toppings):
    '''Summarize the pizza we are about to make.'''
    print("\nMaking a " + str(size) + "-inch pizza with the following toppings:")
    for topping in toppings:
        print("-" + topping)
authors: ["your email"] | author_id: your email

blob_id: 30d50006b8f6f9c5ac6db3ed6ed1ae6ae151b195 | directory_id: 312d8dbbf980bf164f210e7935b17dc08d64ff87 | path: /Model/repeat1_link_prediction_appeared_utilize_existing_attribute/STGGNN/main.py | content_id: 97389c092854b118fe55455b3f04168c8e8d4bd8
detected_licenses: [] | license_type: no_license | repo_name: hsack6/OWGP_NBA | snapshot_id: 27dafbd6e59c17ce4a66e92132ee56782e2126bf | revision_id: 56656efb5884cd9f806e476a92c5e6485c71adeb | branch_name: refs/heads/master
visit_date: 2023-02-25T09:52:05.165494 | revision_date: 2021-02-03T12:44:04 | committer_date: 2021-02-03T12:44:04 | github_id: 288,363,250 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 6,315 | extension: py
content:
import argparse
import random
import pandas as pd
import torch
import torch.nn as nn
import torch.optim as optim
from model import STGGNN
from utils.train import train
from utils.valid import valid
from utils.test import test
from utils.inference import inference
from utils.data.dataset import BADataset
from utils.data.dataloader import BADataloader
from utils.pytorchtools import EarlyStopping
import sys
import os
current_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.append( str(current_dir) + '/../../../' )
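# setting_param lives three directory levels up, so make that directory importable.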
from setting_param import Model_repeat1_link_prediction_appeared_utilize_existing_attribute_InputDir as InputDir
from setting_param import Model_repeat1_link_prediction_appeared_utilize_existing_attribute_STGGNN_OutputDir as OutputDir
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_worker
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_batchSize
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_lr
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_init_L
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_annotation_dim
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_state_dim
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_output_dim
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_n_steps
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_niter
from setting_param import repeat1_link_prediction_appeared_utilize_existing_attribute_patience
parser = argparse.ArgumentParser()
parser.add_argument('--workers', type=int, help='number of data loading workers', default=repeat1_link_prediction_appeared_utilize_existing_attribute_worker)
parser.add_argument('--batchSize', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_batchSize, help='input batch size')
parser.add_argument('--state_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_state_dim, help='GGNN hidden state size')
parser.add_argument('--annotation_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_annotation_dim, help='GGNN input annotation size')
parser.add_argument('--output_dim', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_output_dim, help='Model output state size')
parser.add_argument('--init_L', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_init_L, help='number of observation time step')
parser.add_argument('--niter', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_niter, help='number of epochs to train for')
parser.add_argument('--n_steps', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_n_steps, help='propagation steps number of GGNN')
parser.add_argument('--patience', type=int, default=repeat1_link_prediction_appeared_utilize_existing_attribute_patience, help='Early stopping patience')
parser.add_argument('--lr', type=float, default=repeat1_link_prediction_appeared_utilize_existing_attribute_lr, help='learning rate')
parser.add_argument('--cuda', action='store_true', help='enables cuda')
parser.add_argument('--verbal', action='store_true', help='print training info or not')
parser.add_argument('--manualSeed', type=int, help='manual seed')
opt = parser.parse_args()
print(opt)
if opt.manualSeed is None:
opt.manualSeed = random.randint(1, 10000)
print("Random Seed: ", opt.manualSeed)
random.seed(opt.manualSeed)
torch.manual_seed(opt.manualSeed)
opt.dataroot = InputDir
if opt.cuda:
torch.cuda.manual_seed_all(opt.manualSeed)
opt.L = opt.init_L
def main(opt):
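    # BADataset's three boolean flags appear to select the split: train, valid,
    # or test; all False ('all_dataset') loads the full set, used for inference below.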
train_dataset = BADataset(opt.dataroot, opt.L, True, False, False)
train_dataloader = BADataloader(train_dataset, batch_size=opt.batchSize, \
shuffle=True, num_workers=opt.workers, drop_last=True)
valid_dataset = BADataset(opt.dataroot, opt.L, False, True, False)
valid_dataloader = BADataloader(valid_dataset, batch_size=opt.batchSize, \
shuffle=True, num_workers=opt.workers, drop_last=True)
test_dataset = BADataset(opt.dataroot, opt.L, False, False, True)
test_dataloader = BADataloader(test_dataset, batch_size=opt.batchSize, \
shuffle=True, num_workers=opt.workers, drop_last=True)
all_dataset = BADataset(opt.dataroot, opt.L, False, False, False)
all_dataloader = BADataloader(all_dataset, batch_size=opt.batchSize, \
shuffle=False, num_workers=opt.workers, drop_last=False)
opt.n_edge_types = train_dataset.n_edge_types
opt.n_node = train_dataset.n_node
net = STGGNN(opt, kernel_size=2, n_blocks=1, state_dim_bottleneck=opt.state_dim, annotation_dim_bottleneck=opt.annotation_dim)
net.double()
print(net)
criterion = nn.BCELoss()
if opt.cuda:
net.cuda()
criterion.cuda()
optimizer = optim.Adam(net.parameters(), lr=opt.lr)
early_stopping = EarlyStopping(patience=opt.patience, verbose=True)
os.makedirs(OutputDir, exist_ok=True)
train_loss_ls = []
valid_loss_ls = []
test_loss_ls = []
for epoch in range(0, opt.niter):
train_loss = train(epoch, train_dataloader, net, criterion, optimizer, opt)
valid_loss = valid(valid_dataloader, net, criterion, opt)
test_loss = test(test_dataloader, net, criterion, opt)
train_loss_ls.append(train_loss)
valid_loss_ls.append(valid_loss)
test_loss_ls.append(test_loss)
early_stopping(valid_loss, net, OutputDir)
if early_stopping.early_stop:
print("Early stopping")
break
df = pd.DataFrame({'epoch':[i for i in range(1, len(train_loss_ls)+1)], 'train_loss': train_loss_ls, 'valid_loss': valid_loss_ls, 'test_loss': test_loss_ls})
df.to_csv(OutputDir + '/loss.csv', index=False)
net.load_state_dict(torch.load(OutputDir + '/checkpoint.pt'))
inference(all_dataloader, net, criterion, opt, OutputDir)
if __name__ == "__main__":
main(opt)
authors: ["[email protected]"] | author_id: null

blob_id: 9b382f90b9dd2ca867af5f2dcd6efb0088ac50d7 | directory_id: ae7e36ede21f38fce15d31caf12d235d76276d54 | path: /tests/component/deb/test_import_dsc.py | content_id: 41d081e5dee9013cb645ec380bed530384e7e13e
detected_licenses: [] | license_type: no_license | repo_name: pombreda/git-buildpackage-rpm | snapshot_id: 08e658847b5608b21f59abb2f7da0ebec333df45 | revision_id: bd72c30f029c2099ec3ed792b82eefcaedbc79b6 | branch_name: refs/heads/master
visit_date: 2020-12-30T19:45:18.763908 | revision_date: 2015-03-31T15:59:37 | committer_date: 2015-04-01T06:48:20 | github_id: null | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,483 | extension: py
content:
# vim: set fileencoding=utf-8 :
#
# (C) 2013,2014 Guido Günther <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
from tests.component import (ComponentTestBase,
ComponentTestGitRepository)
from tests.component.deb import DEB_TEST_DATA_DIR
from nose.tools import ok_
from gbp.scripts.import_dsc import main as import_dsc
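# import_dsc's main() is called directly with an argv-style list; 'arg0' stands
# in for the program name, and a return value of 0 means success.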
class TestImportDsc(ComponentTestBase):
"""Test importing of debian source packages"""
def test_debian_import(self):
"""Test that importing of debian native packages works"""
def _dsc(version):
return os.path.join(DEB_TEST_DATA_DIR,
'dsc-native',
'git-buildpackage_%s.dsc' % version)
dsc = _dsc('0.4.14')
assert import_dsc(['arg0', dsc]) == 0
repo = ComponentTestGitRepository('git-buildpackage')
self._check_repo_state(repo, 'master', ['master'])
assert len(repo.get_commits()) == 1
os.chdir('git-buildpackage')
dsc = _dsc('0.4.15')
assert import_dsc(['arg0', dsc]) == 0
self._check_repo_state(repo, 'master', ['master'])
assert len(repo.get_commits()) == 2
dsc = _dsc('0.4.16')
assert import_dsc(['arg0', dsc]) == 0
self._check_repo_state(repo, 'master', ['master'])
assert len(repo.get_commits()) == 3
def test_create_branches(self):
"""Test if creating missing branches works"""
def _dsc(version):
return os.path.join(DEB_TEST_DATA_DIR,
'dsc-3.0',
'hello-debhelper_%s.dsc' % version)
dsc = _dsc('2.6-2')
assert import_dsc(['arg0',
'--verbose',
'--pristine-tar',
'--debian-branch=master',
'--upstream-branch=upstream',
dsc]) == 0
repo = ComponentTestGitRepository('hello-debhelper')
os.chdir('hello-debhelper')
assert len(repo.get_commits()) == 2
self._check_repo_state(repo, 'master', ['master', 'pristine-tar', 'upstream'])
dsc = _dsc('2.8-1')
assert import_dsc(['arg0',
'--verbose',
'--pristine-tar',
'--debian-branch=foo',
'--upstream-branch=bar',
'--create-missing-branches',
dsc]) == 0
self._check_repo_state(repo, 'master', ['bar', 'foo', 'master', 'pristine-tar', 'upstream'])
commits, expected = len(repo.get_commits()), 2
ok_(commits == expected, "Found %d commit instead of %d" % (commits, expected))
authors: ["[email protected]"] | author_id: null

blob_id: 3092838cfce71af4c34d48f0d6bbbaf825881518 | directory_id: bc565ca3361eb7119a6ff757201e550c20e1ea84 | path: /Programmers/Lv1/lv1_가운데글자가져오기.py | content_id: a1018cc534f5910aa9e173681eb9b1391cd9d583
detected_licenses: [] | license_type: no_license | repo_name: seoyoungsoo/CodingTest-Python | snapshot_id: 1f9a3caaa3a424f4f7bd0e01a30664b183aaf5eb | revision_id: d47cb46dd78f52b7cfa26846e8e77b63a931161e | branch_name: refs/heads/master
visit_date: 2023-04-21T20:25:46.663686 | revision_date: 2021-05-17T12:17:21 | committer_date: 2021-05-17T12:17:21 | github_id: 351,439,429 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 321 | extension: py
content:
# Get the middle character(s) of a string
def solution(s):
    answer = ''
    slen = len(s)
    if slen % 2 == 0:
        # even length: return the middle two characters
        mid = int(slen / 2 - 1)
        answer += s[mid]
        answer += s[mid + 1]
    else:
        # odd length: return the single middle character
        mid = int(slen / 2)
        answer += s[mid]
    return answer
# testcase 1
s = "abcde"
print(solution(s))
authors: ["[email protected]"] | author_id: null

blob_id: ab908d2c9e608b5422fd8f8fed60465b577c9043 | directory_id: 587dbdf730b6cc3e693efc5dca5d83d1dd35ee1a | path: /leetcode/1-300/25.py | content_id: 933cf1a3090bdb12225bd9e756ef2cfc34a4c9f9
detected_licenses: [] | license_type: no_license | repo_name: Rivarrl/leetcode_python | snapshot_id: 8db2a15646d68e4d84ab263d8c3b6e38d8e3ea99 | revision_id: dbe8eb449e5b112a71bc1cd4eabfd138304de4a3 | branch_name: refs/heads/master
visit_date: 2021-06-17T15:21:28.321280 | revision_date: 2021-03-11T07:28:19 | committer_date: 2021-03-11T07:28:19 | github_id: 179,452,345 | star_events_count: 3 | fork_events_count: 1 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,034 | extension: py
content:
# -*- coding: utf-8 -*-
# ======================================
# @File : 25.py
# @Time : 2020/5/16 0:07
# @Author : Rivarrl
# ======================================
from algorithm_utils import *
class Solution:
"""
    [25. Reverse Nodes in k-Group](https://leetcode-cn.com/problems/reverse-nodes-in-k-group/)
"""
@timeit
def reverseKGroup(self, head: ListNode, k: int) -> ListNode:
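        # First pass: count the nodes so we know how many full groups of k remain.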
p = head
n = 0
while p:
n += 1
p = p.next
dummy = last = ListNode(-1)
p = head
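        # Reverse each full group of k by head insertion: q becomes the group's new
        # head as successive nodes are unlinked from after p and pushed to the front;
        # last links the previous group's tail to the new head.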
while n > 0:
if n < k:
last.next = p
break
q = p
for _ in range(k-1):
t = p.next
p.next = t.next
t.next = q
q = t
last.next = q
last = p
p = p.next
n -= k
return dummy.next
if __name__ == '__main__':
a = Solution()
x = construct_list_node([1,2,3,4,5])
a.reverseKGroup(x, 3)
authors: ["[email protected]"] | author_id: null

blob_id: 1f59d88201b38289c14c3124ed3a3da301396303 | directory_id: 6c8d8b1c5b02e3181efd41b5b227f8905f474fa9 | path: /clustering/clustering_categorical_peoples_interests/main.py | content_id: f024becc456028afc3f32d2de04be24de61c83c5
detected_licenses: [] | license_type: no_license | repo_name: DXV-HUST-SoICT/data_mining_mini_projects | snapshot_id: 9203990a620546fb61ee571090ef51e16242054d | revision_id: c1010a4f39b5b114ad58ae1f3224435c3a84f50e | branch_name: refs/heads/master
visit_date: 2022-11-01T08:38:21.141429 | revision_date: 2020-06-16T12:01:32 | committer_date: 2020-06-16T12:01:32 | github_id: 257,429,399 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,780 | extension: py
content:
from time import time
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import joblib
from sklearn import metrics
from sklearn.cluster import KMeans, DBSCAN
from sklearn.datasets import load_digits
from sklearn.decomposition import PCA
from sklearn.preprocessing import scale
from sklearn import preprocessing
np.random.seed(42)
start = time()
# X, y = load_digits(return_X_y=True)
raw_data = pd.read_csv('./data/kaggle_Interests_group.csv')
X = raw_data.drop(columns = ['group', 'grand_tot_interests'])
y = raw_data['group']
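# Binarize the interest columns: any value other than exactly 1 (including NaN) becomes 0.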
X = dict(X)
for key in X:
for i in range(len(X[key])):
if X[key][i] != 1:
X[key][i] = 0
X = pd.DataFrame(X)
data = scale(X)
le = preprocessing.LabelEncoder()
fit_list = y
le.fit(fit_list)
y = le.transform(y)
labels = y
n_samples, n_features = data.shape
n_clusters = 10
print("n_clusters: %d, \t n_samples %d, \t n_features %d" % (n_clusters, n_samples, n_features))
print(82 * '_')
print('init\t\ttime\tinertia\thomo\tcompl\tv-meas\tARI\tAMI\tsilhouette')
def bench_k_means(estimator, name, data):
t0 = time()
estimator.fit(data)
silhouette_score = metrics.silhouette_score(data, estimator.labels_, metric='euclidean')
print('%-9s\t%.2fs\t%i\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f\t%.3f'
% (name, (time() - t0), estimator.inertia_,
metrics.homogeneity_score(labels, estimator.labels_),
metrics.completeness_score(labels, estimator.labels_),
metrics.v_measure_score(labels, estimator.labels_),
metrics.adjusted_rand_score(labels, estimator.labels_),
metrics.adjusted_mutual_info_score(labels, estimator.labels_),
silhouette_score))
filename = './model/' + str(start) + '_' + name + '_n_clusters_' + str(n_clusters) + '_silhouette_score_' + str(silhouette_score) + '.sav'
joblib.dump(estimator, filename)
pca = PCA(n_components=n_clusters).fit(data)
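# Seeding KMeans with the PCA components is deterministic, so a single init (n_init=1) suffices.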
estimators = dict()
estimators['k-means_k-means++'] = KMeans(init='k-means++', n_clusters=n_clusters, n_init=10)
estimators['k-means_random'] = KMeans(init='random', n_clusters=n_clusters, n_init=10)
estimators['k-means_PCA-based'] = KMeans(init=pca.components_, n_clusters=n_clusters, n_init=1)
for name in estimators:
# name = 'kmeans k-means++'
estimator = estimators[name]
bench_k_means(estimator=estimator, name=name, data=data)
print(82 * '_')
# ###########################################################
# Visualize the results on PCA-reduced data
reduced_data = PCA(n_components=2).fit_transform(data)
kmeans = KMeans(init='k-means++', n_clusters=n_clusters, n_init=10)
kmeans.fit(reduced_data)
# Step size of the mesh. Decrease to increase the quality of the VQ.
h = 0.02 # point in the mesh [x_min, x_max]x[y_min, y_max].
# Plot the decision boundary. For that, we will assign a color to each point in the mesh.
x_min, x_max = reduced_data[:, 0].min() - 1, reduced_data[:, 0].max() + 1
y_min, y_max = reduced_data[:, 1].min() - 1, reduced_data[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
# Obtain labels for each point in mesh. Use last trained model.
Z = kmeans.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure(1)
plt.clf()
plt.imshow(Z,
interpolation='nearest',
extent=(xx.min(), xx.max(), yy.min(), yy.max()),
cmap=plt.cm.Paired,
aspect='auto',
origin='lower')
plt.plot(reduced_data[:, 0], reduced_data[:, 1], 'k.', markersize=2)
# Plot the centroids as a white X
centroids = kmeans.cluster_centers_
plt.scatter(centroids[:, 0], centroids[:, 1],
marker='x', s=169, linewidths=3,
color='w', zorder=10)
plt.title('K-means clustering on the kaggle_Interests_group dataset (PCA-reduced data)\n'
'Centroids are marked with white cross')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
authors: ["[email protected]"] | author_id: null

blob_id: 74f80006fb22a26cc258bf1607e458416ed5a188 | directory_id: a34e3d435f48ef87477d3ae13ca8a43015e5052c | path: /fw2/z.py | content_id: 8986497000a50feaecab9d3e7771b510e72b2e36
detected_licenses: [] | license_type: no_license | repo_name: haehn/sandbox | snapshot_id: 636069372fc7bb7fd72b5fde302f42b815e8e9b0 | revision_id: e49a0a30a1811adb73577ff697d81db16ca82808 | branch_name: refs/heads/master
visit_date: 2021-01-22T03:39:03.415863 | revision_date: 2015-02-11T23:16:22 | committer_date: 2015-02-11T23:16:22 | github_id: 26,128,048 | star_events_count: 1 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 561 | extension: py
content:
#!/usr/bin/env python
import os
import sys

import _zstack


def print_help(script_name):
    '''Print usage information.'''
    description = ''
    print(description)
    print()
    print('Usage: ' + script_name + ' INPUT_DIRECTORY')
    print()

#
# entry point
#
if __name__ == "__main__":
    # always show the help if no arguments were specified
    if len(sys.argv) < 2:
        print_help(sys.argv[0])
        sys.exit(1)

    input_dir = sys.argv[1]

    # hand the input directory to the manager, then expose it through the web server
    manager = _zstack.Manager(input_dir)
    manager.start()
    webserver = _zstack.WebServer(manager)
    webserver.start()
authors: ["[email protected]"] | author_id: null

blob_id: 84b57fa7d45db79a5a17f56d0c3453631c7fc231 | directory_id: ab39a61ff9882e8b06ea6f0d2939dbccb7b75bd5 | path: /src/views.py | content_id: 1c48266966e228cc0569f6405f284cbb62f5bc27
detected_licenses: [] | license_type: no_license | repo_name: ShipraShalini/NotificationCenter | snapshot_id: 1be9c0774bfce2d75f3c31bef852150d7cb60281 | revision_id: e544a53645b411977c19c7317b80cefe35e8f720 | branch_name: refs/heads/master
visit_date: 2021-01-19T21:40:35.884152 | revision_date: 2017-02-26T19:06:43 | committer_date: 2017-02-26T19:06:43 | github_id: 82,533,008 | star_events_count: 0 | fork_events_count: 0 | gha_license_id: null | gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,289 | extension: py
content:
from apscheduler.jobstores.base import JobLookupError
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.views import View
from django.views.generic import FormView
from src.forms import NotificationForm, ModifyNotificationForm
from src.notification_utils import get_notifications
class NotificationView(FormView):
template_name = 'notification_form.html'
form_class = NotificationForm
def form_valid(self, form):
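        # Runs only when the submitted form validates; the form itself schedules the job.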
job_id = form.schedule_notification()
return render_to_response("success.html", context={"job_id_scheduled": job_id})
class ModifyNotificationView(FormView):
template_name = 'notification_form.html'
form_class = ModifyNotificationForm
def form_valid(self, form):
try:
job_id, action = form.modify()
        except JobLookupError as e:
            return render_to_response("error.html", context={"etype": e.__class__.__name__, "message": str(e)})
return render_to_response("success.html", context={"job_id_modified": job_id, "action": action})
class ListNotificationView(View):
http_method_names = ['get']
def get(self, request):
job_list = get_notifications()
return HttpResponse(job_list, content_type='application/json')
authors: ["[email protected]"] | author_id: null

blob_id: 4aab842ed7919e90fdd2d4fae1bab975297d211b | directory_id: fdb9bdc6c4ab2f14ba71e544493706d5e275899f | path: /fhir/resources/tests/test_questionnaireresponse.py | content_id: f4c0b5b5d25ac332f7ef4b157ddba01873270d93
detected_licenses: ["BSD-3-Clause"] | license_type: permissive | repo_name: nazrulworld/fhir.resources | snapshot_id: 6ae8aea8180c611b0c5050759c6dcdf63e4cb061 | revision_id: 1fd6ea476b27b3fcb8c4ef8f23bc51cf161e69e3 | branch_name: refs/heads/main
visit_date: 2023-08-30T18:27:27.277249 | revision_date: 2023-07-03T19:57:06 | committer_date: 2023-07-03T19:57:06 | github_id: 165,297,877 | star_events_count: 256 | fork_events_count: 83 | gha_license_id: NOASSERTION | gha_event_created_at: 2023-08-24T15:34:05 | gha_created_at: 2019-01-11T19:26:41 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 69,438 | extension: py
content:
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/QuestionnaireResponse
Release: R5
Version: 5.0.0
Build ID: 2aecd53
Last updated: 2023-03-26T15:21:02.749+11:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from .. import fhirtypes # noqa: F401
from .. import questionnaireresponse
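# Each impl_* helper bundles the assertions for one example resource so the same
# checks can run on the instance parsed from disk and on its dict round-trip.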
def impl_questionnaireresponse_1(inst):
assert inst.author.reference == "#questauth"
assert inst.authored == fhirtypes.DateTime.validate("2013-02-19T14:15:00-05:00")
assert inst.basedOn[0].reference == "#order"
assert inst.contained[0].id == "patsub"
assert inst.contained[1].id == "order"
assert inst.contained[2].id == "questauth"
assert inst.encounter.reference == "Encounter/example"
assert inst.id == "3141"
assert (
inst.identifier[0].system
== "http://example.org/fhir/NamingSystem/questionnaire-ids"
)
assert inst.identifier[0].value == "Q12349876"
assert (
inst.item[0].item[0].answer[0].item[0].item[0].answer[0].valueCoding.code == "1"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[0].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[0].linkId == "1.1.1.1"
assert (
inst.item[0].item[0].answer[0].item[0].item[1].answer[0].valueCoding.code == "1"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[1].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[1].linkId == "1.1.1.2"
assert (
inst.item[0].item[0].answer[0].item[0].item[2].answer[0].valueCoding.code == "0"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[2].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[2].linkId == "1.1.1.3"
assert inst.item[0].item[0].answer[0].item[0].linkId == "1.1.1"
assert inst.item[0].item[0].answer[0].valueCoding.code == "1"
assert inst.item[0].item[0].answer[0].valueCoding.display == "Yes"
assert (
inst.item[0].item[0].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].linkId == "1.1"
assert inst.item[0].linkId == "1"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.partOf[0].reference == "Procedure/f201"
assert inst.questionnaire == "urn:uuid:95eaedf7-8a24-478a-8300-39acc44c746b"
assert inst.status == "completed"
assert inst.subject.reference == "#patsub"
assert inst.text.status == "generated"
def test_questionnaireresponse_1(base_settings):
"""No. 1 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example.json
"""
filename = base_settings["unittest_data_dir"] / "questionnaireresponse-example.json"
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_1(inst)
# testing reverse by generating data from itself and create again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_1(inst2)
def impl_questionnaireresponse_2(inst):
assert inst.authored == fhirtypes.DateTime.validate("2008-01-17")
assert inst.id == "ussg-fht-answers"
assert inst.item[0].item[0].answer[0].valueDate == fhirtypes.Date.validate(
"2008-01-17"
)
assert inst.item[0].item[0].linkId == "0.1"
assert inst.item[0].item[0].text == "Date Done"
assert inst.item[0].linkId == "0"
assert inst.item[1].definition == "http://loinc.org/fhir/DataElement/54126-8"
assert inst.item[1].item[0].item[0].answer[0].valueString == "Annie Proband"
assert (
inst.item[1].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54125-0"
)
assert inst.item[1].item[0].item[0].linkId == "1.1.1"
assert inst.item[1].item[0].item[0].text == "Name"
assert inst.item[1].item[0].item[1].answer[0].valueCoding.code == "LA3-6"
assert inst.item[1].item[0].item[1].answer[0].valueCoding.display == "Female"
assert (
inst.item[1].item[0].item[1].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54131-8"
)
assert inst.item[1].item[0].item[1].linkId == "1.1.2"
assert inst.item[1].item[0].item[1].text == "Gender"
assert inst.item[1].item[0].item[2].answer[0].valueDate == fhirtypes.Date.validate(
"1966-04-04"
)
assert (
inst.item[1].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/21112-8"
)
assert inst.item[1].item[0].item[2].linkId == "1.1.3"
assert inst.item[1].item[0].item[2].text == "Date of Birth"
assert inst.item[1].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[3].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54132-6"
)
assert inst.item[1].item[0].item[3].linkId == "1.1.4"
assert inst.item[1].item[0].item[3].text == "Were you born a twin?"
assert inst.item[1].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[4].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54128-4"
)
assert inst.item[1].item[0].item[4].linkId == "1.1.5"
assert inst.item[1].item[0].item[4].text == "Were you adopted?"
assert inst.item[1].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[5].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54135-9"
)
assert inst.item[1].item[0].item[5].linkId == "1.1.6"
assert inst.item[1].item[0].item[5].text == (
"Are your parents related to each other in any way other than" " marriage?"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "[in_i]"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "inches"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://unitsofmeasure.org"
)
assert inst.item[1].item[0].item[6].answer[0].item[0].item[0].linkId == "1.1.7.1.1"
assert inst.item[1].item[0].item[6].answer[0].item[0].item[0].text == "Units"
assert inst.item[1].item[0].item[6].answer[0].item[0].linkId == "1.1.7.1"
assert float(inst.item[1].item[0].item[6].answer[0].valueDecimal) == float(63)
assert (
inst.item[1].item[0].item[6].definition
== "http://loinc.org/fhir/DataElement/8302-2"
)
assert inst.item[1].item[0].item[6].linkId == "1.1.7"
assert inst.item[1].item[0].item[6].text == "Height"
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "[lb_av]"
)
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "pounds"
)
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://unitsofmeasure.org"
)
assert inst.item[1].item[0].item[7].answer[0].item[0].item[0].linkId == "1.1.8.1.1"
assert inst.item[1].item[0].item[7].answer[0].item[0].item[0].text == "Units"
assert inst.item[1].item[0].item[7].answer[0].item[0].linkId == "1.1.8.1"
assert float(inst.item[1].item[0].item[7].answer[0].valueDecimal) == float(127)
assert (
inst.item[1].item[0].item[7].definition
== "http://loinc.org/fhir/DataElement/29463-7"
)
assert inst.item[1].item[0].item[7].linkId == "1.1.8"
assert inst.item[1].item[0].item[7].text == "Weight"
assert float(inst.item[1].item[0].item[8].answer[0].valueDecimal) == float(22.5)
assert (
inst.item[1].item[0].item[8].definition
== "http://loinc.org/fhir/DataElement/39156-5"
)
assert inst.item[1].item[0].item[8].linkId == "1.1.9"
assert inst.item[1].item[0].item[8].text == "Body mass index (BMI) [Ratio]"
assert inst.item[1].item[0].item[9].answer[0].valueCoding.code == "LA4457-3"
assert inst.item[1].item[0].item[9].answer[0].valueCoding.display == "White"
assert (
inst.item[1].item[0].item[9].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[9].definition
== "http://loinc.org/fhir/DataElement/54134-2"
)
assert inst.item[1].item[0].item[9].linkId == "1.1.10"
assert inst.item[1].item[0].item[9].text == "Race"
assert inst.item[1].item[0].linkId == "1.1"
assert inst.item[1].linkId == "1"
assert inst.item[1].text == "Your health information"
assert inst.item[2].definition == "http://loinc.org/fhir/DataElement/54114-4"
assert inst.item[2].item[0].item[0].answer[0].valueCoding.code == "LA10405-1"
assert inst.item[2].item[0].item[0].answer[0].valueCoding.display == "Daughter"
assert (
inst.item[2].item[0].item[0].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[0].item[1].answer[0].valueString == "Susan"
assert (
inst.item[2].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[0].item[1].text == "Name"
assert inst.item[2].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert inst.item[2].item[0].item[2].answer[0].valueCoding.display == "Female"
assert (
inst.item[2].item[0].item[2].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[0].item[2].text == "Gender"
assert float(
inst.item[2].item[0].item[3].answer[0].item[0].item[0].answer[0].valueDecimal
) == float(17)
assert (
inst.item[2].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[0].item[3].answer[0].item[0].item[0].linkId == "2.1.1.4.2.2"
)
assert inst.item[2].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[0].item[3].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[0].item[3].text == "Living?"
assert inst.item[2].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[0].item[4].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[0].item[5].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[0].linkId == "2.1"
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.code == "LA10415-0"
)
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.display == "Brother"
)
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[1].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[1].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[1].item[0].item[1].answer[0].valueString == "Brian"
assert (
inst.item[2].item[1].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[1].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[1].item[0].item[1].text == "Name"
assert inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[1].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[1].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[1]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(32)
assert (
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[1].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[1].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[1].item[0].item[3].text == "Living?"
assert inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[1].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[1].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[1].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[1].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[1].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.code == "LA10550-4"
)
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.display
== "-- Other Cancer"
)
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[1].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[1].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.code == "LA10397-0"
)
assert inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.display == "30-39"
assert (
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[1].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[1].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[1].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[1].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[1].linkId == "2.1"
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.code == "LA10418-4"
)
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.display == "Sister"
)
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[2].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[2].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[2].item[0].item[1].answer[0].valueString == "Janet"
assert (
inst.item[2].item[2].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[2].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[2].item[0].item[1].text == "Name"
assert inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[2].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[2].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[2]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(36)
assert (
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[2].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[2].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[2].item[0].item[3].text == "Living?"
assert inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[2].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[2].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[2].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[2].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[2].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.code == "LA10536-3"
)
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.display
== "-- Breast Cancer"
)
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[2].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[2].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.code == "LA10397-0"
)
assert inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.display == "30-39"
assert (
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[2].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[2].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[2].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[2].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[2].linkId == "2.1"
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.code == "LA10419-2"
)
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.display == "Nephew"
)
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[3].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[3].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[3].item[0].item[1].answer[0].valueString == "Ian"
assert (
inst.item[2].item[3].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[3].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[3].item[0].item[1].text == "Name"
assert inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[3].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[3].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[3]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(16)
assert (
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[3].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[3].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[3].item[0].item[3].text == "Living?"
assert inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[3].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[3].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[3].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[3].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[3].item[0].linkId == "2.1.1"
assert inst.item[2].item[3].linkId == "2.1"
assert (
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.code == "LA10420-0"
)
assert inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.display == "Niece"
assert (
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[4].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[4].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[4].item[0].item[1].answer[0].valueString == "Helen"
assert (
inst.item[2].item[4].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[4].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[4].item[0].item[1].text == "Name"
assert inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[4].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[4].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[4]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(15)
assert (
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[4].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[4].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[4].item[0].item[3].text == "Living?"
assert inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[4].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[4].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[4].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[4].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[4].item[0].linkId == "2.1.1"
assert inst.item[2].item[4].linkId == "2.1"
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.code == "LA10416-8"
)
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.display == "Father"
)
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[5].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[5].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[5].item[0].item[1].answer[0].valueString == "Donald"
assert (
inst.item[2].item[5].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[5].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[5].item[0].item[1].text == "Name"
assert inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[5].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[5].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[5]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(52)
assert (
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[5].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[5].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[5].item[0].item[3].text == "Living?"
assert inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[5].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[5].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[5].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[5].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[5].item[0].linkId == "2.1.1"
assert inst.item[2].item[5].linkId == "2.1"
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.code == "LA10425-9"
)
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.display
== "Paternal Uncle"
)
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[6].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[6].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[6].item[0].item[1].answer[0].valueString == "Eric"
assert (
inst.item[2].item[6].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[6].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[6].item[0].item[1].text == "Name"
assert inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[6].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[6].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[6]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(56)
assert (
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[6].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[6].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[6].item[0].item[3].text == "Living?"
assert inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[6].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[6].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[6].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[6].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[6].item[0].linkId == "2.1.1"
assert inst.item[2].item[6].linkId == "2.1"
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.code == "LA10421-8"
)
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.display
== "Paternal Aunt"
)
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[7].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[7].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[7].item[0].item[1].answer[0].valueString == "Fiona"
assert (
inst.item[2].item[7].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[7].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[7].item[0].item[1].text == "Name"
assert inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[7].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[7].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[7]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(57)
assert (
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[7].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[7].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[7].item[0].item[3].text == "Living?"
assert inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[7].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[7].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[7].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[7].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[7].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.code == "LA10543-9"
)
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.display
== "-- Skin Cancer"
)
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[7].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[7].item[1].item[0].text == "Disease or Condition"
assert inst.item[2].item[7].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[7].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[7].linkId == "2.1"
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.code == "LA10423-4"
)
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.display
== "Paternal Grandfather"
)
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[8].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[8].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[8].item[0].item[1].answer[0].valueString == "Bob"
assert (
inst.item[2].item[8].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[8].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[8].item[0].item[1].text == "Name"
assert inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[8].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[8].item[0].item[2].text == "Gender"
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "LA10537-1"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "-- Colon Cancer"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54112-8"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.1.1"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].text
== "Cause of Death"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
== "LA10400-2"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
== "OVER 60"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54113-6"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].linkId
== "2.1.1.4.1.2"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].text
== "Age at Death"
)
assert inst.item[2].item[8].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.1"
assert inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[8].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[8].item[0].item[3].text == "Living?"
assert inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[8].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[8].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[8].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[8].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[8].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.code == "LA10537-1"
)
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.display
== "-- Colon Cancer"
)
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[8].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[8].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.code == "LA10400-2"
)
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.display == "OVER 60"
)
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[8].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[8].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[8].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[8].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[8].linkId == "2.1"
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.code == "LA10424-2"
)
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.display
== "Paternal Grandmother"
)
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[9].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[9].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[9].item[0].item[1].answer[0].valueString == "Claire"
assert (
inst.item[2].item[9].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[9].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[9].item[0].item[1].text == "Name"
assert inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[9].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[9].item[0].item[2].text == "Gender"
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.answer[0]
.valueString
== "Lou Gehrigs"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.linkId
== "2.1.1.4.1.1.1"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.text
== "Please specify"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "LA10589-2"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "-- Other/Unexpected"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54112-8"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.1.1"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].text
== "Cause of Death"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
== "LA10400-2"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
== "OVER 60"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54113-6"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].linkId
== "2.1.1.4.1.2"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].text
== "Age at Death"
)
assert inst.item[2].item[9].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.1"
assert inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[9].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[9].item[0].item[3].text == "Living?"
assert inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[9].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[9].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[9].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[9].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[9].item[0].linkId == "2.1.1"
assert inst.item[2].item[9].linkId == "2.1"
assert inst.item[2].linkId == "2"
assert inst.item[2].text == "Family member health information"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.questionnaire == "http://hl7.org/fhir/Questionnaire/ussg-fht"
assert inst.status == "in-progress"
assert inst.subject.reference == "http://hl7.org/fhir/Patient/proband"
assert inst.subject.type == "Patient"
assert inst.text.status == "generated"
def test_questionnaireresponse_2(base_settings):
"""No. 2 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-ussg-fht-answers.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-ussg-fht-answers.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_2(inst)
    # Round-trip test: serialize the instance to a dict and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_2(inst2)
def impl_questionnaireresponse_3(inst):
assert inst.author.reference == "Practitioner/f201"
assert inst.authored == fhirtypes.DateTime.validate("2013-06-18T00:00:00+01:00")
assert inst.id == "f201"
assert inst.item[0].answer[0].valueBoolean is True
assert inst.item[0].linkId == "1"
assert inst.item[0].text == "Do you have allergies?"
assert inst.item[1].item[0].answer[0].valueString == "Male"
assert inst.item[1].item[0].linkId == "2.1"
assert inst.item[1].item[0].text == "What is your gender?"
assert inst.item[1].item[1].answer[0].valueDate == fhirtypes.Date.validate(
"1960-03-13"
)
assert inst.item[1].item[1].linkId == "2.2"
assert inst.item[1].item[1].text == "What is your date of birth?"
assert inst.item[1].item[2].answer[0].valueString == "The Netherlands"
assert inst.item[1].item[2].linkId == "2.3"
assert inst.item[1].item[2].text == "What is your country of birth?"
assert inst.item[1].item[3].answer[0].valueString == "married"
assert inst.item[1].item[3].linkId == "2.4"
assert inst.item[1].item[3].text == "What is your marital status?"
assert inst.item[1].linkId == "2"
assert inst.item[1].text == "General questions"
assert inst.item[2].item[0].answer[0].valueBoolean is False
assert inst.item[2].item[0].linkId == "3.1"
assert inst.item[2].item[0].text == "Do you smoke?"
assert inst.item[2].item[1].answer[0].valueBoolean is False
assert inst.item[2].item[1].linkId == "3.2"
assert inst.item[2].item[1].text == "Do you drink alchohol?"
assert inst.item[2].linkId == "3"
assert inst.item[2].text == "Intoxications"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.source.reference == "Practitioner/f201"
assert inst.status == "completed"
assert inst.subject.display == "Roel"
assert inst.subject.reference == "Patient/f201"
assert inst.text.status == "generated"
def test_questionnaireresponse_3(base_settings):
"""No. 3 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-f201-lifelines.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-f201-lifelines.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_3(inst)
    # Round-trip test: serialize the instance to a dict and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_3(inst2)
def impl_questionnaireresponse_4(inst):
assert inst.author.reference == "http://hl7.org/fhir/Practitioner/example"
assert inst.author.type == "Practitioner"
assert inst.authored == fhirtypes.DateTime.validate("2013-02-19T14:15:00+10:00")
assert inst.id == "bb"
assert inst.item[0].item[0].item[0].answer[0].valueString == "Cathy Jones"
assert inst.item[0].item[0].item[0].linkId == "nameOfChild"
assert inst.item[0].item[0].item[0].text == "Name of child"
assert inst.item[0].item[0].item[1].answer[0].valueCoding.code == "F"
assert inst.item[0].item[0].item[1].linkId == "sex"
assert inst.item[0].item[0].item[1].text == "Sex"
assert inst.item[0].item[0].linkId == "group"
assert float(inst.item[0].item[1].item[0].answer[0].valueDecimal) == float(3.25)
assert inst.item[0].item[1].item[0].linkId == "birthWeight"
assert inst.item[0].item[1].item[0].text == "Birth weight (kg)"
assert float(inst.item[0].item[1].item[1].answer[0].valueDecimal) == float(44.3)
assert inst.item[0].item[1].item[1].linkId == "birthLength"
assert inst.item[0].item[1].item[1].text == "Birth length (cm)"
assert inst.item[0].item[1].item[2].answer[0].item[0].item[0].answer[
0
].valueDateTime == fhirtypes.DateTime.validate("1972-11-30")
assert (
inst.item[0].item[1].item[2].answer[0].item[0].item[0].linkId == "vitaminKDose1"
)
assert inst.item[0].item[1].item[2].answer[0].item[0].item[0].text == "1st dose"
assert inst.item[0].item[1].item[2].answer[0].item[0].item[1].answer[
0
].valueDateTime == fhirtypes.DateTime.validate("1972-12-11")
assert (
inst.item[0].item[1].item[2].answer[0].item[0].item[1].linkId == "vitaminKDose2"
)
assert inst.item[0].item[1].item[2].answer[0].item[0].item[1].text == "2nd dose"
assert inst.item[0].item[1].item[2].answer[0].item[0].linkId == "vitaminKgivenDoses"
assert inst.item[0].item[1].item[2].answer[0].valueCoding.code == "INJECTION"
assert inst.item[0].item[1].item[2].linkId == "vitaminKgiven"
assert inst.item[0].item[1].item[2].text == "Vitamin K given"
assert inst.item[0].item[1].item[3].answer[0].item[0].answer[
0
].valueDate == fhirtypes.Date.validate("1972-12-04")
assert inst.item[0].item[1].item[3].answer[0].item[0].linkId == "hepBgivenDate"
assert inst.item[0].item[1].item[3].answer[0].item[0].text == "Date given"
assert inst.item[0].item[1].item[3].answer[0].valueBoolean is True
assert inst.item[0].item[1].item[3].linkId == "hepBgiven"
assert inst.item[0].item[1].item[3].text == "Hep B given y / n"
assert (
inst.item[0].item[1].item[4].answer[0].valueString
== "Already able to speak Chinese"
)
assert inst.item[0].item[1].item[4].linkId == "abnormalitiesAtBirth"
assert inst.item[0].item[1].item[4].text == "Abnormalities noted at birth"
assert inst.item[0].item[1].linkId == "neonatalInformation"
assert inst.item[0].item[1].text == "Neonatal Information"
assert inst.item[0].linkId == "birthDetails"
assert inst.item[0].text == "Birth details - To be completed by health professional"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.questionnaire == "http://hl7.org/fhir/Questionnaire/bb"
assert inst.status == "completed"
assert inst.subject.reference == "http://hl7.org/fhir/Patient/1"
assert inst.subject.type == "Patient"
assert inst.text.status == "generated"
def test_questionnaireresponse_4(base_settings):
"""No. 4 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-bluebook.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-bluebook.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_4(inst)
    # Round-trip test: serialize the instance to a dict and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_4(inst2)
def impl_questionnaireresponse_5(inst):
assert inst.authored == fhirtypes.DateTime.validate("2014-12-11T04:44:16Z")
assert inst.id == "gcs"
assert inst.item[0].answer[0].valueCoding.code == "LA6560-2"
assert inst.item[0].answer[0].valueCoding.display == "Confused"
assert (
inst.item[0].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/itemWeight"
)
assert float(inst.item[0].answer[0].valueCoding.extension[0].valueDecimal) == float(
4
)
assert inst.item[0].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[0].linkId == "1.1"
assert inst.item[1].answer[0].valueCoding.code == "LA6566-9"
assert inst.item[1].answer[0].valueCoding.display == "Localizing pain"
assert (
inst.item[1].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/itemWeight"
)
assert float(inst.item[1].answer[0].valueCoding.extension[0].valueDecimal) == float(
5
)
assert inst.item[1].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[1].linkId == "1.2"
assert inst.item[2].answer[0].valueCoding.code == "LA6556-0"
assert inst.item[2].answer[0].valueCoding.display == "Eyes open spontaneously"
assert (
inst.item[2].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/itemWeight"
)
assert float(inst.item[2].answer[0].valueCoding.extension[0].valueDecimal) == float(
4
)
assert inst.item[2].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[2].linkId == "1.3"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.questionnaire == "http://hl7.org/fhir/Questionnaire/gcs"
assert inst.source.reference == "Practitioner/f007"
assert inst.status == "completed"
assert inst.subject.display == "Peter James Chalmers"
assert inst.subject.reference == "Patient/example"
assert inst.text.status == "generated"
def test_questionnaireresponse_5(base_settings):
"""No. 5 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-gcs.json
"""
filename = (
base_settings["unittest_data_dir"] / "questionnaireresponse-example-gcs.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_5(inst)
    # Round-trip test: serialize the instance to a dict and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_5(inst2)
|
[
"[email protected]"
] | |
a70e6430dc82722e95b7aa187dba90f4f65b2478
|
d860a2c1fa8fffc76a9101e4f91cecc80c27e802
|
/leetcode/388_Longest_Absolute_File_Path.py
|
b5c85ee31daee9b4d6fef541d42bacebabeddfca
|
[] |
no_license
|
heroming/algorithm
|
80ea8f00ac049b0bc815140253568484e49c39e3
|
18e510f02bff92bc45cceb7090a79fbd40c209ec
|
refs/heads/master
| 2021-01-19T01:27:31.676356 | 2019-06-09T08:51:16 | 2019-06-09T08:51:16 | 62,952,889 | 3 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,833 |
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
def show(self) :
it, v = self, []
while it :
v.append(it.val)
it = it.next
        print(v)
class TreeNode(object):
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class TreeOrder(object):
def preorder(self, root) :
ans = []
self.dfs(root, ans, 0)
        print(ans)
def inorder(self, root) :
ans = []
self.dfs(root, ans, 1)
        print(ans)
    def postorder(self, root) :
ans = []
self.dfs(root, ans, 2)
        print(ans)
def dfs(self, root, ans, flag) :
if root == None : return
if (flag == 0) : ans.append(root.val)
self.dfs(root.left, ans, flag)
if (flag == 1) : ans.append(root.val)
self.dfs(root.right, ans, flag)
if (flag == 2) : ans.append(root.val)
class TrieNode(object) :
n = 26
def __init__(self) :
self.finish = False
self.data = [None for i in range(self.n)]
class MinHeap(object) :
def __init__(self) :
self.data = []
def heapify(self, k) :
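        # Sift the element at index k down: swap it with its smaller child
        # (children of k live at 2k+1 and 2k+2) until the min-heap property holds.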
while True :
idx = k
l, r = (k << 1) | 1, (k + 1) << 1
if l < len(self.data) and self.data[l] < self.data[idx] : idx = l
if r < len(self.data) and self.data[r] < self.data[idx] : idx = r
if idx == k : break
self.data[k], self.data[idx] = self.data[idx], self.data[k]
k = idx
def push(self, x) :
k = len(self.data)
self.data.append(x)
while k > 0 :
p = (k - 1) >> 1
if self.data[p] <= self.data[k] : break
self.data[p], self.data[k] = self.data[k], self.data[p]
k = p
def top(self) :
if not self.data : return None
return self.data[0]
def pop(self) :
if not self.data : return None
ret = self.data[0]
self.data[0] = self.data[-1]
self.data.pop()
self.heapify(0)
return ret
    def size(self) :
return len(self.data)
def isEmpty(self) :
return len(self.data) == 0
class Solution(object) :
def lengthLongestPath(self, s) :
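        # Keep a stack `v` of path components on the current branch and `cnt`,
        # the total length of those components. A line's depth is its tab
        # count: pop back to that depth, push the new component, and when the
        # component contains '.' treat it as a file whose full path length is
        # cnt plus the len(v) - 1 separators.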
v = []
ans, cnt = 0, 0
t = s.split("\n")
for x in t :
k = x.count('\t')
while k < len(v) :
cnt -= len(v[-1])
v.pop()
v.append(x[k:])
cnt += len(v[-1])
if '.' in v[-1] : ans = max(ans, cnt + len(v) - 1)
return ans
if __name__ == '__main__' :
so = Solution()
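    # Example from LeetCode 388 -- the longest path is "dir/subdir2/file.ext":
    print(so.lengthLongestPath("dir\n\tsubdir1\n\tsubdir2\n\t\tfile.ext"))  # 20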
|
[
"[email protected]"
] | |
3b7225d69c4c077284ccdc74dae85cbbc5634161
|
37530ffbb3d14cc9c7307fb4b1a276d9d26516dc
|
/api/migrations/0015_auto_20180831_1847.py
|
cf66e98858b6394528d104f6c5617359338775ad
|
[] |
no_license
|
aballah-chamakh/Repair-api
|
567a4f81107569bec67072fa598aa76384343752
|
22574a9b69df8d62c5f027146fc21d6abf4b245a
|
refs/heads/master
| 2020-04-10T12:46:32.309178 | 2018-12-09T11:20:07 | 2018-12-09T11:20:07 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,494 |
py
|
# Generated by Django 2.0.7 on 2018-08-31 16:47
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('api', '0014_auto_20180630_0054'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AlterField(
model_name='offer',
name='categorie',
            field=models.CharField(choices=[('web development', 'Web development'), ('all categories', 'All categories'), ('web design', 'Web design')], max_length=30),
),
migrations.AlterField(
model_name='profile',
name='city',
field=models.CharField(choices=[('bizert', 'Bizert'), ('city', 'City'), ('ariana', 'Ariana'), ('tunis', 'Tunis'), ('sfax', 'Sfax')], max_length=30),
),
migrations.AddField(
model_name='comment',
name='product',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.Offer'),
),
]
|
[
"[email protected]"
] | |
2aa60f59119f97962eebc18716cb636a07ebabe5
|
255e19ddc1bcde0d3d4fe70e01cec9bb724979c9
|
/dockerized-gists/dc3335ee46ab9f650b19885e8ade6c7a/snippet.py
|
cc0b25145b92da727de388094323b861968c2325
|
[
"MIT"
] |
permissive
|
gistable/gistable
|
26c1e909928ec463026811f69b61619b62f14721
|
665d39a2bd82543d5196555f0801ef8fd4a3ee48
|
refs/heads/master
| 2023-02-17T21:33:55.558398 | 2023-02-11T18:20:10 | 2023-02-11T18:20:10 | 119,861,038 | 76 | 19 | null | 2020-07-26T03:14:55 | 2018-02-01T16:19:24 |
Python
|
UTF-8
|
Python
| false | false | 1,929 |
py
|
#!/bin/python
from flashtext.keyword import KeywordProcessor
import random
import string
import re
import time
def get_word_of_length(str_length):
# generate a random word of given length
return ''.join(random.choice(string.ascii_lowercase) for _ in range(str_length))
# generate a list of 100K words of randomly chosen size
all_words = [get_word_of_length(random.choice([3, 4, 5, 6, 7, 8])) for i in range(100000)]
print('Count | FlashText | Regex ')
print('-------------------------------')
for keywords_length in range(1, 20002, 1000):
# chose 5000 terms and create a string to search in.
all_words_chosen = random.sample(all_words, 5000)
story = ' '.join(all_words_chosen)
# get unique keywords from the list of words generated.
unique_keywords_sublist = list(set(random.sample(all_words, keywords_length)))
# compile regex
# source: https://stackoverflow.com/questions/6116978/python-replace-multiple-strings
rep = dict([(key, '_keyword_') for key in unique_keywords_sublist])
compiled_re = re.compile("|".join(rep.keys()))
# add keywords to flashtext
keyword_processor = KeywordProcessor()
for keyword in unique_keywords_sublist:
keyword_processor.add_keyword(keyword, '_keyword_')
# time the modules
start = time.time()
_ = keyword_processor.replace_keywords(story)
mid = time.time()
_ = compiled_re.sub(lambda m: rep[re.escape(m.group(0))], story)
end = time.time()
# print output
print(str(keywords_length).ljust(6), '|',
"{0:.5f}".format(mid - start).ljust(9), '|',
"{0:.5f}".format(end - mid).ljust(9), '|',)
# Count | FlashText | Regex
# -------------------------------
# 1 | 0.02141 | 0.00004 |
# 1001 | 0.02498 | 0.13180 |
# 5001 | 0.03147 | 0.59799 |
# 10001 | 0.02858 | 1.08717 |
# 15001 | 0.02734 | 1.51461 |
# 20001 | 0.03109 | 1.76158 |
|
[
"[email protected]"
] | |
c164d54f8fd02d4a2faf4337c691176cb3244813
|
11d3f0fcf4a968a6b612f2b85d242cbbbabc7e07
|
/services/convert-document/test.py
|
ba6972a0ac5a807125ea05f14912997425a99161
|
[
"MIT"
] |
permissive
|
OpenUpSA/eskom-enquiry
|
21d728e6e7ceceed84fe646a439d9aee8c4222d5
|
71dfa8aa00688a83df24a4afaeb9e5639565bbbb
|
refs/heads/master
| 2023-05-11T11:23:18.561070 | 2018-08-24T12:51:55 | 2018-08-24T12:51:55 | 137,739,474 | 0 | 1 |
MIT
| 2023-05-01T19:47:30 | 2018-06-18T10:40:42 |
Python
|
UTF-8
|
Python
| false | false | 654 |
py
|
import os
import sys
import signal
import requests
from multiprocessing import Pool
signal.signal(signal.SIGINT, signal.SIG_IGN)
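# NOTE: installing SIG_IGN before the Pool is created means both the parent
# and the forked workers ignore Ctrl-C, so the KeyboardInterrupt handler below
# is effectively unreachable; the usual recipe is to pass this call as the
# Pool's initializer so that only the workers inherit it.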
url = os.environ.get('UNOSERVICE_URL')
def request(i):
path = sys.argv[1]
files = {'file': open(path, 'rb')}
data = {'extension': 'docx'}
# print('send request')
res = requests.post(url, files=files, data=data)
# message = res.text if res.status_code != 200 else ''
print(res.status_code, res.content[:20])
# print(res.content == open(path, 'rb').read())
pool = Pool(20)
try:
pool.map(request, range(10000))
except KeyboardInterrupt:
pool.terminate()
pool.join()
# request(5)
|
[
"[email protected]"
] | |
1c7a40531f47be5347ae4c77918c932197355793
|
982fac39f5a2232f3976789bbc265cdc2cc8be08
|
/bqskit/compiler/search/generator.py
|
81d1f4eeac8e6ce93567fb2d735539f9377ff711
|
[
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause"
] |
permissive
|
mtreinish/bqskit
|
8516575eef46241e426f78b96d047987e6c9b2df
|
3083218c2f4e3c3ce4ba027d12caa30c384d7665
|
refs/heads/master
| 2023-08-06T20:11:01.278308 | 2021-08-09T19:55:30 | 2021-08-09T19:55:30 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 884 |
py
|
"""This module implements the LayerGenerator base class."""
from __future__ import annotations
import abc
from typing import Any
from bqskit.ir.circuit import Circuit
from bqskit.qis.state.state import StateVector
from bqskit.qis.unitary.unitarymatrix import UnitaryMatrix
class LayerGenerator(abc.ABC):
"""
The LayerGenerator base class.
Search based synthesis uses the layer generator to generate the root node
and the successors of a node.
"""
@abc.abstractmethod
def gen_initial_layer(
self,
target: UnitaryMatrix | StateVector,
data: dict[str, Any],
) -> Circuit:
"""Generate the initial layer for search."""
@abc.abstractmethod
def gen_successors(
self,
circuit: Circuit,
data: dict[str, Any],
) -> list[Circuit]:
"""Generate the successors of a circuit node."""
|
[
"[email protected]"
] | |
b2b5040251b52f2a7d00a3fcd69aff1df9c96f97
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2408/60648/267250.py
|
42fd2a73853aa61e8e1101e5966296a105b18cc4
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 793 |
py
|
class Solution:
def numPrimeArrangements(self, n: int) -> int:
        # The answer is factorial(sum(prime)) * factorial(n - sum(prime)),
        # i.e. (count of primes <= n)! times (count of non-primes)!.
def countPrimes(n: int) -> int:
if n < 2:
return 0
prime = [1] * (n + 1)
prime[0] = prime[1] = 0
for i in range(2, int(n**0.5) +1):
if prime[i] == 1:
prime[i*i:n + 1:i] = [0]*len(prime[i*i:n + 1:i])
return sum(prime)
def func(n):
if n == 0 or n == 1:
return 1
else:
return (n * func(n - 1))
return func(countPrimes(n)) * func(n - countPrimes(n)) % (10**9 + 7)
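# Worked example (illustrative): for n = 5 the primes are {2, 3, 5} and the
# non-primes {1, 4}, so the answer is 3! * 2! = 12.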
if __name__=="__main__":
s=int(input())
x=Solution().numPrimeArrangements(s)
print(x)
|
[
"[email protected]"
] | |
b09d38372acd5147033c1441a46c1221bd6cffee
|
5442f2f71e36419ad23894d3dd527837029e42f3
|
/ramda/memoize.py
|
90f43e8a162f1d4455b1acae4b5aa05196ba2aa1
|
[
"MIT"
] |
permissive
|
zequequiel/ramda.py
|
5e6a23765598550ecaf1a76b785f4fde7bc11ea4
|
eac054163de535520659ce6269536355d5e89865
|
refs/heads/master
| 2020-09-11T16:43:07.989407 | 2019-09-06T12:07:26 | 2019-09-06T12:10:43 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 533 |
py
|
import hashlib
import json
from ramda.memoize_with import memoize_with
def memoize(f):
"""Creates a new function that, when invoked, caches the result of calling fn
for a given argument set and returns the result. Subsequent calls to the
memoized fn with the same argument set will not result in an additional
call to fn; instead, the cached result for that set of arguments will be
returned"""
def hash(*args):
return hashlib.sha256(json.dumps(args).encode("utf-8")).hexdigest()
return memoize_with(hash, f)
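# Minimal usage sketch (illustrative, not part of the module). Note that the
# sha256-over-json.dumps key only covers JSON-serializable positional args:
#
#   @memoize
#   def slow_square(x):
#       print("computing", x)
#       return x * x
#
#   slow_square(4)  # prints "computing 4" and returns 16
#   slow_square(4)  # returns the cached 16 without recomputing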
|
[
"[email protected]"
] | |
6907cd3956eefc7814e16763b87275820f02964b
|
f0d713996eb095bcdc701f3fab0a8110b8541cbb
|
/xdSKkXQkkMroNzq8C_8.py
|
6a7c8f49d0986abe2427cee6cbaff07659418e65
|
[] |
no_license
|
daniel-reich/turbo-robot
|
feda6c0523bb83ab8954b6d06302bfec5b16ebdf
|
a7a25c63097674c0a81675eed7e6b763785f1c41
|
refs/heads/main
| 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 521 |
py
|
"""
Create a function that counts how many D's are in a sentence.
### Examples
count_d("My friend Dylan got distracted in school.") โ 4
count_d("Debris was scattered all over the yard.") โ 3
count_d("The rodents hibernated in their den.") โ 3
### Notes
* Your function must be case-insensitive.
* Remember to `return` the result.
* Check the **Resources** for help.
"""
def count_d(sentence):
d = 'd'
D = 'D'
count = sentence.count(d) + sentence.count(D)
return count
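# Equivalent one-liner (illustrative): return sentence.lower().count("d")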
|
[
"[email protected]"
] | |
b44d3cbb59f5f3f8a54a12f0b7ce7209ce3bc61c
|
ece0d321e48f182832252b23db1df0c21b78f20c
|
/engine/2.80/scripts/startup/bl_ui/properties_data_curve.py
|
e7f9de5dc201d7f35e1f93372c5733fd3877f555
|
[
"Unlicense",
"GPL-3.0-only",
"Font-exception-2.0",
"GPL-3.0-or-later",
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain-disclaimer",
"Bitstream-Vera",
"LicenseRef-scancode-blender-2010",
"LGPL-2.1-or-later",
"GPL-2.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"PSF-2.0",
"LicenseRef-scancode-free-unknown",
"LicenseRef-scancode-proprietary-license",
"GPL-1.0-or-later",
"BSD-2-Clause"
] |
permissive
|
byteinc/Phasor
|
47d4e48a52fa562dfa1a2dbe493f8ec9e94625b9
|
f7d23a489c2b4bcc3c1961ac955926484ff8b8d9
|
refs/heads/master
| 2022-10-25T17:05:01.585032 | 2019-03-16T19:24:22 | 2019-03-16T19:24:22 | 175,723,233 | 3 | 1 |
Unlicense
| 2022-10-21T07:02:37 | 2019-03-15T00:58:08 |
Python
|
UTF-8
|
Python
| false | false | 14,402 |
py
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Panel
from rna_prop_ui import PropertyPanel
from bpy.types import Curve, SurfaceCurve, TextCurve
class CurveButtonsPanel:
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "data"
@classmethod
def poll(cls, context):
return (context.curve is not None)
class CurveButtonsPanelCurve(CurveButtonsPanel):
@classmethod
def poll(cls, context):
return (type(context.curve) is Curve)
class CurveButtonsPanelSurface(CurveButtonsPanel):
@classmethod
def poll(cls, context):
return (type(context.curve) is SurfaceCurve)
class CurveButtonsPanelText(CurveButtonsPanel):
@classmethod
def poll(cls, context):
return (type(context.curve) is TextCurve)
class CurveButtonsPanelActive(CurveButtonsPanel):
"""Same as above but for curves only"""
@classmethod
def poll(cls, context):
curve = context.curve
return (curve and type(curve) is not TextCurve and curve.splines.active)
class DATA_PT_context_curve(CurveButtonsPanel, Panel):
bl_label = ""
bl_options = {'HIDE_HEADER'}
def draw(self, context):
layout = self.layout
obj = context.object
curve = context.curve
space = context.space_data
if obj:
layout.template_ID(obj, "data")
elif curve:
layout.template_ID(space, "pin_id")
class DATA_PT_shape_curve(CurveButtonsPanel, Panel):
bl_label = "Shape"
def draw(self, context):
layout = self.layout
curve = context.curve
is_surf = type(curve) is SurfaceCurve
is_curve = type(curve) is Curve
is_text = type(curve) is TextCurve
if is_curve:
row = layout.row()
row.prop(curve, "dimensions", expand=True)
layout.use_property_split = True
col = layout.column()
sub = col.column(align=True)
sub.prop(curve, "resolution_u", text="Resolution Preview U")
if is_surf:
sub.prop(curve, "resolution_v", text="V")
sub = col.column(align=True)
sub.prop(curve, "render_resolution_u", text="Render U")
if is_surf:
sub.prop(curve, "render_resolution_v", text="V")
col.separator()
if is_curve:
col.prop(curve, "twist_mode")
col.prop(curve, "twist_smooth", text="Smooth")
elif is_text:
col.prop(curve, "use_fast_edit", text="Fast Editing")
if is_curve or is_text:
col = layout.column()
col.separator()
sub = col.column()
sub.active = (curve.dimensions == '2D' or (curve.bevel_object is None and curve.dimensions == '3D'))
sub.prop(curve, "fill_mode")
col.prop(curve, "use_fill_deform")
if is_curve:
col = layout.column()
col.separator()
sub = col.column()
sub.prop(curve, "use_radius")
sub.prop(curve, "use_stretch")
sub.prop(curve, "use_deform_bounds")
class DATA_PT_curve_texture_space(CurveButtonsPanel, Panel):
bl_label = "Texture Space"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_EEVEE', 'BLENDER_WORKBENCH'}
def draw(self, context):
layout = self.layout
layout.use_property_split = True
curve = context.curve
col = layout.column()
col.prop(curve, "use_uv_as_generated")
col.prop(curve, "use_auto_texspace")
col = layout.column()
col.prop(curve, "texspace_location")
col.prop(curve, "texspace_size")
layout.operator("curve.match_texture_space")
class DATA_PT_geometry_curve(CurveButtonsPanelCurve, Panel):
bl_label = "Geometry"
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
return (type(context.curve) in {Curve, TextCurve})
def draw(self, context):
layout = self.layout
layout.use_property_split = True
curve = context.curve
col = layout.column()
col.prop(curve, "offset")
sub = col.column()
sub.active = (curve.bevel_object is None)
sub.prop(curve, "extrude")
col.prop(curve, "taper_object")
sub = col.column()
sub.active = curve.taper_object is not None
sub.prop(curve, "use_map_taper")
class DATA_PT_geometry_curve_bevel(CurveButtonsPanelCurve, Panel):
bl_label = "Bevel"
bl_parent_id = "DATA_PT_geometry_curve"
@classmethod
def poll(cls, context):
return (type(context.curve) in {Curve, TextCurve})
def draw(self, context):
layout = self.layout
layout.use_property_split = True
curve = context.curve
col = layout.column()
sub = col.column()
sub.active = (curve.bevel_object is None)
sub.prop(curve, "bevel_depth", text="Depth")
sub.prop(curve, "bevel_resolution", text="Resolution")
col.prop(curve, "bevel_object", text="Object")
sub = col.column()
sub.active = curve.bevel_object is not None
sub.prop(curve, "use_fill_caps")
if type(curve) is not TextCurve:
col = layout.column()
col.active = (
(curve.bevel_depth > 0.0) or
(curve.extrude > 0.0) or
(curve.bevel_object is not None)
)
sub = col.column(align=True)
sub.prop(curve, "bevel_factor_start", text="Bevel Start")
sub.prop(curve, "bevel_factor_end", text="End")
sub = col.column(align=True)
sub.prop(curve, "bevel_factor_mapping_start", text="Bevel Mapping Start")
sub.prop(curve, "bevel_factor_mapping_end", text="End")
class DATA_PT_pathanim(CurveButtonsPanelCurve, Panel):
bl_label = "Path Animation"
bl_options = {'DEFAULT_CLOSED'}
def draw_header(self, context):
curve = context.curve
self.layout.prop(curve, "use_path", text="")
def draw(self, context):
layout = self.layout
layout.use_property_split = True
curve = context.curve
layout.active = curve.use_path
col = layout.column()
col.prop(curve, "path_duration", text="Frames")
col.prop(curve, "eval_time")
# these are for paths only
col.separator()
col.prop(curve, "use_path_follow")
class DATA_PT_active_spline(CurveButtonsPanelActive, Panel):
bl_label = "Active Spline"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
layout.use_property_split = True
curve = context.curve
act_spline = curve.splines.active
is_surf = type(curve) is SurfaceCurve
is_poly = (act_spline.type == 'POLY')
col = layout.column()
if is_poly:
            # These settings are below but it's easier to have
# polys set aside since they use so few settings
col.prop(act_spline, "use_cyclic_u")
col.prop(act_spline, "use_smooth")
else:
sub = col.column(align=True)
sub.prop(act_spline, "use_cyclic_u")
if is_surf:
sub.prop(act_spline, "use_cyclic_v", text="V")
if act_spline.type == 'NURBS':
sub = col.column(align=True)
# sub.active = (not act_spline.use_cyclic_u)
sub.prop(act_spline, "use_bezier_u", text="Bezier U")
if is_surf:
subsub = sub.column()
subsub.active = (not act_spline.use_cyclic_v)
subsub.prop(act_spline, "use_bezier_v", text="V")
sub = col.column(align=True)
sub.prop(act_spline, "use_endpoint_u", text="Endpoint U")
if is_surf:
subsub = sub.column()
subsub.active = (not act_spline.use_cyclic_v)
subsub.prop(act_spline, "use_endpoint_v", text="V")
sub = col.column(align=True)
sub.prop(act_spline, "order_u", text="Order U")
if is_surf:
sub.prop(act_spline, "order_v", text="V")
sub = col.column(align=True)
sub.prop(act_spline, "resolution_u", text="Resolution U")
if is_surf:
sub.prop(act_spline, "resolution_v", text="V")
if act_spline.type == 'BEZIER':
col.separator()
sub = col.column()
sub.active = (curve.dimensions == '3D')
sub.prop(act_spline, "tilt_interpolation", text="Interpolation Tilt")
col.prop(act_spline, "radius_interpolation", text="Radius")
layout.prop(act_spline, "use_smooth")
class DATA_PT_font(CurveButtonsPanelText, Panel):
bl_label = "Font"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
text = context.curve
char = context.curve.edit_format
row = layout.split(factor=0.25)
row.label(text="Regular")
row.template_ID(text, "font", open="font.open", unlink="font.unlink")
row = layout.split(factor=0.25)
row.label(text="Bold")
row.template_ID(text, "font_bold", open="font.open", unlink="font.unlink")
row = layout.split(factor=0.25)
row.label(text="Italic")
row.template_ID(text, "font_italic", open="font.open", unlink="font.unlink")
row = layout.split(factor=0.25)
row.label(text="Bold & Italic")
row.template_ID(text, "font_bold_italic", open="font.open", unlink="font.unlink")
layout.separator()
row = layout.row(align=True)
row.prop(char, "use_bold", toggle=True)
row.prop(char, "use_italic", toggle=True)
row.prop(char, "use_underline", toggle=True)
row.prop(char, "use_small_caps", toggle=True)
class DATA_PT_font_transform(CurveButtonsPanelText, Panel):
bl_label = "Transform"
bl_parent_id = "DATA_PT_font"
def draw(self, context):
layout = self.layout
text = context.curve
layout.use_property_split = True
col = layout.column()
col.separator()
col.prop(text, "size", text="Size")
col.prop(text, "shear")
col.separator()
col.prop(text, "family")
col.prop(text, "follow_curve")
col.separator()
sub = col.column(align=True)
sub.prop(text, "underline_position", text="Underline Position")
sub.prop(text, "underline_height", text="Underline Thickness")
col.prop(text, "small_caps_scale", text="Small Caps Scale")
class DATA_PT_paragraph(CurveButtonsPanelText, Panel):
bl_label = "Paragraph"
def draw(self, context):
# Parent panel
pass
class DATA_PT_paragraph_alignment(CurveButtonsPanelText, Panel):
bl_parent_id = "DATA_PT_paragraph"
bl_label = "Alignment"
def draw(self, context):
layout = self.layout
layout.use_property_split = True
text = context.curve
col = layout.column()
col.prop(text, "align_x", text="Horizontal")
col.prop(text, "align_y", text="Vertical")
class DATA_PT_paragraph_spacing(CurveButtonsPanelText, Panel):
bl_parent_id = "DATA_PT_paragraph"
bl_label = "Spacing"
def draw(self, context):
layout = self.layout
layout.use_property_split = True
text = context.curve
col = layout.column(align=True)
col.prop(text, "space_character", text="Character Spacing")
col.prop(text, "space_word", text="Word Spacing")
col.prop(text, "space_line", text="Line Spacing")
layout.separator()
col = layout.column(align=True)
col.prop(text, "offset_x", text="Offset X")
col.prop(text, "offset_y", text="Y")
class DATA_PT_text_boxes(CurveButtonsPanelText, Panel):
bl_label = "Text Boxes"
bl_options = {'DEFAULT_CLOSED'}
def draw(self, context):
layout = self.layout
text = context.curve
layout.operator("font.textbox_add", icon='ADD')
layout.prop(text, "overflow", text="Overflow")
for i, box in enumerate(text.text_boxes):
boxy = layout.box()
row = boxy.row()
col = row.column()
col.use_property_split = True
sub = col.column(align=True)
sub.prop(box, "width", text="Size X")
sub.prop(box, "height", text="Y")
sub = col.column(align=True)
sub.prop(box, "x", text="Offset X")
sub.prop(box, "y", text="Y")
row.operator("font.textbox_remove", text="", icon='X', emboss=False).index = i
class DATA_PT_custom_props_curve(CurveButtonsPanel, PropertyPanel, Panel):
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_EEVEE', 'BLENDER_WORKBENCH'}
_context_path = "object.data"
_property_type = bpy.types.Curve
classes = (
DATA_PT_context_curve,
DATA_PT_shape_curve,
DATA_PT_curve_texture_space,
DATA_PT_geometry_curve,
DATA_PT_geometry_curve_bevel,
DATA_PT_pathanim,
DATA_PT_active_spline,
DATA_PT_font,
DATA_PT_font_transform,
DATA_PT_paragraph,
DATA_PT_paragraph_alignment,
DATA_PT_paragraph_spacing,
DATA_PT_text_boxes,
DATA_PT_custom_props_curve,
)
if __name__ == "__main__": # only for live edit.
from bpy.utils import register_class
for cls in classes:
register_class(cls)
|
[
"[email protected]"
] | |
78d5a9d6714ee5729fc953d355fd9ece21e45447
|
b507751bb8adbf0c3270e399dab00259371afd05
|
/setup.py
|
aa38ce6ee4a06ff0d1640bb6106e9f645240be61
|
[] |
no_license
|
sdkwe/pywe-component-token
|
2c7bd3fe05b95dcd70b5c49f516e8936519b9a73
|
020d1be06ffd0d8a79e5cb34dd9ecb4a09d740ed
|
refs/heads/master
| 2020-03-07T19:05:16.951405 | 2018-05-01T20:10:31 | 2018-05-01T20:10:31 | 127,661,479 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,205 |
py
|
# -*- coding: utf-8 -*-
from setuptools import setup
version = '1.1.0'
setup(
name='pywe-component-token',
version=version,
keywords='Wechat Weixin Component Token',
description='Wechat Component Token Module for Python.',
long_description=open('README.rst').read(),
url='https://github.com/sdkwe/pywe-component-token',
author='Hackathon',
author_email='[email protected]',
packages=['pywe_component_token'],
py_modules=[],
install_requires=['pywe_base', 'pywe_component_ticket', 'pywe_exception', 'pywe_storage'],
classifiers=[
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
[
"[email protected]"
] | |
69380cac821df4b434fe8e0bbea627355081cb50
|
161fd6370ffa0b35ecd50719d6266224da597ee0
|
/Python/Django/sports/views2.py
|
23bfca83cec0e4ed4b02022eb70bc98072091a6a
|
[] |
no_license
|
ebergstein/DojoAssignments
|
a30fd8b36442bff2a4253902a591ad11f191fc12
|
3ad9ac65073c733ead32b93ce4be19af5369fccf
|
refs/heads/master
| 2021-06-19T09:48:23.100713 | 2017-06-30T04:24:35 | 2017-06-30T04:24:35 | 82,743,546 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 472 |
py
|
from django.shortcuts import render, redirect
from .models import League, Team, Player
from . import team_maker
def index(request):
context = {
"leagues": League.objects.filter(name__contains="Womens"),
""""teams": Team.objects.all(),
"players": Player.objects.all(),"""
}
return render(request, "leagues/index.html", context)
def make_data(request):
team_maker.gen_leagues(10)
team_maker.gen_teams(50)
team_maker.gen_players(200)
return redirect("index")
|
[
"[email protected]"
] | |
e2cc167ab3cd9c985b8979c1c3ad79f7754354a2
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03494/s213100148.py
|
504f8df2d6619e1e1087cee14a9f5a8c697d6b08
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 173 |
py
|
N = int(input())
A = list(map(int,input().split()))
def X(x):
    # Count how many times x is divisible by 2 (its 2-adic valuation); one
    # "halve every element" operation is possible only while all elements are
    # even, so the answer is the minimum valuation over the array.
    y = 0
    while x % 2 == 0:
        x //= 2
        y = y + 1
    return y
a = [X(A[i]) for i in range(0,N)]
print(min(a))
|
[
"[email protected]"
] | |
7e006593e39f695a39c0508dbad1b2e8fa0889c9
|
90be755a741d6c93dd59d4acef8b27b4cf93ff54
|
/src/elsia/scripts/coarse2fine.py
|
4012618330283e3d1b0f119612ad5aaeaa7aa7c6
|
[] |
no_license
|
karry3775/Elsia_ws
|
05aa5786a6f3f64b70c7ceafead6d72d4ca18bab
|
031f8006e9a439d9947be5ed288a666f20fca3a7
|
refs/heads/master
| 2023-02-21T05:21:10.842475 | 2021-01-23T14:58:57 | 2021-01-23T15:21:46 | 326,032,434 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,529 |
py
|
#!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry # to get the laser cross track and abs yaw
from std_msgs.msg import String # to get the block_val message
from geometry_msgs.msg import Twist # for cmd_vel
from tf.transformations import euler_from_quaternion, quaternion_from_euler
import math as m
import time
rospy.init_node("coarse2fine_act_prop_node")
aisle_pub = rospy.Publisher("/aisle_odom", Odometry, queue_size=10)
# global variables
abs_yaw = 0.0
ct_dist = 0.0
f = 63.06 # 50 # mm
alpha = 0.264583 # 0.20977
lat = 4.0 # 7.6 # m (total cross track distance)
column_gap = 2.25 # m
# Global estimates
X_aisle = []
Y_aisle = []
odom_depth = 0.0
depth_thresh = column_gap / 3
last_laser_odom_x = 0.0
vel = 0.0
last_time_stamp = time.time()
def laser_cb(msg):
global ct_dist
y = msg.pose.pose.position.y
ct_dist = -y
def aisle_ct_cb(msg):
global ct_dist
y = msg.pose.pose.position.y
ct_dist = -y
def abs_yaw_cb(msg):
global abs_yaw
ori = msg.pose.pose.orientation
(_, _, yaw) = euler_from_quaternion([ori.x, ori.y, ori.z, ori.w])
abs_yaw = yaw
def cmd_vel_cb(msg):
global vel
vel = msg.linear.x
def getDepth(l_r, px):
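    """Triangulate the forward distance (depth) to a detected column from its
    image column px, given the current absolute yaw and cross-track offset.
    l_r selects the left (-lat/2) or right (+lat/2) aisle face; the camera's
    forward mounting offset is compensated at the start and end."""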
camera_front_offset = 0.1
ct_camera = ct_dist - (camera_front_offset) * m.sin(abs_yaw)
if l_r == "l":
print("it is l")
# find xa and ya
xa = px * (alpha/1000) * m.cos(abs_yaw) - \
(f/1000) * m.sin(abs_yaw) + ct_camera
ya = px * (alpha/1000) * m.sin(abs_yaw) + (f/1000) * m.cos(abs_yaw)
depth = ((-(lat/2) - ct_camera) * ya) / (xa - ct_camera)
else:
# find xa and ya
xa = px * (alpha/1000) * m.cos(abs_yaw) - \
(f/1000) * m.sin(abs_yaw) + ct_camera
ya = px * (alpha/1000) * m.sin(abs_yaw) + (f/1000) * m.cos(abs_yaw)
print("it is r")
depth = (((lat/2) - ct_camera) * ya) / (xa - ct_camera)
depth = depth + (camera_front_offset) * m.cos(abs_yaw)
print("depth value for abs_yaw = {}, ct_camera = {}, px = {} is: {}".format(
abs_yaw, ct_camera, px, depth))
return depth
def ignoreErratic(odom_depth, odom_depth_1, odom_depth_0):
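    """Reject a depth estimate that jumps by more than 2 m from the previous
    estimate; fall back to the value derived from the second-to-last column
    detection instead."""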
if abs(odom_depth - odom_depth_1) > 2:
odom_depth = odom_depth_0
else:
odom_depth = odom_depth_1
return odom_depth
def block_val_cb(msg):
global X_aisle, Y_aisle, odom_depth, last_laser_odom_x, last_time_stamp
##################Find delta based on cmd_vel###############################
cur_time_stamp = time.time()
delta_time = cur_time_stamp - last_time_stamp
last_time_stamp = cur_time_stamp
delta_x = vel * delta_time * m.cos(abs_yaw)
############################################################################
    # msg is a comma-separated string: block_count, px, l_r, px0, l_r0
data = msg.data.split(",")
print("####################################################")
# print("data: {}".format(data))
block_count = float(data[0])
px = float(data[1]) # in pixels which is basically the x value
l_r = data[2]
px0 = float(data[3]) # second last value
l_r0 = data[4] # second last value
print("original px was: {}".format(px))
px = px - 200
px0 = px0 - 200
depth = getDepth(l_r, px)
depth0 = getDepth(l_r0, px0)
odom_depth_1 = (block_count * column_gap) - depth + column_gap
odom_depth_0 = ((block_count - 1) * column_gap) - depth0 + column_gap
odom_depth_final = ignoreErratic(odom_depth, odom_depth_1, odom_depth_0)
print("prev_odom_depth : {}, odom_depth_0 : {}, odom_depth_1 : {}".format(
odom_depth, odom_depth_0, odom_depth_1))
    # complementary filter: blend the new measurement with the propagated previous estimate
weight = 0.98
odom_depth = (1 - weight) * odom_depth_final + \
weight * (odom_depth + delta_x)
print("depth : {}, odom_depth : {}".format(depth, odom_depth))
# append to the trajectory estimates
Y_aisle.append(-ct_dist)
X_aisle.append(odom_depth)
# publish to aisle_odom
q = quaternion_from_euler(0, 0, abs_yaw)
odom_msg = Odometry()
odom_msg.pose.pose.position.x = odom_depth
odom_msg.pose.pose.position.y = -ct_dist
# putting raw aisle odometry as the z value
    odom_msg.pose.pose.position.z = 0.0  # previously carried odom_depth_1 for analytics; 0.0 is the correct value here
odom_msg.pose.pose.orientation.x = q[0]
odom_msg.pose.pose.orientation.y = q[1]
odom_msg.pose.pose.orientation.z = q[2]
odom_msg.pose.pose.orientation.w = q[3]
odom_msg.header.stamp = rospy.Time.now()
odom_msg.header.frame_id="/odom"
odom_msg.child_frame_id="/aisle_link"
aisle_pub.publish(odom_msg)
if __name__ == "__main__":
try:
abs_yaw_sub = rospy.Subscriber(
"/abs_orientation_odom", Odometry, abs_yaw_cb) # using the ceil's yaw
# abs_yaw_sub = rospy.Subscriber("/ground_truth/state", Odometry, abs_yaw_cb) # using gt yaw
# laser_sub = rospy.Subscriber("/odom_rf2o_corrected_ceil", Odometry, laser_cb)# using the ceil's y(ct_dist)
# using the aisle cross track
aisle_ct_sub = rospy.Subscriber("/aisle_ct", Odometry, aisle_ct_cb)
# laser_sub = rospy.Subscriber("/ground_truth/state", Odometry, laser_cb) # using gt y(ct_dist)
block_val_sub = rospy.Subscriber("/block_val", String, block_val_cb)
cmd_vel_sub = rospy.Subscriber("/jacky/cmd_vel", Twist, cmd_vel_cb)
rospy.spin()
except rospy.ROSInterruptException:
pass
|
[
"[email protected]"
] | |
43ba92b302deb3f78d664830d576fe2b464496d3
|
bb78978db67663287ecabd7124b844a068a607ce
|
/command/sub8_rqt/sub8_rqt_gui/__init__.py
|
b916ed4001769e1ce299f046ebd3546166f06783
|
[
"MIT"
] |
permissive
|
ErolB/Sub8
|
fb0493c20e7d5409a21c9fb473117dccb8c11c6b
|
757e56ed0c22aa0386dba2e7f539a9c62782c167
|
refs/heads/master
| 2021-01-11T07:41:42.849536 | 2016-03-28T05:01:13 | 2016-03-28T05:01:13 | 54,864,501 | 0 | 0 | null | 2016-03-28T04:05:16 | 2016-03-28T04:05:15 | null |
UTF-8
|
Python
| false | false | 27 |
py
|
from gui import AlarmPlugin
|
[
"[email protected]"
] | |
73f12fed22111fd0e30c62b8b0e51fb76df8a6bd
|
c1d5aeaa30418507610d3f1364ef24759b41e86d
|
/tango_with_django_project/settings.py
|
a73a709671e694d9d025eba696db1505d946ba6b
|
[] |
no_license
|
kydkang/workspace8
|
7c368f3549e40dd62bf5aa126747aa83f14f09a1
|
bc17b6a1cdf0e4b025e844eb5cf33f53a629875f
|
refs/heads/master
| 2020-03-12T13:49:50.047181 | 2018-04-23T06:44:12 | 2018-04-23T06:44:12 | 130,651,578 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,145 |
py
|
"""
Django settings for tango_with_django_project project.
Generated by 'django-admin startproject' using Django 2.0.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'm)7(y*!)ut0)qk)o9(=pr@(b77@!70h^vjogh26us72@6b%x)r'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'tango_with_django_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'tango_with_django_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
|
[
"[email protected]"
] | |
fc8dd96f4980bb48684fbfa0034cb7b99912c982
|
96dcea595e7c16cec07b3f649afd65f3660a0bad
|
/tests/components/nextdns/test_init.py
|
fb9ea74509e9554bd31f080eb5f5aa471a463f55
|
[
"Apache-2.0"
] |
permissive
|
home-assistant/core
|
3455eac2e9d925c92d30178643b1aaccf3a6484f
|
80caeafcb5b6e2f9da192d0ea6dd1a5b8244b743
|
refs/heads/dev
| 2023-08-31T15:41:06.299469 | 2023-08-31T14:50:53 | 2023-08-31T14:50:53 | 12,888,993 | 35,501 | 20,617 |
Apache-2.0
| 2023-09-14T21:50:15 | 2013-09-17T07:29:48 |
Python
|
UTF-8
|
Python
| false | false | 1,806 |
py
|
"""Test init of NextDNS integration."""
from unittest.mock import patch
from nextdns import ApiError
from homeassistant.components.nextdns.const import CONF_PROFILE_ID, DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.const import CONF_API_KEY, STATE_UNAVAILABLE
from homeassistant.core import HomeAssistant
from . import init_integration
from tests.common import MockConfigEntry
async def test_async_setup_entry(hass: HomeAssistant) -> None:
"""Test a successful setup entry."""
await init_integration(hass)
state = hass.states.get("sensor.fake_profile_dns_queries_blocked_ratio")
assert state is not None
assert state.state != STATE_UNAVAILABLE
assert state.state == "20.0"
async def test_config_not_ready(hass: HomeAssistant) -> None:
"""Test for setup failure if the connection to the service fails."""
entry = MockConfigEntry(
domain=DOMAIN,
title="Fake Profile",
unique_id="xyz12",
data={CONF_API_KEY: "fake_api_key", CONF_PROFILE_ID: "xyz12"},
)
with patch(
"homeassistant.components.nextdns.NextDns.get_profiles",
side_effect=ApiError("API Error"),
):
entry.add_to_hass(hass)
await hass.config_entries.async_setup(entry.entry_id)
assert entry.state is ConfigEntryState.SETUP_RETRY
async def test_unload_entry(hass: HomeAssistant) -> None:
"""Test successful unload of entry."""
entry = await init_integration(hass)
assert len(hass.config_entries.async_entries(DOMAIN)) == 1
assert entry.state is ConfigEntryState.LOADED
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.state is ConfigEntryState.NOT_LOADED
assert not hass.data.get(DOMAIN)
|
[
"[email protected]"
] | |
c268cd0eb69a97954d3fe683c3c669a4afee24df
|
02e9e67f6fd0d7dd6c88620f78923afa0ea639d1
|
/Classes/Examples/Lesson 2/animal_tester.py
|
adaf99c2bc0d252784ad1bd762666685158ece31
|
[] |
no_license
|
fbhs-cs/PreAPCS-classcode
|
55823b0f16333c35650785b4dd92ae03cd6b4768
|
ef1355c04fcdbb3c3d4e16c4aae20acfbc833c1c
|
refs/heads/master
| 2021-03-27T19:08:49.479105 | 2018-04-06T21:21:23 | 2018-04-06T21:21:23 | 103,399,299 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 448 |
py
|
import animal
def main():
print("Creating dogs...")
try:
dog1 = animal.Dog(name="Drama",gender="female",age=14,breed="Chow/Lab")
dog2 = animal.Dog(name="Santa's Little Helper",gender="male",age=3,breed="Greyhound")
dog3 = animal.Dog(name="Einstein",gender="male",age=38,breed="Sheepdog")
    except Exception as err:
        print("Something is wrong with your __init__ method in Dog:", err)
if __name__ == "__main__":
main()
|
[
"[email protected]"
] | |
0dc63cb0e85c77c53207e0d69ae4bae808320782
|
ce7c414f098e3ea6674bec05d40345d0047bdbe5
|
/basic07.py
|
4d9fdf47767336fb17e20427de88584fafe1803e
|
[] |
no_license
|
jun-yoshiyoshi/python_plactice100
|
0545783d199c04ebd5b53b354c5dc50c52a99b6e
|
466dc39341cad50594c957ae60d5d00c254d06f7
|
refs/heads/main
| 2023-08-11T06:29:41.711413 | 2021-09-14T00:59:38 | 2021-09-14T00:59:38 | 406,175,270 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 303 |
py
|
# if/elif/else FizzBuzz, plus a one-liner version below
for i in range(1, 31):
if i % 15 == 0:
print('fizzbuzz')
elif i % 3 == 0:
print('fizz')
elif i % 5 == 0:
print('buzz')
else:
print(i)
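
# One-liner FizzBuzz: (i % 3 < 1) is a bool, and "Fizz" * True repeats the
# string once while "Fizz" * False yields ''; an empty result is falsy, so
# `or i` falls back to printing the number itself.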
for i in range(1, 51):
print("Fizz"*(i % 3 < 1)+"Buzz"*(i % 5 < 1) or i)
|
[
"[email protected]"
] | |
a647121f068ff94bcce89cf280706e5fae7e7d1d
|
bc441bb06b8948288f110af63feda4e798f30225
|
/topology_sdk/api/container/update_container_pb2.py
|
bc6d3f93792986767e0d060cda91eefac7cdf152
|
[
"Apache-2.0"
] |
permissive
|
easyopsapis/easyops-api-python
|
23204f8846a332c30f5f3ff627bf220940137b6b
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
refs/heads/master
| 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null |
UTF-8
|
Python
| false | true | 11,781 |
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: update_container.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from topology_sdk.model.topology import property_pb2 as topology__sdk_dot_model_dot_topology_dot_property__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='update_container.proto',
package='container',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x16update_container.proto\x12\tcontainer\x1a*topology_sdk/model/topology/property.proto\"\x87\x02\n\x16UpdateContainerRequest\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x12\n\ndataSource\x18\x03 \x01(\t\x12$\n\x08property\x18\x04 \x01(\x0b\x32\x12.topology.Property\x12\x36\n\x05style\x18\x05 \x01(\x0b\x32\'.container.UpdateContainerRequest.Style\x12\x10\n\x08\x63ollapse\x18\x06 \x01(\x08\x1aO\n\x05Style\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\r\n\x05width\x18\x03 \x01(\x02\x12\x0e\n\x06height\x18\x04 \x01(\x02\x12\x11\n\tclassName\x18\x05 \x01(\t\"%\n\x17UpdateContainerResponse\x12\n\n\x02id\x18\x01 \x01(\t\"\x84\x01\n\x1eUpdateContainerResponseWrapper\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x13\n\x0b\x63odeExplain\x18\x02 \x01(\t\x12\r\n\x05\x65rror\x18\x03 \x01(\t\x12\x30\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\".container.UpdateContainerResponseb\x06proto3')
,
dependencies=[topology__sdk_dot_model_dot_topology_dot_property__pb2.DESCRIPTOR,])
_UPDATECONTAINERREQUEST_STYLE = _descriptor.Descriptor(
name='Style',
full_name='container.UpdateContainerRequest.Style',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='x', full_name='container.UpdateContainerRequest.Style.x', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='y', full_name='container.UpdateContainerRequest.Style.y', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='width', full_name='container.UpdateContainerRequest.Style.width', index=2,
number=3, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='height', full_name='container.UpdateContainerRequest.Style.height', index=3,
number=4, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='className', full_name='container.UpdateContainerRequest.Style.className', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=266,
serialized_end=345,
)
_UPDATECONTAINERREQUEST = _descriptor.Descriptor(
name='UpdateContainerRequest',
full_name='container.UpdateContainerRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='container.UpdateContainerRequest.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='container.UpdateContainerRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dataSource', full_name='container.UpdateContainerRequest.dataSource', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='property', full_name='container.UpdateContainerRequest.property', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='style', full_name='container.UpdateContainerRequest.style', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='collapse', full_name='container.UpdateContainerRequest.collapse', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[_UPDATECONTAINERREQUEST_STYLE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=82,
serialized_end=345,
)
_UPDATECONTAINERRESPONSE = _descriptor.Descriptor(
name='UpdateContainerResponse',
full_name='container.UpdateContainerResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='container.UpdateContainerResponse.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=347,
serialized_end=384,
)
_UPDATECONTAINERRESPONSEWRAPPER = _descriptor.Descriptor(
name='UpdateContainerResponseWrapper',
full_name='container.UpdateContainerResponseWrapper',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='container.UpdateContainerResponseWrapper.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='codeExplain', full_name='container.UpdateContainerResponseWrapper.codeExplain', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='error', full_name='container.UpdateContainerResponseWrapper.error', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='data', full_name='container.UpdateContainerResponseWrapper.data', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=387,
serialized_end=519,
)
_UPDATECONTAINERREQUEST_STYLE.containing_type = _UPDATECONTAINERREQUEST
_UPDATECONTAINERREQUEST.fields_by_name['property'].message_type = topology__sdk_dot_model_dot_topology_dot_property__pb2._PROPERTY
_UPDATECONTAINERREQUEST.fields_by_name['style'].message_type = _UPDATECONTAINERREQUEST_STYLE
_UPDATECONTAINERRESPONSEWRAPPER.fields_by_name['data'].message_type = _UPDATECONTAINERRESPONSE
DESCRIPTOR.message_types_by_name['UpdateContainerRequest'] = _UPDATECONTAINERREQUEST
DESCRIPTOR.message_types_by_name['UpdateContainerResponse'] = _UPDATECONTAINERRESPONSE
DESCRIPTOR.message_types_by_name['UpdateContainerResponseWrapper'] = _UPDATECONTAINERRESPONSEWRAPPER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
UpdateContainerRequest = _reflection.GeneratedProtocolMessageType('UpdateContainerRequest', (_message.Message,), {
'Style' : _reflection.GeneratedProtocolMessageType('Style', (_message.Message,), {
'DESCRIPTOR' : _UPDATECONTAINERREQUEST_STYLE,
'__module__' : 'update_container_pb2'
# @@protoc_insertion_point(class_scope:container.UpdateContainerRequest.Style)
})
,
'DESCRIPTOR' : _UPDATECONTAINERREQUEST,
'__module__' : 'update_container_pb2'
# @@protoc_insertion_point(class_scope:container.UpdateContainerRequest)
})
_sym_db.RegisterMessage(UpdateContainerRequest)
_sym_db.RegisterMessage(UpdateContainerRequest.Style)
UpdateContainerResponse = _reflection.GeneratedProtocolMessageType('UpdateContainerResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATECONTAINERRESPONSE,
'__module__' : 'update_container_pb2'
# @@protoc_insertion_point(class_scope:container.UpdateContainerResponse)
})
_sym_db.RegisterMessage(UpdateContainerResponse)
UpdateContainerResponseWrapper = _reflection.GeneratedProtocolMessageType('UpdateContainerResponseWrapper', (_message.Message,), {
'DESCRIPTOR' : _UPDATECONTAINERRESPONSEWRAPPER,
'__module__' : 'update_container_pb2'
# @@protoc_insertion_point(class_scope:container.UpdateContainerResponseWrapper)
})
_sym_db.RegisterMessage(UpdateContainerResponseWrapper)
# @@protoc_insertion_point(module_scope)
|
[
"[email protected]"
] | |
41c70140030ead7bcb3232f412ecfb56956c7900
|
22ccc673a522b52f2678b6ac96e3ff2a104864ff
|
/digest/migrations/0015_auto_20150731_0859.py
|
fbd6d24eea0abcd05e835877f6ddb37710feb92a
|
[] |
no_license
|
ivlevdenis/pythondigest
|
07e448da149d92f37b8ce3bd01b645ace1fa0888
|
f8ccc44808a26960fb69a4c4c3491df3e6d3d24e
|
refs/heads/master
| 2021-01-18T02:09:42.121559 | 2016-05-15T22:44:34 | 2016-05-15T22:44:34 | 58,350,368 | 0 | 0 | null | 2016-05-09T05:21:39 | 2016-05-09T05:21:39 | null |
UTF-8
|
Python
| false | false | 486 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [('digest', '0014_auto_20150731_0859'), ]
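    # The escaped verbose_name in the operation below is Russian:
    # "\u0422\u044d\u0433\u0438" renders as "Тэги" ("Tags").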
operations = [migrations.AlterField(
model_name='item',
name='tags',
field=models.ManyToManyField(to='digest.Tag',
verbose_name='\u0422\u044d\u0433\u0438',
blank=True), ), ]
|
[
"[email protected]"
] | |
865946e4d753262386d009e877aa101002220064
|
48cb50826c7774f320c9b4d51a287dcb4e805d8e
|
/jsk_pcl_ros_utils/cfg/PolygonArrayAreaLikelihood.cfg
|
5c6645133f23ae642c1b94f2a27d657fd33ee98b
|
[] |
no_license
|
huangxliang/jsk_recognition
|
921d28771f5a1c1dde6dcdaa9289dafdde81b310
|
8c6b46d0ca9ccb00f033ceef305b35edafb32162
|
refs/heads/master
| 2021-01-18T12:56:05.945787 | 2016-03-11T05:50:08 | 2016-03-11T05:50:08 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 505 |
cfg
|
#!/usr/bin/env python
# set up parameters that we care about
PACKAGE = 'jsk_pcl_ros_utils'
try:
import imp
imp.find_module(PACKAGE)
from dynamic_reconfigure.parameter_generator_catkin import *;
except:
import roslib; roslib.load_manifest(PACKAGE)
from dynamic_reconfigure.parameter_generator import *;
from math import pi
gen = ParameterGenerator()
gen.add("area", double_t, 0, "Reference polygon area used to compute the likelihood", 1.0, 0.0, 10.0)
exit(gen.generate(PACKAGE, "jsk_pcl_ros_utils", "PolygonArrayAreaLikelihood"))
|
[
"[email protected]"
] | |
ebf1ee4cdfea3e7fa89adbca475840bc63d22534
|
dc5d8dbbb4c6f296a95bea7069d13de38db00ac6
|
/lines.py
|
2d7e386e0f375c07c2fc1d2ac950771ef6d3ee0d
|
[] |
no_license
|
the-isf-academy/drawing
|
906591afa279529cb7a6208183188b01651b2fcb
|
652fea4754d3a465b55aaeb6673d989402a06350
|
refs/heads/master
| 2021-10-14T11:34:57.447129 | 2021-09-28T02:45:15 | 2021-09-28T02:45:15 | 205,507,729 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,585 |
py
|
# lines.py
# by Chris Proctor
# Helper functions for playing with how the turtle draws
# =============================================================================
# ! Advanced !
# =============================================================================
# This module contains some fancy code that we don't expect you to understand
# yet. That's ok--as long as we know how to use code, we don't have to
# understand everything about it. (Do you understand everything about
# MacOS?) Check out the README for documentation on how to use this code.
# Of course, if you want to dig into this module, feel free. You can ask a
# teacher about it if you're interested.
# =============================================================================
from itertools import cycle
from turtle import Turtle, pendown, penup, pencolor
class Segmenter:
"""
    Breaks a distance (length) into segments, which are yielded one at a time
    together with the state function attached to each segment. Whatever's
    left over at the end gets yielded too. For example:

    >>> from drawing.lines import Segmenter
    >>> from turtle import penup, pendown
    >>> [seg for seg, state in Segmenter([(1, penup), (5, pendown)]).segment(20)]
    [1, 5, 1, 5, 1, 5, 1, 1]
"""
def __init__(self, pattern):
"Should be initialized with a pattern like [(10, penup), (20, pendown)]"
self.pattern = pattern
self.remainder = 0
self.remainder_state = None
self.pattern_cycle = cycle(pattern)
def segment(self, length):
"""
        Segments `length` into chunks according to the pattern, yielding each
        chunk along with the state function that applies to it
"""
if self.remainder > 0:
if length > self.remainder:
yield self.remainder, self.remainder_state
length -= self.remainder
self.remainder = 0
else:
yield length, self.remainder_state
self.remainder -= length
length = 0
if length > 0:
for (seg, state) in self.pattern_cycle:
if length >= seg:
yield seg, state
length -= seg
else:
if length > 0:
yield length, state
self.remainder = seg - length
self.remainder_state = state
return
def go_segmented(turtle, distance):
"This is the fake go function that we're going to inject into the turtle"
for seg, state in turtle.segmenter.segment(distance):
state()
turtle.true_go(seg)
def color_setter_factory(color):
"Returns a function that sets the pencolor"
def set_color():
pencolor(color)
return set_color
class dashes:
"""
A context manager which causes a code block to draw with dashes.
This is accomplished by briefly hacking the Turtle. Sorry!
"""
def __init__(self, spacing=20):
self.spacing = spacing
def __enter__(self):
Turtle.segmenter = Segmenter([(self.spacing, pendown), (self.spacing, penup)])
Turtle.true_go = Turtle._go
Turtle._go = go_segmented
def __exit__(self, exc_type, exc_value, traceback):
Turtle._go = Turtle.true_go
del Turtle.true_go
class dots:
"A context manager which causes a code block to draw with dots"
def __init__(self, spacing=10):
self.spacing = spacing
def __enter__(self):
Turtle.segmenter = Segmenter([(1, pendown), (self.spacing, penup)])
Turtle.true_go = Turtle._go
Turtle._go = go_segmented
def __exit__(self, exc_type, exc_value, traceback):
Turtle._go = Turtle.true_go
del Turtle.true_go
class rainbow:
"A context manager which causes a code block to draw in rainbow colors"
default_colors = ['red', 'orange', 'yellow', 'green', 'blue', 'purple']
def __init__(self, spacing=10, colors=None):
self.spacing = spacing
self.colors = colors or rainbow.default_colors
def __enter__(self):
Turtle.segmenter = Segmenter([(self.spacing, color_setter_factory(color)) for color in self.colors])
Turtle.true_go = Turtle._go
Turtle._go = go_segmented
def __exit__(self, exc_type, exc_value, traceback):
Turtle._go = Turtle.true_go
del Turtle.true_go
if __name__ == '__main__':
from turtle import *
pensize(6)
with rainbow():
for i in range(100):
forward(i)
right(2 * 360/(i+1))
|
[
"[email protected]"
] | |
03f40f445a153af0590c13f0c05606a782782605
|
4c9580b2e09e2b000e27a1c9021b12cf2747f56a
|
/chapter02/app02/urls.py
|
23e633effc2aff2db92d1f3dca1fc3e48b57a431
|
[] |
no_license
|
jzplyy/xiaoyue_mall
|
69072c0657a6878a4cf799b8c8218cc7d88c8d12
|
4f9353d6857d1bd7dc54151ca8b34dcb4671b8dc
|
refs/heads/master
| 2023-06-26T02:48:03.103635 | 2021-07-22T15:51:07 | 2021-07-22T15:51:07 | 388,514,311 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 139 |
py
|
from django.urls import path
from app02 import views
urlpatterns = [
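    # The extra dict passes blog_id=3 to views.blog as a keyword argument.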
path('blog-list/', views.blog, {'blog_id': 3}),
]
|
[
"[email protected]"
] | |
c8b0206da89a30aca71601e901c93e63d45782b8
|
ffdcd340fdef833bfd9af89d779845ba2991a08c
|
/customer/migrations/0150_auto_20190325_1616.py
|
c47d4b86a52930c61ecb4fef98dc24e10b8ed1a2
|
[] |
no_license
|
calvinti12/Goat
|
64a122f697e06855bb53c37c8b7472a14c1030a3
|
2993dc48296cc1c6dd41651c05752647f074cb70
|
refs/heads/master
| 2020-07-03T19:16:26.942485 | 2019-08-12T20:17:43 | 2019-08-12T20:17:43 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 485 |
py
|
# Generated by Django 2.1.4 on 2019-03-25 20:16
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('customer', '0149_auto_20190323_1312'),
]
operations = [
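        # Note: the datetime default below was frozen when the migration was
        # generated (2019-03-25); a callable like django.utils.timezone.now
        # is usually what's intended for a live default.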
migrations.AlterField(
model_name='orderrequest',
name='date',
field=models.DateTimeField(default=datetime.datetime(2019, 3, 25, 16, 15, 55, 214289), verbose_name='Order Placed'),
),
]
|
[
"[email protected]"
] | |
42eec63fcd92eef17a0881c4d0abc24e596ff8f6
|
26d5c795d8aa83bf5cb3f228675ff51e2f704f57
|
/scripts/bqexport
|
bfef9fe73be2b701637137c11c7a7d955f03f311
|
[] |
no_license
|
binarymachines/mercury
|
8e13bb10c67a056fe88e02f558d73f1f1b95d028
|
db3e2425f4e77a44a97c740f7fff90312a1bd33f
|
refs/heads/master
| 2023-07-08T11:35:26.867494 | 2023-06-25T00:46:23 | 2023-06-25T00:46:23 | 94,708,610 | 2 | 6 | null | 2023-02-15T21:50:06 | 2017-06-18T19:31:50 |
Python
|
UTF-8
|
Python
| false | false | 4,413 |
#!/usr/bin/env python
'''
Usage:
bqexport [-p] <project> <dataset> --table <table> --bucket <google_bucket> --format=<fmt> [--delimiter=<delimiter>] [--directory=<directory>]
bqexport [-p] <project> <dataset> --table-list-file <tables> --bucket <google_bucket> --format=<fmt> [--delimiter=<delimiter>] [--directory=<directory>]
Options:
-p,--preview : show (but do not execute) export command
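
Examples (placeholder project/dataset/bucket names):
    bqexport my-project analytics --table events --bucket my-exports --format=csv --delimiter='|'
    bqexport -p my-project analytics --table-list-file tables.txt --bucket my-exports --format=json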
'''
import os, sys
import json
from snap import common
import docopt
import sh
from sh import bq # Google Cloud CLI must already be installed
class EXPORT_FORMAT(object):
CSV = 'csv'
JSON = 'json'
def extract_data(source_table_designator, target_designator, export_format, delimiter):
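    """Run `bq extract` on a single table, exporting CSV (with the given
    field delimiter) or newline-delimited JSON to the GCS destination."""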
try:
if export_format == EXPORT_FORMAT.CSV:
result = bq.extract('--field_delimiter',
delimiter,
'--destination_format',
'CSV',
source_table_designator,
target_designator)
print('\n### export of "%s" to "%s" complete.\n' %
(source_table_designator, target_designator), file=sys.stderr)
else: # export JSON records
result = bq.extract('--destination_format',
'NEWLINE_DELIMITED_JSON',
source_table_designator,
target_designator)
print('\n### export of "%s" to "%s" complete.\n' %
(source_table_designator, target_designator), file=sys.stderr)
except Exception as err:
print('!!! error exporting table data.', file=sys.stderr)
print(err, file=sys.stderr)
def main(args):
export_format = args['--format']
if export_format == EXPORT_FORMAT.CSV:
if args.get('--delimiter') is None:
print('### csv chosen as the export format, but no delimiter specified. Defaulting to comma.', file=sys.stderr)
elif export_format != EXPORT_FORMAT.JSON:
print('!!! supported export formats are "csv" and "json".')
return
tables = []
if args.get('--table'):
tables.append(args['<table>'])
elif args.get('--table-list-file'):
table_list_file = args['<tables>']
with open(table_list_file) as f:
for line in f:
tables.append(line.lstrip().rstrip())
project_name = args['<project>']
dataset = args['<dataset>']
bucket = args['<google_bucket>']
delimiter = ','
if args.get('--delimiter') is not None:
delimiter = args['--delimiter']
preview_mode = False
if args.get('--preview'):
preview_mode = True
print('\n### running bqex in preview mode.\n', file=sys.stderr)
if args.get('--directory') is not None:
bucket_directory = args['--directory']
else:
bucket_directory = ''
for table_name in tables:
source_table_designator = '{project}:{dataset}.{table}'.format(project=project_name,
dataset=dataset,
table=table_name)
filename = '%s_*.%s' % (table_name, export_format)
path_string = os.path.join(bucket, bucket_directory, filename)
target_designator = 'gs://%s' % path_string
if preview_mode:
if export_format == EXPORT_FORMAT.CSV:
print(bq.extract.bake('--field_delimiter',
'\'%s\'' % delimiter,
'--destination_format',
'CSV',
source_table_designator,
target_designator))
else:
print(bq.extract.bake('--destination_format',
'NEWLINE_DELIMITED_JSON',
source_table_designator,
target_designator))
else:
extract_data(source_table_designator, target_designator, export_format, delimiter)
print('\n### exiting.', file=sys.stderr)
if __name__ == '__main__':
args = docopt.docopt(__doc__)
main(args)
|
[
"[email protected]"
] | ||
ca831b57120478eb123c55e911034a76632d7712
|
6022d4228b3bd318447f2693a799c44f78d4d552
|
/hqca/tools/_operator.py
|
ad4edef8fa0c0febf1deac5caf1d205c20702f1b
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
sesmart/HQCA
|
74e67673913e6f3a141d3ed814dfad2d4f51c287
|
07ed661b95d0ee798c8f75396960e510e23ffbe6
|
refs/heads/main
| 2023-02-28T10:44:59.789765 | 2021-02-02T21:04:03 | 2021-02-02T21:04:03 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,936 |
py
|
from copy import deepcopy as copy
import sys
import traceback
from hqca.tools.quantum_strings import *
class Operator:
'''
    Can construct mathematical operators from different string types.
Aggregate collection of smaller strings, typically either qubit,
fermionic, or Pauli strings. Addition or multiplication follows the
rules for the component string. Can also be iterated through, or accessed
through indices.
transform(Transformation) returns a new operator.
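
    Example (a minimal sketch; assumes PauliString(s, c) from
    hqca.tools.quantum_strings, as used elsewhere in this module):

    >>> O = Operator([PauliString('XI', 0.5), PauliString('ZZ', 0.5)])
    >>> len(O)
    2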
'''
def __init__(self,
op=None,
):
self.op = []
if type(op)==type(None):
pass
elif isinstance(op, type(QuantumString())):
self.op = self.__add__(op).op
elif isinstance(op,type([])):
new = copy(self)
for o in op:
new+= o
self.op = new.op
def norm(self):
'''
calculates the l2 norm of operator in the respective basis
'''
n = 0
for o in self:
            n += abs(o.c)**2
        return n**0.5
def __str__(self):
z = ''
for i in self.op:
z += i.__str__()
z += '\n'
return z[:-1]
    def __iter__(self):
        # iteration (and hence next()) is delegated to the underlying list
        return self.op.__iter__()
def __getitem__(self,key):
return self.op[key]
def __contains__(self,A):
return A in self.op
def __len__(self):
return len(self.op)
def __add__(self,A):
new = copy(self)
if isinstance(A ,type(QuantumString())):
# we are adding a string
add=True
for n,i in enumerate(self):
if i==A:
new.op[n]+=A
add=False
break
if add:
new.op.append(A)
return new
elif isinstance(A,type(Operator())):
for o in A:
add=True
for n,i in enumerate(self):
if i==o:
new.op[n]+=o
add=False
break
if add:
new.op.append(o)
new.clean()
return new
def __mul__(self,A):
new = Operator()
if isinstance(A,type(QuantumString())):
for i in self:
new+= i*A
# we are adding a string
new.clean()
return new
elif isinstance(A,type(Operator())):
for a in A:
for s in self:
new+= s*a
        elif isinstance(A, (int, float, complex)):
for o in self:
new+= o*A
else:
raise TypeError
new.clean()
return new
def null(self):
for i in self:
if abs(i.c)>=1e-10:
return False
return True
def transform(self,
T=None,
*args,**kwargs):
'''
perform transformation on some operators
'''
new = Operator()
new += T(self,*args,**kwargs)
return new
    def __sub__(self,A):
        # operate on copies so that subtraction does not mutate A
        new = copy(self)
        if isinstance(A, type(QuantumString())):
            a = copy(A)
            a.c *= -1
            add = True
            for n,i in enumerate(self):
                if i==a:
                    new.op[n] += a
                    add = False
                    break
            if add:
                new.op.append(a)
            return new
        elif isinstance(A, type(Operator())):
            for o in A:
                o = copy(o)
                o.c *= -1
                add = True
                for n,i in enumerate(self):
                    if i==o:
                        new.op[n] += o
                        add = False
                        break
                if add:
                    new.op.append(o)
            new.clean()
            return new
def clean(self):
done = False
while not done:
done = True
for n,i in enumerate(self):
if abs(i.c)<1e-12:
self.op.pop(n)
done=False
break
def simplify(self):
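        """Reduce a sum of FermiStrings in-place: merge duplicate strings and
        apply the particle/hole/identity ('p'/'h'/'i') substitution rules."""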
if not isinstance(self.op[0],type(FermiString())):
            sys.exit('Cannot simplify non-fermionic strings.')
done = False
def sub1(self):
for j in range(len(self.op)):
for i in range(j):
for k in range(len(self.op[i].s)):
s1 = self.op[i].s[:k]+self.op[i].s[k+1:]
s2 = self.op[j].s[:k]+self.op[j].s[k+1:]
c1,c2 = copy(self.op[i].c),copy(self.op[j].c)
if s1==s2 and set([self.op[i].s[k],self.op[j].s[k]])==set(['p','h']):
self.op[i].s = self.op[i].s[:k]+'i'+self.op[i].s[k+1:]
self.op[j].c = c2-c1
return False
return True
def sub2(self):
for j in range(len(self.op)):
for i in range(j):
if self.op[i]==self.op[j]:
#print(self.op[i],self.op[j])
self.op[i].c+= self.op[j].c
del self.op[j]
return False
return True
def sub3(self):
for j in range(len(self.op)):
for i in range(j):
for k in range(len(self.op[i].s)):
s1 = self.op[i].s[:k]+self.op[i].s[k+1:]
s2 = self.op[j].s[:k]+self.op[j].s[k+1:]
k1,k2 = self.op[i].s[k], self.op[j].s[k]
c1,c2 = copy(self.op[i].c),copy(self.op[j].c)
if s1==s2 and set([self.op[i].s[k],self.op[j].s[k]])==set(['p','i']):
if abs(c1+c2)<1e-6:
c = abs(c1)
if self.op[i].s[k]=='p':
self.op[i].c = c2
elif self.op[i].s[k]=='i':
self.op[i].c = c1
self.op[i].s = self.op[i].s[:k]+'h'+self.op[i].s[k+1:]
del self.op[j]
return False
elif k1=='i' and abs(c1+c2*0.5)<1e-6:
# c1 is half as large as c2
# i.e., c1 = c2*0.5
# c1 I - 2c1 P = *
self.op[i].s = self.op[i].s[:k]+'h'+self.op[i].s[k+1:]
self.op[i].c = c1
self.op[j].s = self.op[i].s[:k]+'p'+self.op[i].s[k+1:]
self.op[j].c = 0.5*c2
elif k2=='i' and abs(c1*0.5+c2)<1e-6:
#
#
self.op[i].s = self.op[i].s[:k]+'p'+self.op[i].s[k+1:]
self.op[i].c = c1*0.5
self.op[j].s = self.op[i].s[:k]+'h'+self.op[i].s[k+1:]
self.op[j].c = c2
elif s1==s2 and set([self.op[i].s[k],self.op[j].s[k]])==set(['h','i']):
if abs(c1+c2)<1e-6:
c = abs(c1)
if self.op[i].s[k]=='h':
self.op[i].c = c2
elif self.op[i].s[k]=='i':
self.op[i].c = c1
self.op[i].s = self.op[i].s[:k]+'p'+self.op[i].s[k+1:]
del self.op[j]
return False
elif k1=='i' and abs(c1+c2*0.5)<1e-6:
# c1 is half as large as c2
# i.e., c1 = c2*0.5
# c1 I - 2c1 P = *
self.op[i].s = self.op[i].s[:k]+'p'+self.op[i].s[k+1:]
self.op[i].c = c1
self.op[j].s = self.op[i].s[:k]+'h'+self.op[i].s[k+1:]
self.op[j].c = 0.5*c2
elif k2=='i' and abs(c1*0.5+c2)<1e-6:
#
#
self.op[i].s = self.op[i].s[:k]+'h'+self.op[i].s[k+1:]
self.op[i].c = c1*0.5
self.op[j].s = self.op[i].s[:k]+'p'+self.op[i].s[k+1:]
return True
pre = False
#print(len(self.op))
while not pre:
pre = sub2(self)
#print(len(self.op))
l1 = False
l2 = False
while not (l1 and l2):
l1 = False
while not l1:
l1 = sub1(self)
almost = False
while not almost:
almost = sub2(self)
l2 = False
while not l2:
l2 = sub3(self)
self.clean()
return self
def truncate(self,threshold=1e-10):
for i in reversed(range(len(self))):
if abs(self.op[i].c)<threshold:
self.op.pop(i)
    def commutator(self,A):
        try:
            return self*A - A*self
        except Exception:
            # A is likely a bare string; wrap it in an Operator first
            new = Operator()
            new += A
            return self*new - new*self
def clifford(self,U):
'''
applies clifford unitaries...note these are in terms of qubit orderings
'''
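        # Conjugation lookup: for each single-qubit gate U (H, S, or
        # V = S H Sdag), cliff[U][P] gives the Pauli that P is conjugated to,
        # together with its sign, in this module's convention.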
cliff = {
'H':{
'X':['Z',1],
'Y':['Y',-1],
'Z':['X',1],
'I':['I',1],
},
'S':{
'X':['Y',-1],
'Y':['X',-1],
'Z':['Z',1],
'I':['I',1],
},
'V':{ # SHSdag
'X':['X',-1],
'Y':['Z',-1],
'Z':['Y',-1],
'I':['I',1],
},
}
new = Operator()
#print('U: ',U)
for op in self:
if not isinstance(op,type(PauliString())):
            sys.exit('Cannot apply Clifford unitaries to non-Pauli strings.')
temp = []
c = copy(op.c)
for s,u in zip(op.s,U):
temp.append(cliff[u][s][0])
c*= cliff[u][s][1]
#print(PauliString(''.join(temp),c))
#print('----')
new+= PauliString(''.join(temp),c)
return new
|
[
"[email protected]"
] | |
d2f93461f3e3a9caba1a1a3f2303cf3528743bae
|
a838d4bed14d5df5314000b41f8318c4ebe0974e
|
/sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_11_01/aio/operations/_route_filter_rules_operations.py
|
87b50c0fb113dd13bb5d4aee1f915c107da18d14
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
scbedd/azure-sdk-for-python
|
ee7cbd6a8725ddd4a6edfde5f40a2a589808daea
|
cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a
|
refs/heads/master
| 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 |
MIT
| 2019-08-11T21:16:01 | 2018-11-28T21:34:49 |
Python
|
UTF-8
|
Python
| false | false | 28,417 |
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RouteFilterRulesOperations:
"""RouteFilterRulesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def get(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
**kwargs
) -> "_models.RouteFilterRule":
"""Gets the specified rule from a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the rule.
:type rule_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RouteFilterRule, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_11_01.models.RouteFilterRule
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(route_filter_rule_parameters, 'RouteFilterRule')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.RouteFilterRule",
**kwargs
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Creates or updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the create or update route filter
rule operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2018_11_01.models.RouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.RouteFilterRule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
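    # Hedged usage sketch, not part of the generated client (client construction
    # and names below are assumptions): the returned poller is awaited from an
    # async context.
    #
    #     poller = await network_client.route_filter_rules.begin_create_or_update(
    #         resource_group_name="rg", route_filter_name="filterName",
    #         rule_name="ruleName", route_filter_rule_parameters=rule_model)
    #     rule = await poller.result()  # RouteFilterRule once the LRO finishes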
async def _update_initial(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs
) -> "_models.RouteFilterRule":
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(route_filter_rule_parameters, 'PatchRouteFilterRule')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
async def begin_update(
self,
resource_group_name: str,
route_filter_name: str,
rule_name: str,
route_filter_rule_parameters: "_models.PatchRouteFilterRule",
**kwargs
) -> AsyncLROPoller["_models.RouteFilterRule"]:
"""Updates a route in the specified route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:param rule_name: The name of the route filter rule.
:type rule_name: str
:param route_filter_rule_parameters: Parameters supplied to the update route filter rule
operation.
:type route_filter_rule_parameters: ~azure.mgmt.network.v2018_11_01.models.PatchRouteFilterRule
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either RouteFilterRule or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2018_11_01.models.RouteFilterRule]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRule"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_initial(
resource_group_name=resource_group_name,
route_filter_name=route_filter_name,
rule_name=rule_name,
route_filter_rule_parameters=route_filter_rule_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('RouteFilterRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'ruleName': self._serialize.url("rule_name", rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules/{ruleName}'} # type: ignore
def list_by_route_filter(
self,
resource_group_name: str,
route_filter_name: str,
**kwargs
) -> AsyncIterable["_models.RouteFilterRuleListResult"]:
"""Gets all RouteFilterRules in a route filter.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param route_filter_name: The name of the route filter.
:type route_filter_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RouteFilterRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_11_01.models.RouteFilterRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.RouteFilterRuleListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_route_filter.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'routeFilterName': self._serialize.url("route_filter_name", route_filter_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('RouteFilterRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_route_filter.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/routeFilters/{routeFilterName}/routeFilterRules'} # type: ignore
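    # Hedged usage sketch (names assumed): the AsyncItemPaged returned above is
    # consumed with `async for`; get_next/extract_data fetch further pages lazily.
    #
    #     async for rule in network_client.route_filter_rules.list_by_route_filter(
    #             "rg", "filterName"):
    #         print(rule.name)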
|
[
"[email protected]"
] | |
9ee758cd3a968a7234a34b8742d1648503bc6124
|
1e1f303cf81da16dec2aa2a5e04c0f3e420ffae8
|
/scripts/pypi/tests/build_pypi_package_test.py
|
cbda69b39a3e0d89bb656211bced2608121c5b8d
|
[
"MIT"
] |
permissive
|
brianjo/pyre-check
|
ba56e727dafb0c626b6dd0ba414c0df94e762475
|
faae20632480948d943d094895c1a2c025e9d82a
|
refs/heads/master
| 2021-08-07T08:43:15.473504 | 2020-12-08T18:46:48 | 2020-12-08T18:46:48 | 250,374,200 | 0 | 0 |
MIT
| 2020-03-26T21:20:21 | 2020-03-26T21:20:20 | null |
UTF-8
|
Python
| false | false | 2,854 |
py
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import os
import tempfile
import unittest
from pathlib import Path
from unittest.mock import Mock, patch
from ..build_pypi_package import (
MODULE_NAME,
add_init_files,
patch_version,
sync_pysa_stubs,
sync_python_files,
validate_version,
)
class TestArgumentValidationMethods(unittest.TestCase):
def test_validate_version(self) -> None:
validate_version("0.0.01")
with self.assertRaises(ValueError):
validate_version("x0.0.01")
class TestCreatingWheel(unittest.TestCase):
def setUp(self) -> None:
self.pyre_directory: Path = Path(__file__).resolve().parent.parent.parent.parent
def test_create_init_files(self) -> None:
with tempfile.TemporaryDirectory() as build_root:
path = Path(build_root)
add_init_files(path)
# Assert the expected __init__ files are present
init_files = [str(path) for path in path.glob("**/*.py")]
self.assertTrue(build_root + "/pyre_check/__init__.py" in init_files)
self.assertTrue(build_root + "/pyre_check/client/__init__.py" in init_files)
self.assertTrue(build_root + "/pyre_check/tools/__init__.py" in init_files)
self.assertTrue(
build_root + "/pyre_check/tools/upgrade/__init__.py" in init_files
)
def test_sync_files(self) -> None:
with tempfile.TemporaryDirectory() as build_root:
build_path = Path(build_root)
add_init_files(build_path)
sync_python_files(self.pyre_directory, build_path)
command_directory = build_path / "pyre_check/client/commands"
self.assertTrue(command_directory.is_dir())
@patch("subprocess.run")
def test_rsync(self, subprocess_run: Mock) -> None:
with tempfile.TemporaryDirectory() as build_root:
build_path = Path(build_root)
add_init_files(build_path)
sync_pysa_stubs(self.pyre_directory, build_path)
args, _ = subprocess_run.call_args
expected_args = [
"rsync",
"-avm",
"--filter=+ */",
build_root,
]
self.assertTrue(all(x in args[0] for x in expected_args))
subprocess_run.assert_called()
def test_patch_version(self) -> None:
with tempfile.TemporaryDirectory() as build_root:
build_path = Path(build_root)
add_init_files(build_path)
patch_version("0.0.21", build_path)
path = build_path / MODULE_NAME / "client/version.py"
self.assertTrue(path.is_file())
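# A conventional unittest entry point, assumed here so the module can be run
# directly; the upstream suite may be driven by its own runner instead.
if __name__ == "__main__":
    unittest.main()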
|
[
"[email protected]"
] | |
588945412a87dad60b0a723b7af48d15a27e0609
|
b3b5902f0cd292defdc1acccd8fa5c7890e8ba5d
|
/SPORTS/sports_spiders/sports_spiders/pipelines.py
|
4b51184e5232bc55a5bcf189c34fc18983dce0fa
|
[] |
no_license
|
headrun/SWIFT
|
83d3d926244e472813ef79b304ac1639750904a3
|
68bdd09d83ee2967a2378375d9b2cb6232a3a0cf
|
refs/heads/master
| 2023-01-25T00:29:57.810604 | 2020-12-01T01:22:16 | 2020-12-01T01:22:16 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,221 |
py
|
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import MySQLdb
import os
import copy
from scrapy import signals
from sports_spiders import configUtils
from sports_spiders import game_utils as gu
from sports_spiders.vtv_utils import VTV_SERVER_DIR
from datetime import datetime
DELEM = '_'
STEPS = '..'
UTILS_CFG = 'game_utils.cfg'
CONFIG = os.path.join(VTV_SERVER_DIR, UTILS_CFG)
STATS_DIR = os.path.join(STEPS, STEPS, 'SPORTS_STATS_DIR')
class SportsGames(object):
def __init__(self):
self.gids_file = None
self.spider_class = None
self.conn = None
self.cursor = None
self.hash_conf = None
self.spider_name = None
self.items_log = None
@classmethod
def from_crawler(cls, crawler):
pipeline = cls()
crawler.signals.connect(pipeline.spider_opened, signals.spider_opened)
crawler.signals.connect(pipeline.spider_closed, signals.spider_closed)
return pipeline
def spider_opened(self, spider):
today = datetime.now()
today = today.strftime("%Y-%m-%d")
if hasattr(spider, 'spider_type'):
self.spider_name = spider.name + DELEM \
+ spider.spider_type \
+ DELEM + today
else:
self.spider_name = spider.name + DELEM + today
self.hash_conf = configUtils.readConfFile(CONFIG)
self.conn = MySQLdb.connect(db=self.hash_conf['DB_NAME'],
host=self.hash_conf['HOST'],
user=self.hash_conf['USER'],
charset="utf8", use_unicode=True,
passwd="root")
self.cursor = self.conn.cursor()
self.spider_class = spider.__class__.__name__
self.gids_file = os.path.join(
STATS_DIR, self.spider_class + '_gids.pickle')
log_name = os.path.join(STATS_DIR, self.spider_name)
self.items_log = open(log_name, 'a+')
    def spider_closed(self, spider):
        self.conn.close()
        # also release the per-run items log opened in spider_opened
        self.items_log.close()
def write_log(self, item):
items_dict = copy.deepcopy(item._values)
items_dict['spider_class'] = self.spider_class
self.items_log.write(str(datetime.now()) + ': ' +
str(items_dict) + '\n\n')
def process_item(self, item, spider):
self.write_log(item)
sports_item = gu.SportsdbSetup(
item, self.cursor, self.spider_class, self.gids_file, self.hash_conf)
if item.get('result_type', '') and \
'standings' in item.get('result_type', ''):
sports_item.populate_standings()
sports_item.clean()
return
if item.get('result_type', '') and \
'roster' in item.get('result_type', ''):
sports_item.populate_rosters()
sports_item.clean()
return
sports_item.process_record()
sports_item.clean()
return item
class CheckDB(object):
def __init__(self):
pass
def process_item(self, item, spider):
pass
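# Hedged configuration sketch: pipelines such as these are switched on from the
# project's settings.py; the module path and priorities below are assumptions.
#
#     ITEM_PIPELINES = {
#         'sports_spiders.pipelines.SportsGames': 300,
#         'sports_spiders.pipelines.CheckDB': 400,
#     }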
|
[
"[email protected]"
] | |
bf0e511132c09ac1f4357a51bc4e9d9d89c5c22a
|
df541a802b2dfa89d3aab14af627358dc7c76e6e
|
/APP่ชๅจๅ/StoneUIFramework/testcase/็ฉบ้ด/test3_ๅข้ไบบไบไปปๅ
/TeamAssignJob003.py
|
0c43aa4c91f7c63a4792fccfbf00acf0681abc82
|
[] |
no_license
|
gupan2018/PyAutomation
|
de966aff91f750c7207c9d3f3dfb488698492342
|
230aebe3eca5799c621673afb647d35a175c74f1
|
refs/heads/master
| 2021-09-07T19:44:20.710574 | 2017-12-22T15:58:23 | 2017-12-22T15:58:23 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,445 |
py
|
__author__ = 'Administrator'
# -*- coding: utf-8 -*-
import unittest
from time import sleep
import logging
from StoneUIFramework.public.common.Connect import Connect
from StoneUIFramework.public.common.publicfunction import Tools
from StoneUIFramework.config.globalparam import GlobalParam
from StoneUIFramework.public.handle.space.SPACEHANDLE5 import _SPACEHANDLE5
from StoneUIFramework.testcase.็ฉบ้ด.test3_ๅข้ไบบไบไปปๅ
.TeamAssignJob import TeamAssignJob
from StoneUIFramework.public.common.datainfo import DataInfo
# Team personnel assignment
class team_Assign(unittest.TestCase):
    @classmethod  # decorator: class method
    def setUpClass(self):  # executed first
        # set up the connection
        cnn = Connect()
        self.driver = cnn.connect()
        # create the tools helper
        self.tools = Tools(self.driver)  # tools helper
        # create the _SPACEHANDLE5 object that locates the space controls
        self.handle = _SPACEHANDLE5(self.driver)
        # create the object that reads the configuration
        cf = GlobalParam('config', 'path_file.conf')
        # fetch the screenshot path, the log path and the log file name
        self.screen_path = cf.getParam('space', "path_003")  # screenshot path from the config file
        self.log_path = cf.getParam('space', "log")  # log path from the config file
        self.logfile = cf.getParam('space', "logfile")  # log file name
        # create the TeamAssignJob and Closespace objects
        self.SpaceTa = TeamAssignJob()
        sleep(1)
        # test data
        d = DataInfo()  # create a DataInfo() object
        self.spacename = d.cell("test003", 2, 1)  # test space 123
    def test_teamassign(self):
        """Team personnel assignment"""
        try:
            # self.tools.coverUpdate(self.log_path, self.screen_path)  # overwrite the log and screenshots
            self.tools.getLog(self.logfile)  # write the log
            # 1. space home page
            self.handle.Kjlb_click()
            self.tools.getScreenShot(self.screen_path, "็ฉบ้ด้ฆ้กต")
            # 2. select the space: test space 123
            self.handle.Kjlb_browseorgspaceByName_click(self.spacename)
            # 3. assign + remove
            self.SpaceTa.teamAssignJob(self.driver)
            logging.info("success@@!!!!!!!")  # declare success
        except Exception as err:
            self.tools.getScreenShot(self.screen_path, "ExceptionShot")
            logging.error("Error Information TeamAssignJob Outside : %s" % err)
            raise err
        finally:
            self.driver.quit()
|
[
"[email protected]"
] | |
fe582f20576126607ef96485a1246221704d6f63
|
313b64057838ae28ebca3d8bfff191cb13b80c61
|
/101/Q110.py
|
30d49ef376657c2ad1d4fa9dd4f9ec5c2c727348
|
[] |
no_license
|
veblush/Euler
|
e78961fb378be8e7c25070131e0adb72a83381f2
|
de54a77da41b3b58642055169bf2ea4090dbefb8
|
refs/heads/master
| 2016-08-05T09:12:16.775008 | 2013-03-13T08:31:11 | 2013-03-13T08:31:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 875 |
py
|
import math
import bisect
# ready prime array
primes = [2]
for i in xrange(3, 1000+1):
ii = int(math.sqrt(i))
composite = False
for p in primes:
if p > ii:
break
if i % p == 0:
composite = True
break
if not composite:
primes.append(i)
# f(a) = number of divisors of the square whose prime factorisation is a = [(p, k), ...]
f = lambda a: reduce(lambda x, y: x*y, (k+1 for p, k in a))
M = 4000000
# 1/x + 1/y = 1/n has (d(n**2) + 1) / 2 solutions, so d(n**2) has to pass m
m = (M-1)*2
# best-first search over candidate squares n**2, seeded with 2**2 = 4
q = [ (4, [(2, 2)]) ]
qs = set([4])
while True:
x, a = q.pop(0)
qs.remove(x)
    if f(a) > m:
        print x, f(a), a
        # recover n itself by halving every exponent in the factorisation of n**2
        print reduce(lambda x, y: x*y, (p**(k/2) for p, k in a))
        break
w = []
for i in range(len(a)+1):
t = x * primes[i] * primes[i]
if i < len(a):
e = a[:i] + [(a[i][0], a[i][1]+2)] + a[i+1:]
else:
e = a + [(primes[i], 2)]
w.append((t, e))
w = sorted(w, key=lambda x: x[0])
for k in w:
if k[0] not in qs:
bisect.insort_right(q, k)
qs.add(k[0])
|
[
"[email protected]"
] | |
775f2b2ed2fdb72a82f704927dee417183e0c016
|
c77d8dd4042d29150da277184a06834fb551c953
|
/env/lib/python3.9/site-packages/split_settings/tools.py
|
307d347a226f30c00a2e6f81dd9b49cd11a66cb6
|
[] |
no_license
|
josephaw1022/centralizedAPI
|
9978858ff6e906337062ab73b7e36576b0796a39
|
5850a7869519250912279c3a78d6b9585c9591de
|
refs/heads/master
| 2023-06-12T17:12:20.128798 | 2021-07-05T21:24:52 | 2021-07-05T21:24:52 | 383,256,639 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,737 |
py
|
# -*- coding: utf-8 -*-
"""
Organize Django settings into multiple files and directories.
Easily override and modify settings. Use wildcards and optional
settings files.
"""
import glob
import inspect
import os
import sys
from importlib.util import module_from_spec, spec_from_file_location
__all__ = ('optional', 'include') # noqa: WPS410
#: Special magic attribute that is sometimes set by `uwsgi` / `gunicorn`.
_INCLUDED_FILE = '__included_file__'
def optional(filename: str) -> str:
"""
    This function is kept for compatibility reasons.
    It replaces the old `optional` class, whose lowercase name used to trip
    pylint's `invalid-name` check; that check is no longer needed here.
Args:
filename: the filename to be optional.
Returns:
New instance of :class:`_Optional`.
"""
return _Optional(filename)
class _Optional(str): # noqa: WPS600
"""
Wrap a file path with this class to mark it as optional.
Optional paths don't raise an :class:`IOError` if file is not found.
"""
def include(*args: str, **kwargs) -> None: # noqa: WPS210, WPS231, C901
"""
Used for including Django project settings from multiple files.
Usage:
.. code:: python
from split_settings.tools import optional, include
include(
'components/base.py',
'components/database.py',
optional('local_settings.py'),
scope=globals(), # optional scope
)
Args:
*args: File paths (``glob`` - compatible wildcards can be used).
**kwargs: Settings context: ``scope=globals()`` or ``None``.
Raises:
IOError: if a required settings file is not found.
"""
# we are getting globals() from previous frame
# globals - it is caller's globals()
scope = kwargs.pop('scope', inspect.stack()[1][0].f_globals)
scope.setdefault('__included_files__', [])
included_files = scope.get('__included_files__')
including_file = scope.get(
_INCLUDED_FILE,
scope['__file__'].rstrip('c'),
)
conf_path = os.path.dirname(including_file)
for conf_file in args:
saved_included_file = scope.get(_INCLUDED_FILE)
pattern = os.path.join(conf_path, conf_file)
# find files per pattern, raise an error if not found
# (unless file is optional)
files_to_include = glob.glob(pattern)
if not files_to_include and not isinstance(conf_file, _Optional):
raise IOError('No such file: {0}'.format(pattern))
for included_file in files_to_include:
included_file = os.path.abspath(included_file) # noqa: WPS440
if included_file in included_files:
continue
included_files.append(included_file)
scope[_INCLUDED_FILE] = included_file
with open(included_file, 'rb') as to_compile:
compiled_code = compile( # noqa: WPS421
to_compile.read(), included_file, 'exec',
)
exec(compiled_code, scope) # noqa: S102, WPS421
# Adds dummy modules to sys.modules to make runserver autoreload
# work with settings components:
rel_path = os.path.relpath(included_file)
module_name = '_split_settings.{0}'.format(
rel_path[:rel_path.rfind('.')].replace('/', '.'),
)
spec = spec_from_file_location(
module_name, included_file,
)
module = module_from_spec(spec)
sys.modules[module_name] = module
if saved_included_file:
scope[_INCLUDED_FILE] = saved_included_file
elif _INCLUDED_FILE in scope:
scope.pop(_INCLUDED_FILE)
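# Hedged usage sketch (the file layout is an assumption): a Django settings
# module assembled from components, mixing plain, wildcard and optional includes.
#
#     # settings.py
#     from split_settings.tools import optional, include
#     include(
#         'components/base.py',
#         'components/*.py',              # glob-compatible wildcards are supported
#         optional('local_settings.py'),  # no IOError if this file is missing
#     )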
|
[
"[email protected]"
] | |
bcae5eb42cbe4e4b38d8b5de33efb5ba30dcb142
|
a836c17c1e8cfcc79f85a3f05e1a5c126e85da75
|
/login/views.py
|
587513fa07253c62b1d6c55ed1fa9cb5fad30159
|
[] |
no_license
|
Rabidza/hackathon
|
949729a0b8f0c5d1f18e054700a032630613e991
|
807399892f43fb67a26837080e49fb1773ddee8c
|
refs/heads/master
| 2021-01-10T16:10:34.371846 | 2016-01-06T06:34:25 | 2016-01-06T06:34:25 | 45,219,066 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,186 |
py
|
#views.py
from login.forms import *
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.views.decorators.csrf import csrf_protect
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.template import RequestContext
@csrf_protect
def register(request):
if request.method == 'POST':
form = RegistrationForm(request.POST)
if form.is_valid():
user = User.objects.create_user(
username=form.cleaned_data['username'],
password=form.cleaned_data['password'],
)
return HttpResponseRedirect('/real_page/')
else:
form = RegistrationForm()
variables = RequestContext(request, {
'form': form
})
return render_to_response(
'deroubaix/register.html',
variables,
)
def register_success(request):
return render_to_response(
'deroubaix/success.html',
)
def logout_page(request):
logout(request)
return HttpResponseRedirect('/')
@login_required
def home(request):
return render_to_response(
'home.html',
{ 'user': request.user }
)
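# Hedged wiring sketch (URL patterns are assumptions) for the project urls.py,
# in the same old-style Django idiom as the views above:
#
#     from django.conf.urls import url
#     from login import views
#     urlpatterns = [
#         url(r'^register/$', views.register),
#         url(r'^register/success/$', views.register_success),
#         url(r'^logout/$', views.logout_page),
#     ]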
|
[
"[email protected]"
] | |
a6bcd8507b54ca4a44c6428e7a49a7801f35cd74
|
13d0ad57a2f5deb83593e73843be7cbeeaad8d3d
|
/medium/knight_probability.py
|
be06b5a4f1c93083f6686ced4cbb0d22268fbe2f
|
[] |
no_license
|
mwong33/leet-code-practice
|
b21f277d73b30df9e681499733baad07979480a1
|
9c0e6294bf3b3614b185f0760906abad60f8d9b6
|
refs/heads/main
| 2023-03-29T20:35:43.841662 | 2021-03-31T22:05:44 | 2021-03-31T22:05:44 | 317,382,193 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,466 |
py
|
class Solution:
# Top Down Memo - O(N*N*K) time O(N*N*K) space
def knightProbability(self, N: int, K: int, r: int, c: int) -> float:
return self.knightProbabilityMemo(N, K, r, c, 0, {})
def knightProbabilityMemo(self, grid_size, total_moves, row, col, move_count, cache):
if (row, col, move_count) in cache:
return cache[(row, col, move_count)]
# Base Cases
if move_count == total_moves:
return 1
# Get Number of Options for current position
valid_positions = self.getValidMovesList(grid_size, row, col)
# Try all valid options and get their probabilites to stay on board
probability = 0
for new_position in valid_positions:
probability += (1/8) * self.knightProbabilityMemo(grid_size, total_moves, new_position[0], new_position[1], move_count+1, cache)
cache[(row, col, move_count)] = probability
return probability
def getValidMovesList(self, grid_size, x, y):
valid_positions = []
for dx, dy in [(1,2), (-1,2), (-1,-2), (1,-2), (2,1), (-2,1), (2,-1), (-2,-1)]:
nx, ny = x + dx, y + dy
# Check if we are out of bounds
if nx < 0 or ny < 0 or nx >= grid_size or ny >= grid_size:
continue
valid_positions.append((nx, ny))
return valid_positions
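# Quick check of the memoized recursion; 0.0625 is the known answer for a
# knight starting in a corner of a 3x3 board making 2 moves.
if __name__ == "__main__":
    print(Solution().knightProbability(3, 2, 0, 0))  # 0.0625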
|
[
"[email protected]"
] | |
b49a0876dd1c49261f74128ad2221410df781ed8
|
67d76057aee86c43d32e0b74f3ac94d521ee03d8
|
/tests/pyre.pkg/descriptors/timestamps.py
|
a53130b8d4f65d34e6904ffdfb98461d080e6fdd
|
[
"BSD-3-Clause"
] |
permissive
|
jlmaurer/pyre
|
0f94b1855bf029210f07c528747221751e37687f
|
6af38a83621d7d6228d147b4bb94f97fbb10f6e2
|
refs/heads/master
| 2023-05-25T04:33:19.907452 | 2020-06-18T14:07:54 | 2020-06-18T14:07:54 | 273,362,988 | 0 | 0 |
NOASSERTION
| 2021-06-10T23:42:14 | 2020-06-18T23:50:28 | null |
UTF-8
|
Python
| false | false | 1,135 |
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# michael a.g. aรฏvรกzis
# orthologue
# (c) 1998-2020 all rights reserved
#
"""
Verify that time conversions work as expected
"""
def test():
import pyre.descriptors
# create a descriptor
time = pyre.descriptors.timestamp()
    # coerce a string into a timestamp
    magic = time.coerce('1992-12-21 13:30:00')
# check
assert magic.hour == 13
assert magic.minute == 30
assert magic.second == 0
# now one with a different input format
time = pyre.descriptors.time(format='%Y/%m/%d %H|%M|%S')
# try again
magic = time.coerce(value='1992/12/21 13|30|00')
# check
assert magic.hour == 13
assert magic.minute == 30
assert magic.second == 0
# how about one
try:
# with the wrong format
time.coerce(value='13-30-00')
assert False
# it should fail
except time.CastingError:
# so no problem
pass
return
# main
if __name__ == "__main__":
# skip pyre initialization since we don't rely on the executive
pyre_noboot = True
# do...
test()
# end of file
|
[
"[email protected]"
] | |
b894061a7d8848b8131261f6320a605aa72345cb
|
59b0ebc4249f20edd0e87dc63784c6e8c138c7fd
|
/.history/anagrams.1_20180607000217.py
|
f4fa839c7c5fce48d1434066880e74d167a6f952
|
[] |
no_license
|
Los4U/first_python_programs
|
f397da10be3ef525995f3f220e3b60012a6accaa
|
c3fc33a38c84abd292cb2e86de63e09434fc7fc4
|
refs/heads/master
| 2020-03-22T08:09:40.426118 | 2018-07-04T17:17:58 | 2018-07-04T17:17:58 | 139,748,883 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 538 |
py
|
import sys
# read the word list from the file given as the first command-line argument
file = open(str(sys.argv[1]), "r")
words = file.read().splitlines()
# for every word, print its first index, then every index holding an anagram
# of it (two words are anagrams when their sorted letters are equal)
for word in words:
    print(words.index(word))
    for i in range(len(words)):
        if sorted(word) == sorted(words[i]):
            print(str(i) + ": " + word + " - " + words[i])
file.close()
|
[
"[email protected]"
] | |
5f417a6eea67c50ba9f5cf04eac634c569c0d4e4
|
793ec68fc65013b8bd3689a850fddff4e50aa34f
|
/func.py
|
5b98cbcd034f0f3b3ad17dd19fe4f6eb5ddf9d4a
|
[] |
no_license
|
Acrelion/various-files
|
1ec27f3c03718a0c01756bac47134daa74cc3870
|
becb38141110a31e817402de09c4b6d44d0580f6
|
refs/heads/master
| 2021-09-28T16:41:32.949690 | 2018-11-18T13:44:36 | 2018-11-18T13:44:36 | 108,088,685 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 89 |
py
|
def do_sum(arr):
result = 0
for i in arr:
result += i
return result
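# Minimal usage check; behaves like the builtin sum() for a list of numbers.
if __name__ == "__main__":
    print(do_sum([1, 2, 3, 4]))  # 10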
|
[
"[email protected]"
] | |
5549cfd4d5d6343f8d3430fa4093b7f1d0b8fd9a
|
05e634a232574f676434dfa8e4183f3d0a1a4bc9
|
/paddlecv/ppcv/ops/output/tracker.py
|
0df393bc437d7098472b6cb7eebcfd7a5c2d1fc1
|
[
"Apache-2.0"
] |
permissive
|
PaddlePaddle/models
|
67ac00d93c5255ac64a9d80ae5be2e8927e47cee
|
8042c21b690ffc0162095e749a41b94dd38732da
|
refs/heads/release/2.4
| 2023-09-04T15:23:59.543625 | 2023-07-20T11:54:16 | 2023-07-20T11:54:16 | 88,868,842 | 7,633 | 3,597 |
Apache-2.0
| 2023-09-05T23:23:54 | 2017-04-20T13:30:15 |
Python
|
UTF-8
|
Python
| false | false | 3,870 |
py
|
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import numpy as np
import math
import glob
import paddle
import cv2
import json
from collections import defaultdict
from .base import OutputBaseOp
from .detection import draw_det
from ppcv.utils.logger import setup_logger
from ppcv.core.workspace import register
from PIL import Image, ImageDraw, ImageFile
logger = setup_logger('TrackerOutput')
def write_mot_results(filename, results, data_type='mot', num_classes=1):
# support single and multi classes
if data_type in ['mot', 'mcmot']:
save_format = '{frame},{id},{x1},{y1},{w},{h},{score},{cls_id},-1,-1\n'
elif data_type == 'kitti':
save_format = '{frame} {id} car 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n'
else:
raise ValueError(data_type)
frame_id, tk_bboxes, tk_scores, tk_ids, tk_cls_ids = results
frame_id = -1 if data_type == 'kitti' else frame_id
with open(filename, 'w') as f:
for bbox, score, tk_id, cls_id in zip(tk_bboxes, tk_scores, tk_ids,
tk_cls_ids):
if tk_id < 0: continue
if data_type == 'mot':
cls_id = -1
x1, y1, x2, y2 = bbox
w, h = x2 - x1, y2 - y1
line = save_format.format(
frame=frame_id,
id=tk_id,
x1=x1,
y1=y1,
x2=x2,
y2=y2,
w=w,
h=h,
score=score,
cls_id=cls_id)
f.write(line)
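# For reference, one 'mot' line produced by the template above has the shape
#   frame,id,x1,y1,w,h,score,cls_id,-1,-1
# e.g. "1,3,100.0,50.0,40.0,80.0,0.97,-1,-1,-1" (values are illustrative only).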
@register
class TrackerOutput(OutputBaseOp):
def __init__(self, model_cfg, env_cfg):
super(TrackerOutput, self).__init__(model_cfg, env_cfg)
def __call__(self, inputs):
total_res = []
vis_images = []
for res in inputs:
fn, image, tk_bboxes, tk_scores, tk_ids, tk_cls_ids, tk_cls_names = list(
res.values())[:7]
tk_names = [
'{} {}'.format(tk_cls_name, tk_id)
for tk_id, tk_cls_name in zip(tk_ids, tk_cls_names)
]
image = draw_det(image, tk_bboxes, tk_scores, tk_names, tk_ids)
res.pop('input.image')
if self.frame_id != -1:
res.update({'frame_id': self.frame_id})
logger.info(res)
if self.save_img:
vis_images.append(image)
if self.save_res or self.return_res:
total_res.append(res)
if self.save_res:
video_name = fn.split('/')[-1].split('.')[0]
output_dir = os.path.join(self.output_dir, video_name)
if not os.path.exists(output_dir):
os.makedirs(output_dir, exist_ok=True)
out_path = os.path.join(output_dir, '{}.txt'.format(self.frame_id))
logger.info('Save output result to {}'.format(out_path))
write_mot_results(
out_path,
[self.frame_id, tk_bboxes, tk_scores, tk_ids, tk_cls_ids])
if self.return_res:
if vis_images:
for i, vis_im in enumerate(vis_images):
total_res[i].update({'output': vis_im})
return total_res
return
|
[
"[email protected]"
] | |
4c8eb4e0b011ab53434d6d5b85b39b91903957ab
|
4a76ac7ad1aaeec44729ab6d5b121b1cae0d910c
|
/Week 2/FindTheMedian.py
|
f1017afb1254cfc9663bbd2a334cfc369fdfa9d9
|
[] |
no_license
|
kalmad99/CompetitiveProgramming
|
2d825e839faa9e13ef43dbb45498bd3eef6723ab
|
6cbb1f12f7670d0016fa2af8f2dd597d9123070d
|
refs/heads/main
| 2023-03-25T20:18:23.389396 | 2021-03-24T21:36:52 | 2021-03-24T21:36:52 | 325,816,614 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 907 |
py
|
def findMedian(arr):
negarr = []
posarr = []
finarr = []
for i in arr:
if i < 0:
negarr.append(i)
else:
posarr.append(i)
for i in range(len(negarr)):
negarr[i] = -1 * negarr[i]
negarr = sorting(negarr)
for i in range(len(negarr) - 1, -1, -1):
negarr[i] = -1 * negarr[i]
posarr = sorting(posarr)
for i in negarr:
finarr.append(i)
for i in posarr:
finarr.append(i)
return finarr[len(arr) // 2]
def sorting(arr):
    # counting sort; assumes the values fit in [0, 10000]
    output = [0 for i in range(len(arr))]
    counter = [0 for i in range(20001)]
    for i in arr:
        counter[i] += 1
    # prefix sums; the negative indices wrap around the tail of counter,
    # which stays all-zero for the non-negative inputs used here
    for i in range(-10000, 10000):
        counter[i] += counter[i - 1]
for i in range(len(arr)):
output[counter[arr[i]] - 1] = arr[i]
counter[arr[i]] -= 1
return output
nums = [0, 1, 2, 4, 6, 5, 3]
print(findMedian(nums))
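# sorted order is [0, 1, 2, 3, 4, 5, 6], so the middle element printed is 3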
|
[
"[email protected]"
] | |
b6efb199a5f5c5dfbf9ef558876f865598c298da
|
56f428833bac273c180dd95bafd0a8da992349c1
|
/bin/Utils/GetFiles.py
|
6b9e2aa465828987020f0a802346239f3c3a6ea5
|
[
"BSD-2-Clause"
] |
permissive
|
LlianeFR/craft
|
8e6ac764a76812628c1d3dc0be75c7aceea771f4
|
0d1e5e5c12fa817cf411ab4fc4a9d1815432e461
|
refs/heads/master
| 2020-03-25T20:19:23.376298 | 2018-08-06T09:32:30 | 2018-08-06T09:32:56 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,960 |
py
|
# -*- coding: utf-8 -*-
# Copyright Hannah von Reth <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
### fetch functions
from CraftCore import CraftCore
from CraftDebug import deprecated
import utils
import os
import urllib
import subprocess
import sys
@deprecated("Utils.GetFiles.getFile")
def getFiles(urls, destdir, suffix='', filenames=''):
"""download files from 'url' into 'destdir'"""
CraftCore.log.debug("getfiles called. urls: %s, filenames: %s, suffix: %s" % (urls, filenames, suffix))
# make sure distfiles dir exists
if (not os.path.exists(destdir)):
os.makedirs(destdir)
if type(urls) == list:
urlList = urls
else:
urlList = urls.split()
if filenames == '':
filenames = [os.path.basename(x) for x in urlList]
if type(filenames) == list:
filenameList = filenames
else:
filenameList = filenames.split()
dlist = list(zip(urlList, filenameList))
for url, filename in dlist:
if (not getFile(url + suffix, destdir, filename)):
return False
return True
def getFile(url, destdir, filename='') -> bool:
"""download file from 'url' into 'destdir'"""
CraftCore.log.debug("getFile called. url: %s" % url)
if url == "":
CraftCore.log.error("fetch: no url given")
return False
pUrl = urllib.parse.urlparse(url)
if not filename:
filename = os.path.basename(pUrl.path)
if pUrl.scheme == "s3":
return s3File(url, destdir, filename)
# curl and wget basically only work when we have a cert store on windows
if not CraftCore.compiler.isWindows or os.path.exists(os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")):
if CraftCore.cache.findApplication("wget"):
return wgetFile(url, destdir, filename)
if CraftCore.cache.findApplication("curl"):
return curlFile(url, destdir, filename)
if os.path.exists(os.path.join(destdir, filename)):
return True
powershell = CraftCore.cache.findApplication("powershell")
if powershell:
filename = os.path.join(destdir, filename)
return utils.system([powershell, "-NoProfile", "-ExecutionPolicy", "ByPass", "-Command",
f"(new-object net.webclient).DownloadFile(\"{url}\", \"{filename}\")"])
else:
def dlProgress(count, blockSize, totalSize):
if totalSize != -1:
percent = int(count * blockSize * 100 / totalSize)
utils.printProgress(percent)
else:
sys.stdout.write(("\r%s bytes downloaded" % (count * blockSize)))
sys.stdout.flush()
try:
urllib.request.urlretrieve(url, filename=os.path.join(destdir, filename),
reporthook=dlProgress if CraftCore.debug.verbose() >= 0 else None)
except Exception as e:
CraftCore.log.warning(e)
return False
if CraftCore.debug.verbose() >= 0:
sys.stdout.write("\n")
sys.stdout.flush()
return True
def curlFile(url, destdir, filename=''):
"""download file with curl from 'url' into 'destdir', if filename is given to the file specified"""
curl = CraftCore.cache.findApplication("curl")
command = [curl, "-C", "-", "--retry", "10", "-L", "--ftp-ssl", "--fail"]
cert = os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")
if os.path.exists(cert):
command += ["--cacert", cert]
# the default of 20 might not be enough for sourceforge ...
command += ["--max-redirs", "50"]
command += ["-o", os.path.join(destdir, filename)]
command += [url]
CraftCore.log.debug("curlfile called")
if not CraftCore.settings.getboolean("ContinuousIntegration", "Enabled", False) and CraftCore.debug.verbose() < 1 and CraftCore.cache.checkCommandOutputFor(curl, "--progress-bar"):
command += ["--progress-bar"]
CraftCore.log.info(f"curl {url}")
return utils.system(command, displayProgress=True, logCommand=False, stderr=subprocess.STDOUT)
else:
if CraftCore.debug.verbose() > 0:
command += ["-v"]
return utils.system(command)
def wgetFile(url, destdir, filename=''):
"""download file with wget from 'url' into 'destdir', if filename is given to the file specified"""
wget = CraftCore.cache.findApplication("wget")
command = [wget, "-c", "-t", "10"]
cert = os.path.join(CraftCore.standardDirs.etcDir(), "cacert.pem")
if os.path.exists(cert):
command += ["--ca-certificate", cert]
# the default of 20 might not be enough for sourceforge ...
command += ["--max-redirect", "50"]
if CraftCore.settings.getboolean("General", "EMERGE_NO_PASSIVE_FTP", False):
command += ["--no-passive-ftp"]
if not filename:
command += ["-P", destdir]
else:
command += ["-O", os.path.join(destdir, filename)]
command += [url]
CraftCore.log.debug("wgetfile called")
if not CraftCore.settings.getboolean("ContinuousIntegration", "Enabled", False) and CraftCore.debug.verbose() < 1 and CraftCore.cache.checkCommandOutputFor(wget, "--show-progress"):
command += ["-q", "--show-progress"]
CraftCore.log.info(f"wget {url}")
return utils.system(command, displayProgress=True, logCommand=False, stderr=subprocess.STDOUT)
else:
return utils.system(command)
def s3File(url, destdir, filename):
aws = CraftCore.cache.findApplication("aws")
if not aws:
CraftCore.log.critical("aws not found, please install awscli. \"pip install awscli\" ")
return False
return utils.system([aws, "s3", "cp", url, os.path.join(destdir, filename)])
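# Hedged usage sketch (URL and path are assumptions): fetch a tarball into the
# Craft download directory; the helper picks wget/curl/powershell/urllib itself.
#
#     ok = getFile("https://example.org/pkg-1.0.tar.gz", "/craft/downloads")
#     # the filename defaults to the basename of the URL path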
|
[
"[email protected]"
] | |
5a67bbde46a7bcb4dcf509d2e277e112851804a4
|
f185d98c2d56c4c212c023ad71514e2ad398950b
|
/nv/resources/avatars.py
|
50dd2d40463da4535700a2f9831c1f3783d5851f
|
[
"MIT"
] |
permissive
|
new-valley/new-valley
|
24b9591eba21ed85634d55d6ac36f0eb25d27198
|
8810739cab52ad4dea2f4005a59b8b7afea1e2db
|
refs/heads/dev
| 2022-12-27T12:26:36.144077 | 2018-12-02T18:51:21 | 2018-12-02T18:51:21 | 156,936,213 | 0 | 0 |
MIT
| 2022-12-08T01:17:15 | 2018-11-10T01:27:24 |
Python
|
UTF-8
|
Python
| false | false | 2,314 |
py
|
from flask import request
from flask_restful import (
Resource,
)
from flask_jwt_extended import (
jwt_required,
get_jwt_identity,
)
from webargs.flaskparser import parser
from webargs.fields import (
Str,
Int,
)
from webargs import validate
from nv.models import (
Avatar,
)
from nv.schemas import (
AvatarSchema,
)
from nv.util import (
mk_errors,
)
from nv.permissions import (
CreateAvatar,
EditAvatar,
DeleteAvatar,
)
from nv.database import db
from nv.resources.common import (
parse_get_coll_args,
generic_get_coll,
generic_get,
generic_post,
generic_put,
generic_delete,
get_user,
get_obj,
check_permissions,
)
class AvatarsRes(Resource):
def get(self):
args = parse_get_coll_args(request)
objs = generic_get_coll(
full_query=Avatar.query,
schema=AvatarSchema(many=True),
**args,
)
return objs
@jwt_required
def post(self):
user = get_user(username=get_jwt_identity())
check_permissions(user, [
CreateAvatar(),
])
ret = generic_post(
schema=AvatarSchema(),
data=request.form,
)
return ret
class AvatarRes(Resource):
def get(self, avatar_id):
ret = generic_get(
obj=Avatar.query.get(avatar_id),
schema=AvatarSchema(),
)
return ret
@jwt_required
def delete(self, avatar_id):
user = get_user(username=get_jwt_identity())
avatar = get_obj(Avatar.query.filter_by(avatar_id=avatar_id),
'avatar does not exist')
check_permissions(user, [
DeleteAvatar(avatar),
])
ret = generic_delete(
obj=Avatar.query.get(avatar_id),
)
return ret
@jwt_required
def put(self, avatar_id):
user = get_user(username=get_jwt_identity())
avatar = get_obj(Avatar.query.filter_by(avatar_id=avatar_id),
'avatar does not exist')
check_permissions(user, [
EditAvatar(avatar, attributes=set(request.form)),
])
ret = generic_put(
obj=Avatar.query.get(avatar_id),
schema=AvatarSchema(),
data=request.form
)
return ret
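# Hedged wiring sketch (routes are assumptions): flask_restful resources like
# these are typically registered on an Api instance elsewhere in the app.
#
#     api.add_resource(AvatarsRes, '/avatars')
#     api.add_resource(AvatarRes, '/avatars/<int:avatar_id>')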
|
[
"[email protected]"
] | |
86b34b358fa22363f9d0c994feca0b1566cf675e
|
f9be6f15af272fce4565a74d85dfe3298ea22315
|
/aiogram/types/game.py
|
f5861d90c825331214bf4649639d797f96a9e959
|
[
"MIT"
] |
permissive
|
hellboi-atul/aiogram
|
7654ca127b9b709b8700cb1755674a895abb7d05
|
3440ab3c96e31384cbcf515e75904dcade6f5fd0
|
refs/heads/dev-2.x
| 2023-01-27T12:09:21.177136 | 2020-12-05T13:28:56 | 2020-12-05T13:28:56 | 318,952,603 | 1 | 0 |
MIT
| 2020-12-06T04:44:05 | 2020-12-06T04:44:04 | null |
UTF-8
|
Python
| false | false | 725 |
py
|
import typing
from . import base
from . import fields
from .animation import Animation
from .message_entity import MessageEntity
from .photo_size import PhotoSize
class Game(base.TelegramObject):
"""
This object represents a game.
Use BotFather to create and edit games, their short names will act as unique identifiers.
https://core.telegram.org/bots/api#game
"""
title: base.String = fields.Field()
description: base.String = fields.Field()
photo: typing.List[PhotoSize] = fields.ListField(base=PhotoSize)
text: base.String = fields.Field()
text_entities: typing.List[MessageEntity] = fields.ListField(base=MessageEntity)
animation: Animation = fields.Field(base=Animation)
|
[
"[email protected]"
] | |
6212f545fdf293959684ff38828655f7139ad795
|
3d5035d6b6ece6beca77ee625b6f1b4a906c4c3a
|
/project-addons/res_partner_farm_data/models/__init__.py
|
2813df0338d6631c828c32d88235013156f216d1
|
[] |
no_license
|
fadeldamen/CMNT_00033_2015_COOP_IV
|
98ba3fd4ca5df17651f251c76aec80b92c497603
|
f1f0027b25dffe1281de956c146340dd825cbe9b
|
refs/heads/master
| 2020-04-07T22:00:58.920747 | 2016-02-02T17:39:22 | 2016-02-02T17:39:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,261 |
py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 Comunitea All Rights Reserved
# $Jesรบs Ventosinos Mayor <[email protected]>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import yearly_data
from . import employee_count
from . import cow_count
from . import res_partner
from . import res_company
from . import cost_imputation
from . import output_quota
from . import lot
from . import account_fiscalyear
from . import stock
from . import product
|
[
"[email protected]"
] | |
e224ca5de74c0f709f85248fb16c8941f8f53647
|
191a7f83d964f74a2b3c7faeb4fc47d9c63d521f
|
/.history/main_20210523135820.py
|
e8c2172120c7256018689f40de758c86f45f3e5b
|
[] |
no_license
|
AndreLiu1225/Kinder-Values-Survey
|
2a317feee8d5b17c27da2b2116742656e35d8ab9
|
090c27da0c822abb7dfc0ec6e13ae1b3dcb7bbf3
|
refs/heads/master
| 2023-05-03T00:26:00.481423 | 2021-06-04T03:24:19 | 2021-06-04T03:24:19 | 371,989,154 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,322 |
py
|
from flask import Flask
from flask_wtf import FlaskForm
from wtforms import StringField, TextField, SubmitField, IntegerField, SelectField, RadioField
from wtforms.validators import DataRequired, Email, EqualTo, Length, ValidationError
class MCQ(FlaskForm):
age = IntegerField("Age", [DataRequired()])
profession = StringField("Profession", [DataRequired(), Length(max=30, min=2)])
power = RadioField("Defining goal: social status and prestige, control or dominance over people and resources."
,choices=[('Yes','I want to be dominant'), ('No', 'Dominance over others is not the main priority')])
tradition = RadioField("Defining goal: respect, commitment, and acceptance of the customs and ideas that oneโs culture or religion provides."
,choices=[('Yes', 'I would contribute to the survival and uniqueness of traditon'), ('No', 'I am always open and ready to change')])
    achievement = RadioField("Defining goal: personal success through demonstrating competence according to social standards."
    ,choices=[('Yes', "I want to demonstrate competence in prevailing cultural standards and obtain social approval."), ('No', "I may want to achieve excellence, but it doesn't need to be socially approved")])
stimulation = RadioField("Defining goal: excitement, novelty, and challenge in life."
,choices=[('Yes', 'I want a challenging and exciting life.'), ('No', 'I prefer a life with lower amounts of stress.')])
self_direction = RadioField("Defining goal: independent thought and actionโchoosing, creating, exploring."
,choices=[('Yes', 'I like freedom in thought and expression.'), ('No', 'Nah')])
hedonism = RadioField("Defining goal: pleasure or sensuous gratification for oneself."
,choices=[('Yes', 'My pleasure and satisfaction are of utmost priority'), ('No', 'Welfare of others is also important.')])
    conformity = RadioField("Defining goal: restraint of actions, inclinations, and impulses likely to upset or harm others and violate social expectations or norms."
    ,choices=[('Yes', 'I do care about how others view me and follow the social norms'), ('No', 'I am willing to act outside social norms when they conflict with my own judgement')])  # the 'No' choice was an empty placeholder in this snapshot; reconstructed to match the other fields
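    # Hedged continuation sketch: this .history snapshot ends mid-class; a
    # submit field and the usual Flask wiring (all names assumed) would follow:
    #
    #     submit = SubmitField("Submit")
    #
    # app = Flask(__name__)
    # @app.route('/', methods=['GET', 'POST'])
    # def survey():
    #     form = MCQ()
    #     ...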
|
[
"[email protected]"
] | |
09fbe7c3b04b86dd5917185ce79e3bbccb6ef515
|
8d3fd439c6d5a52eda578847545234b2ebdc4f3b
|
/ๆบๅจๅญฆไน ็พ็ง/pycode/numpyๅฎๆ.py
|
5f899edd24ae4ec602d0f7df5ce3d9c76f5df722
|
[] |
no_license
|
gm-p/practice_demo
|
d1530dcdb3de95832f1fa5b6e30c75e7ca6acc05
|
7eaa825fc634ad21aea48713133c0266a44ac54a
|
refs/heads/main
| 2023-03-31T23:01:02.302579 | 2021-04-04T13:29:50 | 2021-04-04T13:29:50 | 354,193,496 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,759 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 28 22:19:13 2019
https://mp.weixin.qq.com/s?__biz=Mzg5NzAxMDgwNg==&mid=2247484006&idx=1&sn=1d0b49c0200e901915a99d29f0dadc79&chksm=c0791f7ff70e966929ed3a9b1358beb3b31a1106084f4332176476eafebf3a407fff2150d5b3&scene=21#wechat_redirect
@author: guangming.pan
"""
# 1.1 convert a list to a matrix
import numpy as np
array = np.array([
    [1, 3, 5],
    [4, 6, 9]
])
print(array)
# 1.2 number of dimensions
print('number of dim:', array.ndim)
# 1.3 number of rows and columns
print('shape:', array.shape)
# 1.4 number of elements
print('size:', array.size)
# 2 creating arrays with numpy
# 2.1 create a 1-D array
a = np.array([2, 23, 4], dtype=np.int32)  # np.int used to default to int32
print(a)
print(a.dtype)
# 2.2 create a multi-dimensional array
a = np.array([[2, 3, 4],
              [3, 4, 5]])
print(a)  # generates a matrix with 2 rows and 3 columns
# 2.3 create an all-zero array
a = np.zeros((3, 4))
print(a)  # generates a 3x4 all-zero matrix
# 2.4 create an all-one array
a = np.ones((3, 4), dtype=int)  # plain int; the np.int alias was removed from newer numpy
print(a)
# 2.5 create an empty array
a = np.empty((3, 4))  # every value is close to 0
print(a)
# 2.6 create a sequence array
a = np.arange(10, 21, 2)  # values from 10 to 20 with step 2
print(a)
# 2.7 reshape
b = a.reshape((2, 3))
print(b)
# 2.8 create evenly spaced data
a = np.linspace(1, 10, 20)  # start 1, end 10, evenly split into 20 points (a line segment)
print(a)
# 2.9 reshape of linspace
b = a.reshape((5, 4))
print(b)
# 3. basic numpy operations
# 3.1 one-dimensional matrix operations
a = np.array([10, 20, 30, 40])
b = np.arange(4)
print(a, b)
c = a - b
print(c)
print(a*b)  # a.dot(b) would instead give the sum of the elementwise products
# in numpy, squaring each element of a matrix relies on the ** operator
c = b**2
print(c)
c = np.sin(a)
print(c)
print(b < 2)
a = np.array([1, 1, 4, 3])
b = np.arange(4)
print(a == b)
# 3.2 multi-dimensional matrix operations
a = np.array([[1, 1], [0, 1]])
b = np.arange(4).reshape((2, 2))
print(a)
print(b)
# multi-dimensional matrix multiplication
c = a.dot(b)  # first way
print(c)
c = np.dot(a, b)  # second way
print(c)
# multi-dimensional matrix multiplication cannot use the '*' operator directly
a = np.random.random((2, 4))
print(a)
print(np.sum(a))
print(np.min(a))
print(np.max(a))
print("sum = ", np.sum(a, axis=1))
print("min = ", np.min(a, axis=0))
print("max = ", np.max(a, axis=1))
'''
If you need to search along rows or columns, assign axis:
axis = 0 searches column by column
axis = 1 searches row by row
'''
# 3.3 basic computations
A = np.arange(2, 14).reshape((3, 4))
print(A)
# index of the minimum element
print(np.argmin(A))
# index of the maximum element
print(np.argmax(A))
# mean of the whole matrix
print(np.mean(A))
print(np.average(A))
print(A.mean())
# median
print(np.median(A))
# cumulative sum
print(np.cumsum(A))
# successive differences
B = np.array([[3, 5, 9], [4, 8, 10]])
print(np.diff(B))
C = np.array([[0, 5, 9], [4, 0, 10]])
print(np.nonzero(B))  # returns the coordinates of all non-zero elements as two arrays, one of row indices and one of column indices
print(np.nonzero(C))
# sort like a Python list
A = np.arange(14, 2, -1).reshape((3, 4))  # -1 means stepping backwards
print(A)
print(np.sort(A))  # only sorts each row in ascending order
print(np.transpose(A))  # matrix transpose
print(A.T)  # matrix transpose
print(A)
print(np.clip(A, 5, 9))
'''
clip(Array, Array_min, Array_max): if Array_min < X < Array_max, X stays unchanged;
otherwise, if X < Array_min then X = Array_min,
and if X > Array_max then X = Array_max
'''
# 4. numpy indexing and slicing
A = np.arange(3, 15)
print(A)
print(A[3])
B = A.reshape(3, 4)
print(B)
print(B[2])
print(B[0][2])
print(B[0, 2])
# list-style slicing
print(B[1, 1:3])
for row in B:
    print(row)
# to print columns, just transpose first
for column in B.T:
    print(column)
# flatten multi-dimensional to one-dimensional
A = np.arange(3, 15).reshape((3, 4))
print(A)
print(A.flatten())
# flat is an iterator; it is an object attribute in its own right
for item in A.flat:
    print(item)
# 5. merging numpy arrays
# 5.1 array merging
A = np.array([1, 1, 1])
B = np.array([2, 2, 2])
print(np.vstack((A, B)))  # vertical stack: merge top to bottom
C = np.vstack((A, B))
print(C)
print(A.shape, B.shape, C.shape)
D = np.hstack((A, B))  # horizontal stack: merge left to right
print(D)
print(A.shape, B.shape, D.shape)
# 5.2 turn an array into a matrix
print(A[np.newaxis, :])
print(A[np.newaxis, :].shape)
print(A[:, np.newaxis])
# 5.3 merging several matrices
# first concatenate example
print("--------------")
print(A[:, np.newaxis].shape)
A = A[:, np.newaxis]  # array to matrix
B = B[:, np.newaxis]  # array to matrix
C = np.concatenate((A, B, B, A), axis=0)  # axis=0 merges vertically
print(C)
C = np.concatenate((A, B), axis=1)  # axis=1 merges horizontally
print(C)
# second concatenate example
a = np.arange(8).reshape(2, 4)
b = np.arange(8).reshape(2, 4)
print(a)
print(b)
c = np.concatenate((a, b), axis=0)  # axis=0 merges several matrices vertically
print(c)
c = np.concatenate((a, b), axis=1)  # axis=1 merges several matrices horizontally
print(c)
# 6. splitting numpy arrays
# 6.1 construct a matrix with 3 rows and 4 columns
A = np.arange(12).reshape((3, 4))
print(A)
# 6.2 equal splits
print(np.split(A, 2, axis=1))  # vertical splitting uses the same axis as horizontal merging
print(np.split(A, 3, axis=0))  # horizontal splitting uses the same axis as vertical merging
# 6.3 unequal splits
print(np.array_split(A, 3, axis=1))
# 6.4 other splitting helpers
print(np.vsplit(A, 3))  # horizontal split, equivalent to print(np.split(A, 3, axis=0))
print(np.hsplit(A, 2))  # vertical split, equivalent to print(np.split(A, 2, axis=1))
# 7. numpy copy vs =
# 7.1 assignment with = aliases the array
a = np.arange(4)
print(a)
b = a
c = a
d = b
a[0] = 11
print(a)
print(b)
print(c)
print(d)
print(b is a)
print(c is a)
print(d is a)
d[1:3] = [22, 33]
print(a)
print(b)
print(c)
# 7.2 assignment via copy() has no aliasing
a = np.arange(4)
print(a)
b = a.copy()
print(b)
a[3] = 44
print(a)
print(b)
# 8. broadcasting
a = np.array([[0, 0, 0],
              [10, 10, 10],
              [20, 20, 20],
              [30, 30, 30]])
b = np.array([0, 1, 2])
print(a + b)
b = np.tile([0, 1, 2], (4, 1))  # repeat [0, 1, 2] four times along the rows and once along the columns
print(a + b)
# 9. common functions
# 9.1 np.bincount()
x = np.array([1, 2, 3, 3, 0, 1, 4])
print(np.bincount(x))  # counts how many times each value occurs, treated as an index
w = np.array([0.3, 0.5, 0.7, 0.6, 0.1, -0.9, 1])
print(np.bincount(x, weights=w))
# when minlength exceeds the number of bins actually present in x,
# the trailing bins that are never visited are simply set to 0
print(np.bincount(x, minlength=7))
# 9.2 np.argmax()
# signature: numpy.argmax(a, axis=None, out=None)
# purpose: return the indices of the maximum values along an axis
x = [[1, 3, 3],
     [7, 5, 2]]
print(np.argmax(x))
# axis=0 works column-wise: for each column, find the index of the maximum
x = [[1, 3, 3],
     [7, 5, 2]]
print(np.argmax(x, axis=0))
# axis=1 works row-wise: for each row, find the index of the maximum
print(np.argmax(x, axis=1))
# when the maximum is repeated, the first index is returned
x = np.array([1, 3, 2, 3, 0, 1, 0])
print(x.argmax())
x = np.array([1, 2, 3, 3, 0, 1, 4])
print(np.argmax(np.bincount(x)))
# 9.4 rounding precision
# round to the specified number of decimal places
print(np.around([-0.6, 1.2798, 2.357, 9.67, 13], decimals=0))  # negatives round toward the larger absolute value
print(np.around([1.2798, 2.357, 9.67, 13], decimals=1))
print(np.around([1.2798, 2.357, 9.67, 13], decimals=2))
print(np.around([1, 2, 5, 6, 56], decimals=-1))  # -1 rounds at the tens place
print(np.around([1, 2, 5, 50, 56, 190], decimals=-2))
# compute the N-th order discrete difference along the given axis
x = np.arange(1, 16).reshape((3, 5))
print(x)
print(np.diff(x, axis=1))  # axis=1 by default
print(np.diff(x, axis=0))
# floor
print(np.floor([-0.6, -1.4, -0.1, -1.8, 0, 1.4, 1.7]))  # negatives are floored leftwards (toward -inf)
# ceil
print(np.ceil([1.2, 1.5, 1.8, 2.1, 2.0, -0.5, -0.6, -0.3]))
# lookup
x = np.array([[1, 0],  # use np.where to replace values below 0 with 0 and keep values above 0
              [2, -2],
              [-2, 1]])
print(x)
print(np.where(x > 0, x, 0))
|
[
"abc"
] |
abc
|
80ad83afd181b73ca8f260ec4eb78f2354f20a7f
|
98efe1aee73bd9fbec640132e6fb2e54ff444904
|
/loldib/getratings/models/NA/na_azir/na_azir_top.py
|
914fc01c70d432f9190f4176ab17fef32b18fec6
|
[
"Apache-2.0"
] |
permissive
|
koliupy/loldib
|
be4a1702c26546d6ae1b4a14943a416f73171718
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
refs/heads/master
| 2021-07-04T03:34:43.615423 | 2017-09-21T15:44:10 | 2017-09-21T15:44:10 | 104,359,388 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,269 |
py
|
from getratings.models.ratings import Ratings
class NA_Azir_Top_Aatrox(Ratings):
pass
class NA_Azir_Top_Ahri(Ratings):
pass
class NA_Azir_Top_Akali(Ratings):
pass
class NA_Azir_Top_Alistar(Ratings):
pass
class NA_Azir_Top_Amumu(Ratings):
pass
class NA_Azir_Top_Anivia(Ratings):
pass
class NA_Azir_Top_Annie(Ratings):
pass
class NA_Azir_Top_Ashe(Ratings):
pass
class NA_Azir_Top_AurelionSol(Ratings):
pass
class NA_Azir_Top_Azir(Ratings):
pass
class NA_Azir_Top_Bard(Ratings):
pass
class NA_Azir_Top_Blitzcrank(Ratings):
pass
class NA_Azir_Top_Brand(Ratings):
pass
class NA_Azir_Top_Braum(Ratings):
pass
class NA_Azir_Top_Caitlyn(Ratings):
pass
class NA_Azir_Top_Camille(Ratings):
pass
class NA_Azir_Top_Cassiopeia(Ratings):
pass
class NA_Azir_Top_Chogath(Ratings):
pass
class NA_Azir_Top_Corki(Ratings):
pass
class NA_Azir_Top_Darius(Ratings):
pass
class NA_Azir_Top_Diana(Ratings):
pass
class NA_Azir_Top_Draven(Ratings):
pass
class NA_Azir_Top_DrMundo(Ratings):
pass
class NA_Azir_Top_Ekko(Ratings):
pass
class NA_Azir_Top_Elise(Ratings):
pass
class NA_Azir_Top_Evelynn(Ratings):
pass
class NA_Azir_Top_Ezreal(Ratings):
pass
class NA_Azir_Top_Fiddlesticks(Ratings):
pass
class NA_Azir_Top_Fiora(Ratings):
pass
class NA_Azir_Top_Fizz(Ratings):
pass
class NA_Azir_Top_Galio(Ratings):
pass
class NA_Azir_Top_Gangplank(Ratings):
pass
class NA_Azir_Top_Garen(Ratings):
pass
class NA_Azir_Top_Gnar(Ratings):
pass
class NA_Azir_Top_Gragas(Ratings):
pass
class NA_Azir_Top_Graves(Ratings):
pass
class NA_Azir_Top_Hecarim(Ratings):
pass
class NA_Azir_Top_Heimerdinger(Ratings):
pass
class NA_Azir_Top_Illaoi(Ratings):
pass
class NA_Azir_Top_Irelia(Ratings):
pass
class NA_Azir_Top_Ivern(Ratings):
pass
class NA_Azir_Top_Janna(Ratings):
pass
class NA_Azir_Top_JarvanIV(Ratings):
pass
class NA_Azir_Top_Jax(Ratings):
pass
class NA_Azir_Top_Jayce(Ratings):
pass
class NA_Azir_Top_Jhin(Ratings):
pass
class NA_Azir_Top_Jinx(Ratings):
pass
class NA_Azir_Top_Kalista(Ratings):
pass
class NA_Azir_Top_Karma(Ratings):
pass
class NA_Azir_Top_Karthus(Ratings):
pass
class NA_Azir_Top_Kassadin(Ratings):
pass
class NA_Azir_Top_Katarina(Ratings):
pass
class NA_Azir_Top_Kayle(Ratings):
pass
class NA_Azir_Top_Kayn(Ratings):
pass
class NA_Azir_Top_Kennen(Ratings):
pass
class NA_Azir_Top_Khazix(Ratings):
pass
class NA_Azir_Top_Kindred(Ratings):
pass
class NA_Azir_Top_Kled(Ratings):
pass
class NA_Azir_Top_KogMaw(Ratings):
pass
class NA_Azir_Top_Leblanc(Ratings):
pass
class NA_Azir_Top_LeeSin(Ratings):
pass
class NA_Azir_Top_Leona(Ratings):
pass
class NA_Azir_Top_Lissandra(Ratings):
pass
class NA_Azir_Top_Lucian(Ratings):
pass
class NA_Azir_Top_Lulu(Ratings):
pass
class NA_Azir_Top_Lux(Ratings):
pass
class NA_Azir_Top_Malphite(Ratings):
pass
class NA_Azir_Top_Malzahar(Ratings):
pass
class NA_Azir_Top_Maokai(Ratings):
pass
class NA_Azir_Top_MasterYi(Ratings):
pass
class NA_Azir_Top_MissFortune(Ratings):
pass
class NA_Azir_Top_MonkeyKing(Ratings):
pass
class NA_Azir_Top_Mordekaiser(Ratings):
pass
class NA_Azir_Top_Morgana(Ratings):
pass
class NA_Azir_Top_Nami(Ratings):
pass
class NA_Azir_Top_Nasus(Ratings):
pass
class NA_Azir_Top_Nautilus(Ratings):
pass
class NA_Azir_Top_Nidalee(Ratings):
pass
class NA_Azir_Top_Nocturne(Ratings):
pass
class NA_Azir_Top_Nunu(Ratings):
pass
class NA_Azir_Top_Olaf(Ratings):
pass
class NA_Azir_Top_Orianna(Ratings):
pass
class NA_Azir_Top_Ornn(Ratings):
pass
class NA_Azir_Top_Pantheon(Ratings):
pass
class NA_Azir_Top_Poppy(Ratings):
pass
class NA_Azir_Top_Quinn(Ratings):
pass
class NA_Azir_Top_Rakan(Ratings):
pass
class NA_Azir_Top_Rammus(Ratings):
pass
class NA_Azir_Top_RekSai(Ratings):
pass
class NA_Azir_Top_Renekton(Ratings):
pass
class NA_Azir_Top_Rengar(Ratings):
pass
class NA_Azir_Top_Riven(Ratings):
pass
class NA_Azir_Top_Rumble(Ratings):
pass
class NA_Azir_Top_Ryze(Ratings):
pass
class NA_Azir_Top_Sejuani(Ratings):
pass
class NA_Azir_Top_Shaco(Ratings):
pass
class NA_Azir_Top_Shen(Ratings):
pass
class NA_Azir_Top_Shyvana(Ratings):
pass
class NA_Azir_Top_Singed(Ratings):
pass
class NA_Azir_Top_Sion(Ratings):
pass
class NA_Azir_Top_Sivir(Ratings):
pass
class NA_Azir_Top_Skarner(Ratings):
pass
class NA_Azir_Top_Sona(Ratings):
pass
class NA_Azir_Top_Soraka(Ratings):
pass
class NA_Azir_Top_Swain(Ratings):
pass
class NA_Azir_Top_Syndra(Ratings):
pass
class NA_Azir_Top_TahmKench(Ratings):
pass
class NA_Azir_Top_Taliyah(Ratings):
pass
class NA_Azir_Top_Talon(Ratings):
pass
class NA_Azir_Top_Taric(Ratings):
pass
class NA_Azir_Top_Teemo(Ratings):
pass
class NA_Azir_Top_Thresh(Ratings):
pass
class NA_Azir_Top_Tristana(Ratings):
pass
class NA_Azir_Top_Trundle(Ratings):
pass
class NA_Azir_Top_Tryndamere(Ratings):
pass
class NA_Azir_Top_TwistedFate(Ratings):
pass
class NA_Azir_Top_Twitch(Ratings):
pass
class NA_Azir_Top_Udyr(Ratings):
pass
class NA_Azir_Top_Urgot(Ratings):
pass
class NA_Azir_Top_Varus(Ratings):
pass
class NA_Azir_Top_Vayne(Ratings):
pass
class NA_Azir_Top_Veigar(Ratings):
pass
class NA_Azir_Top_Velkoz(Ratings):
pass
class NA_Azir_Top_Vi(Ratings):
pass
class NA_Azir_Top_Viktor(Ratings):
pass
class NA_Azir_Top_Vladimir(Ratings):
pass
class NA_Azir_Top_Volibear(Ratings):
pass
class NA_Azir_Top_Warwick(Ratings):
pass
class NA_Azir_Top_Xayah(Ratings):
pass
class NA_Azir_Top_Xerath(Ratings):
pass
class NA_Azir_Top_XinZhao(Ratings):
pass
class NA_Azir_Top_Yasuo(Ratings):
pass
class NA_Azir_Top_Yorick(Ratings):
pass
class NA_Azir_Top_Zac(Ratings):
pass
class NA_Azir_Top_Zed(Ratings):
pass
class NA_Azir_Top_Ziggs(Ratings):
pass
class NA_Azir_Top_Zilean(Ratings):
pass
class NA_Azir_Top_Zyra(Ratings):
pass
|
[
"[email protected]"
] | |
300860ec08d763f75ed23d5aaa30370d6baf1713
|
6c5971b878d245fdca10d68ca653c3e72470d0f3
|
/pyesmon/daemon.py
|
b47a20151c9d3d1e44b88bbb63d46c8f7d5fbfe2
|
[] |
no_license
|
patirot/LustrePerfMon
|
417328674680e8693707e0dc4b93dd597409fb4c
|
4a351f6190a713ba4b861a9d22fb9240c3261266
|
refs/heads/master
| 2023-08-02T08:05:37.742882 | 2021-09-30T18:49:12 | 2021-09-30T18:49:12 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 482 |
py
|
# Copyright (c) 2017 DataDirect Networks, Inc.
# All Rights Reserved.
# Author: [email protected]
"""
Library for daemon process
"""
import logging
SHUTTING_DOWN = False
EXIT_REASON = "unknown reason"
def signal_handler(signum, frame):
"""
Singal hander
"""
# pylint: disable=global-statement,unused-argument
global SHUTTING_DOWN, EXIT_REASON
SHUTTING_DOWN = True
EXIT_REASON = "got signal %d" % signum
logging.error("exiting because %s", EXIT_REASON)
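# Editor's usage sketch (assumption: the daemon entry point imports this module
# as pyesmon.daemon): register the handler with the stdlib signal module and
# poll SHUTTING_DOWN from the main loop.
#
#   import signal
#   from pyesmon import daemon
#
#   signal.signal(signal.SIGINT, daemon.signal_handler)
#   signal.signal(signal.SIGTERM, daemon.signal_handler)
#   while not daemon.SHUTTING_DOWN:
#       do_work()  # hypothetical work function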
|
[
"[email protected]"
] | |
0a9eff2aa9d6f96402d906eb1b805de2ca963c30
|
a2fec2dada04b2c7cd69f5e186efea99e98bd32e
|
/leetcode/maximum-product-subarray.py
|
07cc57beb127beb0fd6d99c5f146e9a2c6aa210b
|
[] |
no_license
|
IcyCC/effective_note
|
363ed9c2eb986151caef7134815be424e53fc592
|
d55074032217c48e3d872d4524ba6cea94613b86
|
refs/heads/master
| 2022-02-13T21:42:54.871413 | 2022-01-24T02:10:11 | 2022-01-24T02:10:11 | 168,490,684 | 78 | 7 | null | null | null | null |
UTF-8
|
Python
| false | false | 500 |
py
|
from typing import List  # needed for the List[int] annotation outside the LeetCode harness


class Solution:
def maxProduct(self, nums: List[int]) -> int:
        if not nums:
return 0
t_min = nums[0]
t_max = nums[0]
res = nums[0]
for i in nums[1:]:
            if i < 0:
                # a negative factor swaps the running max and min
                t_max, t_min = t_min, t_max
            t_max = max(i, i * t_max)  # core idea: either extend the running product or restart fresh from i
            t_min = min(i, i * t_min)  # also track the most negative running product
            res = max(res, t_max)
return res
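# Editor's usage sketch:
#   Solution().maxProduct([2, 3, -2, 4])  ->  6   (best subarray: [2, 3])
#   Solution().maxProduct([-2, 3, -4])    -> 24   (the two negatives cancel)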
|
[
"[email protected]"
] | |
b2549b3de94c275ed7aad85c9d88802e3d7deead
|
b43e73898400662b68e522c84286a79e359a3390
|
/cybox/test/objects/win_registry_key_test.py
|
ca9afc432a92efa097a04e28519119d64ec90b74
|
[
"BSD-3-Clause"
] |
permissive
|
emmanvg/python-cybox
|
fc8f44200a02dc956c81da60b4a051984d03ac5b
|
e58649356e21720cf79bb09ac9ceaf73283c0f26
|
refs/heads/master
| 2021-01-20T01:11:24.264142 | 2017-10-26T01:19:56 | 2017-10-26T01:19:56 | 89,234,015 | 1 | 0 | null | 2017-04-24T11:48:24 | 2017-04-24T11:48:24 | null |
UTF-8
|
Python
| false | false | 2,087 |
py
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
import unittest
from mixbox.vendor.six import u
from cybox.compat import long
from cybox.objects.win_registry_key_object import WinRegistryKey
from cybox.test.objects import ObjectTestCase
class TestWinRegistryKey(ObjectTestCase, unittest.TestCase):
object_type = "WindowsRegistryKeyObjectType"
klass = WinRegistryKey
_full_dict = {
'key': u("\\SOFTWARE\\Microsoft\\Windows\\Windows Error Reporting"),
'hive': u("HKEY_LOCAL_MACHINE"),
'number_values': 6,
'values': [
{
'name': u("Disabled"),
'data': u("1"),
'datatype': u("REG_DWORD"),
'byte_runs': [{'length': 1, 'byte_run_data': u("A")}],
},
{
'name': u("ErrorPort"),
'data': u("\\WindowsErrorReportingServicePort"),
'datatype': u("REG_SZ"),
},
],
'modified_time': u("2013-08-08T15:15:15-04:00"),
'creator_username': u("gback"),
'handle_list': [
{
'name': u("RegHandle"),
'pointer_count': long(1),
'type': u("RegistryKey"),
'xsi:type': u('WindowsHandleObjectType'),
},
],
'number_subkeys': 1,
'subkeys': [
{
'key': u("Consent"),
'number_values': 1,
'values': [
{
'name': u("NewUserDefaultConsent"),
'data': u("1"),
'datatype': u("REG_DWORD"),
},
],
'xsi:type': 'WindowsRegistryKeyObjectType',
},
],
'byte_runs': [
{'length': 4, 'byte_run_data': u("z!%f")},
{'offset': 0x1000, 'length': 8, 'byte_run_data': u("%40V.,2@")},
],
'xsi:type': object_type,
}
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
f0072159501e0c427e68bdfa0438e86855b9b9a1
|
058f6cf55de8b72a7cdd6e592d40243a91431bde
|
/tests/parser/static/test_match_assignments/test_match_assignments_2.py
|
8f5c218a9b77384a83595bb1205e81efc452285e
|
[
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
LLNL/FPChecker
|
85e8ebf1d321b3208acee7ddfda2d8878a238535
|
e665ef0f050316f6bc4dfc64c1f17355403e771b
|
refs/heads/master
| 2023-08-30T23:24:43.749418 | 2022-04-14T19:57:44 | 2022-04-14T19:57:44 | 177,033,795 | 24 | 6 |
Apache-2.0
| 2022-09-19T00:09:50 | 2019-03-21T22:34:14 |
Python
|
UTF-8
|
Python
| false | false | 2,464 |
py
|
import os
import pathlib
import sys
import subprocess
sys.path.insert(1, str(pathlib.Path(__file__).parent.absolute())+"/../../../../parser")
#sys.path.insert(1, '/usr/workspace/wsa/laguna/fpchecker/FPChecker/parser')
from tokenizer import Tokenizer
from instrument import Instrument
RUNTIME='../../../../src/Runtime_parser.h'
prog_2 = """
__device__ double *p_new;
__device__ double *bvc;
__device__ double *e_old;
__device__ double fabs(double x);
__device__ double p_cut;
__device__ double *vnewc;
__device__ double eosvmax;
__device__ double pmin;
__device__ void comp(int i) {
p_new[i] = bvc[i] * e_old[i] ; if ( fabs(p_new[i]) < p_cut ) p_new[i] = 0.0 ; if ( vnewc[i] >= eosvmax ) p_new[i] = 0.0 ; if ( p_new[i] < pmin ) p_new[i] = pmin ;;
}
"""
def setup_module(module):
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
os.chdir(THIS_DIR)
def teardown_module(module):
cmd = ["rm -f *.o *.ii *.cu"]
cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
def preprocessFile(prog_name: str):
cmd = ['nvcc -E '+prog_name+'.cu -o '+prog_name+'.ii']
cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
def createFile(prog: str, prog_name: str):
with open(prog_name+'.cu', 'w') as fd:
fd.write(prog)
fd.write('\n')
preprocessFile(prog_name)
def instrument(prog_name: str):
preFileName = prog_name+'.ii'
sourceFileName = prog_name+'.cu'
inst = Instrument(preFileName, sourceFileName)
inst.deprocess()
inst.findDeviceDeclarations()
inst.findAssigments()
inst.produceInstrumentedLines()
inst.instrument()
def compileProgram(prog_name: str):
cmd = ['nvcc -std=c++11 -c -include '+RUNTIME+' '+prog_name+'_inst.cu']
cmdOutput = subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True)
def countInstrumentationCalls(prog_name: str):
ret = 0
with open(prog_name+'_inst.cu', 'r') as fd:
for l in fd.readlines():
for w in l.split():
if '_FPC_CHECK_' in w:
ret += 1
return ret
def inst_program(prog: str, prog_name: str, num_inst: int):
try:
createFile(prog, prog_name)
instrument(prog_name)
        compileProgram(prog_name)
n = countInstrumentationCalls(prog_name)
assert n == num_inst
return True
except Exception as e:
print(e)
return False
def test_1():
os.environ['FPC_VERBOSE'] = '1'
assert inst_program(prog_2, 'prog_2', 1)
if __name__ == '__main__':
test_1()
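# Editor's note on the pipeline exercised above: (1) prog_2 is written to
# prog_2.cu, (2) preprocessed with `nvcc -E`, (3) Instrument rewrites device-side
# floating-point assignments, (4) the instrumented file is compiled against
# Runtime_parser.h, and (5) the test asserts exactly one _FPC_CHECK_ call was
# inserted, since prog_2 contains a single instrumented statement.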
|
[
"[email protected]"
] | |
6e360b41dcec06167dd4382fe2834712cd879592
|
58ec75465a2a6f8500b220bba92d9268e9f77f01
|
/blog/views.py
|
b6552b9681fbe39c099a9ebc15e912b63800a1b8
|
[] |
no_license
|
Rayhun/Django-blog
|
797e13524aad95d7677a4675b3d9921ad56c6064
|
8aadfb6a6e6246f1dd979fc11f5b21436ac2bfa1
|
refs/heads/master
| 2023-08-28T07:17:34.479268 | 2021-11-04T14:07:04 | 2021-11-04T14:07:04 | 296,093,670 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,996 |
py
|
from datetime import datetime
from ipware import get_client_ip
import json
import urllib.request
from django.shortcuts import render, redirect, get_object_or_404
from django.views.generic import TemplateView, DetailView
from .models import BlogPost, BlogComment, IpStore
from .forms import CommentForm, SignUpForm
from django.contrib.auth import login, logout
class UserCreateView(TemplateView):
template_name = 'signup.html'
def get(self, request):
form = SignUpForm()
return render(request, self.template_name, {'form': form})
def post(self, request):
form = SignUpForm(request.POST)
if form.is_valid():
form.save()
login(request, form.instance)
return redirect('/')
return render(request, self.template_name, {'form': form})
class BlogHomePageView(TemplateView):
template_name = 'home.html'
model = BlogPost
def get_context_data(self, **kwargs):
context = super(BlogHomePageView, self).get_context_data(**kwargs)
context['all_post'] = self.model.objects.all()
context['last_post'] = self.model.objects.last()
context['first_four'] = self.model.objects.all().order_by('-id')[1:5]
context['popular_post'] = self.model.objects.all().order_by(
'-total_view'
)[:2]
context['hot_blog'] = self.model.objects.filter(is_hot=True).order_by('-id')[:4]
context['featured_blog'] = self.model.objects.filter(is_featured=True).order_by('-id')[:4]
clint_ip, is_routable = get_client_ip(self.request)
        if clint_ip is None:
            clint_ip = "0.0.0.0"
            ip_type = "Unknown"  # ip_type was previously unbound on this path
        else:
            if is_routable:
                ip_type = "Public"
            else:
                ip_type = "Private"
                clint_ip = "103.230.106.25"  # private IPs cannot be geolocated; use a fixed public one
url = "http://ip-api.com/json/" + clint_ip
response = urllib.request.urlopen(url)
data = json.loads(response.read())
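        # Illustrative ip-api.com payload (field names taken from the reads below; values hypothetical):
        # {"status": "success", "query": "103.230.106.25", "city": "...", "regionName": "...",
        #  "country": "...", "countryCode": "...", "lat": 0.0, "lon": 0.0,
        #  "timezone": "...", "zip": "...", "isp": "...", "org": "...", "as": "..."}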
try:
my_ip = IpStore.objects.get(ip_name=clint_ip)
except Exception as e:
try:
IpStore.objects.create(
ip_name=clint_ip,
ip_type=ip_type,
city=data['city'],
region=data['regionName'],
country=data['country'],
lat=data['lat'],
lon=data['lon'],
timezone=data['timezone'],
zip_code=data['zip'],
isp=data['isp'],
org=data['org'],
query=data['query'],
status=data['status'],
ass=data['as'],
countryCode=data['countryCode']
)
except Exception as e:
IpStore.objects.create(
ip_name=clint_ip,
ip_type=ip_type,
city="Unknown",
region="Unknown",
country="Unknown",
lat="Unknown",
lon="Unknown",
timezone="Unknown",
zip_code="Unknown",
isp="Unknown",
org="Unknown",
query="Unknown",
status="Unknown",
ass="Unknown",
countryCode="Unknown"
)
my_ip = IpStore.objects.get(ip_name=clint_ip)
context['ip_address'] = my_ip
return context
class BlogCommentLikeView(TemplateView):
model = BlogComment
def get(self, request, *args, **kwargs):
comment_id = self.kwargs['comment_id']
comment = self.model.objects.get(id=comment_id)
comment.like += 1
comment.save()
return redirect('/blog/post/' + str(comment.blog_post.id))
class BlogDetails(DetailView):
template_name = 'details.html'
model = BlogPost
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
context['comment'] = BlogComment.objects.filter(
post=self.object, parent__isnull=True
)
context['comment_count'] = BlogComment.objects.filter(
post=self.object
).count()
context['form'] = CommentForm()
context['replay_blog'] = BlogComment.objects.filter(parent=20)
self.object.total_view += 1
self.object.last_seen = datetime.now()
self.object.save()
return context
def post(self, request, pk):
object = self.model.objects.get(pk=pk)
try:
replay_comment = int(request.POST.get('replay_comment'))
replay = BlogComment.objects.get(pk=replay_comment)
except Exception as e:
replay_comment = None
replay = None
try:
clint_ip, is_routable = get_client_ip(self.request)
comment_id = int(request.POST.get('comment_id'))
cmt_lik = get_object_or_404(BlogComment, pk=comment_id)
# cmt_lik.like.add('20')
comment = BlogComment.objects.get(pk=comment_id)
comment.total_like += 1
comment.save()
return redirect('blog_details', pk=pk)
except Exception as e:
comment = None
form = CommentForm(request.POST, request.FILES)
if form.is_valid():
user = form.save(commit=False)
user.post = object
user.parent = replay
user.save()
return redirect('blog_details', pk=pk)
return render(request, self.template_name)
class SearchView(TemplateView):
template_name = 'search_page.html'
def get_context_data(self, *args, **kwargs):
context = super().get_context_data(*args, **kwargs)
query = self.request.GET.get('search')
context['query'] = query
context['all_post'] = BlogPost.objects.filter(
title__icontains=query
)
return context
|
[
"[email protected]"
] | |
c13786863d7eb85259610ae6c03d43f058ff954f
|
5e4ddf4a8dac912a7679b0a6babe1b42d5d018e9
|
/python2/prac/pracmodules/cs_recognition/src/cs.py
|
361ad0fbe503585194797fb73ca9be2325110c30
|
[
"BSD-2-Clause"
] |
permissive
|
danielnyga/prac
|
b686d9655c56175057db2af0002348c99a7de2ee
|
7add712590dd52dd8c1692554f49b271447ef03f
|
refs/heads/master
| 2020-05-30T07:10:58.260570 | 2018-02-28T17:30:29 | 2018-02-28T17:30:29 | 45,621,476 | 4 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,364 |
py
|
# PROBABILISTIC ROBOT ACTION CORES
#
# (C) 2012-2013 by Daniel Nyga ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
from dnutils import logs
from pracmln.mln.base import parse_mln
from pracmln.mln.util import colorize
from pracmln.utils.project import MLNProject
from pracmln.utils.visualization import get_cond_prob_png
from prac.core import locations as pracloc
from prac.core.base import PRACModule, PRACPIPE
from prac.core.inference import PRACInferenceStep
from prac.pracutils.utils import prac_heading
logger = logs.getlogger(__name__, logs.DEBUG)
class ControlStructureIdentification(PRACModule):
'''
PRACModule used to identify control structures in natural-language
instructions
'''
@PRACPIPE
def __call__(self, pracinference, **params):
# ======================================================================
# Initialization
# ======================================================================
logger.debug('inference on {}'.format(self.name))
if self.prac.verbose > 0:
print prac_heading('Recognizing Control Structures')
if params.get('project', None) is None:
# load default project
projectpath = os.path.join(pracloc.pracmodules, self.name, self.defproject)
ac_project = MLNProject.open(projectpath)
else:
logger.info(colorize('Loading Project from params', (None, 'cyan', True), True))
projectpath = os.path.join(params.get('projectpath', None) or os.path.join(pracloc.pracmodules, self.name), params.get('project').name)
ac_project = params.get('project')
dbs = pracinference.inference_steps[-1].output_dbs
mlntext = ac_project.mlns.get(ac_project.queryconf['mln'], None)
mln = parse_mln(mlntext, searchpaths=[self.module_path], projectpath=projectpath, logic=ac_project.queryconf.get('logic', 'FirstOrderLogic'), grammar=ac_project.queryconf.get('grammar', 'PRACGrammar'))
inf_step = PRACInferenceStep(pracinference, self)
pngs = {}
for i, db in enumerate(dbs):
db_ = db.copy()
# ======================================================================
# Inference
# ======================================================================
infer = self.mlnquery(config=ac_project.queryconf, db=db, mln=mln)
result_db = infer.resultdb
if self.prac.verbose == 2:
print
print prac_heading('INFERENCE RESULTS')
infer.write()
# ==========================================================
# Postprocessing
# ==========================================================
for q in result_db.query('event(?w,?ac)'):
db_ << 'event({},{})'.format(q['?w'],q['?ac'])
for q in result_db.query('condition(?w)'):
db_ << 'condition({})'.format(q['?w'])
inf_step.output_dbs.append(db_)
pngs['CS' + str(i)] = get_cond_prob_png(ac_project.queryconf.get('queries', ''), dbs, filename=self.name)
inf_step.png = pngs
inf_step.applied_settings = ac_project.queryconf.config
return inf_step
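# Editor's summary of the flow above: each evidence Database is copied, the
# control-structure MLN is queried against the original, and the inferred
# event(?w,?ac) and condition(?w) atoms are asserted into the copy, which is
# passed on to the next PRAC module through the returned PRACInferenceStep.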
|
[
"[email protected]"
] | |
2a6b7dcf34814b4cf52d13c7b049621f96bb6f85
|
cb324b8e92765c535765bbb88aa69878ce2e4fe3
|
/regtests/list/if_empty.py
|
fad3c9e5d5f0cd8f09b8e56e51887962654316cd
|
[
"BSD-3-Clause"
] |
permissive
|
pombredanne/Rusthon
|
f47756c6ae465c60012e63e02ea1e912c3b391fb
|
343c0b2b097b18fa910f616ec2f6c09048fe92d0
|
refs/heads/master
| 2021-01-17T21:24:29.744692 | 2016-09-10T10:53:59 | 2016-09-10T10:53:59 | 40,818,721 | 1 | 0 | null | 2016-09-10T10:54:00 | 2015-08-16T13:20:47 |
Python
|
UTF-8
|
Python
| false | false | 581 |
py
|
from runtime import *
"""if empty list then false"""
class A:
pass
def main():
d = []
#if d: ## this is not allowed, and will raise an error at runtime
if len(d):
err1 = 1
else:
err1 = 0
if len([]):
err2 = 1
else:
err2 = 0
d.append('xxx')
if len(d):
err3 = 0
else:
err3 = 1
assert( err1 == 0 )
assert( err2 == 0 )
assert( err3 == 0 )
a = A()
ok = False
#if a: ## this is not allowed, and will raise an error at runtime
if a is not None:
ok = True
assert ok
a.x = []
if len(a.x):
err4 = 1
else:
err4 = 0
assert( err4 == 0 )
main()
|
[
"[email protected]"
] | |
38e6b27c30a8b8c86965c575147b2d9f90a8bb76
|
9069db5ea6adce8739ccdac841317e529a5059f8
|
/creditos/migrations/0017_auto__add_field_credito_liquidado__add_field_credito_fecha_liquidacion.py
|
8ae7aa07763339dc8bb68726a0536fb880d44412
|
[] |
no_license
|
jesusmaherrera/buro-de-credito
|
f5fc0836ad4db94a8dbe9edbd2998c6beab23033
|
070bd296b9b2305b423c6547510f89fa8eb4870f
|
refs/heads/master
| 2021-01-15T23:45:49.675114 | 2015-03-12T19:51:50 | 2015-03-12T19:51:50 | 8,079,158 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,893 |
py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Credito.liquidado'
db.add_column('creditos_credito', 'liquidado',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
# Adding field 'Credito.fecha_liquidacion'
db.add_column('creditos_credito', 'fecha_liquidacion',
self.gf('django.db.models.fields.DateField')(auto_now_add=True, null=True, blank=True),
keep_default=False)
# Adding field 'Credito.monto_liquidado'
db.add_column('creditos_credito', 'monto_liquidado',
self.gf('django.db.models.fields.DecimalField')(default=0, max_digits=15, decimal_places=2),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Credito.liquidado'
db.delete_column('creditos_credito', 'liquidado')
# Deleting field 'Credito.fecha_liquidacion'
db.delete_column('creditos_credito', 'fecha_liquidacion')
# Deleting field 'Credito.monto_liquidado'
db.delete_column('creditos_credito', 'monto_liquidado')
models = {
'cities_light.city': {
'Meta': {'unique_together': "(('region', 'name'),)", 'object_name': 'City'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cities_light.Country']"}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'latitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'longitude': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '5', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cities_light.Region']", 'null': 'True', 'blank': 'True'}),
'search_names': ('cities_light.models.ToSearchTextField', [], {'default': "''", 'max_length': '4000', 'db_index': 'True', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"})
},
'cities_light.country': {
'Meta': {'object_name': 'Country'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'code2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'code3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'continent': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"}),
'tld': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '5', 'blank': 'True'})
},
'cities_light.region': {
'Meta': {'unique_together': "(('country', 'name'),)", 'object_name': 'Region'},
'alternate_names': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cities_light.Country']"}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'geoname_code': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'geoname_id': ('django.db.models.fields.IntegerField', [], {'unique': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'}),
'name_ascii': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'blank': 'True'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique_with': '()', 'max_length': '50', 'populate_from': "'name_ascii'"})
},
'creditos.cliente': {
'Meta': {'object_name': 'Cliente'},
'city': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cities_light.City']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'codigo_postal': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'dir_calle': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'dir_colonia': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'dir_no_exterior': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'dir_no_interior': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'dir_poblacion': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'dir_referencia': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'edad': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '3'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'ocupacion': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'rfc': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '13', 'null': 'True', 'blank': 'True'}),
'telefono': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '10', 'null': 'True', 'blank': 'True'})
},
'creditos.credito': {
'Meta': {'object_name': 'Credito'},
'cliente': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['creditos.Cliente']"}),
'empresa_otorga': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'fecha': ('django.db.models.fields.DateField', [], {}),
'fecha_limite': ('django.db.models.fields.DateField', [], {}),
'fecha_liquidacion': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'liquidado': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'monto_liquidado': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'}),
'monto_total': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '15', 'decimal_places': '2'})
},
'creditos.empresa': {
'Meta': {'object_name': 'Empresa'},
'codigo_postal': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'direccion': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'telefono': ('django.db.models.fields.CharField', [], {'max_length': '10'})
}
}
complete_apps = ['creditos']
|
[
"[email protected]"
] | |
dfb641cfcda182da1c34f2edbfc8b2ddb29075b3
|
a04eff13392361cf6effa7b321ff6c931705534c
|
/python/ccxt/async_support/acx.py
|
1a4ee9682dd3bc362426874ff7e4156b41bbf5db
|
[
"MIT"
] |
permissive
|
Homiex/homiex-ccxt
|
89594883f06f72e8eaf3222d43a66370a030dbd2
|
f669d7cb2a9276ba07c7782c5ec1a488f13d930d
|
refs/heads/master
| 2022-07-06T19:47:38.759274 | 2020-03-16T09:27:07 | 2020-03-16T09:27:07 | 246,796,828 | 3 | 4 |
MIT
| 2022-06-23T01:48:09 | 2020-03-12T09:41:33 |
JavaScript
|
UTF-8
|
Python
| false | false | 16,718 |
py
|
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import OrderNotFound
class acx(Exchange):
def describe(self):
return self.deep_extend(super(acx, self).describe(), {
'id': 'acx',
'name': 'ACX',
'countries': ['AU'],
'rateLimit': 1000,
'version': 'v2',
'has': {
'CORS': True,
'fetchTickers': True,
'fetchOHLCV': True,
'withdraw': True,
'fetchOrder': True,
},
'timeframes': {
'1m': '1',
'5m': '5',
'15m': '15',
'30m': '30',
'1h': '60',
'2h': '120',
'4h': '240',
'12h': '720',
'1d': '1440',
'3d': '4320',
'1w': '10080',
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/30247614-1fe61c74-9621-11e7-9e8c-f1a627afa279.jpg',
'extension': '.json',
'api': 'https://acx.io/api',
'www': 'https://acx.io',
'doc': 'https://acx.io/documents/api_v2',
},
'api': {
'public': {
'get': [
'depth', # Get depth or specified market Both asks and bids are sorted from highest price to lowest.
'k_with_pending_trades', # Get K data with pending trades, which are the trades not included in K data yet, because there's delay between trade generated and processed by K data generator
'k', # Get OHLC(k line) of specific market
'markets', # Get all available markets
'order_book', # Get the order book of specified market
'order_book/{market}',
'tickers', # Get ticker of all markets
'tickers/{market}', # Get ticker of specific market
'timestamp', # Get server current time, in seconds since Unix epoch
'trades', # Get recent trades on market, each trade is included only once Trades are sorted in reverse creation order.
'trades/{market}',
],
},
'private': {
'get': [
'members/me', # Get your profile and accounts info
'deposits', # Get your deposits history
'deposit', # Get details of specific deposit
'deposit_address', # Where to deposit The address field could be empty when a new address is generating(e.g. for bitcoin), you should try again later in that case.
'orders', # Get your orders, results is paginated
'order', # Get information of specified order
'trades/my', # Get your executed trades Trades are sorted in reverse creation order.
'withdraws', # Get your cryptocurrency withdraws
'withdraw', # Get your cryptocurrency withdraw
],
'post': [
'orders', # Create a Sell/Buy order
'orders/multi', # Create multiple sell/buy orders
'orders/clear', # Cancel all my orders
'order/delete', # Cancel an order
'withdraw', # Create a withdraw
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
'funding': {
'tierBased': False,
'percentage': True,
'withdraw': {}, # There is only 1% fee on withdrawals to your bank account.
},
},
'exceptions': {
'2002': InsufficientFunds,
'2003': OrderNotFound,
},
})
async def fetch_markets(self, params={}):
markets = await self.publicGetMarkets(params)
result = []
for i in range(0, len(markets)):
market = markets[i]
id = market['id']
symbol = market['name']
baseId = self.safe_string(market, 'base_unit')
quoteId = self.safe_string(market, 'quote_unit')
if (baseId is None) or (quoteId is None):
ids = symbol.split('/')
baseId = ids[0].lower()
quoteId = ids[1].lower()
base = baseId.upper()
quote = quoteId.upper()
base = self.safe_currency_code(base)
quote = self.safe_currency_code(quote)
# todo: find out their undocumented precision and limits
precision = {
'amount': 8,
'price': 8,
}
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'precision': precision,
'info': market,
})
return result
async def fetch_balance(self, params={}):
await self.load_markets()
response = await self.privateGetMembersMe(params)
balances = self.safe_value(response, 'accounts')
result = {'info': balances}
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_float(balance, 'balance')
account['used'] = self.safe_float(balance, 'locked')
result[code] = account
return self.parse_balance(result)
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
if limit is not None:
request['limit'] = limit # default = 300
orderbook = await self.publicGetDepth(self.extend(request, params))
timestamp = self.safe_timestamp(orderbook, 'timestamp')
return self.parse_order_book(orderbook, timestamp)
def parse_ticker(self, ticker, market=None):
timestamp = self.safe_timestamp(ticker, 'at')
ticker = ticker['ticker']
symbol = None
if market:
symbol = market['symbol']
last = self.safe_float(ticker, 'last')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': self.safe_float(ticker, 'buy'),
'bidVolume': None,
'ask': self.safe_float(ticker, 'sell'),
'askVolume': None,
'vwap': None,
'open': self.safe_float(ticker, 'open'),
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_float(ticker, 'vol'),
'quoteVolume': None,
'info': ticker,
}
async def fetch_tickers(self, symbols=None, params={}):
await self.load_markets()
response = await self.publicGetTickers(params)
ids = list(response.keys())
result = {}
for i in range(0, len(ids)):
id = ids[i]
market = None
symbol = id
if id in self.markets_by_id:
market = self.markets_by_id[id]
symbol = market['symbol']
else:
base = id[0:3]
quote = id[3:6]
base = base.upper()
quote = quote.upper()
base = self.safe_currency_code(base)
quote = self.safe_currency_code(quote)
symbol = base + '/' + quote
result[symbol] = self.parse_ticker(response[id], market)
return result
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
response = await self.publicGetTickersMarket(self.extend(request, params))
return self.parse_ticker(response, market)
def parse_trade(self, trade, market=None):
timestamp = self.parse8601(self.safe_string(trade, 'created_at'))
id = self.safe_string(trade, 'tid')
symbol = None
if market is not None:
symbol = market['symbol']
return {
'info': trade,
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'type': None,
'side': None,
'order': None,
'takerOrMaker': None,
'price': self.safe_float(trade, 'price'),
'amount': self.safe_float(trade, 'volume'),
'cost': self.safe_float(trade, 'funds'),
'fee': None,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'market': market['id'],
}
response = await self.publicGetTrades(self.extend(request, params))
return self.parse_trades(response, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m', since=None, limit=None):
return [
ohlcv[0] * 1000,
ohlcv[1],
ohlcv[2],
ohlcv[3],
ohlcv[4],
ohlcv[5],
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
if limit is None:
limit = 500 # default is 30
request = {
'market': market['id'],
'period': self.timeframes[timeframe],
'limit': limit,
}
if since is not None:
request['timestamp'] = int(since / 1000)
response = await self.publicGetK(self.extend(request, params))
return self.parse_ohlcvs(response, market, timeframe, since, limit)
def parse_order_status(self, status):
statuses = {
'done': 'closed',
'wait': 'open',
'cancel': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
symbol = None
if market is not None:
symbol = market['symbol']
else:
marketId = self.safe_string(order, 'market')
symbol = self.markets_by_id[marketId]['symbol']
timestamp = self.parse8601(self.safe_string(order, 'created_at'))
status = self.parse_order_status(self.safe_string(order, 'state'))
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
id = self.safe_string(order, 'id')
return {
'id': id,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'status': status,
'symbol': symbol,
'type': type,
'side': side,
'price': self.safe_float(order, 'price'),
'amount': self.safe_float(order, 'volume'),
'filled': self.safe_float(order, 'executed_volume'),
'remaining': self.safe_float(order, 'remaining_volume'),
'trades': None,
'fee': None,
'info': order,
}
async def fetch_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'id': int(id),
}
response = await self.privateGetOrder(self.extend(request, params))
return self.parse_order(response)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
request = {
'market': self.market_id(symbol),
'side': side,
'volume': str(amount),
'ord_type': type,
}
if type == 'limit':
request['price'] = str(price)
response = await self.privatePostOrders(self.extend(request, params))
marketId = self.safe_value(response, 'market')
market = self.safe_value(self.markets_by_id, marketId)
return self.parse_order(response, market)
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
request = {
'id': id,
}
response = await self.privatePostOrderDelete(self.extend(request, params))
order = self.parse_order(response)
status = order['status']
if status == 'closed' or status == 'canceled':
raise OrderNotFound(self.id + ' ' + self.json(order))
return order
async def withdraw(self, code, amount, address, tag=None, params={}):
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
# they have XRP but no docs on memo/tag
request = {
'currency': currency['id'],
'sum': amount,
'address': address,
}
response = await self.privatePostWithdraw(self.extend(request, params))
# withdrawal response is undocumented
return {
'info': response,
'id': None,
}
def nonce(self):
return self.milliseconds()
def encode_params(self, params):
if 'orders' in params:
orders = params['orders']
query = self.urlencode(self.keysort(self.omit(params, 'orders')))
for i in range(0, len(orders)):
order = orders[i]
keys = list(order.keys())
for k in range(0, len(keys)):
key = keys[k]
value = order[key]
query += '&orders%5B%5D%5B' + key + '%5D=' + str(value)
return query
return self.urlencode(self.keysort(params))
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
request = '/api/' + self.version + '/' + self.implode_params(path, params)
if 'extension' in self.urls:
request += self.urls['extension']
query = self.omit(params, self.extract_params(path))
url = self.urls['api'] + request
if api == 'public':
if query:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
nonce = str(self.nonce())
query = self.encode_params(self.extend({
'access_key': self.apiKey,
'tonce': nonce,
}, params))
auth = method + '|' + request + '|' + query
signed = self.hmac(self.encode(auth), self.encode(self.secret))
suffix = query + '&signature=' + signed
if method == 'GET':
url += '?' + suffix
else:
body = suffix
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if response is None:
return
if code == 400:
error = self.safe_value(response, 'error')
errorCode = self.safe_string(error, 'code')
feedback = self.id + ' ' + self.json(response)
exceptions = self.exceptions
if errorCode in exceptions:
raise exceptions[errorCode](feedback)
# fallback to default error handler
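# Editor's sketch of the request signing performed in sign() above (assumption:
# self.hmac defaults to HMAC-SHA256; key, secret and tonce are placeholders):
#
#   import hashlib, hmac
#   auth = 'GET' + '|' + '/api/v2/members/me.json' + '|' + 'access_key=KEY&tonce=1500000000000'
#   signature = hmac.new(b'SECRET', auth.encode(), hashlib.sha256).hexdigest()
#   # the signed request is then <api>/members/me.json?access_key=...&tonce=...&signature=<signature>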
|
[
"[email protected]"
] | |
3a9191bee9d44079e2dbeb83c998458f78dd8a94
|
73a2917525e56ac548563b39ead045a19d4861a1
|
/tutorial_11.py
|
b650f21148e9fdbe67e2fb3fbf193671ac46ce23
|
[] |
no_license
|
VsevolodM95/Python_training
|
468e80e5ed6dbec627b08970eb6f2d6febfcd0b5
|
120b2142a7f627a1f98db46a8b531d6e35013090
|
refs/heads/master
| 2021-01-15T14:28:36.144487 | 2014-07-11T23:14:39 | 2014-07-11T23:14:39 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 239 |
py
|
#tutorial #12
#the same as 11, but more rational
name = raw_input("What's my name?")
height = raw_input("How tall am I?")
age = raw_input("How old am I?")
print "My name is %r, I'm %r tall and I'm %r years old." % (name, height, age)
|
[
"myemail"
] |
myemail
|
140ccc842c6d7721c31339b91f2e8e6c3a027af8
|
3417c37060ec8f7089754baace34d8051572570a
|
/Python-dev/Task3/app/models/score.py
|
228822793642df91258d2585422b3e11fe1a7c6e
|
[] |
no_license
|
rrkas/SID2021
|
8a2baa41b8e4868dce0d4ac75533109bc5e717a5
|
09d2a771ca1914928fbb78a0dac58c1bb4d0dd7b
|
refs/heads/master
| 2023-06-26T13:58:35.246291 | 2021-07-25T07:34:57 | 2021-07-25T07:34:57 | 381,122,559 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 289 |
py
|
class Score:
def __init__(self, name=None, regd_num=None, score=0, id=-1):
self.id = id
self.name = name
self.regd_num = regd_num
self.score = score
def __repr__(self):
return f"Score({self.id}, {self.name}, {self.regd_num}, {self.score})"
|
[
"[email protected]"
] | |
c4df4a79f4af6383e7de96d43d1fa38fa94f3cd2
|
471c3ad9912423763295c353c3dcbb7e2e74b818
|
/pos_aware_lm.py
|
f4d8c64ad5fceab1789cd330a1b426392bc1f96c
|
[] |
no_license
|
mikekestemont/seqmod
|
a3bfcbf4c7418005e71cb5381c30a837cff88aec
|
7d8c976a03836fcf347395c192987dba531c2144
|
refs/heads/master
| 2021-01-21T22:05:18.518306 | 2017-06-21T10:26:39 | 2017-06-21T10:26:39 | 95,155,787 | 0 | 0 | null | 2017-06-22T20:55:51 | 2017-06-22T20:55:51 | null |
UTF-8
|
Python
| false | false | 12,050 |
py
|
import os
import math
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
from seqmod import utils as u
from seqmod.modules.custom import StackedRNN
from seqmod.loaders import load_penn3
from seqmod.misc.dataset import Dict, BlockDataset
from seqmod.misc.optimizer import Optimizer
from seqmod.misc.trainer import Trainer
from seqmod.misc.loggers import StdLogger
class POSAwareLM(nn.Module):
def __init__(self, vocab, emb_dim, hid_dim, num_layers,
dropout=0.0, cell='LSTM', tie_weights=False):
self.pos_vocab, self.word_vocab = vocab
self.pos_emb_dim, self.word_emb_dim = emb_dim
self.hid_dim = hid_dim
self.num_layers = num_layers
self.cell = cell
super(POSAwareLM, self).__init__()
# embeddings
self.pos_emb = nn.Embedding(self.pos_vocab, self.pos_emb_dim)
self.word_emb = nn.Embedding(self.word_vocab, self.word_emb_dim)
def init_hidden_for(self, inp, **kwargs):
raise NotImplementedError
def forward(self, pos, word, hidden=None):
"""
<bos>/<bos> NNP/Pierre NNP/Vinken CD/61 ... ,/, MD/will <eos>/<eos>
==Input==
1 (pos/word) 2 3 n-1
POS: <bos>/<bos> NNP/Pierre NNP/Vinken ... MD/will
word: NNP/<bos> NNP/Pierre CD/Vinken ... <eos>/will
==Output==
1 2 3 n-1
POS: NNP NNP CD ... <eos>
word: Pierre Vinken 61 ... <eos>
"""
raise NotImplementedError
def generate(self, pos_dict, word_dict,
seed=None, temperature=1, max_seq_len=25, gpu=False):
raise NotImplementedError
class DoubleRNNPOSAwareLM(POSAwareLM):
def __init__(self, *args, **kwargs):
super(DoubleRNNPOSAwareLM, self).__init__(*args, **kwargs)
if not isinstance(self.hid_dim, tuple):
raise ValueError("two hid_dim params needed for double network")
if not isinstance(self.num_layers, tuple):
raise ValueError("two num_layers must be tuple for double network")
self.pos_hid_dim, self.word_hid_dim = self.hid_dim
self.pos_num_layers, self.word_num_layers = self.num_layers
# pos network
self.pos_rnn = StackedRNN(
self.pos_num_layers,
self.pos_emb_dim + self.word_hid_dim,
self.pos_hid_dim,
cell=self.cell)
self.pos_project = nn.Sequential(
nn.Linear(self.pos_hid_dim, self.pos_vocab),
nn.LogSoftmax())
# word network
self.word_rnn = StackedRNN(
self.word_num_layers,
# TODO: add rnn output instead of argmax embedding
self.word_emb_dim + self.pos_hid_dim,
self.word_hid_dim,
cell=self.cell)
self.word_project = nn.Sequential(
nn.Linear(self.word_hid_dim, self.word_vocab),
nn.LogSoftmax())
def init_hidden_for(self, inp, source_type):
batch = inp.size(0)
if source_type == 'pos':
size = (self.pos_num_layers, batch, self.pos_hid_dim)
else:
assert source_type == 'word'
size = (self.word_num_layers, batch, self.word_hid_dim)
h_0 = Variable(inp.data.new(*size).zero_(), requires_grad=False)
if self.cell.startswith('LSTM'):
c_0 = Variable(inp.data.new(*size).zero_(), requires_grad=False)
return h_0, c_0
else:
return h_0
def get_last_hid(self, h):
if self.cell.startswith('LSTM'):
h, _ = h
return h[-1]
def forward(self, pos, word, hidden=None):
p_outs, w_outs = [], []
p_hid, w_hid = hidden if hidden is not None else (None, None)
p_emb, w_emb = self.pos_emb(pos), self.word_emb(word)
for p, w in zip(p_emb, w_emb):
w_hid = w_hid or self.init_hidden_for(w, 'word')
p_hid = p_hid or self.init_hidden_for(p, 'pos')
p_out, p_hid = self.pos_rnn(
torch.cat((p, self.get_last_hid(w_hid)), 1),
p_hid)
w_out, w_hid = self.word_rnn(
torch.cat((w, self.get_last_hid(p_hid)), 1),
w_hid)
p_outs.append(self.pos_project(p_out))
w_outs.append(self.word_project(w_out))
return (torch.stack(p_outs), torch.stack(w_outs)), (p_hid, w_hid)
    def generate(self, pos_dict, word_dict, seed=None, max_seq_len=20,
temperature=1., batch_size=5, gpu=False, ignore_eos=False):
def sample(out):
prev = out.div(temperature).exp_().multinomial().t()
score = u.select_cols(out.data.cpu(), prev.squeeze().data.cpu())
return prev, score
def init_prev(bos):
out = Variable(torch.LongTensor([bos] * batch_size), volatile=True)
if gpu:
out = out.cuda()
return out
p_hyp, w_hyp, p_hid, w_hid = [], [], None, None
p_scores, w_scores = 0, 0
w_eos = word_dict.get_eos()
finished = np.array([False] * batch_size)
p_prev = init_prev(pos_dict.get_bos()).unsqueeze(0)
w_prev = init_prev(word_dict.get_bos()).unsqueeze(0)
for _ in range(max_seq_len):
# pos
p_emb, w_emb = self.pos_emb(p_prev), self.word_emb(w_prev)
w_hid = w_hid or self.init_hidden_for(w_emb[0], 'word')
p_hid = p_hid or self.init_hidden_for(p_emb[0], 'pos')
p_out, p_hid = self.pos_rnn(
torch.cat((p_emb.squeeze(0), self.get_last_hid(w_hid)), 1),
p_hid)
p_out = self.pos_project(p_out)
# word
w_out, w_hid = self.word_rnn(
torch.cat((w_emb.squeeze(0), self.get_last_hid(p_hid)), 1),
w_hid)
w_out = self.word_project(w_out)
(p_prev, p_score), (w_prev, w_score) = sample(p_out), sample(w_out)
# hyps
mask = (w_prev.squeeze().data == w_eos).cpu().numpy() == 1
finished[mask] = True
if all(finished == True): # nopep8
break
p_hyp.append(p_prev.squeeze().data.tolist())
w_hyp.append(w_prev.squeeze().data.tolist())
# scores
p_score[torch.ByteTensor(finished.tolist())] = 0
w_score[torch.ByteTensor(finished.tolist())] = 0
p_scores, w_scores = p_scores + p_score, w_scores + w_score
        return (list(zip(*p_hyp)), list(zip(*w_hyp))), \
            (p_scores.tolist(), w_scores.tolist())
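# Detaching the hidden state between batches (truncated backpropagation through
# time): re-wrapping the tensors in fresh Variables drops the autograd history,
# so gradients never flow across batch boundaries.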
def repackage_hidden(hidden):
def _repackage_hidden(h):
if type(h) == Variable:
return Variable(h.data)
else:
return tuple(_repackage_hidden(v) for v in h)
p_hid, w_hid = hidden
return (_repackage_hidden(p_hid), _repackage_hidden(w_hid))
class POSAwareLMTrainer(Trainer):
def __init__(self, *args, **kwargs):
super(POSAwareLMTrainer, self).__init__(*args, **kwargs)
self.loss_labels = ('pos', 'word')
def format_loss(self, losses):
return tuple(math.exp(min(loss, 100)) for loss in losses)
def run_batch(self, batch_data, dataset='train', **kwargs):
(src_pos, src_word), (trg_pos, trg_word) = batch_data
seq_len, batch_size = src_pos.size()
hidden = self.batch_state.get('hidden')
(p_out, w_out), hidden = self.model(src_pos, src_word, hidden=hidden)
self.batch_state['hidden'] = repackage_hidden(hidden)
p_loss, w_loss = self.criterion(
p_out.view(seq_len * batch_size, -1), trg_pos.view(-1),
w_out.view(seq_len * batch_size, -1), trg_word.view(-1))
if dataset == 'train':
(p_loss + w_loss).backward()
self.optimizer_step()
return p_loss.data[0], w_loss.data[0]
def num_batch_examples(self, batch_data):
(pos_src, _), _ = batch_data
return pos_src.nelement()
def make_pos_word_criterion(gpu=False):
p_crit, w_crit = nn.NLLLoss(), nn.NLLLoss()
if gpu:
p_crit.cuda(), w_crit.cuda()
def criterion(p_outs, p_targets, w_outs, w_targets):
return p_crit(p_outs, p_targets), w_crit(w_outs, w_targets)
return criterion
def hyp_to_str(p_hyp, w_hyp, pos_dict, word_dict):
p_str, w_str = "", ""
for p, w in zip(p_hyp, w_hyp):
p = pos_dict.vocab[p]
w = word_dict.vocab[w]
ljust = max(len(p), len(w)) + 2
p_str += p.ljust(ljust, ' ')
w_str += w.ljust(ljust, ' ')
return p_str, w_str
def make_generate_hook(pos_dict, word_dict):
def hook(trainer, epoch, batch, checkpoints):
(p_hyps, w_hyps), (p_scores, w_scores) = \
trainer.model.generate(pos_dict, word_dict, gpu=args.gpu)
for p, w, p_score, w_score in zip(p_hyps, w_hyps, p_scores, w_scores):
p_str, w_str = hyp_to_str(p, w, pos_dict, word_dict)
trainer.log("info", "Score [%g, %g]: \n%s\n%s" %
(p_score, w_score, p_str, w_str))
return hook
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
# data
parser.add_argument('--path', default='/home/enrique/corpora/penn3/')
parser.add_argument('--dataset_path')
parser.add_argument('--load_dataset', action='store_true')
parser.add_argument('--save_dataset', action='store_true')
# model
parser.add_argument('--pos_emb_dim', default=24, type=int)
parser.add_argument('--word_emb_dim', default=64, type=int)
parser.add_argument('--pos_hid_dim', default=200, type=int)
parser.add_argument('--word_hid_dim', default=200, type=int)
parser.add_argument('--pos_num_layers', default=1, type=int)
parser.add_argument('--word_num_layers', default=1, type=int)
# train
parser.add_argument('--batch_size', default=100, type=int)
parser.add_argument('--bptt', default=50, type=int)
parser.add_argument('--epochs', default=10, type=int)
parser.add_argument('--optim', default='Adam')
parser.add_argument('--lr', default=0.001, type=float)
parser.add_argument('--hooks_per_epoch', default=1, type=int)
parser.add_argument('--checkpoints', default=20, type=int)
parser.add_argument('--gpu', action='store_true')
args = parser.parse_args()
if args.load_dataset:
dataset = BlockDataset.from_disk(args.dataset_path)
dataset.set_batch_size(args.batch_size), dataset.set_gpu(args.gpu)
else:
words, pos = zip(*load_penn3(args.path, swbd=False))
word_dict = Dict(
eos_token=u.EOS, bos_token=u.BOS, force_unk=True,
max_size=100000)
pos_dict = Dict(
eos_token=u.EOS, bos_token=u.BOS, force_unk=False)
word_dict.fit(words), pos_dict.fit(pos)
dataset = BlockDataset(
(pos, words), (pos_dict, word_dict), args.batch_size, args.bptt)
if args.save_dataset and not os.path.isfile(args.dataset_path):
dataset.to_disk(args.dataset_path)
train, valid = dataset.splits(test=None)
pos_dict, word_dict = train.d
m = DoubleRNNPOSAwareLM(
(len(pos_dict.vocab), len(word_dict.vocab)), # vocabs
(args.pos_emb_dim, args.word_emb_dim),
(args.pos_hid_dim, args.word_hid_dim),
num_layers=(args.pos_num_layers, args.word_num_layers), dropout=0.3)
m.apply(u.make_initializer())
if args.gpu:
m.cuda(), train.set_gpu(args.gpu), valid.set_gpu(args.gpu)
crit = make_pos_word_criterion(gpu=args.gpu)
optim = Optimizer(m.parameters(), args.optim, lr=args.lr)
trainer = POSAwareLMTrainer(
m, {'train': train, 'valid': valid}, crit, optim)
trainer.add_loggers(StdLogger())
num_checkpoints = len(train) // (args.checkpoints * args.hooks_per_epoch)
trainer.add_hook(make_generate_hook(pos_dict, word_dict), num_checkpoints)
trainer.train(args.epochs, args.checkpoints)
|
[
"[email protected]"
] | |
f6346a64d9a515475908d829accc6318b2aab55b
|
5c1643e0e03969b91dba6f0d6428719b75b2d7e1
|
/basic/string2.py
|
31df9c32eb94b536d06ee0696cb753b5ab5edbe0
|
[
"Apache-2.0"
] |
permissive
|
KobiBeef/google-python-exercises
|
2ff0ba89f0618087301b32e8318d751690975c76
|
e5d67d2a198bcf26611df21b0306cd8c42630fdc
|
refs/heads/master
| 2021-01-10T16:49:25.235238 | 2015-10-02T05:33:48 | 2015-10-02T05:33:48 | 43,537,376 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,295 |
py
|
#!/usr/bin/python2.4 -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic string exercises
# D. verbing
# Given a string, if its length is at least 3,
# add 'ing' to its end.
# Unless it already ends in 'ing', in which case
# add 'ly' instead.
# If the string length is less than 3, leave it unchanged.
# Return the resulting string.
def verbing(s):
# +++your code here+++
  if len(s) >= 3 and s[-3:] == 'ing':
    return s + 'ly'
  elif len(s) >= 3:
    return s + 'ing'
  else:
    # length < 3: leave the string unchanged
    return s
# test(verbing('hail'), 'hailing')
# test(verbing('swiming'), 'swimingly')
# if len(s) >= 3:
# if s[-3:] != 'ing' : s = s + 'ing'
# else:
# return s + 'ly'
# return s
# E. not_bad
# Given a string, find the first appearance of the
# substring 'not' and 'bad'. If the 'bad' follows
# the 'not', replace the whole 'not'...'bad' substring
# with 'good'.
# Return the resulting string.
# So 'This dinner is not that bad!' yields:
# This dinner is good!
def not_bad(s):
# +++your code here+++
  n = s.find('not')
  b = s.find('bad')
  if n == -1 or b == -1 or n > b:
    # either substring is missing, or 'bad' comes before 'not': leave unchanged
    return s
  return s[0:n] + 'good' + s[b+3:]
# not_bad('This movie is not so bad'), 'This movie is good')
# F. front_back
# Consider dividing a string into two halves.
# If the length is even, the front and back halves are the same length.
# If the length is odd, we'll say that the extra char goes in the front half.
# e.g. 'abcde', the front half is 'abc', the back half 'de'.
# Given 2 strings, a and b, return a string of the form
# a-front + b-front + a-back + b-back
def front_back(a, b):
  # +++your code here+++
  # the extra char of an odd-length string goes in the front half
  a_mid = (len(a) + 1) / 2
  b_mid = (len(b) + 1) / 2
  return a[:a_mid] + b[:b_mid] + a[a_mid:] + b[b_mid:]
# ('abcd', 'xy'), 'abxcdy')
# test(front_back('abcde', 'xyz'), 'abcxydez')
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# main() calls the above functions with interesting inputs,
# using the above test() to check if the result is correct or not.
def main():
print 'verbing'
test(verbing('hail'), 'hailing')
test(verbing('swiming'), 'swimingly')
test(verbing('do'), 'do')
print
print 'not_bad'
test(not_bad('This movie is not so bad'), 'This movie is good')
test(not_bad('This dinner is not that bad!'), 'This dinner is good!')
test(not_bad('This tea is not hot'), 'This tea is not hot')
test(not_bad("It's bad yet not"), "It's bad yet not")
print
print 'front_back'
test(front_back('abcd', 'xy'), 'abxcdy')
test(front_back('abcde', 'xyz'), 'abcxydez')
test(front_back('Kitten', 'Donut'), 'KitDontenut')
if __name__ == '__main__':
main()
|
[
"[email protected]"
] | |
6376fc530b0e76bd4f87940ec6ad2bb947e107f8
|
a7596165a29e5186bc6c4718e3b6e835939b105d
|
/apps/impala/src/impala/conf.py
|
ebefe16ed4f96697220a65c9051ecf7c7974f66a
|
[
"Apache-2.0"
] |
permissive
|
lockhart39/HueQualityAndIngestionApp
|
f0c778665f0fbe699ec30e0df5e9f3ed8a9c3384
|
c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c
|
refs/heads/master
| 2021-08-20T00:31:29.481333 | 2017-11-27T19:22:16 | 2017-11-27T19:22:16 | 112,237,923 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 6,295 |
py
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
import socket
from django.utils.translation import ugettext_lazy as _t, ugettext as _
from desktop.conf import default_ssl_cacerts, default_ssl_validate, AUTH_USERNAME as DEFAULT_AUTH_USERNAME,\
AUTH_PASSWORD as DEFAULT_AUTH_PASSWORD
from desktop.lib.conf import ConfigSection, Config, coerce_bool, coerce_csv, coerce_password_from_script
from desktop.lib.exceptions import StructuredThriftTransportException
from impala.settings import NICE_NAME
LOG = logging.getLogger(__name__)
SERVER_HOST = Config(
key="server_host",
help=_t("Host of the Impala Server."),
default="localhost")
SERVER_PORT = Config(
key="server_port",
help=_t("Port of the Impala Server."),
default=21050,
type=int)
IMPALA_PRINCIPAL=Config(
key='impala_principal',
help=_t("Kerberos principal name for Impala. Typically 'impala/hostname.foo.com'."),
type=str,
default="impala/%s" % socket.getfqdn())
IMPERSONATION_ENABLED=Config(
key='impersonation_enabled',
help=_t("Turn on/off impersonation mechanism when talking to Impala."),
type=coerce_bool,
default=False)
QUERYCACHE_ROWS=Config(
key='querycache_rows',
help=_t("Number of initial rows of a resultset to ask Impala to cache in order to"
" support re-fetching them for downloading them."
" Set to 0 for disabling the option and backward compatibility."),
type=int,
default=50000)
SERVER_CONN_TIMEOUT = Config(
key='server_conn_timeout',
default=120,
type=int,
help=_t('Timeout in seconds for Thrift calls.'))
CLOSE_QUERIES = Config(
key="close_queries",
help=_t("Hue will try to close the Impala query when the user leaves the editor page. "
"This will free all the query resources in Impala, but also make its results inaccessible."),
type=coerce_bool,
default=True
)
QUERY_TIMEOUT_S = Config(
key="query_timeout_s",
help=_t("If QUERY_TIMEOUT_S > 0, the query will be timed out (i.e. cancelled) if Impala does not do any work"
" (compute or send back results) for that query within QUERY_TIMEOUT_S seconds."),
type=int,
default=600
)
SESSION_TIMEOUT_S = Config(
key="session_timeout_s",
help=_t("If SESSION_TIMEOUT_S > 0, the session will be timed out (i.e. cancelled) if Impala does not do any work"
" (compute or send back results) for that session within QUERY_TIMEOUT_S seconds."),
type=int,
default=12 * 60 * 60
)
CONFIG_WHITELIST = Config(
key='config_whitelist',
default='debug_action,explain_level,mem_limit,optimize_partition_key_scans,query_timeout_s,request_pool',
type=coerce_csv,
help=_t('A comma-separated list of white-listed Impala configuration properties that users are authorized to set.')
)
SSL = ConfigSection(
key='ssl',
help=_t('SSL configuration for the server.'),
members=dict(
ENABLED = Config(
key="enabled",
help=_t("SSL communication enabled for this server."),
type=coerce_bool,
default=False
),
CACERTS = Config(
key="cacerts",
help=_t("Path to Certificate Authority certificates."),
type=str,
dynamic_default=default_ssl_cacerts,
),
KEY = Config(
key="key",
help=_t("Path to the private key file, e.g. /etc/hue/key.pem"),
type=str,
default=None
),
CERT = Config(
key="cert",
help=_t("Path to the public certificate file, e.g. /etc/hue/cert.pem"),
type=str,
default=None
),
VALIDATE = Config(
key="validate",
help=_t("Choose whether Hue should validate certificates received from the server."),
type=coerce_bool,
dynamic_default=default_ssl_validate,
)
)
)
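# Illustrative (assumed) hue.ini fragment showing how the keys defined above
# map onto Hue's configuration file; the host and certificate values below
# are placeholders, not values from this repository:
#
#   [impala]
#     server_host=impalad-01.example.com
#     server_port=21050
#     [[ssl]]
#       enabled=true
#       cacerts=/etc/hue/cacerts.pem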
def get_auth_username():
"""Get from top level default from desktop"""
return DEFAULT_AUTH_USERNAME.get()
AUTH_USERNAME = Config(
key="auth_username",
help=_t("Auth username of the hue user used for authentications."),
private=True,
dynamic_default=get_auth_username)
def get_auth_password():
"""Get from script or backward compatibility"""
password = AUTH_PASSWORD_SCRIPT.get()
if password:
return password
return DEFAULT_AUTH_PASSWORD.get()
AUTH_PASSWORD = Config(
key="auth_password",
help=_t("LDAP/PAM/.. password of the hue user used for authentications."),
private=True,
dynamic_default=get_auth_password)
AUTH_PASSWORD_SCRIPT = Config(
key="auth_password_script",
help=_t("Execute this script to produce the auth password. This will be used when `auth_password` is not set."),
private=True,
type=coerce_password_from_script,
default=None)
def config_validator(user):
# dbms is dependent on beeswax.conf (this file)
# import in method to avoid circular dependency
from beeswax.server import dbms
from beeswax.server.dbms import get_query_server_config
res = []
try:
try:
      if 'test' not in sys.argv:  # Avoid tests hanging
query_server = get_query_server_config(name='impala')
server = dbms.get(user, query_server)
server.execute_statement("SELECT 'Hello World!';")
except StructuredThriftTransportException, ex:
if 'TSocket read 0 bytes' in str(ex): # this message appears when authentication fails
msg = "Failed to authenticate to Impalad, check authentication configurations."
LOG.exception(msg)
res.append((NICE_NAME, _(msg)))
else:
raise ex
except Exception, ex:
msg = "No available Impalad to send queries to."
LOG.exception(msg)
res.append((NICE_NAME, _(msg)))
return res
|
[
"[email protected]"
] | |
7025f22c61a62607db00ded8bad9e7ddf8e73873
|
d11543019903dedde9fe81b46587ebd190f9ec8b
|
/0x04-python-more_data_structures/101-square_matrix_map.py
|
3179981ed00ee3e115cc4dc9fa9eabcb3915be40
|
[] |
no_license
|
Viiic98/holbertonschool-higher_level_programming
|
553d435a8dec97f4ff83e4862ee3978d6ca02d7b
|
5507f2a94a9b58af5760052ad9ba8fd9904bbad2
|
refs/heads/master
| 2020-09-29T00:15:41.990726 | 2020-05-15T21:56:37 | 2020-05-15T21:56:37 | 226,900,083 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 123 |
py
|
#!/usr/bin/python3
def square_matrix_map(matrix=[]):
    return list(map(lambda row: list(map(lambda x: x * x, row)), matrix))
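# Illustrative trace (added example, not part of the original exercise file):
# >>> square_matrix_map([[1, 2], [3, 4]])
# [[1, 4], [9, 16]]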
|
[
"[email protected]"
] | |
57f798f3e5d7088caef9d70cee3a08ba05c77d64
|
3950cb348a4a3ff6627d502dbdf4e576575df2fb
|
/.venv/Lib/site-packages/numba/tests/test_extending.py
|
8aea9608da489164cd1072d8131cdd3a2231d44f
|
[] |
no_license
|
Bdye15/Sample_Programs
|
a90d288c8f5434f46e1d266f005d01159d8f7927
|
08218b697db91e55e8e0c49664a0b0cb44b4ab93
|
refs/heads/main
| 2023-03-02T04:40:57.737097 | 2021-01-31T03:03:59 | 2021-01-31T03:03:59 | 328,053,795 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 57,239 |
py
|
import math
import operator
import sys
import pickle
import multiprocessing
import ctypes
import warnings
from distutils.version import LooseVersion
import re
import numpy as np
from numba import njit, jit, vectorize, guvectorize, objmode
from numba.core import types, errors, typing, compiler, cgutils
from numba.core.typed_passes import type_inference_stage
from numba.core.registry import cpu_target
from numba.core.compiler import compile_isolated
from numba.tests.support import (
TestCase,
captured_stdout,
temp_directory,
override_config,
run_in_new_process_in_cache_dir,
)
from numba.core.errors import LoweringError
import unittest
from numba.extending import (
typeof_impl,
type_callable,
lower_builtin,
lower_cast,
overload,
overload_attribute,
overload_method,
models,
register_model,
box,
unbox,
NativeValue,
intrinsic,
_Intrinsic,
register_jitable,
get_cython_function_address,
is_jitted,
)
from numba.core.typing.templates import (
ConcreteTemplate,
signature,
infer,
infer_global,
AbstractTemplate,
)
# Pandas-like API implementation
from .pdlike_usecase import Index, Series
try:
import scipy
if LooseVersion(scipy.__version__) < "0.19":
sc = None
else:
import scipy.special.cython_special as sc
except ImportError:
sc = None
# -----------------------------------------------------------------------
# Define a custom type and an implicit cast on it
class MyDummy(object):
pass
class MyDummyType(types.Opaque):
def can_convert_to(self, context, toty):
if isinstance(toty, types.Number):
from numba.core.typeconv import Conversion
return Conversion.safe
mydummy_type = MyDummyType("mydummy")
mydummy = MyDummy()
@typeof_impl.register(MyDummy)
def typeof_mydummy(val, c):
return mydummy_type
@lower_cast(MyDummyType, types.Number)
def mydummy_to_number(context, builder, fromty, toty, val):
"""
Implicit conversion from MyDummy to int.
"""
return context.get_constant(toty, 42)
def get_dummy():
return mydummy
register_model(MyDummyType)(models.OpaqueModel)
@unbox(MyDummyType)
def unbox_index(typ, obj, c):
return NativeValue(c.context.get_dummy_value())
# -----------------------------------------------------------------------
# Define a second custom type but w/o implicit cast to Number
def base_dummy_type_factory(name):
class DynType(object):
pass
class DynTypeType(types.Opaque):
pass
dyn_type_type = DynTypeType(name)
@typeof_impl.register(DynType)
def typeof_mydummy(val, c):
return dyn_type_type
register_model(DynTypeType)(models.OpaqueModel)
return DynTypeType, DynType, dyn_type_type
MyDummyType2, MyDummy2, mydummy_type_2 = base_dummy_type_factory("mydummy2")
@unbox(MyDummyType2)
def unbox_index2(typ, obj, c):
return NativeValue(c.context.get_dummy_value())
# -----------------------------------------------------------------------
# Define a function's typing and implementation using the classical
# two-step API
def func1(x=None):
raise NotImplementedError
def type_func1_(context):
def typer(x=None):
if x in (None, types.none):
# 0-arg or 1-arg with None
return types.int32
elif isinstance(x, types.Float):
# 1-arg with float
return x
return typer
type_func1 = type_callable(func1)(type_func1_)
@lower_builtin(func1)
@lower_builtin(func1, types.none)
def func1_nullary(context, builder, sig, args):
return context.get_constant(sig.return_type, 42)
@lower_builtin(func1, types.Float)
def func1_unary(context, builder, sig, args):
def func1_impl(x):
return math.sqrt(2 * x)
return context.compile_internal(builder, func1_impl, sig, args)
# We can do the same for a known internal operation, here "print_item"
# which we extend to support MyDummyType.
@infer
class PrintDummy(ConcreteTemplate):
key = "print_item"
cases = [signature(types.none, mydummy_type)]
@lower_builtin("print_item", MyDummyType)
def print_dummy(context, builder, sig, args):
[x] = args
pyapi = context.get_python_api(builder)
strobj = pyapi.unserialize(pyapi.serialize_object("hello!"))
pyapi.print_object(strobj)
pyapi.decref(strobj)
return context.get_dummy_value()
# -----------------------------------------------------------------------
# Define an overloaded function (combined API)
def where(cond, x, y):
raise NotImplementedError
def np_where(cond, x, y):
"""
Wrap np.where() to allow for keyword arguments
"""
return np.where(cond, x, y)
def call_where(cond, x, y):
return where(cond, y=y, x=x)
@overload(where)
def overload_where_arrays(cond, x, y):
"""
Implement where() for arrays.
"""
# Choose implementation based on argument types.
if isinstance(cond, types.Array):
if x.dtype != y.dtype:
raise errors.TypingError("x and y should have the same dtype")
# Array where() => return an array of the same shape
if all(ty.layout == "C" for ty in (cond, x, y)):
def where_impl(cond, x, y):
"""
Fast implementation for C-contiguous arrays
"""
shape = cond.shape
if x.shape != shape or y.shape != shape:
raise ValueError("all inputs should have the same shape")
res = np.empty_like(x)
cf = cond.flat
xf = x.flat
yf = y.flat
rf = res.flat
for i in range(cond.size):
rf[i] = xf[i] if cf[i] else yf[i]
return res
else:
def where_impl(cond, x, y):
"""
Generic implementation for other arrays
"""
shape = cond.shape
if x.shape != shape or y.shape != shape:
raise ValueError("all inputs should have the same shape")
res = np.empty_like(x)
for idx, c in np.ndenumerate(cond):
res[idx] = x[idx] if c else y[idx]
return res
return where_impl
# We can define another overload function for the same function, they
# will be tried in turn until one succeeds.
@overload(where)
def overload_where_scalars(cond, x, y):
"""
Implement where() for scalars.
"""
if not isinstance(cond, types.Array):
if x != y:
raise errors.TypingError("x and y should have the same type")
def where_impl(cond, x, y):
"""
Scalar where() => return a 0-dim array
"""
scal = x if cond else y
# Can't use full_like() on Numpy < 1.8
arr = np.empty_like(scal)
arr[()] = scal
return arr
return where_impl
# -----------------------------------------------------------------------
# Overload an already defined built-in function, extending it for new types.
@overload(len)
def overload_len_dummy(arg):
if isinstance(arg, MyDummyType):
def len_impl(arg):
return 13
return len_impl
@overload(operator.add)
def overload_add_dummy(arg1, arg2):
if isinstance(arg1, (MyDummyType, MyDummyType2)) and isinstance(
arg2, (MyDummyType, MyDummyType2)
):
def dummy_add_impl(arg1, arg2):
return 42
return dummy_add_impl
@overload(operator.delitem)
def overload_dummy_delitem(obj, idx):
if isinstance(obj, MyDummyType) and isinstance(idx, types.Integer):
def dummy_delitem_impl(obj, idx):
print("del", obj, idx)
return dummy_delitem_impl
@overload(operator.getitem)
def overload_dummy_getitem(obj, idx):
if isinstance(obj, MyDummyType) and isinstance(idx, types.Integer):
def dummy_getitem_impl(obj, idx):
return idx + 123
return dummy_getitem_impl
@overload(operator.setitem)
def overload_dummy_setitem(obj, idx, val):
if all(
[
isinstance(obj, MyDummyType),
isinstance(idx, types.Integer),
isinstance(val, types.Integer),
]
):
def dummy_setitem_impl(obj, idx, val):
print(idx, val)
return dummy_setitem_impl
def call_add_operator(arg1, arg2):
return operator.add(arg1, arg2)
def call_add_binop(arg1, arg2):
return arg1 + arg2
@overload(operator.iadd)
def overload_iadd_dummy(arg1, arg2):
if isinstance(arg1, (MyDummyType, MyDummyType2)) and isinstance(
arg2, (MyDummyType, MyDummyType2)
):
def dummy_iadd_impl(arg1, arg2):
return 42
return dummy_iadd_impl
def call_iadd_operator(arg1, arg2):
return operator.add(arg1, arg2)
def call_iadd_binop(arg1, arg2):
arg1 += arg2
return arg1
def call_delitem(obj, idx):
del obj[idx]
def call_getitem(obj, idx):
return obj[idx]
def call_setitem(obj, idx, val):
obj[idx] = val
@overload_method(MyDummyType, "length")
def overload_method_length(arg):
def imp(arg):
return len(arg)
return imp
def cache_overload_method_usecase(x):
return x.length()
def call_func1_nullary():
return func1()
def call_func1_unary(x):
return func1(x)
def len_usecase(x):
return len(x)
def print_usecase(x):
print(x)
def getitem_usecase(x, key):
return x[key]
def npyufunc_usecase(x):
return np.cos(np.sin(x))
def get_data_usecase(x):
return x._data
def get_index_usecase(x):
return x._index
def is_monotonic_usecase(x):
return x.is_monotonic_increasing
def make_series_usecase(data, index):
return Series(data, index)
def clip_usecase(x, lo, hi):
return x.clip(lo, hi)
# -----------------------------------------------------------------------
def return_non_boxable():
return np
@overload(return_non_boxable)
def overload_return_non_boxable():
def imp():
return np
return imp
def non_boxable_ok_usecase(sz):
mod = return_non_boxable()
return mod.arange(sz)
def non_boxable_bad_usecase():
return return_non_boxable()
def mk_func_input(f):
pass
@infer_global(mk_func_input)
class MkFuncTyping(AbstractTemplate):
def generic(self, args, kws):
assert isinstance(args[0], types.MakeFunctionLiteral)
return signature(types.none, *args)
def mk_func_test_impl():
mk_func_input(lambda a: a)
# -----------------------------------------------------------------------
@overload(np.exp)
def overload_np_exp(obj):
if isinstance(obj, MyDummyType):
def imp(obj):
# Returns a constant if a MyDummyType is seen
return 0xDEADBEEF
return imp
class TestLowLevelExtending(TestCase):
"""
Test the low-level two-tier extension API.
"""
# We check with both @jit and compile_isolated(), to exercise the
# registration logic.
def test_func1(self):
pyfunc = call_func1_nullary
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(), 42)
pyfunc = call_func1_unary
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(None), 42)
self.assertPreciseEqual(cfunc(18.0), 6.0)
def test_func1_isolated(self):
pyfunc = call_func1_nullary
cr = compile_isolated(pyfunc, ())
self.assertPreciseEqual(cr.entry_point(), 42)
pyfunc = call_func1_unary
cr = compile_isolated(pyfunc, (types.float64,))
self.assertPreciseEqual(cr.entry_point(18.0), 6.0)
def test_type_callable_keeps_function(self):
self.assertIs(type_func1, type_func1_)
self.assertIsNotNone(type_func1)
def test_cast_mydummy(self):
pyfunc = get_dummy
cr = compile_isolated(pyfunc, (), types.float64)
self.assertPreciseEqual(cr.entry_point(), 42.0)
def test_mk_func_literal(self):
"""make sure make_function is passed to typer class as a literal
"""
test_ir = compiler.run_frontend(mk_func_test_impl)
typingctx = cpu_target.typing_context
typingctx.refresh()
typemap, _, _ = type_inference_stage(typingctx, test_ir, (), None)
self.assertTrue(
any(
isinstance(a, types.MakeFunctionLiteral)
for a in typemap.values()
)
)
class TestPandasLike(TestCase):
"""
Test implementing a pandas-like Index object.
Also stresses most of the high-level API.
"""
def test_index_len(self):
i = Index(np.arange(3))
cfunc = jit(nopython=True)(len_usecase)
self.assertPreciseEqual(cfunc(i), 3)
def test_index_getitem(self):
i = Index(np.int32([42, 8, -5]))
cfunc = jit(nopython=True)(getitem_usecase)
self.assertPreciseEqual(cfunc(i, 1), 8)
ii = cfunc(i, slice(1, None))
self.assertIsInstance(ii, Index)
self.assertEqual(list(ii), [8, -5])
def test_index_ufunc(self):
"""
Check Numpy ufunc on an Index object.
"""
i = Index(np.int32([42, 8, -5]))
cfunc = jit(nopython=True)(npyufunc_usecase)
ii = cfunc(i)
self.assertIsInstance(ii, Index)
self.assertPreciseEqual(ii._data, np.cos(np.sin(i._data)))
def test_index_get_data(self):
# The _data attribute is exposed with make_attribute_wrapper()
i = Index(np.int32([42, 8, -5]))
cfunc = jit(nopython=True)(get_data_usecase)
data = cfunc(i)
self.assertIs(data, i._data)
def test_index_is_monotonic(self):
# The is_monotonic_increasing attribute is exposed with
# overload_attribute()
cfunc = jit(nopython=True)(is_monotonic_usecase)
for values, expected in [
([8, 42, 5], False),
([5, 8, 42], True),
([], True),
]:
i = Index(np.int32(values))
got = cfunc(i)
self.assertEqual(got, expected)
def test_series_len(self):
i = Index(np.int32([2, 4, 3]))
s = Series(np.float64([1.5, 4.0, 2.5]), i)
cfunc = jit(nopython=True)(len_usecase)
self.assertPreciseEqual(cfunc(s), 3)
def test_series_get_index(self):
i = Index(np.int32([2, 4, 3]))
s = Series(np.float64([1.5, 4.0, 2.5]), i)
cfunc = jit(nopython=True)(get_index_usecase)
got = cfunc(s)
self.assertIsInstance(got, Index)
self.assertIs(got._data, i._data)
def test_series_ufunc(self):
"""
        Check Numpy ufunc on a Series object.
"""
i = Index(np.int32([42, 8, -5]))
s = Series(np.int64([1, 2, 3]), i)
cfunc = jit(nopython=True)(npyufunc_usecase)
ss = cfunc(s)
self.assertIsInstance(ss, Series)
self.assertIsInstance(ss._index, Index)
self.assertIs(ss._index._data, i._data)
self.assertPreciseEqual(ss._values, np.cos(np.sin(s._values)))
def test_series_constructor(self):
i = Index(np.int32([42, 8, -5]))
d = np.float64([1.5, 4.0, 2.5])
cfunc = jit(nopython=True)(make_series_usecase)
got = cfunc(d, i)
self.assertIsInstance(got, Series)
self.assertIsInstance(got._index, Index)
self.assertIs(got._index._data, i._data)
self.assertIs(got._values, d)
def test_series_clip(self):
i = Index(np.int32([42, 8, -5]))
s = Series(np.float64([1.5, 4.0, 2.5]), i)
cfunc = jit(nopython=True)(clip_usecase)
ss = cfunc(s, 1.6, 3.0)
self.assertIsInstance(ss, Series)
self.assertIsInstance(ss._index, Index)
self.assertIs(ss._index._data, i._data)
self.assertPreciseEqual(ss._values, np.float64([1.6, 3.0, 2.5]))
class TestHighLevelExtending(TestCase):
"""
Test the high-level combined API.
"""
def test_where(self):
"""
Test implementing a function with @overload.
"""
pyfunc = call_where
cfunc = jit(nopython=True)(pyfunc)
def check(*args, **kwargs):
expected = np_where(*args, **kwargs)
got = cfunc(*args, **kwargs)
self.assertPreciseEqual(expected, got)
check(x=3, cond=True, y=8)
check(True, 3, 8)
check(
np.bool_([True, False, True]),
np.int32([1, 2, 3]),
np.int32([4, 5, 5]),
)
# The typing error is propagated
with self.assertRaises(errors.TypingError) as raises:
cfunc(np.bool_([]), np.int32([]), np.int64([]))
self.assertIn(
"x and y should have the same dtype", str(raises.exception)
)
def test_len(self):
"""
Test re-implementing len() for a custom type with @overload.
"""
cfunc = jit(nopython=True)(len_usecase)
self.assertPreciseEqual(cfunc(MyDummy()), 13)
self.assertPreciseEqual(cfunc([4, 5]), 2)
def test_print(self):
"""
Test re-implementing print() for a custom type with @overload.
"""
cfunc = jit(nopython=True)(print_usecase)
with captured_stdout():
cfunc(MyDummy())
self.assertEqual(sys.stdout.getvalue(), "hello!\n")
def test_add_operator(self):
"""
Test re-implementing operator.add() for a custom type with @overload.
"""
pyfunc = call_add_operator
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(1, 2), 3)
self.assertPreciseEqual(cfunc(MyDummy2(), MyDummy2()), 42)
# this will call add(Number, Number) as MyDummy implicitly casts to
# Number
self.assertPreciseEqual(cfunc(MyDummy(), MyDummy()), 84)
def test_add_binop(self):
"""
Test re-implementing '+' for a custom type via @overload(operator.add).
"""
pyfunc = call_add_binop
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(1, 2), 3)
self.assertPreciseEqual(cfunc(MyDummy2(), MyDummy2()), 42)
# this will call add(Number, Number) as MyDummy implicitly casts to
# Number
self.assertPreciseEqual(cfunc(MyDummy(), MyDummy()), 84)
def test_iadd_operator(self):
"""
Test re-implementing operator.add() for a custom type with @overload.
"""
pyfunc = call_iadd_operator
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(1, 2), 3)
self.assertPreciseEqual(cfunc(MyDummy2(), MyDummy2()), 42)
# this will call add(Number, Number) as MyDummy implicitly casts to
# Number
self.assertPreciseEqual(cfunc(MyDummy(), MyDummy()), 84)
def test_iadd_binop(self):
"""
Test re-implementing '+' for a custom type via @overload(operator.add).
"""
pyfunc = call_iadd_binop
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(1, 2), 3)
self.assertPreciseEqual(cfunc(MyDummy2(), MyDummy2()), 42)
# this will call add(Number, Number) as MyDummy implicitly casts to
# Number
self.assertPreciseEqual(cfunc(MyDummy(), MyDummy()), 84)
def test_delitem(self):
pyfunc = call_delitem
cfunc = jit(nopython=True)(pyfunc)
obj = MyDummy()
e = None
with captured_stdout() as out:
try:
cfunc(obj, 321)
except Exception as exc:
e = exc
if e is not None:
raise e
self.assertEqual(out.getvalue(), "del hello! 321\n")
def test_getitem(self):
pyfunc = call_getitem
cfunc = jit(nopython=True)(pyfunc)
self.assertPreciseEqual(cfunc(MyDummy(), 321), 321 + 123)
def test_setitem(self):
pyfunc = call_setitem
cfunc = jit(nopython=True)(pyfunc)
obj = MyDummy()
e = None
with captured_stdout() as out:
try:
cfunc(obj, 321, 123)
except Exception as exc:
e = exc
if e is not None:
raise e
self.assertEqual(out.getvalue(), "321 123\n")
def test_no_cpython_wrapper(self):
"""
Test overloading whose return value cannot be represented in CPython.
"""
# Test passing Module type from a @overload implementation to ensure
# that the *no_cpython_wrapper* flag works
ok_cfunc = jit(nopython=True)(non_boxable_ok_usecase)
n = 10
got = ok_cfunc(n)
expect = non_boxable_ok_usecase(n)
np.testing.assert_equal(expect, got)
# Verify that the Module type cannot be returned to CPython
bad_cfunc = jit(nopython=True)(non_boxable_bad_usecase)
with self.assertRaises(TypeError) as raises:
bad_cfunc()
errmsg = str(raises.exception)
expectmsg = "cannot convert native Module"
self.assertIn(expectmsg, errmsg)
def test_typing_vs_impl_signature_mismatch_handling(self):
"""
Tests that an overload which has a differing typing and implementing
signature raises an exception.
"""
def gen_ol(impl=None):
def myoverload(a, b, c, kw=None):
pass
@overload(myoverload)
def _myoverload_impl(a, b, c, kw=None):
return impl
@jit(nopython=True)
def foo(a, b, c, d):
myoverload(a, b, c, kw=d)
return foo
sentinel = "Typing and implementation arguments differ in"
# kwarg value is different
def impl1(a, b, c, kw=12):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl1)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("keyword argument default values", msg)
self.assertIn('<Parameter "kw=12">', msg)
self.assertIn('<Parameter "kw=None">', msg)
# kwarg name is different
def impl2(a, b, c, kwarg=None):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl2)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("keyword argument names", msg)
self.assertIn('<Parameter "kwarg=None">', msg)
self.assertIn('<Parameter "kw=None">', msg)
# arg name is different
def impl3(z, b, c, kw=None):
if a > 10: # noqa: F821
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl3)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("argument names", msg)
self.assertFalse("keyword" in msg)
self.assertIn('<Parameter "a">', msg)
self.assertIn('<Parameter "z">', msg)
from .overload_usecases import impl4, impl5
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl4)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("argument names", msg)
self.assertFalse("keyword" in msg)
self.assertIn("First difference: 'z'", msg)
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl5)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("argument names", msg)
self.assertFalse("keyword" in msg)
self.assertIn('<Parameter "a">', msg)
self.assertIn('<Parameter "z">', msg)
# too many args
def impl6(a, b, c, d, e, kw=None):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl6)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("argument names", msg)
self.assertFalse("keyword" in msg)
self.assertIn('<Parameter "d">', msg)
self.assertIn('<Parameter "e">', msg)
# too few args
def impl7(a, b, kw=None):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl7)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("argument names", msg)
self.assertFalse("keyword" in msg)
self.assertIn('<Parameter "c">', msg)
# too many kwargs
def impl8(a, b, c, kw=None, extra_kwarg=None):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl8)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("keyword argument names", msg)
self.assertIn('<Parameter "extra_kwarg=None">', msg)
# too few kwargs
def impl9(a, b, c):
if a > 10:
return 1
else:
return -1
with self.assertRaises(errors.TypingError) as e:
gen_ol(impl9)(1, 2, 3, 4)
msg = str(e.exception)
self.assertIn(sentinel, msg)
self.assertIn("keyword argument names", msg)
self.assertIn('<Parameter "kw=None">', msg)
def test_typing_vs_impl_signature_mismatch_handling_var_positional(self):
"""
Tests that an overload which has a differing typing and implementing
signature raises an exception and uses VAR_POSITIONAL (*args) in typing
"""
def myoverload(a, kw=None):
pass
from .overload_usecases import var_positional_impl
overload(myoverload)(var_positional_impl)
@jit(nopython=True)
def foo(a, b):
return myoverload(a, b, 9, kw=11)
with self.assertRaises(errors.TypingError) as e:
foo(1, 5)
msg = str(e.exception)
self.assertIn("VAR_POSITIONAL (e.g. *args) argument kind", msg)
self.assertIn("offending argument name is '*star_args_token'", msg)
def test_typing_vs_impl_signature_mismatch_handling_var_keyword(self):
"""
        Tests that an overload which uses **kwargs (VAR_KEYWORD) in either
        its typing or implementing signature raises an exception.
"""
def gen_ol(impl, strict=True):
def myoverload(a, kw=None):
pass
overload(myoverload, strict=strict)(impl)
@jit(nopython=True)
def foo(a, b):
return myoverload(a, kw=11)
return foo
# **kwargs in typing
def ol1(a, **kws):
def impl(a, kw=10):
return a
return impl
gen_ol(ol1, False)(1, 2) # no error if strictness not enforced
with self.assertRaises(errors.TypingError) as e:
gen_ol(ol1)(1, 2)
msg = str(e.exception)
self.assertIn("use of VAR_KEYWORD (e.g. **kwargs) is unsupported", msg)
self.assertIn("offending argument name is '**kws'", msg)
# **kwargs in implementation
def ol2(a, kw=0):
def impl(a, **kws):
return a
return impl
with self.assertRaises(errors.TypingError) as e:
gen_ol(ol2)(1, 2)
msg = str(e.exception)
self.assertIn("use of VAR_KEYWORD (e.g. **kwargs) is unsupported", msg)
self.assertIn("offending argument name is '**kws'", msg)
def test_overload_method_kwargs(self):
# Issue #3489
@overload_method(types.Array, "foo")
def fooimpl(arr, a_kwarg=10):
def impl(arr, a_kwarg=10):
return a_kwarg
return impl
@njit
def bar(A):
return A.foo(), A.foo(20), A.foo(a_kwarg=30)
Z = np.arange(5)
self.assertEqual(bar(Z), (10, 20, 30))
def test_overload_method_literal_unpack(self):
# Issue #3683
@overload_method(types.Array, "litfoo")
def litfoo(arr, val):
# Must be an integer
if isinstance(val, types.Integer):
# Must not be literal
if not isinstance(val, types.Literal):
def impl(arr, val):
return val
return impl
@njit
def bar(A):
return A.litfoo(0xCAFE)
A = np.zeros(1)
bar(A)
self.assertEqual(bar(A), 0xCAFE)
def test_overload_ufunc(self):
# Issue #4133.
# Use an extended type (MyDummyType) to use with a customized
# ufunc (np.exp).
@njit
def test():
return np.exp(mydummy)
self.assertEqual(test(), 0xDEADBEEF)
def test_overload_method_stararg(self):
@overload_method(MyDummyType, "method_stararg")
def _ov_method_stararg(obj, val, val2, *args):
def get(obj, val, val2, *args):
return (val, val2, args)
return get
@njit
def foo(obj, *args):
# Test with expanding stararg
return obj.method_stararg(*args)
obj = MyDummy()
self.assertEqual(foo(obj, 1, 2), (1, 2, ()))
self.assertEqual(foo(obj, 1, 2, 3), (1, 2, (3,)))
self.assertEqual(foo(obj, 1, 2, 3, 4), (1, 2, (3, 4)))
@njit
def bar(obj):
# Test with explicit argument
return (
obj.method_stararg(1, 2),
obj.method_stararg(1, 2, 3),
obj.method_stararg(1, 2, 3, 4),
)
self.assertEqual(
bar(obj), ((1, 2, ()), (1, 2, (3,)), (1, 2, (3, 4))),
)
# Check cases that put tuple type into stararg
# NOTE: the expected result has an extra tuple because of stararg.
self.assertEqual(
foo(obj, 1, 2, (3,)), (1, 2, ((3,),)),
)
self.assertEqual(
foo(obj, 1, 2, (3, 4)), (1, 2, ((3, 4),)),
)
self.assertEqual(
foo(obj, 1, 2, (3, (4, 5))), (1, 2, ((3, (4, 5)),)),
)
def _assert_cache_stats(cfunc, expect_hit, expect_misses):
hit = cfunc._cache_hits[cfunc.signatures[0]]
if hit != expect_hit:
raise AssertionError("cache not used")
miss = cfunc._cache_misses[cfunc.signatures[0]]
if miss != expect_misses:
        raise AssertionError("unexpected number of cache misses")
class TestOverloadMethodCaching(TestCase):
# Nested multiprocessing.Pool raises AssertionError:
# "daemonic processes are not allowed to have children"
_numba_parallel_test_ = False
def test_caching_overload_method(self):
self._cache_dir = temp_directory(self.__class__.__name__)
with override_config("CACHE_DIR", self._cache_dir):
self.run_caching_overload_method()
def run_caching_overload_method(self):
cfunc = jit(nopython=True, cache=True)(cache_overload_method_usecase)
self.assertPreciseEqual(cfunc(MyDummy()), 13)
_assert_cache_stats(cfunc, 0, 1)
llvmir = cfunc.inspect_llvm((mydummy_type,))
# Ensure the inner method is not a declaration
decls = [
ln
for ln in llvmir.splitlines()
if ln.startswith("declare") and "overload_method_length" in ln
]
self.assertEqual(len(decls), 0)
# Test in a separate process
try:
ctx = multiprocessing.get_context("spawn")
except AttributeError:
ctx = multiprocessing
q = ctx.Queue()
p = ctx.Process(
target=run_caching_overload_method, args=(q, self._cache_dir)
)
p.start()
q.put(MyDummy())
p.join()
# Ensure subprocess exited normally
self.assertEqual(p.exitcode, 0)
res = q.get(timeout=1)
self.assertEqual(res, 13)
def run_caching_overload_method(q, cache_dir):
"""
Used by TestOverloadMethodCaching.test_caching_overload_method
"""
with override_config("CACHE_DIR", cache_dir):
arg = q.get()
cfunc = jit(nopython=True, cache=True)(cache_overload_method_usecase)
res = cfunc(arg)
q.put(res)
# Check cache stat
_assert_cache_stats(cfunc, 1, 0)
class TestIntrinsic(TestCase):
def test_void_return(self):
"""
Verify that returning a None from codegen function is handled
automatically for void functions, otherwise raise exception.
"""
@intrinsic
def void_func(typingctx, a):
sig = types.void(types.int32)
def codegen(context, builder, signature, args):
pass # do nothing, return None, should be turned into
# dummy value
return sig, codegen
@intrinsic
def non_void_func(typingctx, a):
sig = types.int32(types.int32)
def codegen(context, builder, signature, args):
pass # oops, should be returning a value here, raise exception
return sig, codegen
@jit(nopython=True)
def call_void_func():
void_func(1)
return 0
@jit(nopython=True)
def call_non_void_func():
non_void_func(1)
return 0
# void func should work
self.assertEqual(call_void_func(), 0)
# not void function should raise exception
with self.assertRaises(LoweringError) as e:
call_non_void_func()
self.assertIn("non-void function returns None", e.exception.msg)
def test_ll_pointer_cast(self):
"""
Usecase test: custom reinterpret cast to turn int values to pointers
"""
from ctypes import CFUNCTYPE, POINTER, c_float, c_int
# Use intrinsic to make a reinterpret_cast operation
def unsafe_caster(result_type):
assert isinstance(result_type, types.CPointer)
@intrinsic
def unsafe_cast(typingctx, src):
self.assertIsInstance(typingctx, typing.Context)
if isinstance(src, types.Integer):
sig = result_type(types.uintp)
# defines the custom code generation
def codegen(context, builder, signature, args):
[src] = args
rtype = signature.return_type
llrtype = context.get_value_type(rtype)
return builder.inttoptr(src, llrtype)
return sig, codegen
return unsafe_cast
# make a nopython function to use our cast op.
# this is not usable from cpython due to the returning of a pointer.
def unsafe_get_ctypes_pointer(src):
raise NotImplementedError("not callable from python")
@overload(unsafe_get_ctypes_pointer, strict=False)
def array_impl_unsafe_get_ctypes_pointer(arrtype):
if isinstance(arrtype, types.Array):
unsafe_cast = unsafe_caster(types.CPointer(arrtype.dtype))
def array_impl(arr):
return unsafe_cast(src=arr.ctypes.data)
return array_impl
# the ctype wrapped function for use in nopython mode
def my_c_fun_raw(ptr, n):
for i in range(n):
print(ptr[i])
prototype = CFUNCTYPE(None, POINTER(c_float), c_int)
my_c_fun = prototype(my_c_fun_raw)
# Call our pointer-cast in a @jit compiled function and use
# the pointer in a ctypes function
@jit(nopython=True)
def foo(arr):
ptr = unsafe_get_ctypes_pointer(arr)
my_c_fun(ptr, arr.size)
# Test
arr = np.arange(10, dtype=np.float32)
with captured_stdout() as buf:
foo(arr)
got = buf.getvalue().splitlines()
buf.close()
expect = list(map(str, arr))
self.assertEqual(expect, got)
def test_serialization(self):
"""
Test serialization of intrinsic objects
"""
# define a intrinsic
@intrinsic
def identity(context, x):
def codegen(context, builder, signature, args):
return args[0]
sig = x(x)
return sig, codegen
# use in a jit function
@jit(nopython=True)
def foo(x):
return identity(x)
self.assertEqual(foo(1), 1)
# get serialization memo
memo = _Intrinsic._memo
memo_size = len(memo)
# pickle foo and check memo size
serialized_foo = pickle.dumps(foo)
# increases the memo size
memo_size += 1
self.assertEqual(memo_size, len(memo))
# unpickle
foo_rebuilt = pickle.loads(serialized_foo)
self.assertEqual(memo_size, len(memo))
# check rebuilt foo
self.assertEqual(foo(1), foo_rebuilt(1))
# pickle identity directly
serialized_identity = pickle.dumps(identity)
# memo size unchanged
self.assertEqual(memo_size, len(memo))
# unpickle
identity_rebuilt = pickle.loads(serialized_identity)
# must be the same object
self.assertIs(identity, identity_rebuilt)
# memo size unchanged
self.assertEqual(memo_size, len(memo))
def test_deserialization(self):
"""
Test deserialization of intrinsic
"""
def defn(context, x):
def codegen(context, builder, signature, args):
return args[0]
return x(x), codegen
memo = _Intrinsic._memo
memo_size = len(memo)
# invoke _Intrinsic indirectly to avoid registration which keeps an
# internal reference inside the compiler
original = _Intrinsic("foo", defn)
self.assertIs(original._defn, defn)
pickled = pickle.dumps(original)
# by pickling, a new memo entry is created
memo_size += 1
self.assertEqual(memo_size, len(memo))
del original # remove original before unpickling
# by deleting, the memo entry is NOT removed due to recent
# function queue
self.assertEqual(memo_size, len(memo))
# Manually force clear of _recent queue
_Intrinsic._recent.clear()
memo_size -= 1
self.assertEqual(memo_size, len(memo))
rebuilt = pickle.loads(pickled)
# verify that the rebuilt object is different
self.assertIsNot(rebuilt._defn, defn)
# the second rebuilt object is the same as the first
second = pickle.loads(pickled)
self.assertIs(rebuilt._defn, second._defn)
class TestRegisterJitable(unittest.TestCase):
def test_no_flags(self):
@register_jitable
def foo(x, y):
return x + y
def bar(x, y):
return foo(x, y)
cbar = jit(nopython=True)(bar)
expect = bar(1, 2)
got = cbar(1, 2)
self.assertEqual(expect, got)
def test_flags_no_nrt(self):
@register_jitable(_nrt=False)
def foo(n):
return np.arange(n)
def bar(n):
return foo(n)
self.assertEqual(bar(3).tolist(), [0, 1, 2])
cbar = jit(nopython=True)(bar)
with self.assertRaises(errors.TypingError) as raises:
cbar(2)
msg = (
"Only accept returning of array passed into the function as "
"argument"
)
self.assertIn(msg, str(raises.exception))
class TestImportCythonFunction(unittest.TestCase):
@unittest.skipIf(sc is None, "Only run if SciPy >= 0.19 is installed")
def test_getting_function(self):
addr = get_cython_function_address(
"scipy.special.cython_special", "j0"
)
functype = ctypes.CFUNCTYPE(ctypes.c_double, ctypes.c_double)
_j0 = functype(addr)
j0 = jit(nopython=True)(lambda x: _j0(x))
self.assertEqual(j0(0), 1)
def test_missing_module(self):
with self.assertRaises(ImportError) as raises:
get_cython_function_address("fakemodule", "fakefunction")
# The quotes are not there in Python 2
msg = "No module named '?fakemodule'?"
match = re.match(msg, str(raises.exception))
self.assertIsNotNone(match)
@unittest.skipIf(sc is None, "Only run if SciPy >= 0.19 is installed")
def test_missing_function(self):
with self.assertRaises(ValueError) as raises:
get_cython_function_address(
"scipy.special.cython_special", "foo"
)
msg = (
"No function 'foo' found in __pyx_capi__ of "
"'scipy.special.cython_special'"
)
self.assertEqual(msg, str(raises.exception))
@overload_method(
MyDummyType, "method_jit_option_check_nrt", jit_options={"_nrt": True}
)
def ov_method_jit_option_check_nrt(obj):
def imp(obj):
return np.arange(10)
return imp
@overload_method(
MyDummyType, "method_jit_option_check_no_nrt", jit_options={"_nrt": False}
)
def ov_method_jit_option_check_no_nrt(obj):
def imp(obj):
return np.arange(10)
return imp
@overload_attribute(
MyDummyType, "attr_jit_option_check_nrt", jit_options={"_nrt": True}
)
def ov_attr_jit_option_check_nrt(obj):
def imp(obj):
return np.arange(10)
return imp
@overload_attribute(
MyDummyType, "attr_jit_option_check_no_nrt", jit_options={"_nrt": False}
)
def ov_attr_jit_option_check_no_nrt(obj):
def imp(obj):
return np.arange(10)
return imp
class TestJitOptionsNoNRT(TestCase):
# Test overload*(jit_options={...}) by turning off _nrt
def check_error_no_nrt(self, func, *args, **kwargs):
# Check that the compilation fails with a complaint about dynamic array
msg = (
"Only accept returning of array passed into "
"the function as argument"
)
with self.assertRaises(errors.TypingError) as raises:
func(*args, **kwargs)
self.assertIn(msg, str(raises.exception))
def no_nrt_overload_check(self, flag):
def dummy():
return np.arange(10)
@overload(dummy, jit_options={"_nrt": flag})
def ov_dummy():
def dummy():
return np.arange(10)
return dummy
@njit
def foo():
return dummy()
if flag:
self.assertPreciseEqual(foo(), np.arange(10))
else:
self.check_error_no_nrt(foo)
def test_overload_no_nrt(self):
self.no_nrt_overload_check(True)
self.no_nrt_overload_check(False)
def test_overload_method_no_nrt(self):
@njit
def udt(x):
return x.method_jit_option_check_nrt()
self.assertPreciseEqual(udt(mydummy), np.arange(10))
@njit
def udt(x):
return x.method_jit_option_check_no_nrt()
self.check_error_no_nrt(udt, mydummy)
def test_overload_attribute_no_nrt(self):
@njit
def udt(x):
return x.attr_jit_option_check_nrt
self.assertPreciseEqual(udt(mydummy), np.arange(10))
@njit
def udt(x):
return x.attr_jit_option_check_no_nrt
self.check_error_no_nrt(udt, mydummy)
class TestBoxingCallingJIT(TestCase):
def setUp(self):
super().setUp()
many = base_dummy_type_factory("mydummy2")
self.DynTypeType, self.DynType, self.dyn_type_type = many
self.dyn_type = self.DynType()
def test_unboxer_basic(self):
# Implements an unboxer on DynType that calls an intrinsic into the
# unboxer code.
magic_token = 0xCAFE
magic_offset = 123
@intrinsic
def my_intrinsic(typingctx, val):
# An intrinsic that returns `val + magic_offset`
def impl(context, builder, sig, args):
[val] = args
return builder.add(val, val.type(magic_offset))
sig = signature(val, val)
return sig, impl
@unbox(self.DynTypeType)
def unboxer(typ, obj, c):
# The unboxer that calls some jitcode
def bridge(x):
# proof that this is a jit'ed context by calling jit only
# intrinsic
return my_intrinsic(x)
args = [c.context.get_constant(types.intp, magic_token)]
sig = signature(types.voidptr, types.intp)
is_error, res = c.pyapi.call_jit_code(bridge, sig, args)
return NativeValue(res, is_error=is_error)
@box(self.DynTypeType)
def boxer(typ, val, c):
# The boxer that returns an integer representation
res = c.builder.ptrtoint(val, cgutils.intp_t)
return c.pyapi.long_from_ssize_t(res)
@njit
def passthru(x):
return x
out = passthru(self.dyn_type)
self.assertEqual(out, magic_token + magic_offset)
def test_unboxer_raise(self):
# Testing exception raising in jitcode called from unboxing.
@unbox(self.DynTypeType)
def unboxer(typ, obj, c):
# The unboxer that calls some jitcode
def bridge(x):
if x > 0:
raise ValueError("cannot be x > 0")
return x
args = [c.context.get_constant(types.intp, 1)]
sig = signature(types.voidptr, types.intp)
is_error, res = c.pyapi.call_jit_code(bridge, sig, args)
return NativeValue(res, is_error=is_error)
@box(self.DynTypeType)
def boxer(typ, val, c):
# The boxer that returns an integer representation
res = c.builder.ptrtoint(val, cgutils.intp_t)
return c.pyapi.long_from_ssize_t(res)
@njit
def passthru(x):
return x
with self.assertRaises(ValueError) as raises:
passthru(self.dyn_type)
self.assertIn(
"cannot be x > 0", str(raises.exception),
)
def test_boxer(self):
# Call jitcode inside the boxer
magic_token = 0xCAFE
magic_offset = 312
@intrinsic
def my_intrinsic(typingctx, val):
# An intrinsic that returns `val + magic_offset`
def impl(context, builder, sig, args):
[val] = args
return builder.add(val, val.type(magic_offset))
sig = signature(val, val)
return sig, impl
@unbox(self.DynTypeType)
def unboxer(typ, obj, c):
return NativeValue(c.context.get_dummy_value())
@box(self.DynTypeType)
def boxer(typ, val, c):
# Note: this doesn't do proper error handling
def bridge(x):
return my_intrinsic(x)
args = [c.context.get_constant(types.intp, magic_token)]
sig = signature(types.intp, types.intp)
is_error, res = c.pyapi.call_jit_code(bridge, sig, args)
return c.pyapi.long_from_ssize_t(res)
@njit
def passthru(x):
return x
r = passthru(self.dyn_type)
self.assertEqual(r, magic_token + magic_offset)
def test_boxer_raise(self):
# Call jitcode inside the boxer
@unbox(self.DynTypeType)
def unboxer(typ, obj, c):
return NativeValue(c.context.get_dummy_value())
@box(self.DynTypeType)
def boxer(typ, val, c):
def bridge(x):
if x > 0:
raise ValueError("cannot do x > 0")
return x
args = [c.context.get_constant(types.intp, 1)]
sig = signature(types.intp, types.intp)
is_error, res = c.pyapi.call_jit_code(bridge, sig, args)
# The error handling
retval = cgutils.alloca_once(c.builder, c.pyapi.pyobj, zfill=True)
with c.builder.if_then(c.builder.not_(is_error)):
obj = c.pyapi.long_from_ssize_t(res)
c.builder.store(obj, retval)
return c.builder.load(retval)
@njit
def passthru(x):
return x
with self.assertRaises(ValueError) as raises:
passthru(self.dyn_type)
self.assertIn(
"cannot do x > 0", str(raises.exception),
)
def with_objmode_cache_ov_example(x):
# This is the function stub for overloading inside
# TestCachingOverloadObjmode.test_caching_overload_objmode
pass
class TestCachingOverloadObjmode(TestCase):
"""Test caching of the use of overload implementations that use
`with objmode`
"""
_numba_parallel_test_ = False
def setUp(self):
warnings.simplefilter("error", errors.NumbaWarning)
def tearDown(self):
warnings.resetwarnings()
def test_caching_overload_objmode(self):
cache_dir = temp_directory(self.__class__.__name__)
with override_config("CACHE_DIR", cache_dir):
def realwork(x):
# uses numpy code
arr = np.arange(x) / x
return np.linalg.norm(arr)
def python_code(x):
# create indirections
return realwork(x)
@overload(with_objmode_cache_ov_example)
def _ov_with_objmode_cache_ov_example(x):
def impl(x):
with objmode(y="float64"):
y = python_code(x)
return y
return impl
@njit(cache=True)
def testcase(x):
return with_objmode_cache_ov_example(x)
expect = realwork(123)
got = testcase(123)
self.assertEqual(got, expect)
testcase_cached = njit(cache=True)(testcase.py_func)
got = testcase_cached(123)
self.assertEqual(got, expect)
@classmethod
def check_objmode_cache_ndarray(cls):
def do_this(a, b):
return np.sum(a + b)
def do_something(a, b):
return np.sum(a + b)
@overload(do_something)
def overload_do_something(a, b):
def _do_something_impl(a, b):
with objmode(y='float64'):
y = do_this(a, b)
return y
return _do_something_impl
@njit(cache=True)
def test_caching():
a = np.arange(20)
b = np.arange(20)
return do_something(a, b)
got = test_caching()
expect = test_caching.py_func()
# Check result
if got != expect:
raise AssertionError("incorrect result")
return test_caching
@classmethod
def check_objmode_cache_ndarray_check_cache(cls):
disp = cls.check_objmode_cache_ndarray()
if len(disp.stats.cache_misses) != 0:
raise AssertionError('unexpected cache miss')
if len(disp.stats.cache_hits) <= 0:
raise AssertionError("unexpected missing cache hit")
def test_check_objmode_cache_ndarray(self):
# See issue #6130.
# Env is missing after cache load.
cache_dir = temp_directory(self.__class__.__name__)
with override_config("CACHE_DIR", cache_dir):
# Test in local process to populate the cache.
self.check_objmode_cache_ndarray()
# Run in new process to use the cache in a fresh process.
res = run_in_new_process_in_cache_dir(
self.check_objmode_cache_ndarray_check_cache, cache_dir
)
self.assertEqual(res['exitcode'], 0)
class TestMisc(TestCase):
def test_is_jitted(self):
def foo(x):
pass
self.assertFalse(is_jitted(foo))
self.assertTrue(is_jitted(njit(foo)))
self.assertFalse(is_jitted(vectorize(foo)))
self.assertFalse(is_jitted(vectorize(parallel=True)(foo)))
self.assertFalse(
is_jitted(guvectorize("void(float64[:])", "(m)")(foo))
)
class TestOverloadPreferLiteral(TestCase):
def test_overload(self):
def prefer_lit(x):
pass
def non_lit(x):
pass
def ov(x):
if isinstance(x, types.IntegerLiteral):
# With prefer_literal=False, this branch will not be reached.
if x.literal_value == 1:
def impl(x):
return 0xcafe
return impl
else:
raise errors.TypingError('literal value')
else:
def impl(x):
return x * 100
return impl
overload(prefer_lit, prefer_literal=True)(ov)
overload(non_lit)(ov)
@njit
def check_prefer_lit(x):
return prefer_lit(1), prefer_lit(2), prefer_lit(x)
a, b, c = check_prefer_lit(3)
self.assertEqual(a, 0xcafe)
self.assertEqual(b, 200)
self.assertEqual(c, 300)
@njit
def check_non_lit(x):
return non_lit(1), non_lit(2), non_lit(x)
a, b, c = check_non_lit(3)
self.assertEqual(a, 100)
self.assertEqual(b, 200)
self.assertEqual(c, 300)
def test_overload_method(self):
def ov(self, x):
if isinstance(x, types.IntegerLiteral):
# With prefer_literal=False, this branch will not be reached.
if x.literal_value == 1:
def impl(self, x):
return 0xcafe
return impl
else:
raise errors.TypingError('literal value')
else:
def impl(self, x):
return x * 100
return impl
overload_method(
MyDummyType, "method_prefer_literal",
prefer_literal=True,
)(ov)
overload_method(
MyDummyType, "method_non_literal",
prefer_literal=False,
)(ov)
@njit
def check_prefer_lit(dummy, x):
return (
dummy.method_prefer_literal(1),
dummy.method_prefer_literal(2),
dummy.method_prefer_literal(x),
)
a, b, c = check_prefer_lit(MyDummy(), 3)
self.assertEqual(a, 0xcafe)
self.assertEqual(b, 200)
self.assertEqual(c, 300)
@njit
def check_non_lit(dummy, x):
return (
dummy.method_non_literal(1),
dummy.method_non_literal(2),
dummy.method_non_literal(x),
)
a, b, c = check_non_lit(MyDummy(), 3)
self.assertEqual(a, 100)
self.assertEqual(b, 200)
self.assertEqual(c, 300)
if __name__ == "__main__":
unittest.main()
|
[
"[email protected]"
] | |
e0ae885a4a52601d7b84938cb69ac26206faec5d
|
d9af28bb1f9eb67300f1dde7942f9b536cbcb1b8
|
/232.py
|
9b04d35b284fdf7b16016d547f5b8bccb6c278ae
|
[] |
no_license
|
khanjason/leetcode
|
c78d415fd5794ffb7d15f68b8e2dc89b367317ab
|
d2c0b41bdd181bee999922be820d6ce16312b7ae
|
refs/heads/master
| 2021-12-26T17:28:32.706678 | 2021-09-09T13:56:09 | 2021-09-09T13:56:09 | 235,096,416 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 941 |
py
|
class MyQueue:
    def __init__(self):
        """
        Initialize your data structure here.
        """
        # The front of the queue lives at the END of the list, so push
        # prepends (O(n)) while pop/peek work on the tail in O(1).
        self.q = []
    def push(self, x: int) -> None:
        """
        Push element x to the back of queue.
        """
        self.q = [x] + self.q
    def pop(self) -> int:
        """
        Removes the element from in front of queue and returns that element.
        """
        return self.q.pop()
    def peek(self) -> int:
        """
        Get the front element.
        """
        return self.q[-1]
    def empty(self) -> bool:
        """
        Returns whether the queue is empty.
        """
        return not self.q
# Your MyQueue object will be instantiated and called as such:
# obj = MyQueue()
# obj.push(x)
# param_2 = obj.pop()
# param_3 = obj.peek()
# param_4 = obj.empty()
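# Illustrative trace (added example; the values are assumed, FIFO order shown):
# obj = MyQueue()
# obj.push(1); obj.push(2)
# obj.peek()   # -> 1  (front of the queue)
# obj.pop()    # -> 1
# obj.empty()  # -> False
# Design note: this list-based version pays O(n) per push; the classic
# two-stack variant achieves amortized O(1) per operation instead.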
|
[
"[email protected]"
] | |
3f3975822de1286c6ce6e3a90f622ab86cdb0abb
|
c9952dcac5658940508ddc139344a7243a591c87
|
/tests/lab07/test_ch07_t03_getting_there.py
|
78b92c4a065dbc622993d8e2f3a40b16d74c4cdc
|
[] |
no_license
|
wongcyrus/ite3101_introduction_to_programming
|
5da1c15212528423b3df91997327fe148abef4de
|
7cd76d0861d5355db5a6e2e171735bee2e78f829
|
refs/heads/master
| 2023-08-31T17:27:06.193049 | 2023-08-21T08:30:26 | 2023-08-21T08:30:26 | 136,574,036 | 3 | 2 | null | 2023-08-21T08:30:28 | 2018-06-08T06:06:49 |
Python
|
UTF-8
|
Python
| false | false | 925 |
py
|
import types
import unittest
from tests.unit_test_helper.console_test_helper import *
from tests.unit_test_helper import is_answer
class TestOutput(unittest.TestCase):
def test(self):
if is_answer:
from lab.lab07.ch07_t03_getting_there_ans import plane_ride_cost
else:
from lab.lab07.ch07_t03_getting_there import plane_ride_cost
temp_globals, temp_locals, content, output = execfile("lab07/ch07_t03_getting_there.py")
print(temp_locals)
self.assertIsInstance(temp_locals['plane_ride_cost'], types.FunctionType)
self.assertEqual(183, plane_ride_cost("Charlotte"))
self.assertEqual(220, plane_ride_cost("Tampa"))
self.assertEqual(222, plane_ride_cost("Pittsburgh"))
self.assertEqual(475, plane_ride_cost("Los Angeles"))
self.assertEqual(None, plane_ride_cost(""))
if __name__ == '__main__':
unittest.main()
|
[
"[email protected]"
] | |
c859fcb34b407a626bbd148d0aa71ece70d0cff6
|
e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f
|
/indices/proven.py
|
12c5c1c4aed443fb0ba2a6dc608615d906cad3ba
|
[] |
no_license
|
psdh/WhatsintheVector
|
e8aabacc054a88b4cb25303548980af9a10c12a8
|
a24168d068d9c69dc7a0fd13f606c080ae82e2a6
|
refs/heads/master
| 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 |
Python
|
UTF-8
|
Python
| false | false | 104 |
py
|
ii = [('ChalTPW2.py', 1), ('LeakWTI4.py', 1), ('WheeJPT.py', 19), ('HallFAC.py', 2), ('MackCNH2.py', 2)]
|
[
"[email protected]"
] | |
b994d486bed5ada6105bd8330aec492f5aa07a05
|
32e01bc8b5883896a58bdb3c2443028cf61484cf
|
/RaptAutomation/Test_UI/K8s_SingleUser_SingleGpu/test_17_s3_image_classification_auto.py
|
44fad6b0452a6b74f4b82cc09e1338151c053a9c
|
[] |
no_license
|
Idur69/RaptAutmationWin
|
df8a55a6c60467ea7d048b7a6856263d5624643c
|
1f4faffa75a9cc5bf72c20baec3893d611b9d565
|
refs/heads/master
| 2022-12-03T21:01:38.841645 | 2020-08-30T08:39:43 | 2020-08-30T08:39:43 | 291,431,583 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,534 |
py
|
from time import sleep
from selenium.webdriver.common.by import By
from selenium.webdriver.support.select import Select
#from Src.EnvSetup.EnvironmentSetUp import EnvironmentSetup
from Src.EnvSetup.cnfgurl import LoginUsers, Paths, EnvironmentSetup, Memory_and_Core_Percentages
from Src.PageObject.Pages.Admin_Login import AdminLogin
from Src.PageObject.Pages.MyUrl import Myurl
from Test_UI.TestUtility.ScreenShots import SS
class Kubernetes_Ui_S3_Image_Auto(EnvironmentSetup):
def test_s3_image_auto(self):
# Second browser driver
driver1 = self.driver1
myurl = Myurl(self.driver1)
myurl.access_url()
driver1.implicitly_wait(10)
print("This is Title name :", driver1.title)
# ScreenShot Relative Path
ss_path = '/K8s_UI/'
# Creating object of screenshot utility
ss = SS(driver1)
# ------- Login Details ------------
user = LoginUsers()
Admin = user.user1_name
Pwd = user.user1_password
expadmin = user.user1_expadmin
exppass = user.user1_exppass
# ------ S3 bucket ------------
paths = Paths()
bkt_name = paths.Bucket_name
bkt_keys = paths.Bucket_keys
# -------flower path -----------
flower_path = paths.S3_Image_clf_path
admin_login = AdminLogin(driver1)
admin_login.set_login_uname(Admin)
admin_login.set_login_upass(Pwd)
sleep(3)
# sub = self.driver1.find_element_by_tag_name("button")
# sub.click()
# setcookie = pickle.dump(self.driver1.get_cookies(), open("cookies.pkl", "wb"))
# print("setcookievalue :", setcookie)
admin_login.submit_login(Admin, Pwd)
sleep(5)
if Admin == expadmin and Pwd == exppass:
print("Login successful")
else:
            raise AssertionError("Invalid credentials")
print("************ Image Auto *****************")
# --------Frame work--------------
# f = self.driver1.find_element_by_class_name("f-image mxnet text-center")
f = self.driver1.find_element_by_xpath("//img[@src='/images/tenserflow.png']")
f.click()
print("Selected Tensorflow")
sleep(2)
# --------if you have compound class name you should write like this-----------
inception = self.driver1.find_element_by_xpath("//*[@class='card-body text-center font-weight-normal btnNext']")
inception.click()
sleep(1)
print("Selected Inception")
# -----------S3 bucket ---------------
s3 = self.driver1.find_element(By.ID, 'r1')
s3.click()
sleep(1)
bucketname = self.driver1.find_element(By.ID, 'bkt_name')
bucketname.send_keys(bkt_name)
sleep(2)
bucketkeys = self.driver1.find_element(By.ID, 'bkt_keys')
bucketkeys.send_keys(bkt_keys)
sleep(2)
# ----------GPU Auto --------
gpu = self.driver1.find_element(By.ID, 'r4')
gpu.click()
sleep(2)
auto = self.driver1.find_element_by_id("r101")
auto.click()
sleep(2)
# ------Screenshot-1-----------
ss.ScreenShot(ss_path + "test_17_s3_image_auto_setupscreen.png")
# -------------------- setup btn -----------------
setupbtn = self.driver1.find_element(By.ID, 'setupbtn')
setupbtn.click()
sleep(24)
# -------Datsets & Training ----------------
traindir = self.driver1.find_element(By.ID, 'traindirectory')
trdirectory = Select(traindir)
trdirectory.select_by_visible_text("flower_classification")
sleep(2)
trinfile = self.driver1.find_element(By.ID, 'file_name')
trfile = Select(trinfile)
trfile.select_by_visible_text("retrain-new.py")
sleep(2)
# --------- Train --------------------
train = self.driver1.find_element_by_xpath("//a[@href='#train']")
train.click()
textpath = self.driver1.find_element_by_id("textVal")
textpath.clear()
textpath.send_keys(flower_path)
sleep(2)
Train = self.driver1.find_element(By.ID, 'train_id')
Train.click()
sleep(100)
gpuTime = driver1.find_elements_by_id("gputime")
for GpuUsage in gpuTime:
assert isinstance(GpuUsage.text, object)
print("Gpu Usage : ", str(GpuUsage.text))
sleep(240)
# --------Elapsed Time -------------------
myElem = self.driver1.find_element_by_id("elapsedTime")
myElem.click()
sleep(1)
# --------Screen shot-2 -----------
ss.ScreenShot(ss_path + "test_17_s3_image_auto_auto_elapsedtime.png")
sleep(2)
assert isinstance(myElem.text, object)
print("Image classi fication Auto -", str(myElem.text))
for logs in driver1.get_log('browser'):
print(logs)
# ---------Logout ----------------
self.driver1.find_element_by_id("navbarDropdownMenuLink").click()
logout = self.driver1.find_element_by_class_name("dropdown-item")
logout.click()
sleep(5)
for Logedout in self.driver1.find_elements_by_xpath("//*[@class='alert alert-success']"):
assert isinstance(Logedout.text, object)
print(str(Logedout.text))
|
[
"[email protected]"
] | |
a3c3c4e911bc0abc6a69bd4872fab46bdb146b9d
|
da4d071cb7d2b3f46a8e393859d6d745f8e4fa96
|
/single_sign_on/single_sign_on/settings.py
|
4785912b185a5a2f57f84ee3d7fa2a3a8501efc7
|
[] |
no_license
|
pnija/single_sign_on
|
59140395ac8bd451b719e4ad7627e33dc29a689a
|
145f007abbede74906faf4d5b37fd73254c31039
|
refs/heads/master
| 2022-12-17T01:46:26.482289 | 2018-10-01T14:31:21 | 2018-10-01T14:31:21 | 151,037,834 | 0 | 0 | null | 2022-05-25T00:50:13 | 2018-10-01T04:48:55 |
Python
|
UTF-8
|
Python
| false | false | 3,272 |
py
|
"""
Django settings for single_sign_on project.
Generated by 'django-admin startproject' using Django 2.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '&4#s39l5uok=!h37vbhd$r*ap&erj$%x&@gor!7uasoj$d%z$h'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# SITE_ID = 1
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# 'django.contrib.sites',
'accounts'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'single_sign_on.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': ['templates'],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'single_sign_on.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
JWT_SECRET = '123456789'
LOGIN_REDIRECT = '/'
LOGOUT_REDIRECT = '/accounts/success/'
|
[
"[email protected]"
] | |
226ebe40297fd07a670ccb4b161a84312e59cd5f
|
be471cdee10e2273ce41631c4a58f16227f18b5b
|
/virtual/walt/virtual/setup/__init__.py
|
897f6407a59976eac4f37043ca18e8730ef3640e
|
[
"BSD-3-Clause"
] |
permissive
|
dia38/walt-python-packages
|
d91d477c90dbc4bd134fdcc31d7cb404ef9885b8
|
e6fa1f166f45e73173195d57840d22bef87b88f5
|
refs/heads/master
| 2020-04-29T17:41:19.936575 | 2019-11-26T10:11:58 | 2019-11-26T10:11:58 | 176,303,546 | 0 | 0 |
BSD-3-Clause
| 2019-03-18T14:27:56 | 2019-03-18T14:27:56 | null |
UTF-8
|
Python
| false | false | 1,056 |
py
|
#!/usr/bin/env python
import os, sys
from walt.common.tools import failsafe_symlink
from pkg_resources import resource_string
from pathlib import Path
SYSTEMD_SERVICE_FILES = [ "walt-vpn-server.service", "walt-vpn-server.socket" ]
SYSTEMD_SERVICES_DIR = Path("/etc/systemd/system")
def run():
if os.geteuid() != 0:
sys.exit("This script must be run as root. Exiting.")
for filename in SYSTEMD_SERVICE_FILES:
service_file_content = resource_string(__name__, filename)
service_file_path = SYSTEMD_SERVICES_DIR / filename
if service_file_path.exists():
sys.exit('Virtual tools are already setup. Exiting.')
service_file_path.write_bytes(service_file_content)
if filename.endswith('.socket'):
# the following is the same as running 'systemctl enable <unit>.socket'
# on a system that is really running
failsafe_symlink(str(service_file_path),
str(SYSTEMD_SERVICES_DIR / "sockets.target.wants" / filename))
print('Done.')
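# Equivalent manual activation on a live systemd host (illustrative only;
# the unit name comes from SYSTEMD_SERVICE_FILES above):
#     systemctl enable --now walt-vpn-server.socket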
|
[
"[email protected]"
] | |
1b4bf67859e3d5788fe48c327bd779ca472b00f8
|
fb49051e2cb4b9865fdde761a5892b46f5815a5c
|
/tutorial1/app/permissions.py
|
d3f8296625624a6a9e3fd99185f9ba5bb3b23814
|
[] |
no_license
|
ashish1sasmal/Django-REST-Framework
|
0bb810e17c716750faac676218bdd44847891b52
|
ca8caa9b38d86086e78a28cfad2ca9cf0216cce1
|
refs/heads/master
| 2023-02-06T09:09:56.582366 | 2020-12-27T17:37:44 | 2020-12-27T17:37:44 | 321,983,890 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 387 |
py
|
from rest_framework import permissions
class IsOwnerOrReadOnly(permissions.BasePermission):
def has_object_permission(self, request, view, obj):
# Read permissions are allowed to any request,
# so we'll always allow GET, HEAD or OPTIONS requests.
if request.method in permissions.SAFE_METHODS:
return True
return obj.user == request.user
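# Typical wiring into a view (illustrative sketch; the view name is assumed,
# the generics/permissions classes are standard DRF):
#
#     from rest_framework import generics, permissions
#
#     class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
#         permission_classes = [permissions.IsAuthenticatedOrReadOnly,
#                               IsOwnerOrReadOnly]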
|
[
"[email protected]"
] | |
d0db361dd3d49dc29a842821913c2f0e8337c048
|
c20a7a651e63c1e7b1c5e6b5c65c8150898bbaf2
|
/OJ/LeetCode/63. Unique Paths II.py
|
face5e92689a3a4bc7c6ff313028f2f885493362
|
[] |
no_license
|
Nobody0321/MyCodes
|
08dbc878ae1badf82afaf0c9fc608b70dfce5cea
|
b60e2b7a8f2ad604c7d28b21498991da60066dc3
|
refs/heads/master
| 2023-08-19T14:34:23.169792 | 2023-08-15T15:50:24 | 2023-08-15T15:50:24 | 175,770,050 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 982 |
py
|
# Approach: same DP as Unique Paths I, but a cell holding an obstacle contributes zero paths.
class Solution:
def uniquePathsWithObstacles(self, obstacleGrid):
if obstacleGrid == []:
return None
height = len(obstacleGrid)
width = len(obstacleGrid[0])
dp = [[0] * width for _ in range(height)]
for i in range(height):
if obstacleGrid[i][0]:
break
else:
dp[i][0] = 1
for i in range(width):
if obstacleGrid[0][i]:
break
else:
dp[0][i] = 1
for i in range(1,height):
for j in range(1,width):
if obstacleGrid[i][j]:
dp[i][j] = 0
else:
dp[i][j] = dp[i-1][j] + dp[i][j-1]
return dp[height-1][width-1]
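# Recurrence used above: dp[i][j] = dp[i-1][j] + dp[i][j-1] for free cells,
# 0 for obstacle cells; O(m*n) time and space for an m x n grid.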
if __name__ == "__main__":
grid = [[0,0,0], [0,1,0], [0,0,0]]
print(Solution().uniquePathsWithObstacles(grid))
|
[
"[email protected]"
] | |
8feff23b081e85387612f08d06a1a23ad765ae10
|
53fab060fa262e5d5026e0807d93c75fb81e67b9
|
/gaussiana/ch3_2019_03_01_18_32_46_384807.py
|
ae243968d9d574a99e555abaf06c22567dccc2a8
|
[] |
no_license
|
gabriellaec/desoft-analise-exercicios
|
b77c6999424c5ce7e44086a12589a0ad43d6adca
|
01940ab0897aa6005764fc220b900e4d6161d36b
|
refs/heads/main
| 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 173 |
py
|
import math
def calcula_gaussiana(x, mi, sigma):
    # Gaussian pdf: (1/(sigma*sqrt(2*pi))) * exp(-0.5*((x-mi)/sigma)**2)
    coef = 1/(sigma*(2*math.pi)**(1/2))
    expo = math.exp(-0.5*((x-mi)/sigma)**2)
    f_x_mi_sigma = coef*expo
    return f_x_mi_sigma
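# Illustrative check (added sketch, not part of the original exercise): the
# standard normal density (mi=0, sigma=1) peaks at 1/sqrt(2*pi).
if __name__ == "__main__":
    print(calcula_gaussiana(0, 0, 1))  # expected ~0.3989422804014327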
|
[
"[email protected]"
] | |
bdc220e948745490edc4cbef2ed6b46407352b76
|
45df3588d0ec1a2bd7dbe4af104a49aa5775d034
|
/login/migrations/0007_auto_20150704_1334.py
|
5005920a9630a6b47a59b80425c6dab87ba3debd
|
[] |
no_license
|
wittawin/DB_Project
|
043db7eb3d70ef32c9c97d51a242775b3e115f73
|
1cc1fe84c75906d670f7bb4dd130093bc15035b8
|
refs/heads/master
| 2020-04-06T03:43:21.516583 | 2015-07-13T05:47:09 | 2015-07-13T05:47:09 | 37,700,817 | 0 | 1 | null | 2015-06-19T04:06:07 | 2015-06-19T04:06:06 |
JavaScript
|
UTF-8
|
Python
| false | false | 1,546 |
py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('login', '0006_auto_20150704_0050'),
]
operations = [
migrations.AlterField(
model_name='student',
name='scheme',
field=models.CharField(max_length=1, choices=[(b'0', b'\xe0\xb8\xab\xe0\xb8\xa5\xe0\xb8\xb1\xe0\xb8\x81\xe0\xb8\xaa\xe0\xb8\xb9\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb1\xe0\xb8\x9a\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb8\xe0\xb8\x87 Cpr.E 54'), (b'1', b'\xe0\xb8\xab\xe0\xb8\xa5\xe0\xb8\xb1\xe0\xb8\x81\xe0\xb8\xaa\xe0\xb8\xb9\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb1\xe0\xb8\x9a\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb8\xe0\xb8\x87 EE 51'), (b'2', b'\xe0\xb8\xab\xe0\xb8\xa5\xe0\xb8\xb1\xe0\xb8\x81\xe0\xb8\xaa\xe0\xb8\xb9\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb1\xe0\xb8\x9a\xe0\xb8\x9b\xe0\xb8\xa3\xe0\xb8\xb8\xe0\xb8\x87 ECE 55'), (b'3', b'\xe0\xb8\xab\xe0\xb8\xa5\xe0\xb8\xb1\xe0\xb8\x81\xe0\xb8\xaa\xe0\xb8\xb9\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\xa1\xe0\xb8\xab\xe0\xb8\xb2\xe0\xb8\x9a\xe0\xb8\xb1\xe0\xb8\x93\xe0\xb8\x91\xe0\xb8\xb4\xe0\xb8\x95 55'), (b'4', b'\xe0\xb8\xab\xe0\xb8\xa5\xe0\xb8\xb1\xe0\xb8\x81\xe0\xb8\xaa\xe0\xb8\xb9\xe0\xb8\x95\xe0\xb8\xa3\xe0\xb8\x94\xe0\xb8\xb8\xe0\xb8\xa9\xe0\xb8\x8e\xe0\xb8\xb5\xe0\xb8\x9a\xe0\xb8\xb1\xe0\xb8\x93\xe0\xb8\x91\xe0\xb8\xb4\xe0\xb8\x95 55')]),
preserve_default=True,
),
]
|
[
"[email protected]"
] | |
bfd36c25bbcaa8e49bf606f5919191e197984ef1
|
243f64a13a1f496a08d05508ccf73c61e03a69de
|
/max_even_seq/subs/2017B/3.py
|
7de5e2179d22b93d5d7e963889ba0802ea33e9ee
|
[] |
no_license
|
RazLandau/pybryt
|
5f3e946a99338fb159d6044098380bce2aacdc6f
|
8973b15fc48d1f278e7b8a3990d2f73a3bffb128
|
refs/heads/main
| 2023-08-19T12:30:46.622088 | 2021-10-14T18:35:21 | 2021-10-14T18:35:21 | 350,324,993 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 207 |
py
|
def max_even_seq(n):
cnt=0
seq=0
for i in str(n):
if int(i)%2==0:
cnt=cnt+1
if seq<cnt:
seq=cnt
else:
cnt=0
return(seq)
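# Illustrative checks (added sketch, not part of the submission):
if __name__ == "__main__":
    print(max_even_seq(22345))  # 2 -- the leading run of even digits 2,2
    print(max_even_seq(13579))  # 0 -- no even digits at all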
|
[
"[email protected]"
] | |
92e836e1d5b811cf5b44ccf47321db9ceec18820
|
7c2e677d931a8eb7d7cffc6d54713411abbe83e4
|
/AppBuilder9000/AppBuilder9000/MovieUpapp/apps.py
|
c154b17ad65f00eca9d7444f3554a110a27b8616
|
[] |
no_license
|
r3bunker/Python_Live_Project
|
19e367b3cf74c2279c287fcd3a8a44a27f24041a
|
d3e06150d7daea6326cc1a4155309d99e4ff6244
|
refs/heads/main
| 2023-06-12T23:01:50.440371 | 2021-06-16T20:21:03 | 2021-06-16T20:21:03 | 344,883,966 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 95 |
py
|
from django.apps import AppConfig
class MovieupappConfig(AppConfig):
name = 'MovieUpapp'
|
[
"[email protected]"
] | |
ad82cdec2b606068c08bfc2d613f9ee8f4bf77c9
|
e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f
|
/indices/ism.py
|
260052b0c7fe2bd50488ddbcc28be15ea25f4185
|
[] |
no_license
|
psdh/WhatsintheVector
|
e8aabacc054a88b4cb25303548980af9a10c12a8
|
a24168d068d9c69dc7a0fd13f606c080ae82e2a6
|
refs/heads/master
| 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 |
Python
|
UTF-8
|
Python
| false | false | 470 |
py
|
ii = [('CookGHP3.py', 2), ('FerrSDO3.py', 1), ('ProuWCM.py', 1), ('CookGHP.py', 1), ('LeakWTI2.py', 3), ('FitzRNS3.py', 1), ('CarlTFR.py', 2), ('AinsWRR3.py', 1), ('CookGHP2.py', 1), ('MarrFDI2.py', 1), ('GilmCRS.py', 1), ('DaltJMA.py', 1), ('FerrSDO2.py', 1), ('NewmJLP.py', 1), ('SoutRD.py', 2), ('MereHHB3.py', 2), ('BabbCEM.py', 1), ('SomeMMH.py', 4), ('HaliTBC.py', 1), ('BrewDTO.py', 3), ('FitzRNS2.py', 1), ('SadlMLP2.py', 1), ('TaylIF.py', 1), ('KeigTSS.py', 1)]
|
[
"[email protected]"
] | |
ca31ba023e5d757237337eab00d32136418b0830
|
f09c8ecfc5b981b0ff5cd56c3ac20f5d86a9f91a
|
/nuitka/plugins/Plugins.py
|
2e84612c94ca8d7149ffb0f75830c35f7965ac50
|
[
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] |
permissive
|
Manjunath30/Nuitka
|
eb99518b14c43e9d3b0169408d2cdabf28862ffd
|
d280d1ba88dda10fbce27b6b295f773653c42e09
|
refs/heads/master
| 2023-09-04T10:00:22.982034 | 2021-10-28T14:15:36 | 2021-10-28T14:15:36 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 36,768 |
py
|
# Copyright 2021, Kay Hayen, mailto:[email protected]
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Plugins: Welcome to Nuitka! This is your shortest way to become part of it.
This is to provide the base class for all plug-ins. Some of which are part of
proper Nuitka, and some of which are waiting to be created and submitted for
inclusion by you.
The base class in PluginBase will serve as documentation of the available hooks.
"""
import inspect
import os
import shutil
from optparse import OptionConflictError, OptionGroup
import nuitka.plugins.commercial
import nuitka.plugins.standard
from nuitka import Options, OutputDirectories
from nuitka.__past__ import basestring # pylint: disable=I0021,redefined-builtin
from nuitka.__past__ import iter_modules
from nuitka.build.DataComposerInterface import deriveModuleConstantsBlobName
from nuitka.containers.odict import OrderedDict
from nuitka.containers.oset import OrderedSet
from nuitka.Errors import NuitkaPluginError
from nuitka.freezer.IncludedEntryPoints import makeDllEntryPointOld
from nuitka.ModuleRegistry import addUsedModule
from nuitka.Tracing import plugins_logger, printLine
from nuitka.utils.FileOperations import makePath, putTextFileContents, relpath
from nuitka.utils.Importing import importFileAsModule
from nuitka.utils.ModuleNames import ModuleName
from .PluginBase import NuitkaPluginBase, post_modules, pre_modules
# Maps plugin name to plugin instances.
active_plugins = OrderedDict()
plugin_name2plugin_classes = {}
plugin_options = {}
plugin_datatag2pluginclasses = {}
plugin_values = {}
user_plugins = OrderedSet()
def _addActivePlugin(plugin_class, args, force=False):
plugin_name = plugin_class.plugin_name
# No duplicates please.
if not force:
assert plugin_name not in active_plugins.keys(), (
plugin_name,
active_plugins[plugin_name],
)
if args:
plugin_args = getPluginOptions(plugin_name)
else:
plugin_args = {}
plugin_instance = plugin_class(**plugin_args)
assert isinstance(plugin_instance, NuitkaPluginBase), plugin_instance
active_plugins[plugin_name] = plugin_instance
def getActivePlugins():
"""Return list of active plugins.
Returns:
list of plugins
"""
return active_plugins.values()
def getActiveQtPlugin():
from .standard.PySidePyQtPlugin import getQtPluginNames
    for plugin_name in getQtPluginNames():
        if hasActivePlugin(plugin_name):
            return plugin_name
return None
def hasActivePlugin(plugin_name):
"""Decide if a plugin is active.
Args:
plugin_name - name of the plugin
Notes:
Detectors do not count as an active plugin and ignored.
Returns:
bool - plugin is loaded
"""
if plugin_name not in active_plugins:
return False
# Detectors do not count.
plugin_instance = active_plugins.get(plugin_name)
return not hasattr(plugin_instance, "detector_for")
def getPluginClass(plugin_name):
# First, load plugin classes, to know what we are talking about.
loadPlugins()
# Backward compatibility.
plugin_name = Options.getPluginNameConsideringRenames(plugin_name)
if plugin_name not in plugin_name2plugin_classes:
plugins_logger.sysexit("Error, unknown plug-in '%s' referenced." % plugin_name)
return plugin_name2plugin_classes[plugin_name][0]
def _loadPluginClassesFromPath(scan_path):
for item in iter_modules(scan_path):
if item.ispkg:
continue
module_loader = item.module_finder.find_module(item.name)
# Ignore bytecode only left overs.
try:
if module_loader.get_filename().endswith(".pyc"):
continue
except AttributeError:
# Not a bytecode loader, but e.g. extension module, which is OK in case
# it was compiled with Nuitka.
pass
try:
plugin_module = module_loader.load_module(item.name)
except Exception:
if Options.is_nondebug:
plugins_logger.warning(
"Problem loading plugin %r (%s), ignored. Use --debug to make it visible."
% (item.name, module_loader.get_filename())
)
continue
raise
plugin_classes = set(
obj
for obj in plugin_module.__dict__.values()
if isObjectAUserPluginBaseClass(obj)
)
detectors = [
plugin_class
for plugin_class in plugin_classes
if hasattr(plugin_class, "detector_for")
]
for detector in detectors:
plugin_class = detector.detector_for
assert detector.plugin_name is None, detector
detector.plugin_name = plugin_class.plugin_name
if plugin_class not in plugin_classes:
plugins_logger.sysexit(
"Plugin detector %r references unknown plugin %r"
% (detector, plugin_class)
)
plugin_classes.remove(detector)
plugin_classes.remove(plugin_class)
plugin_name2plugin_classes[plugin_class.plugin_name] = (
plugin_class,
detector,
)
for plugin_class in plugin_classes:
plugin_name2plugin_classes[plugin_class.plugin_name] = plugin_class, None
def loadStandardPluginClasses():
"""Load plugin files located in 'standard' folder.
Notes:
Scan through the 'standard' and 'commercial' sub-folder of the folder
where this module resides. Import each valid Python module (but not
packages) and process it as a plugin.
Returns:
None
"""
_loadPluginClassesFromPath(nuitka.plugins.standard.__path__)
_loadPluginClassesFromPath(nuitka.plugins.commercial.__path__)
class Plugins(object):
implicit_imports_cache = {}
@staticmethod
def _considerImplicitImports(plugin, module):
from nuitka.importing import Importing
result = []
for full_name in plugin.getImplicitImports(module):
if type(full_name) in (tuple, list):
raise NuitkaPluginError(
"Plugin %r needs to be change to only return modules names, not %r"
% (plugin, full_name)
)
full_name = ModuleName(full_name)
try:
_module_package, module_filename, _finding = Importing.findModule(
importing=module,
module_name=full_name,
parent_package=None,
level=-1,
warn=False,
)
module_filename = plugin.locateModule(
importing=module, module_name=full_name
)
except Exception:
plugin.warning(
"Problem locating '%s' for implicit imports of '%s'."
% (module.getFullName(), full_name)
)
raise
if module_filename is None:
if Options.isShowInclusion():
plugin.info(
"Implicit module '%s' suggested for '%s' not found."
% (full_name, module.getFullName())
)
continue
result.append((full_name, module_filename))
if result:
plugin.info(
"Implicit dependencies of module '%s' added '%s'."
% (module.getFullName(), ",".join(r[0] for r in result))
)
return result
@staticmethod
def _reportImplicitImports(implicit_imports, signal_change):
from nuitka.importing import Recursion
from nuitka.importing.Importing import getModuleNameAndKindFromFilename
for full_name, module_filename in implicit_imports:
_module_name2, module_kind = getModuleNameAndKindFromFilename(
module_filename
)
# This will get back to all other plugins allowing them to inhibit it though.
decision, reason = Recursion.decideRecursion(
module_filename=module_filename,
module_name=full_name,
module_kind=module_kind,
)
if decision:
imported_module = Recursion.recurseTo(
signal_change=signal_change,
module_package=full_name.getPackageName(),
module_filename=module_filename,
module_relpath=relpath(module_filename),
module_kind=module_kind,
reason=reason,
)
addUsedModule(imported_module)
@classmethod
def considerImplicitImports(cls, module, signal_change):
for plugin in getActivePlugins():
key = (module.getFullName(), plugin)
if key not in cls.implicit_imports_cache:
cls.implicit_imports_cache[key] = tuple(
cls._considerImplicitImports(plugin=plugin, module=module)
)
cls._reportImplicitImports(
implicit_imports=cls.implicit_imports_cache[key],
signal_change=signal_change,
)
# Pre and post load code may have been created, if so indicate it's used.
full_name = module.getFullName()
if full_name in pre_modules:
addUsedModule(pre_modules[full_name])
if full_name in post_modules:
addUsedModule(post_modules[full_name])
@staticmethod
def onStandaloneDistributionFinished(dist_dir):
"""Let plugins postprocess the distribution folder in standalone mode"""
for plugin in getActivePlugins():
plugin.onStandaloneDistributionFinished(dist_dir)
@staticmethod
def onOnefileFinished(filename):
"""Let plugins postprocess the onefile executable in onefile mode"""
for plugin in getActivePlugins():
            plugin.onOnefileFinished(filename)
@staticmethod
def onFinalResult(filename):
"""Let plugins add to final binary in some way"""
for plugin in getActivePlugins():
plugin.onFinalResult(filename)
@staticmethod
def considerExtraDlls(dist_dir, module):
"""Ask plugins to provide extra DLLs.
Notes:
These will be of type nuitka.freezer.IncludedEntryPoints.IncludedEntryPoint
and currently there is a backward compatibility for old style plugins that do
provide tuples of 3 elements. But plugins are really supposed to provide the
stuff created from factory functions for that type.
"""
result = []
for plugin in getActivePlugins():
for extra_dll in plugin.considerExtraDlls(dist_dir, module):
# Backward compatibility with plugins not yet migrated to getExtraDlls usage.
if len(extra_dll) == 3:
extra_dll = makeDllEntryPointOld(
source_path=extra_dll[0],
dest_path=extra_dll[1],
package_name=extra_dll[2],
)
if not os.path.isfile(extra_dll.dest_path):
plugin.sysexit(
"Error, copied filename %r for module %r that is not a file."
% (extra_dll.dest_path, module.getFullName())
)
else:
if not os.path.isfile(extra_dll.source_path):
plugin.sysexit(
"Error, attempting to copy plugin determined filename %r for module %r that is not a file."
% (extra_dll.source_path, module.getFullName())
)
makePath(os.path.dirname(extra_dll.dest_path))
shutil.copyfile(extra_dll.source_path, extra_dll.dest_path)
result.append(extra_dll)
return result
@staticmethod
def getModuleSpecificDllPaths(module_name):
"""Provide a list of directories, where DLLs should be searched for this package (or module).
Args:
module_name: name of a package or module, for which the DLL path addition applies.
"""
result = OrderedSet()
for plugin in getActivePlugins():
for dll_path in plugin.getModuleSpecificDllPaths(module_name):
result.add(dll_path)
return result
@staticmethod
def removeDllDependencies(dll_filename, dll_filenames):
"""Create list of removable shared libraries by scanning through the plugins.
Args:
dll_filename: shared library filename
dll_filenames: list of shared library filenames
Returns:
list of removable files
"""
dll_filenames = tuple(sorted(dll_filenames))
to_remove = OrderedSet()
for plugin in getActivePlugins():
removed_dlls = tuple(
plugin.removeDllDependencies(dll_filename, dll_filenames)
)
if removed_dlls and Options.isShowInclusion():
plugin.info(
"Removing DLLs %s of %s by plugin decision."
% (dll_filename, removed_dlls)
)
for removed_dll in removed_dlls:
to_remove.add(removed_dll)
for removed in to_remove:
dll_filenames.discard(removed)
@staticmethod
def considerDataFiles(module):
"""For a given module, ask plugins for any needed data files it may require.
Args:
module: module object
Yields:
Data file description pairs, either (source, dest) or (func, dest)
where the func will be called to create the content dynamically.
"""
for plugin in getActivePlugins():
for value in plugin.considerDataFiles(module):
if value:
yield plugin, value
@classmethod
def _createTriggerLoadedModule(cls, module, trigger_name, code, flags):
"""Create a "trigger" for a module to be imported.
Notes:
The trigger will incorporate the code to be prepended / appended.
Called by @onModuleDiscovered.
Args:
module: the module object (serves as dict key)
trigger_name: string ("-preload"/"-postload")
code: the code string
Returns
trigger_module
"""
from nuitka.tree.Building import buildModule
module_name = ModuleName(module.getFullName() + trigger_name)
# In debug mode, put the files in the build folder, so they can be looked up easily.
if Options.is_debug and "HIDE_SOURCE" not in flags:
source_path = os.path.join(
OutputDirectories.getSourceDirectoryPath(), module_name + ".py"
)
putTextFileContents(filename=source_path, contents=code)
try:
trigger_module, _added = buildModule(
module_filename=os.path.join(
os.path.dirname(module.getCompileTimeFilename()),
module_name.asPath() + ".py",
),
module_package=module_name.getPackageName(),
source_code=code,
is_top=False,
is_main=False,
is_shlib=False,
is_fake=module_name,
hide_syntax_error=False,
)
except SyntaxError:
plugins_logger.sysexit(
"SyntaxError in plugin provided source code for '%s'." % module_name
)
if trigger_module.getCompilationMode() == "bytecode":
trigger_module.setSourceCode(code)
return trigger_module
@classmethod
def onModuleDiscovered(cls, module):
full_name = module.getFullName()
def _untangleLoadDesc(descs):
if descs and inspect.isgenerator(descs):
descs = tuple(descs)
if descs:
if type(descs[0]) not in (tuple, list):
descs = [descs]
for desc in descs:
if len(desc) == 2:
code, reason = desc
flags = ()
else:
code, reason, flags = desc
if type(flags) is str:
flags = (flags,)
yield plugin, code, reason, flags
preload_descs = []
postload_descs = []
for plugin in getActivePlugins():
plugin.onModuleDiscovered(module)
preload_descs.extend(
_untangleLoadDesc(descs=plugin.createPreModuleLoadCode(module))
)
postload_descs.extend(
_untangleLoadDesc(descs=plugin.createPostModuleLoadCode(module))
)
if preload_descs:
total_code = []
total_flags = OrderedSet()
for plugin, pre_code, reason, flags in preload_descs:
if pre_code:
plugin.info(
"Injecting pre-module load code for module '%s':" % full_name
)
for line in reason.split("\n"):
plugin.info(" " + line)
total_code.append(pre_code)
total_flags.update(flags)
if total_code:
assert full_name not in pre_modules
pre_modules[full_name] = cls._createTriggerLoadedModule(
module=module,
trigger_name="-preLoad",
code="\n\n".join(total_code),
flags=total_flags,
)
if postload_descs:
total_code = []
total_flags = OrderedSet()
for plugin, post_code, reason, flags in postload_descs:
if post_code:
plugin.info(
"Injecting post-module load code for module '%s':" % full_name
)
for line in reason.split("\n"):
plugin.info(" " + line)
total_code.append(post_code)
total_flags.update(flags)
if total_code:
assert full_name not in post_modules
post_modules[full_name] = cls._createTriggerLoadedModule(
module=module,
trigger_name="-postLoad",
code="\n\n".join(total_code),
flags=total_flags,
)
@staticmethod
def onModuleSourceCode(module_name, source_code):
assert type(module_name) is ModuleName
assert type(source_code) is str
for plugin in getActivePlugins():
new_source_code = plugin.onModuleSourceCode(module_name, source_code)
if new_source_code is not None:
source_code = new_source_code
assert type(source_code) is str
return source_code
@staticmethod
def onFrozenModuleSourceCode(module_name, is_package, source_code):
assert type(module_name) is ModuleName
assert type(source_code) is str
for plugin in getActivePlugins():
source_code = plugin.onFrozenModuleSourceCode(
module_name, is_package, source_code
)
assert type(source_code) is str
return source_code
@staticmethod
def onFrozenModuleBytecode(module_name, is_package, bytecode):
assert type(module_name) is ModuleName
assert bytecode.__class__.__name__ == "code"
for plugin in getActivePlugins():
bytecode = plugin.onFrozenModuleBytecode(module_name, is_package, bytecode)
assert bytecode.__class__.__name__ == "code"
return bytecode
@staticmethod
def onModuleEncounter(module_filename, module_name, module_kind):
result = None
for plugin in getActivePlugins():
must_recurse = plugin.onModuleEncounter(
module_filename, module_name, module_kind
)
if must_recurse is None:
continue
if type(must_recurse) is not tuple and must_recurse not in (True, False):
plugin.sysexit(
"Error, onModuleEncounter code failed to return a None or tuple(bool, reason) result."
)
if result is not None:
# false alarm, pylint: disable=unsubscriptable-object
assert result[0] == must_recurse[0]
result = must_recurse
return result
@staticmethod
def onModuleInitialSet():
from nuitka.ModuleRegistry import addRootModule
for plugin in getActivePlugins():
for module in plugin.onModuleInitialSet():
addRootModule(module)
@staticmethod
def considerFailedImportReferrals(module_name):
for plugin in getActivePlugins():
new_module_name = plugin.considerFailedImportReferrals(module_name)
if new_module_name is not None:
return ModuleName(new_module_name)
return None
@staticmethod
def suppressUnknownImportWarning(importing, module_name):
"""Let plugins decide whether to suppress import warnings for an unknown module.
Notes:
If all plugins return False or None, the return will be False, else True.
Args:
importing: the module which is importing "module_name"
module_name: the module to be imported
returns:
True or False (default)
"""
if importing.isCompiledPythonModule() or importing.isPythonShlibModule():
importing_module = importing
else:
importing_module = importing.getParentModule()
source_ref = importing.getSourceReference()
for plugin in getActivePlugins():
if plugin.suppressUnknownImportWarning(
importing_module, module_name, source_ref
):
return True
return False
@staticmethod
def decideCompilation(module_name, source_ref):
"""Let plugins decide whether to C compile a module or include as bytecode.
Notes:
The decision is made by the first plugin not returning None.
Returns:
"compiled" (default) or "bytecode".
"""
for plugin in getActivePlugins():
value = plugin.decideCompilation(module_name, source_ref)
if value is not None:
assert value in ("compiled", "bytecode")
return value
return "compiled"
preprocessor_symbols = None
@classmethod
def getPreprocessorSymbols(cls):
"""Let plugins provide C defines to be used in compilation.
Notes:
The plugins can each contribute, but are hopefully using
a namespace for their defines.
Returns:
OrderedDict(), where None value indicates no define value,
i.e. "-Dkey=value" vs. "-Dkey"
"""
if cls.preprocessor_symbols is None:
cls.preprocessor_symbols = OrderedDict()
for plugin in getActivePlugins():
value = plugin.getPreprocessorSymbols()
if value is not None:
assert type(value) is dict, value
                    # We order per plugin, but from the plugins, let's just take a dict
                    # and achieve determinism by ordering the defines by name.
for key, value in sorted(value.items()):
# False alarm, pylint: disable=I0021,unsupported-assignment-operation
cls.preprocessor_symbols[key] = value
return cls.preprocessor_symbols
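    # Illustrative plugin return value (assumed names, not a shipped plugin):
    # {"MY_FEATURE": None, "MY_LEVEL": "2"} becomes "-DMY_FEATURE -DMY_LEVEL=2".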
@staticmethod
def getExtraCodeFiles():
result = OrderedDict()
for plugin in getActivePlugins():
value = plugin.getExtraCodeFiles()
if value is not None:
assert type(value) is dict
                # We order per plugin, but from the plugins, let's just take a dict
                # and achieve determinism by ordering the files by name.
for key, value in sorted(value.items()):
if not key.startswith("nuitka_"):
key = "plugin." + plugin.plugin_name + "." + key
assert key not in result, key
result[key] = value
return result
extra_link_libraries = None
@classmethod
def getExtraLinkLibraries(cls):
if cls.extra_link_libraries is None:
cls.extra_link_libraries = OrderedSet()
for plugin in getActivePlugins():
value = plugin.getExtraLinkLibraries()
if value is not None:
if isinstance(value, basestring):
cls.extra_link_libraries.add(value)
else:
for library_name in value:
cls.extra_link_libraries.add(library_name)
return cls.extra_link_libraries
@classmethod
def onDataComposerResult(cls, blob_filename):
for plugin in getActivePlugins():
plugin.onDataComposerResult(blob_filename)
@classmethod
def deriveModuleConstantsBlobName(cls, data_filename):
result = deriveModuleConstantsBlobName(data_filename)
return cls.encodeDataComposerName(result)
@classmethod
def encodeDataComposerName(cls, name):
if str is not bytes:
# Encoding needs to match generated source code output.
name = name.encode("latin1")
for plugin in getActivePlugins():
r = plugin.encodeDataComposerName(name)
if r is not None:
name = r
break
return name
@classmethod
def onFunctionAssignmentParsed(cls, function_body, assign_node):
module_name = function_body.getParentModule().getFullName()
for plugin in getActivePlugins():
plugin.onFunctionAssignmentParsed(
module_name=module_name,
function_body=function_body,
assign_node=assign_node,
)
def listPlugins():
"""Print available standard plugins."""
loadPlugins()
printLine("The following plugins are available in Nuitka".center(80))
printLine("-" * 80)
plist = []
name_len = 0
for plugin_name in sorted(plugin_name2plugin_classes):
plugin = plugin_name2plugin_classes[plugin_name][0]
if hasattr(plugin, "plugin_desc"):
plist.append((plugin_name, plugin.plugin_desc))
else:
plist.append((plugin_name, ""))
name_len = max(len(plugin_name) + 1, name_len)
for line in plist:
printLine(" " + line[0].ljust(name_len), line[1])
def isObjectAUserPluginBaseClass(obj):
"""Verify that a user plugin inherits from UserPluginBase."""
try:
return (
obj is not NuitkaPluginBase
and issubclass(obj, NuitkaPluginBase)
and not inspect.isabstract(obj)
)
except TypeError:
return False
def loadUserPlugin(plugin_filename):
"""Load of a user plugins and store them in list of active plugins.
Notes:
A plugin is accepted only if it has a non-empty variable plugin_name, which
does not equal that of a disabled (standard) plugin.
Supports plugin option specifications.
Returns:
None
"""
if not os.path.exists(plugin_filename):
plugins_logger.sysexit("Error, cannot find '%s'." % plugin_filename)
user_plugin_module = importFileAsModule(plugin_filename)
valid_file = False
plugin_class = None
for key in dir(user_plugin_module):
obj = getattr(user_plugin_module, key)
if not isObjectAUserPluginBaseClass(obj):
continue
plugin_name = getattr(obj, "plugin_name", None)
if plugin_name and plugin_name not in Options.getPluginsDisabled():
plugin_class = obj
valid_file = True
break # do not look for more in that module
if not valid_file: # this is not a plugin file ...
plugins_logger.sysexit("Error, '%s' is not a plugin file." % plugin_filename)
return plugin_class
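# A minimal user plugin file accepted here might look like this (illustrative
# sketch; the class and plugin_name values are assumptions):
#
#     from nuitka.plugins.PluginBase import NuitkaPluginBase
#
#     class MyUserPlugin(NuitkaPluginBase):
#         plugin_name = "my-user-plugin"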
_loaded_plugins = False
def loadPlugins():
"""Initialize plugin class
Notes:
Load user plugins provided as Python script file names, and standard
plugins via their class attribute 'plugin_name'.
Several checks are made, see the loader functions.
User plugins are enabled as a first step, because they themselves may
enable standard plugins.
Returns:
None
"""
# Singleton, called potentially multiple times, pylint: disable=global-statement
global _loaded_plugins
if not _loaded_plugins:
_loaded_plugins = True
# now enable standard plugins
loadStandardPluginClasses()
def activatePlugins():
"""Activate selected plugin classes
Args:
None
Notes:
This creates actual plugin instances, before only class objects were
used.
User plugins are activated as a first step, because they themselves may
enable standard plugins.
Returns:
None
"""
loadPlugins()
# ensure plugin is known and not both, enabled and disabled
for plugin_name in Options.getPluginsEnabled() + Options.getPluginsDisabled():
if plugin_name not in plugin_name2plugin_classes:
plugins_logger.sysexit(
"Error, unknown plug-in '%s' referenced." % plugin_name
)
if (
plugin_name in Options.getPluginsEnabled()
and plugin_name in Options.getPluginsDisabled()
):
plugins_logger.sysexit(
"Error, conflicting enable/disable of plug-in '%s'." % plugin_name
)
for (plugin_name, (plugin_class, plugin_detector)) in sorted(
plugin_name2plugin_classes.items()
):
if plugin_name in Options.getPluginsEnabled():
if plugin_class.isAlwaysEnabled():
plugin_class.warning("Plugin is defined as always enabled.")
if plugin_class.isRelevant():
_addActivePlugin(plugin_class, args=True)
else:
plugin_class.warning(
"Not relevant with this OS, or Nuitka arguments given, not activated."
)
elif plugin_name in Options.getPluginsDisabled():
pass
elif plugin_class.isAlwaysEnabled() and plugin_class.isRelevant():
_addActivePlugin(plugin_class, args=True)
elif (
plugin_detector is not None
and Options.shallDetectMissingPlugins()
and plugin_detector.isRelevant()
):
_addActivePlugin(plugin_detector, args=False)
for plugin_class in user_plugins:
_addActivePlugin(plugin_class, args=True)
def lateActivatePlugin(plugin_name, option_values):
"""Activate plugin after the command line parsing, expects options to be set."""
values = getPluginClass(plugin_name).getPluginDefaultOptionValues()
values.update(option_values)
setPluginOptions(plugin_name, values)
_addActivePlugin(getPluginClass(plugin_name), args=True, force=True)
def _addPluginCommandLineOptions(parser, plugin_class, data_files_tags):
plugin_name = plugin_class.plugin_name
if plugin_name not in plugin_options:
option_group = OptionGroup(parser, "Plugin %s" % plugin_name)
try:
plugin_class.addPluginCommandLineOptions(option_group)
except OptionConflictError as e:
for other_plugin_name, other_plugin_option_list in plugin_options.items():
for other_plugin_option in other_plugin_option_list:
# no public interface for that, pylint: disable=protected-access
if (
                        e.option_id in other_plugin_option._long_opts
                        or e.option_id in other_plugin_option._short_opts
):
plugins_logger.sysexit(
"Plugin '%s' failed to add options due to conflict with '%s' from plugin '%s."
% (plugin_name, e.option_id, other_plugin_name)
)
if option_group.option_list:
parser.add_option_group(option_group)
plugin_options[plugin_name] = option_group.option_list
else:
plugin_options[plugin_name] = ()
plugin_data_files_tags = plugin_class.getTagDataFileTagOptions()
if plugin_data_files_tags:
for tag_name, tag_desc in plugin_data_files_tags:
if tag_name in (tag for tag, _desc in data_files_tags):
plugins_logger.sysexit(
"Plugin '%s' provides data files tag handling '%s' already provided."
% (plugin_name, tag_name)
)
data_files_tags.append((tag_name, tag_desc))
plugin_datatag2pluginclasses[tag_name] = plugin_class
def addPluginCommandLineOptions(parser, plugin_names, data_files_tags):
"""Add option group for the plugin to the parser.
Notes:
This is exclusively for use in the commandline parsing. Not all
plugins have to have options. But this will add them to the
parser in a first pass, so they can be recognized in a second
pass with them included.
Returns:
None
"""
for plugin_name in plugin_names:
plugin_class = getPluginClass(plugin_name)
_addPluginCommandLineOptions(
parser=parser, plugin_class=plugin_class, data_files_tags=data_files_tags
)
def addUserPluginCommandLineOptions(parser, filename, data_files_tags):
plugin_class = loadUserPlugin(filename)
_addPluginCommandLineOptions(
parser=parser, plugin_class=plugin_class, data_files_tags=data_files_tags
)
user_plugins.add(plugin_class)
def setPluginOptions(plugin_name, values):
"""Set the option values for the specified plugin.
Args:
plugin_name: plugin identifier
values: dictionary to be used for the plugin constructor
Notes:
Use this function, if you want to set the plugin values, without using
the actual command line parsing.
Normally the command line arguments are populating the dictionary for
the plugin, but this will be used if given, and command line parsing
is not done.
"""
assert isinstance(values, dict), values
plugin_values[plugin_name] = values
def getPluginOptions(plugin_name):
"""Return the options values for the specified plugin.
Args:
plugin_name: plugin identifier
Returns:
dict with key, value of options given, potentially from default values.
"""
result = plugin_values.get(plugin_name, {})
for option in plugin_options.get(plugin_name, {}):
option_name = option._long_opts[0] # pylint: disable=protected-access
arg_value = getattr(Options.options, option.dest)
if "[REQUIRED]" in option.help:
if not arg_value:
plugins_logger.sysexit(
"Error, required plugin argument %r of Nuitka plugin %s not given."
% (option_name, plugin_name)
)
result[option.dest] = arg_value
return result
|
[
"[email protected]"
] | |
470f2a990b04ca4e12743e5c260e9e30c6e83955
|
9652060cff5372e971c112a2a6b3495d68a824a9
|
/vagrant/wsgi.py
|
b0962de43da81f0a84401a51d88a932c4d8f161f
|
[
"BSD-3-Clause"
] |
permissive
|
open-contracting-archive/kingfisher-vagrant
|
5b02dfe60193538d3db1d3736a15033f68985651
|
f82baf5c5098e99702fbb5ba84c8364c3ebaa548
|
refs/heads/main
| 2023-03-03T23:52:20.819877 | 2021-02-20T02:43:43 | 2021-02-20T02:43:43 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 81 |
py
|
from ocdskingfisherprocess.web.app import create_app
application = create_app()
|
[
"[email protected]"
] | |
fcdc87080a8f3063b6c4cf66f424fa516756e16c
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/stdlib-big-989.py
|
43aaa042bfe2771cdb70273fcae5be3fb967c2f5
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,999 |
py
|
# ChocoPy library functions
def int_to_str(x: int) -> str:
digits:[str] = None
result:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str2(x: int, x2: int) -> str:
digits:[str] = None
digits2:[str] = None
result:str = ""
result2:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str3(x: int, x2: int, x3: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str4(x: int, x2: int, x3: int, x4: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
digits4:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
result4:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def int_to_str5(x: int, x2: int, x3: int, x4: int, x5: int) -> str:
digits:[str] = None
digits2:[str] = None
digits3:[str] = None
digits4:[str] = None
digits5:[str] = None
result:str = ""
result2:str = ""
result3:str = ""
result4:str = ""
result5:str = ""
# Set-up digit mapping
digits = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# Write sign if necessary
if x < 0:
result = "-"
x = -x
# Write digits using a recursive call
if x >= 10:
result = result + int_to_str(x // 10)
result = result + digits[x % 10]
return result
def str_to_int(x: str) -> int:
result:int = 0
digit:int = 0
char:str = ""
sign:int = 1
first_char:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int2(x: str, x2: str) -> int:
result:int = 0
result2:int = 0
digit:int = 0
digit2:int = 0
char:str = ""
char2:str = ""
sign:int = 1
sign2:int = 1
first_char:bool = True
first_char2:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int3(x: str, x2: str, x3: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
char:str = ""
char2:str = ""
char3:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int4(x: str, x2: str, x3: str, x4: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
result4:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
digit4:int = 0
char:str = ""
char2:str = ""
char3:str = ""
char4:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
sign4:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
first_char4:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
def str_to_int5(x: str, x2: str, x3: str, x4: str, x5: str) -> int:
result:int = 0
result2:int = 0
result3:int = 0
result4:int = 0
result5:int = 0
digit:int = 0
digit2:int = 0
digit3:int = 0
digit4:int = 0
digit5:int = 0
char:str = ""
char2:str = ""
char3:str = ""
char4:str = ""
char5:str = ""
sign:int = 1
sign2:int = 1
sign3:int = 1
sign4:int = 1
sign5:int = 1
first_char:bool = True
first_char2:bool = True
first_char3:bool = True
first_char4:bool = True
first_char5:bool = True
# Parse digits
for char in x:
if char == "-":
if not first_char:
return 0 # Error
sign = -1
elif char == "0":
digit = 0
elif char == "1":
digit = 1
elif char == "2":
digit = 2
elif char == "3":
digit = 3
elif char == "3":
digit = 3
elif char == "4":
digit = 4
elif char == "5":
digit = 5
elif char == "6":
digit = 6
elif char == "7":
digit = 7
elif char == "8":
digit = 8
elif char == "9":
digit = 9
else:
return 0 # On error
first_char = False
result = result * 10 + digit
# Compute result
return result * sign
# Input parameters
c:int = 42
c2:int = 42
c3:int = 42
c4:int = 42
c5:int = 42
n:int = 10
n2:int = 10
n3:int = 10
n4:int = 10
n5:int = 10
# Run [-nc, nc] with step size c
s:str = ""
s2:str = ""
s3:str = ""
s4:str = ""
s5:str = ""
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
i = -n * c
# Crunch
while i <= n * c:
s = int_to_str(i)
print(s)
i = str_to_int(s) + c
|
[
"[email protected]"
] | |
aa7d69aee1bc7288dcc9116c62b689a7f9e24991
|
ec39ccb701d30c64e9541802e3f45aff61a16f09
|
/toontown/uberdog/ClientServicesManager.py
|
864a38a6b01ad165a11c91ef604473c9ad5679d6
|
[] |
no_license
|
ronanwow1001/toontown-src-py3.0
|
f3089ea2b0a987e236df1ae0fad8f94c45e852e0
|
b7bf9673353a1a8231652d009ef00b11da4b0290
|
refs/heads/master
| 2020-12-07T05:03:48.795733 | 2020-01-11T19:07:49 | 2020-01-11T19:07:49 | 232,640,248 | 0 | 0 | null | 2020-01-08T19:18:50 | 2020-01-08T19:18:50 | null |
UTF-8
|
Python
| false | false | 3,418 |
py
|
from direct.directnotify.DirectNotifyGlobal import directNotify
from direct.distributed.DistributedObjectGlobal import DistributedObjectGlobal
import hmac
from panda3d.core import *
from otp.distributed.PotentialAvatar import PotentialAvatar
from otp.otpbase import OTPGlobals
from toontown.chat.ChatGlobals import WTSystem
from toontown.chat.WhisperPopup import WhisperPopup
class ClientServicesManager(DistributedObjectGlobal):
notify = directNotify.newCategory('ClientServicesManager')
# --- LOGIN LOGIC ---
def performLogin(self, doneEvent):
self.doneEvent = doneEvent
self.systemMessageSfx = None
token = self.cr.playToken or 'dev'
token = token.encode('utf-8') # PY3
key = 'bG9sLndlLmNoYW5nZS50aGlzLnRvby5tdWNo'
key = key.encode('utf-8') # PY3
        digest_maker = hmac.new(key, digestmod='md5')  # PY3.8+ requires an explicit digestmod; md5 was the implicit default
digest_maker.update(token)
clientKey = digest_maker.hexdigest()
self.sendUpdate('login', [token, clientKey])
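    # The server side presumably recomputes the same digest to validate the
    # pair (illustrative sketch, assumed symmetric):
    #     hmac.new(key, token, digestmod='md5').hexdigest() == clientKey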
def acceptLogin(self, timestamp):
messenger.send(self.doneEvent, [{'mode': 'success', 'timestamp': timestamp}])
# --- AVATARS LIST ---
def requestAvatars(self):
self.sendUpdate('requestAvatars')
def setAvatars(self, avatars):
avList = []
for avNum, avName, avDNA, avPosition, nameState in avatars:
nameOpen = int(nameState == 1)
names = [avName, '', '', '']
if nameState == 2: # PENDING
names[1] = avName
elif nameState == 3: # APPROVED
names[2] = avName
elif nameState == 4: # REJECTED
names[3] = avName
avList.append(PotentialAvatar(avNum, names, avDNA, avPosition, nameOpen))
self.cr.handleAvatarsList(avList)
# --- AVATAR CREATION/DELETION ---
def sendCreateAvatar(self, avDNA, _, index):
self.sendUpdate('createAvatar', [avDNA.makeNetString(), index])
def createAvatarResp(self, avId):
messenger.send('nameShopCreateAvatarDone', [avId])
def sendDeleteAvatar(self, avId):
self.sendUpdate('deleteAvatar', [avId])
# No deleteAvatarResp; it just sends a setAvatars when the deed is done.
# --- AVATAR NAMING ---
def sendSetNameTyped(self, avId, name, callback):
self._callback = callback
self.sendUpdate('setNameTyped', [avId, name])
def setNameTypedResp(self, avId, status):
self._callback(avId, status)
def sendSetNamePattern(self, avId, p1, f1, p2, f2, p3, f3, p4, f4, callback):
self._callback = callback
self.sendUpdate('setNamePattern', [avId, p1, f1, p2, f2, p3, f3, p4, f4])
def setNamePatternResp(self, avId, status):
self._callback(avId, status)
def sendAcknowledgeAvatarName(self, avId, callback):
self._callback = callback
self.sendUpdate('acknowledgeAvatarName', [avId])
def acknowledgeAvatarNameResp(self):
self._callback()
# --- AVATAR CHOICE ---
def sendChooseAvatar(self, avId):
self.sendUpdate('chooseAvatar', [avId])
def systemMessage(self, message):
whisper = WhisperPopup(message, OTPGlobals.getInterfaceFont(), WTSystem)
whisper.manage(base.marginManager)
if self.systemMessageSfx is None:
self.systemMessageSfx = base.loader.loadSfx('phase_3/audio/sfx/clock03.ogg')
base.playSfx(self.systemMessageSfx)
|
[
"[email protected]"
] | |
451cd7b8185e807bd043961430589cdeb1c46411
|
78f3fe4a148c86ce9b80411a3433a49ccfdc02dd
|
/2016/09/florida-millennials-20160915/graphic_config.py
|
abeb5e9f7aff3c2d8157284f6417f5027493d479
|
[] |
no_license
|
nprapps/graphics-archive
|
54cfc4d4d670aca4d71839d70f23a8bf645c692f
|
fe92cd061730496cb95c9df8fa624505c3b291f8
|
refs/heads/master
| 2023-03-04T11:35:36.413216 | 2023-02-26T23:26:48 | 2023-02-26T23:26:48 | 22,472,848 | 16 | 7 | null | null | null | null |
UTF-8
|
Python
| false | false | 305 |
py
|
#!/usr/bin/env python
import base_filters
COPY_GOOGLE_DOC_KEY = '1ffq-ubcw70cvHmmrQFXNDvn79iRvVQLRnLZaA_pdltU'
USE_ASSETS = False
# Use these variables to override the default cache timeouts for this graphic
# DEFAULT_MAX_AGE = 20
# ASSETS_MAX_AGE = 300
JINJA_FILTER_FUNCTIONS = base_filters.FILTERS
|
[
"[email protected]"
] | |
f01e56e28adaf1a1870f5b9b221f031d2d9ca2ff
|
d21dbab3f374eb42a10f9ec7c434c1ca6fb2bff7
|
/Data Structures/Heap/1-qheap1_with_heapq.py
|
14cb88fd7ab7ac36671aa051029f95f47b91679f
|
[] |
no_license
|
almamuncsit/HackerRank
|
5360ad1d54aa01075dba5527f6ae695e4c6d9c7a
|
6599cde4c7541ebf27bacff8af02dc0c3eaaa678
|
refs/heads/master
| 2021-07-06T00:33:25.912754 | 2021-01-13T09:09:29 | 2021-01-13T09:09:29 | 222,364,072 | 4 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,016 |
py
|
class MinHeap:
def __init__(self, items=[]):
self.heap = [0]
for item in items:
self.push(item)
def push(self, item):
self.heap.append(item)
self.__float_up(self.size() - 1)
    def peak(self):
        # Return the smallest item without removing it; False if the heap is empty.
        if self.size() > 1:
            return self.heap[1]
        else:
            return False
def pop(self):
if self.size() < 2:
return False
if self.size() == 2:
return self.heap.pop()
self.__swap(1, self.size() - 1)
item = self.heap.pop()
        self.__min_heapify(1)
return item
def __swap(self, index1, index2):
self.heap[index1], self.heap[index2] = self.heap[index2], self.heap[index1]
def __float_up(self, index):
parent = index // 2
if parent < 1:
return
elif self.heap[index] < self.heap[parent]:
self.__swap(index, parent)
self.__float_up(parent)
    def __min_heapify(self, index):
        # Sift the value at index down until it is no larger than its children.
        smallest_idx = index
        left = index * 2
        right = left + 1
        if self.size() > left and self.heap[left] < self.heap[smallest_idx]:
            smallest_idx = left
        if self.size() > right and self.heap[right] < self.heap[smallest_idx]:
            smallest_idx = right
        if smallest_idx != index:
            self.__swap(index, smallest_idx)
            self.__min_heapify(smallest_idx)
def size(self):
return len(self.heap)
def delete(self, item):
for i in range(1, self.size()):
if self.heap[i] == item:
self.__swap(i, self.size() - 1)
self.heap.pop()
self.__max_heapify(i)
break
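
# A minimal alternative sketch using the standard-library heapq with lazy
# deletion (an assumption suggested by the file name; defined here but not
# called, so the original flow below is unchanged).
import heapq
from collections import Counter

def solve_with_heapq(commands):
    heap = []
    pending_delete = Counter()  # values flagged for lazy removal
    answers = []
    for cmd in commands:
        if cmd[0] == 1:
            heapq.heappush(heap, cmd[1])
        elif cmd[0] == 2:
            pending_delete[cmd[1]] += 1
        else:
            # drop stale minimums before reporting the real one
            while heap and pending_delete[heap[0]] > 0:
                pending_delete[heapq.heappop(heap)] -= 1
            answers.append(heap[0])
    return answers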
if __name__ == '__main__':
heap = MinHeap([])
n = int(input())
for _ in range(n):
val = list(map(int, input().split()))
if val[0] == 1:
heap.push(val[1])
elif val[0] == 2:
heap.delete(val[1])
else:
        print(heap.peek())
|
[
"[email protected]"
] | |
ad2334b337a461b4bd675a7ad1bc2ba0f29925a3
|
711756b796d68035dc6a39060515200d1d37a274
|
/output_cog/optimized_2415.py
|
223d3fe4b277c1a262932b0bb0e952accf4ba03c
|
[] |
no_license
|
batxes/exocyst_scripts
|
8b109c279c93dd68c1d55ed64ad3cca93e3c95ca
|
a6c487d5053b9b67db22c59865e4ef2417e53030
|
refs/heads/master
| 2020-06-16T20:16:24.840725 | 2016-11-30T16:23:16 | 2016-11-30T16:23:16 | 75,075,164 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 10,838 |
py
|
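# Chimera marker-placement script (auto-generated): creates one named
# marker set per tagged subunit position and places a colored spherical
# marker at each fixed 3D coordinate via VolumePath.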
import _surface
import chimera
try:
    import chimera.runCommand
except ImportError:
    pass
from VolumePath import markerset as ms
try:
    from VolumePath import Marker_Set, Link
    new_marker_set = Marker_Set
except ImportError:
    from VolumePath import volume_path_dialog
    d = volume_path_dialog(True)
    new_marker_set = d.new_marker_set
marker_sets={}
surf_sets={}
if "Cog2_GFPN" not in marker_sets:
s=new_marker_set('Cog2_GFPN')
marker_sets["Cog2_GFPN"]=s
s= marker_sets["Cog2_GFPN"]
mark=s.place_marker((455.091, 548.131, 441.132), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_0" not in marker_sets:
s=new_marker_set('Cog2_0')
marker_sets["Cog2_0"]=s
s= marker_sets["Cog2_0"]
mark=s.place_marker((523.288, 547.655, 441.377), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_1" not in marker_sets:
s=new_marker_set('Cog2_1')
marker_sets["Cog2_1"]=s
s= marker_sets["Cog2_1"]
mark=s.place_marker((603.852, 546.131, 429.081), (0.89, 0.1, 0.1), 17.1475)
if "Cog2_GFPC" not in marker_sets:
s=new_marker_set('Cog2_GFPC')
marker_sets["Cog2_GFPC"]=s
s= marker_sets["Cog2_GFPC"]
mark=s.place_marker((518.447, 575.68, 322.142), (0.89, 0.1, 0.1), 18.4716)
if "Cog2_Anch" not in marker_sets:
s=new_marker_set('Cog2_Anch')
marker_sets["Cog2_Anch"]=s
s= marker_sets["Cog2_Anch"]
mark=s.place_marker((797.322, 526.929, 442.213), (0.89, 0.1, 0.1), 18.4716)
if "Cog3_GFPN" not in marker_sets:
s=new_marker_set('Cog3_GFPN')
marker_sets["Cog3_GFPN"]=s
s= marker_sets["Cog3_GFPN"]
mark=s.place_marker((500.094, 537.392, 437.867), (1, 1, 0), 18.4716)
if "Cog3_0" not in marker_sets:
s=new_marker_set('Cog3_0')
marker_sets["Cog3_0"]=s
s= marker_sets["Cog3_0"]
mark=s.place_marker((498.617, 536.7, 437.848), (1, 1, 0.2), 17.1475)
if "Cog3_1" not in marker_sets:
s=new_marker_set('Cog3_1')
marker_sets["Cog3_1"]=s
s= marker_sets["Cog3_1"]
mark=s.place_marker((473.072, 524.667, 440.444), (1, 1, 0.2), 17.1475)
if "Cog3_2" not in marker_sets:
s=new_marker_set('Cog3_2')
marker_sets["Cog3_2"]=s
s= marker_sets["Cog3_2"]
mark=s.place_marker((452.066, 510.269, 428.431), (1, 1, 0.2), 17.1475)
if "Cog3_3" not in marker_sets:
s=new_marker_set('Cog3_3')
marker_sets["Cog3_3"]=s
s= marker_sets["Cog3_3"]
mark=s.place_marker((472.288, 505.367, 409.56), (1, 1, 0.2), 17.1475)
if "Cog3_4" not in marker_sets:
s=new_marker_set('Cog3_4')
marker_sets["Cog3_4"]=s
s= marker_sets["Cog3_4"]
mark=s.place_marker((475.77, 478.197, 415.625), (1, 1, 0.2), 17.1475)
if "Cog3_5" not in marker_sets:
s=new_marker_set('Cog3_5')
marker_sets["Cog3_5"]=s
s= marker_sets["Cog3_5"]
mark=s.place_marker((479.744, 451.046, 409.663), (1, 1, 0.2), 17.1475)
if "Cog3_GFPC" not in marker_sets:
s=new_marker_set('Cog3_GFPC')
marker_sets["Cog3_GFPC"]=s
s= marker_sets["Cog3_GFPC"]
mark=s.place_marker((479.04, 548.96, 453.22), (1, 1, 0.4), 18.4716)
if "Cog3_Anch" not in marker_sets:
s=new_marker_set('Cog3_Anch')
marker_sets["Cog3_Anch"]=s
s= marker_sets["Cog3_Anch"]
mark=s.place_marker((480.576, 351.176, 373.656), (1, 1, 0.4), 18.4716)
if "Cog4_GFPN" not in marker_sets:
s=new_marker_set('Cog4_GFPN')
marker_sets["Cog4_GFPN"]=s
s= marker_sets["Cog4_GFPN"]
mark=s.place_marker((673.497, 390.666, 419.207), (0, 0, 0.8), 18.4716)
if "Cog4_0" not in marker_sets:
s=new_marker_set('Cog4_0')
marker_sets["Cog4_0"]=s
s= marker_sets["Cog4_0"]
mark=s.place_marker((673.497, 390.666, 419.207), (0, 0, 0.8), 17.1475)
if "Cog4_1" not in marker_sets:
s=new_marker_set('Cog4_1')
marker_sets["Cog4_1"]=s
s= marker_sets["Cog4_1"]
mark=s.place_marker((650.658, 403.82, 430.589), (0, 0, 0.8), 17.1475)
if "Cog4_2" not in marker_sets:
s=new_marker_set('Cog4_2')
marker_sets["Cog4_2"]=s
s= marker_sets["Cog4_2"]
mark=s.place_marker((627.639, 417.176, 441.409), (0, 0, 0.8), 17.1475)
if "Cog4_3" not in marker_sets:
s=new_marker_set('Cog4_3')
marker_sets["Cog4_3"]=s
s= marker_sets["Cog4_3"]
mark=s.place_marker((604.779, 431.65, 450.54), (0, 0, 0.8), 17.1475)
if "Cog4_4" not in marker_sets:
s=new_marker_set('Cog4_4')
marker_sets["Cog4_4"]=s
s= marker_sets["Cog4_4"]
mark=s.place_marker((583.023, 449.587, 455.473), (0, 0, 0.8), 17.1475)
if "Cog4_5" not in marker_sets:
s=new_marker_set('Cog4_5')
marker_sets["Cog4_5"]=s
s= marker_sets["Cog4_5"]
mark=s.place_marker((566.455, 472.927, 456.359), (0, 0, 0.8), 17.1475)
if "Cog4_6" not in marker_sets:
s=new_marker_set('Cog4_6')
marker_sets["Cog4_6"]=s
s= marker_sets["Cog4_6"]
mark=s.place_marker((553.846, 498.757, 455.468), (0, 0, 0.8), 17.1475)
if "Cog4_GFPC" not in marker_sets:
s=new_marker_set('Cog4_GFPC')
marker_sets["Cog4_GFPC"]=s
s= marker_sets["Cog4_GFPC"]
mark=s.place_marker((615.326, 292.629, 312.28), (0, 0, 0.8), 18.4716)
if "Cog4_Anch" not in marker_sets:
s=new_marker_set('Cog4_Anch')
marker_sets["Cog4_Anch"]=s
s= marker_sets["Cog4_Anch"]
mark=s.place_marker((486.93, 713.637, 585.084), (0, 0, 0.8), 18.4716)
if "Cog5_GFPN" not in marker_sets:
s=new_marker_set('Cog5_GFPN')
marker_sets["Cog5_GFPN"]=s
s= marker_sets["Cog5_GFPN"]
mark=s.place_marker((588.597, 521.567, 466.245), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_0" not in marker_sets:
s=new_marker_set('Cog5_0')
marker_sets["Cog5_0"]=s
s= marker_sets["Cog5_0"]
mark=s.place_marker((588.597, 521.567, 466.245), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_1" not in marker_sets:
s=new_marker_set('Cog5_1')
marker_sets["Cog5_1"]=s
s= marker_sets["Cog5_1"]
mark=s.place_marker((583.355, 520.868, 437.624), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_2" not in marker_sets:
s=new_marker_set('Cog5_2')
marker_sets["Cog5_2"]=s
s= marker_sets["Cog5_2"]
mark=s.place_marker((580.755, 531.652, 410.706), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_3" not in marker_sets:
s=new_marker_set('Cog5_3')
marker_sets["Cog5_3"]=s
s= marker_sets["Cog5_3"]
mark=s.place_marker((590.194, 557.136, 399.873), (0.3, 0.3, 0.3), 17.1475)
if "Cog5_GFPC" not in marker_sets:
s=new_marker_set('Cog5_GFPC')
marker_sets["Cog5_GFPC"]=s
s= marker_sets["Cog5_GFPC"]
mark=s.place_marker((468.427, 583.357, 396.507), (0.3, 0.3, 0.3), 18.4716)
if "Cog5_Anch" not in marker_sets:
s=new_marker_set('Cog5_Anch')
marker_sets["Cog5_Anch"]=s
s= marker_sets["Cog5_Anch"]
mark=s.place_marker((714.296, 539.961, 401.056), (0.3, 0.3, 0.3), 18.4716)
if "Cog6_GFPN" not in marker_sets:
s=new_marker_set('Cog6_GFPN')
marker_sets["Cog6_GFPN"]=s
s= marker_sets["Cog6_GFPN"]
mark=s.place_marker((511.432, 560.558, 418.793), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_0" not in marker_sets:
s=new_marker_set('Cog6_0')
marker_sets["Cog6_0"]=s
s= marker_sets["Cog6_0"]
mark=s.place_marker((511.417, 560.59, 418.751), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_1" not in marker_sets:
s=new_marker_set('Cog6_1')
marker_sets["Cog6_1"]=s
s= marker_sets["Cog6_1"]
mark=s.place_marker((499.166, 571.577, 441.779), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_2" not in marker_sets:
s=new_marker_set('Cog6_2')
marker_sets["Cog6_2"]=s
s= marker_sets["Cog6_2"]
mark=s.place_marker((500.844, 556.204, 465.571), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_3" not in marker_sets:
s=new_marker_set('Cog6_3')
marker_sets["Cog6_3"]=s
s= marker_sets["Cog6_3"]
mark=s.place_marker((505.16, 528.884, 472.457), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_4" not in marker_sets:
s=new_marker_set('Cog6_4')
marker_sets["Cog6_4"]=s
s= marker_sets["Cog6_4"]
mark=s.place_marker((496.931, 504.103, 459.618), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_5" not in marker_sets:
s=new_marker_set('Cog6_5')
marker_sets["Cog6_5"]=s
s= marker_sets["Cog6_5"]
mark=s.place_marker((488.615, 478.02, 450.48), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_6" not in marker_sets:
s=new_marker_set('Cog6_6')
marker_sets["Cog6_6"]=s
s= marker_sets["Cog6_6"]
mark=s.place_marker((482.705, 451.06, 442.582), (0.21, 0.49, 0.72), 17.1475)
if "Cog6_GFPC" not in marker_sets:
s=new_marker_set('Cog6_GFPC')
marker_sets["Cog6_GFPC"]=s
s= marker_sets["Cog6_GFPC"]
mark=s.place_marker((519.426, 501.731, 500.317), (0.21, 0.49, 0.72), 18.4716)
if "Cog6_Anch" not in marker_sets:
s=new_marker_set('Cog6_Anch')
marker_sets["Cog6_Anch"]=s
s= marker_sets["Cog6_Anch"]
mark=s.place_marker((446.248, 399.583, 381.235), (0.21, 0.49, 0.72), 18.4716)
if "Cog7_GFPN" not in marker_sets:
s=new_marker_set('Cog7_GFPN')
marker_sets["Cog7_GFPN"]=s
s= marker_sets["Cog7_GFPN"]
mark=s.place_marker((539.353, 543.386, 500.727), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_0" not in marker_sets:
s=new_marker_set('Cog7_0')
marker_sets["Cog7_0"]=s
s= marker_sets["Cog7_0"]
mark=s.place_marker((544.394, 548.656, 473.541), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_1" not in marker_sets:
s=new_marker_set('Cog7_1')
marker_sets["Cog7_1"]=s
s= marker_sets["Cog7_1"]
mark=s.place_marker((557.887, 563.361, 420.476), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_2" not in marker_sets:
s=new_marker_set('Cog7_2')
marker_sets["Cog7_2"]=s
s= marker_sets["Cog7_2"]
mark=s.place_marker((571.34, 578.157, 367.476), (0.7, 0.7, 0.7), 17.1475)
if "Cog7_GFPC" not in marker_sets:
s=new_marker_set('Cog7_GFPC')
marker_sets["Cog7_GFPC"]=s
s= marker_sets["Cog7_GFPC"]
mark=s.place_marker((508.62, 629.225, 376.098), (0.7, 0.7, 0.7), 18.4716)
if "Cog7_Anch" not in marker_sets:
s=new_marker_set('Cog7_Anch')
marker_sets["Cog7_Anch"]=s
s= marker_sets["Cog7_Anch"]
mark=s.place_marker((636.519, 560.093, 288.137), (0.7, 0.7, 0.7), 18.4716)
if "Cog8_0" not in marker_sets:
s=new_marker_set('Cog8_0')
marker_sets["Cog8_0"]=s
s= marker_sets["Cog8_0"]
mark=s.place_marker((496.106, 537.902, 509.789), (1, 0.5, 0), 17.1475)
if "Cog8_1" not in marker_sets:
s=new_marker_set('Cog8_1')
marker_sets["Cog8_1"]=s
s= marker_sets["Cog8_1"]
mark=s.place_marker((522.62, 544.413, 500.996), (1, 0.5, 0), 17.1475)
if "Cog8_2" not in marker_sets:
s=new_marker_set('Cog8_2')
marker_sets["Cog8_2"]=s
s= marker_sets["Cog8_2"]
mark=s.place_marker((549.238, 556.598, 504.784), (1, 0.5, 0), 17.1475)
if "Cog8_3" not in marker_sets:
s=new_marker_set('Cog8_3')
marker_sets["Cog8_3"]=s
s= marker_sets["Cog8_3"]
mark=s.place_marker((572.057, 565.329, 487.574), (1, 0.5, 0), 17.1475)
if "Cog8_4" not in marker_sets:
s=new_marker_set('Cog8_4')
marker_sets["Cog8_4"]=s
s= marker_sets["Cog8_4"]
mark=s.place_marker((593.343, 576.698, 470.093), (1, 0.5, 0), 17.1475)
if "Cog8_5" not in marker_sets:
s=new_marker_set('Cog8_5')
marker_sets["Cog8_5"]=s
s= marker_sets["Cog8_5"]
mark=s.place_marker((614.895, 587.721, 452.205), (1, 0.5, 0), 17.1475)
if "Cog8_GFPC" not in marker_sets:
s=new_marker_set('Cog8_GFPC')
marker_sets["Cog8_GFPC"]=s
s= marker_sets["Cog8_GFPC"]
mark=s.place_marker((539.329, 564.984, 455.17), (1, 0.6, 0.1), 18.4716)
if "Cog8_Anch" not in marker_sets:
s=new_marker_set('Cog8_Anch')
marker_sets["Cog8_Anch"]=s
s= marker_sets["Cog8_Anch"]
mark=s.place_marker((694.073, 613.407, 442.164), (1, 0.6, 0.1), 18.4716)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
|
[
"[email protected]"
] | |
f91bae1f75347a2dca6cbfcaefc9d5b228815bb5
|
b4a39921429d9045163d7a4d030951fecbfa352d
|
/setup.py
|
f1ecdc2b187860a10daf5c4d5210c312eac3adbd
|
[
"MIT"
] |
permissive
|
MayuriKalokhe/Data_Science_Covid-19
|
814c7767e9d0b3cce27c2fa87b0407b7ea27166e
|
e4bd99ddb2d6b2467991867bfa8a658804689d9f
|
refs/heads/master
| 2022-12-17T17:39:36.342071 | 2020-09-15T21:22:50 | 2020-09-15T21:22:50 | 295,846,644 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 232 |
py
|
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
    description='Applied Data Science for Covid-19 Tracking',
author='Mayuri Kalokhe',
license='MIT',
)
|
[
"[email protected]"
] | |
10cdf6ae6137753f16ca394c963e3e452646a544
|
d66818f4b951943553826a5f64413e90120e1fae
|
/hackerrank/Algorithms/Kangaroo/solution.py
|
c3eb95f215b9b283337e9308ae9ff6c5d375055a
|
[
"MIT"
] |
permissive
|
HBinhCT/Q-project
|
0f80cd15c9945c43e2e17072416ddb6e4745e7fa
|
19923cbaa3c83c670527899ece5c3ad31bcebe65
|
refs/heads/master
| 2023-08-30T08:59:16.006567 | 2023-08-29T15:30:21 | 2023-08-29T15:30:21 | 247,630,603 | 8 | 1 |
MIT
| 2020-07-22T01:20:23 | 2020-03-16T06:48:02 |
Python
|
UTF-8
|
Python
| false | false | 487 |
py
|
#!/bin/python3
import os
# Complete the kangaroo function below.
def kangaroo(x1, v1, x2, v2):
    # With x1 < x2, the kangaroos meet iff x1 + t*v1 == x2 + t*v2 for some
    # non-negative integer t, i.e. t = (x2 - x1) / (v1 - v2); that requires
    # v1 > v2 and (x2 - x1) evenly divisible by (v1 - v2).
    return 'YES' if v1 > v2 and not ((x2 - x1) % (v1 - v2)) else 'NO'
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
x1V1X2V2 = input().split()
x1 = int(x1V1X2V2[0])
v1 = int(x1V1X2V2[1])
x2 = int(x1V1X2V2[2])
v2 = int(x1V1X2V2[3])
result = kangaroo(x1, v1, x2, v2)
fptr.write(result + '\n')
fptr.close()
|
[
"[email protected]"
] | |
584a7b924adeb54de34928fb9018d078d9f7b946
|
64cd483b18dd9027c40c35c9f3b5153d6735db64
|
/api_ec2/chatbot_api/resources/food.py
|
011d68778c0f43d9b472fb3bddc3e7122b0d301b
|
[] |
no_license
|
taepd/sba-chatbot
|
a55673e170891d7e536a2e4017989e7874c28354
|
d58f4663c431870b5aca7412cf491f962a04f095
|
refs/heads/master
| 2023-04-23T20:49:26.947955 | 2021-05-19T14:58:16 | 2021-05-19T14:58:16 | 301,092,681 | 2 | 0 | null | 2021-05-19T14:58:17 | 2020-10-04T09:48:34 |
Python
|
UTF-8
|
Python
| false | false | 4,511 |
py
|
# from sqlalchemy import Column, Integer, Float, String, ForeignKey, create_engine
# from sqlalchemy.dialects.mysql import DECIMAL, VARCHAR, LONGTEXT
from typing import List
from flask import request
from flask_restful import Resource, reqparse
from flask import jsonify
import json
import os
import numpy as np
import pandas as pd
import joblib
import konlpy
# from eunjeon import Mecab
from chatbot_api.ext.db import db, openSession
from chatbot_api.util.file_handler import FileReader
from chatbot_api.resources.order_review import OrderReviewDto
class FoodDto(db.Model):
__tablename__ = "food"
    __table_args__ = {'mysql_collate': 'utf8_general_ci'}  # Korean (Hangul) encoding
food_id: int = db.Column(db.Integer, primary_key=True, index=True)
food_name: str = db.Column(db.String(200))
price: int = db.Column(db.Integer)
    food_img: str = db.Column(db.String(1000))  # the longest observed value was about 634 chars
food_rev_avg: float = db.Column(db.Float)
    food_rev_cnt: int = db.Column(db.Integer)
shop_id: int = db.Column(db.Integer, db.ForeignKey('shop.shop_id', ondelete="CASCADE"))
# order_reviews = db.relationship('OrderReviewDto', backref='food', lazy='dynamic', cascade="all, delete, delete-orphan")
order_reviews = db.relationship('OrderReviewDto', back_populates='foods', lazy='dynamic', cascade="all, delete, delete-orphan")
def __init__(self, food_id, food_name, price, food_img, food_rev_avg,
food_rev_cnt, shop_id):
self.food_id = food_id
self.food_name = food_name
self.price = price
self.food_img = food_img
self.food_rev_avg = food_rev_avg
self.food_rev_cnt = food_rev_cnt
self.shop_id = shop_id
def __repr__(self):
return f'Food(food_id={self.food_id}, ' \
f'food_name={self.food_name}, ' \
f'price={self.price}, ' \
f'food_img={self.food_img}, ' \
f'food_rev_avg={self.food_rev_avg}, ' \
f'food_rev_cnt={self.food_rev_cnt}, ' \
f'shop_id="{self.shop_id}"'
@property
def json(self):
return {
'food_id': self.food_id,
'food_name': self.food_name,
'price': self.price,
'food_img': self.food_img,
'food_rev_avg': self.food_rev_avg,
'food_rev_cnt': self.food_rev_cnt,
'shop_id': self.shop_id
}
class FoodVo:
food_id: int = 0
food_name: str = ''
price: int = 0
food_img: str = ''
food_rev_avg: float = 0.0
food_rev_cnt: float = 0.0
shop_id: int = 0
class FoodDao(FoodDto):
@classmethod
def food_find_by_shopid(cls,shop_id):
# sql = cls.query.filter_by(shop_id = shopid)
sql = cls.query.filter_by(shop_id = shop_id)
df = pd.read_sql(sql.statement, sql.session.bind)
return json.loads(df.to_json(orient='records'))
# return cls.query.filter_by(shop_id = shopid).all()
@classmethod
def food_find_by_foodid(cls,food_id):
# sql = cls.query.filter_by(food_id = food_id)
# df = pd.read_sql(sql.statement, sql.session.bind)
return cls.query.filter_by(food_id = food_id).first()
@classmethod
def chat_food_find(cls, key):
sql = cls.query.filter(FoodDto.food_name.like('%'+key+'%')).\
order_by(FoodDao.food_rev_cnt.desc())
df = pd.read_sql(sql.statement,sql.session.bind)
df = df.head(1)
return json.loads(df.to_json(orient='records'))
class Food(Resource):
    @staticmethod
    def get(food_id: int):
        food = FoodDao.food_find_by_foodid(food_id)
        if food is None:
            # first() returns None for an unknown id; guard before .json
            return {'message': 'food not found'}, 404
        return food.json, 200
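
# Hypothetical wiring sketch (the `app`/`api` objects and the route path are
# assumptions; the original module does not register the resource itself):
#
#   from flask_restful import Api
#   api = Api(app)
#   api.add_resource(Food, '/foods/<int:food_id>')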
# ------------ execution area --------------
# if __name__ == '__main__':
# chat = ChatbotService()
# model = chat.load_model_from_file()
# # import pdb
# # bulk-load the data
# df = pd.read_csv('./data/db/food.csv', sep=',', encoding='utf-8-sig')
# df = df.replace(np.nan, '', regex=True)
# shop_seoul = df.loc[df['shop_addr'].str.contains('์์ธ', na=False)]
# print(shop_seoul['shop_addr'])
# shop_seoul.to_csv('./data/csv/important/shop(seoul).csv', sep=',', encoding='utf-8-sig', index=False)
# pdb.set_trace()
# a loading error occurs if there is a foreign key..
# Session = openSession()
# session = Session()
# session.bulk_insert_mappings(FoodDto, df.to_dict(orient="records"))
# session.commit()
# session.close()
|
[
"[email protected]"
] | |
19ed803340dab89ff9a3c6ced25d27987779f1f1
|
89f5a6b6ecb4aed9d5007bf2f500b7bfcb66c542
|
/saltcontainermap/modinstall.py
|
fa9ab9bce241d084bd7b8bcdbc84a37b87628ef0
|
[
"MIT"
] |
permissive
|
merll/salt-container-map
|
a752a5bcff326ba7b3402a4397b73d9f8adcca66
|
0792e7bd1e80dfb370d783a45ca23db51ef28c22
|
refs/heads/master
| 2020-05-22T04:12:57.010357 | 2018-01-16T16:51:46 | 2018-01-16T16:51:46 | 38,863,690 | 5 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,934 |
py
|
# -*- coding: utf-8 -*-
import argparse
from collections import namedtuple
import logging
import os
import salt.config
log = logging.getLogger()
InstallEnvironment = namedtuple('InstallEnvironment', ['lib_path', 'salt_config', 'salt_extmods', 'salt_root'])
def _expand_path(root, path):
if os.path.isabs(path):
return path
return os.path.join(root, path)
def _read_extmods(master_opts):
root_dir = master_opts.get('root_dir')
ext_mods = master_opts.get('extension_modules')
if ext_mods:
log.debug("Using extension_modules from master config: %s", ext_mods)
return _expand_path(root_dir, ext_mods)
module_dirs = master_opts.get('module_dirs')
if module_dirs:
log.debug("Using first module_dirs from master config: %s", module_dirs[0])
return _expand_path(root_dir, module_dirs[0])
return None
def _read_root(master_opts):
fs_base = master_opts.get('file_roots', {}).get('base')
if fs_base:
log.debug("Using salt filesystem base root from master config: %s", fs_base[0])
return fs_base[0]
return None
def link_module(salt_path, lib_dir, sub_dir, file_name):
module_path = os.path.join(lib_dir, file_name)
link_dir = os.path.join(salt_path, sub_dir)
log.debug("setting up module in %s", link_dir)
link_path = os.path.join(link_dir, file_name)
init_path = os.path.join(link_dir, '__init__.py')
if not os.path.isdir(link_dir):
log.info("creating symlink %s -> %s", link_path, module_path)
os.mkdir(link_dir)
open(init_path, 'a').close()
os.symlink(module_path, link_path)
return True
if os.path.lexists(link_path):
if os.path.islink(link_path):
link_target = os.readlink(link_path)
if link_target != module_path:
log.warning("File %s exists, but is not a symlink pointing to %s.", link_path, module_path)
else:
log.info("Found existing symlink.")
else:
log.warning("File %s exists, but is not a symbolic link.", link_path)
return False
log.info("creating symlink %s -> %s", link_path, module_path)
if not os.path.exists(init_path):
open(init_path, 'a').close()
os.symlink(module_path, link_path)
return True
def unlink_module(salt_path, sub_dir, file_name):
link_dir = os.path.join(salt_path, sub_dir)
log.info("removing module from %s", link_dir)
link_path = os.path.join(link_dir, file_name)
if os.path.islink(link_path):
os.unlink(link_path)
return True
return False
def get_env():
parser = argparse.ArgumentParser(description="Installs symlinks to the modules in the Salt module directories.")
parser.add_argument('-c', '--salt-config', default='/etc/salt/master',
help="Path to the salt master configuration file.")
parser.add_argument('--salt-extmods', help="Path for extension modules. If not set, the setting from the master "
"config is used.")
parser.add_argument('--salt-root', help="Path to the master file root, e.g. /srv/salt. If not set, looks up the "
"base environment in the master configuration file.")
output_group = parser.add_mutually_exclusive_group()
output_group.add_argument('-v', '--verbose', action='store_true', help="Show debug messages.")
output_group.add_argument('-q', '--quiet', action='store_true', help="Do not show any messages.")
args = parser.parse_args()
if not args.quiet:
ch = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s - %(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
if args.verbose:
log.setLevel(logging.DEBUG)
else:
log.setLevel(logging.INFO)
salt_config = args.salt_config
master_opts = salt.config.master_config(salt_config)
if args.salt_extmods:
salt_extmods = args.salt_extmods
else:
salt_extmods = _read_extmods(master_opts)
if args.salt_root:
salt_root = args.salt_root
else:
salt_root = _read_root(master_opts)
    # guard against configs that yield no directory at all (os.path.isdir(None) raises)
    if not salt_extmods or not os.path.isdir(salt_extmods):
        log.error("Extension module directory %s does not exist.", salt_extmods)
        parser.exit(status=1)
    if not salt_root or not os.path.isdir(salt_root):
        log.error("Master file root directory %s does not exist.", salt_root)
        parser.exit(status=1)
return InstallEnvironment(os.path.dirname(__file__), salt_config, salt_extmods, salt_root)
def install_modules():
env = get_env()
res_extmod = link_module(env.salt_extmods, os.path.join(env.lib_path, 'extmods'), 'renderers', 'lazy_yaml.py')
res_mod = link_module(env.salt_root, os.path.join(env.lib_path, 'modules'), '_modules', 'container_map.py')
res_state = link_module(env.salt_root, os.path.join(env.lib_path, 'states'), '_states', 'container_map.py')
if res_extmod:
log.info("Installed master extension module. Please restart the salt master process for using it.")
if res_mod and res_state:
log.info("Installed minion modules. Distribute with 'saltutil.sync_all' or 'state.highstate'.")
def remove_modules():
env = get_env()
res_extmod = unlink_module(env.salt_extmods, 'renderers', 'lazy_yaml.py')
res_mod = unlink_module(env.salt_root, '_modules', 'container_map.py')
res_state = unlink_module(env.salt_root, '_states', 'container_map.py')
if res_extmod:
log.info("Removed master extension module. It will not be available after the master process is restarted.")
if res_mod and res_state:
log.info("Removed minion modules. 'saltutil.clear_cache' can be used for distributing the removal, but "
"'saltutil.sync_all' should be run immediately afterwards if you have any other custom modules.")
|
[
"[email protected]"
] | |
e0b748369a429ce203da0543699f8fa7b8f011aa
|
5a711a4a49f94ae1497a723e29c981facf1d5f37
|
/tests/practice_centers/test_services.py
|
4d8d050dcd9f9685d0cc981749b8179fa27e3e89
|
[
"MIT"
] |
permissive
|
fabien-roy/glo-2005-sportsapp
|
2509583fb47bce9cff1e211cb1ed7adebaf3fdd0
|
3b5b5f9cdcfe53d1e6e702609587068c4bd3310d
|
refs/heads/master
| 2023-04-09T07:30:31.512069 | 2020-05-10T23:21:42 | 2020-05-10T23:21:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 570 |
py
|
from instance.practice_centers.services import PracticeCenterPopulationService
from tests.practice_centers.mocks import practice_center_repository
from tests.interfaces.test_basic import BasicTests
class PracticeCenterPopulationServiceTests(BasicTests):
def setUp(self):
self.practice_center_population_service = PracticeCenterPopulationService(
practice_center_repository)
def test_db_populate_adds_fakes(self):
self.practice_center_population_service.db_populate()
self.assertTrue(practice_center_repository.add.called)
|
[
"[email protected]"
] | |
6cfeb76982ef4150fc264f81b7bd13f46fcbf673
|
941b25a0d0ccd25e4e64293defc2b50a61fccb01
|
/testBoard.py
|
115ca435b072bd140be722e11fea581ae4d714cc
|
[] |
no_license
|
fanzhangg/sliding-tiles
|
c5a396818ec2d7449309f773df37a46ec7b41c8e
|
334bb7df76436aa9429ff6132db8a9ea1afce35f
|
refs/heads/master
| 2020-04-08T20:06:37.554387 | 2018-11-29T15:20:52 | 2018-11-29T15:20:52 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 757 |
py
|
import unittest
from Board import Board
class testBoard(unittest.TestCase):
board = Board(3, 3)
def test_0_initialize_board(self):
print("Start initialize board test")
tiles_1 = (1, 2, 3, 4, 5, 6, 7, 8)
self.assertEqual(self.board.initialize_board(tiles_1), None)
tiles_2 = (1, 2, 3, 4, 5, 6, 7, 8, 9)
self.assertRaises(IndexError("The size of tiles exceeds the expected size",
self.board.initialize_board, tiles_2))
tiles_3 = (1, 2, 3, 4)
self.assertRaises(IndexError("The size of tiles less than the expected size",
self.board.initialize_board, tiles_3))
if __name__ == '__main__':
unittest.main(exit=False)
|
[
"[email protected]"
] |