code (string, lengths 20–1.05M) | apis (sequence) | extract_api (string, lengths 75–5.24M)
---|---|---|
# def get_price(post_info, attributes):
# if post_info.get_info('attr_price') is not None:
# return normalize_price(post_info.get_info('attr_price'))
# else:
# price_min = 0
# price_max = 0
# price_m2 = 0
# area_tmp = 0
# price_str = ""
# for tmp in attributes['attr_price']:
# price = normalize_price(tmp)
# if price_min == 0 and price[0]:
# price_str = tmp
# price_min = price[0]
# if price_max == 0 and price[1]:
# price_max = price[1]
# if price_m2 == 0 and price[2]:
# price_m2 = price[2]
# if area_tmp == 0 and price[4]:
# area_tmp = price[4]
# # if we reach here, it means none of the values extracted by the NLP API was valuable
# return price_min, price_max, price_m2, area_tmp, price_str
import requests
from get_addr import add_street_num_to_addr
import re
url = "http://35.240.240.251/api/v1/real-estate-extraction"
re_addr = r"Địa chỉ: (\S+ )*"
def get_from_api(post_content):
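    """Send the "Địa chỉ:" (address) line of the post to the real-estate
    extraction API and map the returned tags onto address attributes."""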
request = requests.Session()
headers = {}
addr = re.search(re_addr,post_content)
data_list = [addr.group()]
response = request.post(
url=url,
headers=headers,
json=data_list
)
addr = re.search(re_addr,post_content)
print("\n===text:{}===\n".format(post_content))
print("\n===matches:{}===\n".format(addr.group()))
    # two of the attributes in this list are lists rather than single values:
    # for each attribute the NLP API may recognise more than one value, and we don't know which
    # recognised values are correct, so we must check every single one to find the one we need
data_attrs = {
"attr_addr_number": "",
"attr_addr_street": "",
"attr_addr_district": "",
"attr_addr_ward": "",
"attr_addr_city": "",
# "attr_position": "",
# "attr_surrounding": "",
# "attr_surrounding_name": "",
# "attr_surrounding_characteristics": "",
# "attr_transaction_type": "",
# "attr_realestate_type": "",
# "attr_potential": "",
# "attr_area": [],
# "attr_price": [],
# "attr_price_m2": "",
# "attr_interior_floor": "",
# "attr_interior_room": "",
# "attr_orientation": "",
# "attr_project": "",
# "attr_legal": "",
# "normal": "",
# "phone": "",
}
json_response = response.json()
print("\n\n\n === json_response:{} === \n\n\n".format(json_response))
    for i, content in enumerate(json_response[0]["tags"]):
        if content["type"] == "addr_street" \
                and data_attrs["attr_addr_number"] == "":
            # the street number, if present, is the "normal" tag right before the street tag
            if i > 0 and json_response[0]["tags"][i-1]["type"] == "normal":
data_attrs["attr_addr_number"] = \
add_street_num_to_addr(
json_response[0]["tags"][i-1]["content"]
)
data_attrs["attr_addr_street"] = content["content"]
# data_attrs["attr_addr_street"] = ''
elif content['type'] == "addr_ward" and \
data_attrs["attr_addr_ward"]=="":
data_attrs["attr_addr_ward"] = content["content"]
elif content['type'] == "addr_district" and \
data_attrs["attr_addr_district"]=="":
data_attrs["attr_addr_district"] = content["content"]
elif content['type'] == "addr_city" and \
data_attrs["attr_addr_city"]=="":
data_attrs["attr_addr_city"] = content["content"]
return data_attrs
get_from_api(
"""
🌈 SỐNG TẠI BIỆT THỰ NỔI VINHOMES MARINA: LÀM GÌ CŨNG DỄ, ĐẾN ĐÂU CŨNG GẦN
Bên cạnh hệ thống tiện ích nội khu đẳng cấp như bể bơi hướng hồ, sân tập yoga, đường chạy bộ, sân tennis, sân cầu lông, sân tập bóng rổ, khu BBQ, chòi vọng cảnh... Biệt thự nổi Vinhomes Marina còn kết nối thuận tiện với các công trình khác, đáp ứng mọi nhu cầu của cư dân như Bệnh viện Đa khoa Quốc tế Vinmec, Trường học Liên cấp Vinschool, Trung tâm thương mại Aeon Mall.
Không những vậy, nơi đây còn thuộc trục Đại lộ Đông Tây, Đại lộ Võ Nguyên Giáp nên kết nối thuận tiện vào khu vực trung tâm Hải Phòng như Bến xe Cầu Rào, Sân vận động Lạch Tray, Nhà hát lớn, Sân bay Cát Bi, Ủy ban Nhân dân Thành phố, Cảng Hải Phòng…
Nhờ đó, cư dân vừa được trải nghiệm không gian riêng an yên nhưng vẫn thuận tiện kết nối, di chuyển đến mọi nơi. Sống tại Biệt thự nổi Vinhomes Marina: Làm gì cũng dễ, đến đâu cũng gần!
🌈 Đặc biệt, cơ hội nhân đôi đẳng cấp đang chờ đợi các chủ nhân của Biệt thự nổi Vinhomes Marina. Vừa trải nghiệm cuộc sống thượng lưu đẳng cấp, vừa được nhận ngay những ưu đãi hấp dẫn:
✨ Quà tặng VinID Gift Card trị giá lên tới 150 triệu đồng/căn (áp dụng có điều kiện với từng loại căn)
✨ Tặng gói nội thất trị giá 300 triệu đồng/căn đối với các căn biệt thự song lập thuộc NT8,9,10,11 & SH.19 - SH.29
✨ Hỗ trợ vay vốn đến 70% giá bán
✨ Hỗ trợ lãi suất 18 tháng hoặc nhận chiết khấu không vay lên tới 4%
✨ Chính sách thanh toán sớm từ Chủ đầu tư đối với khách hàng thanh toán trước hạn
✨ Hưởng mức lãi suất lên tới 8%/năm trên khoản tiền và số ngày thanh toán sớm
* Các chương trình đi kèm điều kiện cụ thể
#VinhomesMarina #CauRao2 #HaiPhong
---
<NAME> - Dấu ấn Địa Trung Hải trên đất Cảng phồn vinh!
Tìm hiểu thêm Dự án tại: https://marina.vinhomes.vn/
Hotline: 1800 1179
Email: <EMAIL>
Địa chỉ: C<NAME> 2, P. <NAME>, Q. <NAME>, TP. <NAME>.
*Thông tin, hình ảnh, các tiện ích trên nội dung này chỉ mang tính chất minh hoạ tương đối và có thể được điều chỉnh theo quyết định của Chủ đầu tư tại từng thời điểm đảm bảo phù hợp quy hoạch và thực tế thi công dự án. Các thông tin, cam kết chính thức sẽ được quy định cụ thể tại Hợp đồng mua bán. Việc quản lý, vận hành và kinh doanh của khu đô thị sẽ theo quy định của Ban quản lý.
"""
) | [
"requests.Session",
"get_addr.add_street_num_to_addr",
"re.search"
] | [((1117, 1135), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1133, 1135), False, 'import requests\n'), ((1169, 1201), 're.search', 're.search', (['re_addr', 'post_content'], {}), '(re_addr, post_content)\n', (1178, 1201), False, 'import re\n'), ((1364, 1396), 're.search', 're.search', (['re_addr', 'post_content'], {}), '(re_addr, post_content)\n', (1373, 1396), False, 'import re\n'), ((3508, 3574), 'get_addr.add_street_num_to_addr', 'add_street_num_to_addr', (["json_response[0]['tags'][i - 1]['content']"], {}), "(json_response[0]['tags'][i - 1]['content'])\n", (3530, 3574), False, 'from get_addr import add_street_num_to_addr\n')] |
from distutils.core import setup, Extension
import numpy.distutils.misc_util
import os
import platform
system_name = platform.system()
desc = open("README.rst").read()
extension_version = "0.1.0"
extension_url = "https://github.com/bckelly80/carma_pack"
BOOST_DIR = os.environ["BOOST_DIR"]
ARMADILLO_DIR = os.environ["ARMADILLO_DIR"]
NUMPY_DIR = os.environ["NUMPY_DIR"]
include_dirs = [NUMPY_DIR + "/include", BOOST_DIR + "/include", ARMADILLO_DIR + "/include",
"/usr/include/", "include"]
# needed to add "include" in order to build
for include_dir in numpy.distutils.misc_util.get_numpy_include_dirs():
include_dirs.append(include_dir)
# on my systems (Linux and MAC) ARMADILLO_DIR includes libraries, no need for extra "lib"
library_dirs = [NUMPY_DIR + "/lib", BOOST_DIR + "/lib", ARMADILLO_DIR + "/lib", ARMADILLO_DIR, "/usr/lib/"]
if system_name != 'Darwin':
# /usr/lib64 does not exist under Mac OS X
library_dirs.append("/usr/lib64")
compiler_args = ["-O3"]
if system_name == 'Darwin':
compiler_args.append("-std=c++11")
# need to build against libc++ for Mac OS X
compiler_args.append("-stdlib=libc++")
else:
compiler_args.append("-std=c++0x")
if os.path.exists(os.path.join(BOOST_DIR, "lib", "libboost_filesystem-mt.dylib")):
boost_suffix = "-mt"
else:
boost_suffix = ""
def configuration(parent_package='', top_path=None):
# http://docs.scipy.org/doc/numpy/reference/distutils.html#numpy.distutils.misc_util.Configuration
from numpy.distutils.misc_util import Configuration
config = Configuration("carmcmc", parent_package, top_path)
config.version = extension_version
config.add_data_dir((".", "carmcmc"))
config.add_library(
"carmcmc",
sources=["carmcmc.cpp", "carpack.cpp", "kfilter.cpp", "proposals.cpp", "samplers.cpp", "random.cpp",
"steps.cpp"],
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=["boost_python%s"%boost_suffix, "boost_filesystem%s"%boost_suffix, "boost_system%s"%boost_suffix,
"armadillo"],
extra_compiler_args=compiler_args
)
config.add_extension(
"_carmcmc",
sources=["boost_python_wrapper.cpp", "carmcmc.cpp"],
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=["boost_python%s"%boost_suffix, "boost_filesystem%s"%boost_suffix, "boost_system%s"%boost_suffix,
"armadillo", "carmcmc"],
extra_compile_args=compiler_args
)
config.add_data_dir(("../../../../include", "include"))
config.add_data_dir(("../../../../examples", "examples"))
config.test_suite = "cpp_tests/testCarmcmc"
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| [
"platform.system",
"os.path.join",
"numpy.distutils.misc_util.Configuration"
] | [((117, 134), 'platform.system', 'platform.system', ([], {}), '()\n', (132, 134), False, 'import platform\n'), ((1217, 1279), 'os.path.join', 'os.path.join', (['BOOST_DIR', '"""lib"""', '"""libboost_filesystem-mt.dylib"""'], {}), "(BOOST_DIR, 'lib', 'libboost_filesystem-mt.dylib')\n", (1229, 1279), False, 'import os\n'), ((1563, 1613), 'numpy.distutils.misc_util.Configuration', 'Configuration', (['"""carmcmc"""', 'parent_package', 'top_path'], {}), "('carmcmc', parent_package, top_path)\n", (1576, 1613), False, 'from numpy.distutils.misc_util import Configuration\n')] |
import paho.mqtt.client as mqtt
import time
import json
import requests
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
# Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
client.subscribe("rt_message")
# The callback for when a PUBLISH message is received from the server.
def on_message(client, userdata, msg):
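    """Decode the JSON payload; for "known person" events, resolve each person's
    name through the REST API and print the enriched record."""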
print("-"*10)
# print(msg.topic+" "+str(msg.payload))
try:
msg_dict = json.loads(msg.payload.decode())
# print(msg_dict)
# for k, v in msg_dict.items():
# print(k, v)
status = msg_dict.get("status")
persons = msg_dict.get("persons")
# person_id = msg_dict.get("person_id")
# group_id = msg_dict.get("persons")[0].get("group_id")
# print(status, person_id, group_id)
if status == "known person":
for person in persons:
person_id = person.get("id")
group_id = person.get("group_id")
url = "http://workaihost.tiegushi.com/restapi/get_name_by_faceid?group_id={}&face_id={}".format(group_id, person_id)
response = requests.get(url)
name = json.loads(response.text).get("name")
person["name"] = name
person["status"] = "known person"
from main import setValue
sign = setValue(person_id)
if sign:
print("*****", name, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(person["current_ts"]/1000)))
print(msg.topic+" "+json.dumps(person))
else:
print(msg.topic+" "+json.dumps(msg_dict))
except Exception as e:
print(e)
client = mqtt.Client()
client.on_connect = on_connect
client.on_message = on_message
if __name__ == "__main__":
    # change the IP to the address of the box (device)
client.connect("192.168.31.199", 1883, 60)
# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
client.loop_forever()
| [
"time.localtime",
"json.loads",
"paho.mqtt.client.Client",
"json.dumps",
"requests.get",
"main.setValue"
] | [((1885, 1898), 'paho.mqtt.client.Client', 'mqtt.Client', ([], {}), '()\n', (1896, 1898), True, 'import paho.mqtt.client as mqtt\n'), ((1305, 1322), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1317, 1322), False, 'import requests\n'), ((1538, 1557), 'main.setValue', 'setValue', (['person_id'], {}), '(person_id)\n', (1546, 1557), False, 'from main import setValue\n'), ((1809, 1829), 'json.dumps', 'json.dumps', (['msg_dict'], {}), '(msg_dict)\n', (1819, 1829), False, 'import json\n'), ((1346, 1371), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1356, 1371), False, 'import json\n'), ((1659, 1702), 'time.localtime', 'time.localtime', (["(person['current_ts'] / 1000)"], {}), "(person['current_ts'] / 1000)\n", (1673, 1702), False, 'import time\n'), ((1743, 1761), 'json.dumps', 'json.dumps', (['person'], {}), '(person)\n', (1753, 1761), False, 'import json\n')] |
import srt
import random
from datetime import timedelta, datetime
from typing import List, Union
from srt import Subtitle
from .context import Context
class Timestamp:
def __init__(self, source_id: str, subtitle: srt.Subtitle, time: timedelta = None):
self.source_id = source_id
self.subtitle = subtitle
self.time = time
def get_text(self):
return self.subtitle.content
def get_time_seconds(self):
# Gets the time of the timestamp in seconds. If the time is not set, it picks a random time from the subtitle.
if self.time is None:
return random.uniform(
self.subtitle.start.total_seconds(),
self.subtitle.end.total_seconds(),
)
else:
return self.time.total_seconds()
def pick_timestamp(context: Context) -> Timestamp:
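    """Pick a random source, parse its SRT file and return a Timestamp for a random subtitle."""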
source_id = random.choice(list(context.config['sources'].keys()))
context.logger.debug(f"Picked source {source_id}, reading SRT")
with open(context.config['sources'][source_id]['srt']) as f:
srt_data = f.read()
subs = list(srt.parse(srt_data))
context.logger.debug("SRT parsed successfully")
subtitle = random.choice(subs)
return Timestamp(source_id, subtitle)
def get_timestamp_by_params(context: Context, timestamp: timedelta = None, id: str = None, tag: str = None):
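    """Resolve a source by id, by tag, or at random, then return a Timestamp for the
    given timedelta (or for a random subtitle when no timestamp is given)."""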
if id is not None:
source_key = context.get_source_key_by_id(id)
context.logger.debug(f"Picked source {source_key} by ID, reading SRT")
elif tag is not None:
source_keys = context.get_sources_by_tag(tag)
if (len(source_keys) > 0):
source_key = random.choice(source_keys)
else:
source_key = None
context.logger.debug(f"Picked source {source_key} by TAG, reading SRT")
else:
source_key = random.choice(list(context.config['sources'].keys()))
context.logger.debug(f"Picked source {source_key} at RANDOM, reading SRT")
if source_key is None:
return None
with open(context.config['sources'][source_key]['srt']) as f:
srt_data = f.read()
subs = list(srt.parse(srt_data))
context.logger.debug("SRT parsed successfully")
if timestamp is not None:
sub = get_subtitle_by_timedelta(subs, timestamp)
else:
sub = random.choice(subs)
if sub is not None:
return Timestamp(source_key, sub, timestamp)
else:
return None
def get_subtitle_by_timedelta(subs: List[Subtitle], timestamp: timedelta) -> Union[Subtitle, None]:
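    """Return the subtitle whose start/end range contains the timestamp, or None if none matches."""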
for sub in subs:
if sub.start <= timestamp <= sub.end:
return sub
return None
| [
"random.choice",
"srt.parse"
] | [((1197, 1216), 'random.choice', 'random.choice', (['subs'], {}), '(subs)\n', (1210, 1216), False, 'import random\n'), ((1109, 1128), 'srt.parse', 'srt.parse', (['srt_data'], {}), '(srt_data)\n', (1118, 1128), False, 'import srt\n'), ((2144, 2163), 'srt.parse', 'srt.parse', (['srt_data'], {}), '(srt_data)\n', (2153, 2163), False, 'import srt\n'), ((2328, 2347), 'random.choice', 'random.choice', (['subs'], {}), '(subs)\n', (2341, 2347), False, 'import random\n'), ((1666, 1692), 'random.choice', 'random.choice', (['source_keys'], {}), '(source_keys)\n', (1679, 1692), False, 'import random\n')] |
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.urls import reverse
from django.views import generic
from django.utils import timezone
from .models import Article, Comment
class IndexView(generic.ListView):
template_name = "articles/index.html"
context_object_name = "article_list"
def get_queryset(self):
"""
Return all articles (not including those
set to be published in the future)
"""
return Article.objects.filter(
date_published__lte=timezone.now()
).order_by("-date_published")
class ArticleView(generic.DetailView):
model = Article
template_name = "articles/article.html"
class CommentsView(generic.ListView):
template_name = "articles/comment_list.html"
context_object_name = "comment_list"
# Comment on article
def comment(request, article_id):
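    """Create a comment (optionally a reply to a parent comment) on an article from
    POST data, then redirect back to the article page."""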
article = get_object_or_404(Article, pk=article_id)
try:
alias = request.POST["alias"]
text = request.POST["text"]
        # require both a name and comment text
        if len(alias) == 0 or len(text) == 0:
            raise ValueError("Name or comment text empty")
except KeyError:
# Post data not sent
context = {"article": article, "danger_message": "There was a problem submitting your comment."}
return render(request, "articles/article.html", context)
except ValueError:
# Comment form not filled in
context = {"article": article, "warning_message": "Comment information not filled in."}
return render(request, "articles/article.html", context)
else:
parent = None
if "parent" in request.POST:
parent = get_object_or_404(Comment, pk=request.POST["parent"])
comment = Comment.objects.create_comment(
article,
parent,
alias,
text
)
comment.save()
# Return to article after creating comment
return HttpResponseRedirect(reverse("articles:article", args=(article.id,)))
| [
"django.shortcuts.render",
"django.utils.timezone.now",
"django.shortcuts.get_object_or_404",
"django.urls.reverse"
] | [((945, 986), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Article'], {'pk': 'article_id'}), '(Article, pk=article_id)\n', (962, 986), False, 'from django.shortcuts import get_object_or_404, render\n'), ((1348, 1397), 'django.shortcuts.render', 'render', (['request', '"""articles/article.html"""', 'context'], {}), "(request, 'articles/article.html', context)\n", (1354, 1397), False, 'from django.shortcuts import get_object_or_404, render\n'), ((1569, 1618), 'django.shortcuts.render', 'render', (['request', '"""articles/article.html"""', 'context'], {}), "(request, 'articles/article.html', context)\n", (1575, 1618), False, 'from django.shortcuts import get_object_or_404, render\n'), ((1709, 1762), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Comment'], {'pk': "request.POST['parent']"}), "(Comment, pk=request.POST['parent'])\n", (1726, 1762), False, 'from django.shortcuts import get_object_or_404, render\n'), ((2011, 2058), 'django.urls.reverse', 'reverse', (['"""articles:article"""'], {'args': '(article.id,)'}), "('articles:article', args=(article.id,))\n", (2018, 2058), False, 'from django.urls import reverse\n'), ((586, 600), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (598, 600), False, 'from django.utils import timezone\n')] |
import numpy as np
from copy import deepcopy
class Optimizer:
""" Parent class for optimizer objects """
def compute_gradients(self, layers, computations, y, y_hat):
pass
class MSEStochasticGradientDescent(Optimizer):
""" Stochastic Gradient Descent Optimizer using Mean Squared Error """
def __init__(self):
pass
def compute_gradients(self, layers, computations, y, y_hat):
""" Compute respective gradients at each layer """
gradients = list()
_error = np.multiply(np.subtract(y_hat, y), layers[-1]._activation_function(y_hat, derivative=True)) # potentially have to swap
for _layer in reversed(range(len(layers))):
gradients.append(layers[_layer].gradient(_error, computations[_layer]))
if _layer != 0:
_error = np.multiply(np.matmul(_error, layers[_layer]._weights), layers[_layer-1]._activation_function(computations[_layer]))
return deepcopy(gradients)
| [
"numpy.matmul",
"numpy.subtract",
"copy.deepcopy"
] | [((959, 978), 'copy.deepcopy', 'deepcopy', (['gradients'], {}), '(gradients)\n', (967, 978), False, 'from copy import deepcopy\n'), ((531, 552), 'numpy.subtract', 'np.subtract', (['y_hat', 'y'], {}), '(y_hat, y)\n', (542, 552), True, 'import numpy as np\n'), ((839, 881), 'numpy.matmul', 'np.matmul', (['_error', 'layers[_layer]._weights'], {}), '(_error, layers[_layer]._weights)\n', (848, 881), True, 'import numpy as np\n')] |
# MIT License
# Copyright (c) 2020 <NAME>, <NAME>, <NAME>, <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""
Conversion from x-axis pointing up (in map img), 0 to pi c.c.w., and 0 to -pi c.w. convention to x-axis pointing right (im map img), 0 to 2pi c.c.w. convention.
Use either on one csv file or all csv file in a directory
Author: <NAME>
"""
import argparse
import numpy as np
import pandas as pd
import glob
import csv
import os
"""
Script that convert coordinate system conventions
Args:
--pattern (str): pattern for glob, converts all matching file
"""
parser = argparse.ArgumentParser()
parser.add_argument('--pattern', default='*/*raceline.csv')
args = parser.parse_args()
all_files = glob.glob(args.pattern)
print('Converting following files:')
for name in all_files:
print(name)
input('Press ENTER to proceed, CTRL+C to stop.')
for file in all_files:
# get file name and extension
file_name, file_ext = os.path.splitext(file)
# create new file name
new_file = file_name + '_newconv' + file_ext
print('Working on: ' + file)
# keep original headers
headers = list(csv.reader(open(file)))[0:3]
# csv to dataframe
df = pd.read_csv(file, sep=';', header=2)
# converting the headings column
heading_np = df[' psi_rad'].to_numpy()
heading_np += np.pi/2
heading_np[heading_np > 2*np.pi] -= 2*np.pi
heading_np[heading_np < 0] += 2*np.pi
    # write the converted headings back into the dataframe
    df[' psi_rad'] = heading_np
# save to new file
f = open(new_file, 'w')
csv_writer = csv.writer(f)
csv_writer.writerows(headers)
f.close()
df.to_csv(new_file, sep=';', header=False, index=False, float_format='%.7f', mode='a')
print('New convention saved to: ' + new_file)
print('All files done.') | [
"argparse.ArgumentParser",
"pandas.read_csv",
"csv.writer",
"os.path.splitext",
"glob.glob"
] | [((1641, 1666), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1664, 1666), False, 'import argparse\n'), ((1771, 1794), 'glob.glob', 'glob.glob', (['args.pattern'], {}), '(args.pattern)\n', (1780, 1794), False, 'import glob\n'), ((2012, 2034), 'os.path.splitext', 'os.path.splitext', (['file'], {}), '(file)\n', (2028, 2034), False, 'import os\n'), ((2275, 2311), 'pandas.read_csv', 'pd.read_csv', (['file'], {'sep': '""";"""', 'header': '(2)'}), "(file, sep=';', header=2)\n", (2286, 2311), True, 'import pandas as pd\n'), ((2632, 2645), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (2642, 2645), False, 'import csv\n')] |
#!/usr/bin/env python
import copy
import json
from os import path
BASE = '../particle-clicker/json/'
def json_from_file(filename):
""" Load a JSON object from a file. """
with open(filename) as f:
return json.load(f)
def format_number(num, digits=0):
""" Give long numbers an SI prefix. """
formatstring = '{{:.{}f}}{{}}'.format(digits)
prefixes = [(1e24, 'Y'), (1e21, 'Z'), (1e18, 'E'), (1e15, 'P'),
(1e12, 'T'), (1e9, 'G'), (1e6, 'M'), (1e3, 'k'), (1, '')]
for magnitude, label in prefixes:
if num >= magnitude:
return formatstring.format(num / magnitude, label)
def map_out(prototype, items, levels):
""" Magic! """
return [{k: v.format(level=level, levelstring=format_number(level), **item)
for k, v in prototype.items()}
for item in items for level in levels]
objects = {
k: json_from_file(path.join(BASE, v)) for k, v in {
'workers': 'workers.json',
'upgrades': 'upgrades.json',
'research': 'research.json'
}.items()
}
researchPrototype = {
'key': 'achievement-{key}-{level}',
'description': '{name} research level {level}!',
'icon': 'fa-cogs',
'targetKey': '{key}',
'targetProperty': 'level',
'threshold': '{level}'
}
discoveryPrototype = copy.copy(researchPrototype)
discoveryPrototype['description'] = '{name} discovery!'
workersPrototype = {
'key': 'achievement-{key}-{level}',
'description': '{level} {name} working for you!',
'icon': 'fa-users',
'targetKey': '{key}',
'targetProperty': 'hired',
'threshold': '{level}'
}
firstWorkerPrototype = copy.copy(workersPrototype)
firstWorkerPrototype['description'] = 'The first {name} hired!'
firstWorkerPrototype['icon'] = 'fa-user'
clicksPrototype = {
'key': 'achievement-clicks-{levelstring}',
'description': '{levelstring} clicks!',
'icon': 'fa-hand-o-up',
'targetKey': 'lab',
'targetProperty': 'clicks',
'threshold': '{level}'
}
firstClickPrototype = copy.copy(clicksPrototype)
firstClickPrototype['description'] = 'Your first click!'
dataCollectedPrototype = {
'key': 'achievement-data-collected-{levelstring}',
'description': '{levelstring} data collected!',
'icon': 'fa-database',
'targetKey': 'lab',
'targetProperty': 'dataCollected',
'threshold': '{level}'
}
fundingCollectedPrototype = {
'key': 'achievement-funding-collected-{levelstring}',
'description': 'JTN {levelstring} funding gathered!',
'icon': 'fa-money',
'targetKey': 'lab',
'targetProperty': 'moneyCollected',
'threshold': '{level}'
}
dataProcessedPrototype = {
'key': 'achievement-data-processed-{levelstring}',
'description': '{levelstring} data processed!',
'icon': 'fa-hdd',
'targetKey': 'lab',
'targetProperty': 'dataSpent',
'threshold': '{level}'
}
fundingSpentPrototype = {
'key': 'achievement-funding-spent-{levelstring}',
'description': 'JTN {levelstring} funding spent!',
'icon': 'fa-money',
'targetKey': 'lab',
'targetProperty': 'moneySpent',
'threshold': '{level}'
}
achievements = []
achievements += map_out(discoveryPrototype, objects['research'], [1])
achievements += map_out(researchPrototype, objects['research'],
[5, 25, 50, 100])
achievements += map_out(firstWorkerPrototype, objects['workers'], [1])
achievements += map_out(workersPrototype, objects['workers'], [5, 25, 50, 100])
achievements += map_out(firstClickPrototype, [{}], [1])
achievements += map_out(clicksPrototype, [{}],
[100, 1000, 10000, 100000, 1000000])
achievements += map_out(dataCollectedPrototype, [{}],
[100, 10000, int(1e6), int(1e8), int(1e10)])
achievements += map_out(fundingCollectedPrototype, [{}],
[100, 10000, int(1e6), int(1e8), int(1e10)])
achievements += map_out(dataProcessedPrototype, [{}],
[100, 10000, int(1e6), int(1e8), int(1e10)])
achievements += map_out(fundingSpentPrototype, [{}],
[100, 10000, int(1e6), int(1e8), int(1e10)])
# fix thresholds
for achievement in achievements:
achievement['threshold'] = int(achievement['threshold'])
print(json.dumps(achievements, indent=' '))
| [
"json.load",
"copy.copy",
"json.dumps",
"os.path.join"
] | [((1314, 1342), 'copy.copy', 'copy.copy', (['researchPrototype'], {}), '(researchPrototype)\n', (1323, 1342), False, 'import copy\n'), ((1648, 1675), 'copy.copy', 'copy.copy', (['workersPrototype'], {}), '(workersPrototype)\n', (1657, 1675), False, 'import copy\n'), ((2028, 2054), 'copy.copy', 'copy.copy', (['clicksPrototype'], {}), '(clicksPrototype)\n', (2037, 2054), False, 'import copy\n'), ((4247, 4284), 'json.dumps', 'json.dumps', (['achievements'], {'indent': '""" """'}), "(achievements, indent=' ')\n", (4257, 4284), False, 'import json\n'), ((225, 237), 'json.load', 'json.load', (['f'], {}), '(f)\n', (234, 237), False, 'import json\n'), ((910, 928), 'os.path.join', 'path.join', (['BASE', 'v'], {}), '(BASE, v)\n', (919, 928), False, 'from os import path\n')] |
import numpy as np
import cv2
import matplotlib.pyplot as plt
PASS_COUNT = 8
FILE_NAME = './Passes/Checkerboard64_'
EXTENSION = '.jpg'
shift = 3
mask = 1 << shift
crop = 3
def runPass(canvas,p):
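    # Stamp an 8x8-block checkerboard into bit plane `shift` of the canvas and save the
    # result as a JPEG for pass p (presumably to study how lossy compression degrades that plane).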
sizeX = canvas.shape[0]
sizeY = canvas.shape[1]
shape = [[1 if (i + j) % 2 == 0 else 0 for j in range(8)] for i in range(8)]
shape = np.array(shape)
shape *= mask
#print(shape)
def fillBlock(img,x,y,c):
for i in range(x,x+8,1):
for j in range(y,y+8,1):
img[i,j] &= (255 - mask)
img[i,j] |= c
for i in range(8):
for j in range(8):
fillBlock(canvas,i*8,j*8,shape[i,j])
#cv2.imwrite('Checkerboard64.png',canvas)
cv2.imwrite(FILE_NAME+str(p)+EXTENSION,canvas)
#lossy = cv2.imread('./Blank64.png')
lossy = cv2.imread('./Image64.png')
lossy = cv2.cvtColor(lossy,cv2.COLOR_BGR2GRAY)
sizeX = lossy.shape[0]
sizeY = lossy.shape[1]
for i in range(PASS_COUNT):
runPass(lossy,i)
lossy = cv2.imread(FILE_NAME+str(i)+EXTENSION)
lossy = cv2.cvtColor(lossy,cv2.COLOR_BGR2GRAY)
    plane = (lossy & mask) << (7 - shift)
#plane = cv2.medianBlur(plane,3)
cv2.imwrite(FILE_NAME+str(i)+'_extract'+EXTENSION,plane)
'''print(lossy[0,8+crop:24+crop] & mask)
cv2.imwrite('Checkerboard64_Cropped.jpg',canvas[crop:-crop,crop:-crop])
lossy = cv2.imread('Checkerboard64_Cropped.jpg')
lossy = cv2.cvtColor(lossy,cv2.COLOR_BGR2GRAY)
print(lossy[0,8:24] & mask)'''
'''
x = [i for i in range(sizeX-8)]
y = []
dy = [0]
ddy = [0]
for i in x:
window = lossy[0,i:i+8] & mask
y.append(np.mean(window))
if i > 0:
dy.append(y[-1] - y [-2])
if i > 1:
ddy.append(dy[-1] - dy [-2])
#print(i,window,np.mean(window))
ddy.append(0)
plt.plot(x,y)
#plt.plot(x,dy)
plt.plot(x,ddy)
plt.show()
'''
| [
"numpy.array",
"cv2.imread",
"cv2.cvtColor"
] | [((827, 854), 'cv2.imread', 'cv2.imread', (['"""./Image64.png"""'], {}), "('./Image64.png')\n", (837, 854), False, 'import cv2\n'), ((863, 902), 'cv2.cvtColor', 'cv2.cvtColor', (['lossy', 'cv2.COLOR_BGR2GRAY'], {}), '(lossy, cv2.COLOR_BGR2GRAY)\n', (875, 902), False, 'import cv2\n'), ((351, 366), 'numpy.array', 'np.array', (['shape'], {}), '(shape)\n', (359, 366), True, 'import numpy as np\n'), ((1062, 1101), 'cv2.cvtColor', 'cv2.cvtColor', (['lossy', 'cv2.COLOR_BGR2GRAY'], {}), '(lossy, cv2.COLOR_BGR2GRAY)\n', (1074, 1101), False, 'import cv2\n')] |
from PyQt5 import QtGui
from PyQt5 import QtWidgets
from PyQt5 import Qt
from PyQt5 import QtCore
from matplotlib.backends.backend_qt5agg \
import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar
class MplCanvas(FigureCanvas):
def __init__(self):
self.fig = Figure()
self.ax = self.fig.add_subplot(111)
FigureCanvas.__init__(self, self.fig)
FigureCanvas.setSizePolicy(self, Qt.QSizePolicy.Expanding, Qt.QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
class MplWidget(QtWidgets.QWidget):
def __init__(self, parent = None):
QtWidgets.QWidget.__init__(self, parent)
self.canvas = MplCanvas()
self.toolbar = NavigationToolbar(self.canvas, parent)
self.vbl = QtWidgets.QVBoxLayout()
self.vbl.addWidget(self.canvas)
self.vbl.addWidget(self.toolbar)
self.setLayout(self.vbl)
| [
"matplotlib.backends.backend_qt5agg.NavigationToolbar2QT",
"matplotlib.figure.Figure",
"matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.__init__",
"matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.setSizePolicy",
"PyQt5.QtWidgets.QWidget.__init__",
"matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.updateGeometry",
"PyQt5.QtWidgets.QVBoxLayout"
] | [((374, 382), 'matplotlib.figure.Figure', 'Figure', ([], {}), '()\n', (380, 382), False, 'from matplotlib.figure import Figure\n'), ((423, 460), 'matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.__init__', 'FigureCanvas.__init__', (['self', 'self.fig'], {}), '(self, self.fig)\n', (444, 460), True, 'from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\n'), ((463, 552), 'matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.setSizePolicy', 'FigureCanvas.setSizePolicy', (['self', 'Qt.QSizePolicy.Expanding', 'Qt.QSizePolicy.Expanding'], {}), '(self, Qt.QSizePolicy.Expanding, Qt.QSizePolicy.\n Expanding)\n', (489, 552), True, 'from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\n'), ((550, 583), 'matplotlib.backends.backend_qt5agg.FigureCanvasQTAgg.updateGeometry', 'FigureCanvas.updateGeometry', (['self'], {}), '(self)\n', (577, 583), True, 'from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas\n'), ((659, 699), 'PyQt5.QtWidgets.QWidget.__init__', 'QtWidgets.QWidget.__init__', (['self', 'parent'], {}), '(self, parent)\n', (685, 699), False, 'from PyQt5 import QtWidgets\n'), ((745, 783), 'matplotlib.backends.backend_qt5agg.NavigationToolbar2QT', 'NavigationToolbar', (['self.canvas', 'parent'], {}), '(self.canvas, parent)\n', (762, 783), True, 'from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as NavigationToolbar\n'), ((797, 820), 'PyQt5.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', ([], {}), '()\n', (818, 820), False, 'from PyQt5 import QtWidgets\n')] |
import sqlite3
import io
import json
import os
import shutil
import textwrap
from packaging import version
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
def get_Turbo_Battery(files_found, report_folder, seeker, wrap_text):
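    """Parse turbo.db and bluetooth.db, then report phone battery and Bluetooth
    device battery events found in each database."""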
source_file_bluetooth = ''
source_file_turbo = ''
bluetooth_db = ''
turbo_db = ''
for file_found in files_found:
file_name = str(file_found)
if file_name.lower().endswith('turbo.db'):
turbo_db = str(file_found)
source_file_bluetooth = file_found.replace(seeker.directory, '')
if file_name.lower().endswith('bluetooth.db'):
bluetooth_db = str(file_found)
source_file_turbo = file_found.replace(seeker.directory, '')
db = open_sqlite_db_readonly(turbo_db)
cursor = db.cursor()
cursor.execute('''
select
case timestamp_millis
when 0 then ''
else datetime(timestamp_millis/1000,'unixepoch')
End as D_T,
battery_level,
case charge_type
when 0 then ''
when 1 then 'Charging Rapidly'
when 2 then 'Charging Slowly'
when 3 then 'Charging Wirelessly'
End as C_Type,
case battery_saver
when 2 then ''
when 1 then 'Enabled'
End as B_Saver,
timezone
from battery_event
''')
all_rows = cursor.fetchall()
usageentries = len(all_rows)
if usageentries > 0:
report = ArtifactHtmlReport('Turbo - Phone Battery')
report.start_artifact_report(report_folder, 'Turbo - Phone Battery')
report.add_script()
data_headers = ('Timestamp','Battery Level','Charge Type','Battery Saver','Timezone') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
data_list = []
for row in all_rows:
data_list.append((row[0],row[1],row[2],row[3],row[4]))
report.write_artifact_data_table(data_headers, data_list, source_file_turbo)
report.end_artifact_report()
tsvname = f'Turbo - Phone Battery'
tsv(report_folder, data_headers, data_list, tsvname)
tlactivity = f'Turbo - Phone Battery'
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc('No Turbo - Phone Battery data available')
db.close()
db = open_sqlite_db_readonly(bluetooth_db)
cursor = db.cursor()
cursor.execute('''
select
datetime(timestamp_millis/1000,'unixepoch'),
bd_addr,
device_identifier,
battery_level,
volume_level,
time_zone
from battery_event
join device_address on battery_event.device_idx = device_address.device_idx
''')
all_rows = cursor.fetchall()
usageentries = len(all_rows)
if usageentries > 0:
report = ArtifactHtmlReport('Turbo - Bluetooth Device Info')
report.start_artifact_report(report_folder, 'Turbo - Bluetooth Device Info')
report.add_script()
data_headers = ('Timestamp','BT Device MAC Address','BT Device ID','Battery Level','Volume Level','Timezone') # Don't remove the comma, that is required to make this a tuple as there is only 1 element
data_list = []
for row in all_rows:
data_list.append((row[0],row[1],row[2],row[3],row[4],row[5]))
report.write_artifact_data_table(data_headers, data_list, source_file_bluetooth)
report.end_artifact_report()
tsvname = f'Turbo - Bluetooth Device Info'
tsv(report_folder, data_headers, data_list, tsvname)
tlactivity = f'Turbo - Bluetooth Device Info'
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc('No Turbo - Bluetooth Device Info data available')
db.close()
return
| [
"scripts.ilapfuncs.timeline",
"scripts.ilapfuncs.logfunc",
"scripts.ilapfuncs.tsv",
"scripts.ilapfuncs.open_sqlite_db_readonly",
"scripts.artifact_report.ArtifactHtmlReport"
] | [((866, 899), 'scripts.ilapfuncs.open_sqlite_db_readonly', 'open_sqlite_db_readonly', (['turbo_db'], {}), '(turbo_db)\n', (889, 899), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((2389, 2426), 'scripts.ilapfuncs.open_sqlite_db_readonly', 'open_sqlite_db_readonly', (['bluetooth_db'], {}), '(bluetooth_db)\n', (2412, 2426), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((1473, 1516), 'scripts.artifact_report.ArtifactHtmlReport', 'ArtifactHtmlReport', (['"""Turbo - Phone Battery"""'], {}), "('Turbo - Phone Battery')\n", (1491, 1516), False, 'from scripts.artifact_report import ArtifactHtmlReport\n'), ((2109, 2161), 'scripts.ilapfuncs.tsv', 'tsv', (['report_folder', 'data_headers', 'data_list', 'tsvname'], {}), '(report_folder, data_headers, data_list, tsvname)\n', (2112, 2161), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((2225, 2285), 'scripts.ilapfuncs.timeline', 'timeline', (['report_folder', 'tlactivity', 'data_list', 'data_headers'], {}), '(report_folder, tlactivity, data_list, data_headers)\n', (2233, 2285), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((2304, 2354), 'scripts.ilapfuncs.logfunc', 'logfunc', (['"""No Turbo - Phone Battery data available"""'], {}), "('No Turbo - Phone Battery data available')\n", (2311, 2354), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((2843, 2894), 'scripts.artifact_report.ArtifactHtmlReport', 'ArtifactHtmlReport', (['"""Turbo - Bluetooth Device Info"""'], {}), "('Turbo - Bluetooth Device Info')\n", (2861, 2894), False, 'from scripts.artifact_report import ArtifactHtmlReport\n'), ((3538, 3590), 'scripts.ilapfuncs.tsv', 'tsv', (['report_folder', 'data_headers', 'data_list', 'tsvname'], {}), '(report_folder, data_headers, data_list, tsvname)\n', (3541, 3590), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((3662, 3722), 'scripts.ilapfuncs.timeline', 'timeline', (['report_folder', 'tlactivity', 'data_list', 'data_headers'], {}), '(report_folder, tlactivity, data_list, data_headers)\n', (3670, 3722), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n'), ((3741, 3799), 'scripts.ilapfuncs.logfunc', 'logfunc', (['"""No Turbo - Bluetooth Device Info data available"""'], {}), "('No Turbo - Bluetooth Device Info data available')\n", (3748, 3799), False, 'from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly\n')] |
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import sklearn
import pandas as pd
import os
import sys
import time
import tensorflow as tf
from sklearn.preprocessing import StandardScaler  # used for normalization
from sklearn.datasets import fetch_california_housing
from sklearn.model_selection import train_test_split
from tensorflow import keras
from pprint import pprint
print(tf.__version__)
print(sys.version_info)
for module in mpl, np, pd, sklearn, tf, keras:
print(module.__name__, module.__version__)
housing = fetch_california_housing()
#print the description of the california housing dataset
print(housing.DESCR)
print(housing.data.shape)
print(housing.target.shape)
pprint(housing.data[:5])
pprint(housing.target[:5])
x_train_all, x_test, y_train_all, y_test = train_test_split(housing.data, housing.target, random_state = 7, test_size = 0.1)
x_train, x_valid, y_train, y_valid = train_test_split(x_train_all, y_train_all, random_state = 11)
print(x_train.shape, y_train.shape)
print(x_valid.shape, y_valid.shape)
print(x_test.shape, y_test.shape)
#nomalization
scalar = StandardScaler()
x_train_scaled = scalar.fit_transform(x_train)
x_valid_scaled = scalar.transform(x_valid)
x_test_scaled = scalar.transform(x_test)
print("x_train.shapep[1:]: ", x_train.shape[1:])
# Functional API implementation
input = keras.layers.Input(shape = x_train.shape[1:])
hidden1 = keras.layers.Dense(30, activation = 'relu')(input)
hidden2 = keras.layers.Dense(30, activation = 'relu')(hidden1)
concat = keras.layers.concatenate([input, hidden2])
output = keras.layers.Dense(1)(concat)
model = keras.models.Model(inputs = [input], outputs = [output])
"""
# Subclass (model subclassing) API implementation
class WideDeepModel(keras.models.Model):
def __init__(self):
super(WideDeepModel, self).__init__()
        # define the model's layers
self.hidden1_layer = keras.layers.Dense(30, activation = 'relu')
self.hidden2_layer = keras.layers.Dense(30, activation = 'relu')
self.output_layer = keras.layers.Dense(1)
def call(self, input):
        # run the forward computation of the model
hidden1 = self.hidden1_layer(input)
hidden2 = self.hidden2_layer(hidden1)
concat = keras.layers.concatenate([input, hidden2])
output = self.output_layer(concat)
return output
model = WideDeepModel()
model.build(input_shape = (None, 8))
"""
model.summary()
model.compile(loss = "mean_squared_error", optimizer = "sgd")
callbacks = [keras.callbacks.EarlyStopping(patience = 5, min_delta = 1e-2)]
history = model.fit(x_train_scaled, y_train, validation_data = (x_valid_scaled, y_valid), epochs = 100, callbacks = callbacks)
def plot_learning_curves(history):
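    """Plot the Keras training/validation loss history on a 0-1 y-axis."""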
pd.DataFrame(history.history).plot(figsize=(8,5))
plt.grid(True)
plt.gca().set_ylim(0,1)
plt.show()
plot_learning_curves(history)
model.evaluate(x_test_scaled, y_test)
| [
"tensorflow.keras.layers.Input",
"matplotlib.pyplot.grid",
"pandas.DataFrame",
"sklearn.model_selection.train_test_split",
"matplotlib.pyplot.gca",
"sklearn.preprocessing.StandardScaler",
"tensorflow.keras.layers.concatenate",
"tensorflow.keras.callbacks.EarlyStopping",
"tensorflow.keras.layers.Dense",
"sklearn.datasets.fetch_california_housing",
"tensorflow.keras.models.Model",
"pprint.pprint",
"matplotlib.pyplot.show"
] | [((540, 566), 'sklearn.datasets.fetch_california_housing', 'fetch_california_housing', ([], {}), '()\n', (564, 566), False, 'from sklearn.datasets import fetch_california_housing\n'), ((700, 724), 'pprint.pprint', 'pprint', (['housing.data[:5]'], {}), '(housing.data[:5])\n', (706, 724), False, 'from pprint import pprint\n'), ((725, 751), 'pprint.pprint', 'pprint', (['housing.target[:5]'], {}), '(housing.target[:5])\n', (731, 751), False, 'from pprint import pprint\n'), ((795, 872), 'sklearn.model_selection.train_test_split', 'train_test_split', (['housing.data', 'housing.target'], {'random_state': '(7)', 'test_size': '(0.1)'}), '(housing.data, housing.target, random_state=7, test_size=0.1)\n', (811, 872), False, 'from sklearn.model_selection import train_test_split\n'), ((914, 973), 'sklearn.model_selection.train_test_split', 'train_test_split', (['x_train_all', 'y_train_all'], {'random_state': '(11)'}), '(x_train_all, y_train_all, random_state=11)\n', (930, 973), False, 'from sklearn.model_selection import train_test_split\n'), ((1106, 1122), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1120, 1122), False, 'from sklearn.preprocessing import StandardScaler\n'), ((1321, 1364), 'tensorflow.keras.layers.Input', 'keras.layers.Input', ([], {'shape': 'x_train.shape[1:]'}), '(shape=x_train.shape[1:])\n', (1339, 1364), False, 'from tensorflow import keras\n'), ((1500, 1542), 'tensorflow.keras.layers.concatenate', 'keras.layers.concatenate', (['[input, hidden2]'], {}), '([input, hidden2])\n', (1524, 1542), False, 'from tensorflow import keras\n'), ((1590, 1642), 'tensorflow.keras.models.Model', 'keras.models.Model', ([], {'inputs': '[input]', 'outputs': '[output]'}), '(inputs=[input], outputs=[output])\n', (1608, 1642), False, 'from tensorflow import keras\n'), ((1377, 1418), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(30)'], {'activation': '"""relu"""'}), "(30, activation='relu')\n", (1395, 1418), False, 'from tensorflow import keras\n'), ((1438, 1479), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(30)'], {'activation': '"""relu"""'}), "(30, activation='relu')\n", (1456, 1479), False, 'from tensorflow import keras\n'), ((1552, 1573), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {}), '(1)\n', (1570, 1573), False, 'from tensorflow import keras\n'), ((2405, 2462), 'tensorflow.keras.callbacks.EarlyStopping', 'keras.callbacks.EarlyStopping', ([], {'patience': '(5)', 'min_delta': '(0.01)'}), '(patience=5, min_delta=0.01)\n', (2434, 2462), False, 'from tensorflow import keras\n'), ((2690, 2704), 'matplotlib.pyplot.grid', 'plt.grid', (['(True)'], {}), '(True)\n', (2698, 2704), True, 'import matplotlib.pyplot as plt\n'), ((2737, 2747), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2745, 2747), True, 'import matplotlib.pyplot as plt\n'), ((2636, 2665), 'pandas.DataFrame', 'pd.DataFrame', (['history.history'], {}), '(history.history)\n', (2648, 2665), True, 'import pandas as pd\n'), ((2709, 2718), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2716, 2718), True, 'import matplotlib.pyplot as plt\n')] |
from pathlib import Path
from mock import Mock, patch
from pytest import mark
from wordgoal.directory import Directory
@patch("wordgoal.directory.markdown_goal", return_value=600)
@patch("wordgoal.directory.words_in_markdown", return_value=3)
@patch("wordgoal.directory.words_in_text", return_value=7)
def test_analyse_file__markdown(
words_in_text: Mock,
words_in_markdown: Mock,
markdown_goal: Mock,
) -> None:
root = Path(".")
file = root.joinpath("foo.md")
append = Mock()
rows = Mock()
rows.append = append
directory = Directory(root)
with patch.object(directory, "rows", rows):
directory.analyse_file(file)
append.assert_called_with(name="foo.md", current=3, maximum=600)
markdown_goal.assert_called_with(file)
words_in_markdown.assert_called_with(file)
words_in_text.assert_not_called()
@patch("wordgoal.directory.words_in_markdown", return_value=3)
@patch("wordgoal.directory.words_in_text", return_value=7)
def test_analyse_file__text(
words_in_text: Mock,
words_in_markdown: Mock,
) -> None:
root = Path(".")
file = root.joinpath("foo.txt")
append = Mock()
rows = Mock()
rows.append = append
directory = Directory(root)
with patch.object(directory, "rows", rows):
directory.analyse_file(file)
append.assert_called_with(name="foo.txt", current=7, maximum=1000)
words_in_markdown.assert_not_called()
words_in_text.assert_called_with(file)
@patch("wordgoal.directory.words_in_markdown", return_value=3)
@patch("wordgoal.directory.words_in_text", return_value=7)
def test_analyse_file__unhandled(
words_in_text: Mock,
words_in_markdown: Mock,
) -> None:
root = Path(".")
file = root.joinpath("foo.bar")
append = Mock()
rows = Mock()
rows.append = append
directory = Directory(root)
with patch.object(directory, "rows", rows):
directory.analyse_file(file)
append.assert_not_called()
words_in_markdown.assert_not_called()
words_in_text.assert_not_called()
@mark.parametrize(
"directory, name, expect",
[
(Path("."), ".git", True),
(Path("."), "foo", False),
# The "wordgoal" directory has no configuration file, so no objects
# should be ignored.
(Path(".").joinpath("wordgoal"), ".git", False),
],
)
def test_ignore(directory: Path, name: str, expect: bool) -> None:
assert Directory(directory).ignore(name) == expect
def test_root__child() -> None:
root = Directory(Path(__file__).parent.parent)
assert Directory(root.directory.joinpath("wordgoal"), root).root == root.directory
def test_root__root() -> None:
root_path = Path(__file__).parent.parent
root_dir = Directory(root_path)
assert root_dir.root == root_path
assert Directory(root_path.joinpath("wordgoal"), root_dir).root == root_path
@patch("wordgoal.directory.Directory.analyse_file")
def test_walk(analyse_file: Mock) -> None:
root = Path(".")
directory = Directory(root)
with patch("wordgoal.directory.Directory") as directory_maker:
directory.walk()
directory_maker.assert_any_call(
path=root.joinpath("wordgoal"),
parent=directory,
)
analyse_file.assert_any_call(root.joinpath("Pipfile"))
| [
"mock.patch",
"pathlib.Path",
"mock.Mock",
"wordgoal.directory.Directory",
"mock.patch.object"
] | [((124, 183), 'mock.patch', 'patch', (['"""wordgoal.directory.markdown_goal"""'], {'return_value': '(600)'}), "('wordgoal.directory.markdown_goal', return_value=600)\n", (129, 183), False, 'from mock import Mock, patch\n'), ((185, 246), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_markdown"""'], {'return_value': '(3)'}), "('wordgoal.directory.words_in_markdown', return_value=3)\n", (190, 246), False, 'from mock import Mock, patch\n'), ((248, 305), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_text"""'], {'return_value': '(7)'}), "('wordgoal.directory.words_in_text', return_value=7)\n", (253, 305), False, 'from mock import Mock, patch\n'), ((868, 929), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_markdown"""'], {'return_value': '(3)'}), "('wordgoal.directory.words_in_markdown', return_value=3)\n", (873, 929), False, 'from mock import Mock, patch\n'), ((931, 988), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_text"""'], {'return_value': '(7)'}), "('wordgoal.directory.words_in_text', return_value=7)\n", (936, 988), False, 'from mock import Mock, patch\n'), ((1482, 1543), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_markdown"""'], {'return_value': '(3)'}), "('wordgoal.directory.words_in_markdown', return_value=3)\n", (1487, 1543), False, 'from mock import Mock, patch\n'), ((1545, 1602), 'mock.patch', 'patch', (['"""wordgoal.directory.words_in_text"""'], {'return_value': '(7)'}), "('wordgoal.directory.words_in_text', return_value=7)\n", (1550, 1602), False, 'from mock import Mock, patch\n'), ((2882, 2932), 'mock.patch', 'patch', (['"""wordgoal.directory.Directory.analyse_file"""'], {}), "('wordgoal.directory.Directory.analyse_file')\n", (2887, 2932), False, 'from mock import Mock, patch\n'), ((440, 449), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (444, 449), False, 'from pathlib import Path\n'), ((499, 505), 'mock.Mock', 'Mock', ([], {}), '()\n', (503, 505), False, 'from mock import Mock, patch\n'), ((517, 523), 'mock.Mock', 'Mock', ([], {}), '()\n', (521, 523), False, 'from mock import Mock, patch\n'), ((566, 581), 'wordgoal.directory.Directory', 'Directory', (['root'], {}), '(root)\n', (575, 581), False, 'from wordgoal.directory import Directory\n'), ((1094, 1103), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1098, 1103), False, 'from pathlib import Path\n'), ((1154, 1160), 'mock.Mock', 'Mock', ([], {}), '()\n', (1158, 1160), False, 'from mock import Mock, patch\n'), ((1172, 1178), 'mock.Mock', 'Mock', ([], {}), '()\n', (1176, 1178), False, 'from mock import Mock, patch\n'), ((1221, 1236), 'wordgoal.directory.Directory', 'Directory', (['root'], {}), '(root)\n', (1230, 1236), False, 'from wordgoal.directory import Directory\n'), ((1713, 1722), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (1717, 1722), False, 'from pathlib import Path\n'), ((1773, 1779), 'mock.Mock', 'Mock', ([], {}), '()\n', (1777, 1779), False, 'from mock import Mock, patch\n'), ((1791, 1797), 'mock.Mock', 'Mock', ([], {}), '()\n', (1795, 1797), False, 'from mock import Mock, patch\n'), ((1840, 1855), 'wordgoal.directory.Directory', 'Directory', (['root'], {}), '(root)\n', (1849, 1855), False, 'from wordgoal.directory import Directory\n'), ((2739, 2759), 'wordgoal.directory.Directory', 'Directory', (['root_path'], {}), '(root_path)\n', (2748, 2759), False, 'from wordgoal.directory import Directory\n'), ((2987, 2996), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (2991, 2996), False, 'from pathlib import Path\n'), ((3013, 3028), 
'wordgoal.directory.Directory', 'Directory', (['root'], {}), '(root)\n', (3022, 3028), False, 'from wordgoal.directory import Directory\n'), ((591, 628), 'mock.patch.object', 'patch.object', (['directory', '"""rows"""', 'rows'], {}), "(directory, 'rows', rows)\n", (603, 628), False, 'from mock import Mock, patch\n'), ((1246, 1283), 'mock.patch.object', 'patch.object', (['directory', '"""rows"""', 'rows'], {}), "(directory, 'rows', rows)\n", (1258, 1283), False, 'from mock import Mock, patch\n'), ((1865, 1902), 'mock.patch.object', 'patch.object', (['directory', '"""rows"""', 'rows'], {}), "(directory, 'rows', rows)\n", (1877, 1902), False, 'from mock import Mock, patch\n'), ((3039, 3076), 'mock.patch', 'patch', (['"""wordgoal.directory.Directory"""'], {}), "('wordgoal.directory.Directory')\n", (3044, 3076), False, 'from mock import Mock, patch\n'), ((2120, 2129), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (2124, 2129), False, 'from pathlib import Path\n'), ((2155, 2164), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (2159, 2164), False, 'from pathlib import Path\n'), ((2695, 2709), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2699, 2709), False, 'from pathlib import Path\n'), ((2430, 2450), 'wordgoal.directory.Directory', 'Directory', (['directory'], {}), '(directory)\n', (2439, 2450), False, 'from wordgoal.directory import Directory\n'), ((2529, 2543), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (2533, 2543), False, 'from pathlib import Path\n'), ((2295, 2304), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (2299, 2304), False, 'from pathlib import Path\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-05-10 20:23
from __future__ import unicode_literals
import datetime
import django.contrib.postgres.fields
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('meetings_management', '0008_auto_20170510_1640'),
]
operations = [
migrations.AddField(
model_name='meetingroomrequest',
name='amount',
field=models.IntegerField(default=0, verbose_name='amount of people'),
preserve_default=False,
),
migrations.AddField(
model_name='meetingroomrequest',
name='supplies',
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=64), blank=True, default=[], size=None, verbose_name='supplies to use'),
preserve_default=False,
),
migrations.AlterField(
model_name='meetingroom',
name='available_from',
field=models.DateTimeField(default=datetime.datetime(2017, 5, 10, 6, 0, 0, 645211, tzinfo=utc), verbose_name='available from'),
),
migrations.AlterField(
model_name='meetingroom',
name='available_until',
field=models.DateTimeField(default=datetime.datetime(2017, 5, 10, 21, 0, 0, 645331, tzinfo=utc), verbose_name='available until'),
),
]
| [
"datetime.datetime",
"django.db.models.CharField",
"django.db.models.IntegerField"
] | [((515, 578), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)', 'verbose_name': '"""amount of people"""'}), "(default=0, verbose_name='amount of people')\n", (534, 578), False, 'from django.db import migrations, models\n'), ((801, 832), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(64)'}), '(max_length=64)\n', (817, 832), False, 'from django.db import migrations, models\n'), ((1100, 1159), 'datetime.datetime', 'datetime.datetime', (['(2017)', '(5)', '(10)', '(6)', '(0)', '(0)', '(645211)'], {'tzinfo': 'utc'}), '(2017, 5, 10, 6, 0, 0, 645211, tzinfo=utc)\n', (1117, 1159), False, 'import datetime\n'), ((1356, 1416), 'datetime.datetime', 'datetime.datetime', (['(2017)', '(5)', '(10)', '(21)', '(0)', '(0)', '(645331)'], {'tzinfo': 'utc'}), '(2017, 5, 10, 21, 0, 0, 645331, tzinfo=utc)\n', (1373, 1416), False, 'import datetime\n')] |
# coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.balance import Balance # noqa: F401,E501
class Forecast(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'forecast_balance': 'Balance',
'account_id': 'int'
}
attribute_map = {
'forecast_balance': 'forecastBalance',
'account_id': 'accountId'
}
def __init__(self, forecast_balance=None, account_id=None): # noqa: E501
"""Forecast - a model defined in Swagger""" # noqa: E501
self._forecast_balance = None
self._account_id = None
self.discriminator = None
self.forecast_balance = forecast_balance
self.account_id = account_id
@property
def forecast_balance(self):
"""Gets the forecast_balance of this Forecast. # noqa: E501
Balance forecast # noqa: E501
:return: The forecast_balance of this Forecast. # noqa: E501
:rtype: Balance
"""
return self._forecast_balance
@forecast_balance.setter
def forecast_balance(self, forecast_balance):
"""Sets the forecast_balance of this Forecast.
Balance forecast # noqa: E501
:param forecast_balance: The forecast_balance of this Forecast. # noqa: E501
:type: Balance
"""
if forecast_balance is None:
raise ValueError("Invalid value for `forecast_balance`, must not be `None`") # noqa: E501
self._forecast_balance = forecast_balance
@property
def account_id(self):
"""Gets the account_id of this Forecast. # noqa: E501
ID of account to which this entry belongs # noqa: E501
:return: The account_id of this Forecast. # noqa: E501
:rtype: int
"""
return self._account_id
@account_id.setter
def account_id(self, account_id):
"""Sets the account_id of this Forecast.
ID of account to which this entry belongs # noqa: E501
:param account_id: The account_id of this Forecast. # noqa: E501
:type: int
"""
if account_id is None:
raise ValueError("Invalid value for `account_id`, must not be `None`") # noqa: E501
self._account_id = account_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Forecast):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"six.iteritems"
] | [((3160, 3193), 'six.iteritems', 'six.iteritems', (['self.swagger_types'], {}), '(self.swagger_types)\n', (3173, 3193), False, 'import six\n')] |
# -*- coding: utf-8 -*-
from .context import in2dp
from .mock_shellproxy import MockShellProxy
import unittest
class StdoutParserTestSuite(unittest.TestCase):
def setUp(self):
self.mockshell = MockShellProxy()
self.parser = in2dp.StdoutParser()
def test_parse_free_memory(self):
'''Test parsing the stdout obtained from invoking the "free" shell command'''
# stdout string returned from invoking the "free" shell command
stdout = self.mockshell.free_memory()
# parse the stdout into a json object
stats_json = self.parser.parse_free_memory(stdout)
# assert equality with expected values
self.assertEqual(stats_json['total'], 1026800)
self.assertEqual(stats_json['used'], 198248)
self.assertEqual(stats_json['free'], 828552)
self.assertEqual(stats_json['shared'], 1068)
self.assertEqual(stats_json['buffers'], 6952)
self.assertEqual(stats_json['cached'], 71672)
self.assertEqual(stats_json['used_minus_bufferscache'], 119624)
self.assertEqual(stats_json['free_plus_bufferscache'], 907176)
def test_cpu_usage(self):
'''Test parsing the stdout obtained from invoking the "free" shell command'''
# stdout string returned from invoking the "top" shell command
stdout = self.mockshell.cpu_usage()
# parse the stdout into a json object
stats_json = self.parser.parse_cpu_usage(stdout)
# assert equality with expected values
self.assertEqual(stats_json['usr'], 1)
self.assertEqual(stats_json['sys'], 4)
self.assertEqual(stats_json['nic'], 0)
self.assertEqual(stats_json['idle'], 94)
self.assertEqual(stats_json['io'], 0)
self.assertEqual(stats_json['irq'], 0)
self.assertEqual(stats_json['sirq'], 0)
def test_parse_disk_usage(self):
'''Test parsing the stdout obtained from invoking the "df" shell command'''
# stdout string returned from invoking the "df" shell command
stdout = self.mockshell.disk_usage()
# parse the stdout into a json object
stats_json = self.parser.parse_disk_usage(stdout)
# assert equality with expected values
self.assertEqual(stats_json['size'], 3936)
self.assertEqual(stats_json['used'], 2300)
self.assertEqual(stats_json['available'], 1415)
self.assertEqual(stats_json['available_percentage'], 62)
if __name__ == '__main__':
unittest.main() | [
"unittest.main"
] | [((2510, 2525), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2523, 2525), False, 'import unittest\n')] |
import pipinstall
__version__ = pipinstall.__version__
def main():
pipinstall.main(True)
if __name__ == '__main__': main() | [
"pipinstall.main"
] | [((73, 94), 'pipinstall.main', 'pipinstall.main', (['(True)'], {}), '(True)\n', (88, 94), False, 'import pipinstall\n')] |
import config
from pathlib import Path
def test_carpeta_ssh(monkeypatch):
def mock_return(*args, **kwargs):
return Path("/abc")
monkeypatch.setattr(Path, "home", mock_return)
x = config.carpeta_ssh()
assert x == Path("/abc/.ssh") | [
"config.carpeta_ssh",
"pathlib.Path"
] | [((194, 214), 'config.carpeta_ssh', 'config.carpeta_ssh', ([], {}), '()\n', (212, 214), False, 'import config\n'), ((124, 136), 'pathlib.Path', 'Path', (['"""/abc"""'], {}), "('/abc')\n", (128, 136), False, 'from pathlib import Path\n'), ((230, 247), 'pathlib.Path', 'Path', (['"""/abc/.ssh"""'], {}), "('/abc/.ssh')\n", (234, 247), False, 'from pathlib import Path\n')] |
import logging
logger = logging.getLogger('pro.log')
"""
settings的示例:
# 日志
LOGGING = {
'version': 1,
'disable_existing_loggers': False, # 是否禁用logger,建议设置为False
'formatters': { # 日志格式,提供给handler使用,非必须,如果不设置格式,默认只会打印消息体
'verbose': { # 格式名称
# INFO 2018-04-25 15:43:27,586 views 8756 123145350217728 这是一个日志
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
# INFO 这是一个日志
'format': '%(levelname)s %(message)s'
},
'standard': {
# 2018-04-25 16:40:00,195 [Thread-7:123145575223296] [myapp.log:282] [views:user_query_json_get] [INFO]-
# 这是一个日志
'format': '%(asctime)s [%(threadName)s:%(thread)d] [%(name)s:%(lineno)d] [%(module)s:%(funcName)s] [%(levelname)s]- %(message)s'
},
},
'filters': { # 过滤器,提供给handler使用,非必须
'require_debug_true': { # 要求DEBUG=True时才打印日志
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': { # 处理器,设置日志记录方式,必须
'console': { # 处理器名称
'level': 'DEBUG', # 设置级别
'filters': ['require_debug_true'], # 设置过滤器,多个用逗号分割
'class': 'logging.StreamHandler', # 处理器,这里是控制台打印
'formatter': 'verbose' # 设置日志格式
},
'timedRotatingFile': {
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler', # 按时间切割日志
'filename': 'logs/pro.log', # 日志输出文件
'when': 'D', # 按天分割
'backupCount': 0, # 保留日志份数,只保留最后5份,如果都保留,设置为0,默认就是0
'formatter': 'standard', # 使用哪种formatters日志格式
},
},
'loggers': { # 日志记录器
'django.request': {
'handlers': ['timedRotatingFile'],
'level': 'ERROR',
'propagate': False, # 设置为False,表示不像其父级别传递日志内容
},
'pro.log': { # 也可以这样创建logger对象,logging.getLogger('myapp.log')
'handlers': ['timedRotatingFile'],
'level': 'DEBUG', # 这里的日志级别不能低于处理器中设置的日志级别
},
},
}
""" | [
"logging.getLogger"
] | [((25, 53), 'logging.getLogger', 'logging.getLogger', (['"""pro.log"""'], {}), "('pro.log')\n", (42, 53), False, 'import logging\n')] |
# -*- coding: utf-8 -*-
import requests, re, os
from openpyxl import Workbook
import optparse
def getID(url):
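    # Fetch the deal page and pull the numeric id out of its "deal_id=<digits>" parameter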
for match in re.finditer(r"deal_id=[0-9]{4,}", requests.get(url).text):return match.group().split('=')[1]
def crawl(shopID,pages):
comments = []
page = 1
while page <= pages:
print("[+] Page %d saved"%page)
data = requests.get("https://www.nuomi.com/pcindex/main/comment", params = {"dealId" : shopID, "page" : page}).json()
for item in data["data"]["list"]:
comment = {}
comment['name'] = item['nickname'].encode('utf8')
comment['score'] = item['score']
comment['create_time'] = item['create_time']
comment['update_time'] = item['update_time']
comment['content'] = item['content'].encode('utf8')
comment['reply'] = ""
if len(item['reply']) != 0:
for reply in item['reply']:
comment['reply'] = reply['content'].encode('utf8')
break
comments.append(comment)
page += 1
return comments
def save(comments,shopID):
filename = os.getcwd() + os.sep + "NuomiShop%s.xlsx"%shopID
wb = Workbook()
ws = wb.active
ws.cell(row=1, column=1).value = u"create_time"
ws.cell(row=1, column=2).value = u"update_time"
ws.cell(row=1, column=3).value = u"name"
ws.cell(row=1, column=4).value = u"score"
ws.cell(row=1, column=5).value = u"content"
ws.cell(row=1, column=6).value = u"reply"
for i in range(0, len(comments)):
ws.cell(row=i+2, column=1).value = comments[i]['create_time']
ws.cell(row=i+2, column=2).value = comments[i]['update_time']
ws.cell(row=i+2, column=3).value = comments[i]['name']
ws.cell(row=i+2, column=4).value = comments[i]['score']
ws.cell(row=i+2, column=5).value = comments[i]['content']
ws.cell(row=i+2, column=6).value = comments[i]['reply']
if os.path.exists(filename):
os.remove(filename)
wb.save(filename)
print("[:)] All Done!")
print("[!] Saved to %s"%filename)
def main():
parser = optparse.OptionParser('usage %prog -u'+\
'<shop url> -p <pages>')
parser.add_option('-u',dest='shopURL',type='string',\
help='specify shop url')
parser.add_option('-p',dest='pages',type='int',\
help='specify pages to crawl')
(options,args) = parser.parse_args()
shopURL = options.shopURL
pages = options.pages
    if pages is None or shopURL is None:
print('[-] You must specify a shopURL and pages to crawl.')
exit(0)
shopID = getID(shopURL)
comments = crawl(shopID,pages)
save(comments,shopID)
if __name__ =="__main__":
main() | [
"os.path.exists",
"optparse.OptionParser",
"requests.get",
"os.getcwd",
"openpyxl.Workbook",
"os.remove"
] | [((1212, 1222), 'openpyxl.Workbook', 'Workbook', ([], {}), '()\n', (1220, 1222), False, 'from openpyxl import Workbook\n'), ((1983, 2007), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (1997, 2007), False, 'import requests, re, os\n'), ((2161, 2226), 'optparse.OptionParser', 'optparse.OptionParser', (["('usage %prog -u' + '<shop url> -p <pages>')"], {}), "('usage %prog -u' + '<shop url> -p <pages>')\n", (2182, 2226), False, 'import optparse\n'), ((2017, 2036), 'os.remove', 'os.remove', (['filename'], {}), '(filename)\n', (2026, 2036), False, 'import requests, re, os\n'), ((162, 179), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (174, 179), False, 'import requests, re, os\n'), ((1154, 1165), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1163, 1165), False, 'import requests, re, os\n'), ((358, 461), 'requests.get', 'requests.get', (['"""https://www.nuomi.com/pcindex/main/comment"""'], {'params': "{'dealId': shopID, 'page': page}"}), "('https://www.nuomi.com/pcindex/main/comment', params={'dealId':\n shopID, 'page': page})\n", (370, 461), False, 'import requests, re, os\n')] |
from __future__ import print_function
from newrelic.admin import command, usage
@command('generate-config', 'license_key [output_file]',
"""Generates a sample agent configuration file for <license_key>.""")
def generate_config(args):
import os
import sys
if len(args) == 0:
usage('generate-config')
sys.exit(1)
from newrelic import __file__ as package_root
package_root = os.path.dirname(package_root)
config_file = os.path.join(package_root, 'newrelic.ini')
content = open(config_file, 'r').read()
if len(args) >= 1:
content = content.replace('*** REPLACE ME ***', args[0])
if len(args) >= 2 and args[1] != '-':
output_file = open(args[1], 'w')
output_file.write(content)
output_file.close()
else:
print(content)
| [
"os.path.join",
"os.path.dirname",
"newrelic.admin.usage",
"sys.exit",
"newrelic.admin.command"
] | [((83, 208), 'newrelic.admin.command', 'command', (['"""generate-config"""', '"""license_key [output_file]"""', '"""Generates a sample agent configuration file for <license_key>."""'], {}), "('generate-config', 'license_key [output_file]',\n 'Generates a sample agent configuration file for <license_key>.')\n", (90, 208), False, 'from newrelic.admin import command, usage\n'), ((412, 441), 'os.path.dirname', 'os.path.dirname', (['package_root'], {}), '(package_root)\n', (427, 441), False, 'import os\n'), ((461, 503), 'os.path.join', 'os.path.join', (['package_root', '"""newrelic.ini"""'], {}), "(package_root, 'newrelic.ini')\n", (473, 503), False, 'import os\n'), ((297, 321), 'newrelic.admin.usage', 'usage', (['"""generate-config"""'], {}), "('generate-config')\n", (302, 321), False, 'from newrelic.admin import command, usage\n'), ((330, 341), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (338, 341), False, 'import sys\n')] |
from datetime import date
dados = dict()
dados['nome'] = str(input('Nome: ')).capitalize()
dados['idade'] = date.today().year - int(input('Ano de Nascimento: '))
dados['ctps'] = int(input('Carteira de Trabalho (0 caso não tenha): '))
if dados['ctps'] != 0:
dados['contratação'] = int(input('Ano de contratação: '))
dados['salário'] = float(input('Salário: R$'))
dados['aposentadoria'] = dados['idade'] + (dados['contratação'] + 35) - date.today().year
print('=-' * 30)
for k, v in dados.items():
print(f' - {k} tem o valor {v}')
# damn, that one was tough, man...
| [
"datetime.date.today"
] | [((109, 121), 'datetime.date.today', 'date.today', ([], {}), '()\n', (119, 121), False, 'from datetime import date\n'), ((449, 461), 'datetime.date.today', 'date.today', ([], {}), '()\n', (459, 461), False, 'from datetime import date\n')] |
'''
Copyright (c) 2018, UChicago Argonne, LLC
See LICENSE file.
'''
from PyQt5.QtWidgets import QMenuBar, QAction
class MenuWidget(QMenuBar):
def __init__(self):
super(MenuWidget,self).__init__()
file_menu = self.addMenu('File')
newAct = QAction('Load spec file', self)
file_menu.addAction(newAct)
| [
"PyQt5.QtWidgets.QAction"
] | [((287, 318), 'PyQt5.QtWidgets.QAction', 'QAction', (['"""Load spec file"""', 'self'], {}), "('Load spec file', self)\n", (294, 318), False, 'from PyQt5.QtWidgets import QMainWindow, QAction\n')] |
from flask import current_app, jsonify
from flask_cors import cross_origin
from . import auth
@auth.route('/auth/logout', methods=['OPTIONS', 'GET', 'POST'])
@cross_origin(supports_credentials=True)
def logout():
if not current_app.config['OIDC_LOGOUT_URL']:
return jsonify(status='ok', message='OIDC end_session_endpoint not configured')
return jsonify(status='ok', logoutUrl=current_app.config['OIDC_LOGOUT_URL'])
| [
"flask_cors.cross_origin",
"flask.jsonify"
] | [((162, 201), 'flask_cors.cross_origin', 'cross_origin', ([], {'supports_credentials': '(True)'}), '(supports_credentials=True)\n', (174, 201), False, 'from flask_cors import cross_origin\n'), ((367, 436), 'flask.jsonify', 'jsonify', ([], {'status': '"""ok"""', 'logoutUrl': "current_app.config['OIDC_LOGOUT_URL']"}), "(status='ok', logoutUrl=current_app.config['OIDC_LOGOUT_URL'])\n", (374, 436), False, 'from flask import current_app, jsonify\n'), ((282, 354), 'flask.jsonify', 'jsonify', ([], {'status': '"""ok"""', 'message': '"""OIDC end_session_endpoint not configured"""'}), "(status='ok', message='OIDC end_session_endpoint not configured')\n", (289, 354), False, 'from flask import current_app, jsonify\n')] |
from lib.topology import Master, OVS, Netns, Link
m = Master()
ovs1 = OVS().add_to(m)
ovs2 = OVS().add_to(m)
ovs3 = OVS().add_to(m)
ovs4 = OVS().add_to(m)
Link.declare(ovs1, ovs2, link_type='patch')
Link.declare(ovs2, ovs3, link_type='veth')
Link.declare(ovs3, ovs4, link_type='veth', disable_offloading=True)
ns1 = Netns().add_to(m)
Link.declare((ns1, '10.113.1.1'), ovs1, link_type='port')
ns2 = Netns('wonderful-ns2').add_to(m)
Link.declare((ns2,'10.113.1.2'), ovs3, link_type='veth')
ns3 = Netns('notoffld-ns3').add_to(m)
Link.declare((ns3,'10.113.1.3'), ovs4, link_type='port', disable_offloading=True)
Link.declare((ns3,'10.113.1.4'), ovs3, link_type='veth', disable_offloading=True)
Link.declare(ns1, ns2)
Link.declare(ns1, ns3, disable_offloading=True)
print(m.get_script())
| [
"lib.topology.Master",
"lib.topology.Link.declare",
"lib.topology.OVS",
"lib.topology.Netns"
] | [((55, 63), 'lib.topology.Master', 'Master', ([], {}), '()\n', (61, 63), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((161, 204), 'lib.topology.Link.declare', 'Link.declare', (['ovs1', 'ovs2'], {'link_type': '"""patch"""'}), "(ovs1, ovs2, link_type='patch')\n", (173, 204), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((205, 247), 'lib.topology.Link.declare', 'Link.declare', (['ovs2', 'ovs3'], {'link_type': '"""veth"""'}), "(ovs2, ovs3, link_type='veth')\n", (217, 247), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((248, 315), 'lib.topology.Link.declare', 'Link.declare', (['ovs3', 'ovs4'], {'link_type': '"""veth"""', 'disable_offloading': '(True)'}), "(ovs3, ovs4, link_type='veth', disable_offloading=True)\n", (260, 315), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((341, 398), 'lib.topology.Link.declare', 'Link.declare', (["(ns1, '10.113.1.1')", 'ovs1'], {'link_type': '"""port"""'}), "((ns1, '10.113.1.1'), ovs1, link_type='port')\n", (353, 398), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((439, 496), 'lib.topology.Link.declare', 'Link.declare', (["(ns2, '10.113.1.2')", 'ovs3'], {'link_type': '"""veth"""'}), "((ns2, '10.113.1.2'), ovs3, link_type='veth')\n", (451, 496), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((535, 621), 'lib.topology.Link.declare', 'Link.declare', (["(ns3, '10.113.1.3')", 'ovs4'], {'link_type': '"""port"""', 'disable_offloading': '(True)'}), "((ns3, '10.113.1.3'), ovs4, link_type='port',\n disable_offloading=True)\n", (547, 621), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((617, 703), 'lib.topology.Link.declare', 'Link.declare', (["(ns3, '10.113.1.4')", 'ovs3'], {'link_type': '"""veth"""', 'disable_offloading': '(True)'}), "((ns3, '10.113.1.4'), ovs3, link_type='veth',\n disable_offloading=True)\n", (629, 703), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((700, 722), 'lib.topology.Link.declare', 'Link.declare', (['ns1', 'ns2'], {}), '(ns1, ns2)\n', (712, 722), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((723, 770), 'lib.topology.Link.declare', 'Link.declare', (['ns1', 'ns3'], {'disable_offloading': '(True)'}), '(ns1, ns3, disable_offloading=True)\n', (735, 770), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((72, 77), 'lib.topology.OVS', 'OVS', ([], {}), '()\n', (75, 77), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((96, 101), 'lib.topology.OVS', 'OVS', ([], {}), '()\n', (99, 101), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((120, 125), 'lib.topology.OVS', 'OVS', ([], {}), '()\n', (123, 125), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((144, 149), 'lib.topology.OVS', 'OVS', ([], {}), '()\n', (147, 149), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((323, 330), 'lib.topology.Netns', 'Netns', ([], {}), '()\n', (328, 330), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((406, 428), 'lib.topology.Netns', 'Netns', (['"""wonderful-ns2"""'], {}), "('wonderful-ns2')\n", (411, 428), False, 'from lib.topology import Master, OVS, Netns, Link\n'), ((503, 524), 'lib.topology.Netns', 'Netns', (['"""notoffld-ns3"""'], {}), "('notoffld-ns3')\n", (508, 524), False, 'from lib.topology import Master, OVS, Netns, Link\n')] |
from pathlib import Path
from fhir.resources.codesystem import CodeSystem
from oops_fhir.utils import CodeSystemConcept
__all__ = ["medicationRequestIntent"]
_resource = CodeSystem.parse_file(Path(__file__).with_suffix(".json"))
class medicationRequestIntent:
"""
Medication request intent
MedicationRequest Intent Codes
Status: draft - Version: 4.0.1
Copyright None
http://hl7.org/fhir/CodeSystem/medicationrequest-intent
"""
proposal = CodeSystemConcept(
{
"code": "proposal",
"definition": "The request is a suggestion made by someone/something that doesn't have an intention to ensure it occurs and without providing an authorization to act",
"display": "Proposal",
}
)
"""
Proposal
The request is a suggestion made by someone/something that doesn't have an intention to ensure it occurs and without providing an authorization to act
"""
plan = CodeSystemConcept(
{
"code": "plan",
"definition": "The request represents an intention to ensure something occurs without providing an authorization for others to act.",
"display": "Plan",
}
)
"""
Plan
The request represents an intention to ensure something occurs without providing an authorization for others to act.
"""
order = CodeSystemConcept(
{
"code": "order",
"definition": "The request represents a request/demand and authorization for action",
"display": "Order",
}
)
"""
Order
The request represents a request/demand and authorization for action
"""
original_order = CodeSystemConcept(
{
"code": "original-order",
"definition": "The request represents the original authorization for the medication request.",
"display": "Original Order",
}
)
"""
Original Order
The request represents the original authorization for the medication request.
"""
reflex_order = CodeSystemConcept(
{
"code": "reflex-order",
"definition": "The request represents an automatically generated supplemental authorization for action based on a parent authorization together with initial results of the action taken against that parent authorization..",
"display": "Reflex Order",
}
)
"""
Reflex Order
The request represents an automatically generated supplemental authorization for action based on a parent authorization together with initial results of the action taken against that parent authorization..
"""
filler_order = CodeSystemConcept(
{
"code": "filler-order",
"definition": "The request represents the view of an authorization instantiated by a fulfilling system representing the details of the fulfiller's intention to act upon a submitted order.",
"display": "Filler Order",
}
)
"""
Filler Order
The request represents the view of an authorization instantiated by a fulfilling system representing the details of the fulfiller's intention to act upon a submitted order.
"""
instance_order = CodeSystemConcept(
{
"code": "instance-order",
"definition": "The request represents an instance for the particular order, for example a medication administration record.",
"display": "Instance Order",
}
)
"""
Instance Order
The request represents an instance for the particular order, for example a medication administration record.
"""
option = CodeSystemConcept(
{
"code": "option",
"definition": "The request represents a component or option for a RequestGroup that establishes timing, conditionality and/or other constraints among a set of requests.",
"display": "Option",
}
)
"""
Option
The request represents a component or option for a RequestGroup that establishes timing, conditionality and/or other constraints among a set of requests.
"""
class Meta:
resource = _resource
| [
"oops_fhir.utils.CodeSystemConcept",
"pathlib.Path"
] | [((483, 722), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (['{\'code\': \'proposal\', \'definition\':\n "The request is a suggestion made by someone/something that doesn\'t have an intention to ensure it occurs and without providing an authorization to act"\n , \'display\': \'Proposal\'}'], {}), '({\'code\': \'proposal\', \'definition\':\n "The request is a suggestion made by someone/something that doesn\'t have an intention to ensure it occurs and without providing an authorization to act"\n , \'display\': \'Proposal\'})\n', (500, 722), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((972, 1169), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'plan', 'definition':\n 'The request represents an intention to ensure something occurs without providing an authorization for others to act.'\n , 'display': 'Plan'}"], {}), "({'code': 'plan', 'definition':\n 'The request represents an intention to ensure something occurs without providing an authorization for others to act.'\n , 'display': 'Plan'})\n", (989, 1169), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((1382, 1532), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'order', 'definition':\n 'The request represents a request/demand and authorization for action',\n 'display': 'Order'}"], {}), "({'code': 'order', 'definition':\n 'The request represents a request/demand and authorization for action',\n 'display': 'Order'})\n", (1399, 1532), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((1708, 1886), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'original-order', 'definition':\n 'The request represents the original authorization for the medication request.'\n , 'display': 'Original Order'}"], {}), "({'code': 'original-order', 'definition':\n 'The request represents the original authorization for the medication request.'\n , 'display': 'Original Order'})\n", (1725, 1886), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2077, 2379), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'reflex-order', 'definition':\n 'The request represents an automatically generated supplemental authorization for action based on a parent authorization together with initial results of the action taken against that parent authorization..'\n , 'display': 'Reflex Order'}"], {}), "({'code': 'reflex-order', 'definition':\n 'The request represents an automatically generated supplemental authorization for action based on a parent authorization together with initial results of the action taken against that parent authorization..'\n , 'display': 'Reflex Order'})\n", (2094, 2379), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((2696, 2965), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (['{\'code\': \'filler-order\', \'definition\':\n "The request represents the view of an authorization instantiated by a fulfilling system representing the details of the fulfiller\'s intention to act upon a submitted order."\n , \'display\': \'Filler Order\'}'], {}), '({\'code\': \'filler-order\', \'definition\':\n "The request represents the view of an authorization instantiated by a fulfilling system representing the details of the fulfiller\'s intention to act upon a submitted order."\n , \'display\': \'Filler Order\'})\n', (2713, 2965), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((3251, 3460), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'instance-order', 'definition':\n 
'The request represents an instance for the particular order, for example a medication administration record.'\n , 'display': 'Instance Order'}"], {}), "({'code': 'instance-order', 'definition':\n 'The request represents an instance for the particular order, for example a medication administration record.'\n , 'display': 'Instance Order'})\n", (3268, 3460), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((3676, 3915), 'oops_fhir.utils.CodeSystemConcept', 'CodeSystemConcept', (["{'code': 'option', 'definition':\n 'The request represents a component or option for a RequestGroup that establishes timing, conditionality and/or other constraints among a set of requests.'\n , 'display': 'Option'}"], {}), "({'code': 'option', 'definition':\n 'The request represents a component or option for a RequestGroup that establishes timing, conditionality and/or other constraints among a set of requests.'\n , 'display': 'Option'})\n", (3693, 3915), False, 'from oops_fhir.utils import CodeSystemConcept\n'), ((197, 211), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (201, 211), False, 'from pathlib import Path\n')] |
# -*- coding: utf-8 -*-
import os
admins = ['Tyranic-Moron', 'T-M|Work', 'Tyranic_Moron', 'T-M|Asleep', 'GarrusVakarian', 'LordCustardSmingleigh', 'XelaReko', 'XelaReco', 'Xel|Work', 'dave_random', 'ElementalAlchemist', 'Homoglyph', 'Heufy|Work', 'Heufneutje', 'HeufyDroid', 'HeufyCloud', 'HeufyTrain', 'HeufyBus', 'HubbeKing', 'HubbeWork', 'HubbeTrain', 'Mara', 'Didero', 'ekimekim']
finger = 'GET YOUR FINGER OUT OF THERE'
version = '1.0.0'
source = 'https://github.com/MatthewCox/PyMoronBot/'
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
os.chdir(dname)
logPath = os.path.join(dname, 'logs')
| [
"os.path.abspath",
"os.path.dirname",
"os.chdir",
"os.path.join"
] | [((509, 534), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (524, 534), False, 'import os\n'), ((543, 567), 'os.path.dirname', 'os.path.dirname', (['abspath'], {}), '(abspath)\n', (558, 567), False, 'import os\n'), ((568, 583), 'os.chdir', 'os.chdir', (['dname'], {}), '(dname)\n', (576, 583), False, 'import os\n'), ((594, 621), 'os.path.join', 'os.path.join', (['dname', '"""logs"""'], {}), "(dname, 'logs')\n", (606, 621), False, 'import os\n')] |
import pytest
from config_file import parse_dialog_from_json
def test_empty_invalid():
with pytest.raises(ValueError) as excinfo:
parse_dialog_from_json({})
assert "Invalid configuration file!" in str(excinfo.value)
def test_load_simple_sequence():
dialog_json = {
"sequence": [
["text 1",
"image 1"],
["text 2",
"image 2"]
]
}
dialog_graph = parse_dialog_from_json(dialog_json)
assert dialog_graph.current_node().text == "text 1"
assert dialog_graph.current_node().graphics.image_ids == ["image 1"]
assert dialog_graph.current_node().choices[0].text == "Next"
dialog_graph.make_choice(0)
assert dialog_graph.current_node().text == "text 2"
assert dialog_graph.current_node().graphics.image_ids == ["image 2"]
assert dialog_graph.current_node().choices[0].text == "Play from beginning"
def test_load_graph():
dialog_json = {
"graph": {
"root": "1",
"nodes": [
{
"id": "1",
"text": "text 1",
"graphics": {
"image": "image 1"
},
"choices": [
[
"stay here",
"1"
],
[
"go next",
"2"
]
]
},
{
"id": "2",
"text": "text 2",
"graphics": {
"image": "image 2"
},
"choices": [
[
"go back",
"1"
]
]
}
]
}
}
dialog_graph = parse_dialog_from_json(dialog_json)
assert dialog_graph.current_node().text == "text 1"
assert dialog_graph.current_node().graphics.image_ids == ["image 1"]
assert [c.text for c in dialog_graph.current_node().choices] == ["stay here", "go next"]
dialog_graph.make_choice(1)
assert dialog_graph.current_node().text == "text 2"
assert dialog_graph.current_node().graphics.image_ids == ["image 2"]
assert [c.text for c in dialog_graph.current_node().choices] == ["go back"]
def test_load_graph_with_animation():
dialog_json = {
"graph": {
"root": "1",
"nodes": [
{
"id": "1",
"text": "text 1",
"graphics": {
"animation": "animation 1",
},
"choices": []
}
]
}
}
dialog_graph = parse_dialog_from_json(dialog_json)
assert dialog_graph.current_node().text == "text 1"
assert dialog_graph.current_node().graphics.animation_id == "animation 1"
assert dialog_graph.current_node().choices == []
| [
"pytest.raises",
"config_file.parse_dialog_from_json"
] | [((442, 477), 'config_file.parse_dialog_from_json', 'parse_dialog_from_json', (['dialog_json'], {}), '(dialog_json)\n', (464, 477), False, 'from config_file import parse_dialog_from_json\n'), ((1968, 2003), 'config_file.parse_dialog_from_json', 'parse_dialog_from_json', (['dialog_json'], {}), '(dialog_json)\n', (1990, 2003), False, 'from config_file import parse_dialog_from_json\n'), ((2892, 2927), 'config_file.parse_dialog_from_json', 'parse_dialog_from_json', (['dialog_json'], {}), '(dialog_json)\n', (2914, 2927), False, 'from config_file import parse_dialog_from_json\n'), ((99, 124), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (112, 124), False, 'import pytest\n'), ((145, 171), 'config_file.parse_dialog_from_json', 'parse_dialog_from_json', (['{}'], {}), '({})\n', (167, 171), False, 'from config_file import parse_dialog_from_json\n')] |
# from pong import Pong
import matplotlib
matplotlib.use('Agg')
# import matplotlib.pyplot as plt
# from random import randint
# import pickle
import numpy as np
# from simple_ai import PongAi, MyAi
# # import argparse
import torch
import torch.optim as optim
from torch import nn
from torch.nn import functional as F
# from PIL import Image
# from skimage.transform import resize
np.set_printoptions(threshold=np.inf)
import collections
import gym
torch.set_default_tensor_type('torch.cuda.DoubleTensor')
# CUDA
use_cuda = torch.cuda.is_available()
print("Using cuda:", use_cuda)
class CriticNN(nn.Module):
def __init__(self, in_channels=3):
super(CriticNN, self).__init__()
self.fc1 = nn.Linear(4, 64)
self.fc2 = nn.Linear(64, 1)
self.optimizer = optim.Adam(self.parameters(), lr=1e-4)
print('self params', self.parameters)
def forward(self, x):
# x = torch.from_numpy(x)
x = F.layer_norm(x, x.size())
x = F.leaky_relu(self.fc1(x))
x = F.layer_norm(x, x.size())
x = self.fc2(x)
return x
def init_weights(self, m):
if type(m) == nn.Linear:
print('HERE ---')
# torch.nn.init.xavier_uniform(m.weight)
m.weight.data.fill_(0)
m.bias.data.fill_(0)
class ActorNN(nn.Module):
def __init__(self):
super(ActorNN, self).__init__()
self.fc1 = nn.Linear(4, 64)
self.fc2 = nn.Linear(64, 1)
self.optimizer = optim.Adam(self.parameters(), lr=1e-4)
def forward(self, x):
# print(x.size())
# print('x', x)
x = F.layer_norm(x, x.size())
x = F.leaky_relu(self.fc1(x))
x = F.layer_norm(x, x.size())
x = torch.sigmoid(self.fc2(x))
return x
def train(self, loss):
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
# class ReplayMemory(object):
#
# def __init__(self, capacity):
# self.capacity = capacity
# self.memory = []
# self.position = 0
#
# def push(self, *args):
# """Saves a transition."""
# if len(self.memory) < self.capacity:
# self.memory.append(None)
# self.memory[self.position] = Transition(*args)
# self.position = (self.position + 1) % self.capacity
#
# def sample(self, batch_size):
# return random.sample(self.memory, batch_size)
#
# def __len__(self):
# return len(self.memory)
actor = ActorNN()
critic = CriticNN()
critic.apply(critic.init_weights)
# props to karpathy
def discount_rewards(r, gamma=0.99):
""" take 1D float array of rewards and compute discounted reward """
discounted_r = np.zeros_like(r)
running_add = 0
    for t in reversed(range(0, r.size)):
if r[t] != 0: running_add = 0 # reset the sum, since this was a game boundary (pong specific!)
running_add = running_add * gamma + r[t]
discounted_r[t] = running_add
return discounted_r
render = False
env = gym.make('CartPole-v0')
actor_update_freq = 10
critic_update_freq = 1
gamma = 0.99
log_freq = 100
running_rewards = collections.deque(maxlen=log_freq)
rewards, probs, actions, value_approx = [], [], [], []
for i_episode in range(1, 100000000):
done = False
losses_actor, losses_critic = [], []
    observation_prev = env.reset()
ep_reward = 0
while not done:
# env.render()
left_prob = actor.forward(torch.from_numpy(observation_prev).cuda())
action = 0 if np.random.uniform() < left_prob else 1
lprob = torch.log(left_prob) if action == 0 else torch.log(1 - left_prob)
        observation, reward, done, info = env.step(action)
        ep_reward += reward
# s_t + 1
        value_approx.append(torch.tensor(observation).cuda())
# R(s_t, a_t)
rewards.append(reward)
# logp(a_t | s_t)
probs.append(lprob)
# a_t
        actions.append(action)
        # carry the new observation into the next step
        observation_prev = observation
# for t in range(t-1 , ... t_start):
    for t in reversed(range(0, len(rewards))):
is_terminal = t == 0
observation_t
R = critic.forward()
if i_episode % log_freq == 0:
print(f"Episode: {i_episode}, last {log_freq} episodes mean reward: { np.mean(running_rewards)}")
running_rewards.append(ep_reward)
| [
"numpy.mean",
"collections.deque",
"torch.log",
"matplotlib.use",
"torch.set_default_tensor_type",
"torch.from_numpy",
"torch.tensor",
"torch.cuda.is_available",
"torch.nn.Linear",
"numpy.random.uniform",
"numpy.zeros_like",
"gym.make",
"numpy.set_printoptions"
] | [((43, 64), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (57, 64), False, 'import matplotlib\n'), ((383, 420), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.nan'}), '(threshold=np.nan)\n', (402, 420), True, 'import numpy as np\n'), ((452, 508), 'torch.set_default_tensor_type', 'torch.set_default_tensor_type', (['"""torch.cuda.DoubleTensor"""'], {}), "('torch.cuda.DoubleTensor')\n", (481, 508), False, 'import torch\n'), ((527, 552), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (550, 552), False, 'import torch\n'), ((3008, 3031), 'gym.make', 'gym.make', (['"""CartPole-v0"""'], {}), "('CartPole-v0')\n", (3016, 3031), False, 'import gym\n'), ((3124, 3158), 'collections.deque', 'collections.deque', ([], {'maxlen': 'log_freq'}), '(maxlen=log_freq)\n', (3141, 3158), False, 'import collections\n'), ((2710, 2726), 'numpy.zeros_like', 'np.zeros_like', (['r'], {}), '(r)\n', (2723, 2726), True, 'import numpy as np\n'), ((712, 728), 'torch.nn.Linear', 'nn.Linear', (['(4)', '(64)'], {}), '(4, 64)\n', (721, 728), False, 'from torch import nn\n'), ((748, 764), 'torch.nn.Linear', 'nn.Linear', (['(64)', '(1)'], {}), '(64, 1)\n', (757, 764), False, 'from torch import nn\n'), ((1420, 1436), 'torch.nn.Linear', 'nn.Linear', (['(4)', '(64)'], {}), '(4, 64)\n', (1429, 1436), False, 'from torch import nn\n'), ((1456, 1472), 'torch.nn.Linear', 'nn.Linear', (['(64)', '(1)'], {}), '(64, 1)\n', (1465, 1472), False, 'from torch import nn\n'), ((3579, 3599), 'torch.log', 'torch.log', (['left_prob'], {}), '(left_prob)\n', (3588, 3599), False, 'import torch\n'), ((3620, 3644), 'torch.log', 'torch.log', (['(1 - left_prob)'], {}), '(1 - left_prob)\n', (3629, 3644), False, 'import torch\n'), ((3524, 3543), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (3541, 3543), True, 'import numpy as np\n'), ((3751, 3776), 'torch.tensor', 'torch.tensor', (['observation'], {}), '(observation)\n', (3763, 3776), False, 'import torch\n'), ((3459, 3493), 'torch.from_numpy', 'torch.from_numpy', (['observation_prev'], {}), '(observation_prev)\n', (3475, 3493), False, 'import torch\n'), ((4222, 4246), 'numpy.mean', 'np.mean', (['running_rewards'], {}), '(running_rewards)\n', (4229, 4246), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
"""
from dubbo.common.util import num_2_byte_list
# Response status codes returned by the server
response_status_message = {
20: 'OK',
30: 'CLIENT_TIMEOUT',
31: 'SERVER_TIMEOUT',
40: 'BAD_REQUEST',
50: 'BAD_RESPONSE',
60: 'SERVICE_NOT_FOUND',
70: 'SERVICE_ERROR',
80: 'SERVER_ERROR',
90: 'CLIENT_ERROR'
}
# Maximum value of a 32-bit integer
MAX_INT_32 = 2147483647
# Minimum value of a 32-bit integer
MIN_INT_32 = -2147483648
# MAGIC_NUM(2) + FLAG(1) + STATUS(1)
DEFAULT_REQUEST_META = num_2_byte_list(0xdabbc200)
# Header of the heartbeat request sent by the client to the server
CLI_HEARTBEAT_REQ_HEAD = num_2_byte_list(0xdabbe2) + [0]
# Header of the heartbeat response sent by the client to the server
CLI_HEARTBEAT_RES_HEAD = num_2_byte_list(0xdabb2214)
# Heartbeat tail bytes
CLI_HEARTBEAT_TAIL = [0, 0, 0, 1] + num_2_byte_list(0x4e)
DUBBO_ZK_PROVIDERS = '/dubbo/{}/providers'
DUBBO_ZK_CONSUMERS = '/dubbo/{}/consumers'
DUBBO_ZK_CONFIGURATORS = '/dubbo/{}/configurators'
# Interval at which the client checks whether the connection to the remote host has timed out
TIMEOUT_CHECK_INTERVAL = 0.03  # 30ms
# Maximum idle time allowed for a connection
TIMEOUT_IDLE = 60
# Maximum number of timeouts allowed for a connection
TIMEOUT_MAX_TIMES = 3
# The data header is 16 bytes long
# Data type to read: 1 head; 2 error_body; 3 common_body
# The header carries no invoke_id, so it is None
DEFAULT_READ_PARAMS = 16, 1, None
| [
"dubbo.common.util.num_2_byte_list"
] | [((1271, 1298), 'dubbo.common.util.num_2_byte_list', 'num_2_byte_list', (['(3669737984)'], {}), '(3669737984)\n', (1286, 1298), False, 'from dubbo.common.util import num_2_byte_list\n'), ((1424, 1451), 'dubbo.common.util.num_2_byte_list', 'num_2_byte_list', (['(3669697044)'], {}), '(3669697044)\n', (1439, 1451), False, 'from dubbo.common.util import num_2_byte_list\n'), ((1346, 1371), 'dubbo.common.util.num_2_byte_list', 'num_2_byte_list', (['(14334946)'], {}), '(14334946)\n', (1361, 1371), False, 'from dubbo.common.util import num_2_byte_list\n'), ((1495, 1514), 'dubbo.common.util.num_2_byte_list', 'num_2_byte_list', (['(78)'], {}), '(78)\n', (1510, 1514), False, 'from dubbo.common.util import num_2_byte_list\n')] |
from resotolib.args import get_arg_parser, ArgumentParser
from resoto_plugin_tagvalidator import TagValidatorPlugin
def test_args():
arg_parser = get_arg_parser()
TagValidatorPlugin.add_args(arg_parser)
arg_parser.parse_args()
assert ArgumentParser.args.tagvalidator_config is None
assert ArgumentParser.args.tagvalidator_dry_run is False
| [
"resoto_plugin_tagvalidator.TagValidatorPlugin.add_args",
"resotolib.args.get_arg_parser"
] | [((152, 168), 'resotolib.args.get_arg_parser', 'get_arg_parser', ([], {}), '()\n', (166, 168), False, 'from resotolib.args import get_arg_parser, ArgumentParser\n'), ((173, 212), 'resoto_plugin_tagvalidator.TagValidatorPlugin.add_args', 'TagValidatorPlugin.add_args', (['arg_parser'], {}), '(arg_parser)\n', (200, 212), False, 'from resoto_plugin_tagvalidator import TagValidatorPlugin\n')] |
from executor import Executor
class ExecutorBuilder:
def __init__(self,
sim,
simulator,
spinnVersion,
fsa,
neal,
neuronRepository,
connectionsRepository,
activationsRepository,
logger):
self.__simulator = simulator
self.__fsa = fsa
self.__neal = neal
self.__sim = sim
self.__spinnVersion = spinnVersion
self.__neuronRepository = neuronRepository
self.__connectionsRepository = connectionsRepository
self.__activationsRepository = activationsRepository
self.__logger = logger
self.__associationTopology = None
def useAssociationTopology(self, topology):
if(topology):
self.__associationTopology = topology
return self
def build(self):
return Executor(self.__sim,
self.__simulator,
self.__fsa,
self.__neal,
self.__spinnVersion,
self.__neuronRepository,
self.__connectionsRepository,
self.__activationsRepository,
self.__associationTopology,
self.__logger) | [
"executor.Executor"
] | [((849, 1070), 'executor.Executor', 'Executor', (['self.__sim', 'self.__simulator', 'self.__fsa', 'self.__neal', 'self.__spinnVersion', 'self.__neuronRepository', 'self.__connectionsRepository', 'self.__activationsRepository', 'self.__associationTopology', 'self.__logger'], {}), '(self.__sim, self.__simulator, self.__fsa, self.__neal, self.\n __spinnVersion, self.__neuronRepository, self.__connectionsRepository,\n self.__activationsRepository, self.__associationTopology, self.__logger)\n', (857, 1070), False, 'from executor import Executor\n')] |
from itertools import permutations
from functools import reduce, partial
import re
from typing import Collection, Tuple
from pynagram.util import WordList, log
_word_list = None
def find_valid_words(dictionary: Collection[str], candidates: Collection[str]) -> Collection[str]:
"""Finds valid words from 'candidates' as found in
the given words list.
dictionary: the list to be used as a dictionary. Only strings in the dictionary are considered valid words
candidates: strings to be tested for validity
"""
dictionary, perms = set(dictionary), set(candidates)
return dictionary & perms
def _remove_chars(string, chars):
for k in chars:
string = string.replace(k, '', 1)
return string
# @log
def _const_sentences(string: str, words_list: Collection[str]) -> Tuple[bool, Collection[str]]:
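    # Recursively build sentences: try every dictionary word that can be formed from the
    # remaining letters, consume those letters, and recurse on whatever is left over.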
if not string:
return True, []
words = sorted(get_anagrams(string, words_list, 1, len(string)), key=lambda s: (len(s), s))
# click.secho(f"words = {words}", fg='green')
if len(words) == 0:
return False, []
acc = []
for w in words:
flag, tails = _const_sentences(_remove_chars(string, w), words_list)
if flag:
acc += [f"{w} {tail}" for tail in tails] if tails else [w]
return len(acc) > 0, acc
# @log
# @timed
def construct_sentences(string: str, words_list: Collection[str]) -> Collection[str]:
if not words_list:
raise ValueError('Word list required for creating sentences')
_, sentences = _const_sentences(string, words_list)
return sentences
def get_anagrams(string: str, dictionary: Collection[str], mn: int, mx: int) -> Collection[str]:
"""Generates all anagrams of the string s using the provided dictionary,
whose lengths are >= mn and <= mx.
Thus the function returns all w such that w is in dictionary
and mn <= len(w) <= mx.
If no dictionary is given, then a list of permuted strings will be returned
s: the string to be used to generate anagrams
dictionary: the dictionary to be used to determine valid words
mn: the minimum length of words to be returned
mx: the maximum length of words to be returned
"""
if not string:
return set()
if not mx:
mx = len(string)
if not mn:
mn = mx
string = re.sub(r'\s+', '', string.lower())
strings = {''.join(e) for e in reduce(lambda acc, xs: acc | set(xs),
map(partial(permutations, string), range(mn, mx + 1)), set())}
if not dictionary:
return strings
return find_valid_words(dictionary, strings)
# @log
def load_dict(filename, mn=None, mx=None):
"""
Loads words from a dictionary (word list)
filename: the path to the word list - mandatory
mn: minimum length of words to be imported
mx: the maximum length of imported words
"""
global _word_list
if not _word_list:
if mn is None:
mn = 1
words = []
with open(filename) as f:
words += f.read().split('\n')
words_list = [s for s in words if (not mx and mn <= len(s)
or mn <= len(s) <= mx)]
# click.echo(f'[debug] <load_dict> Word list size = {len(words_list)}')
_word_list = WordList(words_list)
return _word_list
def is_word(string: str, words: Collection[str]) -> bool:
return string in words
| [
"pynagram.util.WordList",
"functools.partial"
] | [((3306, 3326), 'pynagram.util.WordList', 'WordList', (['words_list'], {}), '(words_list)\n', (3314, 3326), False, 'from pynagram.util import WordList, log\n'), ((2477, 2506), 'functools.partial', 'partial', (['permutations', 'string'], {}), '(permutations, string)\n', (2484, 2506), False, 'from functools import reduce, partial\n')] |
'''
ms1 experiment (:mod:`calour.ms1_experiment`)
=============================================
.. currentmodule:: calour.ms1_experiment
Classes
^^^^^^^
.. autosummary::
:toctree: generated
MS1Experiment
'''
# ----------------------------------------------------------------------------
# Copyright (c) 2016--, Calour development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from logging import getLogger
import matplotlib as mpl
import numpy as np
from .experiment import Experiment
from .util import _to_list
logger = getLogger(__name__)
class MS1Experiment(Experiment):
'''This class contains the data of Mass-Spec ms1 spectra experiment.
Parameters
----------
data : numpy.ndarray or scipy.sparse.csr_matrix
The abundance table for OTUs, metabolites, genes, etc. Samples
are in row and features in column
sample_metadata : pandas.DataFrame
The metadata on the samples
feature_metadata : pandas.DataFrame
The metadata on the features
description : str
name of experiment
sparse : bool
store the data array in :class:`scipy.sparse.csr_matrix`
or :class:`numpy.ndarray`
Attributes
----------
data : numpy.ndarray or scipy.sparse.csr_matrix
The abundance table for OTUs, metabolites, genes, etc. Samples
are in row and features in column
sample_metadata : pandas.DataFrame
The metadata on the samples
feature_metadata : pandas.DataFrame
The metadata on the features
shape : tuple of (int, int)
the dimension of data
sparse : bool
store the data as sparse matrix (scipy.sparse.csr_matrix) or numpy array.
info : dict
information about the experiment (data md5, filenames, etc.)
description : str
name of the experiment
See Also
--------
Experiment
'''
def __init__(self, *args, databases=('gnps',), **kwargs):
super().__init__(*args, databases=('gnps',), **kwargs)
def heatmap(self, *args, **kwargs):
'''Plot a heatmap for the ms1 experiment.
This method accepts exactly the same parameters as input with
its parent class method and does exactly the sample plotting.
The only difference is that by default, its color scale is
**in log** as its `norm` parameter is set to
`matplotlib.colors.LogNorm()`. You can always set it to other
scale as explained in :meth:`.Experiment.heatmap`.
See Also
--------
Experiment.heatmap
'''
if 'norm' not in kwargs:
kwargs['norm'] = mpl.colors.LogNorm()
if 'mz_rt' in self.feature_metadata.columns:
if 'yticklabel_len' not in kwargs:
kwargs['yticklabel_len'] = None
if 'feature_field' not in kwargs:
kwargs['feature_field'] = 'mz_rt'
if 'yticklabel_kwargs' not in kwargs:
kwargs['yticklabel_kwargs'] = {'size': 6, 'rotation': 0}
super().heatmap(*args, **kwargs)
def __repr__(self):
'''Return a string representation of this object.'''
return 'MS1Experiment %s with %d samples, %d features' % (
self.description, self.data.shape[0], self.data.shape[1])
def get_spurious_duplicates(self, mz_tolerance=0.001, rt_tolerance=2, corr_thresh=0.8, inplace=False, negate=False):
'''Get subgroups of metabolites that are suspected ms1 alignment artifacts.
The function returns a calour.MS1Experiment with groups of metabolites that (within each group) have similar m/z and rt, and are highly
correlated/anti-correlated. These are usually due to incorrect feature detection/alignment and can be used to optimize the feature selection parameters.
correlation could be due to incomplete removal of isotopes or same metabolite in multiple RTs
anti-correlation could be due to RT drift (splitting of one true metabolite)
Metabolites in the new experiment are ordered by correlation clusters
Parameters
----------
mz_tolerance: float, optional
the M/Z tolerance. Metabolites are similar if abs(metabolite_mz - mz) <= mz_tolerance
rt_tolerance: float, optional
the retention time tolerance. Metabolites are similar if abs(metabolite_rt - rt) <= rt_tolerance
        corr_thresh: float, optional
the minimal (abs) correlation/anti-correlation value in order to call features correlated
inplace: bool, optional
True to replace current experiment, False to create new experiment with results
negate: bool, optional
If False, keep only metabolites that show a correlation with another metabolite
If True, remove metabolites showing correlation
Returns
-------
MS1Experiment
features filtered and ordered basen on m/z and rt similarity and correlation
'''
features = self.feature_metadata.copy()
keep_features = []
data = self.get_data(sparse=False)
while len(features) > 0:
# get the first feature
cfeature = features.iloc[0]
features.drop(index=cfeature.name, inplace=True)
# find all mz/rt neighbors of the feature
mzdist = np.abs(features['MZ'] - cfeature['MZ'])
rtdist = np.abs(features['RT'] - cfeature['RT'])
okf = features[np.logical_and(mzdist <= mz_tolerance, rtdist <= rt_tolerance)]
if len(okf) == 0:
continue
# test the correlation of each neighbor
odat = data[:, self.feature_metadata.index.get_loc(cfeature.name)]
ckeep = []
for cf, *_ in okf.iterrows():
cdat = data[:, self.feature_metadata.index.get_loc(cf)]
corrcf = np.corrcoef(odat, cdat)[0, 1]
if np.abs(corrcf) >= corr_thresh:
ckeep.append(cf)
# store the result and remove all the correlated features from the features left to process
if len(ckeep) > 0:
keep_features.append(cfeature.name)
keep_features.extend(ckeep)
features.drop(index=ckeep, inplace=True)
return self.filter_ids(keep_features, negate=negate, inplace=inplace)
def merge_similar_features(self, mz_tolerance=0.001, rt_tolerance=0.5):
'''Merge metabolites with similar mz/rt to a single metabolite
Metabolites are initially sorted by frequency and a greedy clustering algorithm (starting from the highest freq.) is used to join together
metabolites that are close in m/z and r/t, combining them to a signle metabolite with freq=sum(freq) of all metabolites in the cluster.
Parameters
----------
mz_tolerance: float, optional
metabolites with abs(metabolite_mz - mz) <= mz_tolerance are joined
rt_tolerance: float, optional
metabolites with abs(metabolite_rt - rt) <= rt_tolerance are joined
Returns
-------
MS1Experiment
With close metabolites joined to a single metabolite.
The m/z and rt of the new metabolite are the m/z and rt of the highest freq. metabolite. Frequency of the new metabolite is the sum of frequencies
of all joined metabolites.
New feature_metadata fields: _calour_merge_number, _calour_merge_ids are added listing the number and ids of the metabolites joined for each new metabolite
'''
exp = self.sort_abundance(reverse=False)
features = exp.feature_metadata
features['_metabolite_group'] = np.zeros(len(features)) - 1
gpos = list(features.columns).index('_metabolite_group')
cgroup = 0
for cgroup, cfeature in features.iterrows():
mzdist = np.abs(features['MZ'] - cfeature['MZ'])
rtdist = np.abs(features['RT'] - cfeature['RT'])
ok = (mzdist <= mz_tolerance) & (rtdist <= rt_tolerance) & (features['_metabolite_group'] == -1)
okpos = np.where(ok)[0]
for cpos in okpos:
features.iat[cpos, gpos] = cgroup
exp = exp.aggregate_by_metadata('_metabolite_group', agg='sum', axis='f')
exp.feature_metadata.drop('_metabolite_group', axis='columns', inplace=True)
logger.info('%d metabolites remaining after merge' % len(exp.feature_metadata))
return exp
def filter_mz_rt(self, mz=None, rt=None, mz_tolerance=0.05, rt_tolerance=0.2, inplace=False, negate=False):
'''Filter metabolites based on m/z and/or retention time
Keep (or remove if negate=True) metabolites that have an m/z and/or retention time close (up to tolerance)
to the requested mz and/or rt (or list of mz and/or rt).
If both mz and rt are provided, they should be matched (i.e. filtering is performed using each mz and rt pair with same index)
Parameters
----------
mz: float or list of float or None, optional
the M/Z to filter
if None, do not filter based on M/Z
rt: float or list of float or None, optional
the retention time to filter
if None, do not filter based on rt
mz_tolerance: float, optional
the M/Z tolerance. filter metabolites with abs(metabolite_mz - mz) <= mz_tolerance
rt_tolerance: float, optional
the rt tolerance. filter metabolites with abs(metabolite_rt - rt) <= rt_tolerance
inplace: bool, optional
True to replace current experiment, False to create new experiment with results
negate: bool, optional
If False, keep only metabolites matching mz
If True, remove metabolites matching mz
Returns
-------
MS1Experiment
features filtered based on mz
'''
if mz is None and rt is None:
raise ValueError('at least one of "mz" and "rt" must not be None')
if mz is not None:
if 'MZ' not in self.feature_metadata.columns:
raise ValueError('The Experiment does not contain the column "MZ". cannot filter by mz')
else:
mz = _to_list(mz)
if rt is not None:
if 'RT' not in self.feature_metadata.columns:
raise ValueError('The Experiment does not contain the column "RT". cannot filter by rt')
else:
rt = _to_list(rt)
select = np.zeros(len(self.feature_metadata), dtype='?')
notfound = 0
if mz is None:
mz = [None] * len(rt)
if rt is None:
rt = [None] * len(mz)
if len(mz) != len(rt):
raise ValueError('mz and rt must have same length')
for cmz, crt in zip(mz, rt):
if cmz is not None:
mzdiff = np.abs(self.feature_metadata['MZ'] - cmz)
keepmz = mzdiff <= mz_tolerance
else:
keepmz = np.full([len(self.feature_metadata)], True)
if crt is not None:
rtdiff = np.abs(self.feature_metadata['RT'] - crt)
keeprt = rtdiff <= rt_tolerance
else:
keeprt = np.full([len(self.feature_metadata)], True)
bothok = np.logical_and(keepmz, keeprt)
if bothok.sum() == 0:
notfound += 1
select = np.logical_or(select, bothok)
logger.info('Total from mz/rt list with no match: %d' % notfound)
logger.info('found %d matching features' % np.sum(select))
if negate:
select = np.logical_not(select)
return self.reorder(select, axis='f', inplace=inplace)
def sort_mz_rt(self, inplace=False):
'''Sort features according to m/z and retention time.
This is a convenience function wrapping calour.sort_by_metadata()
Parameters
----------
inplace: bool, optional
True to replace current experiment, False to create new experiment with results
Returns
-------
MS1Experiment
Sorted according to m/z and retention time
'''
return self.sort_by_metadata('mz_rt', axis='f', inplace=inplace)
| [
"logging.getLogger",
"numpy.abs",
"numpy.logical_and",
"numpy.corrcoef",
"numpy.where",
"numpy.logical_not",
"numpy.logical_or",
"numpy.sum",
"matplotlib.colors.LogNorm"
] | [((717, 736), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (726, 736), False, 'from logging import getLogger\n'), ((2801, 2821), 'matplotlib.colors.LogNorm', 'mpl.colors.LogNorm', ([], {}), '()\n', (2819, 2821), True, 'import matplotlib as mpl\n'), ((5520, 5559), 'numpy.abs', 'np.abs', (["(features['MZ'] - cfeature['MZ'])"], {}), "(features['MZ'] - cfeature['MZ'])\n", (5526, 5559), True, 'import numpy as np\n'), ((5581, 5620), 'numpy.abs', 'np.abs', (["(features['RT'] - cfeature['RT'])"], {}), "(features['RT'] - cfeature['RT'])\n", (5587, 5620), True, 'import numpy as np\n'), ((8073, 8112), 'numpy.abs', 'np.abs', (["(features['MZ'] - cfeature['MZ'])"], {}), "(features['MZ'] - cfeature['MZ'])\n", (8079, 8112), True, 'import numpy as np\n'), ((8134, 8173), 'numpy.abs', 'np.abs', (["(features['RT'] - cfeature['RT'])"], {}), "(features['RT'] - cfeature['RT'])\n", (8140, 8173), True, 'import numpy as np\n'), ((11540, 11570), 'numpy.logical_and', 'np.logical_and', (['keepmz', 'keeprt'], {}), '(keepmz, keeprt)\n', (11554, 11570), True, 'import numpy as np\n'), ((11656, 11685), 'numpy.logical_or', 'np.logical_or', (['select', 'bothok'], {}), '(select, bothok)\n', (11669, 11685), True, 'import numpy as np\n'), ((11868, 11890), 'numpy.logical_not', 'np.logical_not', (['select'], {}), '(select)\n', (11882, 11890), True, 'import numpy as np\n'), ((5648, 5710), 'numpy.logical_and', 'np.logical_and', (['(mzdist <= mz_tolerance)', '(rtdist <= rt_tolerance)'], {}), '(mzdist <= mz_tolerance, rtdist <= rt_tolerance)\n', (5662, 5710), True, 'import numpy as np\n'), ((8303, 8315), 'numpy.where', 'np.where', (['ok'], {}), '(ok)\n', (8311, 8315), True, 'import numpy as np\n'), ((11108, 11149), 'numpy.abs', 'np.abs', (["(self.feature_metadata['MZ'] - cmz)"], {}), "(self.feature_metadata['MZ'] - cmz)\n", (11114, 11149), True, 'import numpy as np\n'), ((11342, 11383), 'numpy.abs', 'np.abs', (["(self.feature_metadata['RT'] - crt)"], {}), "(self.feature_metadata['RT'] - crt)\n", (11348, 11383), True, 'import numpy as np\n'), ((11812, 11826), 'numpy.sum', 'np.sum', (['select'], {}), '(select)\n', (11818, 11826), True, 'import numpy as np\n'), ((6060, 6083), 'numpy.corrcoef', 'np.corrcoef', (['odat', 'cdat'], {}), '(odat, cdat)\n', (6071, 6083), True, 'import numpy as np\n'), ((6109, 6123), 'numpy.abs', 'np.abs', (['corrcf'], {}), '(corrcf)\n', (6115, 6123), True, 'import numpy as np\n')] |
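The `filter_mz_rt` docstring above describes keeping features whose m/z and retention time fall within a tolerance of the requested values. Below is a minimal standalone sketch of that matching logic on plain NumPy arrays; the feature table and query values are invented for illustration and this is not the calour API itself.

import numpy as np

# Hypothetical feature table: one m/z and one retention time per metabolite.
feature_mz = np.array([181.07, 203.05, 255.23, 181.09])
feature_rt = np.array([2.1, 5.4, 7.8, 2.3])

def select_features(mz, rt, mz_tolerance=0.05, rt_tolerance=0.2):
    """Boolean mask of features close to any requested (mz, rt) pair."""
    select = np.zeros(len(feature_mz), dtype=bool)
    for cmz, crt in zip(mz, rt):
        keepmz = np.abs(feature_mz - cmz) <= mz_tolerance
        keeprt = np.abs(feature_rt - crt) <= rt_tolerance
        select = np.logical_or(select, np.logical_and(keepmz, keeprt))
    return select

print(select_features(mz=[181.07], rt=[2.2]))   # [ True False False  True]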
import sys
import os
sys.path.append(os.path.abspath("."))
sys.dont_write_bytecode = True
__author__ = "COSAL"
from utils.lib import O
class ASTDistanceNode(O):
DELIMITER = "-"
def __init__(self, **kwargs):
O.__init__(self, **kwargs)
self.uid1 = None
self.uid2 = None
self.distance = None
def get_key(self):
return ASTDistanceNode.DELIMITER.join(sorted([self.uid1, self.uid2]))
def to_bson(self):
return {
"uid1": self.uid1,
"uid2": self.uid2,
"key": self.get_key(),
"distance": self.distance
}
@staticmethod
def from_bson(bson):
node = ASTDistanceNode()
splits = bson["key"].split(ASTDistanceNode.DELIMITER)
node.uid1 = splits[0]
node.uid2 = splits[1]
node.distance = bson["distance"]
return node
| [
"os.path.abspath",
"utils.lib.O.__init__"
] | [((38, 58), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (53, 58), False, 'import os\n'), ((221, 247), 'utils.lib.O.__init__', 'O.__init__', (['self'], {}), '(self, **kwargs)\n', (231, 247), False, 'from utils.lib import O\n')] |
from typing import Optional
import pytest
from fractional_indexing import FIError, generate_key_between, generate_n_keys_between
@pytest.mark.parametrize(['a', 'b', 'expected'], [
(None, None, 'a0'),
(None, 'a0', 'Zz'),
(None, 'Zz', 'Zy'),
('a0', None, 'a1'),
('a1', None, 'a2'),
('a0', 'a1', 'a0V'),
('a1', 'a2', 'a1V'),
('a0V', 'a1', 'a0l'),
('Zz', 'a0', 'ZzV'),
('Zz', 'a1', 'a0'),
(None, 'Y00', 'Xzzz'),
('bzz', None, 'c000'),
('a0', 'a0V', 'a0G'),
('a0', 'a0G', 'a08'),
('b125', 'b129', 'b127'),
('a0', 'a1V', 'a1'),
('Zz', 'a01', 'a0'),
(None, 'a0V', 'a0'),
(None, 'b999', 'b99'),
(None, 'A00000000000000000000000000', FIError('invalid order key: A00000000000000000000000000')),
(None, 'A000000000000000000000000001', 'A000000000000000000000000000V'),
('zzzzzzzzzzzzzzzzzzzzzzzzzzy', None, 'zzzzzzzzzzzzzzzzzzzzzzzzzzz'),
('zzzzzzzzzzzzzzzzzzzzzzzzzzz', None, 'zzzzzzzzzzzzzzzzzzzzzzzzzzzV'),
('a00', None, FIError('invalid order key: a00')),
('a00', 'a1', FIError('invalid order key: a00')),
('0', '1', FIError('invalid order key head: 0')),
('a1', 'a0', FIError('a1 >= a0')),
])
def test_generate_key_between(a: Optional[str], b: Optional[str], expected: str) -> None:
if isinstance(expected, FIError):
with pytest.raises(FIError) as e:
generate_key_between(a, b)
assert e.value.args[0] == expected.args[0]
return
else:
act = generate_key_between(a, b)
print(f'exp: {expected}')
print(f'act: {act}')
print(act == expected)
assert act == expected
@pytest.mark.parametrize(['a', 'b', 'n', 'expected'], [
(None, None, 5, 'a0 a1 a2 a3 a4'),
('a4', None, 10, 'a5 a6 a7 a8 a9 b00 b01 b02 b03 b04'),
(None, 'a0', 5, 'Z5 Z6 Z7 Z8 Z9'),
('a0', 'a2', 20, 'a01 a02 a03 a035 a04 a05 a06 a07 a08 a09 a1 a11 a12 a13 a14 a15 a16 a17 a18 a19'),
])
def test_generate_n_keys_between(a: Optional[str], b: Optional[str], n: int, expected: str) -> None:
base_10_digits = '0123456789'
act = ' '.join(generate_n_keys_between(a, b, n, base_10_digits))
print()
print(f'exp: {expected}')
print(f'act: {act}')
print(act == expected)
assert act == expected
def test_readme_example():
first = generate_key_between(None, None)
assert first == 'a0'
# Insert after 1st
second = generate_key_between(first, None)
assert second == 'a1'
# Insert after 2nd
third = generate_key_between(second, None)
assert third == 'a2'
# Insert before 1st
zeroth = generate_key_between(None, first)
assert zeroth == 'Zz'
# Insert in between 2nd and 3rd. Midpoint
second_and_half = generate_key_between(second, third)
assert second_and_half == 'a1V'
| [
"fractional_indexing.FIError",
"pytest.mark.parametrize",
"pytest.raises",
"fractional_indexing.generate_key_between",
"fractional_indexing.generate_n_keys_between"
] | [((1637, 1940), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["['a', 'b', 'n', 'expected']", "[(None, None, 5, 'a0 a1 a2 a3 a4'), ('a4', None, 10,\n 'a5 a6 a7 a8 a9 b00 b01 b02 b03 b04'), (None, 'a0', 5, 'Z5 Z6 Z7 Z8 Z9'\n ), ('a0', 'a2', 20,\n 'a01 a02 a03 a035 a04 a05 a06 a07 a08 a09 a1 a11 a12 a13 a14 a15 a16 a17 a18 a19'\n )]"], {}), "(['a', 'b', 'n', 'expected'], [(None, None, 5,\n 'a0 a1 a2 a3 a4'), ('a4', None, 10,\n 'a5 a6 a7 a8 a9 b00 b01 b02 b03 b04'), (None, 'a0', 5, 'Z5 Z6 Z7 Z8 Z9'\n ), ('a0', 'a2', 20,\n 'a01 a02 a03 a035 a04 a05 a06 a07 a08 a09 a1 a11 a12 a13 a14 a15 a16 a17 a18 a19'\n )])\n", (1660, 1940), False, 'import pytest\n'), ((2304, 2336), 'fractional_indexing.generate_key_between', 'generate_key_between', (['None', 'None'], {}), '(None, None)\n', (2324, 2336), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((2399, 2432), 'fractional_indexing.generate_key_between', 'generate_key_between', (['first', 'None'], {}), '(first, None)\n', (2419, 2432), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((2495, 2529), 'fractional_indexing.generate_key_between', 'generate_key_between', (['second', 'None'], {}), '(second, None)\n', (2515, 2529), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((2593, 2626), 'fractional_indexing.generate_key_between', 'generate_key_between', (['None', 'first'], {}), '(None, first)\n', (2613, 2626), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((2722, 2757), 'fractional_indexing.generate_key_between', 'generate_key_between', (['second', 'third'], {}), '(second, third)\n', (2742, 2757), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1498, 1524), 'fractional_indexing.generate_key_between', 'generate_key_between', (['a', 'b'], {}), '(a, b)\n', (1518, 1524), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((2092, 2140), 'fractional_indexing.generate_n_keys_between', 'generate_n_keys_between', (['a', 'b', 'n', 'base_10_digits'], {}), '(a, b, n, base_10_digits)\n', (2115, 2140), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1340, 1362), 'pytest.raises', 'pytest.raises', (['FIError'], {}), '(FIError)\n', (1353, 1362), False, 'import pytest\n'), ((1381, 1407), 'fractional_indexing.generate_key_between', 'generate_key_between', (['a', 'b'], {}), '(a, b)\n', (1401, 1407), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((709, 766), 'fractional_indexing.FIError', 'FIError', (['"""invalid order key: A00000000000000000000000000"""'], {}), "('invalid order key: A00000000000000000000000000')\n", (716, 766), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1013, 1046), 'fractional_indexing.FIError', 'FIError', (['"""invalid order key: a00"""'], {}), "('invalid order key: a00')\n", (1020, 1046), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1067, 1100), 'fractional_indexing.FIError', 'FIError', (['"""invalid order key: a00"""'], {}), "('invalid order key: a00')\n", (1074, 1100), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1118, 1154), 'fractional_indexing.FIError', 
'FIError', (['"""invalid order key head: 0"""'], {}), "('invalid order key head: 0')\n", (1125, 1154), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n'), ((1174, 1193), 'fractional_indexing.FIError', 'FIError', (['"""a1 >= a0"""'], {}), "('a1 >= a0')\n", (1181, 1193), False, 'from fractional_indexing import FIError, generate_key_between, generate_n_keys_between\n')] |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from pathlib import Path
import soundfile as sf
from acoustic import SpeedySpeechAcoustic
from frontend.zh_frontend import Frontend
from utils import mkdir, read_txt
from vocoder import PWGANVocoder
print('Initializing the text frontend (pre-processing)')
phones_dict = 'resources/speedyspeech_nosil_baker_ckpt_0.5/phone_id_map.txt'
tones_dict = 'resources/speedyspeech_nosil_baker_ckpt_0.5/tone_id_map.txt'
frontend = Frontend(phone_vocab_path=phones_dict,
tone_vocab_path=tones_dict)
print("frontend done!")
print('Initializing the acoustic (feature-generation) model')
speedyspeech_dir = Path('resources/models/speedyspeech_csmsc')
pdmodel_path = str(speedyspeech_dir / 'speedyspeech_csmsc.pdmodel')
pdiparam_path = str(speedyspeech_dir / 'speedyspeech_csmsc.pdiparams')
am_predictor = SpeedySpeechAcoustic(pdmodel_path, pdiparam_path)
print('am_predictor done!')
print('Initializing the vocoder (wav synthesis) model')
pwgan_model_path = 'resources/models/pwgan_csmsc/pwgan_csmsc.onnx'
voc_predictor = PWGANVocoder(pwgan_model_path)
save_wav_dir = 'infer_result'
mkdir(save_wav_dir)
print('Synthesizing the requested sentences')
sentences_path = 'sentences.txt'
sentences = read_txt(sentences_path)
for sentence_info in sentences:
start = time.time()
uuid, sentence = sentence_info.split(' ')
input_ids = frontend.get_input_ids(sentence,
merge_sentences=True,
get_tone_ids=True)
am_output_data = am_predictor(input_ids)
wav = voc_predictor(am_output_data)
elapse = time.time() - start
save_wav_path = f'{save_wav_dir}/{uuid}.wav'
sf.write(save_wav_path, wav, samplerate=24000)
print(f'{save_wav_path} done!\tcost: {elapse}s')
| [
"pathlib.Path",
"utils.read_txt",
"acoustic.SpeedySpeechAcoustic",
"soundfile.write",
"utils.mkdir",
"frontend.zh_frontend.Frontend",
"time.time",
"vocoder.PWGANVocoder"
] | [((1005, 1071), 'frontend.zh_frontend.Frontend', 'Frontend', ([], {'phone_vocab_path': 'phones_dict', 'tone_vocab_path': 'tones_dict'}), '(phone_vocab_path=phones_dict, tone_vocab_path=tones_dict)\n', (1013, 1071), False, 'from frontend.zh_frontend import Frontend\n'), ((1155, 1198), 'pathlib.Path', 'Path', (['"""resources/models/speedyspeech_csmsc"""'], {}), "('resources/models/speedyspeech_csmsc')\n", (1159, 1198), False, 'from pathlib import Path\n'), ((1354, 1403), 'acoustic.SpeedySpeechAcoustic', 'SpeedySpeechAcoustic', (['pdmodel_path', 'pdiparam_path'], {}), '(pdmodel_path, pdiparam_path)\n', (1374, 1403), False, 'from acoustic import SpeedySpeechAcoustic\n'), ((1536, 1566), 'vocoder.PWGANVocoder', 'PWGANVocoder', (['pwgan_model_path'], {}), '(pwgan_model_path)\n', (1548, 1566), False, 'from vocoder import PWGANVocoder\n'), ((1598, 1617), 'utils.mkdir', 'mkdir', (['save_wav_dir'], {}), '(save_wav_dir)\n', (1603, 1617), False, 'from utils import mkdir, read_txt\n'), ((1680, 1704), 'utils.read_txt', 'read_txt', (['sentences_path'], {}), '(sentences_path)\n', (1688, 1704), False, 'from utils import mkdir, read_txt\n'), ((1750, 1761), 'time.time', 'time.time', ([], {}), '()\n', (1759, 1761), False, 'import time\n'), ((2153, 2199), 'soundfile.write', 'sf.write', (['save_wav_path', 'wav'], {'samplerate': '(24000)'}), '(save_wav_path, wav, samplerate=24000)\n', (2161, 2199), True, 'import soundfile as sf\n'), ((2079, 2090), 'time.time', 'time.time', ([], {}), '()\n', (2088, 2090), False, 'import time\n')] |
import numpy as np
import random
from finetuna.ml_potentials.bootstrap import non_bootstrap_ensemble
import torch
import uuid
from finetuna.ml_potentials.ml_potential_calc import MLPCalc
from ocpmodels.trainers.amp_xfer_trainer import OCPXTrainer
torch.multiprocessing.set_sharing_strategy("file_system")
__author__ = "<NAME>"
__email__ = "<EMAIL>"
class OCPEnsembleCalc(MLPCalc):
"""Atomistics Machine-Learning Potential (AMP) ASE calculator
Parameters
----------
model : object
Class representing the regression model. Input arguments include training
images, descriptor type, and force_coefficient. Model structure and training schemes can be
modified directly within the class.
label : str
Location to save the trained model.
"""
implemented_properties = ["energy", "forces", "max_force_stds", "energy_stds"]
executor = None
def __init__(self, amptorch_trainer, n_ensembles):
MLPCalc.__init__(self, mlp_params=amptorch_trainer.config)
self.amptorch_trainer = amptorch_trainer
self.n_ensembles = n_ensembles
def calculate_stats(self, energies, forces):
median_idx = np.argsort(energies)[len(energies) // 2]
energy_median = energies[median_idx]
forces_median = forces[median_idx]
max_forces_var = np.nanmax(np.nanvar(forces, axis=0))
energy_var = np.nanvar(energies)
return (
energy_median,
forces_median,
max_forces_var,
energy_var,
)
def calculate(self, atoms, properties, system_changes):
MLPCalc.calculate(
self, atoms=atoms, properties=properties, system_changes=system_changes
)
energies = []
forces = []
for predictor in self.trained_trainers:
prediction = predictor.predict(atoms)
energies.append(prediction["energy"].data.numpy()[0])
forces.append(prediction["forces"].data.numpy())
energies = np.array(energies)
forces = np.array(forces)
energy_pred, force_pred, max_forces_var, energy_var = self.calculate_stats(
energies, forces
)
self.results["energy"] = energy_pred
self.results["forces"] = force_pred
atoms.info["energy_stds"] = energy_var**0.2
atoms.info["max_force_stds"] = max_forces_var**0.5
def train(self, parent_dataset, new_dataset=None):
"""
Uses Dask to parallelize, must have previously set up cluster,
image to use, and pool of workers
"""
ensemble_sets, parent_dataset = non_bootstrap_ensemble(
parent_dataset, n_ensembles=self.n_ensembles
)
def train_and_combine(args_list):
"""
method for training trainer on ensemble sets, then create neural net calc,
returns trained calc
"""
training_dataset = args_list[0]
trainer = args_list[1]
seed = args_list[2]
uniqueid = args_list[3]
trainer.model = OCPXTrainer.get_pretrained(
training_dataset, seed, uniqueid, trainer.a2g_train
)
trainer.train(raw_data=training_dataset)
# check_path = trainer.cp_dir
# trainer = AtomsTrainer()
# trainer.load_pretrained(checkpoint_path=check_path)
# trainer_calc = trainer.get_calc()
# return trainer_calc
return trainer
# split ensemble sets into separate args_lists, clone: trainer,
# base calc and add to args_lists, add: refs to args_lists
args_lists = []
random.seed(self.amptorch_trainer.config["cmd"]["seed"])
        randomlist = [random.randint(0, 4294967295) for _ in ensemble_sets]
for i in range(len(ensemble_sets)):
ensemble_set = ensemble_sets[i]
random.seed(randomlist[i])
random.shuffle(ensemble_set)
trainer_copy = self.amptorch_trainer.copy()
trainer_copy.config["cmd"]["seed"] = randomlist[i]
trainer_copy.config["cmd"]["identifier"] = trainer_copy.config["cmd"][
"identifier"
] + str(uuid.uuid4())
args_lists.append(
(
ensemble_set,
trainer_copy,
randomlist[i],
trainer_copy.model.config["cmd"]["identifier"] + str(uuid.uuid4()),
)
)
# map training method, returns array of delta calcs
trained_trainers = []
if self.executor is not None:
futures = []
for args_list in args_lists:
big_future = self.executor.scatter(args_list)
futures.append(self.executor.submit(train_and_combine, big_future))
trained_trainers = [future.result() for future in futures]
else:
for args_list in args_lists:
trained_trainers.append(train_and_combine(args_list))
# call init to construct ensemble calc from array of delta calcs
self.trained_trainers = trained_trainers
@classmethod
def set_executor(cls, executor):
cls.executor = executor
| [
"finetuna.ml_potentials.ml_potential_calc.MLPCalc.__init__",
"random.shuffle",
"numpy.nanvar",
"random.seed",
"uuid.uuid4",
"finetuna.ml_potentials.ml_potential_calc.MLPCalc.calculate",
"numpy.array",
"numpy.argsort",
"ocpmodels.trainers.amp_xfer_trainer.OCPXTrainer.get_pretrained",
"torch.multiprocessing.set_sharing_strategy",
"random.randint",
"finetuna.ml_potentials.bootstrap.non_bootstrap_ensemble"
] | [((249, 306), 'torch.multiprocessing.set_sharing_strategy', 'torch.multiprocessing.set_sharing_strategy', (['"""file_system"""'], {}), "('file_system')\n", (291, 306), False, 'import torch\n'), ((969, 1027), 'finetuna.ml_potentials.ml_potential_calc.MLPCalc.__init__', 'MLPCalc.__init__', (['self'], {'mlp_params': 'amptorch_trainer.config'}), '(self, mlp_params=amptorch_trainer.config)\n', (985, 1027), False, 'from finetuna.ml_potentials.ml_potential_calc import MLPCalc\n'), ((1399, 1418), 'numpy.nanvar', 'np.nanvar', (['energies'], {}), '(energies)\n', (1408, 1418), True, 'import numpy as np\n'), ((1621, 1716), 'finetuna.ml_potentials.ml_potential_calc.MLPCalc.calculate', 'MLPCalc.calculate', (['self'], {'atoms': 'atoms', 'properties': 'properties', 'system_changes': 'system_changes'}), '(self, atoms=atoms, properties=properties, system_changes=\n system_changes)\n', (1638, 1716), False, 'from finetuna.ml_potentials.ml_potential_calc import MLPCalc\n'), ((2021, 2039), 'numpy.array', 'np.array', (['energies'], {}), '(energies)\n', (2029, 2039), True, 'import numpy as np\n'), ((2057, 2073), 'numpy.array', 'np.array', (['forces'], {}), '(forces)\n', (2065, 2073), True, 'import numpy as np\n'), ((2632, 2700), 'finetuna.ml_potentials.bootstrap.non_bootstrap_ensemble', 'non_bootstrap_ensemble', (['parent_dataset'], {'n_ensembles': 'self.n_ensembles'}), '(parent_dataset, n_ensembles=self.n_ensembles)\n', (2654, 2700), False, 'from finetuna.ml_potentials.bootstrap import non_bootstrap_ensemble\n'), ((3686, 3742), 'random.seed', 'random.seed', (["self.amptorch_trainer.config['cmd']['seed']"], {}), "(self.amptorch_trainer.config['cmd']['seed'])\n", (3697, 3742), False, 'import random\n'), ((1187, 1207), 'numpy.argsort', 'np.argsort', (['energies'], {}), '(energies)\n', (1197, 1207), True, 'import numpy as np\n'), ((1351, 1376), 'numpy.nanvar', 'np.nanvar', (['forces'], {'axis': '(0)'}), '(forces, axis=0)\n', (1360, 1376), True, 'import numpy as np\n'), ((3094, 3173), 'ocpmodels.trainers.amp_xfer_trainer.OCPXTrainer.get_pretrained', 'OCPXTrainer.get_pretrained', (['training_dataset', 'seed', 'uniqueid', 'trainer.a2g_train'], {}), '(training_dataset, seed, uniqueid, trainer.a2g_train)\n', (3120, 3173), False, 'from ocpmodels.trainers.amp_xfer_trainer import OCPXTrainer\n'), ((3765, 3794), 'random.randint', 'random.randint', (['(0)', '(4294967295)'], {}), '(0, 4294967295)\n', (3779, 3794), False, 'import random\n'), ((3921, 3947), 'random.seed', 'random.seed', (['randomlist[i]'], {}), '(randomlist[i])\n', (3932, 3947), False, 'import random\n'), ((3960, 3988), 'random.shuffle', 'random.shuffle', (['ensemble_set'], {}), '(ensemble_set)\n', (3974, 3988), False, 'import random\n'), ((4241, 4253), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4251, 4253), False, 'import uuid\n'), ((4481, 4493), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4491, 4493), False, 'import uuid\n')] |
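calculate_stats above reduces the per-member ensemble predictions to a median energy, the matching force array, and the variances that the calculator reports as uncertainties. The following is a hedged standalone sketch of the same reduction on made-up ensemble outputs (three members, two atoms), not the OCPEnsembleCalc API itself.

import numpy as np

# Made-up predictions from a 3-member ensemble for a 2-atom structure.
energies = np.array([-10.2, -10.0, -10.4])                   # eV
forces = np.random.default_rng(0).normal(size=(3, 2, 3))     # eV/Angstrom

median_idx = np.argsort(energies)[len(energies) // 2]
energy_median = energies[median_idx]                      # -10.2, the middle value
forces_median = forces[median_idx]
max_forces_var = np.nanmax(np.nanvar(forces, axis=0))      # largest per-component force variance
energy_var = np.nanvar(energies)

# The calculator stores standard deviations, i.e. square roots of these variances.
print(energy_median, energy_var ** 0.5, max_forces_var ** 0.5)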
##################
# coding=utf-8
# author: <NAME>
# Load the datasets and compute the data entropy and the entropy deficit
##################
import numpy as np
import ComputeDifferenceEntropy
d1 = [1.0, 2.0, 2.23606798]
d2 = [1.0, 2.06155281]
d1 = np.array(d1)
d2 = np.array(d2)
h1 = [1.0, 0.9602297178607612, 0.9821410328348751, 1.0]
h2 = [1.0, 0.9602297178607612, 1.0]
ComputeDifferenceEntropy.ComputeDifferEntopy(d1, d2, h1, h2)
| [
"numpy.array",
"ComputeDifferenceEntropy.ComputeDifferEntopy"
] | [((214, 226), 'numpy.array', 'np.array', (['d1'], {}), '(d1)\n', (222, 226), True, 'import numpy as np\n'), ((233, 245), 'numpy.array', 'np.array', (['d2'], {}), '(d2)\n', (241, 245), True, 'import numpy as np\n'), ((343, 403), 'ComputeDifferenceEntropy.ComputeDifferEntopy', 'ComputeDifferenceEntropy.ComputeDifferEntopy', (['d1', 'd2', 'h1', 'h2'], {}), '(d1, d2, h1, h2)\n', (387, 403), False, 'import ComputeDifferenceEntropy\n')] |
# -*- coding: utf-8 -*-
"""-----------------------------------------------------------------------------
-- MIT License
--
-- Copyright (c) 2020 <NAME>
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
-- in the Software without restriction, including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-- copies of the Software, and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in
-- all copies or substantial portions of the Software.
--
-- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
--------------------------------------------------------------------------------
-- @file display_pmod_parse_from_spi_spy.py
--
-- @brief A script to parse specific SPI bus control of the Pmod CLS and Pmod
-- SF3 as capturerd with digital logic analyzer. Parsing and testing specific
-- to both the Pmod interfaces as well as project fpga-serial-mem-tester-1 .
-----------------------------------------------------------------------------"""
import io
import sys
import re
import copy
class N25QCommand:
def __init__(self, copi, cipo):
self._copi = copi
self._cipo = cipo
self.lineFormat = 1
self._copiFormatted = self._copi
self._cipoFormatted = self._cipo
def insertDashes(self, dashPos):
self._copiFormatted = copy.copy(self._copi)
self._cipoFormatted = copy.copy(self._cipo)
for i in reversed(dashPos):
self._copiFormatted.insert(i, '--')
self._cipoFormatted.insert(i, '--')
def __str__(self):
if self.lineFormat == 2:
return "N25Q 0x{1} {0:<30}\t(\nout: {2};\nin : {3})".format(
self.CommandName, self.CommandByte,
" ".join(self._copiFormatted),
" ".join(self._cipoFormatted))
else:
return "N25Q 0x{1} {0:<30}\t(out: {2}; in: {3})".format(
self.CommandName, self.CommandByte,
" ".join(self._copiFormatted),
" ".join(self._cipoFormatted))
def _getAddrAsInt(self, first, length, arr):
try:
s = ""
for i in range(first,first+length):
s += arr[i]
v = int(s, 16)
return v
except ValueError:
return -1
def _getFlashSequence(self, first, length, arr):
try:
v0 = int(arr[first + 0], 16)
v1 = int(arr[first + 1], 16)
diff = (v1 - v0) & 0xff
total = 1
for i in range(first+1, first+length):
v0 = int(arr[i - 1], 16)
v1 = int(arr[i], 16)
if ((v1 - v0) & 0xff == diff):
total += 1
return total
except ValueError:
return -1
except IndexError:
return -2
class N25QUnknown(N25QCommand):
CommandByte = "xx"
CommandName = "Unknown Command / Spy Fail"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
class N25QWriteEnable(N25QCommand):
CommandByte = "06"
CommandName = "WriteEnable"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, ))
class N25QReadStatusRegister(N25QCommand):
CommandByte = "05"
CommandName = "ReadStatusRegister"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 2))
class N25QReadFlagStatusRegister(N25QCommand):
CommandByte = "70"
CommandName = "ReadFlagStatusRegister"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 2))
class N25QSectorErase(N25QCommand):
CommandByte = "D8"
CommandName = "SectorErase"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 4))
def getEraseAddrAsInt(self):
return self._getAddrAsInt(1, 3, self._copi)
class N25Q4ByteSubsectorErase(N25QCommand):
CommandByte = "21"
CommandName = "4ByteSubsectorErase"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 5))
def getEraseAddrAsInt(self):
return self._getAddrAsInt(1, 4, self._copi)
class N25QPageProgram(N25QCommand):
CommandByte = "02"
CommandName = "PageProgram"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 4, 4+256))
self.lineFormat = 2
def getProgAddrAsInt(self):
return self._getAddrAsInt(1, 3, self._copi)
def getProgSequence(self):
return self._getFlashSequence(4, 256, self._copi)
class N25Q4BytePageProgram(N25QCommand):
CommandByte = "12"
CommandName = "4BytePageProgram"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 5, 5+256))
self.lineFormat = 2
def getProgAddrAsInt(self):
return self._getAddrAsInt(1, 4, self._copi)
def getProgSequence(self):
return self._getFlashSequence(5, 256, self._copi)
class N25QRead(N25QCommand):
CommandByte = "03"
CommandName = "Read"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 4, 4+256))
self.lineFormat = 2
def getReadAddrAsInt(self):
return self._getAddrAsInt(1, 3, self._copi)
def getReadSequence(self):
return self._getFlashSequence(4, 256, self._cipo)
class N25Q4ByteFastRead(N25QCommand):
CommandByte = "0C"
CommandName = "4ByteFastRead"
def __init__(self, copi, cipo):
super().__init__(copi, cipo)
self.insertDashes((1, 5, 6, 6+256))
self.lineFormat = 2
def getReadAddrAsInt(self):
return self._getAddrAsInt(1, 4, self._copi)
def getReadSequence(self):
return self._getFlashSequence(6, 256, self._cipo)
class N25QCommandFactory:
def __init__(self):
pass
def getCmd(self, bCopi, bCipo):
if (len(bCopi) > 0):
b = bCopi[0]
print(b)
if (b == N25QWriteEnable.CommandByte):
cmd = N25QWriteEnable(bCopi, bCipo)
elif (b == N25QReadStatusRegister.CommandByte):
cmd = N25QReadStatusRegister(bCopi, bCipo)
elif (b == N25QSectorErase.CommandByte):
cmd = N25QSectorErase(bCopi, bCipo)
elif (b == N25QPageProgram.CommandByte):
cmd = N25QPageProgram(bCopi, bCipo)
elif (b == N25QRead.CommandByte):
cmd = N25QRead(bCopi, bCipo)
elif (b == N25QReadFlagStatusRegister.CommandByte):
cmd = N25QReadFlagStatusRegister(bCopi, bCipo)
elif (b == N25Q4ByteSubsectorErase.CommandByte):
cmd = N25Q4ByteSubsectorErase(bCopi, bCipo)
elif (b == N25Q4BytePageProgram.CommandByte):
cmd = N25Q4BytePageProgram(bCopi, bCipo)
elif (b == N25Q4ByteFastRead.CommandByte):
cmd = N25Q4ByteFastRead(bCopi, bCipo)
else:
cmd = N25QUnknown(bCopi, bCipo)
else:
print(None)
cmd = N25QUnknown(bCopi, bCipo)
return cmd
class AnalogDiscoverySpiSpyParser:
EscCharacters = ["1B",]
PartsCopi = ["c", "cp"]
PartsCipo = ["p", "cp"]
rexData = re.compile(r"^Data[:][ ]")
def __init__(self, fileName):
self._currentLine = None
self._ioParts = None
self._fh = io.open(fileName, "r")
self._strCopi = None
self._strCipo = None
self._asciiCopi = None
self._asciiCipo = None
self._flashCmds = []
self._cmdFactory = N25QCommandFactory()
def readCurrentLine(self):
self._currentLine = self._fh.readline()
if self._currentLine:
return True
else:
return False
def parseDataParts(self):
if self._currentLine:
if self.rexData.match(self._currentLine):
dataParts = self._currentLine.split(":")
lineParts = dataParts[1].split(",")
self._ioParts = []
for linePart in lineParts:
partRep = linePart.replace('h', '')
self._ioParts.append(partRep.split("|"))
return True
else:
return False
else:
return False
def close(self):
self._fh.close()
def getIoParts(self):
return self._ioParts
def getFlashCmds(self):
return self._flashCmds
def getIoPartsAsN25Q(self):
bCopi = []
bCipo = []
for ioPart in self.getIoParts():
if (len(ioPart) == 2):
bCopi.append(ioPart[0].strip())
bCipo.append(ioPart[1].strip())
cmd = self._cmdFactory.getCmd(bCopi, bCipo)
self._flashCmds.append(cmd)
return str(cmd)
def _genHexStrAndEsc(self, arr):
strArr = ""
escArr = []
for a in arr:
if (a not in self.EscCharacters):
strArr += a
else:
escArr.append(len(strArr))
return (strArr, escArr)
def _genAsciiEsc(self, arr):
(strArr, escArr) = self._genHexStrAndEsc(arr)
asciiArr = ""
ba = str(bytearray.fromhex(strArr).decode())
for b in range(len(ba)):
if (len(escArr) > 0):
l = escArr[0]
while(b == l):
escArr.pop(0)
asciiArr += r"\x"
if (len(escArr) > 0):
l = escArr[0]
else:
l = -1
asciiArr += ba[b]
return (strArr, asciiArr)
def getIoPartsAsEscAscii(self):
bCopi = []
bCipo = []
for ioPart in self.getIoParts():
if (len(ioPart) == 2):
bCopi.append(ioPart[0].strip())
bCipo.append(ioPart[1].strip())
(self._strCopi, self._asciiCopi) = self._genAsciiEsc(bCopi)
(self._strCipo, self._asciiCipo) = self._genAsciiEsc(bCipo)
def getCurrentLine(self):
return self._currentLine
def getStrCopi(self):
return self._strCopi
def getStrCipo(self):
return self._strCipo
def getAsciiCopi(self):
return self._asciiCopi
def getAsciiCipo(self):
return self._asciiCipo
class PmodCLSTranslator:
def __init__(self, partFlag, filename):
self._partFlag = partFlag
self._adssp = AnalogDiscoverySpiSpyParser(filename)
self._fhParse = io.open(filename + "_parse.txt", "w")
def parseWithAdssp(self):
i = 0
while(self._adssp.readCurrentLine()):
i = i + 1
if self._adssp.parseDataParts():
self._adssp.getIoPartsAsEscAscii()
if (self._partFlag in self._adssp.PartsCopi):
self._fhParse.write(self._adssp.getStrCopi())
self._fhParse.write("\n")
self._fhParse.write(self._adssp.getAsciiCopi())
self._fhParse.write("\n")
if (self._partFlag in self._adssp.PartsCipo):
self._fhParse.write(self._adssp.getStrCipo())
self._fhParse.write("\n")
self._fhParse.write(self._adssp.getAsciiCipo())
self._fhParse.write("\n")
self._fhParse.write("\n")
self._adssp.close()
self._fhParse.close()
class PmodSF3TesterValidator:
def __init__(self, filename):
self._adssp = AnalogDiscoverySpiSpyParser(filename)
self._fhParse = io.open(filename + "_parse.txt", "w")
self._fhCheck = io.open(filename + "_check.txt", "w")
self._thisAddr = 0
self._prevAddr = 0
self._eraseIncr = 4096
self._readIncr = 256
self._progIncr = 256
self._ssEraseIncr = self._progIncr * 16
self._sEraseIncr = self._ssEraseIncr * 16
def parseWithAdssp(self):
i = 0
while(self._adssp.readCurrentLine()):
i = i + 1
if self._adssp.parseDataParts():
s = self._adssp.getIoPartsAsN25Q()
if s:
self._fhParse.write(s)
self._fhParse.write("\n")
self._fhParse.write("\n")
self._adssp.close()
self._fhParse.close()
def _checkEraseAddr(self, cmd):
if (hasattr(cmd, 'getEraseAddrAsInt')):
self._prevAddr = self._thisAddr
self._thisAddr = cmd.getEraseAddrAsInt()
self._diffAddr = self._thisAddr - self._prevAddr
if (isinstance(cmd, N25QSectorErase)):
self._eraseIncr = self._sEraseIncr
else:
self._eraseIncr = self._ssEraseIncr
if (self._diffAddr == self._eraseIncr):
print(f"N25Q{cmd.CommandName} Check: valid erase address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
print(f"N25Q{cmd.CommandName} Check: invalid erase address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
pass
def _checkReadAddr(self, cmd):
if (hasattr(cmd, 'getReadAddrAsInt')):
self._prevAddr = self._thisAddr
self._thisAddr = cmd.getReadAddrAsInt()
self._diffAddr = self._thisAddr - self._prevAddr
if (self._diffAddr == self._readIncr):
print(f"N25Q{cmd.CommandName} Check: valid read address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
print(f"N25Q{cmd.CommandName} Check: invalid read address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
pass
def _checkProgAddr(self, cmd):
if (hasattr(cmd, 'getProgAddrAsInt')):
self._prevAddr = self._thisAddr
self._thisAddr = cmd.getProgAddrAsInt()
self._diffAddr = self._thisAddr - self._prevAddr
if (self._diffAddr == self._progIncr):
print(f"N25Q{cmd.CommandName} Check: valid prog address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
print(f"N25Q{cmd.CommandName} Check: invalid prog address"
f" increment by {self._diffAddr}", file=self._fhCheck)
else:
pass
def _checkReadSeq(self, cmd):
if (hasattr(cmd, 'getReadSequence')):
self._seqCnt = cmd.getReadSequence()
if (self._seqCnt == self._readIncr):
print(f"N25Q{cmd.CommandName} Check: valid read data"
f" increment for {self._seqCnt} bytes\n", file=self._fhCheck)
else:
print(f"N25Q{cmd.CommandName} Check: invalid read data"
f" increment for {self._seqCnt} bytes\n", file=self._fhCheck)
else:
pass
def _checkProgSeq(self, cmd):
if (hasattr(cmd, 'getProgSequence')):
self._seqCnt = cmd.getProgSequence()
if (self._seqCnt == self._progIncr):
print(f"N25Q{cmd.CommandName} Check: valid prog data"
f" increment for {self._seqCnt} bytes\n", file=self._fhCheck)
else:
print(f"N25Q{cmd.CommandName} Check: invalid prog data"
f" increment for {self._seqCnt} bytes\n", file=self._fhCheck)
else:
pass
def checkValidateCommandBytes(self):
for cmd in self._adssp.getFlashCmds():
print(cmd, file=self._fhCheck)
self._checkEraseAddr(cmd)
self._checkReadAddr(cmd)
self._checkProgAddr(cmd)
self._checkReadSeq(cmd)
self._checkProgSeq(cmd)
print(file=self._fhCheck)
self._fhCheck.close()
def mainPmodCLS(filename, partFlag):
parser = PmodCLSTranslator(partFlag, filename)
parser.parseWithAdssp()
def mainPmodSF3(filename, partFlag):
validator = PmodSF3TesterValidator(filename)
validator.parseWithAdssp()
validator.checkValidateCommandBytes()
def usage():
print("{} : <c | p | cp> <filename.txt>" .formatt(sys.argv[0]))
print("{}".format(sys.argv[0]))
sys.exit(1)
if __name__ == "__main__":
if (len(sys.argv) == 1):
partFlag = "c"
pmodCLSfileNames = [\
"SF-Tester-Design-AXI/CLS SPI Spy Capture of Boot-Time Display at ext_spi_clk SCK.txt",
"SF-Tester-Design-AXI/CLS SPI Spy Capture of First-Iteration Display at ext_spi_clk SCK.txt",
"SF-Tester-Design-VHDL/CLS SPI Spy Capture of Boot-Time Display at 50 KHz SCK.txt",
"SF-Tester-Design-VHDL/CLS SPI Spy Capture of First-Iteration Display at 50 KHz SCK.txt"]
for fileName in pmodCLSfileNames:
mainPmodCLS(fileName, partFlag)
partFlag = "cp"
pmodSF3fileNames = [\
"SF-Tester-Design-VHDL/t.txt",
"SF-Tester-Design-AXI/SF3 SPI Spy Capture of Erase Subsector at ext_spi_clk SCK.txt",
"SF-Tester-Design-AXI/SF3 SPI Spy Capture of Page Program at ext_spi_clk SCK.txt",
"SF-Tester-Design-AXI/SF3 SPI Spy Capture of Random Read at ext_spi_clk SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Erase Subsector at 50 KHz SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Erase Subsector at 500 KHz SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Page Program at 50 KHz SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Page Program at 500 KHz SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Random Read at 50 KHz SCK.txt",
"SF-Tester-Design-VHDL/SF3 SPI Spy Capture of Random Read at 500 KHz SCK.txt"]
for fileName in pmodSF3fileNames:
mainPmodSF3(fileName, partFlag)
else:
usage()
| [
"copy.copy",
"sys.exit",
"io.open",
"re.compile"
] | [((8146, 8171), 're.compile', 're.compile', (['"""^Data[:][ ]"""'], {}), "('^Data[:][ ]')\n", (8156, 8171), False, 'import re\n'), ((17606, 17617), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (17614, 17617), False, 'import sys\n'), ((1999, 2020), 'copy.copy', 'copy.copy', (['self._copi'], {}), '(self._copi)\n', (2008, 2020), False, 'import copy\n'), ((2051, 2072), 'copy.copy', 'copy.copy', (['self._cipo'], {}), '(self._cipo)\n', (2060, 2072), False, 'import copy\n'), ((8289, 8311), 'io.open', 'io.open', (['fileName', '"""r"""'], {}), "(fileName, 'r')\n", (8296, 8311), False, 'import io\n'), ((11566, 11603), 'io.open', 'io.open', (["(filename + '_parse.txt')", '"""w"""'], {}), "(filename + '_parse.txt', 'w')\n", (11573, 11603), False, 'import io\n'), ((12731, 12768), 'io.open', 'io.open', (["(filename + '_parse.txt')", '"""w"""'], {}), "(filename + '_parse.txt', 'w')\n", (12738, 12768), False, 'import io\n'), ((12793, 12830), 'io.open', 'io.open', (["(filename + '_check.txt')", '"""w"""'], {}), "(filename + '_check.txt', 'w')\n", (12800, 12830), False, 'import io\n')] |
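The N25Q command classes above recover a flash address from consecutive hex-string bytes and verify that a 256-byte payload increments by a constant step. Below is a short standalone sketch of both ideas on invented capture bytes; the helper names are made up and do not belong to the script above.

# Hex bytes in the same form the spy-capture parser produces (made-up 4ByteFastRead header).
copi = ["0C", "00", "01", "FF", "00"]   # command byte followed by a 32-bit address

def addr_as_int(first, length, arr):
    """Join `length` hex bytes starting at `first` and parse them as one integer."""
    return int("".join(arr[first:first + length]), 16)

print(hex(addr_as_int(1, 4, copi)))   # 0x1ff00

def is_incrementing(arr, step=1):
    """True if successive bytes differ by `step` modulo 256 (a sequential test pattern)."""
    vals = [int(b, 16) for b in arr]
    return all((b - a) & 0xFF == step for a, b in zip(vals, vals[1:]))

print(is_incrementing(["FE", "FF", "00", "01"]))   # True: the count wraps from 0xFF to 0x00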
# Generated by Django 4.0.1 on 2022-02-22 20:51
import ckeditor_uploader.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('projects', '0017_project_body'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='body',
),
migrations.AlterField(
model_name='project',
name='content',
field=ckeditor_uploader.fields.RichTextUploadingField(blank=True, null=True),
),
]
| [
"django.db.migrations.RemoveField"
] | [((254, 311), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""project"""', 'name': '"""body"""'}), "(model_name='project', name='body')\n", (276, 311), False, 'from django.db import migrations\n')] |
"""Test the /admin/user blueprint routes."""
import json
from . import tmp_app_with_users # NOQA
from . import (
snowwhite_token,
grumpy_token,
noone_token,
)
def test_register_user_route(tmp_app_with_users): # NOQA
from dtool_lookup_server.utils import user_exists
assert not user_exists("evil-witch")
assert not user_exists("dopey")
users = [
{"username": "evil-witch", "is_admin": True},
{"username": "dopey"}
]
headers = dict(Authorization="Bearer " + snowwhite_token)
r = tmp_app_with_users.post(
"/admin/user/register",
headers=headers,
data=json.dumps(users),
content_type="application/json"
)
assert r.status_code == 201
assert user_exists("evil-witch")
assert user_exists("dopey")
# Ensure idempotent.
r = tmp_app_with_users.post(
"/admin/user/register",
headers=headers,
data=json.dumps(users),
content_type="application/json"
)
assert r.status_code == 201
assert user_exists("evil-witch")
assert user_exists("dopey")
# Only admins allowed. However, don't give away that URL exists to
# non-admins.
headers = dict(Authorization="Bearer " + grumpy_token)
r = tmp_app_with_users.post(
"/admin/user/register",
headers=headers,
data=json.dumps(users),
content_type="application/json"
)
assert r.status_code == 404
headers = dict(Authorization="Bearer " + noone_token)
r = tmp_app_with_users.post(
"/admin/user/register",
headers=headers,
data=json.dumps(users),
content_type="application/json"
)
assert r.status_code == 404
def test_list_user_route(tmp_app_with_users): # NOQA
headers = dict(Authorization="Bearer " + snowwhite_token)
r = tmp_app_with_users.get(
"/admin/user/list",
headers=headers,
)
assert r.status_code == 200
# Only admins allowed. However, don't give away that URL exists to
# non-admins.
headers = dict(Authorization="Bearer " + grumpy_token)
r = tmp_app_with_users.get(
"/admin/user/list",
headers=headers
)
assert r.status_code == 404
headers = dict(Authorization="Bearer " + noone_token)
r = tmp_app_with_users.get(
"/admin/user/list",
headers=headers
)
assert r.status_code == 404
| [
"dtool_lookup_server.utils.user_exists",
"json.dumps"
] | [((746, 771), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""evil-witch"""'], {}), "('evil-witch')\n", (757, 771), False, 'from dtool_lookup_server.utils import user_exists\n'), ((783, 803), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""dopey"""'], {}), "('dopey')\n", (794, 803), False, 'from dtool_lookup_server.utils import user_exists\n'), ((1041, 1066), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""evil-witch"""'], {}), "('evil-witch')\n", (1052, 1066), False, 'from dtool_lookup_server.utils import user_exists\n'), ((1078, 1098), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""dopey"""'], {}), "('dopey')\n", (1089, 1098), False, 'from dtool_lookup_server.utils import user_exists\n'), ((306, 331), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""evil-witch"""'], {}), "('evil-witch')\n", (317, 331), False, 'from dtool_lookup_server.utils import user_exists\n'), ((347, 367), 'dtool_lookup_server.utils.user_exists', 'user_exists', (['"""dopey"""'], {}), "('dopey')\n", (358, 367), False, 'from dtool_lookup_server.utils import user_exists\n'), ((638, 655), 'json.dumps', 'json.dumps', (['users'], {}), '(users)\n', (648, 655), False, 'import json\n'), ((933, 950), 'json.dumps', 'json.dumps', (['users'], {}), '(users)\n', (943, 950), False, 'import json\n'), ((1351, 1368), 'json.dumps', 'json.dumps', (['users'], {}), '(users)\n', (1361, 1368), False, 'import json\n'), ((1610, 1627), 'json.dumps', 'json.dumps', (['users'], {}), '(users)\n', (1620, 1627), False, 'import json\n')] |
from django.db import models
from django.db.models.fields import DateTimeCheckMixin, DateTimeField
from datetime import datetime
"""
* investimento
* valor
* pago
* data
"""
class Investimento(models.Model):
investimento = models.TextField(max_length=255)
valor = models.FloatField()
pago = models.BooleanField(default=False)
data = models.DateField(default=datetime.now)
| [
"django.db.models.DateField",
"django.db.models.FloatField",
"django.db.models.TextField",
"django.db.models.BooleanField"
] | [((231, 263), 'django.db.models.TextField', 'models.TextField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (247, 263), False, 'from django.db import models\n'), ((276, 295), 'django.db.models.FloatField', 'models.FloatField', ([], {}), '()\n', (293, 295), False, 'from django.db import models\n'), ((307, 341), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (326, 341), False, 'from django.db import models\n'), ((353, 391), 'django.db.models.DateField', 'models.DateField', ([], {'default': 'datetime.now'}), '(default=datetime.now)\n', (369, 391), False, 'from django.db import models\n')] |
import numpy as np
import torch
from base.base import Learner
from runners.runners import make_ppo_runner, SavedRewardsResetsRunner
from selection.select_layers import SelectModelFromLayers
from utils.additional import GPU_ids
from .policies_algs import ActorCriticPolicy, PPO
class PPOLearner(Learner):
""" Proximal Policy Optimization learner. """
@staticmethod
def get_defaults(env_type="atari"):
defaults = {
"atari": {
"num_train_steps": 10e6,
"nenvs": 8,
"num_runner_steps": 128,
"gamma": 0.99,
"lambda_": 0.95,
"num_epochs": 3,
"num_minibatches": 4,
"cliprange": 0.1,
"value_loss_coef": 0.25,
"entropy_coef": 0.01,
"max_grad_norm": 0.5,
"lr": 2.5e-4,
"optimizer_epsilon": 1e-5,
},
"mujoco": {
"num_train_steps": 1e6,
"nenvs": dict(type=int, default=None),
"num_runner_steps": 2048,
"gamma": 0.99,
"lambda_": 0.95,
"num_epochs": 10,
"num_minibatches": 32,
"cliprange": 0.2,
"value_loss_coef": 0.25,
"entropy_coef": 0.,
"max_grad_norm": 0.5,
"lr": 3e-4,
"optimizer_epsilon": 1e-5,
}
}
return defaults.get(env_type)
@staticmethod
def make_runner(env, args, model, device):
policy = ActorCriticPolicy(model, device)
kwargs = args # vars(args)
runner_kwargs = {key: kwargs[key] for key in
["gamma", "lambda_", "num_epochs", "num_minibatches"]
if key in kwargs}
runner = make_ppo_runner(env, policy, args['num_runner_steps'],
**runner_kwargs)
return runner
@staticmethod
def make_alg(runner, args, device):
lr = args['lr']
model = runner.policy.model
model.to(device)
model = torch.nn.DataParallel(model, device_ids=GPU_ids)
if "optimizer_epsilon" in args:
optimizer = torch.optim.Adam(model.parameters(), lr, eps=args['optimizer_epsilon'])
else:
optimizer = torch.optim.Adam(model.parameters(), lr)
lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, args['num_train_steps'])
kwargs = args
ppo_kwargs = {key: kwargs[key]
for key in ["value_loss_coef", "entropy_coef",
"cliprange", "max_grad_norm"]
if key in kwargs}
ppo = PPO(runner.policy, device, optimizer, lr_scheduler, **ppo_kwargs)
return ppo
def learning_body(self):
# self.runner.step_var+=1
data = self.runner.get_next()
loss = self.alg.step(data)
# save_to_file('new_logs/random_loss.csv', {'loss':loss})
yield data, loss
while not self.runner.trajectory_is_stale():
data = self.runner.get_next()
loss = self.alg.step(data)
yield data, loss
class ScoredLearner(Learner):
""" Scored learner. """
# pylint: disable=abstract-method
def __init__(self, runner, alg):
if not isinstance(alg.model, SelectModelFromLayers):
raise ValueError("alg.model must be an instance of SelectModel, "
f"got type {type(alg.model)} instead")
runner = SavedRewardsResetsRunner(runner)
super().__init__(runner=runner, alg=alg)
self.select_model = alg.model
self.current_data = None
self.current_loss = None
def get_score(self):
""" Returns score over the last learning trajectory. """
rewards, resets = self.runner.get_rewards_resets()
self.runner.clear_rewards_resets()
assert rewards.ndim == 1 and resets.ndim == 1, (rewards.ndim, resets.ndim)
assert rewards.shape[0] == resets.shape[0], (rewards.shape, resets.shape)
scores = [0]
for t in reversed(range(rewards.shape[0])):
if resets[t]:
scores.append(0)
scores[-1] += rewards[t]
return np.mean(scores)
def learning_body(self):
data = self.runner.get_next()
loss = self.alg.step(data)
self.current_data = data
self.current_loss = loss
yield data, loss
while not self.runner.trajectory_is_stale():
data = self.runner.get_next()
loss = self.alg.step(data)
yield data, loss
| [
"numpy.mean",
"torch.optim.lr_scheduler.CosineAnnealingLR",
"runners.runners.make_ppo_runner",
"torch.nn.DataParallel",
"runners.runners.SavedRewardsResetsRunner"
] | [((1869, 1940), 'runners.runners.make_ppo_runner', 'make_ppo_runner', (['env', 'policy', "args['num_runner_steps']"], {}), "(env, policy, args['num_runner_steps'], **runner_kwargs)\n", (1884, 1940), False, 'from runners.runners import make_ppo_runner, SavedRewardsResetsRunner\n'), ((2156, 2204), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['model'], {'device_ids': 'GPU_ids'}), '(model, device_ids=GPU_ids)\n', (2177, 2204), False, 'import torch\n'), ((2445, 2523), 'torch.optim.lr_scheduler.CosineAnnealingLR', 'torch.optim.lr_scheduler.CosineAnnealingLR', (['optimizer', "args['num_train_steps']"], {}), "(optimizer, args['num_train_steps'])\n", (2487, 2523), False, 'import torch\n'), ((3610, 3642), 'runners.runners.SavedRewardsResetsRunner', 'SavedRewardsResetsRunner', (['runner'], {}), '(runner)\n', (3634, 3642), False, 'from runners.runners import make_ppo_runner, SavedRewardsResetsRunner\n'), ((4339, 4354), 'numpy.mean', 'np.mean', (['scores'], {}), '(scores)\n', (4346, 4354), True, 'import numpy as np\n')] |
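get_score above averages per-episode returns by walking the reward and reset arrays backwards and opening a new accumulator at every reset flag. The same logic on tiny invented arrays, kept as plain Python so it runs on its own:

# Made-up trajectory: the reset flag at index 3 splits the rewards into two episodes.
rewards = [1.0, 2.0, 3.0, 4.0, 5.0]
resets  = [False, False, False, True, False]

scores = [0]
for t in reversed(range(len(rewards))):
    if resets[t]:
        scores.append(0)
    scores[-1] += rewards[t]

print(scores)                      # [5.0, 10.0]  (episode returns, newest first)
print(sum(scores) / len(scores))   # 7.5, the value get_score() would report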
import asyncio
import websockets
import can
import struct
from threading import Timer
class Watchdog(Exception):
def __init__(self, timeout, userHandler=None): # timeout in seconds
self.timeout = timeout
self.handler = userHandler if userHandler is not None else self.defaultHandler
self.timer = Timer(self.timeout, self.handler)
self.timer.start()
def reset(self):
self.timer.cancel()
self.timer = Timer(self.timeout, self.handler)
self.timer.start()
def stop(self):
self.timer.cancel()
def defaultHandler(self):
raise self
ws=None
def pack_can(address, data, bus):
CAN_TRANSMIT = 1
CAN_EXTENDED = 4
if(len(data) > 8):
#can't have more than 8 bytes of data in a can frame
return
if ( address >= 0x800):
address = ((address << 3) | CAN_TRANSMIT | CAN_EXTENDED) >> 0
else:
address = ((address << 21) | CAN_TRANSMIT) >> 0
buff = bytearray(struct.pack('<I', address))
buff.extend(struct.pack('<I', (len(data) | (bus << 4)) >> 0))
buff.extend(data)
print(buff)
return buff
msg_count=0
can_packet = bytearray()
MAX_MESSAGE_QUEUE = 100
watchdog = None
async def on_can_message(msg):
global ws
global can_packet
global msg_count
global MAX_MESSAGE_QUEUE
global watchdog
print(msg)
watchdog.reset()
can_frame = pack_can(msg.arbitration_id, msg.data, 0)
if(len(can_frame) < 16):
diff = 16-len(can_frame)
can_frame.extend(bytearray(diff))
can_packet.extend(can_frame)
msg_count+=1
if(ws is not None):
if(msg_count >= MAX_MESSAGE_QUEUE):
msg_count = 0
await ws.send(can_packet)
can_packet = bytearray()
def can_watchdog_expired():
global ws
global can_packet
global msg_count
if(ws is not None):
print("sending last little bit of data")
print(can_packet)
if(msg_count>0):
asyncio.run(ws.send(can_packet))
can_packet = bytearray()
msg_count = 0
async def on_new_ws_client(websocket, path):
global ws
ws = websocket
print("New WS Client Connected")
while True:
try:
name = await websocket.recv()
except websockets.ConnectionClosed:
print(f"Terminated")
break
# await websocket.send(greeting)
async def can_setup():
global watchdog
can_interface = 'can0'
bus = can.interface.Bus(can_interface, bustype='socketcan')
loop = asyncio.get_event_loop()
notifier = can.Notifier(bus, [on_can_message], loop=loop)
watchdog = Watchdog(0.5, can_watchdog_expired)
start_server = websockets.serve(on_new_ws_client, "localhost", 8080)
asyncio.get_event_loop().run_until_complete(can_setup())
asyncio.get_event_loop().run_until_complete(start_server)
asyncio.get_event_loop().run_forever()
| [
"can.interface.Bus",
"threading.Timer",
"struct.pack",
"websockets.serve",
"asyncio.get_event_loop",
"can.Notifier"
] | [((2714, 2767), 'websockets.serve', 'websockets.serve', (['on_new_ws_client', '"""localhost"""', '(8080)'], {}), "(on_new_ws_client, 'localhost', 8080)\n", (2730, 2767), False, 'import websockets\n'), ((2495, 2548), 'can.interface.Bus', 'can.interface.Bus', (['can_interface'], {'bustype': '"""socketcan"""'}), "(can_interface, bustype='socketcan')\n", (2512, 2548), False, 'import can\n'), ((2560, 2584), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2582, 2584), False, 'import asyncio\n'), ((2600, 2646), 'can.Notifier', 'can.Notifier', (['bus', '[on_can_message]'], {'loop': 'loop'}), '(bus, [on_can_message], loop=loop)\n', (2612, 2646), False, 'import can\n'), ((327, 360), 'threading.Timer', 'Timer', (['self.timeout', 'self.handler'], {}), '(self.timeout, self.handler)\n', (332, 360), False, 'from threading import Timer\n'), ((459, 492), 'threading.Timer', 'Timer', (['self.timeout', 'self.handler'], {}), '(self.timeout, self.handler)\n', (464, 492), False, 'from threading import Timer\n'), ((994, 1020), 'struct.pack', 'struct.pack', (['"""<I"""', 'address'], {}), "('<I', address)\n", (1005, 1020), False, 'import struct\n'), ((2770, 2794), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2792, 2794), False, 'import asyncio\n'), ((2827, 2851), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2849, 2851), False, 'import asyncio\n'), ((2885, 2909), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (2907, 2909), False, 'import asyncio\n')] |
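pack_can above encodes each frame as two little-endian 32-bit words (the shifted arbitration id plus flags, then the payload length with the bus number in the upper nibble) followed by the data bytes. Below is a hedged sketch of a matching unpacker, useful for sanity-checking the format; the name unpack_can and the example frame are invented, and it assumes a standard 11-bit identifier (no CAN_EXTENDED flag).

import struct

CAN_TRANSMIT = 1
CAN_EXTENDED = 4

def unpack_can(buff):
    """Invert the packing above for a standard (11-bit) identifier."""
    word0, word1 = struct.unpack_from('<II', buff)
    address = word0 >> 21            # standard ids are shifted left by 21 bits
    length = word1 & 0x0F
    bus = word1 >> 4
    data = buff[8:8 + length]
    return address, bus, data

# Round-trip a made-up frame through the same encoding pack_can uses.
address, data, bus = 0x123, bytearray(b'\x01\x02\x03'), 0
buff = bytearray(struct.pack('<I', (address << 21) | CAN_TRANSMIT))
buff.extend(struct.pack('<I', len(data) | (bus << 4)))
buff.extend(data)
print(unpack_can(buff))   # (291, 0, bytearray(b'\x01\x02\x03'))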
from prometheus_client.core import GaugeMetricFamily
from BaseCollector import BaseCollector
class UcsmCollector(BaseCollector):
def __init__(self, creds, inventory_file):
super().__init__(creds, inventory_file)
def describe(self):
yield GaugeMetricFamily("ucsm_metrics", "ucsm_collector_registered")
def collect(self):
print("UcsmCollector: Get updated handles !")
self.get_handles()
g = GaugeMetricFamily('ucsm_info', 'UCSM server information',
labels=['server', 'firmware_version'])
for server, handle in self.handles.items():
sys = handle.query_dn("sys")
firmware_status = handle.query_children(sys, class_id="FirmwareStatus")
firmware_version = firmware_status[0].package_version
g.add_metric(labels=[server, firmware_version], value=0)
yield g
self.logout_handles()
| [
"prometheus_client.core.GaugeMetricFamily"
] | [((445, 545), 'prometheus_client.core.GaugeMetricFamily', 'GaugeMetricFamily', (['"""ucsm_info"""', '"""UCSM server information"""'], {'labels': "['server', 'firmware_version']"}), "('ucsm_info', 'UCSM server information', labels=['server',\n 'firmware_version'])\n", (462, 545), False, 'from prometheus_client.core import GaugeMetricFamily\n'), ((265, 327), 'prometheus_client.core.GaugeMetricFamily', 'GaugeMetricFamily', (['"""ucsm_metrics"""', '"""ucsm_collector_registered"""'], {}), "('ucsm_metrics', 'ucsm_collector_registered')\n", (282, 327), False, 'from prometheus_client.core import GaugeMetricFamily\n')] |
import dbus
import re
import subprocess as s
from dbus.mainloop.glib import DBusGMainLoop
DBusGMainLoop(set_as_default=True)
bus = dbus.SystemBus()
DeviceName = {}
def valid(name):
return re.search("^sd[a-z][0-9][0-9]*$",name)
def notify(title, desc):
s.call(["notify-send", title, desc])
def extract_name(name):
"""
Extract /dev/ from the name
"""
return name.replace('/dev/', '').replace('/', '')
def get_device_from_dbus(cls):
deviceinfo = cls.get('org.freedesktop.UDisks2.Block')
dev = bytearray(deviceinfo.get('Device')).replace(b'\x00', b'').decode('utf-8')
return dev
# Function which will run when signal is received
def callback_added_function(address, cls):
device = get_device_from_dbus(cls)
naming = extract_name(device)
DeviceName[address] = naming
if valid(naming):
notify("USB plugged in", "Mounting to /media/"+naming)
def callback_removed_function(address, cls):
device=DeviceName[address]
if valid(device):
notify("USB removed", "Unmounting from /media/"+device)
# Which signal to have an eye for
iface = 'org.freedesktop.DBus.ObjectManager'
signal = 'InterfacesAdded'
signalR = 'InterfacesRemoved'
bus.add_signal_receiver(callback_added_function, signal, iface)
bus.add_signal_receiver(callback_removed_function, signalR, iface)
# Let's start the loop
import gi.repository.GLib as gi
loop = gi.MainLoop()
loop.run()
| [
"dbus.mainloop.glib.DBusGMainLoop",
"gi.repository.GLib.MainLoop",
"dbus.SystemBus",
"subprocess.call",
"re.search"
] | [((90, 124), 'dbus.mainloop.glib.DBusGMainLoop', 'DBusGMainLoop', ([], {'set_as_default': '(True)'}), '(set_as_default=True)\n', (103, 124), False, 'from dbus.mainloop.glib import DBusGMainLoop\n'), ((133, 149), 'dbus.SystemBus', 'dbus.SystemBus', ([], {}), '()\n', (147, 149), False, 'import dbus\n'), ((1400, 1413), 'gi.repository.GLib.MainLoop', 'gi.MainLoop', ([], {}), '()\n', (1411, 1413), True, 'import gi.repository.GLib as gi\n'), ((195, 234), 're.search', 're.search', (['"""^sd[a-z][0-9][0-9]*$"""', 'name'], {}), "('^sd[a-z][0-9][0-9]*$', name)\n", (204, 234), False, 'import re\n'), ((264, 300), 'subprocess.call', 's.call', (["['notify-send', title, desc]"], {}), "(['notify-send', title, desc])\n", (270, 300), True, 'import subprocess as s\n')] |
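get_device_from_dbus above pulls the block-device path out of the UDisks2 properties as a null-padded byte array before the /dev/ prefix is stripped and the name validated. A tiny standalone sketch of that decode with a made-up payload:

import re

# UDisks2 reports the device path as a null-terminated byte array.
raw = bytearray(b'/dev/sdb1\x00')
device = raw.replace(b'\x00', b'').decode('utf-8')      # '/dev/sdb1'
name = device.replace('/dev/', '').replace('/', '')    # 'sdb1'
print(name, bool(re.search(r"^sd[a-z][0-9][0-9]*$", name)))   # sdb1 True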
"""Load and query configuration data.
This module handles loading of the hil.cfg file, and querying the options
therein. the `cfg` attribute is an instance of `ConfigParser.RawConfigParser`.
Once `load` has been called, it will be ready to use.
"""
import ConfigParser
import logging.handlers
import importlib
from schema import Schema, Optional, Use, And, Or
import os
import sys
from urlparse import urlparse
import errno
cfg = ConfigParser.RawConfigParser()
cfg.optionxform = str
def string_is_bool(option):
"""Check if a string matches ConfigParser's definition of a bool"""
return And(Use(str.lower), Or('true', 'yes', 'on', '1',
'false', 'no', 'off', '0')).validate(option)
def string_is_web_url(option):
"""Check if a string is a valid web URL"""
return And(lambda s: urlparse(s).scheme != '',
lambda s: urlparse(s).netloc != '').validate(option)
def string_is_db_uri(option):
"""Check if a string is a valid DB URI"""
return And(Use(lambda s: urlparse(s).scheme),
Or('postgresql', 'sqlite')).validate(option)
def string_is_dir(option):
"""Check if a string is a valid directory path"""
return Use(os.path.isabs).validate(option)
def string_is_log_level(option):
"""Check if a string is a valid log level"""
return And(Use(str.lower), Or('debug', 'info', 'warn', 'warning', 'error',
'critical', 'fatal')).validate(option)
def string_has_vlans(option):
"""Check if a string is a valid list of VLANs"""
for r in option.split(","):
r = r.strip().split("-")
if not all(s.isdigit() and 0 < int(s) <= 4096 for s in r):
return False
return True
# Note: headnode section receiving minimal checking due to soon replacement
core_schema = {
Optional('general'): {
'log_level': string_is_log_level,
Optional('log_dir'): string_is_dir,
},
Optional('auth'): {
Optional('require_authentication'): string_is_bool,
},
'headnode': {
'trunk_nic': str,
'base_imgs': str,
'libvirt_endpoint': str,
},
'client': {
Optional('endpoint'): string_is_web_url,
},
'database': {
'uri': string_is_db_uri,
},
Optional('devel'): {
Optional('dry_run'): string_is_bool,
},
Optional('maintenance'): {
Optional('maintenance_project'): str,
Optional('url'): string_is_web_url,
Optional('shutdown'): '',
},
Optional('network-daemon'): {
Optional('sleep_time'): int,
},
'extensions': {
Optional(str): '',
},
}
def load(filename='hil.cfg'):
"""Load the configuration from the file 'hil.cfg' in the current directory.
This must be called once at program startup; no configuration options will
be available until then.
If the config file is not found, it will simply exit.
"""
if (os.stat(filename).st_mode & 0o077) != 0:
sys.exit("Config file has overly-permissive permissions; make sure "
"that only the HIL user may access the file.")
opened_file = cfg.read(filename)
if filename not in opened_file:
sys.exit("Config file not found. Please create hil.cfg")
def configure_logging():
"""Configure the logger according to the settings in the config file.
This must be called *after* the config is loaded.
"""
if cfg.has_option('general', 'log_level'):
LOG_SET = ["CRITICAL", "DEBUG", "ERROR", "FATAL", "INFO", "WARN",
"WARNING"]
log_level = cfg.get('general', 'log_level').upper()
if log_level in LOG_SET:
# Set to mnemonic log level
logging.basicConfig(level=getattr(logging, log_level))
else:
# Set to 'warning', and warn that the config is bad
logging.basicConfig(level=logging.WARNING)
logging.getLogger(__name__).warn(
"Invalid debugging level %s defaulted to WARNING", log_level)
else:
# Default to 'warning'
logging.basicConfig(level=logging.WARNING)
# Configure the formatter
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logging._defaultFormatter = formatter
# Add the file handlers for the modules
if cfg.has_option('general', 'log_dir'):
log_dir = cfg.get('general', 'log_dir')
# logging
log_file = os.path.join(log_dir, 'hil.log')
logger = logging.getLogger('hil')
# Catch bad log directories
try:
logger.addHandler(logging.handlers.TimedRotatingFileHandler(
log_file, when='D', interval=1))
except IOError as e:
if e.errno == errno.ENOENT:
sys.exit("Error: log directory does not exist")
elif e.errno == errno.EACCES:
sys.exit("Error: insufficient permissions to "
"access log directory")
else:
raise(e)
def load_extensions():
"""Load extensions.
Each extension is specified as ``module =`` in the ``[extensions]`` section
of ``hil.cfg``. This must be called after ``load``.
"""
if not cfg.has_section('extensions'):
return
for name in cfg.options('extensions'):
importlib.import_module(name)
for name in cfg.options('extensions'):
if hasattr(sys.modules[name], 'setup'):
sys.modules[name].setup()
def validate_config():
"""Validate the current config file"""
cfg_dict = dict()
for section in cfg.sections():
cfg_dict[section] = dict(cfg.items(section))
validated = Schema(core_schema).validate(cfg_dict)
assert validated == cfg_dict
def setup(filename='hil.cfg'):
"""Do full configuration setup.
This is equivalent to calling load, configure_logging, and
load_extensions in sequence.
"""
load(filename)
load_extensions()
validate_config()
configure_logging()
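# --- Added usage sketch (not part of the original module) ---
# Typical startup sequence for a program consuming this module; the import
# path `hil.config` is an assumption based on the project layout.
#
#     from hil import config
#     config.setup('hil.cfg')          # load + extensions + validation + logging
#     db_uri = config.cfg.get('database', 'uri')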
| [
"schema.Optional",
"importlib.import_module",
"ConfigParser.RawConfigParser",
"schema.Use",
"schema.Schema",
"os.path.join",
"schema.Or",
"sys.exit",
"os.stat",
"urlparse.urlparse"
] | [((433, 463), 'ConfigParser.RawConfigParser', 'ConfigParser.RawConfigParser', ([], {}), '()\n', (461, 463), False, 'import ConfigParser\n'), ((1836, 1855), 'schema.Optional', 'Optional', (['"""general"""'], {}), "('general')\n", (1844, 1855), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1956, 1972), 'schema.Optional', 'Optional', (['"""auth"""'], {}), "('auth')\n", (1964, 1972), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2287, 2304), 'schema.Optional', 'Optional', (['"""devel"""'], {}), "('devel')\n", (2295, 2304), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2364, 2387), 'schema.Optional', 'Optional', (['"""maintenance"""'], {}), "('maintenance')\n", (2372, 2387), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2526, 2552), 'schema.Optional', 'Optional', (['"""network-daemon"""'], {}), "('network-daemon')\n", (2534, 2552), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1909, 1928), 'schema.Optional', 'Optional', (['"""log_dir"""'], {}), "('log_dir')\n", (1917, 1928), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1984, 2018), 'schema.Optional', 'Optional', (['"""require_authentication"""'], {}), "('require_authentication')\n", (1992, 2018), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2177, 2197), 'schema.Optional', 'Optional', (['"""endpoint"""'], {}), "('endpoint')\n", (2185, 2197), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2316, 2335), 'schema.Optional', 'Optional', (['"""dry_run"""'], {}), "('dry_run')\n", (2324, 2335), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2399, 2430), 'schema.Optional', 'Optional', (['"""maintenance_project"""'], {}), "('maintenance_project')\n", (2407, 2430), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2445, 2460), 'schema.Optional', 'Optional', (['"""url"""'], {}), "('url')\n", (2453, 2460), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2489, 2509), 'schema.Optional', 'Optional', (['"""shutdown"""'], {}), "('shutdown')\n", (2497, 2509), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2564, 2586), 'schema.Optional', 'Optional', (['"""sleep_time"""'], {}), "('sleep_time')\n", (2572, 2586), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2628, 2641), 'schema.Optional', 'Optional', (['str'], {}), '(str)\n', (2636, 2641), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((3001, 3123), 'sys.exit', 'sys.exit', (['"""Config file has overly-permissive permissions; make sure that only the HIL user may access the file."""'], {}), "(\n 'Config file has overly-permissive permissions; make sure that only the HIL user may access the file.'\n )\n", (3009, 3123), False, 'import sys\n'), ((3215, 3271), 'sys.exit', 'sys.exit', (['"""Config file not found. Please create hil.cfg"""'], {}), "('Config file not found. 
Please create hil.cfg')\n", (3223, 3271), False, 'import sys\n'), ((4483, 4515), 'os.path.join', 'os.path.join', (['log_dir', '"""hil.log"""'], {}), "(log_dir, 'hil.log')\n", (4495, 4515), False, 'import os\n'), ((5361, 5390), 'importlib.import_module', 'importlib.import_module', (['name'], {}), '(name)\n', (5384, 5390), False, 'import importlib\n'), ((1208, 1226), 'schema.Use', 'Use', (['os.path.isabs'], {}), '(os.path.isabs)\n', (1211, 1226), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((5714, 5733), 'schema.Schema', 'Schema', (['core_schema'], {}), '(core_schema)\n', (5720, 5733), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((603, 617), 'schema.Use', 'Use', (['str.lower'], {}), '(str.lower)\n', (606, 617), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((619, 674), 'schema.Or', 'Or', (['"""true"""', '"""yes"""', '"""on"""', '"""1"""', '"""false"""', '"""no"""', '"""off"""', '"""0"""'], {}), "('true', 'yes', 'on', '1', 'false', 'no', 'off', '0')\n", (621, 674), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1069, 1095), 'schema.Or', 'Or', (['"""postgresql"""', '"""sqlite"""'], {}), "('postgresql', 'sqlite')\n", (1071, 1095), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1343, 1357), 'schema.Use', 'Use', (['str.lower'], {}), '(str.lower)\n', (1346, 1357), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((1359, 1427), 'schema.Or', 'Or', (['"""debug"""', '"""info"""', '"""warn"""', '"""warning"""', '"""error"""', '"""critical"""', '"""fatal"""'], {}), "('debug', 'info', 'warn', 'warning', 'error', 'critical', 'fatal')\n", (1361, 1427), False, 'from schema import Schema, Optional, Use, And, Or\n'), ((2952, 2969), 'os.stat', 'os.stat', (['filename'], {}), '(filename)\n', (2959, 2969), False, 'import os\n'), ((4814, 4861), 'sys.exit', 'sys.exit', (['"""Error: log directory does not exist"""'], {}), "('Error: log directory does not exist')\n", (4822, 4861), False, 'import sys\n'), ((4920, 4987), 'sys.exit', 'sys.exit', (['"""Error: insufficient permissions to access log directory"""'], {}), "('Error: insufficient permissions to access log directory')\n", (4928, 4987), False, 'import sys\n'), ((832, 843), 'urlparse.urlparse', 'urlparse', (['s'], {}), '(s)\n', (840, 843), False, 'from urlparse import urlparse\n'), ((883, 894), 'urlparse.urlparse', 'urlparse', (['s'], {}), '(s)\n', (891, 894), False, 'from urlparse import urlparse\n'), ((1033, 1044), 'urlparse.urlparse', 'urlparse', (['s'], {}), '(s)\n', (1041, 1044), False, 'from urlparse import urlparse\n')] |
import os
import json
from datetime import datetime
import tornado.escape
import tornado.web
from bs4 import BeautifulSoup
from sklearn.externals.joblib import Parallel, delayed
from elephant_sense.evaluator import Evaluator
from elephant_sense.qiita_api import search_posts
class Application(tornado.web.Application):
def __init__(self):
self.evaluator = Evaluator().load()
handlers = [
(r"/", IndexHandler),
(r"/e/search", SearchHandler, dict(evaluator=self.evaluator)),
]
settings = dict(
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
cookie_secret=os.environ.get("SECRET_TOKEN", "__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__"),
debug=True,
)
super(Application, self).__init__(handlers, **settings)
class IndexHandler(tornado.web.RequestHandler):
def get(self):
self.render("index.html")
class SearchHandler(tornado.web.RequestHandler):
def initialize(self, evaluator):
self.evaluator = evaluator
def post(self):
data = tornado.escape.json_decode(self.request.body)
is_debug = data["debug"]
query = data["query"]
message = {"posts": []}
if is_debug:
from elephant_sense.debug import search_posts_dummy
posts = search_posts_dummy(query, count=30)
posts = self.scoring(posts)
message["posts"] = [self.trim(p) for p in posts]
self.write(message)
else:
posts = search_posts(query, n=50) # limit for performance. need improvements for feature extraction.
process = 4
batch_size = len(posts) / process
tasks = [(int(i * batch_size), int(i * batch_size + batch_size)) for i in range(process)]
dones = Parallel(n_jobs=process)(delayed(parallel_scoring)(self.evaluator, posts[t[0]:t[1]]) for t in tasks)
posts = []
for scoreds in dones:
posts += [self.trim(s) for s in scoreds]
posts = sorted(posts, key=lambda p: p["score"], reverse=True)
message["posts"] = posts
self.write(message)
@classmethod
    def trim(cls, post):
body = BeautifulSoup(post["rendered_body"], "html.parser")
header = body.get_text()[:140]
del post["rendered_body"]
if "body" in post:
del post["body"]
post["header"] = header.strip().replace("\n", " ")
update_time = datetime.strptime("".join(post["updated_at"].rsplit(":", 1)), "%Y-%m-%dT%H:%M:%S%z")
post["update_time"] = update_time.strftime("%Y/%m/%d")
return post
def scoring(self, posts):
scored = []
for p in posts:
score = self.evaluator.evaluate(p)
p["score"] = score
scored.append(p)
scored = sorted(scored, key=lambda p: p["score"], reverse=True)
return scored
def write_json(self, message):
serialized = json.dumps(message, ensure_ascii=False)
self.write(serialized)
def parallel_scoring(evaluator, posts):
scored = []
for p in posts:
score = evaluator.evaluate(p)
p["score"] = score
scored.append(p) # sort after merge
return scored
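# --- Added run sketch (not part of the original file) ---
# Minimal way to serve the handlers defined above; the port number (8888) is
# an assumption, and tornado.ioloop is imported lazily to keep the sketch local.
if __name__ == "__main__":
    import tornado.ioloop
    Application().listen(8888)
    tornado.ioloop.IOLoop.current().start()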
| [
"elephant_sense.qiita_api.search_posts",
"sklearn.externals.joblib.delayed",
"elephant_sense.evaluator.Evaluator",
"json.dumps",
"os.environ.get",
"bs4.BeautifulSoup",
"os.path.dirname",
"sklearn.externals.joblib.Parallel",
"elephant_sense.debug.search_posts_dummy"
] | [((2366, 2417), 'bs4.BeautifulSoup', 'BeautifulSoup', (["post['rendered_body']", '"""html.parser"""'], {}), "(post['rendered_body'], 'html.parser')\n", (2379, 2417), False, 'from bs4 import BeautifulSoup\n'), ((3129, 3168), 'json.dumps', 'json.dumps', (['message'], {'ensure_ascii': '(False)'}), '(message, ensure_ascii=False)\n', (3139, 3168), False, 'import json\n'), ((1456, 1491), 'elephant_sense.debug.search_posts_dummy', 'search_posts_dummy', (['query'], {'count': '(30)'}), '(query, count=30)\n', (1474, 1491), False, 'from elephant_sense.debug import search_posts_dummy\n'), ((1659, 1684), 'elephant_sense.qiita_api.search_posts', 'search_posts', (['query'], {'n': '(50)'}), '(query, n=50)\n', (1671, 1684), False, 'from elephant_sense.qiita_api import search_posts\n'), ((371, 382), 'elephant_sense.evaluator.Evaluator', 'Evaluator', ([], {}), '()\n', (380, 382), False, 'from elephant_sense.evaluator import Evaluator\n'), ((767, 846), 'os.environ.get', 'os.environ.get', (['"""SECRET_TOKEN"""', '"""__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__"""'], {}), "('SECRET_TOKEN', '__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__')\n", (781, 846), False, 'import os\n'), ((1945, 1969), 'sklearn.externals.joblib.Parallel', 'Parallel', ([], {'n_jobs': 'process'}), '(n_jobs=process)\n', (1953, 1969), False, 'from sklearn.externals.joblib import Parallel, delayed\n'), ((594, 619), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (609, 619), False, 'import os\n'), ((672, 697), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (687, 697), False, 'import os\n'), ((1970, 1995), 'sklearn.externals.joblib.delayed', 'delayed', (['parallel_scoring'], {}), '(parallel_scoring)\n', (1977, 1995), False, 'from sklearn.externals.joblib import Parallel, delayed\n')] |
# SPDX-FileCopyrightText: 2017 Fermi Research Alliance, LLC
# SPDX-License-Identifier: Apache-2.0
"""Fixture based tests of the SourceProxy module."""
# pylint: disable=redefined-outer-name
import os
import re
import pytest
from decisionengine.framework.modules.SourceProxy import SourceProxy
from decisionengine.framework.tests.fixtures import ( # noqa: F401
DEServer,
PG_DE_DB_WITHOUT_SCHEMA,
PG_PROG,
SQLALCHEMY_PG_WITH_SCHEMA,
SQLALCHEMY_TEMPFILE_SQLITE,
TEST_CONFIG_PATH,
)
from decisionengine.framework.tests.WriteToDisk import wait_for_n_writes
_channel_config_dir = os.path.join(TEST_CONFIG_PATH, "test-source-proxy") # noqa: F405
deserver = DEServer(conf_path=TEST_CONFIG_PATH, channel_conf_path=_channel_config_dir) # pylint: disable=invalid-name
def test_cannot_inherit_from_source_proxy():
with pytest.raises(RuntimeError, match="Cannot inherit from SourceProxy."):
class CannotInheritFrom(SourceProxy):
pass
@pytest.mark.timeout(180)
@pytest.mark.usefixtures("deserver")
def test_single_source_proxy(deserver):
output = deserver.de_client_run_cli("--status")
assert re.search("test_source_proxy.*state = STEADY", output, re.DOTALL)
wait_for_n_writes(deserver.stdout_at_setup, 2)
deserver.de_client_run_cli("--stop-channel", "test_source_proxy")
output = deserver.de_client_run_cli("--status")
assert re.search("test_source_proxy", output, re.DOTALL) is None
_combined_channel_config_dir = os.path.join(TEST_CONFIG_PATH, "test-combined-channels") # noqa: F405
deserver_combined = DEServer(
conf_path=TEST_CONFIG_PATH, channel_conf_path=_combined_channel_config_dir
) # pylint: disable=invalid-name
@pytest.mark.usefixtures("deserver_combined")
def test_combined_channels(deserver_combined):
# Mimics the 'test_single_source_proxy' workflow but using a
# combined-configuration approach.
output = deserver_combined.de_client_run_cli("--status")
assert re.search("test_combined_channels.*state = STEADY", output, re.DOTALL)
wait_for_n_writes(deserver_combined.stdout_at_setup, 2)
deserver_combined.de_client_run_cli("--stop-channel", "test_combined_channels")
output = deserver_combined.de_client_run_cli("--status")
assert re.search("test_combined_channels", output, re.DOTALL) is None
_fail_channel_config_dir = os.path.join(TEST_CONFIG_PATH, "test-failing-source-proxy") # noqa: F405
deserver_fail = DEServer(
conf_path=TEST_CONFIG_PATH, channel_conf_path=_fail_channel_config_dir
) # pylint: disable=invalid-name
@pytest.mark.usefixtures("deserver_fail")
def test_stop_failing_source_proxy(deserver_fail):
output = deserver_fail.de_client_run_cli("--status")
assert re.search("test_source_proxy.*state = OFFLINE", output, re.DOTALL)
deserver_fail.de_client_run_cli("--stop-channel", "test_source_proxy")
output = deserver_fail.de_client_run_cli("--status")
assert re.search("test_source_proxy", output, re.DOTALL) is None
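# --- Added note (not part of the original file) ---
# These are fixture-based integration tests: the DEServer fixtures start a
# decision engine instance per test, so the module is intended to be run
# through pytest rather than imported or executed directly.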
| [
"decisionengine.framework.tests.WriteToDisk.wait_for_n_writes",
"os.path.join",
"decisionengine.framework.tests.fixtures.DEServer",
"pytest.raises",
"pytest.mark.usefixtures",
"pytest.mark.timeout",
"re.search"
] | [((604, 655), 'os.path.join', 'os.path.join', (['TEST_CONFIG_PATH', '"""test-source-proxy"""'], {}), "(TEST_CONFIG_PATH, 'test-source-proxy')\n", (616, 655), False, 'import os\n'), ((681, 756), 'decisionengine.framework.tests.fixtures.DEServer', 'DEServer', ([], {'conf_path': 'TEST_CONFIG_PATH', 'channel_conf_path': '_channel_config_dir'}), '(conf_path=TEST_CONFIG_PATH, channel_conf_path=_channel_config_dir)\n', (689, 756), False, 'from decisionengine.framework.tests.fixtures import DEServer, PG_DE_DB_WITHOUT_SCHEMA, PG_PROG, SQLALCHEMY_PG_WITH_SCHEMA, SQLALCHEMY_TEMPFILE_SQLITE, TEST_CONFIG_PATH\n'), ((983, 1007), 'pytest.mark.timeout', 'pytest.mark.timeout', (['(180)'], {}), '(180)\n', (1002, 1007), False, 'import pytest\n'), ((1009, 1044), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""deserver"""'], {}), "('deserver')\n", (1032, 1044), False, 'import pytest\n'), ((1491, 1547), 'os.path.join', 'os.path.join', (['TEST_CONFIG_PATH', '"""test-combined-channels"""'], {}), "(TEST_CONFIG_PATH, 'test-combined-channels')\n", (1503, 1547), False, 'import os\n'), ((1582, 1671), 'decisionengine.framework.tests.fixtures.DEServer', 'DEServer', ([], {'conf_path': 'TEST_CONFIG_PATH', 'channel_conf_path': '_combined_channel_config_dir'}), '(conf_path=TEST_CONFIG_PATH, channel_conf_path=\n _combined_channel_config_dir)\n', (1590, 1671), False, 'from decisionengine.framework.tests.fixtures import DEServer, PG_DE_DB_WITHOUT_SCHEMA, PG_PROG, SQLALCHEMY_PG_WITH_SCHEMA, SQLALCHEMY_TEMPFILE_SQLITE, TEST_CONFIG_PATH\n'), ((1708, 1752), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""deserver_combined"""'], {}), "('deserver_combined')\n", (1731, 1752), False, 'import pytest\n'), ((2357, 2416), 'os.path.join', 'os.path.join', (['TEST_CONFIG_PATH', '"""test-failing-source-proxy"""'], {}), "(TEST_CONFIG_PATH, 'test-failing-source-proxy')\n", (2369, 2416), False, 'import os\n'), ((2447, 2532), 'decisionengine.framework.tests.fixtures.DEServer', 'DEServer', ([], {'conf_path': 'TEST_CONFIG_PATH', 'channel_conf_path': '_fail_channel_config_dir'}), '(conf_path=TEST_CONFIG_PATH, channel_conf_path=_fail_channel_config_dir\n )\n', (2455, 2532), False, 'from decisionengine.framework.tests.fixtures import DEServer, PG_DE_DB_WITHOUT_SCHEMA, PG_PROG, SQLALCHEMY_PG_WITH_SCHEMA, SQLALCHEMY_TEMPFILE_SQLITE, TEST_CONFIG_PATH\n'), ((2569, 2609), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""deserver_fail"""'], {}), "('deserver_fail')\n", (2592, 2609), False, 'import pytest\n'), ((1148, 1213), 're.search', 're.search', (['"""test_source_proxy.*state = STEADY"""', 'output', 're.DOTALL'], {}), "('test_source_proxy.*state = STEADY', output, re.DOTALL)\n", (1157, 1213), False, 'import re\n'), ((1219, 1265), 'decisionengine.framework.tests.WriteToDisk.wait_for_n_writes', 'wait_for_n_writes', (['deserver.stdout_at_setup', '(2)'], {}), '(deserver.stdout_at_setup, 2)\n', (1236, 1265), False, 'from decisionengine.framework.tests.WriteToDisk import wait_for_n_writes\n'), ((1976, 2046), 're.search', 're.search', (['"""test_combined_channels.*state = STEADY"""', 'output', 're.DOTALL'], {}), "('test_combined_channels.*state = STEADY', output, re.DOTALL)\n", (1985, 2046), False, 'import re\n'), ((2052, 2107), 'decisionengine.framework.tests.WriteToDisk.wait_for_n_writes', 'wait_for_n_writes', (['deserver_combined.stdout_at_setup', '(2)'], {}), '(deserver_combined.stdout_at_setup, 2)\n', (2069, 2107), False, 'from decisionengine.framework.tests.WriteToDisk import wait_for_n_writes\n'), ((2729, 2795), 
're.search', 're.search', (['"""test_source_proxy.*state = OFFLINE"""', 'output', 're.DOTALL'], {}), "('test_source_proxy.*state = OFFLINE', output, re.DOTALL)\n", (2738, 2795), False, 'import re\n'), ((845, 914), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {'match': '"""Cannot inherit from SourceProxy."""'}), "(RuntimeError, match='Cannot inherit from SourceProxy.')\n", (858, 914), False, 'import pytest\n'), ((1400, 1449), 're.search', 're.search', (['"""test_source_proxy"""', 'output', 're.DOTALL'], {}), "('test_source_proxy', output, re.DOTALL)\n", (1409, 1449), False, 'import re\n'), ((2265, 2319), 're.search', 're.search', (['"""test_combined_channels"""', 'output', 're.DOTALL'], {}), "('test_combined_channels', output, re.DOTALL)\n", (2274, 2319), False, 'import re\n'), ((2939, 2988), 're.search', 're.search', (['"""test_source_proxy"""', 'output', 're.DOTALL'], {}), "('test_source_proxy', output, re.DOTALL)\n", (2948, 2988), False, 'import re\n')] |
from eth_account import Account
from eth_account.signers.local import LocalAccount
from eth_typing import ChecksumAddress
ETHEREUM_DEFAULT_PATH = "m/44'/60'/0'/0/0"
ETHEREUM_BASE_PATH = "m/44'/60'/0'/0"
def get_account_from_words(
words: str, index: int = 0, hd_path: str = ETHEREUM_DEFAULT_PATH
) -> LocalAccount:
"""
:param words: Mnemonic words(BIP39) for a Hierarchical Deterministic Wallet(BIP32)
:param index: Index of account
:param hd_path: BIP44 Path. By default Ethereum with 0 index is used
:return: Ethereum Account
:raises: eth_utils.ValidationError
"""
Account.enable_unaudited_hdwallet_features()
if index:
hd_path = f"{ETHEREUM_BASE_PATH}/{index}"
return Account.from_mnemonic(words, account_path=hd_path)
def get_address_from_words(
words: str, index: int = 0, hd_path: str = ETHEREUM_DEFAULT_PATH
) -> ChecksumAddress:
"""
:param words: Mnemonic words(BIP39) for a Hierarchical Deterministic Wallet(BIP32)
:param index: Index of account
:param hd_path: BIP44 Path. By default Ethereum with 0 index is used
:return: Ethereum checksummed public address
:raises: eth_utils.ValidationError
"""
return get_account_from_words(words, index, hd_path).address
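# --- Added usage sketch (not part of the original module) ---
# The mnemonic below is the widely published Hardhat/Foundry test phrase and
# is used purely for illustration; never fund accounts derived from it.
if __name__ == "__main__":
    demo_words = "test test test test test test test test test test test junk"
    print(get_address_from_words(demo_words))           # m/44'/60'/0'/0/0
    print(get_address_from_words(demo_words, index=1))  # m/44'/60'/0'/0/1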
| [
"eth_account.Account.enable_unaudited_hdwallet_features",
"eth_account.Account.from_mnemonic"
] | [((606, 650), 'eth_account.Account.enable_unaudited_hdwallet_features', 'Account.enable_unaudited_hdwallet_features', ([], {}), '()\n', (648, 650), False, 'from eth_account import Account\n'), ((726, 776), 'eth_account.Account.from_mnemonic', 'Account.from_mnemonic', (['words'], {'account_path': 'hd_path'}), '(words, account_path=hd_path)\n', (747, 776), False, 'from eth_account import Account\n')] |
from datasets import list_datasets, list_metrics, load_dataset, load_metric
from pprint import pprint
import sys
import transformers
import argparse
TOKENIZERS = {
"GPT2Fast" : "transformers.GPT2TokenizerFast.from_pretrained('gpt2')"
} # add more tokenizers here as per need
def get_dataset(name, tokenizer, split='train[:20%]', cache_dir=None, num_workers=8, bptt_len=1024):
"""Get a PyTorch Dataset object for popular LM datasets supported by huggingface
Args:
name (str): Name of dataset, eg:- wikitext-2, wikitext-103, openwebtext
tokenizer (transformers.Tokenizer): a huggingface tokenizer object
split (str, optional): Split of the dataset (train, test, val). Defaults to 'train[:20%]'.
cache_dir (str, optional): The directory where the dataset is stored. Defaults to None (None implies -> ~/.cache/huggingface). On clusters dont leave this as None, change it to the filesystem for heavy I/O.
num_workers (int, optional): number of processes for preprocessing. Defaults to 8.
bptt_len (int, optional): Back-propagation through time length i.e. number of words in each training example. Defaults to 1024.
Returns:
torch.utils.data.Dataset : A PyTorch Dataset object that can be used with a PyTorch dataloader
"""
if name == "wikitext-103":
dataset = load_dataset('wikitext', 'wikitext-103-v1', split=split, cache_dir=cache_dir)
elif name == "wikitext-2":
dataset = load_dataset('wikitext', 'wikitext-2-v1', split=split, cache_dir=cache_dir)
else:
dataset = load_dataset(name, split=split, cache_dir=cache_dir)
encoded_dataset = dataset.map(lambda example : tokenizer(example['text']), batched=True, num_proc=num_workers, load_from_cache_file=True)
print(encoded_dataset.column_names)
block_size = bptt_len
def chunk_examples(examples):
concat = []
for input_ids in examples['input_ids']:
if input_ids:
concat.extend(input_ids + [tokenizer.eos_token_id])
chunks = [concat[i:i+block_size] for i in range(0, len(concat), block_size)]
src = []
trg = []
for chunk in chunks:
if len(chunk) >= block_size:
src.append(chunk[:-1])
trg.append(chunk[1:])
return {"src" : src, "trg" : trg}
lm_dataset = encoded_dataset.map(
chunk_examples,
batched=True,
num_proc=num_workers,
load_from_cache_file=True,
remove_columns=encoded_dataset.column_names,
batch_size = 2000,
keep_in_memory=True
)
lm_dataset.set_format(type='torch', columns=['src', 'trg'])
return lm_dataset
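# --- Added usage sketch (not part of the original file) ---
# Builds a tiny wikitext-2 LM dataset with the GPT-2 fast tokenizer; the
# split, num_workers and bptt_len values here are illustrative only.
if __name__ == "__main__":
    tokenizer = transformers.GPT2TokenizerFast.from_pretrained("gpt2")
    train_set = get_dataset("wikitext-2", tokenizer, split="train[:1%]",
                            num_workers=2, bptt_len=128)
    print(len(train_set), train_set[0]["src"].shape)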
| [
"datasets.load_dataset"
] | [((1353, 1430), 'datasets.load_dataset', 'load_dataset', (['"""wikitext"""', '"""wikitext-103-v1"""'], {'split': 'split', 'cache_dir': 'cache_dir'}), "('wikitext', 'wikitext-103-v1', split=split, cache_dir=cache_dir)\n", (1365, 1430), False, 'from datasets import list_datasets, list_metrics, load_dataset, load_metric\n'), ((1480, 1555), 'datasets.load_dataset', 'load_dataset', (['"""wikitext"""', '"""wikitext-2-v1"""'], {'split': 'split', 'cache_dir': 'cache_dir'}), "('wikitext', 'wikitext-2-v1', split=split, cache_dir=cache_dir)\n", (1492, 1555), False, 'from datasets import list_datasets, list_metrics, load_dataset, load_metric\n'), ((1584, 1636), 'datasets.load_dataset', 'load_dataset', (['name'], {'split': 'split', 'cache_dir': 'cache_dir'}), '(name, split=split, cache_dir=cache_dir)\n', (1596, 1636), False, 'from datasets import list_datasets, list_metrics, load_dataset, load_metric\n')] |
import numpy as np
import time
from fileReader import read_file
#Returns tuple of 2 values added together resulting in value provided for "goal"
def find_values(goal, list_of_values):
for i in range(0, len(list_of_values) - 1):
value_to_find = goal - list_of_values[i]
        # Inner range must reach the end of the list, otherwise the last
        # element can never be paired (the original "- 1" skipped it).
        for j in range(i + 1, len(list_of_values)):
            if list_of_values[j] == value_to_find:
                return (list_of_values[i], list_of_values[j])
return ()
def find_2020(file_path):
sum_to_find = 2020
expenses = read_file(file_path)
result = find_values(sum_to_find, expenses)
if len(result) != 0:
product = np.prod(result)
print("Values added resulting in sum", sum_to_find, ":", result[0:len(result)])
print("Product:", product)
return product
if __name__ == "__main__":
find_2020("C:\\Users\\oscar\\Documents\\Repo\\AdventOfCode2020\\Day01\\exampleInput.txt") | [
"numpy.prod",
"fileReader.read_file"
] | [((534, 554), 'fileReader.read_file', 'read_file', (['file_path'], {}), '(file_path)\n', (543, 554), False, 'from fileReader import read_file\n'), ((647, 662), 'numpy.prod', 'np.prod', (['result'], {}), '(result)\n', (654, 662), True, 'import numpy as np\n')] |
from rdflib import ConjunctiveGraph, exceptions, Namespace
from rdflib import RDFS, RDF, BNode
from rdflib.collection import Collection
from .ntr_terms import (
ntrs
)
from .manual_slims import slim_shims
import json
EPILOG = __doc__
OWLNS = Namespace("http://www.w3.org/2002/07/owl#")
OBO_OWL = Namespace("http://www.geneontology.org/formats/oboInOwl#")
Synonym = OBO_OWL["hasExactSynonym"]
Ontology = OWLNS["Ontology"]
Class = OWLNS["Class"]
Thing = OWLNS["Thing"]
OnProperty = OWLNS["onProperty"]
SomeValuesFrom = OWLNS["someValuesFrom"]
IntersectionOf = OWLNS["intersectionOf"]
PART_OF = "http://purl.obolibrary.org/obo/BFO_0000050"
DERIVES_FROM = "http://www.obofoundry.org/ro/ro.owl#derives_from"
DEFAULT_LANGUAGE = "en"
class Inspector(object):
""" Class that includes methods for querying an RDFS/OWL ontology """
def __init__(self, uri, language=""):
super(Inspector, self).__init__()
self.rdfGraph = ConjunctiveGraph()
try:
self.rdfGraph.parse(uri, format="application/rdf+xml")
except:
try:
self.rdfGraph.parse(uri, format="n3")
except:
raise exceptions.Error(
"Could not parse the file! Is `%s` a valid RDF/OWL ontology?" % uri)
finally:
self.baseURI = self.get_OntologyURI() or uri
self.allclasses = self.__getAllClasses(
includeDomainRange=True, includeImplicit=True, removeBlankNodes=False, excludeRDF_OWL=False)
def get_OntologyURI(self, return_as_string=True):
test = [x for x, y, z in self.rdfGraph.triples(
(None, RDF.type, Ontology))]
if test:
if return_as_string:
return str(test[0])
else:
return test[0]
else:
return None
def __getAllClasses(self, classPredicate="", includeDomainRange=False, includeImplicit=False, removeBlankNodes=True, addOWLThing=True, excludeRDF_OWL=True):
rdfGraph = self.rdfGraph
exit = {}
def addIfYouCan(x, mydict):
if excludeRDF_OWL:
if x.startswith('http://www.w3.org/2002/07/owl#') or \
x.startswith("http://www.w3.org/1999/02/22-rdf-syntax-ns#") or \
x.startswith("http://www.w3.org/2000/01/rdf-schema#"):
return mydict
if x not in mydict:
mydict[x] = None
return mydict
if addOWLThing:
exit = addIfYouCan(Thing, exit)
if classPredicate == "rdfs" or classPredicate == "":
for s in rdfGraph.subjects(RDF.type, RDFS.Class):
exit = addIfYouCan(s, exit)
if classPredicate == "owl" or classPredicate == "":
for s in rdfGraph.subjects(RDF.type, Class):
exit = addIfYouCan(s, exit)
if includeDomainRange:
for o in rdfGraph.objects(None, RDFS.domain):
exit = addIfYouCan(o, exit)
for o in rdfGraph.objects(None, RDFS.range):
exit = addIfYouCan(o, exit)
if includeImplicit:
for s, v, o in rdfGraph.triples((None, RDFS.subClassOf, None)):
exit = addIfYouCan(s, exit)
exit = addIfYouCan(o, exit)
for o in rdfGraph.objects(None, RDF.type):
exit = addIfYouCan(o, exit)
# get a list
exit = exit.keys()
if removeBlankNodes:
exit = [x for x in exit if not isBlankNode(x)]
return sort_uri_list_by_name(exit)
def __getTopclasses(self, classPredicate=''):
returnlist = []
for eachclass in self.__getAllClasses(classPredicate):
x = self.get_classDirectSupers(eachclass)
if not x:
returnlist.append(eachclass)
return sort_uri_list_by_name(returnlist)
def __getTree(self, father=None, out=None):
if not father:
out = {}
topclasses = self.toplayer
out[0] = topclasses
for top in topclasses:
children = self.get_classDirectSubs(top)
out[top] = children
for potentialfather in children:
self.__getTree(potentialfather, out)
return out
else:
children = self.get_classDirectSubs(father)
out[father] = children
for ch in children:
self.__getTree(ch, out)
def __buildClassTree(self, father=None, out=None):
if not father:
out = {}
topclasses = self.toplayer
out[0] = [Thing]
out[Thing] = sort_uri_list_by_name(topclasses)
for top in topclasses:
children = self.get_classDirectSubs(top)
out[top] = sort_uri_list_by_name(children)
for potentialfather in children:
self.__buildClassTree(potentialfather, out)
return out
else:
children = self.get_classDirectSubs(father)
out[father] = sort_uri_list_by_name(children)
for ch in children:
self.__buildClassTree(ch, out)
# methods for getting ancestors and descendants of classes: by default, we do not include blank nodes
def get_classDirectSupers(self, aClass, excludeBnodes=True, sortUriName=False):
returnlist = []
for o in self.rdfGraph.objects(aClass, RDFS.subClassOf):
if not (o == Thing):
if excludeBnodes:
if not isBlankNode(o):
returnlist.append(o)
else:
returnlist.append(o)
if sortUriName:
return sort_uri_list_by_name(remove_duplicates(returnlist))
else:
return remove_duplicates(returnlist)
def get_classDirectSubs(self, aClass, excludeBnodes=True):
returnlist = []
for s, v, o in self.rdfGraph.triples((None, RDFS.subClassOf, aClass)):
if excludeBnodes:
if not isBlankNode(s):
returnlist.append(s)
else:
returnlist.append(s)
return sort_uri_list_by_name(remove_duplicates(returnlist))
def get_classSiblings(self, aClass, excludeBnodes=True):
returnlist = []
for father in self.get_classDirectSupers(aClass, excludeBnodes):
for child in self.get_classDirectSubs(father, excludeBnodes):
if child != aClass:
returnlist.append(child)
return sort_uri_list_by_name(remove_duplicates(returnlist))
def entitySynonyms(self, anEntity, getall=True):
temp = []
for o in self.rdfGraph.objects(anEntity, Synonym):
temp += [o]
return temp
def classFind(self, name, exact=False):
temp = []
if name:
for x in self.allclasses:
if exact:
if x.__str__().lower() == str(name).lower():
return [x]
else:
if x.__str__().lower().find(str(name).lower()) >= 0:
temp.append(x)
return temp
def inferNamespacePrefix(aUri):
stringa = aUri.__str__()
try:
prefix = stringa.replace("#", "").split("/")[-1]
except:
prefix = ""
return prefix
def sort_uri_list_by_name(uri_list):
def get_last_bit(uri_string):
try:
x = uri_string.split("#")[1]
except:
x = uri_string.split("/")[-1]
return x
try:
return sorted(uri_list, key=lambda x: get_last_bit(x.__str__()))
except:
# TODO: do more testing.. maybe use a unicode-safe method instead of __str__
print("Error in <sort_uri_list_by_name>: possibly a UnicodeEncodeError")
return uri_list
def remove_duplicates(seq, idfun=None):
if seq:
if idfun is None:
def idfun(x):
return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
if marker in seen:
continue
seen[marker] = 1
result.append(item)
return result
else:
return []
def isBlankNode(aClass):
''' Checks for blank node '''
if type(aClass) == BNode:
return True
else:
return False
def splitNameFromNamespace(aUri):
stringa = aUri.__str__()
try:
ns = stringa.split("#")[0]
name = stringa.split("#")[1]
except:
ns = stringa.rsplit("/", 1)[0]
name = stringa.rsplit("/", 1)[1]
return (name, ns)
def iterativeChildren(nodes, terms):
    """Transitively follow each term's 'data' links (parents, part_of,
    derives_from) and return every term reachable from *nodes*."""
    data = 'data'
results = []
while 1:
newNodes = []
if len(nodes) == 0:
break
for node in nodes:
results.append(node)
if node in terms.keys():
if terms[node][data]:
for child in terms[node][data]:
if child not in results:
newNodes.append(child)
nodes = list(set(newNodes))
return list(set(results))
def getTermStructure():
return {
'id': '',
'name': '',
'parents': [],
'part_of': [],
'derives_from': [],
'ancestors': [],
'data': [],
'synonyms': []
}
def main():
''' Downloads various ontologies and create a JSON file '''
efo_url = 'http://www.ebi.ac.uk/efo/efo.owl'
uberon_url = 'http://purl.obolibrary.org/obo/uberon.owl'
mondo_url = 'http://purl.obolibrary.org/obo/mondo.owl'
hancestro_url = 'http://purl.obolibrary.org/obo/hancestro.owl'
cl_url = 'http://purl.obolibrary.org/obo/cl.owl'
hsapdv_url = 'http://purl.obolibrary.org/obo/hsapdv.owl'
mmusdv_url = 'http://purl.obolibrary.org/obo/mmusdv.owl'
url_whitelist = {
uberon_url: ['UBERON', 'CL'],
efo_url: ['EFO'],
mondo_url: ['MONDO'],
hancestro_url: ['HANCESTRO'],
cl_url: ['CL'],
hsapdv_url: ['HsapDv'],
mmusdv_url: ['MmusDv']
}
terms = {}
# Run on ontologies defined in whitelist
for url in url_whitelist.keys():
data = Inspector(url)
for c in data.allclasses:
if isBlankNode(c):
for o in data.rdfGraph.objects(c, RDFS.subClassOf):
if isBlankNode(o):
pass
else:
for o1 in data.rdfGraph.objects(c, IntersectionOf):
collection = Collection(data.rdfGraph, o1)
col_list = []
for col in data.rdfGraph.objects(collection[1]):
col_list.append(col.__str__())
if PART_OF in col_list:
for subC in data.rdfGraph.objects(c, RDFS.subClassOf):
term_id = splitNameFromNamespace(
collection[0])[0].replace('_', ':')
if term_id.split(':')[0] in url_whitelist[url]:
if term_id not in terms:
terms[term_id] = getTermStructure()
terms[term_id]['part_of'].append(
splitNameFromNamespace(subC)[0].replace('_', ':'))
else:
term_id = splitNameFromNamespace(c)[0].replace('_', ':')
if term_id.split(':')[0] in url_whitelist[url]:
if term_id not in terms:
terms[term_id] = getTermStructure()
terms[term_id]['id'] = term_id
try:
terms[term_id]['name'] = data.rdfGraph.label(c).__str__()
except:
terms[term_id]['name'] = ''
# Get all parents
for parent in data.get_classDirectSupers(c, excludeBnodes=False):
if isBlankNode(parent):
for s, v, o in data.rdfGraph.triples((parent, OnProperty, None)):
if o.__str__() == PART_OF:
for o1 in data.rdfGraph.objects(parent, SomeValuesFrom):
if not isBlankNode(o1):
terms[term_id]['part_of'].append(
splitNameFromNamespace(o1)[0].replace('_', ':'))
elif o.__str__() == DERIVES_FROM:
for o1 in data.rdfGraph.objects(parent, SomeValuesFrom):
if not isBlankNode(o1):
terms[term_id]['derives_from'].append(
splitNameFromNamespace(o1)[0].replace('_', ':'))
else:
for o2 in data.rdfGraph.objects(o1, IntersectionOf):
for o3 in data.rdfGraph.objects(o2, RDF.first):
if not isBlankNode(o3):
terms[term_id]['derives_from'].append(
splitNameFromNamespace(o3)[0].replace('_', ':'))
for o3 in data.rdfGraph.objects(o2, RDF.rest):
for o4 in data.rdfGraph.objects(o3, RDF.first):
for o5 in data.rdfGraph.objects(o4, SomeValuesFrom):
for o6 in data.rdfGraph.objects(o5, IntersectionOf):
for o7 in data.rdfGraph.objects(o6, RDF.first):
if not isBlankNode(o7):
terms[term_id]['derives_from'].append(
splitNameFromNamespace(o7)[0].replace('_', ':'))
for o8 in data.rdfGraph.objects(o6, RDF.rest):
for o9 in data.rdfGraph.objects(o8, RDF.first):
if not isBlankNode(o9):
terms[term_id]['derives_from'].append(
splitNameFromNamespace(o9)[0].replace('_', ':'))
else:
terms[term_id]['parents'].append(
splitNameFromNamespace(parent)[0].replace('_', ':'))
for syn in data.entitySynonyms(c):
try:
terms[term_id]['synonyms'].append(syn.__str__())
except:
pass
for term in terms:
terms[term]['data'] = list(set(terms[term]['parents']) | set(terms[term]['part_of']) | set(
terms[term]['derives_from']))
for term in terms:
ont_whitelist = [i for sublist in url_whitelist.values() for i in sublist]
d = iterativeChildren(
terms[term]['data'], terms)
for dd in d:
if dd.split(':')[0] in ont_whitelist:
terms[term]['ancestors'].append(dd)
terms[term]['ancestors'].append(term)
if term in slim_shims:
terms[term]['ancestors'].extend(slim_shims[term])
for term in terms:
del terms[term]['parents'], terms[term]['derives_from']
del terms[term]['part_of'], terms[term]['id'], terms[term]['data']
for ntr in ntrs:
ancestors = set()
for parent in ntr.get('child_of'):
ancestors.update(terms[parent]['ancestors'])
terms[ntr['term_id']] = {
'name': ntr['name'],
'synonyms': ntr['synonyms'],
'ancestors': list(ancestors)
}
with open('ontology.json', 'w') as outfile:
json.dump(terms, outfile)
if __name__ == '__main__':
main()
| [
"rdflib.collection.Collection",
"rdflib.ConjunctiveGraph",
"rdflib.Namespace",
"rdflib.exceptions.Error",
"json.dump"
] | [((248, 291), 'rdflib.Namespace', 'Namespace', (['"""http://www.w3.org/2002/07/owl#"""'], {}), "('http://www.w3.org/2002/07/owl#')\n", (257, 291), False, 'from rdflib import ConjunctiveGraph, exceptions, Namespace\n'), ((302, 360), 'rdflib.Namespace', 'Namespace', (['"""http://www.geneontology.org/formats/oboInOwl#"""'], {}), "('http://www.geneontology.org/formats/oboInOwl#')\n", (311, 360), False, 'from rdflib import ConjunctiveGraph, exceptions, Namespace\n'), ((946, 964), 'rdflib.ConjunctiveGraph', 'ConjunctiveGraph', ([], {}), '()\n', (962, 964), False, 'from rdflib import ConjunctiveGraph, exceptions, Namespace\n'), ((16614, 16639), 'json.dump', 'json.dump', (['terms', 'outfile'], {}), '(terms, outfile)\n', (16623, 16639), False, 'import json\n'), ((1174, 1264), 'rdflib.exceptions.Error', 'exceptions.Error', (["('Could not parse the file! Is `%s` a valid RDF/OWL ontology?' % uri)"], {}), "(\n 'Could not parse the file! Is `%s` a valid RDF/OWL ontology?' % uri)\n", (1190, 1264), False, 'from rdflib import ConjunctiveGraph, exceptions, Namespace\n'), ((10638, 10667), 'rdflib.collection.Collection', 'Collection', (['data.rdfGraph', 'o1'], {}), '(data.rdfGraph, o1)\n', (10648, 10667), False, 'from rdflib.collection import Collection\n')] |
# Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import unittest
from google.cloud import bigquery
from bq_benchmarks.generic_benchmark_tools import table_util
from bq_benchmarks.query_benchmark_tools import query_generator
SELECT_ALL_ID = 'SIMPLE_SELECT_*'
SELECT_ONE_STRING_ID = 'SELECT_ONE_STRING'
SELECT_50_PERCENT_ID = 'SELECT_50_PERCENT'
class TestQueryGenerator(unittest.TestCase):
"""Tests functionality of query_benchmark_tools.query_generator.
Attributes:
bq_client(google.cloud.bigquery.client.Client): Client to hold
configurations needed for BigQuery API requests.
dataset_id(str): ID of the dataset that holds the test table.
dataset_ref(google.cloud.bigquery.dataset.DatasetReference): Pointer
to the dataset that holds the test table.
dataset(google.cloud.bigquery.dataset.Dataset): Dataset that holds the
test table.
table_id(str): The name of the test table.
        table_util(generic_benchmark.TableUtil): Utility for managing the test BigQuery table.
"""
def setUp(self):
"""Sets up resources for tests.
"""
self.bq_client = bigquery.Client()
self.dataset_id = 'bq_benchmark_test_dataset'
self.dataset_ref = self.bq_client.dataset(self.dataset_id)
dataset = bigquery.Dataset(self.dataset_ref)
self.dataset = self.bq_client.create_dataset(dataset)
self.table_id = 'test_table'
abs_path = os.path.abspath(os.path.dirname(__file__))
json_schema_filename = os.path.join(abs_path,
'test_schemas/test_schema.json')
self.table_util = table_util.TableUtil(
table_id=self.table_id,
dataset_id=self.dataset_id,
json_schema_filename=json_schema_filename,
)
self.table_util.create_table()
self.test_query_generator = query_generator.QueryGenerator(
table_id=self.table_id, dataset_id=self.dataset_id)
def test_get_query_strings(self):
"""Tests QueryGenerator.get_query_strings().
Tests QueryGenerators's ability to create queries for a given table.
Returns:
True if test passes, else False.
"""
query_strings = self.test_query_generator.get_query_strings()
expected_query_strings = {
SELECT_ALL_ID: 'SELECT * FROM `{0:s}`',
SELECT_ONE_STRING_ID: 'SELECT string1 FROM `{0:s}`',
SELECT_50_PERCENT_ID: 'SELECT string1 FROM `{0:s}`'
}
assert query_strings == expected_query_strings
def tearDown(self):
"""Deletes any resources used by tests.
"""
self.bq_client.delete_dataset(dataset=self.dataset_ref,
delete_contents=True)
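# Added for direct execution (not in the original file): allows running this
# module on its own in addition to unittest/pytest discovery.
if __name__ == '__main__':
    unittest.main()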
| [
"bq_benchmarks.query_benchmark_tools.query_generator.QueryGenerator",
"os.path.join",
"google.cloud.bigquery.Dataset",
"os.path.dirname",
"bq_benchmarks.generic_benchmark_tools.table_util.TableUtil",
"google.cloud.bigquery.Client"
] | [((1693, 1710), 'google.cloud.bigquery.Client', 'bigquery.Client', ([], {}), '()\n', (1708, 1710), False, 'from google.cloud import bigquery\n'), ((1850, 1884), 'google.cloud.bigquery.Dataset', 'bigquery.Dataset', (['self.dataset_ref'], {}), '(self.dataset_ref)\n', (1866, 1884), False, 'from google.cloud import bigquery\n'), ((2077, 2132), 'os.path.join', 'os.path.join', (['abs_path', '"""test_schemas/test_schema.json"""'], {}), "(abs_path, 'test_schemas/test_schema.json')\n", (2089, 2132), False, 'import os\n'), ((2203, 2322), 'bq_benchmarks.generic_benchmark_tools.table_util.TableUtil', 'table_util.TableUtil', ([], {'table_id': 'self.table_id', 'dataset_id': 'self.dataset_id', 'json_schema_filename': 'json_schema_filename'}), '(table_id=self.table_id, dataset_id=self.dataset_id,\n json_schema_filename=json_schema_filename)\n', (2223, 2322), False, 'from bq_benchmarks.generic_benchmark_tools import table_util\n'), ((2441, 2528), 'bq_benchmarks.query_benchmark_tools.query_generator.QueryGenerator', 'query_generator.QueryGenerator', ([], {'table_id': 'self.table_id', 'dataset_id': 'self.dataset_id'}), '(table_id=self.table_id, dataset_id=self.\n dataset_id)\n', (2471, 2528), False, 'from bq_benchmarks.query_benchmark_tools import query_generator\n'), ((2019, 2044), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2034, 2044), False, 'import os\n')] |
# stdlib
import itertools
from collections import OrderedDict
from functools import partial
from typing import List, Tuple
# 3rd party
import numpy # type: ignore
from cycler import cycler # type: ignore
from matplotlib.axes import Axes # type: ignore
from matplotlib.figure import Figure # type: ignore
# this package
from domplotlib.styles.default import plt
def koch_snowflake() -> Tuple[Figure, Axes]:
"""
From https://matplotlib.org/3.3.3/gallery/lines_bars_and_markers/fill.html#sphx-glr-gallery-lines-bars-and-markers-fill-py
"""
def _koch_snowflake_complex(order):
if order == 0:
# initial triangle
angles = numpy.array([0, 120, 240]) + 90
return 10 / numpy.sqrt(3) * numpy.exp(numpy.deg2rad(angles) * 1j)
else:
ZR = 0.5 - 0.5j * numpy.sqrt(3) / 3
p1 = _koch_snowflake_complex(order - 1) # start points
p2 = numpy.roll(p1, shift=-1) # end points
dp = p2 - p1 # connection vectors
new_points = numpy.empty(len(p1) * 4, dtype=numpy.complex128)
new_points[::4] = p1
new_points[1::4] = p1 + dp / 3
new_points[2::4] = p1 + dp * ZR
new_points[3::4] = p1 + dp / 3 * 2
return new_points
points = _koch_snowflake_complex(5)
x, y = points.real, points.imag
fig: Figure = plt.figure(figsize=(8, 8))
ax = fig.subplots()
ax.axis("equal", emit=True)
ax.fill(x, y, label="Koch Snowflake")
return fig, ax
def hatch_filled_histograms() -> Tuple[Figure, Axes]:
"""
From https://matplotlib.org/3.3.3/gallery/lines_bars_and_markers/filled_step.html#sphx-glr-gallery-lines-bars-and-markers-filled-step-py
"""
def filled_hist(ax, edges, values, bottoms=None, orientation='v', **kwargs):
"""
Draw a histogram as a stepped patch.
Extra kwargs are passed through to `fill_between`
Parameters
----------
ax : Axes
The axes to plot to
edges : array
A length n+1 array giving the left edges of each bin and the
right edge of the last bin.
values : array
A length n array of bin counts or values
bottoms : float or array, optional
A length n array of the bottom of the bars. If None, zero is used.
orientation : {'v', 'h'}
Orientation of the histogram. 'v' (default) has
the bars increasing in the positive y-direction.
Returns
-------
ret : PolyCollection
Artist added to the Axes
"""
print(orientation)
if orientation not in "hv":
raise ValueError(f"orientation must be in {{'h', 'v'}} not {orientation}")
kwargs.setdefault("step", "post")
edges = numpy.asarray(edges)
values = numpy.asarray(values)
if len(edges) - 1 != len(values):
raise ValueError(
'Must provide one more bin edge than value not: '
'len(edges): {lb} len(values): {lv}'.format(lb=len(edges), lv=len(values))
)
if bottoms is None:
bottoms = 0
bottoms = numpy.broadcast_to(bottoms, values.shape)
values = numpy.append(values, values[-1])
bottoms = numpy.append(bottoms, bottoms[-1])
if orientation == 'h':
return ax.fill_betweenx(edges, values, bottoms, **kwargs)
elif orientation == 'v':
return ax.fill_between(edges, values, bottoms, **kwargs)
else:
raise AssertionError("you should never be here")
def stack_hist(
ax,
stacked_data,
sty_cycle,
bottoms=None,
hist_func=None,
labels=None,
plot_func=None,
plot_kwargs=None
):
# deal with default binning function
if hist_func is None:
hist_func = numpy.histogram
# deal with default plotting function
if plot_func is None:
plot_func = filled_hist
# deal with default
if plot_kwargs is None:
plot_kwargs = {}
print(plot_kwargs)
try:
l_keys = stacked_data.keys()
label_data = True
if labels is None:
labels = l_keys
except AttributeError:
label_data = False
if labels is None:
labels = itertools.repeat(None)
if label_data:
loop_iter = enumerate((stacked_data[lab], lab, s) for lab, s in zip(labels, sty_cycle))
else:
loop_iter = enumerate(zip(stacked_data, labels, sty_cycle))
arts = {}
for j, (data, label, sty) in loop_iter:
if label is None:
label = f'dflt set {j}'
label = sty.pop("label", label)
vals, edges = hist_func(data)
if bottoms is None:
bottoms = numpy.zeros_like(vals)
top = bottoms + vals
print(sty)
sty.update(plot_kwargs)
print(sty)
ret = plot_func(ax, edges, top, bottoms=bottoms, label=label, **sty)
bottoms = top
arts[label] = ret
ax.legend(fontsize=10)
return arts
# set up histogram function to fixed bins
edges = numpy.linspace(-3, 3, 20, endpoint=True)
hist_func = partial(numpy.histogram, bins=edges)
# set up style cycles
color_cycle = cycler(facecolor=plt.rcParams["axes.prop_cycle"][:4])
label_cycle = cycler(label=[f'set {n}' for n in range(4)])
hatch_cycle = cycler(hatch=['/', '*', '+', '|'])
# Fixing random state for reproducibility
numpy.random.seed(19680801)
stack_data = numpy.random.randn(4, 12250)
dict_data = OrderedDict(zip((c["label"] for c in label_cycle), stack_data))
fig, ax = plt.subplots(1, 1, figsize=(9, 4.5), tight_layout=True)
arts = stack_hist(ax, stack_data, color_cycle + label_cycle + hatch_cycle, hist_func=hist_func)
ax.set_ylabel("counts")
ax.set_xlabel('x')
return fig, ax
def h_bar_chart() -> Tuple[Figure, Axes]:
"""
https://matplotlib.org/3.3.3/gallery/lines_bars_and_markers/horizontal_barchart_distribution.html#sphx-glr-gallery-lines-bars-and-markers-horizontal-barchart-distribution-py
"""
category_names = ["Strongly disagree", "Disagree", "Neither agree nor disagree", "Agree", "Strongly agree"]
results = {
"Question 1": [10, 15, 17, 32, 26],
"Question 2": [26, 22, 29, 10, 13],
"Question 3": [35, 37, 7, 2, 19],
"Question 4": [32, 11, 9, 15, 33],
"Question 5": [21, 29, 5, 5, 40],
"Question 6": [8, 19, 5, 30, 38]
}
def survey(results, category_names):
"""
Parameters
----------
results : dict
A mapping from question labels to a list of answers per category.
It is assumed all lists contain the same number of entries and that
it matches the length of *category_names*.
category_names : list of str
The category labels.
"""
labels = list(results.keys())
data = numpy.array(list(results.values()))
data_cum = data.cumsum(axis=1)
category_colors = plt.get_cmap("RdYlGn")(numpy.linspace(0.15, 0.85, data.shape[1]))
fig, ax = plt.subplots(figsize=(9.2, 5))
ax.invert_yaxis()
ax.xaxis.set_visible(False)
ax.set_xlim(0, numpy.sum(data, axis=1).max())
for i, (colname, color) in enumerate(zip(category_names, category_colors)):
widths = data[:, i]
starts = data_cum[:, i] - widths
ax.barh(labels, widths, left=starts, height=0.5, label=colname, color=color)
xcenters = starts + widths / 2
r, g, b, _ = color
text_color = "white" if r * g * b < 0.5 else "darkgrey"
for y, (x, c) in enumerate(zip(xcenters, widths)):
ax.text(x, y, str(int(c)), ha="center", va="center", color=text_color)
ax.legend(ncol=len(category_names), bbox_to_anchor=(0, 1), loc="lower left", fontsize="small")
return fig, ax
return survey(results, category_names)
def markevery() -> Tuple[Figure, List[Axes]]:
"""
From https://matplotlib.org/3.3.3/gallery/lines_bars_and_markers/markevery_demo.html#sphx-glr-gallery-lines-bars-and-markers-markevery-demo-py
"""
# define a list of markevery cases to plot
cases = [None, 8, (30, 8), [16, 24, 30], [0, -1], slice(100, 200, 3), 0.1, 0.3, 1.5, (0.0, 0.1), (0.45, 0.1)]
# define the figure size and grid layout properties
figsize = (10, 8)
cols = 3
rows = len(cases) // cols + 1
# define the data for cartesian plots
delta = 0.11
x = numpy.linspace(0, 10 - 2 * delta, 200) + delta
y = numpy.sin(x) + 1.0 + delta
def trim_axs(axs, N):
"""
Reduce *axs* to *N* Axes. All further Axes are removed from the figure.
"""
axs = axs.flat
for ax in axs[N:]:
ax.remove()
return axs[:N]
fig = plt.figure(figsize=figsize, constrained_layout=True)
axs = fig.subplots(rows, cols)
axs = trim_axs(axs, len(cases))
colour_cycle = itertools.cycle(plt.rcParams["axes.prop_cycle"].by_key()["color"])
for ax, case in zip(axs, cases):
ax.set_title(f"markevery={case}")
ax.plot(x, y, 'o', ls='-', ms=4, markevery=case, label=f"markevery={case}", color=next(colour_cycle))
return fig, axs
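# --- Added usage sketch (not part of the original module) ---
# Render each demo figure to a PNG; file names and dpi are illustrative only.
if __name__ == "__main__":
	demos = [
		("koch_snowflake", koch_snowflake),
		("hatch_filled_histograms", hatch_filled_histograms),
		("h_bar_chart", h_bar_chart),
		("markevery", markevery),
	]
	for name, make_plot in demos:
		fig, _ = make_plot()
		fig.savefig(f"{name}.png", dpi=150)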
| [
"numpy.sqrt",
"numpy.array",
"numpy.sin",
"itertools.repeat",
"domplotlib.styles.default.plt.get_cmap",
"numpy.asarray",
"numpy.zeros_like",
"numpy.linspace",
"numpy.random.seed",
"numpy.deg2rad",
"cycler.cycler",
"numpy.random.randn",
"domplotlib.styles.default.plt.figure",
"numpy.roll",
"domplotlib.styles.default.plt.subplots",
"numpy.append",
"numpy.sum",
"functools.partial",
"numpy.broadcast_to"
] | [((1235, 1261), 'domplotlib.styles.default.plt.figure', 'plt.figure', ([], {'figsize': '(8, 8)'}), '(figsize=(8, 8))\n', (1245, 1261), False, 'from domplotlib.styles.default import plt\n'), ((4485, 4525), 'numpy.linspace', 'numpy.linspace', (['(-3)', '(3)', '(20)'], {'endpoint': '(True)'}), '(-3, 3, 20, endpoint=True)\n', (4499, 4525), False, 'import numpy\n'), ((4539, 4575), 'functools.partial', 'partial', (['numpy.histogram'], {'bins': 'edges'}), '(numpy.histogram, bins=edges)\n', (4546, 4575), False, 'from functools import partial\n'), ((4615, 4668), 'cycler.cycler', 'cycler', ([], {'facecolor': "plt.rcParams['axes.prop_cycle'][:4]"}), "(facecolor=plt.rcParams['axes.prop_cycle'][:4])\n", (4621, 4668), False, 'from cycler import cycler\n'), ((4744, 4778), 'cycler.cycler', 'cycler', ([], {'hatch': "['/', '*', '+', '|']"}), "(hatch=['/', '*', '+', '|'])\n", (4750, 4778), False, 'from cycler import cycler\n'), ((4824, 4851), 'numpy.random.seed', 'numpy.random.seed', (['(19680801)'], {}), '(19680801)\n', (4841, 4851), False, 'import numpy\n'), ((4867, 4895), 'numpy.random.randn', 'numpy.random.randn', (['(4)', '(12250)'], {}), '(4, 12250)\n', (4885, 4895), False, 'import numpy\n'), ((4985, 5040), 'domplotlib.styles.default.plt.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(9, 4.5)', 'tight_layout': '(True)'}), '(1, 1, figsize=(9, 4.5), tight_layout=True)\n', (4997, 5040), False, 'from domplotlib.styles.default import plt\n'), ((7873, 7925), 'domplotlib.styles.default.plt.figure', 'plt.figure', ([], {'figsize': 'figsize', 'constrained_layout': '(True)'}), '(figsize=figsize, constrained_layout=True)\n', (7883, 7925), False, 'from domplotlib.styles.default import plt\n'), ((2484, 2504), 'numpy.asarray', 'numpy.asarray', (['edges'], {}), '(edges)\n', (2497, 2504), False, 'import numpy\n'), ((2516, 2537), 'numpy.asarray', 'numpy.asarray', (['values'], {}), '(values)\n', (2529, 2537), False, 'import numpy\n'), ((2787, 2828), 'numpy.broadcast_to', 'numpy.broadcast_to', (['bottoms', 'values.shape'], {}), '(bottoms, values.shape)\n', (2805, 2828), False, 'import numpy\n'), ((2841, 2873), 'numpy.append', 'numpy.append', (['values', 'values[-1]'], {}), '(values, values[-1])\n', (2853, 2873), False, 'import numpy\n'), ((2886, 2920), 'numpy.append', 'numpy.append', (['bottoms', 'bottoms[-1]'], {}), '(bottoms, bottoms[-1])\n', (2898, 2920), False, 'import numpy\n'), ((6328, 6358), 'domplotlib.styles.default.plt.subplots', 'plt.subplots', ([], {'figsize': '(9.2, 5)'}), '(figsize=(9.2, 5))\n', (6340, 6358), False, 'from domplotlib.styles.default import plt\n'), ((7606, 7644), 'numpy.linspace', 'numpy.linspace', (['(0)', '(10 - 2 * delta)', '(200)'], {}), '(0, 10 - 2 * delta, 200)\n', (7620, 7644), False, 'import numpy\n'), ((853, 877), 'numpy.roll', 'numpy.roll', (['p1'], {'shift': '(-1)'}), '(p1, shift=-1)\n', (863, 877), False, 'import numpy\n'), ((6249, 6271), 'domplotlib.styles.default.plt.get_cmap', 'plt.get_cmap', (['"""RdYlGn"""'], {}), "('RdYlGn')\n", (6261, 6271), False, 'from domplotlib.styles.default import plt\n'), ((6272, 6313), 'numpy.linspace', 'numpy.linspace', (['(0.15)', '(0.85)', 'data.shape[1]'], {}), '(0.15, 0.85, data.shape[1])\n', (6286, 6313), False, 'import numpy\n'), ((7658, 7670), 'numpy.sin', 'numpy.sin', (['x'], {}), '(x)\n', (7667, 7670), False, 'import numpy\n'), ((637, 663), 'numpy.array', 'numpy.array', (['[0, 120, 240]'], {}), '([0, 120, 240])\n', (648, 663), False, 'import numpy\n'), ((4181, 4203), 'numpy.zeros_like', 'numpy.zeros_like', (['vals'], {}), 
'(vals)\n', (4197, 4203), False, 'import numpy\n'), ((684, 697), 'numpy.sqrt', 'numpy.sqrt', (['(3)'], {}), '(3)\n', (694, 697), False, 'import numpy\n'), ((3769, 3791), 'itertools.repeat', 'itertools.repeat', (['None'], {}), '(None)\n', (3785, 3791), False, 'import itertools\n'), ((6426, 6449), 'numpy.sum', 'numpy.sum', (['data'], {'axis': '(1)'}), '(data, axis=1)\n', (6435, 6449), False, 'import numpy\n'), ((710, 731), 'numpy.deg2rad', 'numpy.deg2rad', (['angles'], {}), '(angles)\n', (723, 731), False, 'import numpy\n'), ((767, 780), 'numpy.sqrt', 'numpy.sqrt', (['(3)'], {}), '(3)\n', (777, 780), False, 'import numpy\n')] |
#!/usr/bin/env python3
import time
from cmd import Cmd
from node import Node
from threading import Thread, Event
import random as r
import math as m
import queue
class BNode(Cmd):
    def do_init(self, args=''):
        """Initializes the node."""
        self.node = Node(debug=True)
        self.helping = Event()
        # Keep a handle on the helper thread (Thread.start() returns None).
        self.helper = Thread(target=self.__helper)
        self.helper.start()
        print('init Pi Monte Carlo Node Started')
def do_quit(self, args):
"""Terminates the node."""
self.helping.clear()
self.node.kill()
raise SystemExit
def do_list(self, args):
peers = self.node.peers()
for peer in peers.keys():
print("peer %s-(%s, %5d)" % \
(peer, peers[peer][0], peers[peer][1]))
print('done')
def do_calc(self, args):
args = args.split()
if len(args) != 1:
print('---- Invalid arguments!')
return
self.peers = self.node.peers()
self.distributed_monte_carlo(int(args[0]))
def distributed_monte_carlo(self, points):
start = time.time()
count = len(self.peers)
if count < 1:
print('---- No peers available')
return
for peer in self.peers:
self.node.send_message(peer, 'calculate-' + str(m.ceil(points/count)))
received = 0
points_inside = 0
while received < count:
if not self.node.messages.empty():
message = self.node.messages.get()
split = message['contents'].split('-')
if split[0] == 'results':
points_inside += int(split[1])
received += 1
else:
self.node.messages.put(message)
        # Every peer simulates ceil(points/count) samples, so use the actual total.
        total_points = count * m.ceil(points / count)
        # points_inside / total_points approximates pi / 4
        pi = (float(points_inside) / total_points) * 4
        end = time.time()
        cost = round(end - start, 3)
        print('calc Calculated Pi =', pi, 'Time =', cost, 'secs')
def __helper(self):
self.helping.set()
while self.helping.is_set():
if not self.node.messages.empty():
message = self.node.messages.get()
split = message['contents'].split('-')
if split[0] == 'calculate':
print('calc Processing', split[1], 'points for', message['sender'])
points_inside = 0
for i in range(int(split[1])):
                        # Sample a point in the unit square (coordinates squared up front).
                        x = r.random()**2
                        y = r.random()**2
                        # Count the point if it falls inside the quarter unit circle.
                        if m.sqrt(x + y) < 1.0:
points_inside += 1
print('send results')
self.node.send_message(message['sender'], 'results-'+str(points_inside))
else:
self.node.messages.put(message)
if __name__ == '__main__':
cmd = BNode()
cmd.do_init()
cmd.prompt = ''
cmd.cmdloop('')
| [
"math.ceil",
"math.sqrt",
"threading.Event",
"random.random",
"threading.Thread",
"node.Node",
"time.time"
] | [((264, 280), 'node.Node', 'Node', ([], {'debug': '(True)'}), '(debug=True)\n', (268, 280), False, 'from node import Node\n'), ((304, 311), 'threading.Event', 'Event', ([], {}), '()\n', (309, 311), False, 'from threading import Thread, Event\n'), ((1086, 1097), 'time.time', 'time.time', ([], {}), '()\n', (1095, 1097), False, 'import time\n'), ((1881, 1892), 'time.time', 'time.time', ([], {}), '()\n', (1890, 1892), False, 'import time\n'), ((334, 362), 'threading.Thread', 'Thread', ([], {'target': 'self.__helper'}), '(target=self.__helper)\n', (340, 362), False, 'from threading import Thread, Event\n'), ((1308, 1330), 'math.ceil', 'm.ceil', (['(points / count)'], {}), '(points / count)\n', (1314, 1330), True, 'import math as m\n'), ((2550, 2560), 'random.random', 'r.random', ([], {}), '()\n', (2558, 2560), True, 'import random as r\n'), ((2592, 2602), 'random.random', 'r.random', ([], {}), '()\n', (2600, 2602), True, 'import random as r\n'), ((2692, 2705), 'math.sqrt', 'm.sqrt', (['(x + y)'], {}), '(x + y)\n', (2698, 2705), True, 'import math as m\n')] |
"""
ClientSync
"""
import os
import logging
import asyncio
import bson
from file_system import FileSystem
class ClientSync:
"""
Client Sync class
"""
    def __init__(self, host: str, port: int, source: str,
                 loop: asyncio.AbstractEventLoop = None) -> None:
        """
        Initialize the sync client.
        :param host: Host address
        :param port: Port number
        :param source: Source directory whose files will be sent
        :param loop: Event loop
        """
self.host = host
self.port = port
self.source = source
self._loop = loop or asyncio.get_event_loop()
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.host}, {self.port})'
async def task_builder(self):
"""
Client tasks runner.
"""
tasks = []
# Select files for sending
files = []
for file_ in FileSystem.get_files_with_size(source_path=self.source):
files.append({
'name': file_['name'],
'size': file_['size'],
})
meta = {
'chunks': files
}
for file in files:
task = asyncio.ensure_future(
self.send_file(file_path=file['name'], **meta)
)
tasks.append(task)
contents = await asyncio.gather(*tasks)
return contents
def start(self) -> None:
"""
Start sync client.
"""
logging.info('Client Sync is started')
future = asyncio.ensure_future(self.task_builder())
self._loop.run_until_complete(future)
def stop(self) -> None:
"""
Stop sync client.
"""
self._loop.close()
logging.info('Client Sync has been stopped')
async def _echo(self, message):
"""
Sender.
:param message: Sending message
"""
_, writer = await asyncio.open_connection(
host=self.host,
port=self.port,
loop=self._loop
)
logging.info('Send the message')
writer.write(message)
await writer.drain()
logging.info('Close the connection')
writer.close()
await writer.wait_closed()
async def send_file(self, file_path: str, **meta):
"""
Send file.
        :param file_path: File path relative to the source directory
:param meta: Meta information
"""
full_file_path = os.path.join(self.source, file_path)
with open(full_file_path, mode='rb') as f_handle:
file_chunk = f_handle.read()
# build data frame
data_frame = {
'data': file_chunk,
'meta': {
'name': file_path,
'chunks': meta.get('chunks', [])
}
}
# data frame to BSON
bson_data_frame = bson.dumps(data_frame)
await self._echo(bson_data_frame)
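# Minimal usage sketch (host, port and source values below are placeholders):
#   client = ClientSync(host='127.0.0.1', port=8888, source='./data')
#   client.start()  # sends every file found under ./data
#   client.stop()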
| [
"bson.dumps",
"os.path.join",
"asyncio.open_connection",
"asyncio.gather",
"asyncio.get_event_loop",
"logging.info",
"file_system.FileSystem.get_files_with_size"
] | [((868, 923), 'file_system.FileSystem.get_files_with_size', 'FileSystem.get_files_with_size', ([], {'source_path': 'self.source'}), '(source_path=self.source)\n', (898, 923), False, 'from file_system import FileSystem\n'), ((1443, 1481), 'logging.info', 'logging.info', (['"""Client Sync is started"""'], {}), "('Client Sync is started')\n", (1455, 1481), False, 'import logging\n'), ((1703, 1747), 'logging.info', 'logging.info', (['"""Client Sync has been stopped"""'], {}), "('Client Sync has been stopped')\n", (1715, 1747), False, 'import logging\n'), ((2021, 2053), 'logging.info', 'logging.info', (['"""Send the message"""'], {}), "('Send the message')\n", (2033, 2053), False, 'import logging\n'), ((2122, 2158), 'logging.info', 'logging.info', (['"""Close the connection"""'], {}), "('Close the connection')\n", (2134, 2158), False, 'import logging\n'), ((2416, 2452), 'os.path.join', 'os.path.join', (['self.source', 'file_path'], {}), '(self.source, file_path)\n', (2428, 2452), False, 'import os\n'), ((556, 580), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (578, 580), False, 'import asyncio\n'), ((1305, 1327), 'asyncio.gather', 'asyncio.gather', (['*tasks'], {}), '(*tasks)\n', (1319, 1327), False, 'import asyncio\n'), ((1893, 1965), 'asyncio.open_connection', 'asyncio.open_connection', ([], {'host': 'self.host', 'port': 'self.port', 'loop': 'self._loop'}), '(host=self.host, port=self.port, loop=self._loop)\n', (1916, 1965), False, 'import asyncio\n'), ((2862, 2884), 'bson.dumps', 'bson.dumps', (['data_frame'], {}), '(data_frame)\n', (2872, 2884), False, 'import bson\n')] |
from typing import Any, Tuple
from dice_rolling import Die
class RollBuilder:
"""Class to build a complete throw of the dice.
"""
def __init__(self, seed: Any = None):
"""Constructor of RollBuilder.
:param Any seed: Seed for the dice, if any.
"""
self.__n_dice = 1
self.__n_sides = 0
self.__addition = 0
self.__keep = 0
self.__rolls = []
self.__discarded = []
Die.set_seed(seed)
def set_amount_of_dice(self, n_dice: int) -> None:
"""Method to set the amount of dice.
:param int n_dice: Amount of dice.
"""
self.__n_dice = n_dice
def set_number_of_sides(self, n_sides: int) -> None:
"""Method to set the number of sides of the dice.
:param int n_sides: Number of sides of the dice.
"""
self.__n_sides = n_sides
def addition_to_roll(self, addition: int) -> None:
"""Method to set the amount to add to every rolled die.
:param int addition: Amount to add.
"""
self.__addition += addition
def keep_n(self, n_items: int) -> None:
"""Method to set the number and preference to keep after every die has been
rolled.
- If n_items > 0: It will keep the highest n_items.
- If n_items < 0: It will keep the lowest n_items.
:param int n_items: Number and preference to keep.
"""
if abs(n_items) <= self.__n_dice:
self.__keep = n_items
def build(self) -> None:
"""Method to build the complete throw of the dice after every parameter
has been set.
"""
self.__rolls = []
for _ in range(self.__n_dice):
self.__rolls.append(
Die(self.__n_sides).roll() + self.__addition
)
        if self.__keep:
            # Sort descending to keep the highest rolls, ascending to keep the lowest.
            sort = sorted(self.__rolls, reverse=self.__keep > 0)
            n_kept = abs(self.__keep)
            self.__rolls = sort[:n_kept]
            self.__discarded = sort[n_kept:]
def get_result(self) -> list:
"""Method to obtain the kept results of the complete roll.
The discarded dice are not included.
:returns: The kept results of the complete roll.
"""
return self.__rolls
def get_full_result(self) -> Tuple[int, list, list]:
"""Method to obtain the full results of the complete roll.
:returns: The full results of the full roll. The kept and the discarded rolls.
"""
return sum(self.__rolls), self.__rolls, self.__discarded
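# Illustrative usage sketch (not part of the library): roll 4d6, keep the highest 3.
#   builder = RollBuilder(seed=42)
#   builder.set_amount_of_dice(4)
#   builder.set_number_of_sides(6)
#   builder.keep_n(3)
#   builder.build()
#   total, kept, discarded = builder.get_full_result()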
| [
"dice_rolling.Die.set_seed",
"dice_rolling.Die"
] | [((458, 476), 'dice_rolling.Die.set_seed', 'Die.set_seed', (['seed'], {}), '(seed)\n', (470, 476), False, 'from dice_rolling import Die\n'), ((1768, 1787), 'dice_rolling.Die', 'Die', (['self.__n_sides'], {}), '(self.__n_sides)\n', (1771, 1787), False, 'from dice_rolling import Die\n')] |
#!/usr/bin/python
"""
Electobot
by <NAME> (http://github.com/ZsigE)
Analysis and prediction tool based on the 2010 UK General Election results
Constants for use by all modules
"""
import math
import logging
import os
# Party names
CON = "Conservative"
LAB = "Labour"
LD = "Lib-Dem"
SNP = "SNP"
PC = "PC"
GRN = "Green"
BNP = "BNP"
UKP = "UKIP"
OTH = "Other"
# List of all party names
PARTY_NAMES = [CON, LAB, LD, SNP, PC, GRN, BNP, UKP, OTH]
# Colours to represent each party, in matplotlib colour identifiers
PARTY_COLOURS = {CON: "b",
LAB: "r",
LD: "Orange",
SNP: "Yellow",
PC: "DarkGreen",
GRN: "g",
BNP: "Indigo",
UKP: "Purple",
OTH: "Gray"}
# Data CSV file paths
RESOURCE_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"..",
"res")
HARVARD_CSV = os.path.join(RESOURCE_DIR, "harvard_election_results_2010.csv")
GUARDIAN_CSV = os.path.join(RESOURCE_DIR, "guardian_election_results_2010.csv")
# Wikipedia's historical poll data API URL
WIKI_POLLS_URL = ("http://en.wikipedia.org/w/api.php?action=parse&prop=text&"
"page=Opinion_polling_for_the_2015_United_Kingdom_general_"
"election&format=json")
# Constituency numbers (only including those contested in 2010)
NUM_OF_CONSTITUENCIES = 649
NEEDED_FOR_MAJORITY = int(math.ceil(NUM_OF_CONSTITUENCIES / 2.0))
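# Worked example: 649 constituencies -> 649 / 2.0 = 324.5 -> ceil gives 325 seats for a majority.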
# Tuning parameters for the model
RESULT_TOLERANCE = 0.03 # In percentage points divided by 100
SUPPORT_VARIATION = 0.005 # Also in percentage points
SWING_SCALE_FACTOR = 70.0 # Scale the amount of variance in vote numbers by this
# User agent to use when fetching historical poll data
USER_AGENT_STR = "Electobot PollScrape http://github.com/ZsigE/electobot"
# Results timeout. Set this to None on very slow machines.
RESULTS_TIMEOUT = 10
# Logging
LOGS_DIR = "logs"
LOG_FILE = os.path.join(LOGS_DIR, "electobot.log")
LOG_LEVEL = logging.INFO | [
"math.ceil",
"os.path.join",
"os.path.abspath"
] | [((967, 1030), 'os.path.join', 'os.path.join', (['RESOURCE_DIR', '"""harvard_election_results_2010.csv"""'], {}), "(RESOURCE_DIR, 'harvard_election_results_2010.csv')\n", (979, 1030), False, 'import os\n'), ((1046, 1110), 'os.path.join', 'os.path.join', (['RESOURCE_DIR', '"""guardian_election_results_2010.csv"""'], {}), "(RESOURCE_DIR, 'guardian_election_results_2010.csv')\n", (1058, 1110), False, 'import os\n'), ((1995, 2034), 'os.path.join', 'os.path.join', (['LOGS_DIR', '"""electobot.log"""'], {}), "(LOGS_DIR, 'electobot.log')\n", (2007, 2034), False, 'import os\n'), ((1472, 1508), 'math.ceil', 'math.ceil', (['(NUM_OF_CONSTITUENCIES / 2)'], {}), '(NUM_OF_CONSTITUENCIES / 2)\n', (1481, 1508), False, 'import math\n'), ((856, 881), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (871, 881), False, 'import os\n')] |
# -*- coding: utf-8 -*-
"""
run using: blender --python render.py
Note that this is coded in Python 3
"""
#%% Imports
import bpy
import sys, os, time, re
import numpy as np
from numpy.linalg import norm
from numpy import array, pi, reshape, round
import scipy.io
# Define font
times = bpy.data.fonts.load("/usr/share/fonts/TTF/times.ttf")
#%% Function definitions
rodLib = {} # Contains rods of various aspect ratios
def CreateRod(pos, r, material):
pos = array(pos)
LV = pos[1]-pos[0]
L = norm(LV)
assert r[0] == r[1]
rMean = sum(r)/2
aspect = round(L/(2*rMean),1)
keyName = str(aspect)+material # Dictionary stored based on aspect ratio and material
if not keyName in rodLib:
Say("\t\tAspect = {} with material = {} NOT in rodLib".format(aspect, material), verbosity=2)
bpy.ops.mesh.primitive_cylinder_add(depth=L, location=(0, 0, L/2), radius=rMean)
spring = bpy.context.object # Ugly but appears to be the way to do it
bpy.ops.mesh.primitive_uv_sphere_add(location=(0, 0, L), size=rMean)
ball1 = bpy.context.object
bpy.ops.mesh.primitive_uv_sphere_add(location=(0, 0, 0), size=rMean)
ball0 = bpy.context.object
# Deselect everything, then select objects in the right order (making ball0 the active object)
bpy.ops.object.select_all(action='DESELECT')
spring.select = True
ball1.select = True
ball0.select = True
rod = ball0
# Join meshes, easier to work on entire cell
bpy.ops.object.join()
# Apply modifiers and smoothing
bpy.ops.object.modifier_add(type='EDGE_SPLIT') # Required to get proper "shinyness"
bpy.ops.object.shade_smooth()
# Set rotation mode
rod.rotation_mode = 'AXIS_ANGLE' # Other rotations are sequential: rotate around X, THEN around y, etc.
# Set material
rod.active_material = bpy.data.materials[material]
# Add this object to the library
rodLib[keyName] = [rod, rMean] # rMean required for scaling only, material is copied by reference, not value, so needs to be recreated for different material
else:
Say("\t\tAspect = {} in rodLib".format(aspect), verbosity=2)
originalRod,originalR = rodLib.get(keyName)
rod = originalRod.copy()
rod.scale = [rMean/originalR]*3 # Make it a len 3 vector
rod.name = rod.name + "_copy_r{}".format(rMean) # Need to set something as to not have duplicate names in scene
# Link to scene (not needed for original CreateRod)
bpy.context.scene.objects.link(rod)
# Identical for copied or new. Define vector in XY plane we will revolve along
rod.rotation_axis_angle[1] = -1*LV[1] # X axis (use Y position). Relative, absolute doesn't matter. -1* because we want perpendicular vector
rod.rotation_axis_angle[2] = LV[0] # Y axis (use X position)
# Calculate how much we need to rotate (angle from [0 0 L] to [x y 0] to get vector [0 0 L] to overlay [x y z])
rotationRad = np.arccos(LV[2]/L)
rod.rotation_axis_angle[0] = rotationRad # W (amount of rotation around defined axis)
# Displace. Since selected object is still ball0 (and this is at the origin), displace to correct position for this one.
rod.location = pos[0,:]
return rod # Returns entire, merged cell
sphereLib = {}
def CreateSphere(pos, r, material):
pos = array(pos)
r = r
keyName = material
if not keyName in sphereLib:
Say("\t\tDefining initial spherical cell", verbosity=2)
bpy.ops.mesh.primitive_uv_sphere_add(location=pos, size=r)
sphere = bpy.context.object
bpy.ops.object.shade_smooth()
sphere.active_material = bpy.data.materials[material]
sphereLib[keyName] = [sphere, r]
else:
Say("\t\tCopying existing sphere", verbosity=2)
originalSphere,originalR = sphereLib[keyName]
sphere = originalSphere.copy()
sphere.scale = [r/originalR]*3
sphere.location = pos
        # Link copy to scene (the add operator already linked the original sphere)
        bpy.context.scene.objects.link(sphere)
return sphere
cylLib = {}
def CreateSpring(pos, r, material):
pos = array(pos)
LV = pos[1,:]-pos[0,:]
L = norm(LV)
keyName = material
if not keyName in cylLib:
Say("\t\tDefining initial spring", verbosity=2)
bpy.ops.mesh.primitive_cylinder_add(depth=L, location=(0, 0, L/2), radius=0.15)
cyl = bpy.context.object
bpy.ops.object.shade_smooth()
# Set rotation mode
cyl.rotation_mode = 'AXIS_ANGLE'
# Set material
cyl.active_material = bpy.data.materials[material]
# Set as original spring
cylLib[keyName] = [cyl, L]
else:
Say("\t\tCopying existing spring", verbosity=2)
originalCyl,originalL = cylLib[keyName]
cyl = originalCyl.copy()
cyl.scale[2] = L/originalL
        # Link copy to scene (the add operator already linked the original cylinder)
        bpy.context.scene.objects.link(cyl)
# Define vector in XY plane we will revolve along (note: see CreateRod for method)
cyl.rotation_axis_angle[1] = -1*LV[1]
cyl.rotation_axis_angle[2] = LV[0]
# Calculate how much we need to rotate
rotationRad = np.arccos(LV[2]/L)
cyl.rotation_axis_angle[0] = rotationRad
# Displace.
cyl.location = (pos[0,:]+pos[1,:])/2
return cyl
###############################################################################
def Offset(offset):
offsetObjects = ['Sphere', 'Rod', 'Stick', 'Fil', 'Anchor', 'Sun'] # Objects that will be offset
for k in bpy.data.objects.keys():
for o in offsetObjects:
if o in k: # This is an object that will be offset
obj = bpy.data.objects[k]
obj.location = offset + np.array(obj.location)
###############################################################################
def DefineMaterials():
# Prepare materials
inkM = bpy.data.materials.new('ink') # ink (text, lines)
inkM.diffuse_intensity = 0
inkM.specular_intensity = 0
inkM.use_cast_shadows = False
whiteM = bpy.data.materials.new('white') # Bottom plane (to cash shadows on)
whiteM.diffuse_color = (1, 1, 1)
whiteM.diffuse_intensity = 1
whiteM.specular_intensity = 0
whiteM.diffuse_shader = 'TOON' # Give it a cartoon-ish finish, clear shadows and lines
greyM = bpy.data.materials.new('grey') # Bottom plane (to cash shadows on), for E. coli
greyM.diffuse_color = (0.5, 0.5, 0.5) # Grey (substratum)
greyM.diffuse_intensity = 1
greyM.specular_intensity = 0
greyM.diffuse_shader = 'LAMBERT'
wireM = bpy.data.materials.new('wire') # wire (grid)
wireM.type = 'WIRE'
wireM.specular_intensity = 0
wireM.diffuse_color = (0, 0, 0)
cellDssM = bpy.data.materials.new('cellDss')
cellDssM.diffuse_color = (0.3, 1.0, 0.0) # Medium-dark green/DSS
cellDssM.diffuse_intensity = 0.7
cellDssM.specular_color = (0.6, 1.0, 0.5)
cellDssM.specular_intensity = 0.1
cellDssM.specular_hardness = 5
cellDssM.specular_shader = 'PHONG'
cellAnmeM = bpy.data.materials.new('cellAnme')
cellAnmeM.diffuse_color = (0.4, 0.0, 0.0) # Dark red/ANME
cellAnmeM.diffuse_intensity = 0.7
cellAnmeM.specular_color = (1.0, 0.25, 0.25)
cellAnmeM.specular_intensity = 0.1
cellAnmeM.specular_hardness = 5
cellAnmeM.specular_shader = 'PHONG'
cell0M = bpy.data.materials.new('cell0')
cell0M.diffuse_color = (0.4, 0.0, 0.0) # Dark red/E. coli gen. 1
cell0M.diffuse_intensity = 0.7
cell0M.specular_color = (1.0, 0.25, 0.25)
cell0M.specular_intensity = 0.1
cell0M.specular_hardness = 5
cell0M.specular_shader = 'PHONG'
cell1M = bpy.data.materials.new('cell1')
cell1M.diffuse_color = (1.0, 1.0, 0.5) # Bright yellow/E. coli gen. 2
cell1M.diffuse_intensity = 0.6
cell1M.specular_color = (1.0, 1.0, 0.8)
cell1M.specular_intensity = 0.1
cell1M.specular_hardness = 5
cell1M.specular_shader = 'PHONG'
cell2M = bpy.data.materials.new('cell2')
cell2M.diffuse_color = (0.1, 1.0, 1.0) # Medium-bright blue/E. coli gen 3
cell2M.diffuse_intensity = 0.6
cell2M.specular_color = (1.0, 1.0, 1.0)
cell2M.specular_intensity = 0.1
cell2M.specular_hardness = 5
cell2M.specular_shader = 'PHONG'
cell3M = bpy.data.materials.new('cell3')
cell3M.diffuse_color = (0.0, 1.0, 0.0) # Medium-dark green/E. coli gen 4
cell3M.diffuse_intensity = 0.6
cell3M.specular_color = (0.6, 1.0, 0.5)
cell3M.specular_intensity = 0.1
cell3M.specular_hardness = 5
cell3M.specular_shader = 'PHONG'
stickM = bpy.data.materials.new('stick') # EPS (sticking, adhesive)
stickM.diffuse_color = (1.0, 1.0, 1.0)
stickM.diffuse_intensity = 1.0
stickM.specular_intensity = 0.1
#stickM.use_shadows = False # Shadows are not cast on the nanowire
#stickM.use_object_color = True # So we can assign colour scale to the nanowire, for rates
filM = bpy.data.materials.new('fil') # EPS (filament)
filM.diffuse_color = (0.0, 0.0, 0.0)
filM.diffuse_intensity = 0.5
filM.specular_intensity = 0.1
anchorM = bpy.data.materials.new('anchor') # EPS (anchoring)
anchorM.diffuse_color = (0.0, 0.0, 0.0)
anchorM.diffuse_intensity = 0.5
anchorM.specular_intensity = 0.1
###############################################################################
def CameraPerspSetup(location, rotation): # Does not allow automatic configuration
bpy.ops.object.camera_add(location=location, rotation=rotation)
cam = bpy.context.object
cam.name = 'CameraPersp'
cam.data.clip_end = 1000 # Render whole range. This number will suffice
cam.data.lens = 25
cam.data.sensor_width = 30
bpy.context.scene.camera = cam # Set as active camera
"""
Focal blur: see http://wiki.blender.org/index.php/Doc:2.6/Tutorials/Composite_Nodes/Setups/Depth_Of_Field
Good settings:
- Add defocus composite
- Use Z buffer
- Link Zs
- Distance 70-100
- fStop 0.3-0.5
Don't forget to deselect preview!
"""
# Light point behind camera
bpy.ops.object.lamp_add(type='POINT', location=(0,0,5)) # Location is relative to camera
light = bpy.context.object
light.name = 'PointCamPersp'
light.data.energy = 0.8 # 1.0 is too bright for E. coli
light.data.falloff_type = 'CONSTANT'
bpy.ops.object.select_all(action='DESELECT') # Make camera the light point's parent (more detail in animation section, bottom of file)
light.parent = cam
lightTracker = light.constraints.new('TRACK_TO') # Tell camera to track empty (for rotation)
lightTracker.target = cam
lightTracker.track_axis = 'TRACK_Z'
def CameraSideSetup(distance=None, Lx=20, Ly=20, Lz=20, camWidth=1920, camHeight=1080): # Add camera to the scene, for side view
if distance is None:
distance = max([camWidth/camHeight*Ly+20, camWidth/camHeight*Lz+20, Lx+20])
camPos,camRot,camName = [(Lx/2.0, Ly-distance, Lz/2.0), (0.5*pi, 0, 0), 'CameraSide'] # This is a useful formula
bpy.ops.object.camera_add(location=camPos, rotation=camRot)
cam = bpy.context.object
cam.name = camName
cam.data.clip_end = 1000 # Render whole range. This number will suffice
cam.data.type = 'ORTHO'
cam.data.ortho_scale = distance
bpy.context.scene.camera = cam # Set as active camera
# Add light point
bpy.ops.object.lamp_add(type='POINT', location=camPos) # Location is relative to camera
light = bpy.context.object
light.name = 'PointCamSide'
light.data.falloff_type = 'CONSTANT'
def CameraTopSetup(distance=None, Lx=20, Ly=20, Lz=20, camWidth=1920, camHeight=1080): # Add camera to the scene, for top view
if distance is None:
distance = max([camWidth/camHeight*Ly+20, camWidth/camHeight*Lz+20, Lx+20])
camPos,camRot,camName = [(Lx/2.0, Ly/2.0, distance), (0, 0, 0), 'CameraTop']
bpy.ops.object.camera_add(location=camPos, rotation=camRot)
cam = bpy.context.object
cam.name = camName
cam.data.clip_end = 1000 # Render whole range. This number will suffice
cam.data.type = 'ORTHO'
cam.data.ortho_scale = distance
bpy.context.scene.camera = cam # Set as active camera
def CamerasDelete(): # Delete only the side and top camera
for cam in ['CameraSide', 'CameraTop']:
obj = bpy.data.objects[cam]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
def CameraPerspDisable():
bpy.data.objects['PointCamPersp'].hide_render=True
def CameraSideDisable():
bpy.data.objects['PointCamSide'].hide_render=True
###############################################################################
def SetupXYZLegend(location=(0.0, 0.0, 0.0), fontSize=1, textSpacing=2): # The three arrows at the origin, showing which direction is X, Y, Z
inkM = bpy.data.materials['ink']
#%% Draw XYZ axis legend
axLegCylH = 3.0*fontSize # Arrow body
axLegConeH = 0.8*fontSize # Arrow head
axLegCylR = 0.2*fontSize # Arrow radius
for ax,locCyl,locCone,rot in zip(['X', 'Y', 'Z'], \
[np.add((axLegCylH/2, 0.0, 0.0), location), np.add((0.0, axLegCylH/2, 0), location), np.add((0.0, 0.0, axLegCylH/2), location)], \
[np.add((axLegCylH+axLegConeH/2, 0.0, 0.0), location), np.add((0.0, axLegCylH+axLegConeH/2, 0), location), np.add((0.0, 0.0, axLegCylH+axLegConeH/2), location)], \
[(0, pi/2, 0), (3*pi/2, 0, 0), (0, 0, 0)]):
bpy.ops.mesh.primitive_cylinder_add(radius=axLegCylR, depth=axLegCylH, location=locCyl, rotation=rot)
bpy.ops.object.shade_smooth()
bpy.context.object.name = 'legendCyl'+ax
bpy.context.object.active_material = inkM
bpy.ops.mesh.primitive_cone_add(radius1=axLegCylR*2, depth=axLegConeH, location=locCone, rotation=rot)
bpy.ops.object.shade_smooth()
bpy.context.object.name = 'legendCone'+ax
bpy.context.object.active_material = inkM
# Create text
bpy.ops.object.text_add(location=np.add((textSpacing, -fontSize*5.0*0.5, 0), location))
xText = bpy.context.object
xText.name = 'legendX'
xText.data.body = 'x'
bpy.ops.object.text_add(location=np.add((-fontSize*5.0*0.5, textSpacing, 0), location))
yText = bpy.context.object
yText.name = 'legendY'
yText.data.body = 'y'
bpy.ops.object.text_add(location=np.add((-fontSize*5.0*0.5, -fontSize*5.0*0.5, 0), location))
zText = bpy.context.object
zText.name = 'legendZ'
zText.data.body = 'z'
# Set, move text in place
for text in (xText, yText, zText):
text.data.size = fontSize*5.0
text.active_material = inkM
text.data.font = times
return [xText, yText, zText]
def DeleteLegends():
for k in bpy.data.objects.keys():
if 'legend' in k:
obj = bpy.data.objects[k]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
def SetupScalebarLegend(location=(-20,-20, 0), length=10, fontSize=1):
inkM = bpy.data.materials['ink']
bpy.ops.mesh.primitive_cylinder_add(radius=0.2*fontSize, depth=length, location=location, rotation=(0, pi/2, 0))
bpy.ops.object.shade_smooth()
scalebarCyl = bpy.context.object
scalebarCyl.name = 'legendScalebarCyl'
scalebarCyl.active_material = inkM
bpy.ops.mesh.primitive_cube_add(radius=1, location=np.array(location)-np.array((length/2.0, 0.0, 0.0)))
scalebarLeftMarker = bpy.context.object
scalebarLeftMarker.name = 'legendScalebarLeftMarker'
scalebarLeftMarker.dimensions = (0.1*fontSize, fontSize, fontSize)
scalebarLeftMarker.active_material = inkM
bpy.ops.mesh.primitive_cube_add(radius=1, location=np.array(location)+np.array((length/2.0, 0.0, 0.0)))
scalebarRightMarker = bpy.context.object
scalebarRightMarker.name = 'legendScalebarRightMarker'
scalebarRightMarker.dimensions = (0.1*fontSize, fontSize, fontSize)
scalebarRightMarker.active_material = inkM
bpy.ops.object.text_add(location=np.array(location)-np.array([0,fontSize*5.0,0]))
text = bpy.context.object
text.data.body = str(int(length)) + ' um'
text.data.align = 'CENTER'
text.name = 'legendScalebarText'
text.data.size = fontSize*5.0
text.active_material = inkM
text.data.font = times
###############################################################################
def SetupPlanes(drawPlaneZ=True, drawPlaneGrid=(False, True, True), Lx=20, Ly=20, Lz=20, radiusZPlane=None, stepSize=10.0):
surfaceM = surfaceMaterial
wireM = bpy.data.materials['wire']
# Plane to project shadows on
if drawPlaneZ:
if radiusZPlane is None:
planeRadius = Lx/2
else:
planeRadius = radiusZPlane
bpy.ops.mesh.primitive_plane_add(radius=planeRadius, location=(Lx/2, Ly/2, -0.1))
planeZ = bpy.context.object
planeZ.name = 'planeZ'
planeZHeightScale = Ly/Lx
planeZ.scale[1] = planeZHeightScale
planeZ.active_material = surfaceM
#%% Draw grid
if drawPlaneGrid[2]:
# Z plane (horizontal)
bpy.ops.mesh.primitive_grid_add(x_subdivisions=int(Lx/stepSize)+1, y_subdivisions=int(Ly/stepSize)+1, radius=Lx/2)
planeZGrid = bpy.context.object
planeZGrid.name = 'planeZGrid'
planeZGrid.location = [Lx/2, Ly/2, 0.0]
planeZGrid.active_material = wireM
planeZGrid.rotation_euler[2] = 1*pi
planeZGrid.scale[1] = planeZHeightScale
if drawPlaneGrid[1]:
# Y plane (back)
PlaneYHeightScale = Lz/Lx
bpy.ops.mesh.primitive_grid_add(x_subdivisions=int(Lx/stepSize)+1, y_subdivisions=int(Lz/stepSize)+1, radius=Lx/2)
PlaneYGrid = bpy.context.object
PlaneYGrid.name = 'planeYGrid'
PlaneYGrid.active_material = wireM
PlaneYGrid.location = [Lx/2, Ly, Lz/2]
PlaneYGrid.rotation_euler[0] = 0.5*pi
PlaneYGrid.scale[1] = PlaneYHeightScale
def DeletePlanes():
for k in bpy.data.objects.keys():
if 'plane' in k:
obj = bpy.data.objects[k]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
###############################################################################
def SetupTicks(drawTicks = (True, True, True), Lx = 20.0, Ly = 20.0, Lz = 20.0, fontSize=1.0, stepSize=10.0):
inkM = bpy.data.materials['ink']
pos = 0.0
tickListLarge = []
tickDone = False
while not tickDone:
tickList = []
tickDone = True
if drawTicks[0] and pos <= Lx: # x ticks (x plane)
tickDone = False
bpy.ops.object.text_add(location=(pos, -fontSize*4.0, 0))
xTick = bpy.context.object
xTick.name = "tickX{:g}".format(pos)
xTick.data.body = "{:g}".format(pos)
tickList.append(xTick)
if drawTicks[1] and pos <= Ly: # y ticks (x plane)
tickDone = False
bpy.ops.object.text_add(location=(-fontSize*4.0, pos-fontSize/2.0, 0))
yTick = bpy.context.object
yTick.name = "tickY{:g}".format(pos)
yTick.data.body = "{:g}".format(pos)
tickList.append(yTick)
if drawTicks[2] and pos <= Lz: # z ticks (y plane)
tickDone = False
bpy.ops.object.text_add(location=(-fontSize*4.0, Ly, pos-fontSize/2.0))
zTick = bpy.context.object
zTick.name = "tickZ{:g}".format(pos)
zTick.data.body = "{:g}".format(pos)
zTick.rotation_euler[0] = 0.5*pi
tickList.append(zTick)
for tick in tickList: # assign material
tick.data.size = fontSize*4.0
tick.active_material = inkM
tick.data.font = times
tick.data.align = 'CENTER' # only horizontal
tickListLarge.append(tick)
pos += stepSize
return tickListLarge
def DeleteTicks():
for k in bpy.data.objects.keys():
if 'tick' in k:
obj = bpy.data.objects[k]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
def DeleteTick(x=None, y=None, z=None):
for a,prefix in zip([x,y,z],["tickX", "tickY", "tickZ"]):
if a is not None:
if type(a) is list or type(a) is tuple:
for t in a:
obj = bpy.data.objects[prefix + str(t)]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
else:
print("deleting "+prefix + str(a))
obj = bpy.data.objects[prefix + str(a)]
bpy.data.scenes[0].objects.unlink(obj)
bpy.data.objects.remove(obj)
def RotateX():
for t in bpy.data.objects.keys():
if 'tickX' in t:
obj = bpy.data.objects[t]
if obj.rotation_euler[0] == 0.0: # Default
obj.rotation_euler[0] = 0.5*pi
obj.location[2] -= 5
else:
obj.rotation_euler[0] = 0.0
obj.location[2] += 5
if 'legendX' in t:
obj = bpy.data.objects[t]
if obj.rotation_euler[0] == 0.0: # Default
obj.rotation_euler[0] = 0.5*pi
obj.location[2] -= 3
else:
obj.rotation_euler[0] = 0.0
obj.location[2] += 3
if 'legendScalebarText' in t:
obj = bpy.data.objects[t]
if obj.rotation_euler[0] == 0.0: # Default
obj.rotation_euler[0] = 0.5*pi
obj.location[2] -= 5
else:
obj.rotation_euler[0] = 0.0
obj.location[2] += 5
if t == 'legendZ':
obj = bpy.data.objects[t]
if obj.rotation_euler[0] == 0.0: # Default
obj.rotation_euler[0] = 0.5*pi
obj.location[2] += 3
else:
obj.rotation_euler[0] = 0.0
obj.location[2] -= 3
###############################################################################
def Render():
bpy.ops.render.render(write_still=True)
###############################################################################
def Say(text, verbosity=0):
if verbosity<=VERBOSITY:
if verbosity == 0:
printText = time.strftime('%H:%M:%S ') + text
else:
printText = time.strftime('%H:%M:%S-DEBUG: ') + text
print(printText)
def ParseVal(val):
if val.isnumeric():
return float(val)
elif val.lower() == 'true':
return True
elif val.lower() == 'false':
return False
    elif re.search(r'\(.*\)', val): # If val contains a function, evaluate it (TODO security hazard)
return eval(val)
else:
return val
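# Illustrative invocation (the .mat path is a placeholder; settings follow as key/value pairs):
#   blender --python render.py -- output/run1/model.mat VERBOSITY 1 drawAxisLegend false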
###############################################################################
###############################################################################
# Default settings for render.py (better to override from command line or rendermonitor.py)
settingsDict = {'camPos':None,
'camRot':array([65, 0, -25]),
'colourByGeneration':False,
'drawAxisLegend':True,
'drawPlane':True,
'drawPlaneGrid':(False, True, True),
'drawPlaneGridY':True,
'drawAnchor':True,
'drawFil':True,
'drawStick':True,
'gridStepSize':10.0,
'offset':array([0,0,0]),
'planeInfinite':False,
'configMaterial':None, # Set materials, pre-configured
'renderDir':'render',
'resolution_percentage':100, # in percent
'saveBlend':True,
'suppressRender':False,
'textSizeDivider':50,
}
VERBOSITY = 0
###############################################################################
if __name__ == '__main__': # Run if not imported as module
#%% Import model
argv = sys.argv[sys.argv.index("--")+1:] # Get all arguments after -- (Blender won't touch these)
matPath = argv[0]; # Get matPath
VERBOSITY = 0 if not 'VERBOSITY' in argv else int(argv[argv.index('VERBOSITY')+1]) # Get VERBOSITY if defined
model = scipy.io.loadmat(matPath, chars_as_strings=True, mat_dtype=False, squeeze_me=False, struct_as_record=False)['model'][0,0]
# Get overwriting dictionary for render.py and model class
modelFields = dir(model)
Say('Argument parsing, analysing {} possible setting values and {} model fields'.format(len(settingsDict), len(modelFields)), verbosity=3)
for key,val in zip(argv[1::2], argv[2::2]):
if key.startswith('model.'):
parsedKey = key[6:] # 6 is length of 'model.'
if parsedKey in modelFields:
Say("Found key = {} in model".format(parsedKey), verbosity=3)
parsedVal = ParseVal(val)
Say("parsedVal = " + str(parsedVal) + " of type " + str(type(parsedVal)), verbosity=4)
setattr(model, parsedKey, reshape(parsedVal, [len(parsedVal) if not type(parsedVal) is bool else 1,1]))
else:
                raise KeyError(key + " not found in model class")
else:
if key in settingsDict:
parsedVal = ParseVal(val)
settingsDict[key] = parsedVal
elif not key=='VERBOSITY': # VERBOSITY is already evaluated
                raise KeyError(key + " not found in settings dictionary")
#%% Get common parameters
Say("Analysing domain and setting common parameters", verbosity=1)
Lx = model.L[0,0] * 1e6
Ly = model.L[1,0] * 1e6
Lz = model.L[2,0] * 1e6
offset = settingsDict['offset']
#Lx = Ly = Lz = 200
# Throw warning if cells are outside of domain
NBallOutsideDomain = 0
for ball in model.ballArray[:,0]: # Must loop row-wise, that's how MATLAB works
pos = ball.pos[:,0]*1e6
if not np.all([(array([0,0,0]) < pos), (pos < array([Lx, Ly, Lz]))]):
NBallOutsideDomain += 1
if NBallOutsideDomain > 0:
Say("WARNING: {} balls are outside the domain".format(NBallOutsideDomain))
#%% Clean up geometry (default cube, camera, light source)
Say("Cleaning up geometry", verbosity=1)
bpy.ops.object.select_all(action='SELECT')
bpy.ops.object.delete(use_global=False)
#%%
# Set up world
Say("Setting up world", verbosity=1)
bpy.context.scene.world.horizon_color = (1, 1, 1) # White background
bpy.context.scene.render.resolution_x = 1920
bpy.context.scene.render.resolution_y = 1080
bpy.context.scene.render.resolution_percentage = settingsDict['resolution_percentage'] # Allows for quick scaling
"""
# Mist/fog, fading distant cells and nanowires
bpy.context.scene.world.mist_settings.falloff = 'LINEAR'
bpy.context.scene.world.mist_settings.intensity = 0
bpy.context.scene.world.mist_settings.height = 0
bpy.context.scene.world.mist_settings.start = 100
bpy.context.scene.world.mist_settings.depth = 40
bpy.context.scene.world.mist_settings.use_mist = True
"""
#%% Create camera
Say("Calculating and creating camera", verbosity=1)
    if settingsDict['camPos'] is None:
camPos = Lx/30* (array([-15, -46, 42])) # limited by camera width
else:
camPos= settingsDict['camPos']
camRot = (np.deg2rad(settingsDict['camRot'][0]), np.deg2rad(settingsDict['camRot'][1]), np.deg2rad(settingsDict['camRot'][2]))
CameraPerspSetup(location=camPos, rotation=camRot)
#%% Create light sources
Say("Creating light sources", verbosity=1)
# Sun
bpy.ops.object.lamp_add(type='SUN', location=(0, 0, 40))
sun = bpy.context.object
sun.data.shadow_method = 'RAY_SHADOW' # Sun casts shadow
sun.data.shadow_soft_size = 1.5 # Soft shadow, based on distance to light source/plane
sun.data.shadow_ray_samples = 10
#%% Materials # FIXME remove
DefineMaterials()
#%% Legend
if settingsDict['drawAxisLegend']:
Say("Drawing XYZ arrows/legend", verbosity=1)
SetupXYZLegend(fontSize=round((norm(camPos)/settingsDict['textSizeDivider'])**0.5))
#%% Draw planes with all bells and whistles
Say("Drawing plane, grid, etc", verbosity=1)
if settingsDict['drawPlane']:
if settingsDict['planeInfinite']:
radiusZPlane = Lx*50
else:
radiusZPlane = None
SetupPlanes(Lx=Lx, Ly=Ly, Lz=Lz, drawPlaneGrid=settingsDict['drawPlaneGrid'], radiusZPlane=radiusZPlane, stepSize=settingsDict['gridStepSize'])
SetupTicks(Lx=Lx, Ly=Ly, Lz=Lz, fontSize=round((norm(camPos)/settingsDict['textSizeDivider'])**0.5))
DeleteTick(x=0, y=[0, int(Ly)])
###############################################################################
#%% Draw cells
Say("Drawing cells", verbosity=1)
for iCell,cell in enumerate(model.cellArray[:,0]):
Say("\tCell = {}".format(iCell, ), verbosity=1)
if settingsDict['colourByGeneration']:
ancestor = cell
while np.where(model.cellArray[:,0]==ancestor)[0][0] > 3: # 0 through 3, because there will be 4 colours available
ancestor = model.cellArray[ancestor.mother[0][0],0]
cellType = int(np.where(model.cellArray==ancestor)[0][0])
Say("\t\tCell generation = " + str(cellType), verbosity=3)
else:
cellType = cell.type[0][0].astype(int)
if cell.type[0,0].astype(int) <= 1:
iBall = cell.ballArray[0,0].astype(int)
ball = model.ballArray[iBall,0]
pos = ball.pos[:,0] * 1e6
r = ball.radius[0,0] * 1e6
cellG = CreateSphere(pos, r, cellMaterial[cellType])
cellG.name = 'Sphere{:d}-{:04d}'.format(cellType, iCell)
else:
pos = np.empty([2,3])
r = np.empty(2)
for ii,iBall in enumerate(cell.ballArray[:,0].astype(int)):
ball = model.ballArray[iBall,0]
pos[ii,:] = ball.pos[:,0] * 1e6
r[ii] = ball.radius[0,0] * 1e6
cellG = CreateRod(pos, r, cellMaterial[cellType])
cellG.name = 'Rod{:d}-{:04d}'.format(cellType, iCell)
Say("fraction {} in rodLib".format(round(1-len(rodLib)/len(model.cellArray[:,0]),2)), verbosity=1)
if settingsDict['drawStick']:
stickM = bpy.data.materials['stick']
for iStick,stick in enumerate(model.stickSpringArray[:,0]):
Say("\tSticking spring = {}".format(iStick), verbosity=1)
pos = np.empty([2,3])
for ii,iBall in enumerate(stick.ballArray[:,0]):
ball = model.ballArray[int(iBall),0]
pos[ii,:] = ball.pos[:,0] * 1e6
stickG = CreateSpring(pos, 0.1, stickM.name)
stickG.name = 'Stick-{:04d}'.format(int(iStick))
if settingsDict['drawFil']:
filM = bpy.data.materials['fil']
for iFil,fil in enumerate(model.filSpringArray[:,0]):
Say("\tFilament spring = {}".format(iFil), verbosity=1)
pos = np.empty([2,3])
for ii,iBall in enumerate(fil.ballArray):
ball = model.ballArray[int(iBall),0]
pos[ii,:] = ball.pos[:,0] * 1e6
filG = CreateSpring(pos, 0.1, filM.name)
filG.name = 'Fil-{:04d}'.format(int(iFil))
if settingsDict['drawAnchor']:
anchorM = bpy.data.materials['anchor']
for iAnchor,anchor in enumerate(model.anchorSpringArray[:,0]):
Say("\tAnchoring spring = {}".format(iAnchor), verbosity=1)
iBall = anchor.ballArray[0,0]
ball = model.ballArray[int(iBall),0]
pos = ball.pos[:,0] * 1e6
anchorG = CreateSpring(np.concatenate([[pos, [pos[0],pos[1],0.0]]], 0), 0.1, anchorM.name)
anchorG.name = 'Anchor-{:04d}'.format(int(iAnchor))
#CreateSphere([Lx/2+5,Ly/2+5,5],10)
#CreateRod(array([[Lx/2,Ly/2,0],[Lx/2,Ly/2,4]]),array([1,1]))
#CreateSpring(array([[Lx/2,Ly/2,0],[Lx/2,Ly/2,4]]))
#CreateSphere(array([ -8.64662208, 14.65630608, 9.16357743]),0.3)
"""
# Create coloured nanowires
rx = cellRx * (4./3.*pi*(0.5e-6)**3)*6 * N_A
rx_max = rx.max()
rx_min = rx[rx>0].min()
cFact = 255/(rx_max-rx_min)
cMat = (cFact*(rx-rx_min)).astype(int)
for i0, i1 in cellPair:
cyl = CreateNanowire(cellPos[i0], cellPos[i1])
cyl.active_material = nanowire
cyl.color = cMap(cMat[i0,i1]) # Needs 4, last one being alpha (1 using cMap directly)
cyl.name = 'Nanowire '+str(i0)+'-'+str(i1)
"""
# Unselect everything, get ready for playing around
bpy.ops.object.select_all(action='DESELECT')
# Offset
Offset(offset)
###########################################################################
## Set viewport clipping to something reasonable
#bpy.context.space_data.clip_end = 2000
###############################################################################
#%% Save
Say("Saving", verbosity=1)
matName = os.path.splitext( matPath.split("/")[-1] )[0]
matDir = "/".join(matPath.split('/')[:-1])
if "/output/"+matName in matPath:
renderPath = matPath[:matPath.index("/output/"+matName)] + "/" + settingsDict['renderDir']
if not os.path.isdir(renderPath):
os.mkdir(renderPath)
else:
Say("WARNING: output directory not found, writing .png and .blend to same folder as .mat")
renderPath = matDir
if not os.path.isdir(renderPath):
os.mkdir(renderPath)
if settingsDict['saveBlend']:
bpy.ops.wm.save_as_mainfile(filepath = renderPath + "/" + matName + ".blend", check_existing=False)
#%% Render
bpy.data.scenes['Scene'].render.filepath = renderPath + "/" + matName + ".png"
if not settingsDict['suppressRender']:
Say("Rendering", verbosity=1)
Render()
###############################################################
| [
"bpy.context.scene.objects.link",
"numpy.arccos",
"numpy.array",
"bpy.ops.object.shade_smooth",
"numpy.linalg.norm",
"re.search",
"bpy.ops.object.delete",
"bpy.ops.object.camera_add",
"numpy.where",
"bpy.ops.object.lamp_add",
"os.path.isdir",
"bpy.data.objects.keys",
"bpy.data.objects.remove",
"bpy.ops.mesh.primitive_plane_add",
"os.mkdir",
"numpy.empty",
"numpy.concatenate",
"bpy.data.fonts.load",
"bpy.ops.wm.save_as_mainfile",
"numpy.round",
"numpy.add",
"bpy.ops.object.select_all",
"bpy.ops.object.modifier_add",
"bpy.ops.mesh.primitive_cone_add",
"numpy.deg2rad",
"bpy.ops.mesh.primitive_uv_sphere_add",
"bpy.ops.object.join",
"sys.argv.index",
"bpy.ops.object.text_add",
"time.strftime",
"bpy.ops.mesh.primitive_cylinder_add",
"bpy.data.materials.new",
"bpy.ops.render.render"
] | [((293, 346), 'bpy.data.fonts.load', 'bpy.data.fonts.load', (['"""/usr/share/fonts/TTF/times.ttf"""'], {}), "('/usr/share/fonts/TTF/times.ttf')\n", (312, 346), False, 'import bpy\n'), ((514, 524), 'numpy.array', 'array', (['pos'], {}), '(pos)\n', (519, 524), False, 'from numpy import array, pi, reshape, round\n'), ((556, 564), 'numpy.linalg.norm', 'norm', (['LV'], {}), '(LV)\n', (560, 564), False, 'from numpy.linalg import norm\n'), ((623, 648), 'numpy.round', 'round', (['(L / (2 * rMean))', '(1)'], {}), '(L / (2 * rMean), 1)\n', (628, 648), False, 'from numpy import array, pi, reshape, round\n'), ((3224, 3244), 'numpy.arccos', 'np.arccos', (['(LV[2] / L)'], {}), '(LV[2] / L)\n', (3233, 3244), True, 'import numpy as np\n'), ((3644, 3654), 'numpy.array', 'array', (['pos'], {}), '(pos)\n', (3649, 3654), False, 'from numpy import array, pi, reshape, round\n'), ((4412, 4422), 'numpy.array', 'array', (['pos'], {}), '(pos)\n', (4417, 4422), False, 'from numpy import array, pi, reshape, round\n'), ((4458, 4466), 'numpy.linalg.norm', 'norm', (['LV'], {}), '(LV)\n', (4462, 4466), False, 'from numpy.linalg import norm\n'), ((5440, 5460), 'numpy.arccos', 'np.arccos', (['(LV[2] / L)'], {}), '(LV[2] / L)\n', (5449, 5460), True, 'import numpy as np\n'), ((5801, 5824), 'bpy.data.objects.keys', 'bpy.data.objects.keys', ([], {}), '()\n', (5822, 5824), False, 'import bpy\n'), ((6216, 6245), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""ink"""'], {}), "('ink')\n", (6238, 6245), False, 'import bpy\n'), ((6401, 6432), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""white"""'], {}), "('white')\n", (6423, 6432), False, 'import bpy\n'), ((6727, 6757), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""grey"""'], {}), "('grey')\n", (6749, 6757), False, 'import bpy\n'), ((7025, 7055), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""wire"""'], {}), "('wire')\n", (7047, 7055), False, 'import bpy\n'), ((7205, 7238), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cellDss"""'], {}), "('cellDss')\n", (7227, 7238), False, 'import bpy\n'), ((7544, 7578), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cellAnme"""'], {}), "('cellAnme')\n", (7566, 7578), False, 'import bpy\n'), ((7878, 7909), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cell0"""'], {}), "('cell0')\n", (7900, 7909), False, 'import bpy\n'), ((8206, 8237), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cell1"""'], {}), "('cell1')\n", (8228, 8237), False, 'import bpy\n'), ((8541, 8572), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cell2"""'], {}), "('cell2')\n", (8563, 8572), False, 'import bpy\n'), ((8871, 8902), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""cell3"""'], {}), "('cell3')\n", (8893, 8902), False, 'import bpy\n'), ((9208, 9239), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""stick"""'], {}), "('stick')\n", (9230, 9239), False, 'import bpy\n'), ((9640, 9669), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""fil"""'], {}), "('fil')\n", (9662, 9669), False, 'import bpy\n'), ((9837, 9869), 'bpy.data.materials.new', 'bpy.data.materials.new', (['"""anchor"""'], {}), "('anchor')\n", (9859, 9869), False, 'import bpy\n'), ((10218, 10281), 'bpy.ops.object.camera_add', 'bpy.ops.object.camera_add', ([], {'location': 'location', 'rotation': 'rotation'}), '(location=location, rotation=rotation)\n', (10243, 10281), False, 'import bpy\n'), ((10910, 10967), 'bpy.ops.object.lamp_add', 'bpy.ops.object.lamp_add', ([], {'type': 
'"""POINT"""', 'location': '(0, 0, 5)'}), "(type='POINT', location=(0, 0, 5))\n", (10933, 10967), False, 'import bpy\n'), ((11215, 11259), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (11240, 11259), False, 'import bpy\n'), ((11956, 12015), 'bpy.ops.object.camera_add', 'bpy.ops.object.camera_add', ([], {'location': 'camPos', 'rotation': 'camRot'}), '(location=camPos, rotation=camRot)\n', (11981, 12015), False, 'import bpy\n'), ((12440, 12494), 'bpy.ops.object.lamp_add', 'bpy.ops.object.lamp_add', ([], {'type': '"""POINT"""', 'location': 'camPos'}), "(type='POINT', location=camPos)\n", (12463, 12494), False, 'import bpy\n'), ((12978, 13037), 'bpy.ops.object.camera_add', 'bpy.ops.object.camera_add', ([], {'location': 'camPos', 'rotation': 'camRot'}), '(location=camPos, rotation=camRot)\n', (13003, 13037), False, 'import bpy\n'), ((16097, 16120), 'bpy.data.objects.keys', 'bpy.data.objects.keys', ([], {}), '()\n', (16118, 16120), False, 'import bpy\n'), ((16404, 16524), 'bpy.ops.mesh.primitive_cylinder_add', 'bpy.ops.mesh.primitive_cylinder_add', ([], {'radius': '(0.2 * fontSize)', 'depth': 'length', 'location': 'location', 'rotation': '(0, pi / 2, 0)'}), '(radius=0.2 * fontSize, depth=length,\n location=location, rotation=(0, pi / 2, 0))\n', (16439, 16524), False, 'import bpy\n'), ((16521, 16550), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (16548, 16550), False, 'import bpy\n'), ((19389, 19412), 'bpy.data.objects.keys', 'bpy.data.objects.keys', ([], {}), '()\n', (19410, 19412), False, 'import bpy\n'), ((21416, 21439), 'bpy.data.objects.keys', 'bpy.data.objects.keys', ([], {}), '()\n', (21437, 21439), False, 'import bpy\n'), ((22234, 22257), 'bpy.data.objects.keys', 'bpy.data.objects.keys', ([], {}), '()\n', (22255, 22257), False, 'import bpy\n'), ((23668, 23707), 'bpy.ops.render.render', 'bpy.ops.render.render', ([], {'write_still': '(True)'}), '(write_still=True)\n', (23689, 23707), False, 'import bpy\n'), ((24703, 24722), 'numpy.array', 'array', (['[65, 0, -25]'], {}), '([65, 0, -25])\n', (24708, 24722), False, 'from numpy import array, pi, reshape, round\n'), ((25096, 25112), 'numpy.array', 'array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (25101, 25112), False, 'from numpy import array, pi, reshape, round\n'), ((28102, 28144), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""SELECT"""'}), "(action='SELECT')\n", (28127, 28144), False, 'import bpy\n'), ((28149, 28188), 'bpy.ops.object.delete', 'bpy.ops.object.delete', ([], {'use_global': '(False)'}), '(use_global=False)\n', (28170, 28188), False, 'import bpy\n'), ((29534, 29590), 'bpy.ops.object.lamp_add', 'bpy.ops.object.lamp_add', ([], {'type': '"""SUN"""', 'location': '(0, 0, 40)'}), "(type='SUN', location=(0, 0, 40))\n", (29557, 29590), False, 'import bpy\n'), ((34740, 34784), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (34765, 34784), False, 'import bpy\n'), ((895, 982), 'bpy.ops.mesh.primitive_cylinder_add', 'bpy.ops.mesh.primitive_cylinder_add', ([], {'depth': 'L', 'location': '(0, 0, L / 2)', 'radius': 'rMean'}), '(depth=L, location=(0, 0, L / 2), radius\n =rMean)\n', (930, 982), False, 'import bpy\n'), ((1094, 1162), 'bpy.ops.mesh.primitive_uv_sphere_add', 'bpy.ops.mesh.primitive_uv_sphere_add', ([], {'location': '(0, 0, L)', 'size': 'rMean'}), '(location=(0, 0, L), size=rMean)\n', (1130, 1162), False, 'import bpy\n'), 
((1206, 1274), 'bpy.ops.mesh.primitive_uv_sphere_add', 'bpy.ops.mesh.primitive_uv_sphere_add', ([], {'location': '(0, 0, 0)', 'size': 'rMean'}), '(location=(0, 0, 0), size=rMean)\n', (1242, 1274), False, 'import bpy\n'), ((1421, 1465), 'bpy.ops.object.select_all', 'bpy.ops.object.select_all', ([], {'action': '"""DESELECT"""'}), "(action='DESELECT')\n", (1446, 1465), False, 'import bpy\n'), ((1632, 1653), 'bpy.ops.object.join', 'bpy.ops.object.join', ([], {}), '()\n', (1651, 1653), False, 'import bpy\n'), ((1702, 1748), 'bpy.ops.object.modifier_add', 'bpy.ops.object.modifier_add', ([], {'type': '"""EDGE_SPLIT"""'}), "(type='EDGE_SPLIT')\n", (1729, 1748), False, 'import bpy\n'), ((1807, 1836), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (1834, 1836), False, 'import bpy\n'), ((2730, 2765), 'bpy.context.scene.objects.link', 'bpy.context.scene.objects.link', (['rod'], {}), '(rod)\n', (2760, 2765), False, 'import bpy\n'), ((3793, 3851), 'bpy.ops.mesh.primitive_uv_sphere_add', 'bpy.ops.mesh.primitive_uv_sphere_add', ([], {'location': 'pos', 'size': 'r'}), '(location=pos, size=r)\n', (3829, 3851), False, 'import bpy\n'), ((3896, 3925), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (3923, 3925), False, 'import bpy\n'), ((4288, 4326), 'bpy.context.scene.objects.link', 'bpy.context.scene.objects.link', (['sphere'], {}), '(sphere)\n', (4318, 4326), False, 'import bpy\n'), ((4584, 4670), 'bpy.ops.mesh.primitive_cylinder_add', 'bpy.ops.mesh.primitive_cylinder_add', ([], {'depth': 'L', 'location': '(0, 0, L / 2)', 'radius': '(0.15)'}), '(depth=L, location=(0, 0, L / 2), radius\n =0.15)\n', (4619, 4670), False, 'import bpy\n'), ((4705, 4734), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (4732, 4734), False, 'import bpy\n'), ((5167, 5202), 'bpy.context.scene.objects.link', 'bpy.context.scene.objects.link', (['cyl'], {}), '(cyl)\n', (5197, 5202), False, 'import bpy\n'), ((13579, 13607), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (13602, 13607), False, 'import bpy\n'), ((14814, 14919), 'bpy.ops.mesh.primitive_cylinder_add', 'bpy.ops.mesh.primitive_cylinder_add', ([], {'radius': 'axLegCylR', 'depth': 'axLegCylH', 'location': 'locCyl', 'rotation': 'rot'}), '(radius=axLegCylR, depth=axLegCylH,\n location=locCyl, rotation=rot)\n', (14849, 14919), False, 'import bpy\n'), ((14924, 14953), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (14951, 14953), False, 'import bpy\n'), ((15061, 15169), 'bpy.ops.mesh.primitive_cone_add', 'bpy.ops.mesh.primitive_cone_add', ([], {'radius1': '(axLegCylR * 2)', 'depth': 'axLegConeH', 'location': 'locCone', 'rotation': 'rot'}), '(radius1=axLegCylR * 2, depth=axLegConeH,\n location=locCone, rotation=rot)\n', (15092, 15169), False, 'import bpy\n'), ((15172, 15201), 'bpy.ops.object.shade_smooth', 'bpy.ops.object.shade_smooth', ([], {}), '()\n', (15199, 15201), False, 'import bpy\n'), ((18140, 18229), 'bpy.ops.mesh.primitive_plane_add', 'bpy.ops.mesh.primitive_plane_add', ([], {'radius': 'planeRadius', 'location': '(Lx / 2, Ly / 2, -0.1)'}), '(radius=planeRadius, location=(Lx / 2, Ly /\n 2, -0.1))\n', (18172, 18229), False, 'import bpy\n'), ((29267, 29304), 'numpy.deg2rad', 'np.deg2rad', (["settingsDict['camRot'][0]"], {}), "(settingsDict['camRot'][0])\n", (29277, 29304), True, 'import numpy as np\n'), ((29306, 29343), 'numpy.deg2rad', 'np.deg2rad', (["settingsDict['camRot'][1]"], {}), 
"(settingsDict['camRot'][1])\n", (29316, 29343), True, 'import numpy as np\n'), ((29345, 29382), 'numpy.deg2rad', 'np.deg2rad', (["settingsDict['camRot'][2]"], {}), "(settingsDict['camRot'][2])\n", (29355, 29382), True, 'import numpy as np\n'), ((35619, 35644), 'os.path.isdir', 'os.path.isdir', (['renderPath'], {}), '(renderPath)\n', (35632, 35644), False, 'import sys, os, time, re\n'), ((35658, 35678), 'os.mkdir', 'os.mkdir', (['renderPath'], {}), '(renderPath)\n', (35666, 35678), False, 'import sys, os, time, re\n'), ((35734, 35835), 'bpy.ops.wm.save_as_mainfile', 'bpy.ops.wm.save_as_mainfile', ([], {'filepath': "(renderPath + '/' + matName + '.blend')", 'check_existing': '(False)'}), "(filepath=renderPath + '/' + matName + '.blend',\n check_existing=False)\n", (35761, 35835), False, 'import bpy\n'), ((14414, 14457), 'numpy.add', 'np.add', (['(axLegCylH / 2, 0.0, 0.0)', 'location'], {}), '((axLegCylH / 2, 0.0, 0.0), location)\n', (14420, 14457), True, 'import numpy as np\n'), ((14471, 14512), 'numpy.add', 'np.add', (['(0.0, axLegCylH / 2, 0)', 'location'], {}), '((0.0, axLegCylH / 2, 0), location)\n', (14477, 14512), True, 'import numpy as np\n'), ((14525, 14568), 'numpy.add', 'np.add', (['(0.0, 0.0, axLegCylH / 2)', 'location'], {}), '((0.0, 0.0, axLegCylH / 2), location)\n', (14531, 14568), True, 'import numpy as np\n'), ((14583, 14639), 'numpy.add', 'np.add', (['(axLegCylH + axLegConeH / 2, 0.0, 0.0)', 'location'], {}), '((axLegCylH + axLegConeH / 2, 0.0, 0.0), location)\n', (14589, 14639), True, 'import numpy as np\n'), ((14640, 14694), 'numpy.add', 'np.add', (['(0.0, axLegCylH + axLegConeH / 2, 0)', 'location'], {}), '((0.0, axLegCylH + axLegConeH / 2, 0), location)\n', (14646, 14694), True, 'import numpy as np\n'), ((14694, 14750), 'numpy.add', 'np.add', (['(0.0, 0.0, axLegCylH + axLegConeH / 2)', 'location'], {}), '((0.0, 0.0, axLegCylH + axLegConeH / 2), location)\n', (14700, 14750), True, 'import numpy as np\n'), ((15357, 15414), 'numpy.add', 'np.add', (['(textSpacing, -fontSize * 5.0 * 0.5, 0)', 'location'], {}), '((textSpacing, -fontSize * 5.0 * 0.5, 0), location)\n', (15363, 15414), True, 'import numpy as np\n'), ((15533, 15590), 'numpy.add', 'np.add', (['(-fontSize * 5.0 * 0.5, textSpacing, 0)', 'location'], {}), '((-fontSize * 5.0 * 0.5, textSpacing, 0), location)\n', (15539, 15590), True, 'import numpy as np\n'), ((15709, 15776), 'numpy.add', 'np.add', (['(-fontSize * 5.0 * 0.5, -fontSize * 5.0 * 0.5, 0)', 'location'], {}), '((-fontSize * 5.0 * 0.5, -fontSize * 5.0 * 0.5, 0), location)\n', (15715, 15776), True, 'import numpy as np\n'), ((16249, 16277), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (16272, 16277), False, 'import bpy\n'), ((19540, 19568), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (19563, 19568), False, 'import bpy\n'), ((20061, 20120), 'bpy.ops.object.text_add', 'bpy.ops.object.text_add', ([], {'location': '(pos, -fontSize * 4.0, 0)'}), '(location=(pos, -fontSize * 4.0, 0))\n', (20084, 20120), False, 'import bpy\n'), ((20405, 20481), 'bpy.ops.object.text_add', 'bpy.ops.object.text_add', ([], {'location': '(-fontSize * 4.0, pos - fontSize / 2.0, 0)'}), '(location=(-fontSize * 4.0, pos - fontSize / 2.0, 0))\n', (20428, 20481), False, 'import bpy\n'), ((20762, 20839), 'bpy.ops.object.text_add', 'bpy.ops.object.text_add', ([], {'location': '(-fontSize * 4.0, Ly, pos - fontSize / 2.0)'}), '(location=(-fontSize * 4.0, Ly, pos - fontSize / 2.0))\n', (20785, 20839), False, 'import bpy\n'), 
((21566, 21594), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (21589, 21594), False, 'import bpy\n'), ((29132, 29153), 'numpy.array', 'array', (['[-15, -46, 42]'], {}), '([-15, -46, 42])\n', (29137, 29153), False, 'from numpy import array, pi, reshape, round\n'), ((31835, 31851), 'numpy.empty', 'np.empty', (['[2, 3]'], {}), '([2, 3])\n', (31843, 31851), True, 'import numpy as np\n'), ((31869, 31880), 'numpy.empty', 'np.empty', (['(2)'], {}), '(2)\n', (31877, 31880), True, 'import numpy as np\n'), ((32582, 32598), 'numpy.empty', 'np.empty', (['[2, 3]'], {}), '([2, 3])\n', (32590, 32598), True, 'import numpy as np\n'), ((33106, 33122), 'numpy.empty', 'np.empty', (['[2, 3]'], {}), '([2, 3])\n', (33114, 33122), True, 'import numpy as np\n'), ((35402, 35427), 'os.path.isdir', 'os.path.isdir', (['renderPath'], {}), '(renderPath)\n', (35415, 35427), False, 'import sys, os, time, re\n'), ((35441, 35461), 'os.mkdir', 'os.mkdir', (['renderPath'], {}), '(renderPath)\n', (35449, 35461), False, 'import sys, os, time, re\n'), ((16730, 16748), 'numpy.array', 'np.array', (['location'], {}), '(location)\n', (16738, 16748), True, 'import numpy as np\n'), ((16749, 16783), 'numpy.array', 'np.array', (['(length / 2.0, 0.0, 0.0)'], {}), '((length / 2.0, 0.0, 0.0))\n', (16757, 16783), True, 'import numpy as np\n'), ((17057, 17075), 'numpy.array', 'np.array', (['location'], {}), '(location)\n', (17065, 17075), True, 'import numpy as np\n'), ((17076, 17110), 'numpy.array', 'np.array', (['(length / 2.0, 0.0, 0.0)'], {}), '((length / 2.0, 0.0, 0.0))\n', (17084, 17110), True, 'import numpy as np\n'), ((17379, 17397), 'numpy.array', 'np.array', (['location'], {}), '(location)\n', (17387, 17397), True, 'import numpy as np\n'), ((17398, 17430), 'numpy.array', 'np.array', (['[0, fontSize * 5.0, 0]'], {}), '([0, fontSize * 5.0, 0])\n', (17406, 17430), True, 'import numpy as np\n'), ((22176, 22204), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (22199, 22204), False, 'import bpy\n'), ((23914, 23942), 'time.strftime', 'time.strftime', (['"""%H:%M:%S """'], {}), "('%H:%M:%S ')\n", (23927, 23942), False, 'import sys, os, time, re\n'), ((23989, 24023), 'time.strftime', 'time.strftime', (['"""%H:%M:%S-DEBUG: """'], {}), "('%H:%M:%S-DEBUG: ')\n", (24002, 24023), False, 'import sys, os, time, re\n'), ((24246, 24272), 're.search', 're.search', (['"""\\\\(.*\\\\)"""', 'val'], {}), "('\\\\(.*\\\\)', val)\n", (24255, 24272), False, 'import sys, os, time, re\n'), ((25702, 25722), 'sys.argv.index', 'sys.argv.index', (['"""--"""'], {}), "('--')\n", (25716, 25722), False, 'import sys, os, time, re\n'), ((33783, 33832), 'numpy.concatenate', 'np.concatenate', (['[[pos, [pos[0], pos[1], 0.0]]]', '(0)'], {}), '([[pos, [pos[0], pos[1], 0.0]]], 0)\n', (33797, 33832), True, 'import numpy as np\n'), ((6048, 6070), 'numpy.array', 'np.array', (['obj.location'], {}), '(obj.location)\n', (6056, 6070), True, 'import numpy as np\n'), ((21951, 21979), 'bpy.data.objects.remove', 'bpy.data.objects.remove', (['obj'], {}), '(obj)\n', (21974, 21979), False, 'import bpy\n'), ((27781, 27797), 'numpy.array', 'array', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (27786, 27797), False, 'from numpy import array, pi, reshape, round\n'), ((27811, 27830), 'numpy.array', 'array', (['[Lx, Ly, Lz]'], {}), '([Lx, Ly, Lz])\n', (27816, 27830), False, 'from numpy import array, pi, reshape, round\n'), ((31061, 31104), 'numpy.where', 'np.where', (['(model.cellArray[:, 0] == ancestor)'], {}), '(model.cellArray[:, 
0] == ancestor)\n', (31069, 31104), True, 'import numpy as np\n'), ((31272, 31309), 'numpy.where', 'np.where', (['(model.cellArray == ancestor)'], {}), '(model.cellArray == ancestor)\n', (31280, 31309), True, 'import numpy as np\n'), ((30079, 30091), 'numpy.linalg.norm', 'norm', (['camPos'], {}), '(camPos)\n', (30083, 30091), False, 'from numpy.linalg import norm\n'), ((30606, 30618), 'numpy.linalg.norm', 'norm', (['camPos'], {}), '(camPos)\n', (30610, 30618), False, 'from numpy.linalg import norm\n')] |
from copy import deepcopy
from datetime import datetime
from celery import current_app as celery
from celery.utils.log import get_task_logger
from celeryservice import celeryconfig
from messenger.utils.requests_util import update_request, query_request, do_request
from messenger.utils.response_util import RET
from celeryservice.lib.job.case_handler import RunCaseHandler
logger = get_task_logger('manage')
@celery.task(bind=True)
def run_case(self, user, body, env_params, suites_cases, pmachine_pool):
return RunCaseHandler(user, logger, self, body).work(
env_params, suites_cases, pmachine_pool,
)
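# Helper: report a finished job's result back to the TaskMilestones endpoint;
# HTTP or parsing problems are logged rather than re-raised.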
def _callback_task_job_result(job_id, auth, taskmilestone_id, status):
try:
_resp = dict()
_r = do_request(
method="put",
url="https://{}/api/v1/task/milestones/{}".format(
celeryconfig.server_addr,
taskmilestone_id
),
body={
"job_id": job_id,
"result": status,
},
headers={
"content-type": "application/json;charset=utf-8",
"authorization": auth,
},
obj=_resp,
verify=True if celeryconfig.ca_verify == "True"\
else celeryconfig.server_cert_path
)
if _r == 0 and _resp.get("error_code") == RET.OK:
logger.info(
"Task job has been call back => " + _resp.get("error_msg")
)
else:
logger.error(
"Error in calling back to TaskMilestones => " + _resp.get(
"error_msg"
)
)
except (AttributeError, TypeError, RuntimeError, KeyError) as e:
logger.error(
"Error in calling back to TaskMilestones => " + str(e)
)
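# Callback task: aggregate each subtask's case counts, mark the job DONE (or
# BLOCK if any subtask blocked), persist the updated job through the job API,
# then report the result to the task milestone when an id is supplied.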
@celery.task(bind=True)
def job_result_callback(self, results, auth, job_id=None, taskmilestone_id=None):
try:
job = query_request(
"/api/v1/job/{}".format(
job_id
),
None,
auth
)
if not job:
raise RuntimeError("Job has already not existed")
job["running_time"] = 0
for result in results:
if result.get("status") == "BLOCK":
raise RuntimeError(
"one of subtask blocked: {}, because {}".format(
result.get("name"),
result.get("remark"),
)
)
job["success_cases"] += result.get("success_cases")
job["fail_cases"] += result.get("fail_cases")
job["running_time"] = max(
job["running_time"],
result.get("running_time")
)
job["status"] = "DONE"
job["end_time"] = datetime.now()
if job["total"] == job["success_cases"]:
job["result"] = "success"
else:
job["result"] = "fail"
except RuntimeError as e:
job["result"] = "fail"
job["status"] = "BLOCK"
job["remark"] = str(e)
finally:
status = job.get("status")
_body = deepcopy(job)
_body.pop("id")
if isinstance(_body.get("master"), list):
_body["master"] = ','.join(_body.get("master"))
update_request(
"/api/v1/job/{}".format(
job_id,
),
_body,
auth
)
if taskmilestone_id is not None:
_callback_task_job_result(job_id, auth, taskmilestone_id, status)
| [
"celery.current_app.task",
"celery.utils.log.get_task_logger",
"datetime.datetime.now",
"celeryservice.lib.job.case_handler.RunCaseHandler",
"copy.deepcopy"
] | [((385, 410), 'celery.utils.log.get_task_logger', 'get_task_logger', (['"""manage"""'], {}), "('manage')\n", (400, 410), False, 'from celery.utils.log import get_task_logger\n'), ((414, 436), 'celery.current_app.task', 'celery.task', ([], {'bind': '(True)'}), '(bind=True)\n', (425, 436), True, 'from celery import current_app as celery\n'), ((1980, 2002), 'celery.current_app.task', 'celery.task', ([], {'bind': '(True)'}), '(bind=True)\n', (1991, 2002), True, 'from celery import current_app as celery\n'), ((3009, 3023), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3021, 3023), False, 'from datetime import datetime\n'), ((3364, 3377), 'copy.deepcopy', 'deepcopy', (['job'], {}), '(job)\n', (3372, 3377), False, 'from copy import deepcopy\n'), ((521, 561), 'celeryservice.lib.job.case_handler.RunCaseHandler', 'RunCaseHandler', (['user', 'logger', 'self', 'body'], {}), '(user, logger, self, body)\n', (535, 561), False, 'from celeryservice.lib.job.case_handler import RunCaseHandler\n')] |
#!/usr/bin/python3
import os, sys, subprocess
if len(sys.argv) != 3:
print(f"{sys.argv[0]} <url filename path> <output file path>\nEx: {sys.argv[0]} url.txt output.txt\n")
sys.exit(1)
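# Run the Node CLI once per line of the input file and append the captured
# stdout/stderr of each run to the output file.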
file = open(sys.argv[2], "w")
with open(sys.argv[1]) as f:
for i in f:
cmd = "node src/drivers/npm/cli.js " + i
print(f"Scanning IP: {i}")
try:
output = subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT).decode()
print(output)
file.write(output)
except subprocess.CalledProcessError:
print("Execution of '%s' failed!\n" % cmd)
sys.exit(1)
file.close() | [
"subprocess.check_output",
"sys.exit"
] | [((175, 186), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (183, 186), False, 'import os, sys, subprocess\n'), ((557, 568), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (565, 568), False, 'import os, sys, subprocess\n'), ((353, 419), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'shell': '(True)', 'stderr': 'subprocess.STDOUT'}), '(cmd, shell=True, stderr=subprocess.STDOUT)\n', (376, 419), False, 'import os, sys, subprocess\n')] |
import numpy as np
import matplotlib.pyplot as plt
a0s = np.load('a0s_r2.npy')
a1s = np.load('a1s_r2.npy')
ls = np.load('ls_r2.npy').transpose()
trace = np.load('run2.npy')[:, 0:2]
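# a0s/a1s hold the parameter grid, ls the loss evaluated on that grid
# (transposed for plt.contour), and trace the (A[0], A[1]) points that are
# scattered and numbered 0-9 below.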
levels = np.linspace(8.5, 1600)
c2 = plt.contour(a0s, a1s, ls, levels)
plt.scatter(2.0, 1.3, c='r', marker='x')
plt.scatter(trace[:,0], trace[:,1])
plt.title('Level sets of the loss function for D\'')
plt.xlabel('A[0]')
plt.ylabel('A[1]')
plt.xlim(-3, 3)
plt.ylim(-1, 5)
# plt.clabel(c2, inline=1, fontsize=10)
ns = np.arange(10)
for i, txt in enumerate(ns):
plt.annotate(txt, (trace[i,0], trace[i,1]))
plt.show()
a0s = np.load('a0s_r1.npy')
a1s = np.load('a1s_r1.npy')
ls = np.load('ls_r1.npy').transpose()
trace = np.load('run1.npy')[:, 0:2]
# trace = trace[0:10, :]
levels = np.linspace(8.5, 5000)
c1 = plt.contour(a0s, a1s, ls, levels)
plt.scatter(2.0, 0.013, c='r', marker='x')
plt.scatter(trace[:,0], trace[:,1])
plt.title('Level sets of the loss function for D')
plt.xlabel('A[0]')
plt.ylabel('A[1]')
plt.xlim(1.4985, 1.5010)
plt.ylim(-0.05, 0.06)
# plt.clabel(c1, inline=1, fontsize=10)
ns = np.arange(10)
for i, txt in enumerate(ns):
plt.annotate(txt, (trace[i,0], trace[i,1]))
plt.show()
| [
"matplotlib.pyplot.ylabel",
"numpy.arange",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.contour",
"numpy.linspace",
"matplotlib.pyplot.annotate",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"numpy.load",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.show"
] | [((58, 79), 'numpy.load', 'np.load', (['"""a0s_r2.npy"""'], {}), "('a0s_r2.npy')\n", (65, 79), True, 'import numpy as np\n'), ((86, 107), 'numpy.load', 'np.load', (['"""a1s_r2.npy"""'], {}), "('a1s_r2.npy')\n", (93, 107), True, 'import numpy as np\n'), ((193, 215), 'numpy.linspace', 'np.linspace', (['(8.5)', '(1600)'], {}), '(8.5, 1600)\n', (204, 215), True, 'import numpy as np\n'), ((221, 254), 'matplotlib.pyplot.contour', 'plt.contour', (['a0s', 'a1s', 'ls', 'levels'], {}), '(a0s, a1s, ls, levels)\n', (232, 254), True, 'import matplotlib.pyplot as plt\n'), ((255, 295), 'matplotlib.pyplot.scatter', 'plt.scatter', (['(2.0)', '(1.3)'], {'c': '"""r"""', 'marker': '"""x"""'}), "(2.0, 1.3, c='r', marker='x')\n", (266, 295), True, 'import matplotlib.pyplot as plt\n'), ((296, 333), 'matplotlib.pyplot.scatter', 'plt.scatter', (['trace[:, 0]', 'trace[:, 1]'], {}), '(trace[:, 0], trace[:, 1])\n', (307, 333), True, 'import matplotlib.pyplot as plt\n'), ((332, 383), 'matplotlib.pyplot.title', 'plt.title', (['"""Level sets of the loss function for D\'"""'], {}), '("Level sets of the loss function for D\'")\n', (341, 383), True, 'import matplotlib.pyplot as plt\n'), ((385, 403), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""A[0]"""'], {}), "('A[0]')\n", (395, 403), True, 'import matplotlib.pyplot as plt\n'), ((404, 422), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""A[1]"""'], {}), "('A[1]')\n", (414, 422), True, 'import matplotlib.pyplot as plt\n'), ((423, 438), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-3)', '(3)'], {}), '(-3, 3)\n', (431, 438), True, 'import matplotlib.pyplot as plt\n'), ((439, 454), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1)', '(5)'], {}), '(-1, 5)\n', (447, 454), True, 'import matplotlib.pyplot as plt\n'), ((500, 513), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (509, 513), True, 'import numpy as np\n'), ((591, 601), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (599, 601), True, 'import matplotlib.pyplot as plt\n'), ((611, 632), 'numpy.load', 'np.load', (['"""a0s_r1.npy"""'], {}), "('a0s_r1.npy')\n", (618, 632), True, 'import numpy as np\n'), ((639, 660), 'numpy.load', 'np.load', (['"""a1s_r1.npy"""'], {}), "('a1s_r1.npy')\n", (646, 660), True, 'import numpy as np\n'), ((771, 793), 'numpy.linspace', 'np.linspace', (['(8.5)', '(5000)'], {}), '(8.5, 5000)\n', (782, 793), True, 'import numpy as np\n'), ((799, 832), 'matplotlib.pyplot.contour', 'plt.contour', (['a0s', 'a1s', 'ls', 'levels'], {}), '(a0s, a1s, ls, levels)\n', (810, 832), True, 'import matplotlib.pyplot as plt\n'), ((833, 875), 'matplotlib.pyplot.scatter', 'plt.scatter', (['(2.0)', '(0.013)'], {'c': '"""r"""', 'marker': '"""x"""'}), "(2.0, 0.013, c='r', marker='x')\n", (844, 875), True, 'import matplotlib.pyplot as plt\n'), ((876, 913), 'matplotlib.pyplot.scatter', 'plt.scatter', (['trace[:, 0]', 'trace[:, 1]'], {}), '(trace[:, 0], trace[:, 1])\n', (887, 913), True, 'import matplotlib.pyplot as plt\n'), ((912, 962), 'matplotlib.pyplot.title', 'plt.title', (['"""Level sets of the loss function for D"""'], {}), "('Level sets of the loss function for D')\n", (921, 962), True, 'import matplotlib.pyplot as plt\n'), ((963, 981), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""A[0]"""'], {}), "('A[0]')\n", (973, 981), True, 'import matplotlib.pyplot as plt\n'), ((982, 1000), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""A[1]"""'], {}), "('A[1]')\n", (992, 1000), True, 'import matplotlib.pyplot as plt\n'), ((1001, 1024), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(1.4985)', '(1.501)'], {}), 
'(1.4985, 1.501)\n', (1009, 1024), True, 'import matplotlib.pyplot as plt\n'), ((1026, 1047), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.05)', '(0.06)'], {}), '(-0.05, 0.06)\n', (1034, 1047), True, 'import matplotlib.pyplot as plt\n'), ((1094, 1107), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (1103, 1107), True, 'import numpy as np\n'), ((1186, 1196), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1194, 1196), True, 'import matplotlib.pyplot as plt\n'), ((155, 174), 'numpy.load', 'np.load', (['"""run2.npy"""'], {}), "('run2.npy')\n", (162, 174), True, 'import numpy as np\n'), ((547, 592), 'matplotlib.pyplot.annotate', 'plt.annotate', (['txt', '(trace[i, 0], trace[i, 1])'], {}), '(txt, (trace[i, 0], trace[i, 1]))\n', (559, 592), True, 'import matplotlib.pyplot as plt\n'), ((708, 727), 'numpy.load', 'np.load', (['"""run1.npy"""'], {}), "('run1.npy')\n", (715, 727), True, 'import numpy as np\n'), ((1141, 1186), 'matplotlib.pyplot.annotate', 'plt.annotate', (['txt', '(trace[i, 0], trace[i, 1])'], {}), '(txt, (trace[i, 0], trace[i, 1]))\n', (1153, 1186), True, 'import matplotlib.pyplot as plt\n'), ((113, 133), 'numpy.load', 'np.load', (['"""ls_r2.npy"""'], {}), "('ls_r2.npy')\n", (120, 133), True, 'import numpy as np\n'), ((666, 686), 'numpy.load', 'np.load', (['"""ls_r1.npy"""'], {}), "('ls_r1.npy')\n", (673, 686), True, 'import numpy as np\n')] |
# coding=utf-8
__source__ = 'https://leetcode.com/problems/longest-word-in-dictionary/'
# Time: O(m) sum of the length of words[i]
# Space: O(m) the space used by our trie
#
# Description: Leetcode # 720. Longest Word in Dictionary
#
# Given a list of strings words representing an English Dictionary,
# find the longest word in words that can be built one character at a time by other words in words.
# If there is more than one possible answer,
# return the longest word with the smallest lexicographical order.
#
# If there is no answer, return the empty string.
# Example 1:
# Input:
# words = ["w","wo","wor","worl", "world"]
# Output: "world"
# Explanation:
# The word "world" can be built one character at a time by "w", "wo", "wor", and "worl".
# Example 2:
# Input:
# words = ["a", "banana", "app", "appl", "ap", "apply", "apple"]
# Output: "apple"
# Explanation:
# Both "apply" and "apple" can be built from other words in the dictionary.
# However, "apple" is lexicographically smaller than "apply".
# Note:
#
# All the strings in the input will only contain lowercase letters.
# The length of words will be in the range [1, 1000].
# The length of words[i] will be in the range [1, 30].
#
import unittest
import collections
#32 ms 100%
class Solution(object):
def longestWord(self, words):
ans=""
wordset=set(words)
for word in words:
if len(word)>len(ans) or (len(ans)==len(word) and word<ans):
if all(word[:k] in wordset for k in xrange(1,len(word))):
ans=word
return ans
#48ms 65.44%
class Solution2(object):
def longestWord(self, words):
"""
:type words: List[str]
:rtype: str
"""
ans = ""
wordset = set(words)
words.sort(key = lambda c : (-len(c), c))
for word in words:
if all(word[:k] in wordset for k in xrange(1, len(word))):
return word
return ""
#With Trie:
#104ms 41.72%
class SolutionTrie(object):
def longestWord(self, words):
Trie = lambda: collections.defaultdict(Trie)
trie = Trie()
END = True
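        # reduce(dict.__getitem__, word, trie) walks/creates one nested
        # defaultdict level per character; the terminal node stores the
        # word's index under the END key.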
for i, word in enumerate(words):
reduce(dict.__getitem__, word, trie)[END] = i
stack = trie.values()
ans = ""
while stack:
cur = stack.pop()
if END in cur:
word = words[cur[END]]
if len(word) > len(ans) or len(word) == len(ans) and word < ans:
ans = word
stack.extend([cur[letter] for letter in cur if letter != END])
return ans
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/longest-word-in-dictionary/solution/
#
# Approach #2: Trie + Depth-First Search [Accepted]
#
# Time complexity: O(∑ w_i), where w_i is the length of words[i].
# This is the complexity to build the trie and to search it.
# If we used a BFS instead of a DFS, and ordered the children in an array,
# we could drop the need to check whether the candidate word at each node is better than the answer,
# by forcing that the last node visited will be the best answer.
#
# Space Complexity: O(∑ w_i), the space used by our trie.
# 8ms 100%
class Solution {
public String longestWord(String[] words) {
Trie trie = new Trie();
for (String word : words) {
trie.insert(word);
}
return dfs(trie.root, "");
}
class TrieNode{
TrieNode [] base = new TrieNode[26];
String word;
}
class Trie{
TrieNode root;
Trie() {
root = new TrieNode();
root.word = "";
}
void insert(String word) {
TrieNode node = root;
for (char c : word.toCharArray()) {
if (node.base[c - 'a'] == null) node.base[c- 'a'] = new TrieNode();
node = node.base[c- 'a'];
}
node.word = word;
}
}
public String dfs(TrieNode node, String res) {
if (node.word == null) return res;
if (node.word.length() > res.length()) res = node.word;
for (TrieNode child : node.base) {
if (child != null) res = dfs(child, res);
}
return res;
}
}
Complexity Analysis
Time complexity: O(∑ w_i^2), where w_i is the length of words[i].
Checking whether all prefixes of words[i] are in the set is O(∑ w_i^2).
Space complexity: O(∑ w_i^2) to create the substrings.
# BruceForce
# 8ms 100%
class Solution {
public String longestWord(String[] words) {
String res = "";
Set<String> set = new HashSet();
for (String word: words) {
set.add(word);
}
for(String word: words) {
if (isBetter(word, res) && contains(set, word)) res = word;
}
return res;
}
private boolean isBetter(String a, String b) {
if (a.length() > b.length()) return true;
else if (a.length() < b.length()) return false;
for (int i = 0; i < a.length(); i++) {
if (a.charAt(i) > b.charAt(i)) return false;
else if (a.charAt(i) < b.charAt(i)) return true;
}
return true;
}
private boolean contains(Set<String> set, String target) {
for (int i = 1; i < target.length(); i++) {
if (!set.contains(target.substring(0, i))) return false;
}
return true;
}
}
'''
| [
"unittest.main",
"collections.defaultdict"
] | [((2748, 2763), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2761, 2763), False, 'import unittest\n'), ((2072, 2101), 'collections.defaultdict', 'collections.defaultdict', (['Trie'], {}), '(Trie)\n', (2095, 2101), False, 'import collections\n')] |
from h3 import h3
from math import radians, cos, sin, asin, sqrt , floor, pow
import numpy as np
import math
import csv
from folium import folium
from folium import features
from folium import map
print('ifd')
n = 0
while n < 21 :
print('n=',n,'ifd=', 0.152 * pow(2,n))
n += 1
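# Average hexagon edge length for each H3 resolution (0-15), in kilometres.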
print('edge length')
res = 0
while res < 16 :
print('res=', res, 'edge length=', h3.edge_length(res, unit='km'), 'km')
res += 1
| [
"math.pow",
"h3.h3.edge_length"
] | [((364, 394), 'h3.h3.edge_length', 'h3.edge_length', (['res'], {'unit': '"""km"""'}), "(res, unit='km')\n", (378, 394), False, 'from h3 import h3\n'), ((262, 271), 'math.pow', 'pow', (['(2)', 'n'], {}), '(2, n)\n', (265, 271), False, 'from math import radians, cos, sin, asin, sqrt, floor, pow\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a project powered by Codecademy students.
The project features a modified single-player version of the classic game: battleships.
Game based on tutorials by <NAME> in his book 'Making Games with Python
& Pygame"
http://inventwithpython.com/pygame/chapters/
The game requires python 2 and the pygame modules.
The game is a battleship puzzle game. The objective is to sink all the ships in as few shots as possible.
The markers on the edges of the game board tell you how many ship pieces are in each column and row.
"""
# Importing pygame modules
import random, sys, pygame
from pygame.locals import *
# Set variables, like screen width and height
# globals
FPS = 30 #Determines the number of frames per second
REVEALSPEED = 8 #Determines the speed at which the squares reveals after being clicked
WINDOWWIDTH = 800 #Width of game window
WINDOWHEIGHT = 600 #Height of game window
TILESIZE = 40 #Size of the squares in each grid(tile)
MARKERSIZE = 40 #Size of the box which contains the number that indicates how many ships are in this row/col
BUTTONHEIGHT = 20 #Height of a standard button
BUTTONWIDTH = 40 #Width of a standard button
TEXT_HEIGHT = 25 #Size of the text
TEXT_LEFT_POSN = 10 #Where the text will be positioned
BOARDWIDTH = 10 #Number of grids horizontally
BOARDHEIGHT = 10 #Number of grids vertically
DISPLAYWIDTH = 200 #Width of the game board
EXPLOSIONSPEED = 10 #How fast the explosion graphics will play
XMARGIN = int((WINDOWWIDTH - (BOARDWIDTH * TILESIZE) - DISPLAYWIDTH - MARKERSIZE) / 2) #x-position of the top left corner of board
YMARGIN = int((WINDOWHEIGHT - (BOARDHEIGHT * TILESIZE) - MARKERSIZE) / 2) #y-position of the top left corner of board
#Colours which will be used by the game
BLACK = ( 0, 0, 0)
WHITE = (255, 255, 255)
GREEN = ( 0, 204, 0)
GRAY = ( 60, 60, 60)
BLUE = ( 0, 50, 255)
YELLOW = (255, 255, 0)
DARKGRAY =( 40, 40, 40)
#Determine what to colour each element of the game
BGCOLOR = GRAY
BUTTONCOLOR = GREEN
TEXTCOLOR = WHITE
TILECOLOR = GREEN
BORDERCOLOR = BLUE
TEXTSHADOWCOLOR = BLUE
SHIPCOLOR = YELLOW
HIGHLIGHTCOLOR = BLUE
def main():
"""
The main function intializes the variables which will be used by the game.
"""
global DISPLAYSURF, FPSCLOCK, BASICFONT, HELP_SURF, HELP_RECT, NEW_SURF, \
NEW_RECT, SHOTS_SURF, SHOTS_RECT, BIGFONT, COUNTER_SURF, \
COUNTER_RECT, HBUTTON_SURF, EXPLOSION_IMAGES
pygame.init()
FPSCLOCK = pygame.time.Clock()
#Fonts used by the game
DISPLAYSURF = pygame.display.set_mode((WINDOWWIDTH, WINDOWHEIGHT))
BASICFONT = pygame.font.Font('freesansbold.ttf', 20)
BIGFONT = pygame.font.Font('freesansbold.ttf', 50)
# Create and label the buttons
HELP_SURF = BASICFONT.render("HELP", True, WHITE)
HELP_RECT = HELP_SURF.get_rect()
HELP_RECT.topleft = (WINDOWWIDTH - 180, WINDOWHEIGHT - 350)
NEW_SURF = BASICFONT.render("NEW GAME", True, WHITE)
NEW_RECT = NEW_SURF.get_rect()
NEW_RECT.topleft = (WINDOWWIDTH - 200, WINDOWHEIGHT - 200)
# The 'Shots:' label at the top
SHOTS_SURF = BASICFONT.render("Shots: ", True, WHITE)
SHOTS_RECT = SHOTS_SURF.get_rect()
SHOTS_RECT.topleft = (WINDOWWIDTH - 750, WINDOWHEIGHT - 570)
# Load the explosion graphics from the /img folder
EXPLOSION_IMAGES = [
pygame.image.load("img/blowup1.png"), pygame.image.load("img/blowup2.png"),
pygame.image.load("img/blowup3.png"),pygame.image.load("img/blowup4.png"),
pygame.image.load("img/blowup5.png"),pygame.image.load("img/blowup6.png")]
# Set the title in the menu bar to 'Battleship'
pygame.display.set_caption('Battleship')
# Keep the game running at all times
while True:
shots_taken = run_game() #Run the game until it stops and save the result in shots_taken
show_gameover_screen(shots_taken) #Display a gameover screen by passing in shots_taken
def run_game():
"""
Function is executed while a game is running.
returns the amount of shots taken
"""
revealed_tiles = generate_default_tiles(False) #Contains the list of the tiles revealed by user
# main board object,
main_board = generate_default_tiles(None) #Contains the list of the ships which exists on board
ship_objs = ['battleship','cruiser1','cruiser2','destroyer1','destroyer2',
'destroyer3','submarine1','submarine2','submarine3','submarine4'] # List of the ships available
main_board = add_ships_to_board(main_board, ship_objs) #call add_ships_to_board to add the list of ships to the main_board
mousex, mousey = 0, 0 #location of mouse
counter = [] #counter to track number of shots fired
xmarkers, ymarkers = set_markers(main_board) #The numerical markers on each side of the board
while True:
# counter display (it needs to be here in order to refresh it)
COUNTER_SURF = BASICFONT.render(str(len(counter)), True, WHITE)
COUNTER_RECT = SHOTS_SURF.get_rect()
COUNTER_RECT.topleft = (WINDOWWIDTH - 680, WINDOWHEIGHT - 570)
# Fill background
DISPLAYSURF.fill(BGCOLOR)
# draw the buttons
DISPLAYSURF.blit(HELP_SURF, HELP_RECT)
DISPLAYSURF.blit(NEW_SURF, NEW_RECT)
DISPLAYSURF.blit(SHOTS_SURF, SHOTS_RECT)
DISPLAYSURF.blit(COUNTER_SURF, COUNTER_RECT)
# Draw the tiles onto the board and their respective markers
draw_board(main_board, revealed_tiles)
draw_markers(xmarkers, ymarkers)
mouse_clicked = False
check_for_quit()
#Check for pygame events
for event in pygame.event.get():
if event.type == MOUSEBUTTONUP:
if HELP_RECT.collidepoint(event.pos): #if the help button is clicked on
DISPLAYSURF.fill(BGCOLOR)
show_help_screen() #Show the help screen
elif NEW_RECT.collidepoint(event.pos): #if the new game button is clicked on
main() #goto main, which resets the game
else: #otherwise
mousex, mousey = event.pos #set mouse positions to the new position
mouse_clicked = True #mouse is clicked but not on a button
elif event.type == MOUSEMOTION: #Detected mouse motion
mousex, mousey = event.pos #set mouse positions to the new position
#Check if the mouse is clicked at a position with a ship piece
tilex, tiley = get_tile_at_pixel(mousex, mousey)
if tilex != None and tiley != None:
if not revealed_tiles[tilex][tiley]: #if the tile the mouse is on is not revealed
draw_highlight_tile(tilex, tiley) # draws the hovering highlight over the tile
if not revealed_tiles[tilex][tiley] and mouse_clicked: #if the mouse is clicked on the not revealed tile
reveal_tile_animation(main_board, [(tilex, tiley)])
revealed_tiles[tilex][tiley] = True #set the tile to now be revealed
if check_revealed_tile(main_board, [(tilex, tiley)]): # if the clicked position contains a ship piece
left, top = left_top_coords_tile(tilex, tiley)
blowup_animation((left, top))
if check_for_win(main_board, revealed_tiles): # check for a win
counter.append((tilex, tiley))
return len(counter) # return the amount of shots taken
counter.append((tilex, tiley))
pygame.display.update()
FPSCLOCK.tick(FPS)
def generate_default_tiles(default_value):
"""
Function generates a list of 10 x 10 tiles. The list will contain tuples
('shipName', boolShot) set to their (default_value).
default_value -> boolean which tells what the value to set to
returns the list of tuples
"""
default_tiles = [[default_value]*BOARDHEIGHT for i in xrange(BOARDWIDTH)]
return default_tiles
def blowup_animation(coord):
"""
    Function creates the explosion animation played if a ship is shot.
coord -> tuple of tile coords to apply the blowup animation
"""
for image in EXPLOSION_IMAGES: # go through the list of images in the list of pictures and play them in sequence
#Determine the location and size to display the image
image = pygame.transform.scale(image, (TILESIZE+10, TILESIZE+10))
DISPLAYSURF.blit(image, coord)
pygame.display.flip()
FPSCLOCK.tick(EXPLOSIONSPEED) #Determine the delay to play the image with
def check_revealed_tile(board, tile):
"""
Function checks if a tile location contains a ship piece.
board -> the tiled board either a ship piece or none
tile -> location of tile
returns True if ship piece exists at tile location
"""
return board[tile[0][0]][tile[0][1]] != None
def reveal_tile_animation(board, tile_to_reveal):
"""
Function creates an animation which plays when the mouse is clicked on a tile, and whatever is
behind the tile needs to be revealed.
board -> list of board tile tuples ('shipName', boolShot)
tile_to_reveal -> tuple of tile coords to apply the reveal animation to
"""
for coverage in xrange(TILESIZE, (-REVEALSPEED) - 1, -REVEALSPEED): #Plays animation based on reveal speed
draw_tile_covers(board, tile_to_reveal, coverage)
def draw_tile_covers(board, tile, coverage):
"""
Function draws the tiles according to a set of variables.
board -> list; of board tiles
tile -> tuple; of tile coords to reveal
coverage -> int; amount of the tile that is covered
"""
left, top = left_top_coords_tile(tile[0][0], tile[0][1])
if check_revealed_tile(board, tile):
pygame.draw.rect(DISPLAYSURF, SHIPCOLOR, (left, top, TILESIZE,
TILESIZE))
else:
pygame.draw.rect(DISPLAYSURF, BGCOLOR, (left, top, TILESIZE,
TILESIZE))
if coverage > 0:
pygame.draw.rect(DISPLAYSURF, TILECOLOR, (left, top, coverage,
TILESIZE))
pygame.display.update()
FPSCLOCK.tick(FPS)
def check_for_quit():
"""
Function checks if the user has attempted to quit the game.
"""
for event in pygame.event.get(QUIT):
pygame.quit()
sys.exit()
def check_for_win(board, revealed):
"""
Function checks if the current board state is a winning state.
board -> the board which contains the ship pieces
revealed -> list of revealed tiles
returns True if all the ships are revealed
"""
for tilex in xrange(BOARDWIDTH):
for tiley in xrange(BOARDHEIGHT):
if board[tilex][tiley] != None and not revealed[tilex][tiley]: # check if every board with a ship is revealed, return false if not
return False
return True
def draw_board(board, revealed):
"""
Function draws the game board.
board -> list of board tiles
revealed -> list of revealed tiles
"""
#draws the grids depending on its state
for tilex in xrange(BOARDWIDTH):
for tiley in xrange(BOARDHEIGHT):
left, top = left_top_coords_tile(tilex, tiley)
if not revealed[tilex][tiley]:
pygame.draw.rect(DISPLAYSURF, TILECOLOR, (left, top, TILESIZE,
TILESIZE))
else:
if board[tilex][tiley] != None:
pygame.draw.rect(DISPLAYSURF, SHIPCOLOR, (left, top,
TILESIZE, TILESIZE))
else:
pygame.draw.rect(DISPLAYSURF, BGCOLOR, (left, top,
TILESIZE, TILESIZE))
#draws the horizontal lines
for x in xrange(0, (BOARDWIDTH + 1) * TILESIZE, TILESIZE):
pygame.draw.line(DISPLAYSURF, DARKGRAY, (x + XMARGIN + MARKERSIZE,
YMARGIN + MARKERSIZE), (x + XMARGIN + MARKERSIZE,
WINDOWHEIGHT - YMARGIN))
#draws the vertical lines
for y in xrange(0, (BOARDHEIGHT + 1) * TILESIZE, TILESIZE):
pygame.draw.line(DISPLAYSURF, DARKGRAY, (XMARGIN + MARKERSIZE, y +
YMARGIN + MARKERSIZE), (WINDOWWIDTH - (DISPLAYWIDTH + MARKERSIZE *
2), y + YMARGIN + MARKERSIZE))
def set_markers(board):
"""
    Function creates the lists of the markers to the side of the game board which indicate
the number of ship pieces in each row and column.
board: list of board tiles
returns the 2 lists of markers with number of ship pieces in each row (xmarkers)
and column (ymarkers)
"""
xmarkers = [0 for i in xrange(BOARDWIDTH)]
ymarkers = [0 for i in xrange(BOARDHEIGHT)]
#Loop through the tiles
for tilex in xrange(BOARDWIDTH):
for tiley in xrange(BOARDHEIGHT):
if board[tilex][tiley] != None: #if the tile is a ship piece, then increment the markers
xmarkers[tilex] += 1
ymarkers[tiley] += 1
return xmarkers, ymarkers
def draw_markers(xlist, ylist):
"""
Function draws the two list of markers to the side of the board.
xlist -> list of row markers
ylist -> list of column markers
"""
for i in xrange(len(xlist)): #Draw the x-marker list
left = i * MARKERSIZE + XMARGIN + MARKERSIZE + (TILESIZE / 3)
top = YMARGIN
marker_surf, marker_rect = make_text_objs(str(xlist[i]),
BASICFONT, TEXTCOLOR)
marker_rect.topleft = (left, top)
DISPLAYSURF.blit(marker_surf, marker_rect)
for i in range(len(ylist)): #Draw the y-marker list
left = XMARGIN
top = i * MARKERSIZE + YMARGIN + MARKERSIZE + (TILESIZE / 3)
marker_surf, marker_rect = make_text_objs(str(ylist[i]),
BASICFONT, TEXTCOLOR)
marker_rect.topleft = (left, top)
DISPLAYSURF.blit(marker_surf, marker_rect)
def add_ships_to_board(board, ships):
"""
Function goes through a list of ships and add them randomly into a board.
board -> list of board tiles
ships -> list of ships to place on board
returns list of board tiles with ships placed on certain tiles
"""
new_board = board[:]
ship_length = 0
for ship in ships: #go through each ship declared in the list
#Randomly find a valid position that fits the ship
valid_ship_position = False
while not valid_ship_position:
xStartpos = random.randint(0, 9)
yStartpos = random.randint(0, 9)
isHorizontal = random.randint(0, 1) #vertical or horizontal positioning
#Type of ship and their respective length
if 'battleship' in ship:
ship_length = 4
elif 'cruiser' in ship:
ship_length = 3
elif 'destroyer'in ship:
ship_length = 2
elif 'submarine' in ship:
ship_length = 1
#check if position is valid
valid_ship_position, ship_coords = make_ship_position(new_board,
xStartpos, yStartpos, isHorizontal, ship_length, ship)
#add the ship if it is valid
if valid_ship_position:
for coord in ship_coords:
new_board[coord[0]][coord[1]] = ship
return new_board
def make_ship_position(board, xPos, yPos, isHorizontal, length, ship):
"""
Function makes a ship on a board given a set of variables
board -> list of board tiles
xPos -> x-coordinate of first ship piece
yPos -> y-coordinate of first ship piece
isHorizontal -> True if ship is horizontal
length -> length of ship
returns tuple: True if ship position is valid and list ship coordinates
"""
ship_coordinates = [] #the coordinates the ship will occupy
if isHorizontal:
for i in xrange(length):
if (i+xPos > 9) or (board[i+xPos][yPos] != None) or \
hasAdjacent(board, i+xPos, yPos, ship): #if the ship goes out of bound, hits another ship, or is adjacent to another ship
return (False, ship_coordinates) #then return false
else:
ship_coordinates.append((i+xPos, yPos))
else:
for i in xrange(length):
if (i+yPos > 9) or (board[xPos][i+yPos] != None) or \
hasAdjacent(board, xPos, i+yPos, ship): #if the ship goes out of bound, hits another ship, or is adjacent to another ship
return (False, ship_coordinates) #then return false
else:
ship_coordinates.append((xPos, i+yPos))
return (True, ship_coordinates) #ship is successfully added
def hasAdjacent(board, xPos, yPos, ship):
"""
    Function checks if a ship has adjacent ships
board -> list of board tiles
xPos -> x-coordinate of first ship piece
yPos -> y-coordinate of first ship piece
ship -> the ship being checked for adjacency
returns true if there are adjacent ships and false if there are no adjacent ships
"""
for x in xrange(xPos-1,xPos+2):
for y in xrange(yPos-1,yPos+2):
if (x in range (10)) and (y in range (10)) and \
(board[x][y] not in (ship, None)):
return True
return False
def left_top_coords_tile(tilex, tiley):
"""
Function calculates and returns the pixel of the tile in the top left corner
tilex -> int; x position of tile
tiley -> int; y position of tile
returns tuple (int, int) which indicates top-left pixel coordinates of tile
"""
left = tilex * TILESIZE + XMARGIN + MARKERSIZE
top = tiley * TILESIZE + YMARGIN + MARKERSIZE
return (left, top)
def get_tile_at_pixel(x, y):
"""
Function finds the corresponding tile coordinates of pixel at top left, defaults to (None, None) given a coordinate.
x -> int; x position of pixel
y -> int; y position of pixel
returns tuple (tilex, tiley)
"""
for tilex in xrange(BOARDWIDTH):
for tiley in xrange(BOARDHEIGHT):
left, top = left_top_coords_tile(tilex, tiley)
tile_rect = pygame.Rect(left, top, TILESIZE, TILESIZE)
if tile_rect.collidepoint(x, y):
return (tilex, tiley)
return (None, None)
def draw_highlight_tile(tilex, tiley):
"""
Function draws the hovering highlight over the tile.
tilex -> int; x position of tile
tiley -> int; y position of tile
"""
left, top = left_top_coords_tile(tilex, tiley)
pygame.draw.rect(DISPLAYSURF, HIGHLIGHTCOLOR,
(left, top, TILESIZE, TILESIZE), 4)
def show_help_screen():
"""
Function display a help screen until any button is pressed.
"""
line1_surf, line1_rect = make_text_objs('Press a key to return to the game',
BASICFONT, TEXTCOLOR)
line1_rect.topleft = (TEXT_LEFT_POSN, TEXT_HEIGHT)
DISPLAYSURF.blit(line1_surf, line1_rect)
line2_surf, line2_rect = make_text_objs(
'This is a battleship puzzle game. Your objective is ' \
'to sink all the ships in as few', BASICFONT, TEXTCOLOR)
line2_rect.topleft = (TEXT_LEFT_POSN, TEXT_HEIGHT * 3)
DISPLAYSURF.blit(line2_surf, line2_rect)
line3_surf, line3_rect = make_text_objs('shots as possible. The markers on'\
' the edges of the game board tell you how', BASICFONT, TEXTCOLOR)
line3_rect.topleft = (TEXT_LEFT_POSN, TEXT_HEIGHT * 4)
DISPLAYSURF.blit(line3_surf, line3_rect)
line4_surf, line4_rect = make_text_objs('many ship pieces are in each'\
' column and row. To reset your game click on', BASICFONT, TEXTCOLOR)
line4_rect.topleft = (TEXT_LEFT_POSN, TEXT_HEIGHT * 5)
DISPLAYSURF.blit(line4_surf, line4_rect)
line5_surf, line5_rect = make_text_objs('the "New Game" button.',
BASICFONT, TEXTCOLOR)
line5_rect.topleft = (TEXT_LEFT_POSN, TEXT_HEIGHT * 6)
DISPLAYSURF.blit(line5_surf, line5_rect)
while check_for_keypress() == None: #Check if the user has pressed keys, if so go back to the game
pygame.display.update()
FPSCLOCK.tick()
def check_for_keypress():
"""
Function checks for any key presses by pulling out all KEYDOWN and KEYUP events from queue.
returns any KEYUP events, otherwise return None
"""
for event in pygame.event.get([KEYDOWN, KEYUP, MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION]):
if event.type in (KEYDOWN, MOUSEBUTTONUP, MOUSEBUTTONDOWN, MOUSEMOTION):
continue
return event.key
return None
def make_text_objs(text, font, color):
"""
Function creates a text.
text -> string; content of text
font -> Font object; face of font
color -> tuple of color (red, green blue); colour of text
returns the surface object, rectangle object
"""
surf = font.render(text, True, color)
return surf, surf.get_rect()
def show_gameover_screen(shots_fired):
"""
Function display a gameover screen when the user has successfully shot at every ship pieces.
shots_fired -> the number of shots taken before game is over
"""
DISPLAYSURF.fill(BGCOLOR)
titleSurf, titleRect = make_text_objs('Congrats! Puzzle solved in:',
BIGFONT, TEXTSHADOWCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2))
DISPLAYSURF.blit(titleSurf, titleRect)
titleSurf, titleRect = make_text_objs('Congrats! Puzzle solved in:',
BIGFONT, TEXTCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2) - 3, int(WINDOWHEIGHT / 2) - 3)
DISPLAYSURF.blit(titleSurf, titleRect)
titleSurf, titleRect = make_text_objs(str(shots_fired) + ' shots',
BIGFONT, TEXTSHADOWCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2 + 50))
DISPLAYSURF.blit(titleSurf, titleRect)
titleSurf, titleRect = make_text_objs(str(shots_fired) + ' shots',
BIGFONT, TEXTCOLOR)
titleRect.center = (int(WINDOWWIDTH / 2) - 3, int(WINDOWHEIGHT / 2 + 50) - 3)
DISPLAYSURF.blit(titleSurf, titleRect)
pressKeySurf, pressKeyRect = make_text_objs(
'Press a key to try to beat that score.', BASICFONT, TEXTCOLOR)
pressKeyRect.center = (int(WINDOWWIDTH / 2), int(WINDOWHEIGHT / 2) + 100)
DISPLAYSURF.blit(pressKeySurf, pressKeyRect)
while check_for_keypress() == None: #Check if the user has pressed keys, if so start a new game
pygame.display.update()
FPSCLOCK.tick()
if __name__ == "__main__": #This calls the game loop
main()
| [
"sys.exit",
"pygame.init",
"pygame.quit",
"pygame.event.get",
"pygame.draw.line",
"pygame.display.set_mode",
"pygame.display.flip",
"pygame.time.Clock",
"pygame.Rect",
"pygame.draw.rect",
"pygame.display.set_caption",
"pygame.image.load",
"pygame.font.Font",
"pygame.display.update",
"random.randint",
"pygame.transform.scale"
] | [((2479, 2492), 'pygame.init', 'pygame.init', ([], {}), '()\n', (2490, 2492), False, 'import random, sys, pygame\n'), ((2508, 2527), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (2525, 2527), False, 'import random, sys, pygame\n'), ((2574, 2626), 'pygame.display.set_mode', 'pygame.display.set_mode', (['(WINDOWWIDTH, WINDOWHEIGHT)'], {}), '((WINDOWWIDTH, WINDOWHEIGHT))\n', (2597, 2626), False, 'import random, sys, pygame\n'), ((2643, 2683), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(20)'], {}), "('freesansbold.ttf', 20)\n", (2659, 2683), False, 'import random, sys, pygame\n'), ((2698, 2738), 'pygame.font.Font', 'pygame.font.Font', (['"""freesansbold.ttf"""', '(50)'], {}), "('freesansbold.ttf', 50)\n", (2714, 2738), False, 'import random, sys, pygame\n'), ((3684, 3724), 'pygame.display.set_caption', 'pygame.display.set_caption', (['"""Battleship"""'], {}), "('Battleship')\n", (3710, 3724), False, 'import random, sys, pygame\n'), ((10355, 10378), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (10376, 10378), False, 'import random, sys, pygame\n'), ((10527, 10549), 'pygame.event.get', 'pygame.event.get', (['QUIT'], {}), '(QUIT)\n', (10543, 10549), False, 'import random, sys, pygame\n'), ((18965, 19050), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'HIGHLIGHTCOLOR', '(left, top, TILESIZE, TILESIZE)', '(4)'], {}), '(DISPLAYSURF, HIGHLIGHTCOLOR, (left, top, TILESIZE,\n TILESIZE), 4)\n', (18981, 19050), False, 'import random, sys, pygame\n'), ((20816, 20895), 'pygame.event.get', 'pygame.event.get', (['[KEYDOWN, KEYUP, MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION]'], {}), '([KEYDOWN, KEYUP, MOUSEBUTTONDOWN, MOUSEBUTTONUP, MOUSEMOTION])\n', (20832, 20895), False, 'import random, sys, pygame\n'), ((3381, 3417), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup1.png"""'], {}), "('img/blowup1.png')\n", (3398, 3417), False, 'import random, sys, pygame\n'), ((3419, 3455), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup2.png"""'], {}), "('img/blowup2.png')\n", (3436, 3455), False, 'import random, sys, pygame\n'), ((3465, 3501), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup3.png"""'], {}), "('img/blowup3.png')\n", (3482, 3501), False, 'import random, sys, pygame\n'), ((3502, 3538), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup4.png"""'], {}), "('img/blowup4.png')\n", (3519, 3538), False, 'import random, sys, pygame\n'), ((3548, 3584), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup5.png"""'], {}), "('img/blowup5.png')\n", (3565, 3584), False, 'import random, sys, pygame\n'), ((3585, 3621), 'pygame.image.load', 'pygame.image.load', (['"""img/blowup6.png"""'], {}), "('img/blowup6.png')\n", (3602, 3621), False, 'import random, sys, pygame\n'), ((5731, 5749), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (5747, 5749), False, 'import random, sys, pygame\n'), ((7663, 7686), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (7684, 7686), False, 'import random, sys, pygame\n'), ((8498, 8559), 'pygame.transform.scale', 'pygame.transform.scale', (['image', '(TILESIZE + 10, TILESIZE + 10)'], {}), '(image, (TILESIZE + 10, TILESIZE + 10))\n', (8520, 8559), False, 'import random, sys, pygame\n'), ((8603, 8624), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (8622, 8624), False, 'import random, sys, pygame\n'), ((9923, 9996), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'SHIPCOLOR', '(left, top, TILESIZE, TILESIZE)'], {}), 
'(DISPLAYSURF, SHIPCOLOR, (left, top, TILESIZE, TILESIZE))\n', (9939, 9996), False, 'import random, sys, pygame\n'), ((10065, 10136), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'BGCOLOR', '(left, top, TILESIZE, TILESIZE)'], {}), '(DISPLAYSURF, BGCOLOR, (left, top, TILESIZE, TILESIZE))\n', (10081, 10136), False, 'import random, sys, pygame\n'), ((10214, 10287), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'TILECOLOR', '(left, top, coverage, TILESIZE)'], {}), '(DISPLAYSURF, TILECOLOR, (left, top, coverage, TILESIZE))\n', (10230, 10287), False, 'import random, sys, pygame\n'), ((10559, 10572), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (10570, 10572), False, 'import random, sys, pygame\n'), ((10581, 10591), 'sys.exit', 'sys.exit', ([], {}), '()\n', (10589, 10591), False, 'import random, sys, pygame\n'), ((12126, 12271), 'pygame.draw.line', 'pygame.draw.line', (['DISPLAYSURF', 'DARKGRAY', '(x + XMARGIN + MARKERSIZE, YMARGIN + MARKERSIZE)', '(x + XMARGIN + MARKERSIZE, WINDOWHEIGHT - YMARGIN)'], {}), '(DISPLAYSURF, DARKGRAY, (x + XMARGIN + MARKERSIZE, YMARGIN +\n MARKERSIZE), (x + XMARGIN + MARKERSIZE, WINDOWHEIGHT - YMARGIN))\n', (12142, 12271), False, 'import random, sys, pygame\n'), ((12395, 12567), 'pygame.draw.line', 'pygame.draw.line', (['DISPLAYSURF', 'DARKGRAY', '(XMARGIN + MARKERSIZE, y + YMARGIN + MARKERSIZE)', '(WINDOWWIDTH - (DISPLAYWIDTH + MARKERSIZE * 2), y + YMARGIN + MARKERSIZE)'], {}), '(DISPLAYSURF, DARKGRAY, (XMARGIN + MARKERSIZE, y + YMARGIN +\n MARKERSIZE), (WINDOWWIDTH - (DISPLAYWIDTH + MARKERSIZE * 2), y +\n YMARGIN + MARKERSIZE))\n', (12411, 12567), False, 'import random, sys, pygame\n'), ((20546, 20569), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (20567, 20569), False, 'import random, sys, pygame\n'), ((23059, 23082), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (23080, 23082), False, 'import random, sys, pygame\n'), ((14846, 14866), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (14860, 14866), False, 'import random, sys, pygame\n'), ((14891, 14911), 'random.randint', 'random.randint', (['(0)', '(9)'], {}), '(0, 9)\n', (14905, 14911), False, 'import random, sys, pygame\n'), ((14939, 14959), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (14953, 14959), False, 'import random, sys, pygame\n'), ((18559, 18601), 'pygame.Rect', 'pygame.Rect', (['left', 'top', 'TILESIZE', 'TILESIZE'], {}), '(left, top, TILESIZE, TILESIZE)\n', (18570, 18601), False, 'import random, sys, pygame\n'), ((11529, 11602), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'TILECOLOR', '(left, top, TILESIZE, TILESIZE)'], {}), '(DISPLAYSURF, TILECOLOR, (left, top, TILESIZE, TILESIZE))\n', (11545, 11602), False, 'import random, sys, pygame\n'), ((11747, 11820), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'SHIPCOLOR', '(left, top, TILESIZE, TILESIZE)'], {}), '(DISPLAYSURF, SHIPCOLOR, (left, top, TILESIZE, TILESIZE))\n', (11763, 11820), False, 'import random, sys, pygame\n'), ((11901, 11972), 'pygame.draw.rect', 'pygame.draw.rect', (['DISPLAYSURF', 'BGCOLOR', '(left, top, TILESIZE, TILESIZE)'], {}), '(DISPLAYSURF, BGCOLOR, (left, top, TILESIZE, TILESIZE))\n', (11917, 11972), False, 'import random, sys, pygame\n')] |
import random
import sys
listword=["hello","computer","python","java","html","world","apple","windows"]
guessword=[]
random_word=random.choice(listword)
lenghtword=len(random_word)
alphabet="abcdefghijklmnopqrstuvwxyz"
letter_storage=[]
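# letter_storage remembers every letter the player has already tried so that
# repeated guesses can be rejected inside guessing().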
def intro():
print("\tHello and Welcome to Hangman (A word prediction game)")
while True:
name=input("Enter your name:\n").strip()
if name=="":
print("Enter a valid name\n")
else:
break
print("\n\t\tSo %s welcome to the Game :) " % name)
intro()
def game():
while True:
String=input("So you ready to play :\n ")
if String=="yes" or String=="Y" or String=="y":
break
elif String=="No" or String=="N" or String=="n":
sys.exit()
else:
print("Please Enter something ")
continue
game()
def rules():
for character in random_word:
guessword.append("_")
print("Ok, so the word You need to guess has", lenghtword, "characters")
print("Be aware that You can enter only 1 letter from a-z\n\n")
print(guessword)
def guessing():
guess_no=1
while guess_no<10:
guess=input("\nPick a letter : ")
if not guess in alphabet:
print("pick a letter from a-z ")
elif guess in letter_storage:
print("Already guessed this letter.")
else:
letter_storage.append(guess)
if guess in random_word:
print("You guessed correctly")
for x in range(0,lenghtword):
if random_word[x]==guess:
guessword[x]=guess
print(guessword)
if not '_' in guessword:
print("You won")
break
else:
print("Guessed letter not in the word")
guess_no+=1
if guess_no==10:
print("Sorry, you have used all your chances. YOU LOST !!")
rules()
guessing()
print("\tGAME OVER !! ")
# By: <NAME> (https://github.com/DarshAsawa)
| [
"random.choice",
"sys.exit"
] | [((131, 154), 'random.choice', 'random.choice', (['listword'], {}), '(listword)\n', (144, 154), False, 'import random\n'), ((688, 698), 'sys.exit', 'sys.exit', ([], {}), '()\n', (696, 698), False, 'import sys\n')] |
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
# std libs
import logging
import random
# third-party libs
# our libs
logger = logging.getLogger('D42Experiment')
ALGO_POOL = ['astar', 'astarbi', 'dijkstra', 'dijkstrabi', 'dijkstraNativebi']
BI_POOL = ['astarbi', 'dijkstrabi', 'dijkstraNativebi']
NONBI_POOL = ['astar', 'dijkstra']
DIJSKTRA_POOL = ['dijkstra', 'dijkstrabi', 'dijkstraNativebi']
ASTAR_POOL = ['astar', 'astarbi']
ASTAR_MIX = ['astar', 'astarbi', 'dijkstraNativebi']
DIJKSTRA_MIX = ['dijkstra', 'dijkstrabi', 'dijkstraNativebi', 'astar']
TWO_MIX = [['astar', 'dijkstrabi'], ['astar', 'dijkstraNativebi'],['astar', \
'dijkstra']]
THREE_MIX = [['astarbi', 'dijkstra', 'dijkstrabi'], ['astarbi', 'dijkstra', \
'dijkstraNativebi'], ['astarbi','dijkstrabi', 'dijkstraNativebi']]
WEIGHTINGS = ['fastest', 'shortest']
def pickFirst():
first = ALGO_POOL[0]
logger.info("Picking algorithm: %s"%(first))
return first
def pickDijkstra():
return ALGO_POOL[2]
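# The pickFrom*Mix helpers below are infinite generators: each samples one pool
# from TWO_MIX/THREE_MIX once, then yields a random member of that pool on
# every iteration.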
def pickFromTwoMix():
two_mix_pool = random.sample(TWO_MIX, 1)[0]
logger.info("Picked two mix pool: %s"%(str(two_mix_pool)))
while True:
randAlgo = random.sample(two_mix_pool, 1)[0]
yield randAlgo
def pickFromThreeMix():
three_mix_pool = random.sample(THREE_MIX, 1)[0]
logger.info("Picked three mix pool: %s"%(str(three_mix_pool)))
while True:
randAlgo = random.sample(three_mix_pool, 1)[0]
yield randAlgo
def pickLast():
last = ALGO_POOL[-1]
logger.info("Picking algorithm: %s"%(last))
return last
def pickRandom():
randAlgo = random.sample(ALGO_POOL, 1)[0]
logger.info("Picking random algorithm: %s"%(randAlgo))
return randAlgo
def pickRandomBi():
randAlgo = random.sample(BI_POOL, 1)[0]
logger.info("Picking random bidirectional algorithm: %s"%(randAlgo))
return randAlgo
def pickRandomNonBi():
randAlgo = random.sample(NONBI_POOL, 1)[0]
logger.info("Picking random non-bidirectional algorithm: %s"%(randAlgo))
return randAlgo
def pickFastestWeighting():
weighting = WEIGHTINGS[0]
logger.debug("Picking weighting to be: %s"%(weighting))
return weighting
def pickShortestWeighting():
weighting = WEIGHTINGS[1]
logger.debug("Picking weighting to be: %s"%(weighting))
return weighting
def pickRandomWeighting():
weighting = random.sample(WEIGHTINGS, 1)[0]
logger.debug("Picking weighting to be: %s"%(weighting))
return weighting
| [
"logging.getLogger",
"random.sample"
] | [((137, 171), 'logging.getLogger', 'logging.getLogger', (['"""D42Experiment"""'], {}), "('D42Experiment')\n", (154, 171), False, 'import logging\n'), ((1037, 1062), 'random.sample', 'random.sample', (['TWO_MIX', '(1)'], {}), '(TWO_MIX, 1)\n', (1050, 1062), False, 'import random\n'), ((1267, 1294), 'random.sample', 'random.sample', (['THREE_MIX', '(1)'], {}), '(THREE_MIX, 1)\n', (1280, 1294), False, 'import random\n'), ((1599, 1626), 'random.sample', 'random.sample', (['ALGO_POOL', '(1)'], {}), '(ALGO_POOL, 1)\n', (1612, 1626), False, 'import random\n'), ((1745, 1770), 'random.sample', 'random.sample', (['BI_POOL', '(1)'], {}), '(BI_POOL, 1)\n', (1758, 1770), False, 'import random\n'), ((1906, 1934), 'random.sample', 'random.sample', (['NONBI_POOL', '(1)'], {}), '(NONBI_POOL, 1)\n', (1919, 1934), False, 'import random\n'), ((2361, 2389), 'random.sample', 'random.sample', (['WEIGHTINGS', '(1)'], {}), '(WEIGHTINGS, 1)\n', (2374, 2389), False, 'import random\n'), ((1164, 1194), 'random.sample', 'random.sample', (['two_mix_pool', '(1)'], {}), '(two_mix_pool, 1)\n', (1177, 1194), False, 'import random\n'), ((1400, 1432), 'random.sample', 'random.sample', (['three_mix_pool', '(1)'], {}), '(three_mix_pool, 1)\n', (1413, 1432), False, 'import random\n')] |
'''
default
'''
from datetime import timedelta
from typing import List, Generator
from srtsync.srt_sequence import SRTSequence
class SRT:
'''
default
'''
def __init__(self, srt_file: str = None):
self.file = srt_file
self.sequences = []
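    # parse_file() splits the .srt file on blank lines and turns each block into
    # an SRTSequence (sequence number, time range, caption lines); write_file()
    # serialises the sequences back out, and time_shift() offsets every start/end
    # time by the given number of milliseconds.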
def parse_file(self) -> None:
'''
default
'''
def init_sequence(sequence_arr: List[str]) -> SRTSequence:
try:
seq = sequence_arr.pop(0).strip('\n')
seq_number = int(seq)
except ValueError:
seq_number = int(seq.replace('\ufeff', ''))
seq_time = SRTSequence.strp_seq_time(
sequence_arr.pop(0).strip('\n')
)
srt_sequence = SRTSequence(
seq_number=seq_number,
seq_time=seq_time
)
for caption_line in sequence_arr:
srt_sequence.append_caption(caption_line.strip('\n'))
return srt_sequence
def generate_sequence() -> Generator[SRTSequence, None, None]:
with open(self.file, 'r') as srt_file:
temp_sequence = []
for line in srt_file:
if line == '\n':
yield init_sequence(temp_sequence)
temp_sequence = []
else:
temp_sequence.append(line)
self.sequences = [sequence for sequence in generate_sequence()]
def write_file(self, output_file: str = None) -> None:
'''
default
'''
output_file = output_file if output_file else self.file
with open(output_file, 'w') as srt_file:
for sequence in self.sequences:
srt_file.write(str(sequence))
def time_shift(self, time_shift: int) -> None:
'''
default
'''
time_shift_delta = timedelta(milliseconds=time_shift)
for sequence in self.sequences:
sequence.set_time((
sequence.time[0] + time_shift_delta,
sequence.time[1] + time_shift_delta
))
| [
"datetime.timedelta",
"srtsync.srt_sequence.SRTSequence"
] | [((1917, 1951), 'datetime.timedelta', 'timedelta', ([], {'milliseconds': 'time_shift'}), '(milliseconds=time_shift)\n', (1926, 1951), False, 'from datetime import timedelta\n'), ((757, 810), 'srtsync.srt_sequence.SRTSequence', 'SRTSequence', ([], {'seq_number': 'seq_number', 'seq_time': 'seq_time'}), '(seq_number=seq_number, seq_time=seq_time)\n', (768, 810), False, 'from srtsync.srt_sequence import SRTSequence\n')] |
#!/usr/bin/env python
import numpy as np
from LLC_Membranes.analysis.rdf import System
from LLC_Membranes.llclib import file_rw, stats
import matplotlib.pyplot as plt
import names
import tqdm
def calculate_rdf(res, path, gro='berendsen.gro', traj='PR_nojump.xtc', atoms=None):
    print('Calculating RDF of residue %s' % res)
    if atoms is not None:
        rdf = System('%s/%s' %(path, gro), '%s/%s' %(path, traj), res, 'NAcarb11V', atoms=atoms)
    else:
        rdf = System('%s/%s' %(path, gro), '%s/%s' %(path, traj), res, 'NAcarb11V')
rdf.radial_distribution_function(spline=True, npts_spline=10)
rdf.bootstrap(200)
file_rw.save_object(rdf, '%s/rdf_%s.pl' % (path, res))
return rdf
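# Plot switches: the flags below choose which residue set is plotted and whether
# the bootstrapped head-group density is overlaid; `recalculate` forces the RDFs
# to be recomputed instead of loaded from the pickled .pl files.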
recalculate = False
simple_alcohols = False
polyols = False
head_groups = True
thiol_comparison = False
ketones = False
nondonors = True
probability = False
if simple_alcohols:
residues=["MET", "ETH", "PR", "BUT"] # simple_alcohol_rdf.pdf
elif polyols:
residues=["GCL", "PG", "GLY", "TET", "RIB"]
elif thiol_comparison:
#residues=["SOH", "GCL"]
#residues=["DMP", "GLY"]
residues=["DMS", "ATO"]
elif ketones:
residues=["ACH", "URE", "ACN", "ATO"]
elif nondonors:
residues=["THF", "PCB", "EAC", "DMF"]
#residues=["THF", "DMF"]
else:
residues=["PG", "GCL"]
# residues=["DMP", "GLY"]
#residues = ["GLY", "TET", "RIB"]
wt=10
maximum = 0
i = 0
v = np.zeros([len(residues), 49])
#equil = 200 # chop off first equil frames
opacity = 0.2
for r in residues:
path = "/home/bcoscia/Documents/Gromacs/Transport/NaGA3C11/%s/%dwt" %(r,wt)
if recalculate:
rdf = calculate_rdf(r, path)
else:
try:
rdf = file_rw.load_object('%s/rdf_%s.pl' %(path, r))
except FileNotFoundError:
rdf = calculate_rdf(r, path)
mean = rdf.density.mean(axis=0)
if probability:
rdf.errorbars /= sum(mean)
mean /= sum(mean)
new_max = np.amax(mean[np.argwhere(rdf.r > 0.4)]) # really looking for the head group peak
maximum = max(maximum, new_max)
plt.plot(rdf.r, mean, label='%s' % names.res_to_name[r], linewidth=2)
plt.fill_between(rdf.r, rdf.errorbars[1, :] + mean, mean - rdf.errorbars[0, :], alpha=opacity)
#v[i, :] = [mean[i] * np.pi*(rdf.r[i + 1] ** 2 - rdf.r[i] ** 2) for i in range(len(rdf.r) - 1)]
#print(r, sum(v[i, :np.argmin(np.abs(rdf.r - 0.4)**2)]))
#plt.plot(rdf.r[:-1], v[i, :])
i += 1
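# --- Illustrative sketch (not part of the original script) ---
# The head-group block below pools per-frame densities and derives error bars
# with stats.confidence_interval. The helper here is a simplified, numpy-only
# percentile bootstrap with the same intent; it is an assumption about that
# routine's behaviour, not the LLC_Membranes implementation, and the name
# _percentile_bootstrap_ci is made up.
def _percentile_bootstrap_ci(samples, level=68, nboot=200):
    boot_means = np.zeros((nboot, samples.shape[1]))
    for b in range(nboot):
        ndx = np.random.choice(samples.shape[0], size=samples.shape[0], replace=True)
        boot_means[b] = samples[ndx].mean(axis=0)
    lower = np.percentile(boot_means, (100 - level) / 2, axis=0)
    upper = np.percentile(boot_means, 100 - (100 - level) / 2, axis=0)
    return lower, upper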
nboot = 200
nselect = 400 * len(residues) # each system has 400 head groups. Each bootstrap trial should randomly select "400 * n residues being plotted" RDFs
if head_groups:
nframes = 200
# d_head_groups = np.zeros([len(residues)*nframes, 50, len(residues)])
d_head_groups = np.zeros([len(residues)*nframes, 50])
for i, r in enumerate(residues):
path = "/home/bcoscia/Documents/Gromacs/Transport/NaGA3C11/%s/%dwt" %(r,wt)
hg = file_rw.load_object('%s/rdf_HII_CC1C2C3C4C5.pl' % path)
#d_head_groups[:, i] = hg.density.mean(axis=0)
d_head_groups[i*nframes:(i+1)*nframes, :] = hg.density
boot = np.zeros([nboot, 50])
for b in tqdm.tqdm(range(nboot)):
ndx = np.random.choice(np.arange(d_head_groups.shape[0]), size=nselect, replace=True)
boot[b, :] = d_head_groups[ndx, :].mean(axis=0)
mean = boot.mean(axis=0)
error = stats.confidence_interval(boot, 68) * (maximum / np.max(mean))
mean *= (maximum / np.max(mean))
#mean *= (24 / 400)
#std *= (24 / 400)
#mean *= (24 / 400)
#plt.plot(hg.r, maximum * hg.density.mean(axis=0) / np.max(hg.density.mean(axis=0)), '--')
# Option 1
plt.plot(hg.r, mean, '--', color='black', label='Head Groups')
#plt.fill_between(hg.r, mean + std, mean - std, alpha=0.6, color='black')
plt.fill_between(hg.r, mean + error[1, :], mean - error[0, :], alpha=opacity, color='black')
# Option 2
#rmax = hg.r[np.argmax(mean)]
#plt.plot([rmax, rmax], [0, 1.05*mean.max()], '--', color='black')
# Option 3
# rmax_ndx = np.argmax(mean)
# r = hg.r[:rmax_ndx]
# mean = mean[:rmax_ndx]
# std = std[:rmax_ndx]
# plt.plot(r, mean, '--', color='black')
#plt.fill_between(r, mean + std, mean - std, alpha=0.7, color='black')
#r = residues[0]
#path = "/home/bcoscia/Documents/Gromacs/Transport/NaGA3C11/%s/%dwt" % (r, wt)
#try:
# rdf = file_rw.load_object('%s/rdf_HII.pl' %path)
#except FileNotFoundError:
# rdf = calculate_rdf(r, path, atoms=['C', 'C1', 'C2', 'C3', 'C4', 'C5'])
#normalization = 24 / 400
#plt.plot(rdf.r, maximum * rdf.density.mean(axis=0) / np.amax(rdf.density.mean(axis=0)), '--', color='black')
#plt.plot(rdf.r, normalization * rdf.density.mean(axis=0), '--', color='black')
if not head_groups:
top = maximum * 1.05
plt.plot([0.75, 0.75], [0, top], '--', color='black')
plt.fill_between([0.73, 0.77], [top, top], y2=[0, 0], color='grey', alpha=0.5)
plt.ylabel('Density (count / nm$^3$)', fontsize=14)
plt.xlabel('Distance from pore center (nm)', fontsize=14)
if polyols:
plt.ylim(-0.05, 1) # for diols only
if thiol_comparison:
if 'DMS' in residues:
plt.ylim(-0.015, 0.45) # for DMSO and acetone thiol comparison
elif 'SOH' in residues:
plt.ylim(-0.015, 0.5) # for mercaptoethanol and ethylene glycol comparison
#plt.ylim(-0.015, 12) # for nondonors
#plt.ylim(-0.015, 1)
plt.gcf().get_axes()[0].tick_params(labelsize=14)
#plt.legend(fontsize=13, loc=1, ncol=2, columnspacing=0.5) # for nondonors
plt.legend(fontsize=13, loc=1)
plt.tight_layout()
if simple_alcohols:
plt.savefig('simple_alcohol_rdf.pdf')
elif polyols:
plt.savefig('polyols_rdf.pdf')
elif thiol_comparison:
plt.savefig('thiol_comparison_%s.pdf' % residues[0])
elif ketones:
plt.savefig('ketone_rdf.pdf')
elif nondonors:
plt.savefig('nondonors_rdf.pdf')
plt.show()
| [
"LLC_Membranes.llclib.file_rw.save_object",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.ylabel",
"LLC_Membranes.llclib.file_rw.load_object",
"numpy.arange",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"LLC_Membranes.analysis.rdf.System",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.fill_between",
"numpy.max",
"numpy.zeros",
"numpy.argwhere",
"matplotlib.pyplot.tight_layout",
"LLC_Membranes.llclib.stats.confidence_interval",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] | [((4637, 4688), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Density (count / nm$^3$)"""'], {'fontsize': '(14)'}), "('Density (count / nm$^3$)', fontsize=14)\n", (4647, 4688), True, 'import matplotlib.pyplot as plt\n'), ((4689, 4746), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Distance from pore center (nm)"""'], {'fontsize': '(14)'}), "('Distance from pore center (nm)', fontsize=14)\n", (4699, 4746), True, 'import matplotlib.pyplot as plt\n'), ((5193, 5223), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'fontsize': '(13)', 'loc': '(1)'}), '(fontsize=13, loc=1)\n', (5203, 5223), True, 'import matplotlib.pyplot as plt\n'), ((5224, 5242), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5240, 5242), True, 'import matplotlib.pyplot as plt\n'), ((5520, 5530), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5528, 5530), True, 'import matplotlib.pyplot as plt\n'), ((607, 661), 'LLC_Membranes.llclib.file_rw.save_object', 'file_rw.save_object', (['rdf', "('%s/rdf_%s.pl' % (path, res))"], {}), "(rdf, '%s/rdf_%s.pl' % (path, res))\n", (626, 661), False, 'from LLC_Membranes.llclib import file_rw, stats\n'), ((1932, 2001), 'matplotlib.pyplot.plot', 'plt.plot', (['rdf.r', 'mean'], {'label': "('%s' % names.res_to_name[r])", 'linewidth': '(2)'}), "(rdf.r, mean, label='%s' % names.res_to_name[r], linewidth=2)\n", (1940, 2001), True, 'import matplotlib.pyplot as plt\n'), ((2003, 2101), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['rdf.r', '(rdf.errorbars[1, :] + mean)', '(mean - rdf.errorbars[0, :])'], {'alpha': 'opacity'}), '(rdf.r, rdf.errorbars[1, :] + mean, mean - rdf.errorbars[0,\n :], alpha=opacity)\n', (2019, 2101), True, 'import matplotlib.pyplot as plt\n'), ((2908, 2929), 'numpy.zeros', 'np.zeros', (['[nboot, 50]'], {}), '([nboot, 50])\n', (2916, 2929), True, 'import numpy as np\n'), ((3407, 3469), 'matplotlib.pyplot.plot', 'plt.plot', (['hg.r', 'mean', '"""--"""'], {'color': '"""black"""', 'label': '"""Head Groups"""'}), "(hg.r, mean, '--', color='black', label='Head Groups')\n", (3415, 3469), True, 'import matplotlib.pyplot as plt\n'), ((3546, 3643), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['hg.r', '(mean + error[1, :])', '(mean - error[0, :])'], {'alpha': 'opacity', 'color': '"""black"""'}), "(hg.r, mean + error[1, :], mean - error[0, :], alpha=\n opacity, color='black')\n", (3562, 3643), True, 'import matplotlib.pyplot as plt\n'), ((4502, 4555), 'matplotlib.pyplot.plot', 'plt.plot', (['[0.75, 0.75]', '[0, top]', '"""--"""'], {'color': '"""black"""'}), "([0.75, 0.75], [0, top], '--', color='black')\n", (4510, 4555), True, 'import matplotlib.pyplot as plt\n'), ((4557, 4635), 'matplotlib.pyplot.fill_between', 'plt.fill_between', (['[0.73, 0.77]', '[top, top]'], {'y2': '[0, 0]', 'color': '"""grey"""', 'alpha': '(0.5)'}), "([0.73, 0.77], [top, top], y2=[0, 0], color='grey', alpha=0.5)\n", (4573, 4635), True, 'import matplotlib.pyplot as plt\n'), ((4760, 4778), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.05)', '(1)'], {}), '(-0.05, 1)\n', (4768, 4778), True, 'import matplotlib.pyplot as plt\n'), ((5264, 5301), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""simple_alcohol_rdf.pdf"""'], {}), "('simple_alcohol_rdf.pdf')\n", (5275, 5301), True, 'import matplotlib.pyplot as plt\n'), ((355, 442), 'LLC_Membranes.analysis.rdf.System', 'System', (["('%s/%s' % (path, gro))", "('%s/%s' % (path, traj))", 'r', '"""NAcarb11V"""'], {'atoms': 'atoms'}), "('%s/%s' % (path, gro), '%s/%s' % (path, traj), r, 'NAcarb11V', atoms\n =atoms)\n", 
(361, 442), False, 'from LLC_Membranes.analysis.rdf import System\n'), ((451, 520), 'LLC_Membranes.analysis.rdf.System', 'System', (["('%s/%s' % (path, gro))", "('%s/%s' % (path, traj))", 'r', '"""NAcarb11V"""'], {}), "('%s/%s' % (path, gro), '%s/%s' % (path, traj), r, 'NAcarb11V')\n", (457, 520), False, 'from LLC_Membranes.analysis.rdf import System\n'), ((2736, 2791), 'LLC_Membranes.llclib.file_rw.load_object', 'file_rw.load_object', (["('%s/rdf_HII_CC1C2C3C4C5.pl' % path)"], {}), "('%s/rdf_HII_CC1C2C3C4C5.pl' % path)\n", (2755, 2791), False, 'from LLC_Membranes.llclib import file_rw, stats\n'), ((3139, 3174), 'LLC_Membranes.llclib.stats.confidence_interval', 'stats.confidence_interval', (['boot', '(68)'], {}), '(boot, 68)\n', (3164, 3174), False, 'from LLC_Membranes.llclib import file_rw, stats\n'), ((3222, 3234), 'numpy.max', 'np.max', (['mean'], {}), '(mean)\n', (3228, 3234), True, 'import numpy as np\n'), ((4843, 4865), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.015)', '(0.45)'], {}), '(-0.015, 0.45)\n', (4851, 4865), True, 'import matplotlib.pyplot as plt\n'), ((5317, 5347), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""polyols_rdf.pdf"""'], {}), "('polyols_rdf.pdf')\n", (5328, 5347), True, 'import matplotlib.pyplot as plt\n'), ((1596, 1643), 'LLC_Membranes.llclib.file_rw.load_object', 'file_rw.load_object', (["('%s/rdf_%s.pl' % (path, r))"], {}), "('%s/rdf_%s.pl' % (path, r))\n", (1615, 1643), False, 'from LLC_Membranes.llclib import file_rw, stats\n'), ((1829, 1853), 'numpy.argwhere', 'np.argwhere', (['(rdf.r > 0.4)'], {}), '(rdf.r > 0.4)\n', (1840, 1853), True, 'import numpy as np\n'), ((2990, 3023), 'numpy.arange', 'np.arange', (['d_head_groups.shape[0]'], {}), '(d_head_groups.shape[0])\n', (2999, 3023), True, 'import numpy as np\n'), ((3188, 3200), 'numpy.max', 'np.max', (['mean'], {}), '(mean)\n', (3194, 3200), True, 'import numpy as np\n'), ((4933, 4954), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-0.015)', '(0.5)'], {}), '(-0.015, 0.5)\n', (4941, 4954), True, 'import matplotlib.pyplot as plt\n'), ((5372, 5424), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('thiol_comparison_%s.pdf' % residues[0])"], {}), "('thiol_comparison_%s.pdf' % residues[0])\n", (5383, 5424), True, 'import matplotlib.pyplot as plt\n'), ((5440, 5469), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""ketone_rdf.pdf"""'], {}), "('ketone_rdf.pdf')\n", (5451, 5469), True, 'import matplotlib.pyplot as plt\n'), ((5067, 5076), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (5074, 5076), True, 'import matplotlib.pyplot as plt\n'), ((5487, 5519), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""nondonors_rdf.pdf"""'], {}), "('nondonors_rdf.pdf')\n", (5498, 5519), True, 'import matplotlib.pyplot as plt\n')] |
import sys
import os
import json
import nltk
import collections
from tensorflow.core.example import example_pb2
import struct
dm_single_close_quote = u'\u2019' # unicode
dm_double_close_quote = u'\u201d'
END_TOKENS = ['.', '!', '?', '...', "'", "`", '"', dm_single_close_quote, dm_double_close_quote,
")"] # acceptable ways to end a sentence
VOCAB_SIZE = 200000
CHUNK_SIZE = 1000 # num examples per chunk, for the chunked data
# We use these to separate the summary sentences in the .bin datafiles
SENTENCE_START = '<s>'
SENTENCE_END = '</s>'
finished_files_dir = "finished_files_piccolo"
chunks_dir = os.path.join(finished_files_dir, "chunked")
def fix_missing_period(line):
    if line[-1] in END_TOKENS: return line
    return line + " ."
def get_art_summary(text, summary):
    # Lowercase everything
    text = text.lower()
    summary = summary.lower()
    # Make article into a single string
    text = ' '.join(nltk.word_tokenize(text))
    # Make abstract into a single string, putting <s> and </s> tags around the sentences
    summary = f"{SENTENCE_START} {' '.join(nltk.word_tokenize(summary))} {SENTENCE_END}"
    return text, summary
def write_to_bin(dataset_jsonl_file, out_file, makevocab=False):
    if makevocab:
        vocab_counter = collections.Counter()
    with open(out_file, 'wb') as writer:
        with open(dataset_jsonl_file, 'r') as f:
            for idx, s in enumerate(f):
                obj = json.loads(s)
                if idx % 1000 == 0:
                    print(idx)
                # Get the strings to write to .bin file
                article, abstract = get_art_summary(obj['text'], obj['summary'])
                # Write to tf.Example
                tf_example = example_pb2.Example()
                tf_example.features.feature['article'].bytes_list.value.extend([article.encode()])
                tf_example.features.feature['abstract'].bytes_list.value.extend([abstract.encode()])
                tf_example_str = tf_example.SerializeToString()
                str_len = len(tf_example_str)
                writer.write(struct.pack('q', str_len))
                writer.write(struct.pack('%ds' % str_len, tf_example_str))
                # Write the vocab to file, if applicable
                if makevocab:
                    art_tokens = article.split(' ')
                    abs_tokens = abstract.split(' ')
                    abs_tokens = [t for t in abs_tokens if t not in [SENTENCE_START, SENTENCE_END]]  # remove these tags from vocab
                    tokens = art_tokens + abs_tokens
                    tokens = [t.strip() for t in tokens]  # strip
                    tokens = [t for t in tokens if t != ""]  # remove empty
                    vocab_counter.update(tokens)
    print(f"Finished writing file {out_file}")
    # write vocab to file
    if makevocab:
        print("Writing vocab file...")
        with open(os.path.join(finished_files_dir, "vocab"), 'w') as writer:
            for word, count in vocab_counter.most_common(VOCAB_SIZE):
                writer.write(word + ' ' + str(count) + '\n')
        print("Finished writing vocab file")
def chunk_file(set_name):
    in_file = finished_files_dir + '/%s.bin' % set_name
    reader = open(in_file, "rb")
    chunk = 0
    finished = False
    while not finished:
        chunk_fname = os.path.join(chunks_dir, '%s_%03d.bin' % (set_name, chunk))  # new chunk
        with open(chunk_fname, 'wb') as writer:
            for _ in range(CHUNK_SIZE):
                len_bytes = reader.read(8)
                if not len_bytes:
                    finished = True
                    break
                str_len = struct.unpack('q', len_bytes)[0]
                example_str = struct.unpack('%ds' % str_len, reader.read(str_len))[0]
                writer.write(struct.pack('q', str_len))
                writer.write(struct.pack('%ds' % str_len, example_str))
            chunk += 1
def chunk_all():
    # Make a dir to hold the chunks
    if not os.path.isdir(chunks_dir):
        os.mkdir(chunks_dir)
    # Chunk the data
    for set_name in ['train', 'test']:  # TODO VAL
        print("Splitting %s data into chunks..." % set_name)
        chunk_file(set_name)
    print("Saved chunked data in %s" % chunks_dir)
if __name__ == '__main__':
    if len(sys.argv) != 3:
        print("USAGE: python make_datafiles.py <train> <test>")
        sys.exit()
    train = sys.argv[1]
    test = sys.argv[2]
    write_to_bin(train, os.path.join(finished_files_dir, "train.bin"), makevocab=True)
    write_to_bin(test, os.path.join(finished_files_dir, "test.bin"))
    # Chunk the data. This splits each of train.bin, val.bin and test.bin into smaller chunks, each containing e.g. 1000 examples, and saves them in finished_files/chunks
    chunk_all() | [
"json.loads",
"nltk.word_tokenize",
"os.path.join",
"tensorflow.core.example.example_pb2.Example",
"struct.pack",
"collections.Counter",
"os.path.isdir",
"struct.unpack",
"os.mkdir",
"sys.exit"
] | [((622, 665), 'os.path.join', 'os.path.join', (['finished_files_dir', '"""chunked"""'], {}), "(finished_files_dir, 'chunked')\n", (634, 665), False, 'import os\n'), ((940, 964), 'nltk.word_tokenize', 'nltk.word_tokenize', (['text'], {}), '(text)\n', (958, 964), False, 'import nltk\n'), ((1273, 1294), 'collections.Counter', 'collections.Counter', ([], {}), '()\n', (1292, 1294), False, 'import collections\n'), ((3333, 3392), 'os.path.join', 'os.path.join', (['chunks_dir', "('%s_%03d.bin' % (set_name, chunk))"], {}), "(chunks_dir, '%s_%03d.bin' % (set_name, chunk))\n", (3345, 3392), False, 'import os\n'), ((3996, 4021), 'os.path.isdir', 'os.path.isdir', (['chunks_dir'], {}), '(chunks_dir)\n', (4009, 4021), False, 'import os\n'), ((4031, 4051), 'os.mkdir', 'os.mkdir', (['chunks_dir'], {}), '(chunks_dir)\n', (4039, 4051), False, 'import os\n'), ((4389, 4399), 'sys.exit', 'sys.exit', ([], {}), '()\n', (4397, 4399), False, 'import sys\n'), ((4473, 4518), 'os.path.join', 'os.path.join', (['finished_files_dir', '"""train.bin"""'], {}), "(finished_files_dir, 'train.bin')\n", (4485, 4518), False, 'import os\n'), ((4558, 4602), 'os.path.join', 'os.path.join', (['finished_files_dir', '"""test.bin"""'], {}), "(finished_files_dir, 'test.bin')\n", (4570, 4602), False, 'import os\n'), ((1096, 1123), 'nltk.word_tokenize', 'nltk.word_tokenize', (['summary'], {}), '(summary)\n', (1114, 1123), False, 'import nltk\n'), ((1446, 1459), 'json.loads', 'json.loads', (['s'], {}), '(s)\n', (1456, 1459), False, 'import json\n'), ((1728, 1749), 'tensorflow.core.example.example_pb2.Example', 'example_pb2.Example', ([], {}), '()\n', (1747, 1749), False, 'from tensorflow.core.example import example_pb2\n'), ((2903, 2944), 'os.path.join', 'os.path.join', (['finished_files_dir', '"""vocab"""'], {}), "(finished_files_dir, 'vocab')\n", (2915, 2944), False, 'import os\n'), ((2089, 2114), 'struct.pack', 'struct.pack', (['"""q"""', 'str_len'], {}), "('q', str_len)\n", (2100, 2114), False, 'import struct\n'), ((2145, 2189), 'struct.pack', 'struct.pack', (["('%ds' % str_len)", 'tf_example_str'], {}), "('%ds' % str_len, tf_example_str)\n", (2156, 2189), False, 'import struct\n'), ((3659, 3688), 'struct.unpack', 'struct.unpack', (['"""q"""', 'len_bytes'], {}), "('q', len_bytes)\n", (3672, 3688), False, 'import struct\n'), ((3808, 3833), 'struct.pack', 'struct.pack', (['"""q"""', 'str_len'], {}), "('q', str_len)\n", (3819, 3833), False, 'import struct\n'), ((3864, 3905), 'struct.pack', 'struct.pack', (["('%ds' % str_len)", 'example_str'], {}), "('%ds' % str_len, example_str)\n", (3875, 3905), False, 'import struct\n')] |
"""
Illustration of Oja's rule (Hebbian learning) in Spiking Neural Networks using LIF neuron.
Author: <NAME>
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_digits
import matplotlib.animation as animation
"""
Functions
"""
def sample_spherical(npoints, ndim): #sample a vector of dimension "ndim" from the unit sphere randomly
vec = np.random.randn(ndim, npoints)
vec /= np.linalg.norm(vec, axis=0)
return vec[:,0]
def simulate_neuron_Oja(Tsim, dt, trc, tref, vrest, vth, Jbias, alpha, e, input_vec, tau, eta):
N = int(np.round(Tsim/dt))
Vprev = 0
Jprev = 0
spike_train = np.zeros(N)
Vhist = np.zeros(N)
psc = np.zeros(N)
W_vec = np.zeros((N, len(e)))
W = alpha*e
W_vec[0,:] = W
Jbias_vec = np.zeros(N)
Jbias_vec[0] = Jbias
mutex = 0
for i in range(N):
J = np.inner(W, input_vec[i,:]) + Jbias
if mutex == 0:
V = (J + Jprev - (1-2*trc/dt)*Vprev)/(1+2*trc/dt) #bilinear transform
if V < vrest:
V = vrest
elif V > vth:
spike_train[i] = 1
V = vrest
mutex = np.round(tref/dt)
Vhist[i] = V
Jprev = J
Vprev = V
else:
mutex -= 1
if i > 0:
psc[i] = (spike_train[i] + spike_train[i-1])*(1/(1+2*tau/dt)) - ((1-2*tau/dt)/(1+2*tau/dt))*psc[i-1]
#update weights following Oja's rule
DELW = psc[i]*input_vec[i,:] - (psc[i]**2)*W
W = W + eta*DELW
W_vec[i,:] = W
Jbias_vec[i] = Jbias
return Vhist, spike_train, W_vec, psc, Jbias_vec
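# --- Illustrative sketch (not part of the original script) ---
# The weight update inside simulate_neuron_Oja implements Oja's rule,
#     delta_W = eta * (y * x - y**2 * W),
# with the filtered post-synaptic activity psc[i] playing the role of y and the
# current input vector playing the role of x. The standalone helper below applies
# the same update to plain arrays; the name _oja_step is hypothetical.
def _oja_step(W, x, y, eta):
    return W + eta * (y * x - (y ** 2) * W)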
def PSC_filter(Tsim, dt, tau):
t = np.linspace(0,Tsim,int(np.round(Tsim/dt)))
h = np.exp(-(t-Tsim/2)/tau)
h[0:len(h)//2] = 0
h = (1/dt)*h/np.sum(h)
return h
def normalize_imges(data): #normalize pixel values to [-1, 1]
for i in range(data.shape[0]):
img = data[i]
data[i] = 2*(img - min(img))/(max(img) - min(img)) - 1
return data
"""
Main
"""
np.random.seed(2) #to get reproducable results
plt.close('all')
D = 64 #data dimensions
F_max_l = 100 #100
F_max_h = 200
in_l = -1.0
in_h = 1.0
tref = 0.002 #2ms
trc = 0.02 #20ms
Tsim = 0.2
dt = 0.002
vrest = 0
vth = 1
Tlen = int(np.round(Tsim/dt))
digits = load_digits(n_class=1)
x = normalize_imges(digits.data)
input_vec = x[:Tlen,:]
amax = np.random.uniform(F_max_l,F_max_h,1) # maximum rate uniformly distributed between 100 and 200 HZ
#xi = np.random.uniform(in_l+0.05,in_h-0.05,1) # x-intercept
xi = np.random.uniform(in_l+0.05,in_h-0.05,1) # new idea x-intercept
alpha = (1/(1-np.exp((tref - 1/amax)/trc)) - 1)/(1-xi) #for LIF neuron
Jbias = 1-xi*alpha
e = sample_spherical(1, D)
Vhist, spike_train, W_vec, psc, Jbias_vec = simulate_neuron_Oja(Tsim, dt, trc, tref, vrest, vth, Jbias, alpha, e, input_vec, 0.05, 1)
plt.figure(1)
plt.gray()
plt.axis('off')
im = np.reshape(W_vec[-1,:], (8,8))
plt.imshow(im)
#if True:
plt.figure(2)
fig, ax = plt.subplots(1,1)
plt.gray()
plt.axis('off')
plt.title('On-line and Unsuppervised Pattern Learning', fontsize=15, color='black')
def img_anim(i):
im = np.reshape(W_vec[i,:], (8,8))
ax.matshow(im)
print("Time step: " + str(i) + "/" + str(Tlen))
anim2 = animation.FuncAnimation(fig, img_anim, frames=Tlen, interval=1)
#plt.show()
Writer = animation.writers['ffmpeg']
writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
#anim2.save('Hebbian.mp4', writer=writer)
plt.figure(4)
plt.subplot(2,2,1)
plt.gray()
plt.axis('off')
plt.title('Time step: 0ms', fontsize=15, color='black')
im = np.reshape(W_vec[0,:], (8,8))
plt.imshow(im)
plt.subplot(2,2,2)
plt.gray()
plt.axis('off')
plt.title('Time step: 20ms', fontsize=15, color='black')
im = np.reshape(W_vec[10,:], (8,8))
plt.imshow(im)
plt.subplot(2,2,3)
plt.gray()
plt.axis('off')
plt.title('Time step: 28ms', fontsize=15, color='black')
im = np.reshape(W_vec[14,:], (8,8))
plt.imshow(im)
plt.subplot(2,2,4)
plt.gray()
plt.axis('off')
plt.title('Time step: 60ms', fontsize=15, color='black')
im = np.reshape(W_vec[30,:], (8,8))
plt.imshow(im)
| [
"numpy.linalg.norm",
"matplotlib.pyplot.imshow",
"numpy.reshape",
"matplotlib.pyplot.close",
"numpy.exp",
"numpy.random.seed",
"matplotlib.pyplot.axis",
"numpy.round",
"matplotlib.pyplot.gray",
"numpy.inner",
"matplotlib.pyplot.title",
"numpy.random.randn",
"matplotlib.animation.FuncAnimation",
"sklearn.datasets.load_digits",
"numpy.sum",
"matplotlib.pyplot.figure",
"numpy.zeros",
"numpy.random.uniform",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.subplots"
] | [((2114, 2131), 'numpy.random.seed', 'np.random.seed', (['(2)'], {}), '(2)\n', (2128, 2131), True, 'import numpy as np\n'), ((2161, 2177), 'matplotlib.pyplot.close', 'plt.close', (['"""all"""'], {}), "('all')\n", (2170, 2177), True, 'import matplotlib.pyplot as plt\n'), ((2375, 2397), 'sklearn.datasets.load_digits', 'load_digits', ([], {'n_class': '(1)'}), '(n_class=1)\n', (2386, 2397), False, 'from sklearn.datasets import load_digits\n'), ((2462, 2500), 'numpy.random.uniform', 'np.random.uniform', (['F_max_l', 'F_max_h', '(1)'], {}), '(F_max_l, F_max_h, 1)\n', (2479, 2500), True, 'import numpy as np\n'), ((2625, 2671), 'numpy.random.uniform', 'np.random.uniform', (['(in_l + 0.05)', '(in_h - 0.05)', '(1)'], {}), '(in_l + 0.05, in_h - 0.05, 1)\n', (2642, 2671), True, 'import numpy as np\n'), ((2942, 2955), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (2952, 2955), True, 'import matplotlib.pyplot as plt\n'), ((2956, 2966), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (2964, 2966), True, 'import matplotlib.pyplot as plt\n'), ((2968, 2983), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (2976, 2983), True, 'import matplotlib.pyplot as plt\n'), ((2989, 3021), 'numpy.reshape', 'np.reshape', (['W_vec[-1, :]', '(8, 8)'], {}), '(W_vec[-1, :], (8, 8))\n', (2999, 3021), True, 'import numpy as np\n'), ((3020, 3034), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (3030, 3034), True, 'import matplotlib.pyplot as plt\n'), ((3048, 3061), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (3058, 3061), True, 'import matplotlib.pyplot as plt\n'), ((3072, 3090), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {}), '(1, 1)\n', (3084, 3090), True, 'import matplotlib.pyplot as plt\n'), ((3090, 3100), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (3098, 3100), True, 'import matplotlib.pyplot as plt\n'), ((3102, 3117), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3110, 3117), True, 'import matplotlib.pyplot as plt\n'), ((3118, 3206), 'matplotlib.pyplot.title', 'plt.title', (['"""On-line and Unsuppervised Pattern Learning"""'], {'fontsize': '(15)', 'color': '"""black"""'}), "('On-line and Unsuppervised Pattern Learning', fontsize=15, color=\n 'black')\n", (3127, 3206), True, 'import matplotlib.pyplot as plt\n'), ((3348, 3411), 'matplotlib.animation.FuncAnimation', 'animation.FuncAnimation', (['fig', 'img_anim'], {'frames': 'Tlen', 'interval': '(1)'}), '(fig, img_anim, frames=Tlen, interval=1)\n', (3371, 3411), True, 'import matplotlib.animation as animation\n'), ((3571, 3584), 'matplotlib.pyplot.figure', 'plt.figure', (['(4)'], {}), '(4)\n', (3581, 3584), True, 'import matplotlib.pyplot as plt\n'), ((3585, 3605), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(1)'], {}), '(2, 2, 1)\n', (3596, 3605), True, 'import matplotlib.pyplot as plt\n'), ((3604, 3614), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (3612, 3614), True, 'import matplotlib.pyplot as plt\n'), ((3616, 3631), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3624, 3631), True, 'import matplotlib.pyplot as plt\n'), ((3632, 3687), 'matplotlib.pyplot.title', 'plt.title', (['"""Time step: 0ms"""'], {'fontsize': '(15)', 'color': '"""black"""'}), "('Time step: 0ms', fontsize=15, color='black')\n", (3641, 3687), True, 'import matplotlib.pyplot as plt\n'), ((3693, 3724), 'numpy.reshape', 'np.reshape', (['W_vec[0, :]', '(8, 8)'], {}), '(W_vec[0, :], (8, 8))\n', 
(3703, 3724), True, 'import numpy as np\n'), ((3723, 3737), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (3733, 3737), True, 'import matplotlib.pyplot as plt\n'), ((3739, 3759), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(2)'], {}), '(2, 2, 2)\n', (3750, 3759), True, 'import matplotlib.pyplot as plt\n'), ((3758, 3768), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (3766, 3768), True, 'import matplotlib.pyplot as plt\n'), ((3770, 3785), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3778, 3785), True, 'import matplotlib.pyplot as plt\n'), ((3786, 3842), 'matplotlib.pyplot.title', 'plt.title', (['"""Time step: 20ms"""'], {'fontsize': '(15)', 'color': '"""black"""'}), "('Time step: 20ms', fontsize=15, color='black')\n", (3795, 3842), True, 'import matplotlib.pyplot as plt\n'), ((3848, 3880), 'numpy.reshape', 'np.reshape', (['W_vec[10, :]', '(8, 8)'], {}), '(W_vec[10, :], (8, 8))\n', (3858, 3880), True, 'import numpy as np\n'), ((3879, 3893), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (3889, 3893), True, 'import matplotlib.pyplot as plt\n'), ((3895, 3915), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(3)'], {}), '(2, 2, 3)\n', (3906, 3915), True, 'import matplotlib.pyplot as plt\n'), ((3914, 3924), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (3922, 3924), True, 'import matplotlib.pyplot as plt\n'), ((3926, 3941), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (3934, 3941), True, 'import matplotlib.pyplot as plt\n'), ((3942, 3998), 'matplotlib.pyplot.title', 'plt.title', (['"""Time step: 28ms"""'], {'fontsize': '(15)', 'color': '"""black"""'}), "('Time step: 28ms', fontsize=15, color='black')\n", (3951, 3998), True, 'import matplotlib.pyplot as plt\n'), ((4004, 4036), 'numpy.reshape', 'np.reshape', (['W_vec[14, :]', '(8, 8)'], {}), '(W_vec[14, :], (8, 8))\n', (4014, 4036), True, 'import numpy as np\n'), ((4035, 4049), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (4045, 4049), True, 'import matplotlib.pyplot as plt\n'), ((4051, 4071), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(2)', '(4)'], {}), '(2, 2, 4)\n', (4062, 4071), True, 'import matplotlib.pyplot as plt\n'), ((4070, 4080), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (4078, 4080), True, 'import matplotlib.pyplot as plt\n'), ((4082, 4097), 'matplotlib.pyplot.axis', 'plt.axis', (['"""off"""'], {}), "('off')\n", (4090, 4097), True, 'import matplotlib.pyplot as plt\n'), ((4098, 4154), 'matplotlib.pyplot.title', 'plt.title', (['"""Time step: 60ms"""'], {'fontsize': '(15)', 'color': '"""black"""'}), "('Time step: 60ms', fontsize=15, color='black')\n", (4107, 4154), True, 'import matplotlib.pyplot as plt\n'), ((4160, 4192), 'numpy.reshape', 'np.reshape', (['W_vec[30, :]', '(8, 8)'], {}), '(W_vec[30, :], (8, 8))\n', (4170, 4192), True, 'import numpy as np\n'), ((4191, 4205), 'matplotlib.pyplot.imshow', 'plt.imshow', (['im'], {}), '(im)\n', (4201, 4205), True, 'import matplotlib.pyplot as plt\n'), ((381, 411), 'numpy.random.randn', 'np.random.randn', (['ndim', 'npoints'], {}), '(ndim, npoints)\n', (396, 411), True, 'import numpy as np\n'), ((423, 450), 'numpy.linalg.norm', 'np.linalg.norm', (['vec'], {'axis': '(0)'}), '(vec, axis=0)\n', (437, 450), True, 'import numpy as np\n'), ((645, 656), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (653, 656), True, 'import numpy as np\n'), ((669, 680), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (677, 680), 
True, 'import numpy as np\n'), ((691, 702), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (699, 702), True, 'import numpy as np\n'), ((789, 800), 'numpy.zeros', 'np.zeros', (['N'], {}), '(N)\n', (797, 800), True, 'import numpy as np\n'), ((1813, 1842), 'numpy.exp', 'np.exp', (['(-(t - Tsim / 2) / tau)'], {}), '(-(t - Tsim / 2) / tau)\n', (1819, 1842), True, 'import numpy as np\n'), ((2346, 2365), 'numpy.round', 'np.round', (['(Tsim / dt)'], {}), '(Tsim / dt)\n', (2354, 2365), True, 'import numpy as np\n'), ((3229, 3260), 'numpy.reshape', 'np.reshape', (['W_vec[i, :]', '(8, 8)'], {}), '(W_vec[i, :], (8, 8))\n', (3239, 3260), True, 'import numpy as np\n'), ((580, 599), 'numpy.round', 'np.round', (['(Tsim / dt)'], {}), '(Tsim / dt)\n', (588, 599), True, 'import numpy as np\n'), ((1877, 1886), 'numpy.sum', 'np.sum', (['h'], {}), '(h)\n', (1883, 1886), True, 'import numpy as np\n'), ((876, 904), 'numpy.inner', 'np.inner', (['W', 'input_vec[i, :]'], {}), '(W, input_vec[i, :])\n', (884, 904), True, 'import numpy as np\n'), ((1785, 1804), 'numpy.round', 'np.round', (['(Tsim / dt)'], {}), '(Tsim / dt)\n', (1793, 1804), True, 'import numpy as np\n'), ((2703, 2734), 'numpy.exp', 'np.exp', (['((tref - 1 / amax) / trc)'], {}), '((tref - 1 / amax) / trc)\n', (2709, 2734), True, 'import numpy as np\n'), ((1180, 1199), 'numpy.round', 'np.round', (['(tref / dt)'], {}), '(tref / dt)\n', (1188, 1199), True, 'import numpy as np\n')] |
import doctest
from unittest import TextTestRunner
import ciphers
import polymod
if __name__ == '__main__':
    runner = TextTestRunner()
    for module in [
        ciphers,
        polymod
    ]:
        runner.run(doctest.DocTestSuite(module))
| [
"doctest.DocTestSuite",
"unittest.TextTestRunner"
] | [((123, 139), 'unittest.TextTestRunner', 'TextTestRunner', ([], {}), '()\n', (137, 139), False, 'from unittest import TextTestRunner\n'), ((219, 247), 'doctest.DocTestSuite', 'doctest.DocTestSuite', (['module'], {}), '(module)\n', (239, 247), False, 'import doctest\n')] |
from django.contrib import admin
from django.utils.translation import gettext as _
from django.contrib.auth.models import User
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from .models import *
class EstablishmentOperatingHoursInline(admin.TabularInline):
model = EstablishmentOperatingHours
extra = 0
class EstablishmentPromotionsInline(admin.TabularInline):
model = EstablishmentPromotions
extra = 0
class EstablishmentEventsInline(admin.TabularInline):
model = EstablishmentEvents
extra = 0
class EstablishmentPhotoInline(admin.TabularInline):
model = EstablishmentPhoto
extra = 0
class EstablishmentManagerInline(admin.TabularInline):
model = EstablishmentManager
extra = 0
class TableInline(admin.TabularInline):
model = Table
extra = 0
class EstablishmentAdmin(admin.ModelAdmin):
model = Establishment
inlines = (
EstablishmentOperatingHoursInline,
EstablishmentPromotionsInline,
EstablishmentEventsInline,
EstablishmentPhotoInline,
EstablishmentManagerInline,
TableInline,
)
list_display = (
'name',
'cuisine_type',
'enabled',
'opened',
)
class MenuItemInline(admin.TabularInline):
model = MenuItem
extra = 0
class MenuAdmin(admin.ModelAdmin):
model = Menu
inlines = (
MenuItemInline,
)
list_display = ('name', 'establishment', 'menu_items_count')
def menu_items_count(self, obj) -> int:
return obj.items.count()
menu_items_count.short_description = _('Menu items')
class MenuItemAdmin(admin.ModelAdmin):
model = MenuItem
list_display = ('name', 'establishment', 'price', 'category', 'serve_up', 'available')
def establishment(self, obj) -> Establishment:
return obj.menu.establishment
establishment.short_description = _('Establishment')
establishment.admin_order_field = 'menu__establishment'
class OrderInline(admin.TabularInline):
model = Order
extra = 0
class BillPaymentInline(admin.TabularInline):
model = BillPayment
extra = 0
class UserRatingInline(admin.TabularInline):
model = UserRating
extra = 0
class BillAdmin(admin.ModelAdmin):
model = Bill
inlines = (
OrderInline,
BillPaymentInline,
UserRatingInline,
)
readonly_fields = (
'table',
'customers',
'establishment',
'payment_date',
'opening_date',
)
list_display = (
'table',
'establishment',
'payment_date',
'opening_date',
)
class TableAdmin(admin.ModelAdmin):
model = Table
list_display = ('name', 'establishment',
'table_zone', 'enabled', 'is_available')
def establishment(self, obj) -> Establishment:
return obj.menu.establishment
establishment.short_description = _('Establishment')
establishment.admin_order_field = 'menu__establishment'
def is_available(self, obj) -> bool:
return obj.is_available
is_available.short_description = _('Available')
is_available.admin_order_field = 'bill__payment_date'
is_available.boolean = True
class TableZoneAdmin(admin.ModelAdmin):
model = TableZone
list_display = ('name', 'enabled', 'tables_count')
def tables_count(self, obj) -> int:
return obj.zone.count()
tables_count.short_description = _('# tables')
tables_count.admin_order_field = 'zone__name'
class EmployeeAdmin(admin.ModelAdmin):
model = Employee
list_display = ('user', 'establishment', 'user_type', 'cpf')
class OfflineCompensationsAdmin(admin.ModelAdmin):
model = OfflineCompensations
list_display = ('establishment', 'month', 'value', 'date_compensation')
class UserProfileInline(admin.TabularInline):
model = UserProfile
class UserAdmin(BaseUserAdmin):
model = User
admin.site.register(Establishment, EstablishmentAdmin)
admin.site.register(Amenity)
admin.site.register(Menu, MenuAdmin)
admin.site.register(ItemObservations)
admin.site.register(MenuItem, MenuItemAdmin)
admin.site.register(ItemCategory)
admin.site.register(TableZone, TableZoneAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Bill, BillAdmin)
admin.site.register(OfflineCompensations, OfflineCompensationsAdmin)
admin.site.register(CuisineType)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(UserPayment)
admin.site.register(MoipWirecardAPP)
admin.site.register(MoipWirecardCustomer)
admin.site.register(UserCreditCard)
admin.site.register(Employee, EmployeeAdmin)
admin.site.register(Profile)
| [
"django.contrib.admin.site.unregister",
"django.contrib.admin.site.register",
"django.utils.translation.gettext"
] | [((3918, 3972), 'django.contrib.admin.site.register', 'admin.site.register', (['Establishment', 'EstablishmentAdmin'], {}), '(Establishment, EstablishmentAdmin)\n', (3937, 3972), False, 'from django.contrib import admin\n'), ((3973, 4001), 'django.contrib.admin.site.register', 'admin.site.register', (['Amenity'], {}), '(Amenity)\n', (3992, 4001), False, 'from django.contrib import admin\n'), ((4002, 4038), 'django.contrib.admin.site.register', 'admin.site.register', (['Menu', 'MenuAdmin'], {}), '(Menu, MenuAdmin)\n', (4021, 4038), False, 'from django.contrib import admin\n'), ((4039, 4076), 'django.contrib.admin.site.register', 'admin.site.register', (['ItemObservations'], {}), '(ItemObservations)\n', (4058, 4076), False, 'from django.contrib import admin\n'), ((4077, 4121), 'django.contrib.admin.site.register', 'admin.site.register', (['MenuItem', 'MenuItemAdmin'], {}), '(MenuItem, MenuItemAdmin)\n', (4096, 4121), False, 'from django.contrib import admin\n'), ((4122, 4155), 'django.contrib.admin.site.register', 'admin.site.register', (['ItemCategory'], {}), '(ItemCategory)\n', (4141, 4155), False, 'from django.contrib import admin\n'), ((4156, 4202), 'django.contrib.admin.site.register', 'admin.site.register', (['TableZone', 'TableZoneAdmin'], {}), '(TableZone, TableZoneAdmin)\n', (4175, 4202), False, 'from django.contrib import admin\n'), ((4203, 4241), 'django.contrib.admin.site.register', 'admin.site.register', (['Table', 'TableAdmin'], {}), '(Table, TableAdmin)\n', (4222, 4241), False, 'from django.contrib import admin\n'), ((4242, 4278), 'django.contrib.admin.site.register', 'admin.site.register', (['Bill', 'BillAdmin'], {}), '(Bill, BillAdmin)\n', (4261, 4278), False, 'from django.contrib import admin\n'), ((4279, 4347), 'django.contrib.admin.site.register', 'admin.site.register', (['OfflineCompensations', 'OfflineCompensationsAdmin'], {}), '(OfflineCompensations, OfflineCompensationsAdmin)\n', (4298, 4347), False, 'from django.contrib import admin\n'), ((4348, 4380), 'django.contrib.admin.site.register', 'admin.site.register', (['CuisineType'], {}), '(CuisineType)\n', (4367, 4380), False, 'from django.contrib import admin\n'), ((4381, 4408), 'django.contrib.admin.site.unregister', 'admin.site.unregister', (['User'], {}), '(User)\n', (4402, 4408), False, 'from django.contrib import admin\n'), ((4409, 4445), 'django.contrib.admin.site.register', 'admin.site.register', (['User', 'UserAdmin'], {}), '(User, UserAdmin)\n', (4428, 4445), False, 'from django.contrib import admin\n'), ((4446, 4478), 'django.contrib.admin.site.register', 'admin.site.register', (['UserPayment'], {}), '(UserPayment)\n', (4465, 4478), False, 'from django.contrib import admin\n'), ((4479, 4515), 'django.contrib.admin.site.register', 'admin.site.register', (['MoipWirecardAPP'], {}), '(MoipWirecardAPP)\n', (4498, 4515), False, 'from django.contrib import admin\n'), ((4516, 4557), 'django.contrib.admin.site.register', 'admin.site.register', (['MoipWirecardCustomer'], {}), '(MoipWirecardCustomer)\n', (4535, 4557), False, 'from django.contrib import admin\n'), ((4558, 4593), 'django.contrib.admin.site.register', 'admin.site.register', (['UserCreditCard'], {}), '(UserCreditCard)\n', (4577, 4593), False, 'from django.contrib import admin\n'), ((4594, 4638), 'django.contrib.admin.site.register', 'admin.site.register', (['Employee', 'EmployeeAdmin'], {}), '(Employee, EmployeeAdmin)\n', (4613, 4638), False, 'from django.contrib import admin\n'), ((4639, 4667), 'django.contrib.admin.site.register', 
'admin.site.register', (['Profile'], {}), '(Profile)\n', (4658, 4667), False, 'from django.contrib import admin\n'), ((1598, 1613), 'django.utils.translation.gettext', '_', (['"""Menu items"""'], {}), "('Menu items')\n", (1599, 1613), True, 'from django.utils.translation import gettext as _\n'), ((1895, 1913), 'django.utils.translation.gettext', '_', (['"""Establishment"""'], {}), "('Establishment')\n", (1896, 1913), True, 'from django.utils.translation import gettext as _\n'), ((2916, 2934), 'django.utils.translation.gettext', '_', (['"""Establishment"""'], {}), "('Establishment')\n", (2917, 2934), True, 'from django.utils.translation import gettext as _\n'), ((3106, 3120), 'django.utils.translation.gettext', '_', (['"""Available"""'], {}), "('Available')\n", (3107, 3120), True, 'from django.utils.translation import gettext as _\n'), ((3440, 3453), 'django.utils.translation.gettext', '_', (['"""# tables"""'], {}), "('# tables')\n", (3441, 3453), True, 'from django.utils.translation import gettext as _\n')] |
import math
from torch import nn, Tensor
from helpers import freeze_params
import torch
class MaskedNorm(nn.Module):
"""
Original Code from:
https://discuss.pytorch.org/t/batchnorm-for-different-sized-samples-in-batch/44251/8
"""
def __init__(self, norm_type, num_groups, num_features):
super().__init__()
self.norm_type = norm_type
if self.norm_type == "batch":
self.norm = nn.BatchNorm1d(num_features=num_features)
elif self.norm_type == "group":
self.norm = nn.GroupNorm(num_groups=num_groups, num_channels=num_features)
elif self.norm_type == "layer":
self.norm = nn.LayerNorm(normalized_shape=num_features)
else:
raise ValueError("Unsupported Normalization Layer")
self.num_features = num_features
def forward(self, x: Tensor, mask: Tensor):
if self.training:
reshaped = x.reshape([-1, self.num_features])
reshaped_mask = mask.reshape([-1, 1]) > 0
selected = torch.masked_select(reshaped, reshaped_mask).reshape(
[-1, self.num_features]
)
batch_normed = self.norm(selected)
scattered = reshaped.masked_scatter(reshaped_mask, batch_normed)
return scattered.reshape([x.shape[0], -1, self.num_features])
else:
reshaped = x.reshape([-1, self.num_features])
batched_normed = self.norm(reshaped)
return batched_normed.reshape([x.shape[0], -1, self.num_features])
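# --- Illustrative sketch (not part of the original module) ---
# Minimal use of MaskedNorm, assuming inputs of shape (batch, time, features)
# and a 0/1 padding mask of shape (batch, time, 1); the sizes and the helper
# name _demo_masked_norm are made up for the example.
def _demo_masked_norm():
    norm = MaskedNorm(norm_type="batch", num_groups=1, num_features=8)
    x = torch.randn(2, 5, 8)
    mask = torch.ones(2, 5, 1)
    mask[0, 3:] = 0  # pretend the first sequence is padded after step 3
    norm.train()
    return norm(x, mask).shape  # -> torch.Size([2, 5, 8])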
class Embeddings(nn.Module):
"""
Simple embeddings class
"""
# pylint: disable=unused-argument
def __init__(self,
embedding_dim: int = 64,
scale: bool = False,
vocab_size: int = 0,
padding_idx: int = 1,
freeze: bool = False,
**kwargs):
"""
Create new embeddings for the vocabulary.
Use scaling for the Transformer.
:param embedding_dim:
:param scale:
:param vocab_size:
:param padding_idx:
:param freeze: freeze the embeddings during training
"""
super(Embeddings, self).__init__()
self.embedding_dim = embedding_dim
self.scale = scale
self.vocab_size = vocab_size
self.lut = nn.Embedding(vocab_size, self.embedding_dim,
padding_idx=padding_idx)
if freeze:
freeze_params(self)
# pylint: disable=arguments-differ
def forward(self, x: Tensor) -> Tensor:
"""
Perform lookup for input `x` in the embedding table.
:param x: index in the vocabulary
:return: embedded representation for `x`
"""
if self.scale:
return self.lut(x) * math.sqrt(self.embedding_dim)
return self.lut(x)
def __repr__(self):
return "%s(embedding_dim=%d, vocab_size=%d)" % (
self.__class__.__name__, self.embedding_dim, self.vocab_size)
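# --- Illustrative sketch (not part of the original module) ---
# Minimal use of Embeddings: look up a small batch of token ids. The vocabulary
# size and ids below are made up, and _demo_embeddings is a hypothetical helper.
def _demo_embeddings():
    emb = Embeddings(embedding_dim=64, scale=True, vocab_size=100, padding_idx=1)
    tokens = torch.tensor([[5, 7, 1], [2, 3, 1]])  # (batch, time); 1 is the padding index
    return emb(tokens).shape  # -> torch.Size([2, 3, 64])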
| [
"torch.nn.GroupNorm",
"helpers.freeze_params",
"torch.nn.LayerNorm",
"math.sqrt",
"torch.masked_select",
"torch.nn.BatchNorm1d",
"torch.nn.Embedding"
] | [((2369, 2438), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'self.embedding_dim'], {'padding_idx': 'padding_idx'}), '(vocab_size, self.embedding_dim, padding_idx=padding_idx)\n', (2381, 2438), False, 'from torch import nn, Tensor\n'), ((441, 482), 'torch.nn.BatchNorm1d', 'nn.BatchNorm1d', ([], {'num_features': 'num_features'}), '(num_features=num_features)\n', (455, 482), False, 'from torch import nn, Tensor\n'), ((2503, 2522), 'helpers.freeze_params', 'freeze_params', (['self'], {}), '(self)\n', (2516, 2522), False, 'from helpers import freeze_params\n'), ((547, 609), 'torch.nn.GroupNorm', 'nn.GroupNorm', ([], {'num_groups': 'num_groups', 'num_channels': 'num_features'}), '(num_groups=num_groups, num_channels=num_features)\n', (559, 609), False, 'from torch import nn, Tensor\n'), ((2840, 2869), 'math.sqrt', 'math.sqrt', (['self.embedding_dim'], {}), '(self.embedding_dim)\n', (2849, 2869), False, 'import math\n'), ((674, 717), 'torch.nn.LayerNorm', 'nn.LayerNorm', ([], {'normalized_shape': 'num_features'}), '(normalized_shape=num_features)\n', (686, 717), False, 'from torch import nn, Tensor\n'), ((1048, 1092), 'torch.masked_select', 'torch.masked_select', (['reshaped', 'reshaped_mask'], {}), '(reshaped, reshaped_mask)\n', (1067, 1092), False, 'import torch\n')] |
import os
import types
from io import BytesIO
from tarfile import TarFile
import tempfile
import importlib
import importlib.machinery
import hashlib
import sys
from logging import getLogger
__all__ = ('extend', 'extend_ray', 'extend_cloudpickle')
log = getLogger(__name__)
TEMPDIR_ID = 'MODULEPICKLE'
def md5(compressed):
md5 = hashlib.md5()
md5.update(compressed)
return md5.hexdigest()[:16] # 32 bytes ought to be enough for everyone
class Package():
def __init__(self, name, compressed):
self.name = name
self.compressed = compressed
self.md5 = md5(compressed)
def invalidate_caches(self):
# Chuck out any modules that come from one of our temp dirs, so that when they get importer next time it's imported from
# the shiny new temp dir
modules = list(sys.modules)
for k in modules:
v = sys.modules[k]
filepath = getattr(v, '__file__', '') or ''
if f'{TEMPDIR_ID}-{self.name}-' in filepath:
del sys.modules[k]
# And then invalidate the cache of everyone on the meta_path, just to be safe.
importlib.invalidate_caches()
def uninstall(self):
sys.path = [p for p in sys.path if f'{TEMPDIR_ID}-{self.name}-' not in p]
def extract(self):
# Salt the temp directory with the hashcode of the compressed dir, so that when the next copy of it comes down the line,
# we can either reuse the existing dir if it's the same, or point ourselves at a new one if it isn't.
dirpath = tempfile.mkdtemp(prefix=f'{TEMPDIR_ID}-{self.name}-{self.md5}-')
bs = BytesIO(self.compressed)
with TarFile(fileobj=bs) as tf:
tf.extractall(os.path.join(dirpath))
return dirpath
def install(self):
"""'Installing' this package means extracting it to a hash-salted temp dir and then appending the dir to the path"""
# Only need to install it if the hash of the dir has changed since we last added it to the path
if not any(self.md5 in p for p in sys.path):
self.uninstall()
self.invalidate_caches()
sys.path.append(self.extract())
def load(self, name):
self.install()
return importlib.import_module(name)
def compress(packagename):
tar = BytesIO()
with TarFile(fileobj=tar, mode='w') as tf:
tf.add(packagename, packagename)
#TODO: This was originally gzipped, but the gzipped value seems to change on repeated compressions, breaking hashing.
# Looks like the issue is a timestamp that can be overriden with a parameter, but let's leave it uncompressed for now.
return tar.getvalue()
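# --- Illustrative sketch (not part of the original module) ---
# The TODO above notes that gzip output changed between runs because the header
# embeds a timestamp, which breaks the md5-based caching. One possible remedy
# (an assumption, not the author's fix) is to pin the mtime field so compression
# becomes deterministic; _compress_deterministic is a hypothetical helper.
def _compress_deterministic(raw: bytes) -> bytes:
    import gzip
    out = BytesIO()
    with gzip.GzipFile(fileobj=out, mode='wb', mtime=0) as gz:
        gz.write(raw)
    return out.getvalue()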
def import_compressed(name, package):
return package.load(name)
def import_global(module, obj):
return obj
def packagename(module):
# The package we want to zip up is the first part of the module name
#TODO: Check this holds on relative imports
return module.__name__.split('.')[0]
def is_local(module):
# If the module is in the current working directory,
# and it doesn't have `site-packages` in it's path (which probably means it's part of a local virtualenv)
# assume it's local and that it's cool to pickle it.
path = getattr(module, '__file__', '')
return path.startswith(os.getcwd()) and ('site-packages' not in path)
def extend(base):
"""Create a Pickler that can pickle packages by inheriting from `base`
We're dynamically inheriting from `base` here because my principal use case is extending ray's pickler, and ray's
installation dependencies are vast. Don't want to truck that around for a one-module package which works just as
well with cloudpickle.
"""
class ModulePickler(base):
dispatch = base.dispatch.copy()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.packages = {}
def compress_package(self, name):
# The same package might contain many of the modules a function references, so it makes sense to cache them
# as we go.
if name not in self.packages:
compressed = compress(name)
self.packages[name] = Package(name, compressed)
return self.packages[name]
def save_module(self, obj):
if is_local(obj):
args = (obj.__name__, self.compress_package(packagename(obj)))
return self.save_reduce(import_compressed, args, obj=obj)
else:
return super().save_module(obj)
dispatch[types.ModuleType] = save_module
def save_global(self, obj, *args, **kwargs):
module = sys.modules[obj.__module__]
# This is a dumb trick to handle my incomprehension of pickletools.
# The problem is that sometimes a global will be unpickled before it's module is, which will throw an error.
# Here, if we haven't seen the package before, we require it to reconstruct the global.
# There is surely a better way if you understand the pickle VM better than I do.
if is_local(module) and (packagename(module) not in self.packages):
args = (module, obj)
return self.save_reduce(import_global, args, obj=obj)
return super().save_global(obj, *args, **kwargs)
dispatch[type] = save_global
return ModulePickler
def extend_ray():
"""Extends Ray's CloudPickler with a ModulePickler"""
import ray
import ray.cloudpickle
ray.cloudpickle.CloudPickler = extend(ray.cloudpickle.CloudPickler)
ray.cloudpickle.dump.__globals__['CloudPickler'] = ray.cloudpickle.CloudPickler
ray.cloudpickle.dumps.__globals__['CloudPickler'] = ray.cloudpickle.CloudPickler
def extend_cloudpickle():
"""Extends cloudpickle's CloudPickler with a ModulePickler"""
import cloudpickle
cloudpickle.CloudPickler = extend(cloudpickle.CloudPickler)
cloudpickle.dump.__globals__['CloudPickler'] = cloudpickle.CloudPickler
cloudpickle.dumps.__globals__['CloudPickler'] = cloudpickle.CloudPickler
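# --- Illustrative sketch (not part of the original module) ---
# Typical use suggested by the docstrings above: patch cloudpickle once, then
# pickle objects defined in modules under the current working directory.
# _demo_extend_cloudpickle is a hypothetical helper, not part of the API.
def _demo_extend_cloudpickle(obj):
    import cloudpickle
    extend_cloudpickle()
    return cloudpickle.dumps(obj)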
| [
"logging.getLogger",
"importlib.invalidate_caches",
"hashlib.md5",
"importlib.import_module",
"tarfile.TarFile",
"io.BytesIO",
"os.path.join",
"os.getcwd",
"tempfile.mkdtemp"
] | [((255, 274), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (264, 274), False, 'from logging import getLogger\n'), ((336, 349), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (347, 349), False, 'import hashlib\n'), ((2328, 2337), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (2335, 2337), False, 'from io import BytesIO\n'), ((1144, 1173), 'importlib.invalidate_caches', 'importlib.invalidate_caches', ([], {}), '()\n', (1171, 1173), False, 'import importlib\n'), ((1564, 1628), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': 'f"""{TEMPDIR_ID}-{self.name}-{self.md5}-"""'}), "(prefix=f'{TEMPDIR_ID}-{self.name}-{self.md5}-')\n", (1580, 1628), False, 'import tempfile\n'), ((1642, 1666), 'io.BytesIO', 'BytesIO', (['self.compressed'], {}), '(self.compressed)\n', (1649, 1666), False, 'from io import BytesIO\n'), ((2260, 2289), 'importlib.import_module', 'importlib.import_module', (['name'], {}), '(name)\n', (2283, 2289), False, 'import importlib\n'), ((2347, 2377), 'tarfile.TarFile', 'TarFile', ([], {'fileobj': 'tar', 'mode': '"""w"""'}), "(fileobj=tar, mode='w')\n", (2354, 2377), False, 'from tarfile import TarFile\n'), ((1680, 1699), 'tarfile.TarFile', 'TarFile', ([], {'fileobj': 'bs'}), '(fileobj=bs)\n', (1687, 1699), False, 'from tarfile import TarFile\n'), ((3320, 3331), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (3329, 3331), False, 'import os\n'), ((1733, 1754), 'os.path.join', 'os.path.join', (['dirpath'], {}), '(dirpath)\n', (1745, 1754), False, 'import os\n')] |
from sklearn.metrics import accuracy_score
from sklearn.ensemble import RandomForestRegressor
import pickle
import json
import numpy as np
import torch
# relation pairs: {class1query1...class1query15,class2query1...}
# rf relations are rounded to the nearest 0.01
# rfRelations = rf.getBatchRelScores(supportNames, batchQueryNames) #(relation_pairs_sizex1) -> embedding as additional channel
class RF():
def __init__(self):
print('loading dataset')
with open('rf_trainX.pkl', 'rb') as f:
trainX = pickle.load(f)
with open('rf_trainY.pkl', 'rb') as f:
trainY = pickle.load(f)
with open('embedding_new.pkl', 'rb') as f:
self.embeddings = pickle.load(f)
with open('embedding_val.pkl', 'rb') as f:
self.embeddingsVal = pickle.load(f)
with open('imgNameToIdx.json', 'r') as f:
self.nameToIdx = json.load(f)
with open('imgNameToIdxVal.json', 'r') as f:
self.nameToIdxVal = json.load(f)
print(trainX.shape, trainY.shape)
print('start RF training')
self.classifier = RandomForestRegressor(n_estimators = 200, random_state = 42, max_features=4)
self.classifier.fit(trainX, trainY) #to-do
del trainX
del trainY
def getBatchRelScores(self, supportNames, batchQueryNames):
relations = []
for sName in supportNames:
sName = sName[len('./train/n03347037/'):]
sEmbedding = self.embeddings[self.nameToIdx[sName]]
for qName in batchQueryNames:
qName = qName[len('./train/n03347037/'):]
qEmbedding = self.embeddings[self.nameToIdx[qName]]
concat = np.concatenate([sEmbedding, qEmbedding], axis = 1).squeeze()
relations.append(concat)
relations = np.stack(relations).round(2)
# print(relations)
# print(relations.shape)
preds = self.classifier.predict(relations)
preds *= 100
preds = preds.astype(int)
return torch.from_numpy(preds).cuda()#to-do: check rf is correct
def getBatchRelScoresVal(self, supportNames, batchQueryNames):
relations = []
for sName in supportNames:
sName = sName[len('./val/n03347037/'):]
sEmbedding = self.embeddingsVal[self.nameToIdxVal[sName]]
for qName in batchQueryNames:
qName = qName[len('./val/n03347037/'):]
qEmbedding = self.embeddingsVal[self.nameToIdxVal[qName]]
concat = np.concatenate([sEmbedding, qEmbedding], axis = 1).squeeze()
relations.append(concat)
relations = np.stack(relations).round(2)
# print(relations)
# print(relations.shape)
preds = self.classifier.predict(relations)
preds *= 100
preds = preds.astype(int)
return torch.from_numpy(preds).cuda()
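# --- Illustrative sketch (not part of the original module) ---
# Per the comment at the top of this file, the returned scores are ordered as
# {support1-query1 ... support1-queryN, support2-query1, ...}. The hypothetical
# helper below reshapes such a flat score vector into a (num_support, num_query)
# matrix, which is often the more convenient layout downstream.
def _scores_to_matrix(scores, num_support, num_query):
    return scores.view(num_support, num_query)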
| [
"sklearn.ensemble.RandomForestRegressor",
"pickle.load",
"torch.from_numpy",
"numpy.stack",
"numpy.concatenate",
"json.load"
] | [((1123, 1195), 'sklearn.ensemble.RandomForestRegressor', 'RandomForestRegressor', ([], {'n_estimators': '(200)', 'random_state': '(42)', 'max_features': '(4)'}), '(n_estimators=200, random_state=42, max_features=4)\n', (1144, 1195), False, 'from sklearn.ensemble import RandomForestRegressor\n'), ((531, 545), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (542, 545), False, 'import pickle\n'), ((615, 629), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (626, 629), False, 'import pickle\n'), ((712, 726), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (723, 726), False, 'import pickle\n'), ((812, 826), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (823, 826), False, 'import pickle\n'), ((907, 919), 'json.load', 'json.load', (['f'], {}), '(f)\n', (916, 919), False, 'import json\n'), ((1006, 1018), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1015, 1018), False, 'import json\n'), ((1845, 1864), 'numpy.stack', 'np.stack', (['relations'], {}), '(relations)\n', (1853, 1864), True, 'import numpy as np\n'), ((2055, 2078), 'torch.from_numpy', 'torch.from_numpy', (['preds'], {}), '(preds)\n', (2071, 2078), False, 'import torch\n'), ((2680, 2699), 'numpy.stack', 'np.stack', (['relations'], {}), '(relations)\n', (2688, 2699), True, 'import numpy as np\n'), ((2890, 2913), 'torch.from_numpy', 'torch.from_numpy', (['preds'], {}), '(preds)\n', (2906, 2913), False, 'import torch\n'), ((1723, 1771), 'numpy.concatenate', 'np.concatenate', (['[sEmbedding, qEmbedding]'], {'axis': '(1)'}), '([sEmbedding, qEmbedding], axis=1)\n', (1737, 1771), True, 'import numpy as np\n'), ((2558, 2606), 'numpy.concatenate', 'np.concatenate', (['[sEmbedding, qEmbedding]'], {'axis': '(1)'}), '([sEmbedding, qEmbedding], axis=1)\n', (2572, 2606), True, 'import numpy as np\n')] |
from datetime import date, datetime
from decimal import Decimal
from fractions import Fraction
import pytest
from apteco.query import (
ArrayClause,
CombinedCategoriesClause,
DateListClause,
DateRangeClause,
DateTimeRangeClause,
FlagArrayClause,
NumericClause,
ReferenceClause,
SelectorClause,
TextClause,
)
class TestSelectorVariable:
def test_eq(self, chy_selector_var, chy_session):
high_value_supporters = chy_selector_var == ("Gold", "Platinum")
assert type(high_value_supporters) == SelectorClause
assert high_value_supporters.table_name == "Supporters"
assert high_value_supporters.variable_name == "suMember"
assert high_value_supporters.values == ["Gold", "Platinum"]
assert high_value_supporters.include is True
assert high_value_supporters.session is chy_session
bronze_supporters = chy_selector_var == "Bronze"
assert type(bronze_supporters) == SelectorClause
assert bronze_supporters.table_name == "Supporters"
assert bronze_supporters.variable_name == "suMember"
assert bronze_supporters.values == ["Bronze"]
assert bronze_supporters.include is True
assert bronze_supporters.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_a_number = chy_selector_var == 3
assert exc_info.value.args[0] == (
"Chosen value(s) for a selector variable"
" must be given as a string or an iterable of strings."
)
def test_ne(self, chy_selector_var, chy_session):
higher_value_supporters = chy_selector_var != ("Bronze", "Silver")
assert type(higher_value_supporters) == SelectorClause
assert higher_value_supporters.table_name == "Supporters"
assert higher_value_supporters.variable_name == "suMember"
assert higher_value_supporters.values == ["Bronze", "Silver"]
assert higher_value_supporters.include is False
assert higher_value_supporters.session is chy_session
not_platinum = chy_selector_var != "Platinum"
assert type(not_platinum) == SelectorClause
assert not_platinum.table_name == "Supporters"
assert not_platinum.variable_name == "suMember"
assert not_platinum.values == ["Platinum"]
assert not_platinum.include is False
assert not_platinum.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_a_float = chy_selector_var != 2.5
assert exc_info.value.args[0] == (
"Chosen value(s) for a selector variable"
" must be given as a string or an iterable of strings."
)
@pytest.mark.xfail(reason="Not yet implemented.")
class TestCombinedCategoriesVariable:
# TODO: update when implemented
def test_eq(self, chy_combined_categories_var, chy_session):
northern_supporters = chy_combined_categories_var == ["NE", "NW", "YRK"]
assert type(northern_supporters) == CombinedCategoriesClause
assert northern_supporters.table_name == "Supporters"
assert northern_supporters.variable_name == "suRegion"
assert northern_supporters.values == ["NE", "NW", "YRK"]
assert northern_supporters.include is True
assert northern_supporters.session is chy_session
# TODO: update when implemented
def test_ne(self, chy_combined_categories_var, chy_session):
supporters_outside_london = chy_combined_categories_var != "LDN"
assert type(supporters_outside_london) == CombinedCategoriesClause
assert supporters_outside_london.table_name == "Supporters"
assert supporters_outside_london.variable_name == "suRegion"
assert supporters_outside_london.values == ["LDN"]
assert supporters_outside_london.include is False
assert supporters_outside_london.session is chy_session
class TestNumericVariable:
def test_eq(self, chy_numeric_var_amount, chy_session):
donations_100 = chy_numeric_var_amount == 100
assert type(donations_100) == NumericClause
assert donations_100.table_name == "Donations"
assert donations_100.variable_name == "doAmount"
assert donations_100.values == ["100"]
assert donations_100.include is True
assert donations_100.session is chy_session
hundreds_donations = chy_numeric_var_amount == (i * 100 for i in range(1, 10))
assert type(hundreds_donations) == NumericClause
assert hundreds_donations.table_name == "Donations"
assert hundreds_donations.variable_name == "doAmount"
assert hundreds_donations.values == [
"100",
"200",
"300",
"400",
"500",
"600",
"700",
"800",
"900",
]
assert hundreds_donations.include is True
assert hundreds_donations.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_a_string = chy_numeric_var_amount == "256"
assert exc_info.value.args[0] == (
"Chosen value(s) for a numeric variable"
" must be given as a number or an iterable of numbers."
)
def test_ne(self, chy_numeric_var_amount, chy_session):
not_this = chy_numeric_var_amount != 72.1896
assert type(not_this) == NumericClause
assert not_this.table_name == "Donations"
assert not_this.variable_name == "doAmount"
assert not_this.values == ["72.1896"]
assert not_this.include is False
assert not_this.session is chy_session
not_one_of_these = chy_numeric_var_amount != (17.5, 8192)
assert type(not_one_of_these) == NumericClause
assert not_one_of_these.table_name == "Donations"
assert not_one_of_these.variable_name == "doAmount"
assert not_one_of_these.values == ["17.5", "8192"]
assert not_one_of_these.include is False
assert not_one_of_these.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_a_boolean = chy_numeric_var_amount != False
assert exc_info.value.args[0] == (
"Chosen value(s) for a numeric variable"
" must be given as a number or an iterable of numbers."
)
def test_lt(self, chy_numeric_var_amount, chy_session):
small_donations = chy_numeric_var_amount < Decimal("10.00")
assert type(small_donations) == NumericClause
assert small_donations.table_name == "Donations"
assert small_donations.variable_name == "doAmount"
assert small_donations.values == ["<10.0000"]
assert small_donations.include is True
assert small_donations.session is chy_session
with pytest.raises(ValueError) as exc_info:
less_than_a_list = chy_numeric_var_amount < [512.64, 646.464_646]
assert exc_info.value.args[0] == (
"Must specify a single number for this type of operation."
)
def test_le(self, chy_numeric_var_amount, chy_session):
up_to_including_10k = chy_numeric_var_amount <= 10000
assert type(up_to_including_10k) == NumericClause
assert up_to_including_10k.table_name == "Donations"
assert up_to_including_10k.variable_name == "doAmount"
assert up_to_including_10k.values == ["<=10000"]
assert up_to_including_10k.include is True
assert up_to_including_10k.session is chy_session
with pytest.raises(ValueError) as exc_info:
less_than_equal_tuple = chy_numeric_var_amount <= (52, 27, 9.75)
assert exc_info.value.args[0] == (
"Must specify a single number for this type of operation."
)
def test_gt(self, chy_numeric_var_amount, chy_session):
big_donations = chy_numeric_var_amount > 0.01 * 26000
assert type(big_donations) == NumericClause
assert big_donations.table_name == "Donations"
assert big_donations.variable_name == "doAmount"
assert big_donations.values == [">260.0"]
assert big_donations.include is True
assert big_donations.session is chy_session
with pytest.raises(ValueError) as exc_info:
more_than_a_set = chy_numeric_var_amount > {15, 30, 40, 40}
assert exc_info.value.args[0] == (
"Must specify a single number for this type of operation."
)
def test_ge(self, chy_numeric_var_amount, chy_session):
at_least_this_ratio = chy_numeric_var_amount >= Fraction(65432, 987)
assert type(at_least_this_ratio) == NumericClause
assert at_least_this_ratio.table_name == "Donations"
assert at_least_this_ratio.variable_name == "doAmount"
assert at_least_this_ratio.values == [">=66.2938"]
assert at_least_this_ratio.include is True
assert at_least_this_ratio.session is chy_session
with pytest.raises(ValueError) as exc_info:
number_gen = (n for n in "12.3 4.56 789".split())
at_least_a_generator = chy_numeric_var_amount >= number_gen
assert exc_info.value.args[0] == (
"Must specify a single number for this type of operation."
)
class TestTextVariable:
def test_eq(self, chy_text_var_email, chy_session):
specific_donor = chy_text_var_email == "<EMAIL>"
assert type(specific_donor) == TextClause
assert specific_donor.table_name == "Supporters"
assert specific_donor.variable_name == "suEmail"
assert specific_donor.values == ["<EMAIL>"]
assert specific_donor.match_type == "Is"
assert specific_donor.match_case is True
assert specific_donor.include is True
assert specific_donor.session is chy_session
donors_by_email = chy_text_var_email == [
f"<EMAIL>" for i in range(4)
]
assert type(donors_by_email) == TextClause
assert donors_by_email.table_name == "Supporters"
assert donors_by_email.variable_name == "suEmail"
assert donors_by_email.values == [
"<EMAIL>",
"<EMAIL>",
"<EMAIL>",
"<EMAIL>",
]
assert donors_by_email.match_type == "Is"
assert donors_by_email.match_case is True
assert donors_by_email.include is True
assert donors_by_email.session is chy_session
with pytest.raises(ValueError) as exc_info:
donors_by_number = chy_text_var_email == {34, 765, 2930}
assert exc_info.value.args[0] == (
"Chosen value(s) for a text variable"
" must be given as a string or an iterable of strings."
)
def test_ne(self, chy_text_var_email, chy_session):
dont_want_this_person = chy_text_var_email != "<EMAIL>"
assert type(dont_want_this_person) == TextClause
assert dont_want_this_person.table_name == "Supporters"
assert dont_want_this_person.variable_name == "suEmail"
assert dont_want_this_person.values == ["<EMAIL>"]
assert dont_want_this_person.match_type == "Is"
assert dont_want_this_person.match_case is True
assert dont_want_this_person.include is False
assert dont_want_this_person.session is chy_session
not_these_people = chy_text_var_email != {
"<EMAIL>",
"<EMAIL>",
}
assert type(not_these_people) == TextClause
assert not_these_people.table_name == "Supporters"
assert not_these_people.variable_name == "suEmail"
assert sorted(not_these_people.values) == [
"<EMAIL>",
"<EMAIL>",
]
assert not_these_people.match_type == "Is"
assert not_these_people.match_case is True
assert not_these_people.include is False
assert not_these_people.session is chy_session
with pytest.raises(ValueError) as exc_info:
donor_not_an_obj = chy_text_var_email != object()
assert exc_info.value.args[0] == (
"Chosen value(s) for a text variable"
" must be given as a string or an iterable of strings."
)
def test_lt(self, chy_text_var_surname, chy_session):
before_breakfast = chy_text_var_surname < "breakfast"
assert type(before_breakfast) == TextClause
assert before_breakfast.table_name == "Supporters"
assert before_breakfast.variable_name == "suSrName"
assert before_breakfast.values == ['<"breakfast"']
assert before_breakfast.match_type == "Ranges"
assert before_breakfast.match_case is False
assert before_breakfast.include is True
assert before_breakfast.session is chy_session
with pytest.raises(ValueError) as exc_info:
less_than_a_zero = chy_text_var_surname < 0
assert exc_info.value.args[0] == (
"Must specify a single string for this type of operation."
)
def test_le(self, chy_text_var_surname, chy_session):
first_half_alphabet = chy_text_var_surname <= "n"
assert type(first_half_alphabet) == TextClause
assert first_half_alphabet.table_name == "Supporters"
assert first_half_alphabet.variable_name == "suSrName"
assert first_half_alphabet.values == ['<="n"']
assert first_half_alphabet.match_type == "Ranges"
assert first_half_alphabet.match_case is False
assert first_half_alphabet.include is True
assert first_half_alphabet.session is chy_session
with pytest.raises(ValueError) as exc_info:
earlier_than_letters = chy_text_var_surname <= list("abcedfgh")
assert exc_info.value.args[0] == (
"Must specify a single string for this type of operation."
)
def test_gt(self, chy_text_var_surname, chy_session):
after_tea = chy_text_var_surname > "Tea"
assert type(after_tea) == TextClause
assert after_tea.table_name == "Supporters"
assert after_tea.variable_name == "suSrName"
assert after_tea.values == ['>"Tea"']
assert after_tea.match_type == "Ranges"
assert after_tea.match_case is False
assert after_tea.include is True
assert after_tea.session is chy_session
with pytest.raises(ValueError) as exc_info:
more_than_a_date = chy_text_var_surname > date(2020, 10, 5)
assert exc_info.value.args[0] == (
"Must specify a single string for this type of operation."
)
def test_ge(self, chy_text_var_surname, chy_session):
smith_or_later = chy_text_var_surname >= "Smith"
assert type(smith_or_later) == TextClause
assert smith_or_later.table_name == "Supporters"
assert smith_or_later.variable_name == "suSrName"
assert smith_or_later.values == ['>="Smith"']
assert smith_or_later.match_type == "Ranges"
assert smith_or_later.match_case is False
assert smith_or_later.include is True
assert smith_or_later.session is chy_session
with pytest.raises(ValueError) as exc_info:
later_than_tuple = chy_text_var_surname >= ("A", "e", "i", "O")
assert exc_info.value.args[0] == (
"Must specify a single string for this type of operation."
)
class TestArrayVariable:
def test_eq(self, chy_array_var, chy_session):
national_campaigns = chy_array_var == "National"
assert type(national_campaigns) == ArrayClause
assert national_campaigns.table_name == "Campaigns"
assert national_campaigns.variable_name == "caTags"
assert national_campaigns.values == ["National"]
assert national_campaigns.logic == "OR"
assert national_campaigns.include is True
assert national_campaigns.session is chy_session
autumn_campaigns = chy_array_var == {
"Autumn",
"Fall",
"Sep",
"Oct",
"Nov",
"Halloween",
"Back-to-School",
}
assert type(autumn_campaigns) == ArrayClause
assert autumn_campaigns.table_name == "Campaigns"
assert autumn_campaigns.variable_name == "caTags"
assert sorted(autumn_campaigns.values) == [
"Autumn",
"Back-to-School",
"Fall",
"Halloween",
"Nov",
"Oct",
"Sep",
]
assert autumn_campaigns.logic == "OR"
assert autumn_campaigns.include is True
assert autumn_campaigns.session is chy_session
with pytest.raises(ValueError) as exc_info:
forgot_string_quotes = chy_array_var == ["4", 6]
assert exc_info.value.args[0] == (
"Chosen value(s) for an array variable"
" must be given as a string or an iterable of strings."
)
def test_ne(self, chy_array_var, chy_session):
not_christmas = chy_array_var != "Christmas"
assert type(not_christmas) == ArrayClause
assert not_christmas.table_name == "Campaigns"
assert not_christmas.variable_name == "caTags"
assert not_christmas.values == ["Christmas"]
assert not_christmas.logic == "OR"
assert not_christmas.include is False
assert not_christmas.session is chy_session
one_off_campaigns = chy_array_var != [
"Recurrent",
"Annual",
"Regular",
"Monthly",
"Weekly",
"Daily",
"Seasonal",
]
assert type(one_off_campaigns) == ArrayClause
assert one_off_campaigns.table_name == "Campaigns"
assert one_off_campaigns.variable_name == "caTags"
assert one_off_campaigns.values == [
"Recurrent",
"Annual",
"Regular",
"Monthly",
"Weekly",
"Daily",
"Seasonal",
]
assert one_off_campaigns.logic == "OR"
assert one_off_campaigns.include is False
assert one_off_campaigns.session is chy_session
with pytest.raises(ValueError) as exc_info:
undesired_values = ("value_we_dont_like", None)
not_none = chy_array_var != undesired_values
assert exc_info.value.args[0] == (
"Chosen value(s) for an array variable"
" must be given as a string or an iterable of strings."
)
class TestFlagArrayVariable:
def test_eq(self, chy_flag_array_var, chy_session):
can_post = chy_flag_array_var == "DirectMail"
assert type(can_post) == FlagArrayClause
assert can_post.table_name == "Supporters"
assert can_post.variable_name == "suCtcPrf"
assert can_post.values == ["DirectMail"]
assert can_post.logic == "OR"
assert can_post.include is True
assert can_post.session is chy_session
phone_or_text = chy_flag_array_var == ("SMS", "Telephone")
assert type(phone_or_text) == FlagArrayClause
assert phone_or_text.table_name == "Supporters"
assert phone_or_text.variable_name == "suCtcPrf"
assert phone_or_text.values == ["SMS", "Telephone"]
assert phone_or_text.logic == "OR"
assert phone_or_text.include is True
assert phone_or_text.session is chy_session
with pytest.raises(ValueError) as exc_info:
true = True # so editor doesn't complain about comparison not using `is`
contactable = chy_flag_array_var == true
assert exc_info.value.args[0] == (
"Chosen value(s) for a flag array variable"
" must be given as a string or an iterable of strings."
)
def test_ne(self, chy_flag_array_var, chy_session):
cant_email = chy_flag_array_var != "Email"
assert type(cant_email) == FlagArrayClause
assert cant_email.table_name == "Supporters"
assert cant_email.variable_name == "suCtcPrf"
assert cant_email.values == ["Email"]
assert cant_email.logic == "OR"
assert cant_email.include is False
assert cant_email.session is chy_session
not_business = chy_flag_array_var != {
"BusinessPhone",
"BusinessDirectMail",
"BusinessEmail",
}
assert type(not_business) == FlagArrayClause
assert not_business.table_name == "Supporters"
assert not_business.variable_name == "suCtcPrf"
assert sorted(not_business.values) == [
"BusinessDirectMail",
"BusinessEmail",
"BusinessPhone",
]
assert not_business.logic == "OR"
assert not_business.include is False
assert not_business.session is chy_session
with pytest.raises(ValueError) as exc_info:
contactable = chy_flag_array_var != 0
assert exc_info.value.args[0] == (
"Chosen value(s) for a flag array variable"
" must be given as a string or an iterable of strings."
)
class TestDateVariable:
def test_eq(self, chy_date_var, chy_session):
august_bank_holiday_2018 = chy_date_var == date(2018, 8, 27)
assert type(august_bank_holiday_2018) == DateListClause
assert august_bank_holiday_2018.table_name == "Donations"
assert august_bank_holiday_2018.variable_name == "doDate"
assert august_bank_holiday_2018.values == ["20180827"]
assert august_bank_holiday_2018.include is True
assert august_bank_holiday_2018.session is chy_session
festive_days_from_random_years = chy_date_var == [
date(1912, 12, 25),
date(1934, 2, 14),
date(1956, 4, 1),
date(1978, 10, 31),
date(1990, 11, 5),
date(2011, 4, 29),
date(2023, 9, 23),
]
assert type(festive_days_from_random_years) == DateListClause
assert festive_days_from_random_years.table_name == "Donations"
assert festive_days_from_random_years.variable_name == "doDate"
assert festive_days_from_random_years.values == [
"19121225",
"19340214",
"19560401",
"19781031",
"19901105",
"20110429",
"20230923",
]
assert festive_days_from_random_years.include is True
assert festive_days_from_random_years.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_date_string = chy_date_var == "20180528"
assert exc_info.value.args[0] == (
"Chosen value for a date variable"
" must be a date object or an iterable of date objects."
)
def test_ne(self, chy_date_var, chy_session):
not_easter_2050 = chy_date_var != date(2050, 4, 10)
assert type(not_easter_2050) == DateListClause
assert not_easter_2050.table_name == "Donations"
assert not_easter_2050.variable_name == "doDate"
assert not_easter_2050.values == ["20500410"]
assert not_easter_2050.include is False
assert not_easter_2050.session is chy_session
exclude_solstices_and_equinoxes_2030 = chy_date_var != [
date(2030, 3, 20),
datetime(2030, 6, 21, 7, 31),
date(2030, 9, 22),
datetime(2030, 12, 21, 20, 9),
]
assert type(exclude_solstices_and_equinoxes_2030) == DateListClause
assert exclude_solstices_and_equinoxes_2030.table_name == "Donations"
assert exclude_solstices_and_equinoxes_2030.variable_name == "doDate"
assert exclude_solstices_and_equinoxes_2030.values == [
"20300320",
"20300621",
"20300922",
"20301221",
]
assert exclude_solstices_and_equinoxes_2030.include is False
assert exclude_solstices_and_equinoxes_2030.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_list_some_invalid = chy_date_var == [
date(2012, 7, 27),
"20221121",
datetime(2018, 2, 9, 11, 0, 0),
]
assert exc_info.value.args[0] == (
"Chosen value for a date variable"
" must be a date object or an iterable of date objects."
)
def test_le(self, chy_date_var, chy_session):
before_tax_year_end_2018_19 = chy_date_var <= date(2019, 4, 5)
assert type(before_tax_year_end_2018_19) == DateRangeClause
assert before_tax_year_end_2018_19.table_name == "Donations"
assert before_tax_year_end_2018_19.variable_name == "doDate"
assert before_tax_year_end_2018_19.start == "Earliest"
assert before_tax_year_end_2018_19.end == "2019-04-05"
assert before_tax_year_end_2018_19.include is True
assert before_tax_year_end_2018_19.session is chy_session
with pytest.raises(ValueError) as exc_info:
two_dates = (date(2019, 2, 14), date(2019, 6, 21))
less_than_equal_a_pair = chy_date_var <= two_dates
assert exc_info.value.args[0] == (
"Must specify a single date for this type of operation."
)
def test_ge(self, chy_date_var, chy_session):
after_christmas_2015 = chy_date_var >= date(2015, 12, 25)
assert type(after_christmas_2015) == DateRangeClause
assert after_christmas_2015.table_name == "Donations"
assert after_christmas_2015.variable_name == "doDate"
assert after_christmas_2015.start == "2015-12-25"
assert after_christmas_2015.end == "Latest"
assert after_christmas_2015.include is True
assert after_christmas_2015.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_a_string = chy_date_var >= "2011-11-20"
assert exc_info.value.args[0] == (
"Must specify a single date for this type of operation."
)
class TestDateTimeVariable:
def test_le(self, chy_datetime_var, chy_session):
xmas_campaign_launch = datetime(2019, 11, 25, 11, 22, 33)
before_christmas_campaign = chy_datetime_var <= xmas_campaign_launch
assert type(before_christmas_campaign) == DateTimeRangeClause
assert before_christmas_campaign.table_name == "WebsiteVisits"
assert before_christmas_campaign.variable_name == "weSessSt"
assert before_christmas_campaign.start == "Earliest"
assert before_christmas_campaign.end == "2019-11-25T11:22:33"
assert before_christmas_campaign.include is True
assert before_christmas_campaign.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_date_only = chy_datetime_var <= date(2019, 11, 25)
assert exc_info.value.args[0] == (
"Must specify a single datetime for this type of operation."
)
def test_ge(self, chy_datetime_var, chy_session):
sale_start = datetime(2019, 12, 26, 4, 32, 10)
after_boxing_day_sale_start = chy_datetime_var >= sale_start
assert type(after_boxing_day_sale_start) == DateTimeRangeClause
assert after_boxing_day_sale_start.table_name == "WebsiteVisits"
assert after_boxing_day_sale_start.variable_name == "weSessSt"
assert after_boxing_day_sale_start.start == "2019-12-26T04:32:10"
assert after_boxing_day_sale_start.end == "Latest"
assert after_boxing_day_sale_start.include is True
assert after_boxing_day_sale_start.session is chy_session
with pytest.raises(ValueError) as exc_info:
trying_with_number = chy_datetime_var >= 2_019_122_643_210
assert exc_info.value.args[0] == (
"Must specify a single datetime for this type of operation."
)
@pytest.mark.xfail(reason="Not yet implemented.")
class TestReferenceVariable:
def test_eq(self, chy_reference_var, chy_session):
abc_campaign = chy_reference_var == "abc"
assert type(abc_campaign) == ReferenceClause
assert abc_campaign.table_name == "Campaigns"
assert abc_campaign.variable_name == "caID"
assert abc_campaign.values == ["abc"]
assert abc_campaign.include is True
assert abc_campaign.session is chy_session
def test_ne(self, chy_reference_var, chy_session):
not_x_campaigns = chy_reference_var != ["x", "xy", "xs", "xyz", "x1"]
assert type(not_x_campaigns) == ReferenceClause
assert not_x_campaigns.table_name == "Campaigns"
assert not_x_campaigns.variable_name == "caID"
assert not_x_campaigns.values == ["x", "xy", "xs", "xyz", "x1"]
assert not_x_campaigns.include is False
assert not_x_campaigns.session is chy_session
| [
"datetime.datetime",
"pytest.mark.xfail",
"fractions.Fraction",
"pytest.raises",
"datetime.date",
"decimal.Decimal"
] | [((2726, 2774), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""Not yet implemented."""'}), "(reason='Not yet implemented.')\n", (2743, 2774), False, 'import pytest\n'), ((27858, 27906), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'reason': '"""Not yet implemented."""'}), "(reason='Not yet implemented.')\n", (27875, 27906), False, 'import pytest\n'), ((26124, 26158), 'datetime.datetime', 'datetime', (['(2019)', '(11)', '(25)', '(11)', '(22)', '(33)'], {}), '(2019, 11, 25, 11, 22, 33)\n', (26132, 26158), False, 'from datetime import date, datetime\n'), ((27028, 27061), 'datetime.datetime', 'datetime', (['(2019)', '(12)', '(26)', '(4)', '(32)', '(10)'], {}), '(2019, 12, 26, 4, 32, 10)\n', (27036, 27061), False, 'from datetime import date, datetime\n'), ((1287, 1312), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1300, 1312), False, 'import pytest\n'), ((2451, 2476), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2464, 2476), False, 'import pytest\n'), ((4997, 5022), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5010, 5022), False, 'import pytest\n'), ((6091, 6116), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6104, 6116), False, 'import pytest\n'), ((6484, 6500), 'decimal.Decimal', 'Decimal', (['"""10.00"""'], {}), "('10.00')\n", (6491, 6500), False, 'from decimal import Decimal\n'), ((6840, 6865), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6853, 6865), False, 'import pytest\n'), ((7566, 7591), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (7579, 7591), False, 'import pytest\n'), ((8254, 8279), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8267, 8279), False, 'import pytest\n'), ((8606, 8626), 'fractions.Fraction', 'Fraction', (['(65432)', '(987)'], {}), '(65432, 987)\n', (8614, 8626), False, 'from fractions import Fraction\n'), ((8991, 9016), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (9004, 9016), False, 'import pytest\n'), ((10469, 10494), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10482, 10494), False, 'import pytest\n'), ((11945, 11970), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (11958, 11970), False, 'import pytest\n'), ((12792, 12817), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (12805, 12817), False, 'import pytest\n'), ((13599, 13624), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (13612, 13624), False, 'import pytest\n'), ((14338, 14363), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (14351, 14363), False, 'import pytest\n'), ((15124, 15149), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (15137, 15149), False, 'import pytest\n'), ((16644, 16669), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (16657, 16669), False, 'import pytest\n'), ((18148, 18173), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (18161, 18173), False, 'import pytest\n'), ((19393, 19418), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (19406, 19418), False, 'import pytest\n'), ((20808, 20833), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (20821, 20833), False, 'import pytest\n'), ((21201, 21218), 'datetime.date', 'date', (['(2018)', '(8)', '(27)'], {}), '(2018, 8, 27)\n', 
(21205, 21218), False, 'from datetime import date, datetime\n'), ((22480, 22505), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (22493, 22505), False, 'import pytest\n'), ((22846, 22863), 'datetime.date', 'date', (['(2050)', '(4)', '(10)'], {}), '(2050, 4, 10)\n', (22850, 22863), False, 'from datetime import date, datetime\n'), ((23972, 23997), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (23985, 23997), False, 'import pytest\n'), ((24472, 24488), 'datetime.date', 'date', (['(2019)', '(4)', '(5)'], {}), '(2019, 4, 5)\n', (24476, 24488), False, 'from datetime import date, datetime\n'), ((24960, 24985), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (24973, 24985), False, 'import pytest\n'), ((25345, 25363), 'datetime.date', 'date', (['(2015)', '(12)', '(25)'], {}), '(2015, 12, 25)\n', (25349, 25363), False, 'from datetime import date, datetime\n'), ((25784, 25809), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (25797, 25809), False, 'import pytest\n'), ((26712, 26737), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (26725, 26737), False, 'import pytest\n'), ((27619, 27644), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (27632, 27644), False, 'import pytest\n'), ((14431, 14448), 'datetime.date', 'date', (['(2020)', '(10)', '(5)'], {}), '(2020, 10, 5)\n', (14435, 14448), False, 'from datetime import date, datetime\n'), ((21669, 21687), 'datetime.date', 'date', (['(1912)', '(12)', '(25)'], {}), '(1912, 12, 25)\n', (21673, 21687), False, 'from datetime import date, datetime\n'), ((21701, 21718), 'datetime.date', 'date', (['(1934)', '(2)', '(14)'], {}), '(1934, 2, 14)\n', (21705, 21718), False, 'from datetime import date, datetime\n'), ((21732, 21748), 'datetime.date', 'date', (['(1956)', '(4)', '(1)'], {}), '(1956, 4, 1)\n', (21736, 21748), False, 'from datetime import date, datetime\n'), ((21762, 21780), 'datetime.date', 'date', (['(1978)', '(10)', '(31)'], {}), '(1978, 10, 31)\n', (21766, 21780), False, 'from datetime import date, datetime\n'), ((21794, 21811), 'datetime.date', 'date', (['(1990)', '(11)', '(5)'], {}), '(1990, 11, 5)\n', (21798, 21811), False, 'from datetime import date, datetime\n'), ((21825, 21842), 'datetime.date', 'date', (['(2011)', '(4)', '(29)'], {}), '(2011, 4, 29)\n', (21829, 21842), False, 'from datetime import date, datetime\n'), ((21856, 21873), 'datetime.date', 'date', (['(2023)', '(9)', '(23)'], {}), '(2023, 9, 23)\n', (21860, 21873), False, 'from datetime import date, datetime\n'), ((23267, 23284), 'datetime.date', 'date', (['(2030)', '(3)', '(20)'], {}), '(2030, 3, 20)\n', (23271, 23284), False, 'from datetime import date, datetime\n'), ((23298, 23326), 'datetime.datetime', 'datetime', (['(2030)', '(6)', '(21)', '(7)', '(31)'], {}), '(2030, 6, 21, 7, 31)\n', (23306, 23326), False, 'from datetime import date, datetime\n'), ((23340, 23357), 'datetime.date', 'date', (['(2030)', '(9)', '(22)'], {}), '(2030, 9, 22)\n', (23344, 23357), False, 'from datetime import date, datetime\n'), ((23371, 23400), 'datetime.datetime', 'datetime', (['(2030)', '(12)', '(21)', '(20)', '(9)'], {}), '(2030, 12, 21, 20, 9)\n', (23379, 23400), False, 'from datetime import date, datetime\n'), ((25024, 25041), 'datetime.date', 'date', (['(2019)', '(2)', '(14)'], {}), '(2019, 2, 14)\n', (25028, 25041), False, 'from datetime import date, datetime\n'), ((25043, 25060), 'datetime.date', 'date', (['(2019)', '(6)', '(21)'], {}), 
'(2019, 6, 21)\n', (25047, 25060), False, 'from datetime import date, datetime\n'), ((26807, 26825), 'datetime.date', 'date', (['(2019)', '(11)', '(25)'], {}), '(2019, 11, 25)\n', (26811, 26825), False, 'from datetime import date, datetime\n'), ((24089, 24106), 'datetime.date', 'date', (['(2012)', '(7)', '(27)'], {}), '(2012, 7, 27)\n', (24093, 24106), False, 'from datetime import date, datetime\n'), ((24152, 24182), 'datetime.datetime', 'datetime', (['(2018)', '(2)', '(9)', '(11)', '(0)', '(0)'], {}), '(2018, 2, 9, 11, 0, 0)\n', (24160, 24182), False, 'from datetime import date, datetime\n')] |
import json
import os
from argparse import ArgumentParser
from typing import Dict
from boto3 import client
from core.constructs.commands import BaseCommand
from core.default.commands.function.utils import get_cloud_id_from_cdev_name
RUUID = "cdev::simple::function"
class execute(BaseCommand):
help = """
Execute a function in the cloud.
"""
def add_arguments(self, parser: ArgumentParser) -> None:
parser.add_argument(
"function_id", type=str, help="The id of the function to execute."
)
parser.add_argument(
"--event",
type=str,
help="File (json) location of event object to provide as input to the function. Can not be used with '--event-data` flag.",
)
parser.add_argument(
"--event-data",
type=str,
help="Raw string form of event object to provide as input to the function. Can not be used with '--event' flag.",
)
def command(self, *args, **kwargs) -> None:
event_data = self._get_event_data(*args, **kwargs)
full_function_name = kwargs.get("function_id")
(
component_name,
function_name,
) = self.get_component_and_resource_from_qualified_name(full_function_name)
cloud_name = get_cloud_id_from_cdev_name(component_name, function_name)
lambda_client = client("lambda")
self.stdout.write(f"executing {full_function_name}")
response = lambda_client.invoke(
FunctionName=cloud_name,
InvocationType="RequestResponse",
Payload=json.dumps(event_data),
)
self.stdout.write(str(response))
def _get_event_data(self, *args, **kwargs) -> Dict:
event_file_location: str = kwargs.get("event")
event_raw_data: str = kwargs.get("event_data")
if event_file_location and event_raw_data:
raise Exception("Can not provide both '--event-data' and '--event'")
if event_file_location:
if not os.path.isfile(event_file_location):
raise Exception(f"{event_file_location} is not a valid file location")
with open(event_file_location) as fh:
try:
event_data = json.load(fh)
return event_data
except Exception as e:
print(e)
raise Exception(f"Could not load {event_file_location} as json")
if event_raw_data:
try:
event_data = json.loads(event_raw_data)
return event_data
except Exception as e:
print(e)
raise Exception(f"Could not load {event_raw_data} as json")
return {}
| [
"json.loads",
"boto3.client",
"json.dumps",
"os.path.isfile",
"core.default.commands.function.utils.get_cloud_id_from_cdev_name",
"json.load"
] | [((1315, 1373), 'core.default.commands.function.utils.get_cloud_id_from_cdev_name', 'get_cloud_id_from_cdev_name', (['component_name', 'function_name'], {}), '(component_name, function_name)\n', (1342, 1373), False, 'from core.default.commands.function.utils import get_cloud_id_from_cdev_name\n'), ((1399, 1415), 'boto3.client', 'client', (['"""lambda"""'], {}), "('lambda')\n", (1405, 1415), False, 'from boto3 import client\n'), ((1622, 1644), 'json.dumps', 'json.dumps', (['event_data'], {}), '(event_data)\n', (1632, 1644), False, 'import json\n'), ((2050, 2085), 'os.path.isfile', 'os.path.isfile', (['event_file_location'], {}), '(event_file_location)\n', (2064, 2085), False, 'import os\n'), ((2558, 2584), 'json.loads', 'json.loads', (['event_raw_data'], {}), '(event_raw_data)\n', (2568, 2584), False, 'import json\n'), ((2279, 2292), 'json.load', 'json.load', (['fh'], {}), '(fh)\n', (2288, 2292), False, 'import json\n')] |
from torchvision import transforms
from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST
from torch.utils.data import DataLoader, Subset
__all__ = ['cifar10_dataloaders', 'cifar100_dataloaders', 'svhn_dataloaders', 'fashionmnist_dataloaders']
def cifar10_dataloaders(batch_size=64, data_dir = 'datasets/cifar10'):
normalize = transforms.Normalize(mean=[0.4914, 0.4822, 0.4465], std=[0.2023, 0.1994, 0.2010])
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize
])
test_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
train_set = Subset(CIFAR10(data_dir, train=True, transform=train_transform, download=True), list(range(45000)))
val_set = Subset(CIFAR10(data_dir, train=True, transform=test_transform, download=True), list(range(45000, 50000)))
test_set = CIFAR10(data_dir, train=False, transform=test_transform, download=True)
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=2,
drop_last=True, pin_memory=True)
val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
return train_loader, val_loader, test_loader
def cifar100_dataloaders(batch_size=64, data_dir = 'datasets/cifar100'):
normalize = transforms.Normalize(mean=[0.5071, 0.4866, 0.4409], std=[0.2009, 0.1984, 0.2023])
train_transform = transforms.Compose([
transforms.RandomCrop(32, padding=4),
transforms.RandomHorizontalFlip(),
transforms.ToTensor(),
normalize
])
test_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
train_set = Subset(CIFAR100(data_dir, train=True, transform=train_transform, download=True), list(range(45000)))
val_set = Subset(CIFAR100(data_dir, train=True, transform=test_transform, download=True), list(range(45000, 50000)))
test_set = CIFAR100(data_dir, train=False, transform=test_transform, download=True)
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=2,
drop_last=True, pin_memory=True)
val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
return train_loader, val_loader, test_loader
def svhn_dataloaders(batch_size=64, data_dir = 'datasets/svhn'):
normalize = transforms.Normalize(mean=[0.4377, 0.4438, 0.4728], std=[0.1201, 0.1231, 0.1052])
train_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
test_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
train_set = Subset(SVHN(data_dir, split='train', transform=train_transform, download=True),list(range(68257)))
val_set = Subset(SVHN(data_dir, split='train', transform=train_transform, download=True),list(range(68257,73257)))
test_set = SVHN(data_dir, split='test', transform=test_transform, download=True)
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=2, drop_last=True, pin_memory=True)
val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
return train_loader, val_loader, test_loader
def fashionmnist_dataloaders(batch_size=64, data_dir = 'datasets/fashionmnist'):
normalize = transforms.Normalize(mean=[0.1436], std=[0.1609])
train_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
test_transform = transforms.Compose([
transforms.ToTensor(),
normalize
])
train_set = Subset(FashionMNIST(data_dir, train=True, transform=train_transform, download=True), list(range(55000)))
val_set = Subset(FashionMNIST(data_dir, train=True, transform=test_transform, download=True), list(range(55000, 60000)))
test_set = FashionMNIST(data_dir, train=False, transform=test_transform, download=True)
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True, num_workers=2, drop_last=True, pin_memory=True)
val_loader = DataLoader(val_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
test_loader = DataLoader(test_set, batch_size=batch_size, shuffle=False, num_workers=2, pin_memory=True)
return train_loader, val_loader, test_loader
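# A minimal usage sketch (the batch size below is arbitrary; data is downloaded
# to the default 'datasets/cifar10' directory on first use):
def _dataloader_usage_sketch():
    train_loader, val_loader, test_loader = cifar10_dataloaders(batch_size=128)
    for images, labels in train_loader:
        # images: (128, 3, 32, 32) normalized float tensors; labels: (128,) class indices
        break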
| [
"torchvision.datasets.CIFAR100",
"torchvision.datasets.FashionMNIST",
"torchvision.transforms.RandomHorizontalFlip",
"torchvision.transforms.RandomCrop",
"torchvision.datasets.SVHN",
"torchvision.datasets.CIFAR10",
"torchvision.transforms.Normalize",
"torch.utils.data.DataLoader",
"torchvision.transforms.ToTensor"
] | [((350, 435), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.4914, 0.4822, 0.4465]', 'std': '[0.2023, 0.1994, 0.201]'}), '(mean=[0.4914, 0.4822, 0.4465], std=[0.2023, 0.1994, 0.201]\n )\n', (370, 435), False, 'from torchvision import transforms\n'), ((971, 1042), 'torchvision.datasets.CIFAR10', 'CIFAR10', (['data_dir'], {'train': '(False)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=False, transform=test_transform, download=True)\n', (978, 1042), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((1063, 1173), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(2)', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(train_set, batch_size=batch_size, shuffle=True, num_workers=2,\n drop_last=True, pin_memory=True)\n', (1073, 1173), False, 'from torch.utils.data import DataLoader, Subset\n'), ((1219, 1312), 'torch.utils.data.DataLoader', 'DataLoader', (['val_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(val_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (1229, 1312), False, 'from torch.utils.data import DataLoader, Subset\n'), ((1327, 1421), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(test_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (1337, 1421), False, 'from torch.utils.data import DataLoader, Subset\n'), ((1559, 1645), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.5071, 0.4866, 0.4409]', 'std': '[0.2009, 0.1984, 0.2023]'}), '(mean=[0.5071, 0.4866, 0.4409], std=[0.2009, 0.1984, \n 0.2023])\n', (1579, 1645), False, 'from torchvision import transforms\n'), ((2182, 2254), 'torchvision.datasets.CIFAR100', 'CIFAR100', (['data_dir'], {'train': '(False)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=False, transform=test_transform, download=True)\n', (2190, 2254), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((2275, 2385), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(2)', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(train_set, batch_size=batch_size, shuffle=True, num_workers=2,\n drop_last=True, pin_memory=True)\n', (2285, 2385), False, 'from torch.utils.data import DataLoader, Subset\n'), ((2431, 2524), 'torch.utils.data.DataLoader', 'DataLoader', (['val_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(val_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (2441, 2524), False, 'from torch.utils.data import DataLoader, Subset\n'), ((2539, 2633), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(test_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (2549, 2633), False, 'from torch.utils.data import DataLoader, Subset\n'), ((2763, 2849), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.4377, 0.4438, 0.4728]', 'std': '[0.1201, 0.1231, 0.1052]'}), '(mean=[0.4377, 0.4438, 0.4728], std=[0.1201, 0.1231, \n 0.1052])\n', (2783, 2849), False, 'from 
torchvision import transforms\n'), ((3293, 3362), 'torchvision.datasets.SVHN', 'SVHN', (['data_dir'], {'split': '"""test"""', 'transform': 'test_transform', 'download': '(True)'}), "(data_dir, split='test', transform=test_transform, download=True)\n", (3297, 3362), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((3395, 3505), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(2)', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(train_set, batch_size=batch_size, shuffle=True, num_workers=2,\n drop_last=True, pin_memory=True)\n', (3405, 3505), False, 'from torch.utils.data import DataLoader, Subset\n'), ((3519, 3612), 'torch.utils.data.DataLoader', 'DataLoader', (['val_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(val_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (3529, 3612), False, 'from torch.utils.data import DataLoader, Subset\n'), ((3627, 3721), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(test_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (3637, 3721), False, 'from torch.utils.data import DataLoader, Subset\n'), ((3871, 3920), 'torchvision.transforms.Normalize', 'transforms.Normalize', ([], {'mean': '[0.1436]', 'std': '[0.1609]'}), '(mean=[0.1436], std=[0.1609])\n', (3891, 3920), False, 'from torchvision import transforms\n'), ((4381, 4457), 'torchvision.datasets.FashionMNIST', 'FashionMNIST', (['data_dir'], {'train': '(False)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=False, transform=test_transform, download=True)\n', (4393, 4457), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((4478, 4588), 'torch.utils.data.DataLoader', 'DataLoader', (['train_set'], {'batch_size': 'batch_size', 'shuffle': '(True)', 'num_workers': '(2)', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(train_set, batch_size=batch_size, shuffle=True, num_workers=2,\n drop_last=True, pin_memory=True)\n', (4488, 4588), False, 'from torch.utils.data import DataLoader, Subset\n'), ((4602, 4695), 'torch.utils.data.DataLoader', 'DataLoader', (['val_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(val_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (4612, 4695), False, 'from torch.utils.data import DataLoader, Subset\n'), ((4710, 4804), 'torch.utils.data.DataLoader', 'DataLoader', (['test_set'], {'batch_size': 'batch_size', 'shuffle': '(False)', 'num_workers': '(2)', 'pin_memory': '(True)'}), '(test_set, batch_size=batch_size, shuffle=False, num_workers=2,\n pin_memory=True)\n', (4720, 4804), False, 'from torch.utils.data import DataLoader, Subset\n'), ((743, 814), 'torchvision.datasets.CIFAR10', 'CIFAR10', (['data_dir'], {'train': '(True)', 'transform': 'train_transform', 'download': '(True)'}), '(data_dir, train=True, transform=train_transform, download=True)\n', (750, 814), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((857, 927), 'torchvision.datasets.CIFAR10', 'CIFAR10', (['data_dir'], {'train': '(True)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=True, transform=test_transform, download=True)\n', (864, 927), False, 'from 
torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((1952, 2024), 'torchvision.datasets.CIFAR100', 'CIFAR100', (['data_dir'], {'train': '(True)', 'transform': 'train_transform', 'download': '(True)'}), '(data_dir, train=True, transform=train_transform, download=True)\n', (1960, 2024), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((2067, 2138), 'torchvision.datasets.CIFAR100', 'CIFAR100', (['data_dir'], {'train': '(True)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=True, transform=test_transform, download=True)\n', (2075, 2138), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((3067, 3138), 'torchvision.datasets.SVHN', 'SVHN', (['data_dir'], {'split': '"""train"""', 'transform': 'train_transform', 'download': '(True)'}), "(data_dir, split='train', transform=train_transform, download=True)\n", (3071, 3138), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((3180, 3251), 'torchvision.datasets.SVHN', 'SVHN', (['data_dir'], {'split': '"""train"""', 'transform': 'train_transform', 'download': '(True)'}), "(data_dir, split='train', transform=train_transform, download=True)\n", (3184, 3251), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((4143, 4219), 'torchvision.datasets.FashionMNIST', 'FashionMNIST', (['data_dir'], {'train': '(True)', 'transform': 'train_transform', 'download': '(True)'}), '(data_dir, train=True, transform=train_transform, download=True)\n', (4155, 4219), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((4262, 4337), 'torchvision.datasets.FashionMNIST', 'FashionMNIST', (['data_dir'], {'train': '(True)', 'transform': 'test_transform', 'download': '(True)'}), '(data_dir, train=True, transform=test_transform, download=True)\n', (4274, 4337), False, 'from torchvision.datasets import CIFAR10, CIFAR100, SVHN, FashionMNIST\n'), ((483, 519), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['(32)'], {'padding': '(4)'}), '(32, padding=4)\n', (504, 519), False, 'from torchvision import transforms\n'), ((529, 562), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (560, 562), False, 'from torchvision import transforms\n'), ((572, 593), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (591, 593), False, 'from torchvision import transforms\n'), ((671, 692), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (690, 692), False, 'from torchvision import transforms\n'), ((1692, 1728), 'torchvision.transforms.RandomCrop', 'transforms.RandomCrop', (['(32)'], {'padding': '(4)'}), '(32, padding=4)\n', (1713, 1728), False, 'from torchvision import transforms\n'), ((1738, 1771), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (1769, 1771), False, 'from torchvision import transforms\n'), ((1781, 1802), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1800, 1802), False, 'from torchvision import transforms\n'), ((1880, 1901), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (1899, 1901), False, 'from torchvision import transforms\n'), ((2896, 2917), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (2915, 2917), False, 'from torchvision import transforms\n'), ((2995, 3016), 'torchvision.transforms.ToTensor', 
'transforms.ToTensor', ([], {}), '()\n', (3014, 3016), False, 'from torchvision import transforms\n'), ((3972, 3993), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (3991, 3993), False, 'from torchvision import transforms\n'), ((4071, 4092), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4090, 4092), False, 'from torchvision import transforms\n')] |
from apscheduler.schedulers.blocking import BlockingScheduler
import time
import setdb
scheduler = BlockingScheduler()
SCHEDULED_HOUR = 9
SCHEDULED_MINUTE = 0
def flush_db():
deleted_row_cnt = setdb.flushDb()
print(time.strftime('%Y-%m-%d %H:%M:%S'), 'old record delete count :', deleted_row_cnt, flush=True)
scheduler.add_job(flush_db, 'cron', hour=SCHEDULED_HOUR, minute=SCHEDULED_MINUTE, id='flush_db')
print(time.strftime('%Y-%m-%d %H:%M:%S'), ' [*] db_refresher started. Runs every day at [',
'{0:02d}'.format(SCHEDULED_HOUR),':','{0:02d}'.format(SCHEDULED_MINUTE),']. To exit press CTRL+C', flush=True)
scheduler.start() | [
"apscheduler.schedulers.blocking.BlockingScheduler",
"time.strftime",
"setdb.flushDb"
] | [((100, 119), 'apscheduler.schedulers.blocking.BlockingScheduler', 'BlockingScheduler', ([], {}), '()\n', (117, 119), False, 'from apscheduler.schedulers.blocking import BlockingScheduler\n'), ((199, 214), 'setdb.flushDb', 'setdb.flushDb', ([], {}), '()\n', (212, 214), False, 'import setdb\n'), ((423, 457), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (436, 457), False, 'import time\n'), ((225, 259), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (238, 259), False, 'import time\n')] |
'''
Map from the ACA catalogue to the IRAM catalogue from Corbelli+2017
We'll also determine which clouds have 13CO detected and
their evolutionary phase
'''
from astropy.table import Table
from astropy.io import fits
import os
import astropy.units as u
from astropy.coordinates import SkyCoord
from spectral_cube import SpectralCube
import numpy as np
# data_path = os.path.expanduser("~/storage/M33/")
data_path = os.path.expanduser("~/bigdata/ekoch/M33/")
aca_path = f"{data_path}/ALMA/ACA_Band6/"
corbelli_table = Table.read(f"{data_path}/Corbelli_17_catalogues/J_A+A_601_A146_table5.dat.fits")
aca_table = Table.read(f"{aca_path}/cprops_12CO21/M33_ACA_12CO21_0p7kms_fullmosaic_roundbeam.image_K_M33_co21_m33_props.fits")
# The beam is ~12 arcsec. We're going to require matched clouds to be within
# 1.5 beams
max_sep = 12 * u.arcsec * 1.5
iram_cloud_coords = SkyCoord(corbelli_table['RAdeg'],
corbelli_table['DEdeg'],
frame='icrs')
dist_matrix = np.zeros((len(aca_table), len(corbelli_table))) * u.deg
for idx in range(len(aca_table)):
cloud_coord = SkyCoord(aca_table['XCTR_DEG'][idx] * u.deg,
aca_table['YCTR_DEG'][idx] * u.deg,
frame='icrs')
dist_matrix[idx] = cloud_coord.separation(iram_cloud_coords)
# Match the clouds. Assume that each ACA cloud is associated with 0 or 1
# IRAM clouds
mapping_dict = {}
iram_cloud_index = np.arange(len(corbelli_table))
for idx in range(len(aca_table)):
mapping_dict[idx] = []
matches = np.where(dist_matrix[idx] < max_sep)[0]
if len(matches) == 0:
continue
match_dists = dist_matrix[idx][matches]
    # Otherwise, walk the candidates in order of increasing distance and keep
    # a match only if this ACA cloud is the closest one to that IRAM cloud.
for idx2 in np.argsort(match_dists):
match_idx = matches[idx2]
match_dist = match_dists[idx2]
# If this is the smallest distance from the IRAM cloud,
# include the mapping and ignore the rest.
if match_dist == dist_matrix[:, match_idx].min():
mapping_dict[idx].append(match_idx)
# Otherwise, we don't map those clouds.
# Need to save this in some format.
# Convert to an array. Pad empty spots
max_match = 0
for match in mapping_dict:
nmatch = len(mapping_dict[match])
if max_match < nmatch:
max_match = nmatch
out_array = np.zeros((len(aca_table), max_match + 1), dtype=int)
for match in mapping_dict:
nmatch = len(mapping_dict[match])
out_array[match, 0] = match
out_array[match, 1:nmatch + 1] = mapping_dict[match]
if nmatch < max_match:
        out_array[match, nmatch + 1:] = (max_match - nmatch) * [-1]
columns = ['ACA_IDX'] + [f'IRAM_IDX_{i + 1}' for i in range(max_match)]
match_table = Table(data=out_array, names=columns)
match_table.write(f"{aca_path}/cprops_12CO21/M33_ACA_12CO21_0p7kms_fullmosaic_roundbeam.image_K_M33_co21.GMCcat_mapto_IRAM.fits",
overwrite=True)
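# A minimal sketch of how the saved mapping could be used downstream; the
# 'ACA_IDX' / 'IRAM_IDX_1' columns and the -1 padding follow the table built
# above, and the comparison step is only an illustrative placeholder.
def _match_usage_sketch():
    for row in match_table:
        iram_idx = int(row['IRAM_IDX_1'])
        if iram_idx < 0:  # -1 padding means no matched IRAM cloud
            continue
        aca_cloud = aca_table[int(row['ACA_IDX'])]
        iram_cloud = corbelli_table[iram_idx]
        # e.g. compare cloud properties (radius, mass, CO brightness) here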
| [
"astropy.table.Table",
"numpy.where",
"astropy.coordinates.SkyCoord",
"numpy.argsort",
"os.path.expanduser",
"astropy.table.Table.read"
] | [((421, 463), 'os.path.expanduser', 'os.path.expanduser', (['"""~/bigdata/ekoch/M33/"""'], {}), "('~/bigdata/ekoch/M33/')\n", (439, 463), False, 'import os\n'), ((525, 610), 'astropy.table.Table.read', 'Table.read', (['f"""{data_path}/Corbelli_17_catalogues/J_A+A_601_A146_table5.dat.fits"""'], {}), "(f'{data_path}/Corbelli_17_catalogues/J_A+A_601_A146_table5.dat.fits'\n )\n", (535, 610), False, 'from astropy.table import Table\n'), ((619, 743), 'astropy.table.Table.read', 'Table.read', (['f"""{aca_path}/cprops_12CO21/M33_ACA_12CO21_0p7kms_fullmosaic_roundbeam.image_K_M33_co21_m33_props.fits"""'], {}), "(\n f'{aca_path}/cprops_12CO21/M33_ACA_12CO21_0p7kms_fullmosaic_roundbeam.image_K_M33_co21_m33_props.fits'\n )\n", (629, 743), False, 'from astropy.table import Table\n'), ((873, 945), 'astropy.coordinates.SkyCoord', 'SkyCoord', (["corbelli_table['RAdeg']", "corbelli_table['DEdeg']"], {'frame': '"""icrs"""'}), "(corbelli_table['RAdeg'], corbelli_table['DEdeg'], frame='icrs')\n", (881, 945), False, 'from astropy.coordinates import SkyCoord\n'), ((2759, 2795), 'astropy.table.Table', 'Table', ([], {'data': 'out_array', 'names': 'columns'}), '(data=out_array, names=columns)\n', (2764, 2795), False, 'from astropy.table import Table\n'), ((1129, 1228), 'astropy.coordinates.SkyCoord', 'SkyCoord', (["(aca_table['XCTR_DEG'][idx] * u.deg)", "(aca_table['YCTR_DEG'][idx] * u.deg)"], {'frame': '"""icrs"""'}), "(aca_table['XCTR_DEG'][idx] * u.deg, aca_table['YCTR_DEG'][idx] * u\n .deg, frame='icrs')\n", (1137, 1228), False, 'from astropy.coordinates import SkyCoord\n'), ((1780, 1803), 'numpy.argsort', 'np.argsort', (['match_dists'], {}), '(match_dists)\n', (1790, 1803), True, 'import numpy as np\n'), ((1581, 1617), 'numpy.where', 'np.where', (['(dist_matrix[idx] < max_sep)'], {}), '(dist_matrix[idx] < max_sep)\n', (1589, 1617), True, 'import numpy as np\n')] |
# -*- coding:utf-8 -*-
import os
import shutil
import subprocess
import time
import pytest
from Basic import Log
# Clear the app's cached data
# os.system("adb shell pm clear com.baoneng.appstore")
# Configure the network for the virtual machine
# os.system("adb shell setprop net.dns1 192.168.1.1")
# Check whether the test-result directory already contains log files; delete them if present
# file = 'result'
# files = os.listdir(file)
# for i in files:
# if i.endswith(".json"):
# os.remove(os.path.join(file + '//' +i))
PATH = os.path.split(os.path.realpath(__file__))[0]
xml_report_path = PATH + "/result/xml"
html_report_path = PATH + "/report/html"
tm = time.strftime("%Y-%m-%d-%H:%M:%S", time.localtime(time.time()))
def invoke(md):
output, errors = subprocess.Popen(md, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
o = output.decode("utf-8")
return o
if __name__ == '__main__':
log = Log.MyLog()
log.info("-----------------------------START: %s----------------------------------" % tm)
shutil.rmtree(xml_report_path)
    args = ['-s', '-v', 'test_case_path', '--alluredir', xml_report_path]  # 'test_case_path' is a placeholder for the test-case directory
pytest.main(args)
cmd = 'allure generate %s -o %s --clean' % (xml_report_path, html_report_path)
invoke(cmd)
log.info("-----------------------------END: %s------------------------------------" % tm)
os.system("allure open report/html")
| [
"subprocess.Popen",
"pytest.main",
"Basic.Log.MyLog",
"os.path.realpath",
"shutil.rmtree",
"os.system",
"time.time"
] | [((838, 849), 'Basic.Log.MyLog', 'Log.MyLog', ([], {}), '()\n', (847, 849), False, 'from Basic import Log\n'), ((948, 978), 'shutil.rmtree', 'shutil.rmtree', (['xml_report_path'], {}), '(xml_report_path)\n', (961, 978), False, 'import shutil\n'), ((1049, 1066), 'pytest.main', 'pytest.main', (['args'], {}), '(args)\n', (1060, 1066), False, 'import pytest\n'), ((1264, 1300), 'os.system', 'os.system', (['"""allure open report/html"""'], {}), "('allure open report/html')\n", (1273, 1300), False, 'import os\n'), ((442, 468), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (458, 468), False, 'import os\n'), ((608, 619), 'time.time', 'time.time', ([], {}), '()\n', (617, 619), False, 'import time\n'), ((661, 746), 'subprocess.Popen', 'subprocess.Popen', (['md'], {'shell': '(True)', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), '(md, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )\n', (677, 746), False, 'import subprocess\n')] |
import torch
import math
# definition of the general Module class
class Module(object):
def forward(self, *input):
raise NotImplementedError
def backward(self, *gradwrtoutput):
raise NotImplementedError
def param(self):
return []
# definition of the general Linear class
class Linear(Module):
def __init__(self, input_dim, output_dim):
super().__init__()
self.input = 0
# init the weight tensor
self.weight = torch.Tensor(input_dim, output_dim).normal_()
# init the bias tensor
self.bias = torch.Tensor(1, output_dim).normal_()
# init the derivative tensors
self.dl_dw = torch.Tensor(self.weight.size())
self.dl_db = torch.Tensor(self.bias.size())
def forward(self, input):
# store the input for the backward step
self.input = input
output = self.input.mm(self.weight) + self.bias
return output
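    # backward: accumulate the parameter gradients and return the gradient w.r.t. the layer input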
def backward(self, grdwrtoutput):
self.dl_dw += self.input.t().mm(grdwrtoutput)
self.dl_db += grdwrtoutput.mean(0) * self.input.size(0)
output = grdwrtoutput.mm(self.weight.t())
return output
def param(self):
# store the pairs of weights and derivatives
return [(self.weight, self.dl_dw), (self.bias, self.dl_db)]
# definition of the general ReLu class : max(0, x)
class ReLu(Module):
def __init__(self):
super().__init__()
self.s = 0
def forward(self, input):
self.s = input
return input.clamp(min=0.0)
def backward(self, grdwrtoutput):
drelu = self.s.sign().clamp(min=0.0)
return grdwrtoutput * drelu
def param(self):
return []
# definition of the general LeakyReLu class : max(alpha*x, x)
class LeakyReLu(Module):
def __init__(self, alpha=0.01):
super().__init__()
self.s = 0
self.alpha = alpha
def forward(self, input):
self.s = input
return input.clamp(min=0.0) + self.alpha*input.clamp(max=0.0)
def backward(self, grdwrtoutput):
drelu = torch.ones(self.s.size())
drelu[self.s < 0] = self.alpha
return grdwrtoutput * drelu
def param(self):
return []
# definition of the general tanh class
class Tanh(Module):
def __init__(self):
super().__init__()
self.s = 0
def forward(self, input):
self.s = input
return input.tanh() # call the func
def backward(self, grdwrtoutput):
dtanh = 1 - self.s.tanh().pow(2) # formula of deriv of tanh
return grdwrtoutput * dtanh
def param(self):
return []
# definition of the general sigmoid class
class Sigmoid(Module):
def __init__(self):
super().__init__()
self.s = 0
def forward(self, input):
self.s = input
return self.sigmoid_f(input) # call the sigmoid func below
def backward(self, grdwrtoutput):
dsigmoid = self.sigmoid_f(self.s) * (1 - self.sigmoid_f(self.s))
return grdwrtoutput * dsigmoid
def sigmoid_f(self, x):
return 1 / (1 + torch.exp(-x))
def param(self):
return []
# definition of the general Sequential class
class Sequential(Module):
def __init__(self, modules):
super().__init__()
self.modules = modules
def add_module(self, ind, module):
# add the module to the list of modules
self.modules.append(module)
return module
def forward(self, input):
output = input
for module in self.modules:
# apply forward of each module to the input
output = module.forward(output)
return output
def backward(self, grdwrtoutput):
output = grdwrtoutput
for module in self.modules[::-1]:
# apply backward of each module in reverse order
output = module.backward(output)
def param(self):
parameters = []
for module in self.modules:
# append all the parameters of all the modules
parameters.append(module.param())
return parameters
# definition of the general SGD class
class SGD():
def __init__(self, params, lr, reduce_lr_patience, reduce_lr_factor, early_stop_patience, monitor='val'):
self.params = params # the parameters of the model
self.lr = lr # the learning rate
self.plateau_counter = 0 # the counter to know since how many epochs we are stucked in a local minima
self.reduce_lr_patience = reduce_lr_patience # the number of epochs to wait stucked before reducing the learning rate
self.reduce_lr_factor = reduce_lr_factor # the factor by which we reduce the learning rate
self.early_stop_patience = early_stop_patience # the number of epochs to wait stucked before stopping the learning
self.monitor = monitor # the loss to monitor (validation or training)
# perform the gradient descent step
def step(self):
for module in self.params:
for weight, grad in module:
# remove from weight learningrate*grad for each module in each param (perform gradient descent)
weight -= self.lr * grad
# reset the gradients to zero
def zero_grad(self):
for module in self.params:
for weight, grad in module:
grad.zero_()
# reduce the learning rate based on the monitored loss
def reduce_lr_on_plateau(self, loss):
# if the feature is enabled
if self.reduce_lr_patience is not None:
self.plateau_counter += 1
# if the last value of val_loss is equal to the min, then reset the counter
if loss[-1] == min(loss):
self.plateau_counter = 0
# if counter bigger than the patience, reset and mul learning rate by reducing factor
elif self.plateau_counter > self.reduce_lr_patience:
self.plateau_counter = 0
self.lr *= self.reduce_lr_factor
print('New lr:', self.lr)
# stop the training based on the monitored loss
def early_stopping(self, loss):
# if the feature is enabled
if self.early_stop_patience is None:
return False
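        # stop when the best (minimum) loss was observed more than early_stop_patience epochs ago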
return torch.Tensor(loss).argmin() < len(loss) - self.early_stop_patience
# definition of the mean squared loss
class LossMSE(Module):
def __init__(self):
super().__init__()
def forward(self, y_pred, y):
return 0.5 * (y_pred - y.float()).pow(2).mean(1).sum()
def backward(self, y_pred, y):
return (y_pred - y.float()) / y.size(1)
def param(self):
return []
# function for the training of the model
def train_model(model, optimizer, X_train, y_train, X_val, y_val, epochs, batch_size):
all_train_loss = []
all_train_acc = []
all_val_loss = []
all_val_acc = []
for epoch in range(epochs):
# Training -------------------------------------------------------------------------------
train_loss = 0
train_errors = 0
for b in range(0, X_train.size(0), batch_size):
# begin by setting all grad of the optimizer to 0
optimizer.zero_grad()
x = X_train[b:b+batch_size]
y = y_train[b:b+batch_size]
# will call forward of all modules of the model (Sequential)
output = model.forward(x)
# number of errors on training set
train_errors += (output.argmax(1) != y.argmax(1)).sum()
# compute the loss and its derivatives
train_loss += LossMSE().forward(output, y.float())
dl_dloss = LossMSE().backward(output, y.float())
# will call backward of all modules of the model (Sequential)
model.backward(dl_dloss)
# perform the optimization step (gradient descent)
optimizer.step()
# store the training loss and accuracy
train_loss = train_loss.item() / X_train.size(0)
all_train_loss.append(train_loss)
all_train_acc.append(1 - float(train_errors) / X_train.size(0))
# Validation --------------------------------------------------------------------------------
val_loss = 0
val_errors = 0
for b in range(0, X_val.size(0), batch_size):
x = X_val[b:b+batch_size]
y = y_val[b:b+batch_size]
# will call forward of all modules of the model (Sequential)
output = model.forward(x)
# number of errors on the validation set
val_errors += (output.argmax(1) != y.argmax(1)).sum()
# compute the validation loss
val_loss += LossMSE().forward(output, y.float())
# store the validation loss and accuracy
val_loss = val_loss.item() / X_val.size(0)
all_val_loss.append(val_loss)
all_val_acc.append(1 - float(val_errors) / X_val.size(0))
        if epoch % max(1, epochs // 20) == 0:  # max(1, ...) avoids division by zero for very short runs
print('Epoch: {}: train -> {:.5f}, validation -> {:.5f}'.format(epoch, train_loss, val_loss))
# base on the loss to monitor, reduce learning size or stop earlier if needed
loss_to_analyse = all_val_loss if optimizer.monitor == 'val' else all_train_loss
optimizer.reduce_lr_on_plateau(loss_to_analyse)
if optimizer.early_stopping(loss_to_analyse):
print('Early Stopping')
break
return all_train_loss, all_train_acc, all_val_loss, all_val_acc
# function for testing the model
def test_model(model, X_test, y_test, batch_size):
test_errors = 0
for b in range(0, X_test.size(0), batch_size):
x = X_test[b:b+batch_size]
y = y_test[b:b+batch_size]
# we compute the output by forwarding in all modules
output = model.forward(x)
# number of errors for this batch
test_errors += (output.argmax(1) != y.argmax(1)).sum()
test_acc = 1 - float(test_errors)/X_test.size(0)
return test_acc
# function to generate n data points X uniformly in [0,1]² and labels y:
# 0 if the point lies outside the disk of radius 1/sqrt(2*pi) centred at (0.5, 0.5), else 1
def generate_data(n):
X = torch.empty(n, 2).uniform_()
y = (((X[:, 0]-0.5).pow(2) + (X[:, 1]-0.5).pow(2)) <= 1 /
(2*math.pi)).long().view(-1, 1) # circle centered at 0.5, 0.5
return X, y
# function to normalize data
def normalize(data):
return (data - data.mean(0)) / data.std(0)
# function that splits the given dataset in two according to train_size,
# with optional shuffling
def split_train_test(X, y, train_size=0.8, shuffle=True):
if shuffle:
perm = torch.randperm(X.size(0))
X = X[perm]
y = y[perm]
cut = int(train_size * X.size(0))
X_train = X[:cut]
y_train = y[:cut]
X_val = X[cut:]
y_val = y[cut:]
return X_train, y_train, X_val, y_val
# function that takes labels and one-hot encodes them
def one_hot_encode(y):
one_hot = torch.empty(y.size(0), 2).zero_()
one_hot[torch.arange(y.size(0)), y[:, 0]] = 1
return one_hot
| [
"torch.empty",
"torch.exp",
"torch.Tensor"
] | [((10193, 10210), 'torch.empty', 'torch.empty', (['n', '(2)'], {}), '(n, 2)\n', (10204, 10210), False, 'import torch\n'), ((487, 522), 'torch.Tensor', 'torch.Tensor', (['input_dim', 'output_dim'], {}), '(input_dim, output_dim)\n', (499, 522), False, 'import torch\n'), ((584, 611), 'torch.Tensor', 'torch.Tensor', (['(1)', 'output_dim'], {}), '(1, output_dim)\n', (596, 611), False, 'import torch\n'), ((3112, 3125), 'torch.exp', 'torch.exp', (['(-x)'], {}), '(-x)\n', (3121, 3125), False, 'import torch\n'), ((6274, 6292), 'torch.Tensor', 'torch.Tensor', (['loss'], {}), '(loss)\n', (6286, 6292), False, 'import torch\n')] |
__doc__ = """
QCP rotation calculation
This is an RMSD and optimal rotation calculator, written in pure Python. The
goal of this is to allow the code to be run in a JIT compiler such as PyPy,
Jython that cannot interface with extern C modules, such as numpy.
The algorithm was originally developed by D<NAME>obald as a C module,
[qcp][qcp], which solves the eigenvalue decomposition in quaternion space, and
thus avoids the expensive SVD decomposition of 3D rotational matrices. The
current code is based on a Cython adaption of qcp, [pyqcprot][pyqcprot],
written by <NAME>.
[pyqcprot]: https://github.com/synapticarbors/pyqcprot
[qcp]: http://theobald.brandeis.edu/qcp/
References:
<NAME>. (2005) "Rapid calculation of RMSD using a quaternion-
based characteristic polynomial." Acta Crystallographica A. 61(4):478-480
<NAME>, <NAME> and <NAME>. (2010) "Fast
determination of the optimal rotational matrix for macromolecular
superpositions."J. Comput. Chem. 31, 1561-1563
<NAME> (2011) "Pyqcprot"
https://github.com/synapticarbors/pyqcprot
# BSD License
-----------------------------------------------------------------------------
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the <ORGANIZATION> nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-----------------------------------------------------------------------------
"""
import math
def make_correlation_matrix(coords1, coords2):
"""
    Returns E0 and A, a 3x3 matrix represented as a list of 9 values, which
    represents the correlation matrix between the coords. E0 is the static
component of the RMSD, which is half the sum of the squared lengths of the
coordinate vectors.
Parameters:
    - coords1, coords2: lists of N coordinate triples (each a list of 3 floats)
"""
N = len(coords1)
assert N == len(coords2)
G1 = 0.0
G2 = 0.0
A = [0.0 for i in range(9)]
    for i in range(N):
x1 = coords1[i][0]
y1 = coords1[i][1]
z1 = coords1[i][2]
G1 += (x1*x1 + y1*y1 + z1*z1)
x2 = coords2[i][0]
y2 = coords2[i][1]
z2 = coords2[i][2]
G2 += (x2*x2 + y2*y2 + z2*z2)
A[0] += (x1 * x2)
A[1] += (x1 * y2)
A[2] += (x1 * z2)
A[3] += (y1 * x2)
A[4] += (y1 * y2)
A[5] += (y1 * z2)
A[6] += (z1 * x2)
A[7] += (z1 * y2)
A[8] += (z1 * z2)
E0 = (G1 + G2) * 0.5
return E0, A
def calc_rms_rot(coords1, coords2):
"""
Returns rms and a list of 9 values that represents a rotation
matrix.
Args:
    coords1, coords2: lists of N coordinate triples (each a list of 3 floats),
    i.e. each represents an Nx3 matrix of coordinate vectors.
"""
E0, A = make_correlation_matrix(coords1, coords2)
N = len(coords1)
oldg = 0.0
evecprec = 1e-6
evalprec = 1e-14
Sxx = A[0]
Sxy = A[1]
Sxz = A[2]
Syx = A[3]
Syy = A[4]
Syz = A[5]
Szx = A[6]
Szy = A[7]
Szz = A[8]
Sxx2 = Sxx * Sxx
Syy2 = Syy * Syy
Szz2 = Szz * Szz
Sxy2 = Sxy * Sxy
Syz2 = Syz * Syz
Sxz2 = Sxz * Sxz
Syx2 = Syx * Syx
Szy2 = Szy * Szy
Szx2 = Szx * Szx
SyzSzymSyySzz2 = 2.0*(Syz*Szy - Syy*Szz)
Sxx2Syy2Szz2Syz2Szy2 = Syy2 + Szz2 - Sxx2 + Syz2 + Szy2
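    # coefficients of the quartic characteristic polynomial x^4 + C[2]*x^2 + C[1]*x + C[0]
    # of the 4x4 quaternion key matrix (the cubic term vanishes because the matrix is traceless)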
C = [0.0 for i in range(3)]
C[2] = -2.0 * (Sxx2 + Syy2 + Szz2 + Sxy2 + Syx2 + Sxz2 + Szx2 + Syz2 + Szy2)
C[1] = 8.0 * (Sxx*Syz*Szy + Syy*Szx*Sxz + Szz*Sxy*Syx - Sxx*Syy*Szz - Syz*Szx*Sxy - Szy*Syx*Sxz)
SxzpSzx = Sxz + Szx
SyzpSzy = Syz + Szy
SxypSyx = Sxy + Syx
SyzmSzy = Syz - Szy
SxzmSzx = Sxz - Szx
SxymSyx = Sxy - Syx
SxxpSyy = Sxx + Syy
SxxmSyy = Sxx - Syy
Sxy2Sxz2Syx2Szx2 = Sxy2 + Sxz2 - Syx2 - Szx2
C[0] = (Sxy2Sxz2Syx2Szx2 * Sxy2Sxz2Syx2Szx2
+ (Sxx2Syy2Szz2Syz2Szy2 + SyzSzymSyySzz2) * (Sxx2Syy2Szz2Syz2Szy2 - SyzSzymSyySzz2)
+ (-(SxzpSzx)*(SyzmSzy)+(SxymSyx)*(SxxmSyy-Szz)) * (-(SxzmSzx)*(SyzpSzy)+(SxymSyx)*(SxxmSyy+Szz))
+ (-(SxzpSzx)*(SyzpSzy)-(SxypSyx)*(SxxpSyy-Szz)) * (-(SxzmSzx)*(SyzmSzy)-(SxypSyx)*(SxxpSyy+Szz))
+ (+(SxypSyx)*(SyzpSzy)+(SxzpSzx)*(SxxmSyy+Szz)) * (-(SxymSyx)*(SyzmSzy)+(SxzpSzx)*(SxxpSyy+Szz))
+ (+(SxypSyx)*(SyzmSzy)+(SxzmSzx)*(SxxmSyy-Szz)) * (-(SxymSyx)*(SyzpSzy)+(SxzmSzx)*(SxxpSyy-Szz)))
mxEigenV = E0
n_iter = 50
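    # Newton-Raphson iteration for the largest root of the characteristic polynomial
    # (the maximal eigenvalue), seeded with E0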
for i in range(n_iter):
oldg = mxEigenV
x2 = mxEigenV*mxEigenV
b = (x2 + C[2])*mxEigenV
a = b + C[1]
delta = ((a*mxEigenV + C[0])/(2.0*x2*mxEigenV + b + a))
mxEigenV -= delta
if (abs(mxEigenV - oldg) < abs((evalprec)*mxEigenV)):
break
else:
raise Exception("More iterations needed to find eigenvalue")
val = 2.0 * (E0 - mxEigenV)/float(N)
if abs(val) < evecprec:
rms = 0.0
else:
rms = math.sqrt(val)
rot = [0.0 for i in range(9)]
    a11 = SxxpSyy + Szz - mxEigenV
    a12 = SyzmSzy
    a13 = -SxzmSzx
    a14 = SxymSyx
    a21 = SyzmSzy
    a22 = SxxmSyy - Szz - mxEigenV
    a23 = SxypSyx
    a24 = SxzpSzx
    a31 = a13
    a32 = a23
    a33 = Syy - Sxx - Szz - mxEigenV
    a34 = SyzpSzy
    a41 = a14
    a42 = a24
    a43 = a34
    a44 = Szz - SxxpSyy - mxEigenV
    a3344_4334 = a33 * a44 - a43 * a34
    a3244_4234 = a32 * a44 - a42 * a34
    a3243_4233 = a32 * a43 - a42 * a33
    a3143_4133 = a31 * a43 - a41 * a33
    a3144_4134 = a31 * a44 - a41 * a34
    a3142_4132 = a31 * a42 - a41 * a32
q1 = a22*a3344_4334-a23*a3244_4234+a24*a3243_4233
q2 = -a21*a3344_4334+a23*a3144_4134-a24*a3143_4133
q3 = a21*a3244_4234-a22*a3144_4134+a24*a3142_4132
q4 = -a21*a3243_4233+a22*a3143_4133-a23*a3142_4132
qsqr = q1 * q1 + q2 * q2 + q3 * q3 + q4 * q4
# The following code tries to calculate another column in the adjoint
# matrix when the norm of the current column is too small. Usually
# this commented block will never be activated. To be absolutely safe
# this should be uncommented, but it is most likely unnecessary.
if (qsqr < evecprec):
q1 = a12*a3344_4334 - a13*a3244_4234 + a14*a3243_4233
q2 = -a11*a3344_4334 + a13*a3144_4134 - a14*a3143_4133
q3 = a11*a3244_4234 - a12*a3144_4134 + a14*a3142_4132
q4 = -a11*a3243_4233 + a12*a3143_4133 - a13*a3142_4132
qsqr = q1*q1 + q2 *q2 + q3*q3+q4*q4
if (qsqr < evecprec):
a1324_1423 = a13 * a24 - a14 * a23
a1224_1422 = a12 * a24 - a14 * a22
a1223_1322 = a12 * a23 - a13 * a22
a1124_1421 = a11 * a24 - a14 * a21
a1123_1321 = a11 * a23 - a13 * a21
a1122_1221 = a11 * a22 - a12 * a21
q1 = a42 * a1324_1423 - a43 * a1224_1422 + a44 * a1223_1322
q2 = -a41 * a1324_1423 + a43 * a1124_1421 - a44 * a1123_1321
q3 = a41 * a1224_1422 - a42 * a1124_1421 + a44 * a1122_1221
q4 = -a41 * a1223_1322 + a42 * a1123_1321 - a43 * a1122_1221
qsqr = q1*q1 + q2 *q2 + q3*q3+q4*q4
if (qsqr < evecprec):
q1 = a32 * a1324_1423 - a33 * a1224_1422 + a34 * a1223_1322
q2 = -a31 * a1324_1423 + a33 * a1124_1421 - a34 * a1123_1321
q3 = a31 * a1224_1422 - a32 * a1124_1421 + a34 * a1122_1221
q4 = -a31 * a1223_1322 + a32 * a1123_1321 - a33 * a1122_1221
qsqr = q1*q1 + q2 *q2 + q3*q3 + q4*q4
if (qsqr < evecprec):
# if qsqr is still too small, return the identity matrix. #
rot[0] = rot[4] = rot[8] = 1.0
rot[1] = rot[2] = rot[3] = rot[5] = rot[6] = rot[7] = 0.0
return rms, rot
normq = math.sqrt(qsqr)
q1 /= normq
q2 /= normq
q3 /= normq
q4 /= normq
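    # assemble the optimal rotation matrix from the normalized eigen-quaternion (q1, q2, q3, q4)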
a2 = q1 * q1
x2 = q2 * q2
y2 = q3 * q3
z2 = q4 * q4
xy = q2 * q3
az = q1 * q4
zx = q4 * q2
ay = q1 * q3
yz = q3 * q4
ax = q1 * q2
rot[0] = a2 + x2 - y2 - z2
rot[1] = 2 * (xy + az)
rot[2] = 2 * (zx - ay)
rot[3] = 2 * (xy - az)
rot[4] = a2 - x2 + y2 - z2
rot[5] = 2 * (yz + ax)
rot[6] = 2 * (zx + ay)
rot[7] = 2 * (yz - ax)
rot[8] = a2 - x2 - y2 + z2
return rms, rot
| [
"math.sqrt"
] | [((8490, 8505), 'math.sqrt', 'math.sqrt', (['qsqr'], {}), '(qsqr)\n', (8499, 8505), False, 'import math\n'), ((5899, 5913), 'math.sqrt', 'math.sqrt', (['val'], {}), '(val)\n', (5908, 5913), False, 'import math\n')] |
# Copyright (C) 2017 MetaBrainz Foundation
# Distributed under the MIT license, see the LICENSE file for details.
# Simple WSGI module intended to be used by uWSGI.
from artwork_redirect.server import Server
from artwork_redirect.config import load_config
from artwork_redirect.loggers import init_raven_client
config = load_config()
sentry_dsn = config.sentry.dsn
if sentry_dsn:
init_raven_client(sentry_dsn)
application = Server(config)
| [
"artwork_redirect.config.load_config",
"artwork_redirect.loggers.init_raven_client",
"artwork_redirect.server.Server"
] | [((324, 337), 'artwork_redirect.config.load_config', 'load_config', ([], {}), '()\n', (335, 337), False, 'from artwork_redirect.config import load_config\n'), ((434, 448), 'artwork_redirect.server.Server', 'Server', (['config'], {}), '(config)\n', (440, 448), False, 'from artwork_redirect.server import Server\n'), ((389, 418), 'artwork_redirect.loggers.init_raven_client', 'init_raven_client', (['sentry_dsn'], {}), '(sentry_dsn)\n', (406, 418), False, 'from artwork_redirect.loggers import init_raven_client\n')] |
from keepa_request import get_varies
import pandas as pd
import datetime
import pymysql
def stock_handle(file):
stock_list = []
# data = pd.read_excel(file)
# asin_list = data['asin'].tolist()
asin_list = ['B07XFCX2Z5']
for asin in asin_list:
stock_list.extend(get_varies(asin))
print(stock_list)
aft = "./data/stock_" + datetime.datetime.now().strftime("%m%d%H%M")
data_pd = pd.DataFrame(stock_list, columns=['parent_asin', 'asin', 'style', 'stock', 'model'])
data_pd.drop_duplicates(subset=['asin'], inplace=True)
data_pd.to_excel(aft + '.xlsx')
conn = pymysql.connect(host='localhost', port=3306, db='amazon_test', user='root', passwd='<PASSWORD>')
cs = conn.cursor()
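    # insert one row per ASIN into amazon_test.amazon_stock, stamped with the current time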
for each in data_pd.values.tolist():
parent_asin, asin, style, stock, model = each
stock_date = datetime.datetime.now()
insert_sql = "INSERT INTO amazon_test.amazon_stock(parent_asin, asin, style, stock, model, stock_date) VALUES" \
"(%s,%s,%s,%s,%s,%s)"
count = cs.execute(insert_sql, (parent_asin, asin, style, stock, model, stock_date))
print(count)
try:
conn.commit()
except:
conn.rollback()
cs.close()
if __name__ == '__main__':
file = r'E:\爬虫pycharm\data\goods_detail\目标产品.xlsx'
stock_handle(file) | [
"pandas.DataFrame",
"datetime.datetime.now",
"pymysql.connect",
"keepa_request.get_varies"
] | [((422, 510), 'pandas.DataFrame', 'pd.DataFrame', (['stock_list'], {'columns': "['parent_asin', 'asin', 'style', 'stock', 'model']"}), "(stock_list, columns=['parent_asin', 'asin', 'style', 'stock',\n 'model'])\n", (434, 510), True, 'import pandas as pd\n'), ((614, 714), 'pymysql.connect', 'pymysql.connect', ([], {'host': '"""localhost"""', 'port': '(3306)', 'db': '"""amazon_test"""', 'user': '"""root"""', 'passwd': '"""<PASSWORD>"""'}), "(host='localhost', port=3306, db='amazon_test', user='root',\n passwd='<PASSWORD>')\n", (629, 714), False, 'import pymysql\n'), ((850, 873), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (871, 873), False, 'import datetime\n'), ((290, 306), 'keepa_request.get_varies', 'get_varies', (['asin'], {}), '(asin)\n', (300, 306), False, 'from keepa_request import get_varies\n'), ((363, 386), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (384, 386), False, 'import datetime\n')] |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Unit tests for security_hub_custom_framework.py.
"""
import boto3
from botocore.exceptions import ClientError
import pytest
from security_hub_custom_framework import SecurityHub
@pytest.mark.parametrize('tokens, error_code, stop_on_action', [
([None, None], None, None),
([None, '1', None], None, None),
([None, None], 'TestException', 'stub_list_controls')])
def test_get_sechub_controls(
make_stubber, stub_runner, tokens, error_code, stop_on_action):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
sechub = SecurityHub(auditmanager_client)
control_list = [f'ctl-{"1"*36}', f'ctl-{"2"*36}']
ctl_sets = 0
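    # stub one list_controls call per pagination token, followed by one get_control call per control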
with stub_runner(error_code, stop_on_action) as runner:
for i_token in range(len(tokens) - 1):
ctl_sets += 1
runner.add(
auditmanager_stubber.stub_list_controls,
'Standard', 100, tokens[i_token:i_token+2], control_list)
for ctl in control_list:
runner.add(
auditmanager_stubber.stub_get_control,
ctl, 'AWS Security Hub')
if error_code is None:
got_control_list = sechub.get_sechub_controls()
assert [ctl['id'] for ctl in got_control_list] == control_list * ctl_sets
else:
with pytest.raises(ClientError) as exc_info:
sechub.get_sechub_controls()
assert exc_info.value.response['Error']['Code'] == error_code
@pytest.mark.parametrize('error_code', [None, 'TestException'])
def test_create_custom_framework(make_stubber, error_code):
auditmanager_client = boto3.client('auditmanager')
auditmanager_stubber = make_stubber(auditmanager_client)
sechub = SecurityHub(auditmanager_client)
controls = [{'id': f'ctl-{index*36}'} for index in ['1', '2']]
control_sets = [{'name': 'Security-Hub', 'controls': controls}]
fw = {'name': 'All Security Hub Controls Framework', 'id': f'fw-{"1"*36}'}
auditmanager_stubber.stub_create_assessment_framework(
fw['name'], control_sets, fw['id'], error_code=error_code)
if error_code is None:
sechub.create_custom_framework(controls)
else:
with pytest.raises(ClientError) as exc_info:
sechub.create_custom_framework(controls)
assert exc_info.value.response['Error']['Code'] == error_code
| [
"pytest.mark.parametrize",
"boto3.client",
"pytest.raises",
"security_hub_custom_framework.SecurityHub"
] | [((295, 483), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""tokens, error_code, stop_on_action"""', "[([None, None], None, None), ([None, '1', None], None, None), ([None, None],\n 'TestException', 'stub_list_controls')]"], {}), "('tokens, error_code, stop_on_action', [([None, None\n ], None, None), ([None, '1', None], None, None), ([None, None],\n 'TestException', 'stub_list_controls')])\n", (318, 483), False, 'import pytest\n'), ((1624, 1686), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""error_code"""', "[None, 'TestException']"], {}), "('error_code', [None, 'TestException'])\n", (1647, 1686), False, 'import pytest\n'), ((616, 644), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (628, 644), False, 'import boto3\n'), ((719, 751), 'security_hub_custom_framework.SecurityHub', 'SecurityHub', (['auditmanager_client'], {}), '(auditmanager_client)\n', (730, 751), False, 'from security_hub_custom_framework import SecurityHub\n'), ((1773, 1801), 'boto3.client', 'boto3.client', (['"""auditmanager"""'], {}), "('auditmanager')\n", (1785, 1801), False, 'import boto3\n'), ((1876, 1908), 'security_hub_custom_framework.SecurityHub', 'SecurityHub', (['auditmanager_client'], {}), '(auditmanager_client)\n', (1887, 1908), False, 'from security_hub_custom_framework import SecurityHub\n'), ((1470, 1496), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (1483, 1496), False, 'import pytest\n'), ((2351, 2377), 'pytest.raises', 'pytest.raises', (['ClientError'], {}), '(ClientError)\n', (2364, 2377), False, 'import pytest\n')] |
# -*- coding: utf-8 -*-
if 'test' not in __import__('sys').argv[0]:
import gevent.monkey
gevent.monkey.patch_all()
from couchdb import Server as CouchdbServer, Session
from logging import getLogger
from openprocurement.edge.utils import (
add_logging_context,
set_logging_context,
prepare_couchdb,
prepare_couchdb_views,
beforerender,
request_params,
set_renderer
)
LOGGER = getLogger("{}.init".format(__name__))
from pyramid.config import Configurator
from pyramid.events import NewRequest, BeforeRender, ContextFound
from pyramid.renderers import JSON, JSONP
from pyramid.settings import asbool
VALIDATE_DOC_ID = '_design/_auth'
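# CouchDB validate_doc_update function: blocks deletion of tender documents and restricts
# writes to admins (for design documents) and the single user substituted for %s below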
VALIDATE_DOC_UPDATE = """function(newDoc, oldDoc, userCtx){
if(newDoc._deleted && newDoc.tenderID) {
throw({forbidden: 'Not authorized to delete this document'});
}
if(userCtx.roles.indexOf('_admin') !== -1 && newDoc._id.indexOf('_design/') === 0) {
return;
}
if(userCtx.name === '%s') {
return;
} else {
throw({forbidden: 'Only authorized user may edit the database'});
}
}"""
class Server(CouchdbServer):
_uuid = None
@property
def uuid(self):
"""The uuid of the server.
:rtype: basestring
"""
if self._uuid is None:
_, _, data = self.resource.get_json()
self._uuid = data['uuid']
return self._uuid
def main(global_config, **settings):
version = settings.get('api_version')
route_prefix = '/api/{}'.format(version)
config = Configurator(
autocommit=True,
settings=settings,
route_prefix=route_prefix,
)
config.include('pyramid_exclog')
config.include("cornice")
config.add_request_method(request_params, 'params', reify=True)
config.add_renderer('prettyjson', JSON(indent=4))
config.add_renderer('jsonp', JSONP(param_name='opt_jsonp'))
config.add_renderer('prettyjsonp', JSONP(indent=4, param_name='opt_jsonp'))
config.add_subscriber(add_logging_context, NewRequest)
config.add_subscriber(set_logging_context, ContextFound)
config.add_subscriber(set_renderer, NewRequest)
config.add_subscriber(beforerender, BeforeRender)
config.scan("openprocurement.edge.views.spore")
config.scan("openprocurement.edge.views.health")
resources = settings.get('resources') and settings['resources'].split(',')
couch_url = settings.get('couchdb.url') + settings.get('couchdb.db_name')
for resource in resources:
config.scan("openprocurement.edge.views." + resource)
prepare_couchdb_views(couch_url, resource, LOGGER)
LOGGER.info('Push couch {} views successful.'.format(resource))
LOGGER.info('{} resource initialized successful.'.format(resource.title()))
# CouchDB connection
server = Server(settings.get('couchdb.url'),
session=Session(retry_delays=range(10)))
config.registry.couchdb_server = server
config.registry.db = prepare_couchdb(settings.get('couchdb.url'),
settings.get('couchdb.db_name'),
LOGGER)
config.registry.server_id = settings.get('id', '')
config.registry.health_threshold = float(settings.get('health_threshold', 99))
config.registry.api_version = version
config.registry.update_after = asbool(settings.get('update_after', True))
return config.make_wsgi_app()
| [
"pyramid.config.Configurator",
"openprocurement.edge.utils.prepare_couchdb_views",
"pyramid.renderers.JSON",
"pyramid.renderers.JSONP"
] | [((1550, 1625), 'pyramid.config.Configurator', 'Configurator', ([], {'autocommit': '(True)', 'settings': 'settings', 'route_prefix': 'route_prefix'}), '(autocommit=True, settings=settings, route_prefix=route_prefix)\n', (1562, 1625), False, 'from pyramid.config import Configurator\n'), ((1830, 1844), 'pyramid.renderers.JSON', 'JSON', ([], {'indent': '(4)'}), '(indent=4)\n', (1834, 1844), False, 'from pyramid.renderers import JSON, JSONP\n'), ((1879, 1908), 'pyramid.renderers.JSONP', 'JSONP', ([], {'param_name': '"""opt_jsonp"""'}), "(param_name='opt_jsonp')\n", (1884, 1908), False, 'from pyramid.renderers import JSON, JSONP\n'), ((1949, 1988), 'pyramid.renderers.JSONP', 'JSONP', ([], {'indent': '(4)', 'param_name': '"""opt_jsonp"""'}), "(indent=4, param_name='opt_jsonp')\n", (1954, 1988), False, 'from pyramid.renderers import JSON, JSONP\n'), ((2580, 2630), 'openprocurement.edge.utils.prepare_couchdb_views', 'prepare_couchdb_views', (['couch_url', 'resource', 'LOGGER'], {}), '(couch_url, resource, LOGGER)\n', (2601, 2630), False, 'from openprocurement.edge.utils import add_logging_context, set_logging_context, prepare_couchdb, prepare_couchdb_views, beforerender, request_params, set_renderer\n')] |
import numpy as np
import matplotlib.pyplot as plt
import networkx as nx
import os
import pickle
import sys
import glob
from scipy.linalg import polar
from numpy import linalg as LA
import moviepy.editor as mp
import imageio
##########################################################################################
def get_frame_matrix(folder_name, frame):
"""Get the npy matrix for a frame of the movie."""
if frame < 10: file_root = '_matrices/frame-000%i'%(frame)
elif frame < 100: file_root = '_matrices/frame-00%i'%(frame)
else: file_root = '_matrices/frame-0%i'%(frame)
root = 'ALL_MOVIES_MATRICES/' + folder_name + file_root + '.npy'
raw_img = np.load(root)
return raw_img
##########################################################################################
def compute_frame_OOP(folder_name,frame_num):
"""Given a specific frame, compute Orientation Order Parameter (OOP) of the frame."""
num_frames = len(glob.glob('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy'))
out_file = 'ALL_MOVIES_PROCESSED' + '/' + folder_name + '/segmented_sarc'
ang = []
dat_fname = out_file + '/frame-%04d_sarc_data.txt'%(frame_num)
dat = np.loadtxt(dat_fname)
ang_dat = dat[:,6]
for jj in range(0,ang_dat.shape[0]):
val = ang_dat[jj]
ang.append(val)
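	# build the 2D structural (orientation) tensor: average of 2*n⊗n - I over all sarcomere
	# angles; its largest eigenvalue is the OOP and the matching eigenvector is the dominant direction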
mat = np.zeros((2,2))
for kk in range(0,len(ang)):
x = np.cos(ang[kk])
y = np.sin(ang[kk])
vec = np.asarray([x,y])
n = np.outer(vec,vec)
mat += 2.0*n - np.asarray([[1,0],[0,1]])
mat = mat / len(ang)
u, v = np.linalg.eig(mat)
OOP = np.max(u)
OOP_vec = v[:,np.argmax(u)]
return OOP, OOP_vec
##########################################################################################
def compute_frame_F(folder_name,frame_0,frame_t):
"""Compute the average deformation gradient given frame 0 and current frame."""
x_pos = np.loadtxt('ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/tracking_results_x_pos.txt')
y_pos = np.loadtxt('ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/tracking_results_y_pos.txt')
num_sarc = x_pos.shape[0]
num_time = x_pos.shape[1]
num_vec = int((num_sarc * num_sarc - num_sarc) / 2.0)
Lambda_0 = np.zeros((2,num_vec))
ix = 0
for kk in range(0,num_sarc):
for jj in range(kk+1,num_sarc):
x_vec = x_pos[kk,frame_0] - x_pos[jj,frame_0]
y_vec = y_pos[kk,frame_0] - y_pos[jj,frame_0]
Lambda_0[0,ix] = x_vec
Lambda_0[1,ix] = y_vec
ix += 1
Lambda_t = np.zeros((2,num_vec))
ix = 0
for kk in range(0,num_sarc):
for jj in range(kk+1,num_sarc):
x_vec = x_pos[kk,frame_t] - x_pos[jj,frame_t]
y_vec = y_pos[kk,frame_t] - y_pos[jj,frame_t]
Lambda_t[0,ix] = x_vec
Lambda_t[1,ix] = y_vec
ix += 1
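	# least-squares fit of the average deformation gradient: F = Lambda_t Lambda_0^T (Lambda_0 Lambda_0^T)^-1,
	# mapping all pairwise sarcomere vectors at frame_0 to their positions at frame_t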
term_1 = np.dot( Lambda_t , np.transpose(Lambda_0) )
term_2 = np.linalg.inv( np.dot( Lambda_0 , np.transpose(Lambda_0) ) )
F = np.dot(term_1 , term_2)
J = F[0,0]*F[1,1] - F[0,1]*F[1,0]
return F, J
##########################################################################################
def compute_all_OOP(folder_name):
"""Compute OOP for every frame."""
num_frames = len(glob.glob('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy'))
OOP_list = []; OOP_vec_list = []
for kk in range(0,num_frames):
OOP, OOP_vec = compute_frame_OOP(folder_name,kk)
OOP_list.append(OOP)
OOP_vec_list.append(OOP_vec)
return OOP_list, OOP_vec_list
##########################################################################################
def compute_all_F(folder_name, reference_frame):
"""Compute F and J for every frame."""
num_frames = len(glob.glob('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy'))
F_list = []; J_list = []
for kk in range(0,num_frames):
F, J = compute_frame_F(folder_name,reference_frame,kk)
F_list.append(F)
J_list.append(J)
return F_list, J_list
##########################################################################################
def compute_all_F_adjusted(folder_name):
"""Compute F and J for every frame. Reference frame is most relaxed frame."""
F_list, J_list = compute_all_F(folder_name, 0)
reference_frame = np.argmax(J_list)
F_list, J_list = compute_all_F(folder_name, reference_frame)
return F_list, J_list, reference_frame
##########################################################################################
def visualize_OOP_and_F_timeseries(OOP_list,J_list,folder_name):
"""Plot timeseries."""
external_folder_name = 'ALL_MOVIES_PROCESSED'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
plt.figure()
plt.subplot(1,2,1)
plt.plot(OOP_list)
plt.xlabel('frame number')
plt.ylabel('OOP')
plt.tight_layout()
plt.subplot(1,2,2)
plt.plot(J_list)
plt.xlabel('frame number')
plt.ylabel('average deformation J')
plt.tight_layout()
plt.savefig(out_analysis + '/OOP_J_timeseries')
return
##########################################################################################
def visualize_OOP_and_F_on_image(folder_name, frame_num, F_list, OOP_vec_list, OOP_list):
"""Plot the OOP and F visualize don the image"""
external_folder_name = 'ALL_MOVIES_PROCESSED'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
F = F_list[frame_num]
J = F[0,0]*F[1,1] - F[0,1]*F[1,0]
R, U = polar(F)
w, v = LA.eig(U)
v = np.dot(R, v)
vec_1 = v[:,np.argmin(w)]
vec_2 = v[:,np.argmax(w)]
raw_img = get_frame_matrix(folder_name, frame_num)
x_pos_mean = raw_img.shape[0]/2.0; y_pos_mean = raw_img.shape[1]/2.0
plt.figure(figsize=(5,5))
plt.imshow(raw_img, cmap=plt.cm.gray)
rad = .2*np.min([raw_img.shape[0],raw_img.shape[1]]); th = np.linspace(0,2.0*np.pi,100)
plt.plot([y_pos_mean-rad*vec_1[1],y_pos_mean+rad*vec_1[1]],[x_pos_mean-rad*vec_1[0],x_pos_mean+rad*vec_1[0]],'-',color=(255/255,204/255,203/255),linewidth=0.3)
plt.plot([y_pos_mean-rad*vec_2[1],y_pos_mean+rad*vec_2[1]],[x_pos_mean-rad*vec_2[0],x_pos_mean+rad*vec_2[0]],'-',color=(0.5,0.5,0.5),linewidth=0.3)
x_vec = []; y_vec = [] ; x_vec_circ = []; y_vec_circ = []
scale = np.asarray([[.9,0],[0,.9]])
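	# push a reference circle through F applied nine times (F^9) to exaggerate the average deformation for display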
for jj in range(0,100):
v = np.asarray([rad*np.cos(th[jj]),rad*np.sin(th[jj])])
#v_def = np.dot(np.dot(F_list_mat[jj],scale),v)
nest1 = np.dot(F,F); nest2 = np.dot(F,nest1); nest3 = np.dot(F,nest2)
nest4 = np.dot(F,nest3); nest5 = np.dot(F,nest4); nest6 = np.dot(F,nest5)
nest7 = np.dot(F,nest6); nest8 = np.dot(F,nest7)
v_def = np.dot(nest8,v)
x_vec.append(v_def[0] + x_pos_mean); y_vec.append(v_def[1] + y_pos_mean)
x_vec_circ.append(x_pos_mean + v[0]); y_vec_circ.append(y_pos_mean + v[1])
plt.plot(y_vec_circ,x_vec_circ,'-',color=(255/255,204/255,203/255),linewidth=0.3)
plt.plot(y_vec,x_vec,'-',color=(255/255,204/255,203/255),linewidth=1.0)
OOP_vec = OOP_vec_list[frame_num]
rad_OOP = rad*OOP_list[frame_num]
plt.plot([y_pos_mean - rad_OOP*OOP_vec[1],y_pos_mean + rad_OOP*OOP_vec[1]],[x_pos_mean - rad_OOP*OOP_vec[0],x_pos_mean + rad_OOP*OOP_vec[0]],'r-',linewidth=5)
plt.title('J: %.3f, OOP:%.3f, frame: %i'%(J,OOP_list[frame_num],frame_num))
ax = plt.gca()
ax.set_xticks([]); ax.set_yticks([]);
plt.savefig(out_analysis + '/OOP_J_on_img')
return
##########################################################################################
def compute_s(y_vec):
y_max = np.max(y_vec)
y_min = np.min(y_vec)
s = (y_max - y_min) / (y_max + 1)
return s
##########################################################################################
##########################################################################################
def compute_s_median(y_mat):
s_list = []
for kk in range(0,y_mat.shape[0]):
s = compute_s(y_mat[kk,:])
s_list.append(s)
return np.median(s_list), s_list
##########################################################################################
def compute_shortening(folder_name):
"""Compute \bar{s} and s_avg, two measures of sarcomere shortening."""
external_folder_name = 'ALL_MOVIES_PROCESSED/'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
# timeseries data
fname_leng = external_folder_name + folder_name + '/timeseries/tracking_results_leng.txt'
dat_leng = np.loadtxt(fname_leng)
dat_avg = np.mean(dat_leng,axis=0)
s_til, s_list = compute_s_median(dat_leng)
s_avg = compute_s(dat_avg)
np.savetxt(out_analysis + '/s_til.txt', np.asarray([s_til]))
np.savetxt(out_analysis + '/s_avg.txt', np.asarray([s_avg]))
return s_til, s_avg, s_list
##########################################################################################
def compute_metrics(folder_name):
"""Compute metrics, OOP, Ciso and C||."""
external_folder_name = 'ALL_MOVIES_PROCESSED'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
F_list, J_list, reference_frame = compute_all_F_adjusted(folder_name)
with open(out_analysis + '/F_list.pkl', 'wb') as f:
pickle.dump(F_list, f)
with open(out_analysis + '/J_list.pkl', 'wb') as f:
pickle.dump(J_list, f)
OOP_list, OOP_vec_list = compute_all_OOP(folder_name)
with open(out_analysis + '/OOP_list.pkl', 'wb') as f:
pickle.dump(OOP_list, f)
with open(out_analysis + '/OOP_vec_list.pkl', 'wb') as f:
pickle.dump(OOP_vec_list, f)
max_contract_frame = np.argmin(J_list)
visualize_OOP_and_F_timeseries(OOP_list,J_list,folder_name)
visualize_OOP_and_F_on_image(folder_name,max_contract_frame, F_list, OOP_vec_list, OOP_list)
selected_frame = np.argmin(J_list)
OOP_selected = OOP_list[selected_frame]
J = J_list[selected_frame]
F = F_list[selected_frame]
avg_contract = 1.0 - np.sqrt(J)
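	# pull the OOP direction back to the reference configuration with F^-1 and measure contraction along it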
v = OOP_vec_list[selected_frame]
v0 = np.dot(np.linalg.inv(F),v)
v_abs = np.sqrt((v[0])**2.0 + (v[1])**2.0)
v0_abs = np.sqrt((v0[0])**2.0 + (v0[1])**2.0)
avg_aligned_contract = (v0_abs - v_abs)/v0_abs
s_til, s_avg, s_list = compute_shortening(folder_name)
np.savetxt(out_analysis + '/OOP.txt', np.asarray([OOP_selected]))
np.savetxt(out_analysis + '/C_iso.txt',np.asarray([avg_contract]))
np.savetxt(out_analysis + '/C_OOP.txt',np.asarray([avg_aligned_contract]))
np.savetxt(out_analysis + '/s_til.txt',np.asarray([s_til]))
np.savetxt(out_analysis + '/s_avg.txt',np.asarray([s_avg]))
return OOP_selected, avg_contract, avg_aligned_contract, s_til, s_avg
##########################################################################################
def compute_metrics_load_state(folder_name):
"""Compute metrics, OOP, Ciso and C||. Start from loaded """
external_folder_name = 'ALL_MOVIES_PROCESSED'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
with open(out_analysis + '/F_list.pkl', 'rb') as f: F_list = pickle.load(f)
with open(out_analysis + '/J_list.pkl', 'rb') as f: J_list = pickle.load(f)
with open(out_analysis + '/OOP_list.pkl', 'rb') as f: OOP_list = pickle.load(f)
with open(out_analysis + '/OOP_vec_list.pkl', 'rb') as f: OOP_vec_list = pickle.load(f)
max_contract_frame = np.argmin(J_list)
visualize_OOP_and_F_timeseries(OOP_list,J_list,folder_name)
visualize_OOP_and_F_on_image(folder_name,max_contract_frame, F_list, OOP_vec_list, OOP_list)
selected_frame = np.argmin(J_list)
OOP_selected = OOP_list[selected_frame]
J = J_list[selected_frame]
F = F_list[selected_frame]
avg_contract = 1.0 - np.sqrt(J)
v = OOP_vec_list[selected_frame]
v0 = np.dot(np.linalg.inv(F),v)
v_abs = np.sqrt((v[0])**2.0 + (v[1])**2.0)
v0_abs = np.sqrt((v0[0])**2.0 + (v0[1])**2.0)
avg_aligned_contract = (v0_abs - v_abs)/v0_abs
np.savetxt(out_analysis + '/OOP.txt', np.asarray([OOP_selected]))
np.savetxt(out_analysis + '/C_iso.txt',np.asarray([avg_contract]))
np.savetxt(out_analysis + '/C_OOP.txt',np.asarray([avg_aligned_contract]))
return OOP_selected, avg_contract, avg_aligned_contract
##########################################################################################
def visualize_lambda_as_functional_metric(folder_name, include_eps=False):
"""Plot lambda 1 and lambda 2 along with a movie of the cell deforming with tracked sarcomeres marked."""
external_folder_name = 'ALL_MOVIES_PROCESSED/'
out_analysis = external_folder_name + '/' + folder_name + '/analysis'
# timeseries data
fname_leng = external_folder_name + folder_name + '/timeseries/tracking_results_leng.txt'
dat_leng = np.loadtxt(fname_leng)
avg_leng = np.mean(dat_leng,axis=0)
##########################################################################################
plot_info_frames_fname = 'ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/' + 'plotting_all_frames.pkl'
ALL_frames_above_thresh = pickle.load( open( plot_info_frames_fname , "rb" ) )
plot_info_x_pos_fname = 'ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/' + 'plotting_all_x.pkl'
ALL_x_pos_above_thresh = pickle.load( open( plot_info_x_pos_fname , "rb" ) )
plot_info_y_pos_fname = 'ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/' + 'plotting_all_y.pkl'
ALL_y_pos_above_thresh = pickle.load( open( plot_info_y_pos_fname , "rb" ) )
sarc_data_normalized_fname = 'ALL_MOVIES_PROCESSED/' + folder_name + '/timeseries/' + 'tracking_results_leng.txt'
all_normalized = np.loadtxt(sarc_data_normalized_fname)
color_matrix = np.zeros(all_normalized.shape)
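	# map each sarcomere's normalized length change onto [0, 1] for coloring,
	# clipped at +/-0.1 (shortened -> red, lengthened -> blue)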
for kk in range(0,all_normalized.shape[0]):
for jj in range(0,all_normalized.shape[1]):
of = all_normalized[kk,jj]
if of < -.1: color_matrix[kk,jj] = 0
elif of > .1: color_matrix[kk,jj] = 1
else: color_matrix[kk,jj] = of*5 + .5
##########################################################################################
out_plots = out_analysis + '/summary_plot'
if not os.path.exists(out_plots): os.makedirs(out_plots)
# F data
F_list = np.loadtxt(external_folder_name + '/' + folder_name + '/analysis/recovered_F.txt')
num_frames = F_list.shape[0]; x = []
lambda_1_list = []; vec_1_list = []
lambda_2_list = []; vec_2_list = []
J_list = []; F_list_mat = []
for kk in range(0,num_frames):
F00 = F_list[kk,0]; F01 = F_list[kk,1]; F10 = F_list[kk,2]; F11 = F_list[kk,3]
J_list.append(F00*F11 - F01*F10)
x.append(kk)
R, U = polar(np.asarray([[F00,F01],[F10,F11]]))
w, v = LA.eig(U)
lambda_1_list.append(np.min(w)); lambda_2_list.append(np.max(w))
v = np.dot(R, v)
vec_1_list.append(v[:,np.argmin(w)]); vec_2_list.append(v[:,np.argmax(w)])
F_list_mat.append(np.asarray([[F00,F01],[F10,F11]]))
##########################################################################################
img_list = []
for kk in range(0,num_frames):
t = kk
if t < 10: file_root = '/frame-000%i'%(t)
elif t < 100: file_root = '/frame-00%i'%(t)
else: file_root = '/frame-0%i'%(t)
fig = plt.figure(figsize=(10*.7,5*.7))
gs = fig.add_gridspec(2,2)
ax1 = fig.add_subplot(gs[:,0])
raw_img = get_frame_matrix(folder_name, kk)
x_pos_mean = raw_img.shape[0]/2.0; y_pos_mean = raw_img.shape[1]/2.0
plt.imshow(raw_img, cmap=plt.cm.gray)
##########################################################################################
for zz in range(0,all_normalized.shape[0]):
if kk in ALL_frames_above_thresh[zz]:
ix = np.argwhere(np.asarray(ALL_frames_above_thresh[zz]) == kk)[0][0]
col = (1-color_matrix[zz,kk], 0 , color_matrix[zz,kk])
yy = ALL_y_pos_above_thresh[zz][ix]
xx = ALL_x_pos_above_thresh[zz][ix]
plt.scatter(yy,xx,s=3,color=col,marker='o')
##########################################################################################
rad = .2*np.min([raw_img.shape[0],raw_img.shape[1]]); th = np.linspace(0,2.0*np.pi,100)
plt.plot([y_pos_mean-rad*vec_1_list[kk][1],y_pos_mean+rad*vec_1_list[kk][1]],[x_pos_mean-rad*vec_1_list[kk][0],x_pos_mean+rad*vec_1_list[kk][0]],'-',color=(255/255,204/255,203/255),linewidth=0.3)
plt.plot([y_pos_mean-rad*vec_2_list[kk][1],y_pos_mean+rad*vec_2_list[kk][1]],[x_pos_mean-rad*vec_2_list[kk][0],x_pos_mean+rad*vec_2_list[kk][0]],'-',color=(0.5,0.5,0.5),linewidth=0.3)
#plt.plot([y_pos_mean,y_pos_mean],[x_pos_mean-rad,x_pos_mean+rad],'-',color=(255/255,204/255,203/255),linewidth=0.2)
# add in eigenvector directions
x_vec = []; y_vec = [] ; x_vec_circ = []; y_vec_circ = []
scale = np.asarray([[.9,0],[0,.9]])
for jj in range(0,100):
v = np.asarray([rad*np.cos(th[jj]),rad*np.sin(th[jj])])
#v_def = np.dot(np.dot(F_list_mat[jj],scale),v)
nest1 = np.dot(F_list_mat[kk],F_list_mat[kk])
nest2 = np.dot(F_list_mat[kk],nest1)
nest3 = np.dot(F_list_mat[kk],nest2)
nest4 = np.dot(F_list_mat[kk],nest3)
nest5 = np.dot(F_list_mat[kk],nest4)
nest6 = np.dot(F_list_mat[kk],nest5)
nest7 = np.dot(F_list_mat[kk],nest6)
nest8 = np.dot(F_list_mat[kk],nest7)
v_def = np.dot(nest8,v)
x_vec.append(v_def[0] + x_pos_mean); y_vec.append(v_def[1] + y_pos_mean)
x_vec_circ.append(x_pos_mean + v[0]); y_vec_circ.append(y_pos_mean + v[1])
plt.plot(y_vec_circ,x_vec_circ,'-',color=(255/255,204/255,203/255),linewidth=0.3)
plt.plot(y_vec,x_vec,'-',color=(255/255,204/255,203/255),linewidth=1.0)
ax = plt.gca()
ax.set_xticks([]); ax.set_yticks([]);
##########################################################################################
##########################################################################################
ax = fig.add_subplot(gs[0,1])
ax.set_title('average deformation')
ax.plot(x,lambda_1_list,'-',color='k',linewidth=1,label='λ1')
ax.plot(x,lambda_2_list,'-',color=(0.5,0.5,0.5),linewidth=1,label='λ2')
ax.plot(x[kk],lambda_1_list[kk],'o',mfc=(.7,0,0),mec=(0,0,0),markersize=7)
ax.plot(x[kk],lambda_2_list[kk],'o',mfc=(.7,0,0),mec=(0.5,0.5,0.5),markersize=7)
ax.set_xlim((np.min(x)-2,np.max(x)+2))
plt.legend(loc='upper right')
#ax.set_ylabel('avg deformation')
ax2 = fig.add_subplot(gs[1,1])
#ax2.set_ylabel('sarc length')
ax2.set_title('normalized sarcomere length')
ax2.plot(dat_leng.T,linewidth=5/dat_leng.shape[0],color=(0.75,0.75,0.75),alpha=.75)
ax2.plot(x,avg_leng,'-',color=(0,0,0),linewidth=1,label='mean')
val = np.max(np.abs(avg_leng))
ax2.set_ylim((-2*val,2*val))
ax2.set_xlim((np.min(x)-2,np.max(x)+2))
ax2.plot(x[kk],avg_leng[kk],'o',mfc=(.7,0,0),mec=(0,0,0),markersize=7)
plt.xlabel('frame number')
plt.legend(loc='upper right')
plt.tight_layout()
plt.savefig(out_plots + '/' + file_root + '_summary')
if include_eps or kk == np.argmin(J_list):
plt.savefig(out_plots + '/' + 'frame-%i'%(t) + '_summary.eps')
plt.close()
img_list.append(plt.imread(out_plots + '/' + file_root + '_summary.png'))
imageio.mimsave(out_plots + '/summary.gif', img_list, loop = 10)
clip = mp.VideoFileClip(out_plots + '/summary.gif')
clip.write_videofile(out_plots + '/summary.mp4')
| [
"numpy.sqrt",
"matplotlib.pyplot.ylabel",
"numpy.sin",
"matplotlib.pyplot.imshow",
"numpy.mean",
"os.path.exists",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.asarray",
"numpy.max",
"matplotlib.pyplot.close",
"numpy.dot",
"numpy.linspace",
"matplotlib.pyplot.scatter",
"numpy.min",
"numpy.argmin",
"moviepy.editor.VideoFileClip",
"glob.glob",
"numpy.abs",
"matplotlib.pyplot.savefig",
"numpy.linalg.eig",
"matplotlib.pyplot.gca",
"pickle.load",
"numpy.argmax",
"numpy.outer",
"numpy.cos",
"matplotlib.pyplot.title",
"imageio.mimsave",
"numpy.transpose",
"matplotlib.pyplot.legend",
"numpy.median",
"pickle.dump",
"os.makedirs",
"matplotlib.pyplot.imread",
"scipy.linalg.polar",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.linalg.inv",
"matplotlib.pyplot.tight_layout",
"numpy.loadtxt",
"numpy.load",
"matplotlib.pyplot.subplot"
] | [((660, 673), 'numpy.load', 'np.load', (['root'], {}), '(root)\n', (667, 673), True, 'import numpy as np\n'), ((1159, 1180), 'numpy.loadtxt', 'np.loadtxt', (['dat_fname'], {}), '(dat_fname)\n', (1169, 1180), True, 'import numpy as np\n'), ((1286, 1302), 'numpy.zeros', 'np.zeros', (['(2, 2)'], {}), '((2, 2))\n', (1294, 1302), True, 'import numpy as np\n'), ((1503, 1521), 'numpy.linalg.eig', 'np.linalg.eig', (['mat'], {}), '(mat)\n', (1516, 1521), True, 'import numpy as np\n'), ((1531, 1540), 'numpy.max', 'np.max', (['u'], {}), '(u)\n', (1537, 1540), True, 'import numpy as np\n'), ((1825, 1921), 'numpy.loadtxt', 'np.loadtxt', (["('ALL_MOVIES_PROCESSED/' + folder_name +\n '/timeseries/tracking_results_x_pos.txt')"], {}), "('ALL_MOVIES_PROCESSED/' + folder_name +\n '/timeseries/tracking_results_x_pos.txt')\n", (1835, 1921), True, 'import numpy as np\n'), ((1927, 2023), 'numpy.loadtxt', 'np.loadtxt', (["('ALL_MOVIES_PROCESSED/' + folder_name +\n '/timeseries/tracking_results_y_pos.txt')"], {}), "('ALL_MOVIES_PROCESSED/' + folder_name +\n '/timeseries/tracking_results_y_pos.txt')\n", (1937, 2023), True, 'import numpy as np\n'), ((2145, 2167), 'numpy.zeros', 'np.zeros', (['(2, num_vec)'], {}), '((2, num_vec))\n', (2153, 2167), True, 'import numpy as np\n'), ((2418, 2440), 'numpy.zeros', 'np.zeros', (['(2, num_vec)'], {}), '((2, num_vec))\n', (2426, 2440), True, 'import numpy as np\n'), ((2807, 2829), 'numpy.dot', 'np.dot', (['term_1', 'term_2'], {}), '(term_1, term_2)\n', (2813, 2829), True, 'import numpy as np\n'), ((4062, 4079), 'numpy.argmax', 'np.argmax', (['J_list'], {}), '(J_list)\n', (4071, 4079), True, 'import numpy as np\n'), ((4482, 4494), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4492, 4494), True, 'import matplotlib.pyplot as plt\n'), ((4496, 4516), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4507, 4516), True, 'import matplotlib.pyplot as plt\n'), ((4516, 4534), 'matplotlib.pyplot.plot', 'plt.plot', (['OOP_list'], {}), '(OOP_list)\n', (4524, 4534), True, 'import matplotlib.pyplot as plt\n'), ((4536, 4562), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""frame number"""'], {}), "('frame number')\n", (4546, 4562), True, 'import matplotlib.pyplot as plt\n'), ((4564, 4581), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""OOP"""'], {}), "('OOP')\n", (4574, 4581), True, 'import matplotlib.pyplot as plt\n'), ((4583, 4601), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4599, 4601), True, 'import matplotlib.pyplot as plt\n'), ((4603, 4623), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4614, 4623), True, 'import matplotlib.pyplot as plt\n'), ((4623, 4639), 'matplotlib.pyplot.plot', 'plt.plot', (['J_list'], {}), '(J_list)\n', (4631, 4639), True, 'import matplotlib.pyplot as plt\n'), ((4641, 4667), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""frame number"""'], {}), "('frame number')\n", (4651, 4667), True, 'import matplotlib.pyplot as plt\n'), ((4669, 4704), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""average deformation J"""'], {}), "('average deformation J')\n", (4679, 4704), True, 'import matplotlib.pyplot as plt\n'), ((4706, 4724), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (4722, 4724), True, 'import matplotlib.pyplot as plt\n'), ((4726, 4773), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(out_analysis + '/OOP_J_timeseries')"], {}), "(out_analysis + '/OOP_J_timeseries')\n", (4737, 4773), True, 'import 
matplotlib.pyplot as plt\n'), ((5199, 5207), 'scipy.linalg.polar', 'polar', (['F'], {}), '(F)\n', (5204, 5207), False, 'from scipy.linalg import polar\n'), ((5216, 5225), 'numpy.linalg.eig', 'LA.eig', (['U'], {}), '(U)\n', (5222, 5225), True, 'from numpy import linalg as LA\n'), ((5231, 5243), 'numpy.dot', 'np.dot', (['R', 'v'], {}), '(R, v)\n', (5237, 5243), True, 'import numpy as np\n'), ((5421, 5447), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 5)'}), '(figsize=(5, 5))\n', (5431, 5447), True, 'import matplotlib.pyplot as plt\n'), ((5448, 5485), 'matplotlib.pyplot.imshow', 'plt.imshow', (['raw_img'], {'cmap': 'plt.cm.gray'}), '(raw_img, cmap=plt.cm.gray)\n', (5458, 5485), True, 'import matplotlib.pyplot as plt\n'), ((5546, 5578), 'numpy.linspace', 'np.linspace', (['(0)', '(2.0 * np.pi)', '(100)'], {}), '(0, 2.0 * np.pi, 100)\n', (5557, 5578), True, 'import numpy as np\n'), ((5576, 5775), 'matplotlib.pyplot.plot', 'plt.plot', (['[y_pos_mean - rad * vec_1[1], y_pos_mean + rad * vec_1[1]]', '[x_pos_mean - rad * vec_1[0], x_pos_mean + rad * vec_1[0]]', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(0.3)'}), "([y_pos_mean - rad * vec_1[1], y_pos_mean + rad * vec_1[1]], [\n x_pos_mean - rad * vec_1[0], x_pos_mean + rad * vec_1[0]], '-', color=(\n 255 / 255, 204 / 255, 203 / 255), linewidth=0.3)\n", (5584, 5775), True, 'import matplotlib.pyplot as plt\n'), ((5737, 5918), 'matplotlib.pyplot.plot', 'plt.plot', (['[y_pos_mean - rad * vec_2[1], y_pos_mean + rad * vec_2[1]]', '[x_pos_mean - rad * vec_2[0], x_pos_mean + rad * vec_2[0]]', '"""-"""'], {'color': '(0.5, 0.5, 0.5)', 'linewidth': '(0.3)'}), "([y_pos_mean - rad * vec_2[1], y_pos_mean + rad * vec_2[1]], [\n x_pos_mean - rad * vec_2[0], x_pos_mean + rad * vec_2[0]], '-', color=(\n 0.5, 0.5, 0.5), linewidth=0.3)\n", (5745, 5918), True, 'import matplotlib.pyplot as plt\n'), ((5956, 5988), 'numpy.asarray', 'np.asarray', (['[[0.9, 0], [0, 0.9]]'], {}), '([[0.9, 0], [0, 0.9]])\n', (5966, 5988), True, 'import numpy as np\n'), ((6497, 6595), 'matplotlib.pyplot.plot', 'plt.plot', (['y_vec_circ', 'x_vec_circ', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(0.3)'}), "(y_vec_circ, x_vec_circ, '-', color=(255 / 255, 204 / 255, 203 / \n 255), linewidth=0.3)\n", (6505, 6595), True, 'import matplotlib.pyplot as plt\n'), ((6580, 6667), 'matplotlib.pyplot.plot', 'plt.plot', (['y_vec', 'x_vec', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(1.0)'}), "(y_vec, x_vec, '-', color=(255 / 255, 204 / 255, 203 / 255),\n linewidth=1.0)\n", (6588, 6667), True, 'import matplotlib.pyplot as plt\n'), ((6725, 6905), 'matplotlib.pyplot.plot', 'plt.plot', (['[y_pos_mean - rad_OOP * OOP_vec[1], y_pos_mean + rad_OOP * OOP_vec[1]]', '[x_pos_mean - rad_OOP * OOP_vec[0], x_pos_mean + rad_OOP * OOP_vec[0]]', '"""r-"""'], {'linewidth': '(5)'}), "([y_pos_mean - rad_OOP * OOP_vec[1], y_pos_mean + rad_OOP * OOP_vec\n [1]], [x_pos_mean - rad_OOP * OOP_vec[0], x_pos_mean + rad_OOP *\n OOP_vec[0]], 'r-', linewidth=5)\n", (6733, 6905), True, 'import matplotlib.pyplot as plt\n'), ((6887, 6966), 'matplotlib.pyplot.title', 'plt.title', (["('J: %.3f, OOP:%.3f, frame: %i' % (J, OOP_list[frame_num], frame_num))"], {}), "('J: %.3f, OOP:%.3f, frame: %i' % (J, OOP_list[frame_num], frame_num))\n", (6896, 6966), True, 'import matplotlib.pyplot as plt\n'), ((6969, 6978), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6976, 6978), True, 'import matplotlib.pyplot as plt\n'), ((7019, 7062), 
'matplotlib.pyplot.savefig', 'plt.savefig', (["(out_analysis + '/OOP_J_on_img')"], {}), "(out_analysis + '/OOP_J_on_img')\n", (7030, 7062), True, 'import matplotlib.pyplot as plt\n'), ((7195, 7208), 'numpy.max', 'np.max', (['y_vec'], {}), '(y_vec)\n', (7201, 7208), True, 'import numpy as np\n'), ((7218, 7231), 'numpy.min', 'np.min', (['y_vec'], {}), '(y_vec)\n', (7224, 7231), True, 'import numpy as np\n'), ((8065, 8087), 'numpy.loadtxt', 'np.loadtxt', (['fname_leng'], {}), '(fname_leng)\n', (8075, 8087), True, 'import numpy as np\n'), ((8099, 8124), 'numpy.mean', 'np.mean', (['dat_leng'], {'axis': '(0)'}), '(dat_leng, axis=0)\n', (8106, 8124), True, 'import numpy as np\n'), ((9120, 9137), 'numpy.argmin', 'np.argmin', (['J_list'], {}), '(J_list)\n', (9129, 9137), True, 'import numpy as np\n'), ((9311, 9328), 'numpy.argmin', 'np.argmin', (['J_list'], {}), '(J_list)\n', (9320, 9328), True, 'import numpy as np\n'), ((9535, 9569), 'numpy.sqrt', 'np.sqrt', (['(v[0] ** 2.0 + v[1] ** 2.0)'], {}), '(v[0] ** 2.0 + v[1] ** 2.0)\n', (9542, 9569), True, 'import numpy as np\n'), ((9580, 9616), 'numpy.sqrt', 'np.sqrt', (['(v0[0] ** 2.0 + v0[1] ** 2.0)'], {}), '(v0[0] ** 2.0 + v0[1] ** 2.0)\n', (9587, 9616), True, 'import numpy as np\n'), ((10798, 10815), 'numpy.argmin', 'np.argmin', (['J_list'], {}), '(J_list)\n', (10807, 10815), True, 'import numpy as np\n'), ((10989, 11006), 'numpy.argmin', 'np.argmin', (['J_list'], {}), '(J_list)\n', (10998, 11006), True, 'import numpy as np\n'), ((11213, 11247), 'numpy.sqrt', 'np.sqrt', (['(v[0] ** 2.0 + v[1] ** 2.0)'], {}), '(v[0] ** 2.0 + v[1] ** 2.0)\n', (11220, 11247), True, 'import numpy as np\n'), ((11258, 11294), 'numpy.sqrt', 'np.sqrt', (['(v0[0] ** 2.0 + v0[1] ** 2.0)'], {}), '(v0[0] ** 2.0 + v0[1] ** 2.0)\n', (11265, 11294), True, 'import numpy as np\n'), ((12131, 12153), 'numpy.loadtxt', 'np.loadtxt', (['fname_leng'], {}), '(fname_leng)\n', (12141, 12153), True, 'import numpy as np\n'), ((12166, 12191), 'numpy.mean', 'np.mean', (['dat_leng'], {'axis': '(0)'}), '(dat_leng, axis=0)\n', (12173, 12191), True, 'import numpy as np\n'), ((12970, 13008), 'numpy.loadtxt', 'np.loadtxt', (['sarc_data_normalized_fname'], {}), '(sarc_data_normalized_fname)\n', (12980, 13008), True, 'import numpy as np\n'), ((13025, 13055), 'numpy.zeros', 'np.zeros', (['all_normalized.shape'], {}), '(all_normalized.shape)\n', (13033, 13055), True, 'import numpy as np\n'), ((13514, 13600), 'numpy.loadtxt', 'np.loadtxt', (["(external_folder_name + '/' + folder_name + '/analysis/recovered_F.txt')"], {}), "(external_folder_name + '/' + folder_name +\n '/analysis/recovered_F.txt')\n", (13524, 13600), True, 'import numpy as np\n'), ((18317, 18379), 'imageio.mimsave', 'imageio.mimsave', (["(out_plots + '/summary.gif')", 'img_list'], {'loop': '(10)'}), "(out_plots + '/summary.gif', img_list, loop=10)\n", (18332, 18379), False, 'import imageio\n'), ((18391, 18435), 'moviepy.editor.VideoFileClip', 'mp.VideoFileClip', (["(out_plots + '/summary.gif')"], {}), "(out_plots + '/summary.gif')\n", (18407, 18435), True, 'import moviepy.editor as mp\n'), ((933, 1000), 'glob.glob', 'glob.glob', (["('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')"], {}), "('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')\n", (942, 1000), False, 'import glob\n'), ((1338, 1353), 'numpy.cos', 'np.cos', (['ang[kk]'], {}), '(ang[kk])\n', (1344, 1353), True, 'import numpy as np\n'), ((1360, 1375), 'numpy.sin', 'np.sin', (['ang[kk]'], {}), '(ang[kk])\n', (1366, 1375), True, 'import numpy as np\n'), ((1384, 1402), 
'numpy.asarray', 'np.asarray', (['[x, y]'], {}), '([x, y])\n', (1394, 1402), True, 'import numpy as np\n'), ((1408, 1426), 'numpy.outer', 'np.outer', (['vec', 'vec'], {}), '(vec, vec)\n', (1416, 1426), True, 'import numpy as np\n'), ((2706, 2728), 'numpy.transpose', 'np.transpose', (['Lambda_0'], {}), '(Lambda_0)\n', (2718, 2728), True, 'import numpy as np\n'), ((3062, 3129), 'glob.glob', 'glob.glob', (["('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')"], {}), "('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')\n", (3071, 3129), False, 'import glob\n'), ((3535, 3602), 'glob.glob', 'glob.glob', (["('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')"], {}), "('ALL_MOVIES_MATRICES/' + folder_name + '_matrices/*.npy')\n", (3544, 3602), False, 'import glob\n'), ((5496, 5540), 'numpy.min', 'np.min', (['[raw_img.shape[0], raw_img.shape[1]]'], {}), '([raw_img.shape[0], raw_img.shape[1]])\n', (5502, 5540), True, 'import numpy as np\n'), ((6127, 6139), 'numpy.dot', 'np.dot', (['F', 'F'], {}), '(F, F)\n', (6133, 6139), True, 'import numpy as np\n'), ((6148, 6164), 'numpy.dot', 'np.dot', (['F', 'nest1'], {}), '(F, nest1)\n', (6154, 6164), True, 'import numpy as np\n'), ((6173, 6189), 'numpy.dot', 'np.dot', (['F', 'nest2'], {}), '(F, nest2)\n', (6179, 6189), True, 'import numpy as np\n'), ((6199, 6215), 'numpy.dot', 'np.dot', (['F', 'nest3'], {}), '(F, nest3)\n', (6205, 6215), True, 'import numpy as np\n'), ((6224, 6240), 'numpy.dot', 'np.dot', (['F', 'nest4'], {}), '(F, nest4)\n', (6230, 6240), True, 'import numpy as np\n'), ((6249, 6265), 'numpy.dot', 'np.dot', (['F', 'nest5'], {}), '(F, nest5)\n', (6255, 6265), True, 'import numpy as np\n'), ((6275, 6291), 'numpy.dot', 'np.dot', (['F', 'nest6'], {}), '(F, nest6)\n', (6281, 6291), True, 'import numpy as np\n'), ((6300, 6316), 'numpy.dot', 'np.dot', (['F', 'nest7'], {}), '(F, nest7)\n', (6306, 6316), True, 'import numpy as np\n'), ((6326, 6342), 'numpy.dot', 'np.dot', (['nest8', 'v'], {}), '(nest8, v)\n', (6332, 6342), True, 'import numpy as np\n'), ((7597, 7614), 'numpy.median', 'np.median', (['s_list'], {}), '(s_list)\n', (7606, 7614), True, 'import numpy as np\n'), ((8237, 8256), 'numpy.asarray', 'np.asarray', (['[s_til]'], {}), '([s_til])\n', (8247, 8256), True, 'import numpy as np\n'), ((8299, 8318), 'numpy.asarray', 'np.asarray', (['[s_avg]'], {}), '([s_avg])\n', (8309, 8318), True, 'import numpy as np\n'), ((8765, 8787), 'pickle.dump', 'pickle.dump', (['F_list', 'f'], {}), '(F_list, f)\n', (8776, 8787), False, 'import pickle\n'), ((8843, 8865), 'pickle.dump', 'pickle.dump', (['J_list', 'f'], {}), '(J_list, f)\n', (8854, 8865), False, 'import pickle\n'), ((8981, 9005), 'pickle.dump', 'pickle.dump', (['OOP_list', 'f'], {}), '(OOP_list, f)\n', (8992, 9005), False, 'import pickle\n'), ((9067, 9095), 'pickle.dump', 'pickle.dump', (['OOP_vec_list', 'f'], {}), '(OOP_vec_list, f)\n', (9078, 9095), False, 'import pickle\n'), ((9448, 9458), 'numpy.sqrt', 'np.sqrt', (['J'], {}), '(J)\n', (9455, 9458), True, 'import numpy as np\n'), ((9506, 9522), 'numpy.linalg.inv', 'np.linalg.inv', (['F'], {}), '(F)\n', (9519, 9522), True, 'import numpy as np\n'), ((9764, 9790), 'numpy.asarray', 'np.asarray', (['[OOP_selected]'], {}), '([OOP_selected])\n', (9774, 9790), True, 'import numpy as np\n'), ((9832, 9858), 'numpy.asarray', 'np.asarray', (['[avg_contract]'], {}), '([avg_contract])\n', (9842, 9858), True, 'import numpy as np\n'), ((9900, 9934), 'numpy.asarray', 'np.asarray', (['[avg_aligned_contract]'], {}), '([avg_aligned_contract])\n', 
(9910, 9934), True, 'import numpy as np\n'), ((9976, 9995), 'numpy.asarray', 'np.asarray', (['[s_til]'], {}), '([s_til])\n', (9986, 9995), True, 'import numpy as np\n'), ((10037, 10056), 'numpy.asarray', 'np.asarray', (['[s_avg]'], {}), '([s_avg])\n', (10047, 10056), True, 'import numpy as np\n'), ((10512, 10526), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (10523, 10526), False, 'import pickle\n'), ((10589, 10603), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (10600, 10603), False, 'import pickle\n'), ((10670, 10684), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (10681, 10684), False, 'import pickle\n'), ((10759, 10773), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (10770, 10773), False, 'import pickle\n'), ((11126, 11136), 'numpy.sqrt', 'np.sqrt', (['J'], {}), '(J)\n', (11133, 11136), True, 'import numpy as np\n'), ((11184, 11200), 'numpy.linalg.inv', 'np.linalg.inv', (['F'], {}), '(F)\n', (11197, 11200), True, 'import numpy as np\n'), ((11384, 11410), 'numpy.asarray', 'np.asarray', (['[OOP_selected]'], {}), '([OOP_selected])\n', (11394, 11410), True, 'import numpy as np\n'), ((11452, 11478), 'numpy.asarray', 'np.asarray', (['[avg_contract]'], {}), '([avg_contract])\n', (11462, 11478), True, 'import numpy as np\n'), ((11520, 11554), 'numpy.asarray', 'np.asarray', (['[avg_aligned_contract]'], {}), '([avg_aligned_contract])\n', (11530, 11554), True, 'import numpy as np\n'), ((13443, 13468), 'os.path.exists', 'os.path.exists', (['out_plots'], {}), '(out_plots)\n', (13457, 13468), False, 'import os\n'), ((13470, 13492), 'os.makedirs', 'os.makedirs', (['out_plots'], {}), '(out_plots)\n', (13481, 13492), False, 'import os\n'), ((13965, 13974), 'numpy.linalg.eig', 'LA.eig', (['U'], {}), '(U)\n', (13971, 13974), True, 'from numpy import linalg as LA\n'), ((14048, 14060), 'numpy.dot', 'np.dot', (['R', 'v'], {}), '(R, v)\n', (14054, 14060), True, 'import numpy as np\n'), ((14482, 14521), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10 * 0.7, 5 * 0.7)'}), '(figsize=(10 * 0.7, 5 * 0.7))\n', (14492, 14521), True, 'import matplotlib.pyplot as plt\n'), ((14698, 14735), 'matplotlib.pyplot.imshow', 'plt.imshow', (['raw_img'], {'cmap': 'plt.cm.gray'}), '(raw_img, cmap=plt.cm.gray)\n', (14708, 14735), True, 'import matplotlib.pyplot as plt\n'), ((15334, 15366), 'numpy.linspace', 'np.linspace', (['(0)', '(2.0 * np.pi)', '(100)'], {}), '(0, 2.0 * np.pi, 100)\n', (15345, 15366), True, 'import numpy as np\n'), ((15365, 15602), 'matplotlib.pyplot.plot', 'plt.plot', (['[y_pos_mean - rad * vec_1_list[kk][1], y_pos_mean + rad * vec_1_list[kk][1]]', '[x_pos_mean - rad * vec_1_list[kk][0], x_pos_mean + rad * vec_1_list[kk][0]]', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(0.3)'}), "([y_pos_mean - rad * vec_1_list[kk][1], y_pos_mean + rad *\n vec_1_list[kk][1]], [x_pos_mean - rad * vec_1_list[kk][0], x_pos_mean +\n rad * vec_1_list[kk][0]], '-', color=(255 / 255, 204 / 255, 203 / 255),\n linewidth=0.3)\n", (15373, 15602), True, 'import matplotlib.pyplot as plt\n'), ((15563, 15778), 'matplotlib.pyplot.plot', 'plt.plot', (['[y_pos_mean - rad * vec_2_list[kk][1], y_pos_mean + rad * vec_2_list[kk][1]]', '[x_pos_mean - rad * vec_2_list[kk][0], x_pos_mean + rad * vec_2_list[kk][0]]', '"""-"""'], {'color': '(0.5, 0.5, 0.5)', 'linewidth': '(0.3)'}), "([y_pos_mean - rad * vec_2_list[kk][1], y_pos_mean + rad *\n vec_2_list[kk][1]], [x_pos_mean - rad * vec_2_list[kk][0], x_pos_mean +\n rad * vec_2_list[kk][0]], '-', color=(0.5, 0.5, 0.5), 
linewidth=0.3)\n", (15571, 15778), True, 'import matplotlib.pyplot as plt\n'), ((15971, 16003), 'numpy.asarray', 'np.asarray', (['[[0.9, 0], [0, 0.9]]'], {}), '([[0.9, 0], [0, 0.9]])\n', (15981, 16003), True, 'import numpy as np\n'), ((16648, 16746), 'matplotlib.pyplot.plot', 'plt.plot', (['y_vec_circ', 'x_vec_circ', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(0.3)'}), "(y_vec_circ, x_vec_circ, '-', color=(255 / 255, 204 / 255, 203 / \n 255), linewidth=0.3)\n", (16656, 16746), True, 'import matplotlib.pyplot as plt\n'), ((16732, 16819), 'matplotlib.pyplot.plot', 'plt.plot', (['y_vec', 'x_vec', '"""-"""'], {'color': '(255 / 255, 204 / 255, 203 / 255)', 'linewidth': '(1.0)'}), "(y_vec, x_vec, '-', color=(255 / 255, 204 / 255, 203 / 255),\n linewidth=1.0)\n", (16740, 16819), True, 'import matplotlib.pyplot as plt\n'), ((16812, 16821), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (16819, 16821), True, 'import matplotlib.pyplot as plt\n'), ((17459, 17488), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (17469, 17488), True, 'import matplotlib.pyplot as plt\n'), ((17975, 18001), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""frame number"""'], {}), "('frame number')\n", (17985, 18001), True, 'import matplotlib.pyplot as plt\n'), ((18004, 18033), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""upper right"""'}), "(loc='upper right')\n", (18014, 18033), True, 'import matplotlib.pyplot as plt\n'), ((18036, 18054), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (18052, 18054), True, 'import matplotlib.pyplot as plt\n'), ((18059, 18112), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(out_plots + '/' + file_root + '_summary')"], {}), "(out_plots + '/' + file_root + '_summary')\n", (18070, 18112), True, 'import matplotlib.pyplot as plt\n'), ((18226, 18237), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18235, 18237), True, 'import matplotlib.pyplot as plt\n'), ((1443, 1471), 'numpy.asarray', 'np.asarray', (['[[1, 0], [0, 1]]'], {}), '([[1, 0], [0, 1]])\n', (1453, 1471), True, 'import numpy as np\n'), ((1556, 1568), 'numpy.argmax', 'np.argmax', (['u'], {}), '(u)\n', (1565, 1568), True, 'import numpy as np\n'), ((2775, 2797), 'numpy.transpose', 'np.transpose', (['Lambda_0'], {}), '(Lambda_0)\n', (2787, 2797), True, 'import numpy as np\n'), ((5257, 5269), 'numpy.argmin', 'np.argmin', (['w'], {}), '(w)\n', (5266, 5269), True, 'import numpy as np\n'), ((5284, 5296), 'numpy.argmax', 'np.argmax', (['w'], {}), '(w)\n', (5293, 5296), True, 'import numpy as np\n'), ((13921, 13957), 'numpy.asarray', 'np.asarray', (['[[F00, F01], [F10, F11]]'], {}), '([[F00, F01], [F10, F11]])\n', (13931, 13957), True, 'import numpy as np\n'), ((13998, 14007), 'numpy.min', 'np.min', (['w'], {}), '(w)\n', (14004, 14007), True, 'import numpy as np\n'), ((14031, 14040), 'numpy.max', 'np.max', (['w'], {}), '(w)\n', (14037, 14040), True, 'import numpy as np\n'), ((14158, 14194), 'numpy.asarray', 'np.asarray', (['[[F00, F01], [F10, F11]]'], {}), '([[F00, F01], [F10, F11]])\n', (14168, 14194), True, 'import numpy as np\n'), ((15284, 15328), 'numpy.min', 'np.min', (['[raw_img.shape[0], raw_img.shape[1]]'], {}), '([raw_img.shape[0], raw_img.shape[1]])\n', (15290, 15328), True, 'import numpy as np\n'), ((16146, 16184), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'F_list_mat[kk]'], {}), '(F_list_mat[kk], F_list_mat[kk])\n', (16152, 16184), True, 'import numpy as np\n'), ((16195, 16224), 'numpy.dot', 
'np.dot', (['F_list_mat[kk]', 'nest1'], {}), '(F_list_mat[kk], nest1)\n', (16201, 16224), True, 'import numpy as np\n'), ((16235, 16264), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest2'], {}), '(F_list_mat[kk], nest2)\n', (16241, 16264), True, 'import numpy as np\n'), ((16275, 16304), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest3'], {}), '(F_list_mat[kk], nest3)\n', (16281, 16304), True, 'import numpy as np\n'), ((16315, 16344), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest4'], {}), '(F_list_mat[kk], nest4)\n', (16321, 16344), True, 'import numpy as np\n'), ((16355, 16384), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest5'], {}), '(F_list_mat[kk], nest5)\n', (16361, 16384), True, 'import numpy as np\n'), ((16395, 16424), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest6'], {}), '(F_list_mat[kk], nest6)\n', (16401, 16424), True, 'import numpy as np\n'), ((16435, 16464), 'numpy.dot', 'np.dot', (['F_list_mat[kk]', 'nest7'], {}), '(F_list_mat[kk], nest7)\n', (16441, 16464), True, 'import numpy as np\n'), ((16475, 16491), 'numpy.dot', 'np.dot', (['nest8', 'v'], {}), '(nest8, v)\n', (16481, 16491), True, 'import numpy as np\n'), ((17807, 17823), 'numpy.abs', 'np.abs', (['avg_leng'], {}), '(avg_leng)\n', (17813, 17823), True, 'import numpy as np\n'), ((18161, 18223), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(out_plots + '/' + 'frame-%i' % t + '_summary.eps')"], {}), "(out_plots + '/' + 'frame-%i' % t + '_summary.eps')\n", (18172, 18223), True, 'import matplotlib.pyplot as plt\n'), ((18256, 18312), 'matplotlib.pyplot.imread', 'plt.imread', (["(out_plots + '/' + file_root + '_summary.png')"], {}), "(out_plots + '/' + file_root + '_summary.png')\n", (18266, 18312), True, 'import matplotlib.pyplot as plt\n'), ((15134, 15181), 'matplotlib.pyplot.scatter', 'plt.scatter', (['yy', 'xx'], {'s': '(3)', 'color': 'col', 'marker': '"""o"""'}), "(yy, xx, s=3, color=col, marker='o')\n", (15145, 15181), True, 'import matplotlib.pyplot as plt\n'), ((18139, 18156), 'numpy.argmin', 'np.argmin', (['J_list'], {}), '(J_list)\n', (18148, 18156), True, 'import numpy as np\n'), ((6031, 6045), 'numpy.cos', 'np.cos', (['th[jj]'], {}), '(th[jj])\n', (6037, 6045), True, 'import numpy as np\n'), ((6050, 6064), 'numpy.sin', 'np.sin', (['th[jj]'], {}), '(th[jj])\n', (6056, 6064), True, 'import numpy as np\n'), ((14085, 14097), 'numpy.argmin', 'np.argmin', (['w'], {}), '(w)\n', (14094, 14097), True, 'import numpy as np\n'), ((14123, 14135), 'numpy.argmax', 'np.argmax', (['w'], {}), '(w)\n', (14132, 14135), True, 'import numpy as np\n'), ((17431, 17440), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (17437, 17440), True, 'import numpy as np\n'), ((17443, 17452), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (17449, 17452), True, 'import numpy as np\n'), ((17872, 17881), 'numpy.min', 'np.min', (['x'], {}), '(x)\n', (17878, 17881), True, 'import numpy as np\n'), ((17884, 17893), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (17890, 17893), True, 'import numpy as np\n'), ((16048, 16062), 'numpy.cos', 'np.cos', (['th[jj]'], {}), '(th[jj])\n', (16054, 16062), True, 'import numpy as np\n'), ((16067, 16081), 'numpy.sin', 'np.sin', (['th[jj]'], {}), '(th[jj])\n', (16073, 16081), True, 'import numpy as np\n'), ((14938, 14977), 'numpy.asarray', 'np.asarray', (['ALL_frames_above_thresh[zz]'], {}), '(ALL_frames_above_thresh[zz])\n', (14948, 14977), True, 'import numpy as np\n')] |
from sotabencheval.question_answering.evaluate_v11 import evaluate as evaluate_v11
from sotabencheval.question_answering.evaluate_v20 import get_raw_scores
__all__ = ["evaluate_v11", "evaluate_v20"]
def evaluate_v20(dataset, predictions):
exact_scores, f1_scores = get_raw_scores(dataset, predictions)
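    # aggregate the per-question scores into overall percentages over every question in the dataset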
total = sum([len(p['qas']) for article in dataset for p in article['paragraphs']])
exact_match = 100.0 * sum(exact_scores.values()) / total
f1 = 100.0 * sum(f1_scores.values()) / total
return {'exact_match': exact_match, 'f1': f1}
| [
"sotabencheval.question_answering.evaluate_v20.get_raw_scores"
] | [((272, 308), 'sotabencheval.question_answering.evaluate_v20.get_raw_scores', 'get_raw_scores', (['dataset', 'predictions'], {}), '(dataset, predictions)\n', (286, 308), False, 'from sotabencheval.question_answering.evaluate_v20 import get_raw_scores\n')] |
# -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
"""AiiDA profile related code"""
import collections
from copy import deepcopy
import os
import pathlib
from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Type
from aiida.common import exceptions
from .options import parse_option
from .settings import DAEMON_DIR, DAEMON_LOG_DIR
if TYPE_CHECKING:
from aiida.orm.implementation import StorageBackend
__all__ = ('Profile',)
CIRCUS_PID_FILE_TEMPLATE = os.path.join(DAEMON_DIR, 'circus-{}.pid')
DAEMON_PID_FILE_TEMPLATE = os.path.join(DAEMON_DIR, 'aiida-{}.pid')
CIRCUS_LOG_FILE_TEMPLATE = os.path.join(DAEMON_LOG_DIR, 'circus-{}.log')
DAEMON_LOG_FILE_TEMPLATE = os.path.join(DAEMON_LOG_DIR, 'aiida-{}.log')
CIRCUS_PORT_FILE_TEMPLATE = os.path.join(DAEMON_DIR, 'circus-{}.port')
CIRCUS_SOCKET_FILE_TEMPATE = os.path.join(DAEMON_DIR, 'circus-{}.sockets')
CIRCUS_CONTROLLER_SOCKET_TEMPLATE = 'circus.c.sock'
CIRCUS_PUBSUB_SOCKET_TEMPLATE = 'circus.p.sock'
CIRCUS_STATS_SOCKET_TEMPLATE = 'circus.s.sock'
class Profile: # pylint: disable=too-many-public-methods
"""Class that models a profile as it is stored in the configuration file of an AiiDA instance."""
KEY_UUID = 'PROFILE_UUID'
KEY_DEFAULT_USER_EMAIL = 'default_user_email'
KEY_STORAGE = 'storage'
KEY_PROCESS = 'process_control'
KEY_STORAGE_BACKEND = 'backend'
KEY_STORAGE_CONFIG = 'config'
KEY_PROCESS_BACKEND = 'backend'
KEY_PROCESS_CONFIG = 'config'
KEY_OPTIONS = 'options'
KEY_TEST_PROFILE = 'test_profile'
# keys that are expected to be in the parsed configuration
REQUIRED_KEYS = (
KEY_STORAGE,
KEY_PROCESS,
)
def __init__(self, name: str, config: Mapping[str, Any], validate=True):
"""Load a profile with the profile configuration."""
if not isinstance(config, collections.abc.Mapping):
raise TypeError(f'config should be a mapping but is {type(config)}')
if validate and not set(config.keys()).issuperset(self.REQUIRED_KEYS):
raise exceptions.ConfigurationError(
f'profile {name!r} configuration does not contain all required keys: {self.REQUIRED_KEYS}'
)
self._name = name
self._attributes: Dict[str, Any] = deepcopy(config)
# Create a default UUID if not specified
if self._attributes.get(self.KEY_UUID, None) is None:
from uuid import uuid4
self._attributes[self.KEY_UUID] = uuid4().hex
def __str__(self) -> str:
return f'Profile<{self.uuid!r} ({self.name!r})>'
def copy(self):
"""Return a copy of the profile."""
return self.__class__(self.name, self._attributes)
@property
def uuid(self) -> str:
"""Return the profile uuid.
:return: string UUID
"""
return self._attributes[self.KEY_UUID]
@uuid.setter
def uuid(self, value: str) -> None:
self._attributes[self.KEY_UUID] = value
@property
def default_user_email(self) -> Optional[str]:
"""Return the default user email."""
return self._attributes.get(self.KEY_DEFAULT_USER_EMAIL, None)
@default_user_email.setter
def default_user_email(self, value: Optional[str]) -> None:
"""Set the default user email."""
self._attributes[self.KEY_DEFAULT_USER_EMAIL] = value
@property
def storage_backend(self) -> str:
"""Return the type of the storage backend."""
return self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_BACKEND]
@property
def storage_config(self) -> Dict[str, Any]:
"""Return the configuration required by the storage backend."""
return self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_CONFIG]
def set_storage(self, name: str, config: Dict[str, Any]) -> None:
"""Set the storage backend and its configuration.
:param name: the name of the storage backend
:param config: the configuration of the storage backend
"""
self._attributes.setdefault(self.KEY_STORAGE, {})
self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_BACKEND] = name
self._attributes[self.KEY_STORAGE][self.KEY_STORAGE_CONFIG] = config
@property
def storage_cls(self) -> Type['StorageBackend']:
"""Return the storage backend class for this profile."""
if self.storage_backend == 'psql_dos':
from aiida.storage.psql_dos.backend import PsqlDosBackend
return PsqlDosBackend
if self.storage_backend == 'sqlite_zip':
from aiida.storage.sqlite_zip.backend import SqliteZipBackend
return SqliteZipBackend
raise ValueError(f'unknown storage backend type: {self.storage_backend}')
@property
def process_control_backend(self) -> str:
"""Return the type of the process control backend."""
return self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_BACKEND]
@property
def process_control_config(self) -> Dict[str, Any]:
"""Return the configuration required by the process control backend."""
return self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_CONFIG]
def set_process_controller(self, name: str, config: Dict[str, Any]) -> None:
"""Set the process control backend and its configuration.
:param name: the name of the process backend
:param config: the configuration of the process backend
"""
self._attributes.setdefault(self.KEY_PROCESS, {})
self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_BACKEND] = name
self._attributes[self.KEY_PROCESS][self.KEY_PROCESS_CONFIG] = config
@property
def options(self):
self._attributes.setdefault(self.KEY_OPTIONS, {})
return self._attributes[self.KEY_OPTIONS]
@options.setter
def options(self, value):
self._attributes[self.KEY_OPTIONS] = value
def get_option(self, option_key, default=None):
return self.options.get(option_key, default)
def set_option(self, option_key, value, override=True):
"""Set a configuration option for a certain scope.
:param option_key: the key of the configuration option
        :param value: the option value
:param override: boolean, if False, will not override the option if it already exists
"""
_, parsed_value = parse_option(option_key, value) # ensure the value is validated
if option_key not in self.options or override:
self.options[option_key] = parsed_value
def unset_option(self, option_key):
self.options.pop(option_key, None)
@property
def name(self):
"""Return the profile name.
:return: the profile name
"""
return self._name
@property
def dictionary(self) -> Dict[str, Any]:
"""Return the profile attributes as a dictionary with keys as it is stored in the config
:return: the profile configuration dictionary
"""
return self._attributes
@property
def is_test_profile(self) -> bool:
"""Return whether the profile is a test profile
:return: boolean, True if test profile, False otherwise
"""
        # Check explicitly for ``True`` for safety. If an invalid value is defined, we default to treating the profile
        # as a normal one, since wrongly treating it as a test profile could unintentionally clear the database.
return self._attributes.get(self.KEY_TEST_PROFILE, False) is True
@is_test_profile.setter
def is_test_profile(self, value: bool) -> None:
"""Set whether the profile is a test profile.
:param value: boolean indicating whether this profile is a test profile.
"""
self._attributes[self.KEY_TEST_PROFILE] = value
@property
def repository_path(self) -> pathlib.Path:
"""Return the absolute path of the repository configured for this profile.
The URI should be in the format `protocol://address`
:note: At the moment, only the file protocol is supported.
:return: absolute filepath of the profile's file repository
"""
from urllib.parse import urlparse
parts = urlparse(self.storage_config['repository_uri'])
if parts.scheme != 'file':
raise exceptions.ConfigurationError('invalid repository protocol, only the local `file://` is supported')
if not os.path.isabs(parts.path):
raise exceptions.ConfigurationError('invalid repository URI: the path has to be absolute')
return pathlib.Path(os.path.expanduser(parts.path))
@property
def rmq_prefix(self) -> str:
"""Return the prefix that should be used for RMQ resources
:return: the rmq prefix string
"""
return f'aiida-{self.uuid}'
def get_rmq_url(self) -> str:
"""Return the RMQ url for this profile."""
from aiida.manage.external.rmq import get_rmq_url
if self.process_control_backend != 'rabbitmq':
raise exceptions.ConfigurationError(
f"invalid process control backend, only 'rabbitmq' is supported: {self.process_control_backend}"
)
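        # configuration keys carry a 'broker_' prefix; strip those 7 characters to obtain the plain keyword names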
kwargs = {key[7:]: val for key, val in self.process_control_config.items() if key.startswith('broker_')}
additional_kwargs = kwargs.pop('parameters', {})
return get_rmq_url(**kwargs, **additional_kwargs)
@property
def filepaths(self):
"""Return the filepaths used by this profile.
:return: a dictionary of filepaths
"""
return {
'circus': {
'log': CIRCUS_LOG_FILE_TEMPLATE.format(self.name),
'pid': CIRCUS_PID_FILE_TEMPLATE.format(self.name),
'port': CIRCUS_PORT_FILE_TEMPLATE.format(self.name),
'socket': {
'file': CIRCUS_SOCKET_FILE_TEMPATE.format(self.name),
'controller': CIRCUS_CONTROLLER_SOCKET_TEMPLATE,
'pubsub': CIRCUS_PUBSUB_SOCKET_TEMPLATE,
'stats': CIRCUS_STATS_SOCKET_TEMPLATE,
}
},
'daemon': {
'log': DAEMON_LOG_FILE_TEMPLATE.format(self.name),
'pid': DAEMON_PID_FILE_TEMPLATE.format(self.name),
}
}
| [
"urllib.parse.urlparse",
"os.path.isabs",
"os.path.join",
"uuid.uuid4",
"aiida.common.exceptions.ConfigurationError",
"copy.deepcopy",
"aiida.manage.external.rmq.get_rmq_url",
"os.path.expanduser"
] | [((1052, 1093), 'os.path.join', 'os.path.join', (['DAEMON_DIR', '"""circus-{}.pid"""'], {}), "(DAEMON_DIR, 'circus-{}.pid')\n", (1064, 1093), False, 'import os\n'), ((1121, 1161), 'os.path.join', 'os.path.join', (['DAEMON_DIR', '"""aiida-{}.pid"""'], {}), "(DAEMON_DIR, 'aiida-{}.pid')\n", (1133, 1161), False, 'import os\n'), ((1189, 1234), 'os.path.join', 'os.path.join', (['DAEMON_LOG_DIR', '"""circus-{}.log"""'], {}), "(DAEMON_LOG_DIR, 'circus-{}.log')\n", (1201, 1234), False, 'import os\n'), ((1262, 1306), 'os.path.join', 'os.path.join', (['DAEMON_LOG_DIR', '"""aiida-{}.log"""'], {}), "(DAEMON_LOG_DIR, 'aiida-{}.log')\n", (1274, 1306), False, 'import os\n'), ((1335, 1377), 'os.path.join', 'os.path.join', (['DAEMON_DIR', '"""circus-{}.port"""'], {}), "(DAEMON_DIR, 'circus-{}.port')\n", (1347, 1377), False, 'import os\n'), ((1407, 1452), 'os.path.join', 'os.path.join', (['DAEMON_DIR', '"""circus-{}.sockets"""'], {}), "(DAEMON_DIR, 'circus-{}.sockets')\n", (1419, 1452), False, 'import os\n'), ((2846, 2862), 'copy.deepcopy', 'deepcopy', (['config'], {}), '(config)\n', (2854, 2862), False, 'from copy import deepcopy\n'), ((8759, 8806), 'urllib.parse.urlparse', 'urlparse', (["self.storage_config['repository_uri']"], {}), "(self.storage_config['repository_uri'])\n", (8767, 8806), False, 'from urllib.parse import urlparse\n'), ((9932, 9974), 'aiida.manage.external.rmq.get_rmq_url', 'get_rmq_url', ([], {}), '(**kwargs, **additional_kwargs)\n', (9943, 9974), False, 'from aiida.manage.external.rmq import get_rmq_url\n'), ((2624, 2755), 'aiida.common.exceptions.ConfigurationError', 'exceptions.ConfigurationError', (['f"""profile {name!r} configuration does not contain all required keys: {self.REQUIRED_KEYS}"""'], {}), "(\n f'profile {name!r} configuration does not contain all required keys: {self.REQUIRED_KEYS}'\n )\n", (2653, 2755), False, 'from aiida.common import exceptions\n'), ((8861, 8965), 'aiida.common.exceptions.ConfigurationError', 'exceptions.ConfigurationError', (['"""invalid repository protocol, only the local `file://` is supported"""'], {}), "(\n 'invalid repository protocol, only the local `file://` is supported')\n", (8890, 8965), False, 'from aiida.common import exceptions\n'), ((8977, 9002), 'os.path.isabs', 'os.path.isabs', (['parts.path'], {}), '(parts.path)\n', (8990, 9002), False, 'import os\n'), ((9022, 9111), 'aiida.common.exceptions.ConfigurationError', 'exceptions.ConfigurationError', (['"""invalid repository URI: the path has to be absolute"""'], {}), "(\n 'invalid repository URI: the path has to be absolute')\n", (9051, 9111), False, 'from aiida.common import exceptions\n'), ((9136, 9166), 'os.path.expanduser', 'os.path.expanduser', (['parts.path'], {}), '(parts.path)\n', (9154, 9166), False, 'import os\n'), ((9589, 9726), 'aiida.common.exceptions.ConfigurationError', 'exceptions.ConfigurationError', (['f"""invalid process control backend, only \'rabbitmq\' is supported: {self.process_control_backend}"""'], {}), '(\n f"invalid process control backend, only \'rabbitmq\' is supported: {self.process_control_backend}"\n )\n', (9618, 9726), False, 'from aiida.common import exceptions\n'), ((3056, 3063), 'uuid.uuid4', 'uuid4', ([], {}), '()\n', (3061, 3063), False, 'from uuid import uuid4\n')] |
import unittest
from unittest_expander import expand, foreach
from pypidb._compat import PY2
from pypidb._db import _fetch_mapping
from pypidb._github import GitHubAPIMessage, check_repo
from pypidb._pypi import InvalidPackage
from pypidb._similarity import _compute_similarity, normalize
from tests.data import (
exact,
exact_fetched,
exact_metadata,
invalid,
mismatch,
missing_repos,
name_mismatch_fetched,
name_mismatch_metadata,
)
from tests.utils import _TestBase, normalise_list, web_session
expected = {}
expected.update(exact)
expected.update(mismatch)
missing_repos = normalise_list(missing_repos)
class _ExplicitBase(object):
def _check_result(self, name, url):
pass
def _test_package(self, name):
try:
url = self._get_scm(name)
except InvalidPackage:
self.assertIn(name, invalid)
raise unittest.SkipTest("{} is an invalid package".format(name))
except unittest.SkipTest:
raise
except Exception as e:
self.assertIn(name, self.expected_failures, e)
if name in self.expected_failures:
return
self.assertIsNotNone(url)
self.assertNotIn(name, invalid)
expected = self.expected[name]
self.assertInsensitiveEqual(url, expected)
self._check_result(name, url)
return url
@expand
class TestExactFromJson(_TestBase):
expected = expected
def _check_result(self, name, url):
normalised_name = normalize(name)
r = None
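        # GitHub results are verified through the API (repo and setup.py checks); any other URL gets a plain HTTP request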
if normalised_name in missing_repos:
pass
elif url.startswith("https://github.com/"):
slug = url[len("https://github.com/") :]
rv = self._check_github_repo(slug)
self.assertTrue(rv)
try:
rv = self._check_github_setuppy(slug, normalised_name)
except GitHubAPIMessage as e:
raise unittest.SkipTest(str(e))
if rv is False:
return
self.assertTrue(rv)
else:
r = web_session.get(url, allow_redirects=False)
if r is not None:
r.raise_for_status()
self.assertEqual(r.url, url)
location = r.headers.get("location")
if location:
self.assertIn(r.status_code, [301, 302])
location = location.replace(
"code.google.com/archive/p/", "code.google.com/p/"
)
self.assertIn(location, [url, url + "/"])
else:
self.assertEqual(r.status_code, 200)
@foreach(exact_metadata.keys())
def test_package(self, name):
expected = self.expected[name]
url = self._get_scm(name)
self.assertIsNotNone(url)
self.assertInsensitiveEqual(url, expected)
normalised_name = normalize(name)
fetch_list = _fetch_mapping[normalised_name]
self.assertFalse(fetch_list)
self._check_result(name, url)
if PY2:
return
if isinstance(expected, str):
self.assertLess(
_compute_similarity(name, expected),
0.05,
"{} - {} should be moved to name mismatches".format(name, expected),
)
else:
for i in expected:
self.assertLess(
_compute_similarity(name, i),
0.05,
"{} - {} should be moved to name mismatches".format(name, expected),
)
@expand
class TestExactFetched(_TestBase):
expected = expected
@foreach(exact_fetched.keys())
def test_package(self, name):
expected = self.expected[name]
url = self._get_scm(name)
self.assertIsNotNone(url)
self.assertInsensitiveEqual(url, expected)
normalised_name = normalize(name)
fetch_list = _fetch_mapping[normalised_name]
self.assertTrue(fetch_list)
if normalised_name in missing_repos:
pass
elif url.startswith("https://github.com/"):
slug = url[len("https://github.com/") :]
rv = self._check_github_repo(slug)
self.assertTrue(rv)
try:
rv = self._check_github_setuppy(slug, normalised_name)
except GitHubAPIMessage as e:
raise unittest.SkipTest(str(e))
if rv is False:
return
self.assertTrue(rv)
else:
r = web_session.get(url)
r.raise_for_status()
if PY2:
return
if isinstance(expected, str):
self.assertLess(
_compute_similarity(name, expected),
0.05,
"{} - {} should be moved to name mismatches".format(name, expected),
)
else:
for i in expected:
self.assertLess(
_compute_similarity(name, i),
0.05,
"{} - {} should be moved to name mismatches".format(name, expected),
)
@expand
class TestMismatchFromJson(_TestBase):
expected = expected
names = mismatch
expected_failures = []
@foreach(name_mismatch_metadata.keys())
def test_package(self, name):
expected = self.expected[name]
try:
url = self._get_scm(name)
except unittest.SkipTest:
raise
except Exception:
if name in self.expected_failures:
return
raise
if name in self.expected_failures:
return
self.assertIsNotNone(url)
self.assertInsensitiveEqual(url, expected)
if isinstance(expected, str):
self.assertIn("/", expected, "{} should be {}".format(expected, url))
normalised_name = normalize(name)
fetch_list = _fetch_mapping[normalised_name]
self.assertFalse(fetch_list)
if normalised_name in missing_repos:
pass
elif url.startswith("https://github.com/"):
slug = url[len("https://github.com/") :]
rv = self._check_github_repo(slug)
self.assertTrue(rv)
try:
rv = self._check_github_setuppy(slug, normalised_name)
except GitHubAPIMessage as e:
raise unittest.SkipTest(str(e))
if rv is False:
return
self.assertTrue(rv)
else:
r = web_session.get(url)
r.raise_for_status()
if PY2:
return
if isinstance(expected, str):
self.assertGreater(_compute_similarity(name, expected), 0.05)
else:
highest = 0
for i in expected:
val = _compute_similarity(name, i)
highest = max(highest, val)
self.assertGreater(highest, 0.05)
@expand
class TestMismatchFetched(_ExplicitBase, _TestBase):
expected = expected
names = mismatch
expected_failures = ["marionette-driver"]
@foreach(name_mismatch_fetched.keys())
def test_package(self, name):
expected = self.expected[name]
url = self._test_package(name)
if name in self.expected_failures:
return
self.assertIsNotNone(url)
self.assertInsensitiveEqual(url, expected)
if isinstance(expected, str):
self.assertIn("/", expected, "{} should be {}".format(expected, url))
normalised_name = normalize(name)
fetch_list = _fetch_mapping[normalised_name]
self.assertTrue(fetch_list)
if normalised_name in missing_repos:
pass
elif url.startswith("https://github.com/"):
slug = url[len("https://github.com/") :]
rv = self._check_github_repo(slug)
self.assertTrue(rv)
try:
rv = self._check_github_setuppy(slug, normalised_name)
except GitHubAPIMessage as e:
raise unittest.SkipTest(str(e))
if rv is False:
return
self.assertTrue(rv)
elif url == "https://wiki.mozilla.org/Auto-tools/Projects/Mozbase":
# Fetching is a bit slow, and failures for moz* are very repetitive
pass
else:
r = web_session.get(url)
r.raise_for_status()
if PY2:
return
if isinstance(expected, str):
self.assertGreater(_compute_similarity(name, expected), 0.05)
else:
highest = 0
for i in expected:
val = _compute_similarity(name, i)
highest = max(highest, val)
self.assertGreater(highest, 0.05)
| [
"tests.utils.web_session.get",
"pypidb._similarity._compute_similarity",
"tests.data.name_mismatch_metadata.keys",
"pypidb._similarity.normalize",
"tests.utils.normalise_list",
"tests.data.name_mismatch_fetched.keys",
"tests.data.exact_fetched.keys",
"tests.data.exact_metadata.keys"
] | [((613, 642), 'tests.utils.normalise_list', 'normalise_list', (['missing_repos'], {}), '(missing_repos)\n', (627, 642), False, 'from tests.utils import _TestBase, normalise_list, web_session\n'), ((1530, 1545), 'pypidb._similarity.normalize', 'normalize', (['name'], {}), '(name)\n', (1539, 1545), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((2902, 2917), 'pypidb._similarity.normalize', 'normalize', (['name'], {}), '(name)\n', (2911, 2917), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((2658, 2679), 'tests.data.exact_metadata.keys', 'exact_metadata.keys', ([], {}), '()\n', (2677, 2679), False, 'from tests.data import exact, exact_fetched, exact_metadata, invalid, mismatch, missing_repos, name_mismatch_fetched, name_mismatch_metadata\n'), ((3913, 3928), 'pypidb._similarity.normalize', 'normalize', (['name'], {}), '(name)\n', (3922, 3928), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((3670, 3690), 'tests.data.exact_fetched.keys', 'exact_fetched.keys', ([], {}), '()\n', (3688, 3690), False, 'from tests.data import exact, exact_fetched, exact_metadata, invalid, mismatch, missing_repos, name_mismatch_fetched, name_mismatch_metadata\n'), ((5907, 5922), 'pypidb._similarity.normalize', 'normalize', (['name'], {}), '(name)\n', (5916, 5922), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((5289, 5318), 'tests.data.name_mismatch_metadata.keys', 'name_mismatch_metadata.keys', ([], {}), '()\n', (5316, 5318), False, 'from tests.data import exact, exact_fetched, exact_metadata, invalid, mismatch, missing_repos, name_mismatch_fetched, name_mismatch_metadata\n'), ((7576, 7591), 'pypidb._similarity.normalize', 'normalize', (['name'], {}), '(name)\n', (7585, 7591), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((7137, 7165), 'tests.data.name_mismatch_fetched.keys', 'name_mismatch_fetched.keys', ([], {}), '()\n', (7163, 7165), False, 'from tests.data import exact, exact_fetched, exact_metadata, invalid, mismatch, missing_repos, name_mismatch_fetched, name_mismatch_metadata\n'), ((2105, 2148), 'tests.utils.web_session.get', 'web_session.get', (['url'], {'allow_redirects': '(False)'}), '(url, allow_redirects=False)\n', (2120, 2148), False, 'from tests.utils import _TestBase, normalise_list, web_session\n'), ((3167, 3202), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'expected'], {}), '(name, expected)\n', (3186, 3202), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((4560, 4580), 'tests.utils.web_session.get', 'web_session.get', (['url'], {}), '(url)\n', (4575, 4580), False, 'from tests.utils import _TestBase, normalise_list, web_session\n'), ((4734, 4769), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'expected'], {}), '(name, expected)\n', (4753, 4769), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((6555, 6575), 'tests.utils.web_session.get', 'web_session.get', (['url'], {}), '(url)\n', (6570, 6575), False, 'from tests.utils import _TestBase, normalise_list, web_session\n'), ((6715, 6750), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'expected'], {}), '(name, expected)\n', (6734, 6750), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((6849, 6877), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'i'], {}), '(name, i)\n', (6868, 6877), False, 'from 
pypidb._similarity import _compute_similarity, normalize\n'), ((8557, 8592), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'expected'], {}), '(name, expected)\n', (8576, 8592), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((8691, 8719), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'i'], {}), '(name, i)\n', (8710, 8719), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((3423, 3451), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'i'], {}), '(name, i)\n', (3442, 3451), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((4990, 5018), 'pypidb._similarity._compute_similarity', '_compute_similarity', (['name', 'i'], {}), '(name, i)\n', (5009, 5018), False, 'from pypidb._similarity import _compute_similarity, normalize\n'), ((8397, 8417), 'tests.utils.web_session.get', 'web_session.get', (['url'], {}), '(url)\n', (8412, 8417), False, 'from tests.utils import _TestBase, normalise_list, web_session\n')] |
import pandas as pd
import numpy as np
import pickle
from web_constants import *
from project_data import ProjectData, get_selected_project_data
# Read in meta file
meta_clinical_df = pd.read_csv(META_CLINICAL_FILE, sep='\t')
meta_clinical_df = meta_clinical_df.loc[~meta_clinical_df[META_COL_CLINICAL_COL].isin([ICD_O_3_SITE_DESC, ICD_O_3_HISTOLOGY_DESC, SURVIVAL_DAYS_TO_DEATH, SURVIVAL_DAYS_TO_LAST_FOLLOWUP])]
def append_icd_desc(row, code_col, desc_col):
if row[desc_col] != 'nan':
return ("%s (%s)" % (row[code_col], row[desc_col]))
else:
return row[code_col]
def get_clinical_variables():
return list(meta_clinical_df[META_COL_CLINICAL_COL].unique())
def get_clinical_variable_scale_types():
return meta_clinical_df.drop_duplicates(subset=[META_COL_CLINICAL_COL])[[META_COL_CLINICAL_COL, META_COL_CLINICAL_SCALE_TYPE]].to_dict('records')
def plot_clinical(projects, return_df=False):
result = []
clinical_vars = get_clinical_variables()
project_data = get_selected_project_data(projects)
clinical_df = pd.DataFrame(index=[], data=[], columns=clinical_vars + [ICD_O_3_SITE_DESC, ICD_O_3_HISTOLOGY_DESC])
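    # gather every selected project's clinical table into one frame; projects without clinical data contribute an empty frame indexed by their samples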
for proj in project_data:
samples = proj.get_samples_list()
if proj.has_clinical_df():
proj_clinical_df = proj.get_clinical_df()
else:
proj_clinical_df = pd.DataFrame(index=samples, data=[], columns=[])
        clinical_df = pd.concat([clinical_df, proj_clinical_df])  # DataFrame.append was removed in pandas 2.x
# Try to convert columns to float if continuous-valued variables
for clinical_var in clinical_vars:
if meta_clinical_df.loc[(meta_clinical_df[META_COL_CLINICAL_COL] == clinical_var) & \
(meta_clinical_df[META_COL_CLINICAL_SCALE_TYPE] == 'continuous')].shape[0] > 0:
try:
clinical_df[clinical_var] = clinical_df[clinical_var].astype(float)
            except (TypeError, ValueError):  # not a numeric column; leave the values as-is
pass
else:
clinical_df[clinical_var] = clinical_df[clinical_var].fillna(value='nan')
# "special" variable behavior
if ICD_O_3_SITE_CODE in clinical_vars:
clinical_df[ICD_O_3_SITE_CODE] = clinical_df.apply(
lambda row: append_icd_desc(row, ICD_O_3_SITE_CODE, ICD_O_3_SITE_DESC),
axis='columns'
)
if ICD_O_3_HISTOLOGY_CODE in clinical_vars:
clinical_df[ICD_O_3_HISTOLOGY_CODE] = clinical_df.apply(
lambda row: append_icd_desc(row, ICD_O_3_HISTOLOGY_CODE, ICD_O_3_HISTOLOGY_DESC),
axis='columns'
)
if SURVIVAL_DAYS_TO_DEATH in clinical_vars:
clinical_df[SURVIVAL_DAYS_TO_DEATH] = clinical_df[SURVIVAL_DAYS_TO_DEATH].clip(lower=0.0)
if SURVIVAL_DAYS_TO_LAST_FOLLOWUP in clinical_vars:
clinical_df[SURVIVAL_DAYS_TO_LAST_FOLLOWUP] = clinical_df[SURVIVAL_DAYS_TO_LAST_FOLLOWUP].clip(lower=0.0)
clinical_df.index = clinical_df.index.rename("sample_id")
clinical_df = clinical_df[clinical_vars]
if return_df:
return clinical_df
clinical_df = clinical_df.fillna(value='nan')
clinical_df = clinical_df.reset_index()
result = clinical_df.to_dict('records')
return result
| [
"project_data.get_selected_project_data",
"pandas.DataFrame",
"pandas.read_csv"
] | [((186, 227), 'pandas.read_csv', 'pd.read_csv', (['META_CLINICAL_FILE'], {'sep': '"""\t"""'}), "(META_CLINICAL_FILE, sep='\\t')\n", (197, 227), True, 'import pandas as pd\n'), ((1010, 1045), 'project_data.get_selected_project_data', 'get_selected_project_data', (['projects'], {}), '(projects)\n', (1035, 1045), False, 'from project_data import ProjectData, get_selected_project_data\n'), ((1065, 1169), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': '[]', 'data': '[]', 'columns': '(clinical_vars + [ICD_O_3_SITE_DESC, ICD_O_3_HISTOLOGY_DESC])'}), '(index=[], data=[], columns=clinical_vars + [ICD_O_3_SITE_DESC,\n ICD_O_3_HISTOLOGY_DESC])\n', (1077, 1169), True, 'import pandas as pd\n'), ((1372, 1420), 'pandas.DataFrame', 'pd.DataFrame', ([], {'index': 'samples', 'data': '[]', 'columns': '[]'}), '(index=samples, data=[], columns=[])\n', (1384, 1420), True, 'import pandas as pd\n')] |
import json
from unittest.mock import patch
import flask_testing
from webapp import app
from webapp.test_utils import mocks
from webapp.json_api import serializers
class TestContainerEndpoints(flask_testing.TestCase):
def create_app(self):
return app
    def test_getting_container_without_id(self):
mocks.DockerClient.containers.get = mocks.NotFound
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers/1",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 404)
def test_getting_container_with_id(self):
mocks.DockerClient.containers.get = mocks.SingleContainer
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers/1",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 200)
def test_get_container_logs_with_unknown_id(self):
mocks.DockerClient.containers.get = mocks.NotFound
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers/1/logs",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 404)
def test_get_container_logs_with_known_id(self):
mocks.DockerClient.containers.get = mocks.SingleContainer
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers/1/logs",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 200)
def test_get_all_containers(self):
expected_response = {"data": []}
for container in mocks.Containers:
serialized_container = serializers.dict_serializer(
container, fields=["id", "image", "name", "short_id", "status"]
)
expected_response["data"].append(serialized_container)
mocks.DockerClient.containers.list = mocks.ContainersList
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers.get("Content-Type"), "application/json"
)
self.assertDictEqual(response.get_json(), expected_response)
def test_get_all_containers_with_filters(self):
expected_response = {"data": []}
for container in mocks.Containers[:3]:
serialized_container = serializers.dict_serializer(
container, fields=["id", "image", "name", "short_id", "status"]
)
expected_response["data"].append(serialized_container)
mocks.DockerClient.containers.list = mocks.ContainersList
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.get(
"/containers",
query_string={"active": "true"},
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers.get("Content-Type"), "application/json"
)
self.assertDictEqual(response.get_json(), expected_response)
def test_delete_container_without_id(self):
mocks.DockerClient.containers.get = mocks.NotFound
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.delete(
"/containers/1",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 404)
self.assertEqual(
response.headers.get("Content-Type"), "application/json"
)
def test_delete_container_with_id(self):
mocks.DockerClient.containers.get = mocks.SingleContainer
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.delete(
"/containers/1",
headers={"Accept": "application/json"}
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers.get("Content-Type"), "application/json"
)
def test_update_container_patch_without_id(self):
mocks.DockerClient.containers.get = mocks.NotFound
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.patch(
"/containers/1",
headers={
"Accept": "application/json",
"Content-Type": "application/json"
},
data=json.dumps({"state": "start"})
)
self.assertEqual(response.status_code, 404)
def test_update_container_patch_request_with_id(self):
mocks.DockerClient.containers.get = mocks.SingleContainer
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.patch(
"/containers/1",
headers={
"Accept": "application/json",
"Content-Type": "application/json"
},
data=json.dumps({"state": "start"})
)
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.headers.get("Content-Type"),
"application/json"
)
def test_container_create_without_valid_api_params(self):
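        # every payload below is missing a required field or uses a wrong type, so the API must answer 400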
requests = (
{},
{"image": 23},
{"image": "alpine", "command": 323},
{"image": "alpine", "command": "echo hello world", "ports": 234},
{"image": "alpine", "command": "echo hello world", "ports": "te"},
)
for request in requests:
response = self.client.post(
"/containers",
headers={
"Accept": "application/json",
"Content-Type": "application/json"
},
data=json.dumps(request)
)
with self.subTest(request=request):
self.assertEqual(response.status_code, 400)
self.assertIn("error", response.get_json())
self.assertEqual(
response.headers.get("Content-Type"),
"application/json"
)
def test_container_create_with_valid_api_params(self):
request_data = {
"image": "alpine:latest",
"command": "echo hello world",
"ports": {
'8000': '8000'
}
}
expected_response = serializers.dict_serializer(
mocks.Containers[0],
fields=["id", "image", "name", "short_id", "status"]
)
mocks.DockerClient.containers.run.return_value = mocks.Containers[0]
with patch("webapp.containers.views.docker.from_env", mocks.from_env):
response = self.client.post(
"/containers",
headers={
"Accept": "application/json",
"Content-Type": "application/json"
},
data=json.dumps(request_data)
)
self.assertEqual(response.status_code, 201)
self.assertEqual(
response.headers.get("Content-Type"),
"application/json"
)
self.assertDictEqual(response.get_json(), expected_response)
| [
"webapp.json_api.serializers.dict_serializer",
"json.dumps",
"unittest.mock.patch"
] | [((7071, 7177), 'webapp.json_api.serializers.dict_serializer', 'serializers.dict_serializer', (['mocks.Containers[0]'], {'fields': "['id', 'image', 'name', 'short_id', 'status']"}), "(mocks.Containers[0], fields=['id', 'image',\n 'name', 'short_id', 'status'])\n", (7098, 7177), False, 'from webapp.json_api import serializers\n'), ((387, 451), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (392, 451), False, 'from unittest.mock import patch\n'), ((773, 837), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (778, 837), False, 'from unittest.mock import patch\n'), ((1161, 1225), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (1166, 1225), False, 'from unittest.mock import patch\n'), ((1563, 1627), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (1568, 1627), False, 'from unittest.mock import patch\n'), ((1992, 2088), 'webapp.json_api.serializers.dict_serializer', 'serializers.dict_serializer', (['container'], {'fields': "['id', 'image', 'name', 'short_id', 'status']"}), "(container, fields=['id', 'image', 'name',\n 'short_id', 'status'])\n", (2019, 2088), False, 'from webapp.json_api import serializers\n'), ((2262, 2326), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (2267, 2326), False, 'from unittest.mock import patch\n'), ((2871, 2967), 'webapp.json_api.serializers.dict_serializer', 'serializers.dict_serializer', (['container'], {'fields': "['id', 'image', 'name', 'short_id', 'status']"}), "(container, fields=['id', 'image', 'name',\n 'short_id', 'status'])\n", (2898, 2967), False, 'from webapp.json_api import serializers\n'), ((3141, 3205), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (3146, 3205), False, 'from unittest.mock import patch\n'), ((3743, 3807), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (3748, 3807), False, 'from unittest.mock import patch\n'), ((4236, 4300), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (4241, 4300), False, 'from unittest.mock import patch\n'), ((4731, 4795), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (4736, 4795), False, 'from unittest.mock import patch\n'), ((5283, 5347), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), "('webapp.containers.views.docker.from_env', mocks.from_env)\n", (5288, 5347), False, 'from unittest.mock import patch\n'), ((7298, 7362), 'unittest.mock.patch', 'patch', (['"""webapp.containers.views.docker.from_env"""', 'mocks.from_env'], {}), 
"('webapp.containers.views.docker.from_env', mocks.from_env)\n", (7303, 7362), False, 'from unittest.mock import patch\n'), ((5043, 5073), 'json.dumps', 'json.dumps', (["{'state': 'start'}"], {}), "({'state': 'start'})\n", (5053, 5073), False, 'import json\n'), ((5595, 5625), 'json.dumps', 'json.dumps', (["{'state': 'start'}"], {}), "({'state': 'start'})\n", (5605, 5625), False, 'import json\n'), ((6448, 6467), 'json.dumps', 'json.dumps', (['request'], {}), '(request)\n', (6458, 6467), False, 'import json\n'), ((7607, 7631), 'json.dumps', 'json.dumps', (['request_data'], {}), '(request_data)\n', (7617, 7631), False, 'import json\n')] |
'''
Code generator for message protocol YAML files.
'''
import os
import sys
import yaml
import genutil as util
Version = 11
#-------------------------------------------------------------------------------
def writeHeaderTop(f, desc) :
'''
Write header area for the generated C++ header.
'''
f.write('#pragma once\n')
f.write('//-----------------------------------------------------------------------------\n')
f.write('/* #version:{}#\n'.format(Version))
f.write(' machine generated, do not edit!\n')
f.write('*/\n')
f.write('#include <cstring>\n')
#-------------------------------------------------------------------------------
def writeIncludes(f, desc) :
'''
Write include statements in the generated C++ header.
'''
f.write('#include "Messaging/Message.h"\n')
f.write('#include "Messaging/Serializer.h"\n')
parentHdr = desc.get('parentProtocolHeader', 'Messaging/Protocol.h')
f.write('#include "{}"\n'.format(parentHdr))
for hdr in desc.get('headers', []) :
f.write('#include "{}"\n'.format(hdr))
f.write('\n')
#-------------------------------------------------------------------------------
def writeProtocolMethods(f, desc) :
'''
Write the protocol methods
'''
f.write(' static ProtocolIdType GetProtocolId() {\n')
f.write(" return '{}';\n".format(desc['id']))
f.write(' };\n')
#-------------------------------------------------------------------------------
def writeMessageIdEnum(f, desc) :
'''
Write the enum with message ids
'''
protocol = desc['name']
parentProtocol = desc.get('parentProtocol', 'Protocol')
f.write(' class MessageId {\n')
f.write(' public:\n')
f.write(' enum {\n')
msgCount = 0
for msg in desc['messages'] :
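        # the first id continues where the parent protocol's ids end, so message ids stay
        # unique across the whole protocol hierarchy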
if msgCount == 0:
f.write(' ' + msg['name'] + 'Id = ' + parentProtocol + '::MessageId::NumMessageIds, \n')
else :
f.write(' ' + msg['name'] + 'Id,\n')
msgCount += 1
f.write(' NumMessageIds\n')
f.write(' };\n')
f.write(' static const char* ToString(MessageIdType c) {\n')
f.write(' switch (c) {\n')
for msg in desc['messages'] :
msgName = msg['name'] + 'Id'
f.write(' case ' + msgName + ': return "' + msgName + '";\n')
f.write(' default: return "InvalidMessageId";\n')
f.write(' }\n')
f.write(' };\n')
f.write(' static MessageIdType FromString(const char* str) {\n')
for msg in desc['messages'] :
msgName = msg['name'] + 'Id'
f.write(' if (std::strcmp("' + msgName + '", str) == 0) return ' + msgName + ';\n')
f.write(' return InvalidMessageId;\n')
f.write(' };\n')
f.write(' };\n')
f.write(' typedef Ptr<Message> (*CreateCallback)();\n')
f.write(' static CreateCallback jumpTable[' + protocol + '::MessageId::NumMessageIds];\n')
#-------------------------------------------------------------------------------
def writeFactoryClassDecl(f, desc) :
'''
Writes the message factory for this protocol
'''
f.write(' class Factory {\n')
f.write(' public:\n')
f.write(' static Ptr<Message> Create(MessageIdType id);\n')
f.write(' };\n')
#-------------------------------------------------------------------------------
def writeFactoryClassImpl(f, desc) :
'''
Writes the factory class implementation
'''
protocol = desc['name']
parentProtocol = desc.get('parentProtocol', 'Protocol')
f.write(protocol + '::CreateCallback ' + protocol + '::jumpTable[' + protocol + '::MessageId::NumMessageIds] = { \n')
for msg in desc['messages'] :
f.write(' &' + protocol + '::' + msg['name'] + '::FactoryCreate,\n')
f.write('};\n')
f.write('Ptr<Message>\n')
f.write(protocol + '::Factory::Create(MessageIdType id) {\n')
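    # ids owned by the parent protocol are forwarded to its factory; this protocol's own ids index the jump table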
f.write(' if (id < ' + parentProtocol + '::MessageId::NumMessageIds) {\n')
f.write(' return ' + parentProtocol + '::Factory::Create(id);\n')
f.write(' }\n')
f.write(' else {\n')
f.write(' o_assert(id < ' + protocol + '::MessageId::NumMessageIds);\n')
f.write(' return jumpTable[id - ' + parentProtocol + '::MessageId::NumMessageIds]();\n')
f.write(' };\n')
f.write('}\n')
#-------------------------------------------------------------------------------
def getAttrDefaultValue(attr) :
'''
Get the default value for a given attribute
'''
defValue = attr.get('default')
attrType = attr['type']
if attrType in ('int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64') :
if not defValue :
defValue = '0'
elif attrType in ('char', 'unsigned char', 'int', 'unsigned int', 'short', 'unsigned short', 'long', 'unsigned long') :
if not defValue :
defValue = '0'
elif attrType == 'bool' :
if not defValue :
defValue = 'false'
elif attrType in ('float32', 'float') :
if not defValue :
defValue = '0.0f'
elif attrType in ('float64', 'double') :
if not defValue :
defValue = '0.0'
    return defValue
#-------------------------------------------------------------------------------
def getRefType(attrType) :
'''
Get the reference type string for an attribute type
'''
if attrType in ('int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64') :
return attrType
elif attrType in ('bool', 'char', 'unsigned char', 'int', 'unsigned int', 'short', 'unsigned short', 'long', 'unsigned long') :
return attrType
elif attrType in ('float32', 'float') :
return attrType
elif attrType in ('float64', 'double') :
return attrType
else :
return 'const ' + attrType + '&'
#-------------------------------------------------------------------------------
def getValueType(attrType) :
'''
Get the value type string for an attribute type
'''
return attrType
#-------------------------------------------------------------------------------
def isArrayType(attrType) :
'''
Test if the type string is an array type (Array<TYPE>)
'''
return attrType.startswith('Array<') and attrType.endswith('>')
#-------------------------------------------------------------------------------
def getArrayType(attrType) :
'''
Get the element type of an array type.
'''
# strip the 'Array<' at the left, and the '>' at the right
    return attrType[len('Array<'):-1]
#-------------------------------------------------------------------------------
def writeMessageClasses(f, desc) :
'''
Write the message classes to the generated C++ header
'''
protocolId = desc['id']
for msg in desc['messages'] :
msgClassName = msg['name']
msgParentClassName = msg.get('parent', 'Message')
f.write(' class ' + msgClassName + ' : public ' + msgParentClassName + ' {\n')
f.write(' OryolClassDecl(' + msgClassName + ');\n')
f.write(' OryolTypeDecl(' + msgClassName + ',' + msgParentClassName + ');\n')
f.write(' public:\n')
# write constructor
f.write(' ' + msgClassName + '() {\n')
f.write(' this->msgId = MessageId::' + msgClassName + 'Id;\n')
for attr in msg.get('attrs', []) :
attrName = attr['name'].lower()
defValue = getAttrDefaultValue(attr)
if defValue :
f.write(' this->' + attrName + ' = ' + defValue + ';\n')
f.write(' };\n')
# special factory create method
f.write(' static Ptr<Message> FactoryCreate() {\n')
f.write(' return Create();\n')
f.write(' };\n')
# special class message id static method
f.write(' static MessageIdType ClassMessageId() {\n')
f.write(' return MessageId::' + msgClassName + 'Id;\n')
f.write(' };\n')
# virtual method which checks whether the method belongs to a protocol
f.write(' virtual bool IsMemberOf(ProtocolIdType protId) const override {\n')
f.write(" if (protId == '" + protocolId + "') return true;\n")
f.write(' else return ' + msgParentClassName + '::IsMemberOf(protId);\n')
f.write(' };\n')
# write serializer methods
if msg.get('serialize', False) :
f.write(' virtual int32 EncodedSize() const override;\n')
f.write(' virtual uint8* Encode(uint8* dstPtr, const uint8* maxValidPtr) const override;\n')
f.write(' virtual const uint8* Decode(const uint8* srcPtr, const uint8* maxValidPtr) override;\n')
# write setters/getters
for attr in msg.get('attrs', []) :
attrName = attr['name']
attrType = attr['type']
f.write(' void Set' + attrName + '(' + getRefType(attrType) + ' val) {\n')
f.write(' this->' + attrName.lower() + ' = val;\n')
f.write(' };\n')
f.write(' ' + getRefType(attrType) + ' Get' + attrName + '() const {\n')
f.write(' return this->' + attrName.lower() + ';\n')
f.write(' };\n')
# write members
f.write('private:\n')
for attr in msg.get('attrs', []) :
attrName = attr['name'].lower()
attrType = attr['type']
f.write(' ' + getValueType(attrType) + ' ' + attrName + ';\n')
f.write(' };\n')
#-------------------------------------------------------------------------------
def writeSerializeMethods(f, desc) :
'''
Writes the serializer methods of the message to the source file.
'''
for msg in desc['messages'] :
if msg.get('serialize', False) :
protocol = desc['name']
msgClassName = msg['name']
msgParentClassName = msg.get('parent', 'Message')
# EncodedSize()
f.write('int32 ' + protocol + '::' + msgClassName + '::EncodedSize() const {\n')
f.write(' int32 s = ' + msgParentClassName + '::EncodedSize();\n')
for attr in msg.get('attrs', []) :
attrName = attr['name'].lower()
attrType = attr['type']
if isArrayType(attrType) :
elmType = getArrayType(attrType)
f.write(' s += Serializer::EncodedArraySize<' + elmType + '>(this->' + attrName + ');\n')
else :
f.write(' s += Serializer::EncodedSize<' + attrType + '>(this->' + attrName + ');\n')
f.write(' return s;\n')
f.write('}\n')
# Encode
            # FIXME: I think we need to differentiate between "send" and "receive" attributes!
# ... so: EncodeSend/DecodeSend, EncodeReceive/DecodeReceive
f.write('uint8* ' + protocol + '::' + msgClassName + '::Encode(uint8* dstPtr, const uint8* maxValidPtr) const {\n')
f.write(' dstPtr = ' + msgParentClassName + '::Encode(dstPtr, maxValidPtr);\n')
for attr in msg.get('attrs', []) :
attrName = attr['name'].lower()
attrType = attr['type']
if isArrayType(attrType) :
elmType = getArrayType(attrType)
f.write(' dstPtr = Serializer::EncodeArray<' + elmType + '>(this->' + attrName + ', dstPtr, maxValidPtr);\n')
else :
f.write(' dstPtr = Serializer::Encode<' + attrType + '>(this->' + attrName + ', dstPtr, maxValidPtr);\n')
f.write(' return dstPtr;\n')
f.write('}\n')
# Decode
f.write('const uint8* ' + protocol + '::' + msgClassName + '::Decode(const uint8* srcPtr, const uint8* maxValidPtr) {\n')
f.write(' srcPtr = ' + msgParentClassName + '::Decode(srcPtr, maxValidPtr);\n')
for attr in msg.get('attrs', []) :
attrName = attr['name'].lower()
attrType = attr['type']
if isArrayType(attrType) :
elmType = getArrayType(attrType)
f.write(' srcPtr = Serializer::DecodeArray<' + elmType + '>(srcPtr, maxValidPtr, this->' + attrName + ');\n')
else :
f.write(' srcPtr = Serializer::Decode<' + attrType + '>(srcPtr, maxValidPtr, this->' + attrName + ');\n')
f.write(' return srcPtr;\n')
f.write('}\n')
#-------------------------------------------------------------------------------
def generateHeader(desc, absHeaderPath) :
'''
Generate the C++ header file
'''
f = open(absHeaderPath, 'w')
protocol = desc['name']
writeHeaderTop(f, desc)
writeIncludes(f, desc)
f.write('namespace Oryol {\n')
f.write('class ' + protocol + ' {\n')
f.write('public:\n')
writeProtocolMethods(f, desc)
writeMessageIdEnum(f, desc)
writeFactoryClassDecl(f, desc)
writeMessageClasses(f, desc)
f.write('};\n')
f.write('}\n')
f.close()
#-------------------------------------------------------------------------------
def writeSourceTop(f, desc, absSourcePath) :
'''
Write source file header area
'''
path, hdrFileAndExt = os.path.split(absSourcePath)
hdrFile, ext = os.path.splitext(hdrFileAndExt)
f.write('//-----------------------------------------------------------------------------\n')
f.write('// #version:{}# machine generated, do not edit!\n'.format(Version))
f.write('//-----------------------------------------------------------------------------\n')
f.write('#include "Pre.h"\n')
f.write('#include "' + hdrFile + '.h"\n')
f.write('\n')
#-------------------------------------------------------------------------------
def generateSource(desc, absSourcePath) :
'''
Generate the C++ source file
'''
protocol = desc['name']
f = open(absSourcePath, 'w')
writeSourceTop(f, desc, absSourcePath)
f.write('namespace Oryol {\n')
for msg in desc['messages'] :
msgClassName = msg['name']
f.write('OryolClassImpl(' + protocol + '::' + msgClassName + ');\n')
writeFactoryClassImpl(f, desc)
writeSerializeMethods(f, desc)
f.write('}\n')
f.close()
#-------------------------------------------------------------------------------
def generate(input, out_src, out_hdr) :
if util.isDirty(Version, [input], [out_src, out_hdr]) :
with open(input, 'r') as f :
desc = yaml.load(f)
generateHeader(desc, out_hdr)
generateSource(desc, out_src)
| [
"yaml.load",
"os.path.splitext",
"genutil.isDirty",
"os.path.split"
] | [((13647, 13675), 'os.path.split', 'os.path.split', (['absSourcePath'], {}), '(absSourcePath)\n', (13660, 13675), False, 'import os\n'), ((13695, 13726), 'os.path.splitext', 'os.path.splitext', (['hdrFileAndExt'], {}), '(hdrFileAndExt)\n', (13711, 13726), False, 'import os\n'), ((14801, 14851), 'genutil.isDirty', 'util.isDirty', (['Version', '[input]', '[out_src, out_hdr]'], {}), '(Version, [input], [out_src, out_hdr])\n', (14813, 14851), True, 'import genutil as util\n'), ((14910, 14922), 'yaml.load', 'yaml.load', (['f'], {}), '(f)\n', (14919, 14922), False, 'import yaml\n')] |
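The generator above consumes a YAML protocol description and emits a matching C++ header/source pair. For orientation, the following is a minimal sketch of that description, written as the Python dict that yaml.load would hand to generateHeader()/generateSource(); every name and value is illustrative, not taken from a real protocol file.
# Minimal illustrative protocol description (all names/values are made up):
desc = {
    "name": "TestProtocol",          # C++ class name of the generated protocol
    "id": "TSTP",                    # protocol id returned by GetProtocolId()
    "headers": ["Core/Types.h"],     # extra includes written by writeIncludes()
    "messages": [
        {
            "name": "DisplaySetup",
            "serialize": True,       # also emit EncodedSize()/Encode()/Decode()
            "attrs": [
                {"name": "Width", "type": "int32", "default": "640"},
                {"name": "Titles", "type": "Array<String>"},
            ],
        },
    ],
}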
"""
"""
import numpy as np
from afib import BaseRisk
# points for each variable
CHADS2_PTS = [1, 1, 2, 1, 2, 1, 1, 1]
def chad(chf, htn, age, dm, stroke, vd, fem):
    feat = np.array([chf,               # congestive heart failure
                      htn,              # hypertension
                      age >= 75,        # age 75 or older
                      dm,               # diabetes mellitus
                      stroke,           # prior stroke or TIA
                      vd,               # vascular disease
                      65 <= age <= 74,  # age 65-74
                      fem], dtype=int)  # female sex
return feat.dot(CHADS2_PTS)
class Chads2(BaseRisk):
#features = ["chf","htn","index_age","dm","stroke","vd","fem"]
def score(self, row):
return chad(row["chf"],
row["htn"],
row["index_age"],
row["dm"],
row["stroke"],
row["vd"],
row["fem"])
| [
"numpy.array"
] | [((179, 264), 'numpy.array', 'np.array', (['[chf, htn, age >= 75, dm, stroke, vd, 65 <= age <= 74, fem]'], {'dtype': 'int'}), '([chf, htn, age >= 75, dm, stroke, vd, 65 <= age <= 74, fem], dtype=int\n )\n', (187, 264), True, 'import numpy as np\n')] |
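As a quick sanity check on the scorer above, the module-level chad() helper can be called directly; the patient values below are hypothetical and the expected total follows from CHADS2_PTS.
# Hypothetical example: a 78-year-old woman with CHF and a prior stroke scores
# 1 (CHF) + 2 (age >= 75) + 2 (stroke) + 1 (female) = 6.
print(chad(chf=1, htn=0, age=78, dm=0, stroke=1, vd=0, fem=1))  # -> 6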
#!/usr/bin/env python
# Copyright 2019 <NAME>
# License: Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
import torch as th
import torch.nn as nn
import torch.nn.functional as F
from typing import Tuple, List, Union, Optional
from aps.sse.base import SseBase
from aps.libs import ApsRegisters
def parse_1dstr(sstr: str) -> List[int]:
return list(map(int, sstr.split(",")))
def parse_2dstr(sstr: str) -> List[List[int]]:
return [parse_1dstr(tok) for tok in sstr.split(";")]
class ComplexConv2d(nn.Module):
"""
Complex 2D Convolution
"""
def __init__(self, *args, **kwargs):
super(ComplexConv2d, self).__init__()
self.real = nn.Conv2d(*args, **kwargs)
self.imag = nn.Conv2d(*args, **kwargs)
def forward(self, x: th.Tensor) -> th.Tensor:
"""
Args:
x (Tensor): N x C x 2F x T
Return:
y (Tensor): N x C' x 2F' x T'
"""
xr, xi = th.chunk(x, 2, -2)
yr = self.real(xr) - self.imag(xi)
yi = self.imag(xr) + self.real(xi)
y = th.cat([yr, yi], -2)
return y
class ComplexConvTranspose2d(nn.Module):
"""
Complex Transpose 2D Convolution
"""
def __init__(self, *args, **kwargs):
super(ComplexConvTranspose2d, self).__init__()
self.real = nn.ConvTranspose2d(*args, **kwargs)
self.imag = nn.ConvTranspose2d(*args, **kwargs)
def forward(self, x: th.Tensor) -> th.Tensor:
"""
Args:
x (Tensor): N x C x 2F x T
Return:
y (Tensor): N x C' x 2F' x T'
"""
xr, xi = th.chunk(x, 2, -2)
yr = self.real(xr) - self.imag(xi)
yi = self.imag(xr) + self.real(xi)
y = th.cat([yr, yi], -2)
return y
class ComplexBatchNorm2d(nn.Module):
"""
    A simple implementation of complex 2D batchnorm
"""
def __init__(self, *args, **kwargs):
super(ComplexBatchNorm2d, self).__init__()
self.real_bn = nn.BatchNorm2d(*args, **kwargs)
self.imag_bn = nn.BatchNorm2d(*args, **kwargs)
def forward(self, x: th.Tensor) -> th.Tensor:
xr, xi = th.chunk(x, 2, -2)
xr = self.real_bn(xr)
xi = self.imag_bn(xi)
x = th.cat([xr, xi], -2)
return x
class EncoderBlock(nn.Module):
"""
Convolutional block in encoder
"""
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Tuple[int],
stride: int = 1,
padding: int = 0,
causal: bool = False,
cplx: bool = True) -> None:
super(EncoderBlock, self).__init__()
conv_impl = ComplexConv2d if cplx else nn.Conv2d
# NOTE: time stride should be 1
var_kt = kernel_size[1] - 1
time_axis_pad = var_kt if causal else var_kt // 2
self.conv = conv_impl(in_channels,
out_channels,
kernel_size,
stride=stride,
padding=(padding, time_axis_pad))
if cplx:
self.bn = ComplexBatchNorm2d(out_channels)
else:
self.bn = nn.BatchNorm2d(out_channels)
self.causal = causal
self.time_axis_pad = time_axis_pad
def forward(self, x: th.Tensor) -> th.Tensor:
"""
Args:
x (Tensor): N x 2C x F x T
"""
x = self.conv(x)
if self.causal:
x = x[..., :-self.time_axis_pad]
x = self.bn(x)
x = F.leaky_relu(x)
return x
class DecoderBlock(nn.Module):
"""
Convolutional block in decoder
"""
def __init__(self,
in_channels: int,
out_channels: int,
kernel_size: Tuple[int],
stride: int = 1,
padding: int = 0,
output_padding: int = 0,
causal: bool = False,
cplx: bool = True,
last_layer: bool = False) -> None:
super(DecoderBlock, self).__init__()
conv_impl = ComplexConvTranspose2d if cplx else nn.ConvTranspose2d
var_kt = kernel_size[1] - 1
time_axis_pad = var_kt if causal else var_kt // 2
self.trans_conv = conv_impl(in_channels,
out_channels,
kernel_size,
stride=stride,
padding=(padding, var_kt - time_axis_pad),
output_padding=(output_padding, 0))
if last_layer:
self.bn = None
else:
if cplx:
self.bn = ComplexBatchNorm2d(out_channels)
else:
self.bn = nn.BatchNorm2d(out_channels)
self.causal = causal
self.time_axis_pad = time_axis_pad
def forward(self, x: th.Tensor) -> th.Tensor:
"""
Args:
x (Tensor): N x 2C x F x T
"""
x = self.trans_conv(x)
if self.causal:
x = x[..., :-self.time_axis_pad]
if self.bn:
x = self.bn(x)
x = F.leaky_relu(x)
return x
class Encoder(nn.Module):
"""
Encoder of the UNet
K: filters
S: strides
C: output channels
"""
def __init__(self,
cplx: bool,
K: List[Tuple[int, int]],
S: List[Tuple[int, int]],
C: List[int],
P: List[int],
causal: bool = False) -> None:
super(Encoder, self).__init__()
layers = [
EncoderBlock(C[i],
C[i + 1],
k,
stride=S[i],
padding=P[i],
cplx=cplx,
causal=causal) for i, k in enumerate(K)
]
self.layers = nn.ModuleList(layers)
self.num_layers = len(layers)
def forward(self, x: th.Tensor) -> Tuple[List[th.Tensor], th.Tensor]:
enc_h = []
for index, layer in enumerate(self.layers):
x = layer(x)
# print(f"encoder-{index}: {x.shape}")
if index + 1 != self.num_layers:
enc_h.append(x)
return enc_h, x
class Decoder(nn.Module):
"""
Decoder of the UNet
K: filters
S: strides
C: output channels
"""
def __init__(self,
cplx: bool,
K: List[Tuple[int, int]],
S: List[Tuple[int, int]],
C: List[int],
P: List[int],
O: List[int],
causal: bool = False,
connection: str = "sum") -> None:
super(Decoder, self).__init__()
if connection not in ["cat", "sum"]:
raise ValueError(f"Unknown connection mode: {connection}")
layers = [
DecoderBlock(C[i] * 2 if connection == "cat" and i != 0 else C[i],
C[i + 1],
k,
stride=S[i],
padding=P[i],
output_padding=O[i],
causal=causal,
cplx=cplx,
last_layer=(i == len(K) - 1)) for i, k in enumerate(K)
]
self.layers = nn.ModuleList(layers)
self.connection = connection
def forward(self, x: th.Tensor, enc_h: List[th.Tensor]) -> th.Tensor:
# N = len(self.layers)
for index, layer in enumerate(self.layers):
if index == 0:
x = layer(x)
else:
# N x C x F x T
if self.connection == "sum":
inp = x + enc_h[index - 1]
else:
# N x 2C x F x T
inp = th.cat([x, enc_h[index - 1]], 1)
x = layer(inp)
# print(f"decoder-{N - 1 - index}: {x.shape}")
return x
@ApsRegisters.sse.register("sse@dcunet")
class DCUNet(SseBase):
"""
Real or Complex UNet for Speech Enhancement
Args:
        K, S, C: kernel, stride and channel sizes for the convolutions in encoder/decoder
P: padding on frequency axis for convolution in encoder/decoder
O: output_padding on frequency axis for transposed_conv2d in decoder
NOTE: make sure that stride size on time axis is 1 (we do not do subsampling on time axis)
"""
def __init__(self,
cplx: bool = True,
K: str = "7,5;7,5;7,5;5,3;5,3;5,3;5,3",
S: str = "2,1;2,1;2,1;2,1;2,1;2,1;2,1",
C: str = "32,32,64,64,64,64,64",
P: str = "1,1,1,1,1,1,1",
O: str = "0,0,0,0,0,0,0",
num_branch: int = 1,
causal_conv: bool = False,
enh_transform: Optional[nn.Module] = None,
freq_padding: bool = True,
connection: str = "sum") -> None:
super(DCUNet, self).__init__(enh_transform, training_mode="freq")
assert enh_transform is not None
self.cplx = cplx
self.forward_stft = enh_transform.ctx(name="forward_stft")
self.inverse_stft = enh_transform.ctx(name="inverse_stft")
K = parse_2dstr(K)
S = parse_2dstr(S)
C = parse_1dstr(C)
P = parse_1dstr(P)
O = parse_1dstr(O)
self.encoder = Encoder(cplx, K, S, [1] + C, P, causal=causal_conv)
self.decoder = Decoder(cplx,
K[::-1],
S[::-1],
C[::-1] + [num_branch],
P[::-1],
O[::-1],
causal=causal_conv,
connection=connection)
self.num_branch = num_branch
def sep(self, m: th.Tensor, sr: th.Tensor, si: th.Tensor) -> th.Tensor:
# m: N x 2F x T
if self.cplx:
# N x F x T
mr, mi = th.chunk(m, 2, -2)
m_abs = (mr**2 + mi**2)**0.5
m_mag = th.tanh(m_abs)
mr, mi = m_mag * mr / m_abs, m_mag * mi / m_abs
s = self.inverse_stft((sr * mr - si * mi, sr * mi + si * mr),
input="complex")
else:
s = self.inverse_stft((sr * m, si * m), input="complex")
return s
def infer(self,
mix: th.Tensor,
mode="time") -> Union[th.Tensor, List[th.Tensor]]:
"""
Args:
mix (Tensor): S
Return:
Tensor: S
"""
self.check_args(mix, training=False, valid_dim=[1])
with th.no_grad():
mix = mix[None, :]
sep = self.forward(mix)
if self.num_branch == 1:
return sep[0]
else:
return [s[0] for s in sep]
def forward(self, s: th.Tensor) -> Union[th.Tensor, List[th.Tensor]]:
"""
Args:
s (Tensor): N x S
Return:
Tensor: N x S
"""
self.check_args(s, training=True, valid_dim=[2])
# N x F x T
sr, si = self.forward_stft(s, output="complex")
if self.cplx:
# N x 2F x T
s = th.cat([sr, si], -2)
else:
# N x F x T
s = (sr**2 + si**2)**0.5
# encoder
enc_h, h = self.encoder(s[:, None])
# reverse
enc_h = enc_h[::-1]
# decoder
m = self.decoder(h, enc_h)
# N x C x 2F x T
if self.num_branch == 1:
s = self.sep(m[:, 0], sr, si)
else:
s = [self.sep(m[:, i], sr, si) for i in range(self.num_branch)]
return s
| [
"torch.tanh",
"torch.nn.BatchNorm2d",
"torch.nn.functional.leaky_relu",
"torch.nn.ModuleList",
"torch.nn.Conv2d",
"aps.libs.ApsRegisters.sse.register",
"torch.chunk",
"torch.no_grad",
"torch.nn.ConvTranspose2d",
"torch.cat"
] | [((8120, 8159), 'aps.libs.ApsRegisters.sse.register', 'ApsRegisters.sse.register', (['"""sse@dcunet"""'], {}), "('sse@dcunet')\n", (8145, 8159), False, 'from aps.libs import ApsRegisters\n'), ((682, 708), 'torch.nn.Conv2d', 'nn.Conv2d', (['*args'], {}), '(*args, **kwargs)\n', (691, 708), True, 'import torch.nn as nn\n'), ((729, 755), 'torch.nn.Conv2d', 'nn.Conv2d', (['*args'], {}), '(*args, **kwargs)\n', (738, 755), True, 'import torch.nn as nn\n'), ((959, 977), 'torch.chunk', 'th.chunk', (['x', '(2)', '(-2)'], {}), '(x, 2, -2)\n', (967, 977), True, 'import torch as th\n'), ((1076, 1096), 'torch.cat', 'th.cat', (['[yr, yi]', '(-2)'], {}), '([yr, yi], -2)\n', (1082, 1096), True, 'import torch as th\n'), ((1327, 1362), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['*args'], {}), '(*args, **kwargs)\n', (1345, 1362), True, 'import torch.nn as nn\n'), ((1383, 1418), 'torch.nn.ConvTranspose2d', 'nn.ConvTranspose2d', (['*args'], {}), '(*args, **kwargs)\n', (1401, 1418), True, 'import torch.nn as nn\n'), ((1622, 1640), 'torch.chunk', 'th.chunk', (['x', '(2)', '(-2)'], {}), '(x, 2, -2)\n', (1630, 1640), True, 'import torch as th\n'), ((1739, 1759), 'torch.cat', 'th.cat', (['[yr, yi]', '(-2)'], {}), '([yr, yi], -2)\n', (1745, 1759), True, 'import torch as th\n'), ((1998, 2029), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['*args'], {}), '(*args, **kwargs)\n', (2012, 2029), True, 'import torch.nn as nn\n'), ((2053, 2084), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['*args'], {}), '(*args, **kwargs)\n', (2067, 2084), True, 'import torch.nn as nn\n'), ((2153, 2171), 'torch.chunk', 'th.chunk', (['x', '(2)', '(-2)'], {}), '(x, 2, -2)\n', (2161, 2171), True, 'import torch as th\n'), ((2244, 2264), 'torch.cat', 'th.cat', (['[xr, xi]', '(-2)'], {}), '([xr, xi], -2)\n', (2250, 2264), True, 'import torch as th\n'), ((3597, 3612), 'torch.nn.functional.leaky_relu', 'F.leaky_relu', (['x'], {}), '(x)\n', (3609, 3612), True, 'import torch.nn.functional as F\n'), ((6011, 6032), 'torch.nn.ModuleList', 'nn.ModuleList', (['layers'], {}), '(layers)\n', (6024, 6032), True, 'import torch.nn as nn\n'), ((7477, 7498), 'torch.nn.ModuleList', 'nn.ModuleList', (['layers'], {}), '(layers)\n', (7490, 7498), True, 'import torch.nn as nn\n'), ((3239, 3267), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (3253, 3267), True, 'import torch.nn as nn\n'), ((5233, 5248), 'torch.nn.functional.leaky_relu', 'F.leaky_relu', (['x'], {}), '(x)\n', (5245, 5248), True, 'import torch.nn.functional as F\n'), ((10185, 10203), 'torch.chunk', 'th.chunk', (['m', '(2)', '(-2)'], {}), '(m, 2, -2)\n', (10193, 10203), True, 'import torch as th\n'), ((10265, 10279), 'torch.tanh', 'th.tanh', (['m_abs'], {}), '(m_abs)\n', (10272, 10279), True, 'import torch as th\n'), ((10858, 10870), 'torch.no_grad', 'th.no_grad', ([], {}), '()\n', (10868, 10870), True, 'import torch as th\n'), ((11448, 11468), 'torch.cat', 'th.cat', (['[sr, si]', '(-2)'], {}), '([sr, si], -2)\n', (11454, 11468), True, 'import torch as th\n'), ((4841, 4869), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['out_channels'], {}), '(out_channels)\n', (4855, 4869), True, 'import torch.nn as nn\n'), ((7977, 8009), 'torch.cat', 'th.cat', (['[x, enc_h[index - 1]]', '(1)'], {}), '([x, enc_h[index - 1]], 1)\n', (7983, 8009), True, 'import torch as th\n')] |
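The core signal-processing step in DCUNet.sep() is a magnitude-bounded complex mask applied to the mixture STFT. The stand-alone sketch below replays that arithmetic on random tensors; the shapes (N, F, T) are assumptions chosen only for illustration.
import torch as th

N, F, T = 2, 257, 100                 # batch, frequency bins, frames (assumed)
m = th.randn(N, 2 * F, T)             # network output: real/imag mask halves stacked on the freq axis
sr, si = th.randn(N, F, T), th.randn(N, F, T)  # mixture STFT, real and imaginary parts
mr, mi = th.chunk(m, 2, -2)                # split the stacked mask
m_abs = (mr ** 2 + mi ** 2) ** 0.5
m_mag = th.tanh(m_abs)                    # bound the mask magnitude to (0, 1)
mr, mi = m_mag * mr / m_abs, m_mag * mi / m_abs
enh_r = sr * mr - si * mi             # complex multiply: (sr + j*si) * (mr + j*mi)
enh_i = sr * mi + si * mr             # (enh_r, enh_i) then goes to the inverse STFT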
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['ModelBiasJobDefinitionArgs', 'ModelBiasJobDefinition']
@pulumi.input_type
class ModelBiasJobDefinitionArgs:
def __init__(__self__, *,
job_resources: pulumi.Input['ModelBiasJobDefinitionMonitoringResourcesArgs'],
model_bias_app_specification: pulumi.Input['ModelBiasJobDefinitionModelBiasAppSpecificationArgs'],
model_bias_job_input: pulumi.Input['ModelBiasJobDefinitionModelBiasJobInputArgs'],
model_bias_job_output_config: pulumi.Input['ModelBiasJobDefinitionMonitoringOutputConfigArgs'],
role_arn: pulumi.Input[str],
job_definition_name: Optional[pulumi.Input[str]] = None,
model_bias_baseline_config: Optional[pulumi.Input['ModelBiasJobDefinitionModelBiasBaselineConfigArgs']] = None,
network_config: Optional[pulumi.Input['ModelBiasJobDefinitionNetworkConfigArgs']] = None,
stopping_condition: Optional[pulumi.Input['ModelBiasJobDefinitionStoppingConditionArgs']] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['ModelBiasJobDefinitionTagArgs']]]] = None):
"""
The set of arguments for constructing a ModelBiasJobDefinition resource.
:param pulumi.Input[str] role_arn: The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
:param pulumi.Input[Sequence[pulumi.Input['ModelBiasJobDefinitionTagArgs']]] tags: An array of key-value pairs to apply to this resource.
"""
pulumi.set(__self__, "job_resources", job_resources)
pulumi.set(__self__, "model_bias_app_specification", model_bias_app_specification)
pulumi.set(__self__, "model_bias_job_input", model_bias_job_input)
pulumi.set(__self__, "model_bias_job_output_config", model_bias_job_output_config)
pulumi.set(__self__, "role_arn", role_arn)
if job_definition_name is not None:
pulumi.set(__self__, "job_definition_name", job_definition_name)
if model_bias_baseline_config is not None:
pulumi.set(__self__, "model_bias_baseline_config", model_bias_baseline_config)
if network_config is not None:
pulumi.set(__self__, "network_config", network_config)
if stopping_condition is not None:
pulumi.set(__self__, "stopping_condition", stopping_condition)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="jobResources")
def job_resources(self) -> pulumi.Input['ModelBiasJobDefinitionMonitoringResourcesArgs']:
return pulumi.get(self, "job_resources")
@job_resources.setter
def job_resources(self, value: pulumi.Input['ModelBiasJobDefinitionMonitoringResourcesArgs']):
pulumi.set(self, "job_resources", value)
@property
@pulumi.getter(name="modelBiasAppSpecification")
def model_bias_app_specification(self) -> pulumi.Input['ModelBiasJobDefinitionModelBiasAppSpecificationArgs']:
return pulumi.get(self, "model_bias_app_specification")
@model_bias_app_specification.setter
def model_bias_app_specification(self, value: pulumi.Input['ModelBiasJobDefinitionModelBiasAppSpecificationArgs']):
pulumi.set(self, "model_bias_app_specification", value)
@property
@pulumi.getter(name="modelBiasJobInput")
def model_bias_job_input(self) -> pulumi.Input['ModelBiasJobDefinitionModelBiasJobInputArgs']:
return pulumi.get(self, "model_bias_job_input")
@model_bias_job_input.setter
def model_bias_job_input(self, value: pulumi.Input['ModelBiasJobDefinitionModelBiasJobInputArgs']):
pulumi.set(self, "model_bias_job_input", value)
@property
@pulumi.getter(name="modelBiasJobOutputConfig")
def model_bias_job_output_config(self) -> pulumi.Input['ModelBiasJobDefinitionMonitoringOutputConfigArgs']:
return pulumi.get(self, "model_bias_job_output_config")
@model_bias_job_output_config.setter
def model_bias_job_output_config(self, value: pulumi.Input['ModelBiasJobDefinitionMonitoringOutputConfigArgs']):
pulumi.set(self, "model_bias_job_output_config", value)
@property
@pulumi.getter(name="roleArn")
def role_arn(self) -> pulumi.Input[str]:
"""
The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
"""
return pulumi.get(self, "role_arn")
@role_arn.setter
def role_arn(self, value: pulumi.Input[str]):
pulumi.set(self, "role_arn", value)
@property
@pulumi.getter(name="jobDefinitionName")
def job_definition_name(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "job_definition_name")
@job_definition_name.setter
def job_definition_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_definition_name", value)
@property
@pulumi.getter(name="modelBiasBaselineConfig")
def model_bias_baseline_config(self) -> Optional[pulumi.Input['ModelBiasJobDefinitionModelBiasBaselineConfigArgs']]:
return pulumi.get(self, "model_bias_baseline_config")
@model_bias_baseline_config.setter
def model_bias_baseline_config(self, value: Optional[pulumi.Input['ModelBiasJobDefinitionModelBiasBaselineConfigArgs']]):
pulumi.set(self, "model_bias_baseline_config", value)
@property
@pulumi.getter(name="networkConfig")
def network_config(self) -> Optional[pulumi.Input['ModelBiasJobDefinitionNetworkConfigArgs']]:
return pulumi.get(self, "network_config")
@network_config.setter
def network_config(self, value: Optional[pulumi.Input['ModelBiasJobDefinitionNetworkConfigArgs']]):
pulumi.set(self, "network_config", value)
@property
@pulumi.getter(name="stoppingCondition")
def stopping_condition(self) -> Optional[pulumi.Input['ModelBiasJobDefinitionStoppingConditionArgs']]:
return pulumi.get(self, "stopping_condition")
@stopping_condition.setter
def stopping_condition(self, value: Optional[pulumi.Input['ModelBiasJobDefinitionStoppingConditionArgs']]):
pulumi.set(self, "stopping_condition", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['ModelBiasJobDefinitionTagArgs']]]]:
"""
An array of key-value pairs to apply to this resource.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['ModelBiasJobDefinitionTagArgs']]]]):
pulumi.set(self, "tags", value)
class ModelBiasJobDefinition(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
job_definition_name: Optional[pulumi.Input[str]] = None,
job_resources: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionMonitoringResourcesArgs']]] = None,
model_bias_app_specification: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasAppSpecificationArgs']]] = None,
model_bias_baseline_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasBaselineConfigArgs']]] = None,
model_bias_job_input: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasJobInputArgs']]] = None,
model_bias_job_output_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionMonitoringOutputConfigArgs']]] = None,
network_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionNetworkConfigArgs']]] = None,
role_arn: Optional[pulumi.Input[str]] = None,
stopping_condition: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionStoppingConditionArgs']]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionTagArgs']]]]] = None,
__props__=None):
"""
Resource Type definition for AWS::SageMaker::ModelBiasJobDefinition
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] role_arn: The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionTagArgs']]]] tags: An array of key-value pairs to apply to this resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ModelBiasJobDefinitionArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Resource Type definition for AWS::SageMaker::ModelBiasJobDefinition
:param str resource_name: The name of the resource.
:param ModelBiasJobDefinitionArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ModelBiasJobDefinitionArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
job_definition_name: Optional[pulumi.Input[str]] = None,
job_resources: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionMonitoringResourcesArgs']]] = None,
model_bias_app_specification: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasAppSpecificationArgs']]] = None,
model_bias_baseline_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasBaselineConfigArgs']]] = None,
model_bias_job_input: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionModelBiasJobInputArgs']]] = None,
model_bias_job_output_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionMonitoringOutputConfigArgs']]] = None,
network_config: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionNetworkConfigArgs']]] = None,
role_arn: Optional[pulumi.Input[str]] = None,
stopping_condition: Optional[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionStoppingConditionArgs']]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ModelBiasJobDefinitionTagArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ModelBiasJobDefinitionArgs.__new__(ModelBiasJobDefinitionArgs)
__props__.__dict__["job_definition_name"] = job_definition_name
if job_resources is None and not opts.urn:
raise TypeError("Missing required property 'job_resources'")
__props__.__dict__["job_resources"] = job_resources
if model_bias_app_specification is None and not opts.urn:
raise TypeError("Missing required property 'model_bias_app_specification'")
__props__.__dict__["model_bias_app_specification"] = model_bias_app_specification
__props__.__dict__["model_bias_baseline_config"] = model_bias_baseline_config
if model_bias_job_input is None and not opts.urn:
raise TypeError("Missing required property 'model_bias_job_input'")
__props__.__dict__["model_bias_job_input"] = model_bias_job_input
if model_bias_job_output_config is None and not opts.urn:
raise TypeError("Missing required property 'model_bias_job_output_config'")
__props__.__dict__["model_bias_job_output_config"] = model_bias_job_output_config
__props__.__dict__["network_config"] = network_config
if role_arn is None and not opts.urn:
raise TypeError("Missing required property 'role_arn'")
__props__.__dict__["role_arn"] = role_arn
__props__.__dict__["stopping_condition"] = stopping_condition
__props__.__dict__["tags"] = tags
__props__.__dict__["creation_time"] = None
__props__.__dict__["job_definition_arn"] = None
super(ModelBiasJobDefinition, __self__).__init__(
'aws-native:sagemaker:ModelBiasJobDefinition',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ModelBiasJobDefinition':
"""
Get an existing ModelBiasJobDefinition resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ModelBiasJobDefinitionArgs.__new__(ModelBiasJobDefinitionArgs)
__props__.__dict__["creation_time"] = None
__props__.__dict__["job_definition_arn"] = None
__props__.__dict__["job_definition_name"] = None
__props__.__dict__["job_resources"] = None
__props__.__dict__["model_bias_app_specification"] = None
__props__.__dict__["model_bias_baseline_config"] = None
__props__.__dict__["model_bias_job_input"] = None
__props__.__dict__["model_bias_job_output_config"] = None
__props__.__dict__["network_config"] = None
__props__.__dict__["role_arn"] = None
__props__.__dict__["stopping_condition"] = None
__props__.__dict__["tags"] = None
return ModelBiasJobDefinition(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationTime")
def creation_time(self) -> pulumi.Output[str]:
"""
The time at which the job definition was created.
"""
return pulumi.get(self, "creation_time")
@property
@pulumi.getter(name="jobDefinitionArn")
def job_definition_arn(self) -> pulumi.Output[str]:
"""
The Amazon Resource Name (ARN) of job definition.
"""
return pulumi.get(self, "job_definition_arn")
@property
@pulumi.getter(name="jobDefinitionName")
def job_definition_name(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "job_definition_name")
@property
@pulumi.getter(name="jobResources")
def job_resources(self) -> pulumi.Output['outputs.ModelBiasJobDefinitionMonitoringResources']:
return pulumi.get(self, "job_resources")
@property
@pulumi.getter(name="modelBiasAppSpecification")
def model_bias_app_specification(self) -> pulumi.Output['outputs.ModelBiasJobDefinitionModelBiasAppSpecification']:
return pulumi.get(self, "model_bias_app_specification")
@property
@pulumi.getter(name="modelBiasBaselineConfig")
def model_bias_baseline_config(self) -> pulumi.Output[Optional['outputs.ModelBiasJobDefinitionModelBiasBaselineConfig']]:
return pulumi.get(self, "model_bias_baseline_config")
@property
@pulumi.getter(name="modelBiasJobInput")
def model_bias_job_input(self) -> pulumi.Output['outputs.ModelBiasJobDefinitionModelBiasJobInput']:
return pulumi.get(self, "model_bias_job_input")
@property
@pulumi.getter(name="modelBiasJobOutputConfig")
def model_bias_job_output_config(self) -> pulumi.Output['outputs.ModelBiasJobDefinitionMonitoringOutputConfig']:
return pulumi.get(self, "model_bias_job_output_config")
@property
@pulumi.getter(name="networkConfig")
def network_config(self) -> pulumi.Output[Optional['outputs.ModelBiasJobDefinitionNetworkConfig']]:
return pulumi.get(self, "network_config")
@property
@pulumi.getter(name="roleArn")
def role_arn(self) -> pulumi.Output[str]:
"""
The Amazon Resource Name (ARN) of an IAM role that Amazon SageMaker can assume to perform tasks on your behalf.
"""
return pulumi.get(self, "role_arn")
@property
@pulumi.getter(name="stoppingCondition")
def stopping_condition(self) -> pulumi.Output[Optional['outputs.ModelBiasJobDefinitionStoppingCondition']]:
return pulumi.get(self, "stopping_condition")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.ModelBiasJobDefinitionTag']]]:
"""
An array of key-value pairs to apply to this resource.
"""
return pulumi.get(self, "tags")
| [
"pulumi.getter",
"pulumi.set",
"pulumi.ResourceOptions",
"pulumi.get"
] | [((2900, 2934), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobResources"""'}), "(name='jobResources')\n", (2913, 2934), False, 'import pulumi\n'), ((3273, 3320), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasAppSpecification"""'}), "(name='modelBiasAppSpecification')\n", (3286, 3320), False, 'import pulumi\n'), ((3746, 3785), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasJobInput"""'}), "(name='modelBiasJobInput')\n", (3759, 3785), False, 'import pulumi\n'), ((4155, 4201), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasJobOutputConfig"""'}), "(name='modelBiasJobOutputConfig')\n", (4168, 4201), False, 'import pulumi\n'), ((4621, 4650), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleArn"""'}), "(name='roleArn')\n", (4634, 4650), False, 'import pulumi\n'), ((5020, 5059), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobDefinitionName"""'}), "(name='jobDefinitionName')\n", (5033, 5059), False, 'import pulumi\n'), ((5360, 5405), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasBaselineConfig"""'}), "(name='modelBiasBaselineConfig')\n", (5373, 5405), False, 'import pulumi\n'), ((5837, 5872), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""networkConfig"""'}), "(name='networkConfig')\n", (5850, 5872), False, 'import pulumi\n'), ((6224, 6263), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""stoppingCondition"""'}), "(name='stoppingCondition')\n", (6237, 6263), False, 'import pulumi\n'), ((15306, 15340), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""creationTime"""'}), "(name='creationTime')\n", (15319, 15340), False, 'import pulumi\n'), ((15543, 15581), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobDefinitionArn"""'}), "(name='jobDefinitionArn')\n", (15556, 15581), False, 'import pulumi\n'), ((15794, 15833), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobDefinitionName"""'}), "(name='jobDefinitionName')\n", (15807, 15833), False, 'import pulumi\n'), ((15976, 16010), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""jobResources"""'}), "(name='jobResources')\n", (15989, 16010), False, 'import pulumi\n'), ((16179, 16226), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasAppSpecification"""'}), "(name='modelBiasAppSpecification')\n", (16192, 16226), False, 'import pulumi\n'), ((16431, 16476), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasBaselineConfig"""'}), "(name='modelBiasBaselineConfig')\n", (16444, 16476), False, 'import pulumi\n'), ((16685, 16724), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasJobInput"""'}), "(name='modelBiasJobInput')\n", (16698, 16724), False, 'import pulumi\n'), ((16905, 16951), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""modelBiasJobOutputConfig"""'}), "(name='modelBiasJobOutputConfig')\n", (16918, 16951), False, 'import pulumi\n'), ((17153, 17188), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""networkConfig"""'}), "(name='networkConfig')\n", (17166, 17188), False, 'import pulumi\n'), ((17363, 17392), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""roleArn"""'}), "(name='roleArn')\n", (17376, 17392), False, 'import pulumi\n'), ((17647, 17686), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""stoppingCondition"""'}), "(name='stoppingCondition')\n", (17660, 17686), False, 'import pulumi\n'), ((1956, 2008), 'pulumi.set', 'pulumi.set', (['__self__', '"""job_resources"""', 'job_resources'], {}), "(__self__, 'job_resources', job_resources)\n", (1966, 2008), False, 'import pulumi\n'), ((2017, 2103), 
'pulumi.set', 'pulumi.set', (['__self__', '"""model_bias_app_specification"""', 'model_bias_app_specification'], {}), "(__self__, 'model_bias_app_specification',\n model_bias_app_specification)\n", (2027, 2103), False, 'import pulumi\n'), ((2108, 2174), 'pulumi.set', 'pulumi.set', (['__self__', '"""model_bias_job_input"""', 'model_bias_job_input'], {}), "(__self__, 'model_bias_job_input', model_bias_job_input)\n", (2118, 2174), False, 'import pulumi\n'), ((2183, 2269), 'pulumi.set', 'pulumi.set', (['__self__', '"""model_bias_job_output_config"""', 'model_bias_job_output_config'], {}), "(__self__, 'model_bias_job_output_config',\n model_bias_job_output_config)\n", (2193, 2269), False, 'import pulumi\n'), ((2274, 2316), 'pulumi.set', 'pulumi.set', (['__self__', '"""role_arn"""', 'role_arn'], {}), "(__self__, 'role_arn', role_arn)\n", (2284, 2316), False, 'import pulumi\n'), ((3044, 3077), 'pulumi.get', 'pulumi.get', (['self', '"""job_resources"""'], {}), "(self, 'job_resources')\n", (3054, 3077), False, 'import pulumi\n'), ((3212, 3252), 'pulumi.set', 'pulumi.set', (['self', '"""job_resources"""', 'value'], {}), "(self, 'job_resources', value)\n", (3222, 3252), False, 'import pulumi\n'), ((3451, 3499), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_app_specification"""'], {}), "(self, 'model_bias_app_specification')\n", (3461, 3499), False, 'import pulumi\n'), ((3670, 3725), 'pulumi.set', 'pulumi.set', (['self', '"""model_bias_app_specification"""', 'value'], {}), "(self, 'model_bias_app_specification', value)\n", (3680, 3725), False, 'import pulumi\n'), ((3900, 3940), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_job_input"""'], {}), "(self, 'model_bias_job_input')\n", (3910, 3940), False, 'import pulumi\n'), ((4087, 4134), 'pulumi.set', 'pulumi.set', (['self', '"""model_bias_job_input"""', 'value'], {}), "(self, 'model_bias_job_input', value)\n", (4097, 4134), False, 'import pulumi\n'), ((4329, 4377), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_job_output_config"""'], {}), "(self, 'model_bias_job_output_config')\n", (4339, 4377), False, 'import pulumi\n'), ((4545, 4600), 'pulumi.set', 'pulumi.set', (['self', '"""model_bias_job_output_config"""', 'value'], {}), "(self, 'model_bias_job_output_config', value)\n", (4555, 4600), False, 'import pulumi\n'), ((4855, 4883), 'pulumi.get', 'pulumi.get', (['self', '"""role_arn"""'], {}), "(self, 'role_arn')\n", (4865, 4883), False, 'import pulumi\n'), ((4964, 4999), 'pulumi.set', 'pulumi.set', (['self', '"""role_arn"""', 'value'], {}), "(self, 'role_arn', value)\n", (4974, 4999), False, 'import pulumi\n'), ((5141, 5180), 'pulumi.get', 'pulumi.get', (['self', '"""job_definition_name"""'], {}), "(self, 'job_definition_name')\n", (5151, 5180), False, 'import pulumi\n'), ((5293, 5339), 'pulumi.set', 'pulumi.set', (['self', '"""job_definition_name"""', 'value'], {}), "(self, 'job_definition_name', value)\n", (5303, 5339), False, 'import pulumi\n'), ((5542, 5588), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_baseline_config"""'], {}), "(self, 'model_bias_baseline_config')\n", (5552, 5588), False, 'import pulumi\n'), ((5763, 5816), 'pulumi.set', 'pulumi.set', (['self', '"""model_bias_baseline_config"""', 'value'], {}), "(self, 'model_bias_baseline_config', value)\n", (5773, 5816), False, 'import pulumi\n'), ((5987, 6021), 'pulumi.get', 'pulumi.get', (['self', '"""network_config"""'], {}), "(self, 'network_config')\n", (5997, 6021), False, 'import pulumi\n'), ((6162, 6203), 'pulumi.set', 'pulumi.set', (['self', 
'"""network_config"""', 'value'], {}), "(self, 'network_config', value)\n", (6172, 6203), False, 'import pulumi\n'), ((6386, 6424), 'pulumi.get', 'pulumi.get', (['self', '"""stopping_condition"""'], {}), "(self, 'stopping_condition')\n", (6396, 6424), False, 'import pulumi\n'), ((6577, 6622), 'pulumi.set', 'pulumi.set', (['self', '"""stopping_condition"""', 'value'], {}), "(self, 'stopping_condition', value)\n", (6587, 6622), False, 'import pulumi\n'), ((6862, 6886), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (6872, 6886), False, 'import pulumi\n'), ((7021, 7052), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (7031, 7052), False, 'import pulumi\n'), ((15489, 15522), 'pulumi.get', 'pulumi.get', (['self', '"""creation_time"""'], {}), "(self, 'creation_time')\n", (15499, 15522), False, 'import pulumi\n'), ((15735, 15773), 'pulumi.get', 'pulumi.get', (['self', '"""job_definition_arn"""'], {}), "(self, 'job_definition_arn')\n", (15745, 15773), False, 'import pulumi\n'), ((15916, 15955), 'pulumi.get', 'pulumi.get', (['self', '"""job_definition_name"""'], {}), "(self, 'job_definition_name')\n", (15926, 15955), False, 'import pulumi\n'), ((16125, 16158), 'pulumi.get', 'pulumi.get', (['self', '"""job_resources"""'], {}), "(self, 'job_resources')\n", (16135, 16158), False, 'import pulumi\n'), ((16362, 16410), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_app_specification"""'], {}), "(self, 'model_bias_app_specification')\n", (16372, 16410), False, 'import pulumi\n'), ((16618, 16664), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_baseline_config"""'], {}), "(self, 'model_bias_baseline_config')\n", (16628, 16664), False, 'import pulumi\n'), ((16844, 16884), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_job_input"""'], {}), "(self, 'model_bias_job_input')\n", (16854, 16884), False, 'import pulumi\n'), ((17084, 17132), 'pulumi.get', 'pulumi.get', (['self', '"""model_bias_job_output_config"""'], {}), "(self, 'model_bias_job_output_config')\n", (17094, 17132), False, 'import pulumi\n'), ((17308, 17342), 'pulumi.get', 'pulumi.get', (['self', '"""network_config"""'], {}), "(self, 'network_config')\n", (17318, 17342), False, 'import pulumi\n'), ((17598, 17626), 'pulumi.get', 'pulumi.get', (['self', '"""role_arn"""'], {}), "(self, 'role_arn')\n", (17608, 17626), False, 'import pulumi\n'), ((17814, 17852), 'pulumi.get', 'pulumi.get', (['self', '"""stopping_condition"""'], {}), "(self, 'stopping_condition')\n", (17824, 17852), False, 'import pulumi\n'), ((18083, 18107), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (18093, 18107), False, 'import pulumi\n'), ((2373, 2437), 'pulumi.set', 'pulumi.set', (['__self__', '"""job_definition_name"""', 'job_definition_name'], {}), "(__self__, 'job_definition_name', job_definition_name)\n", (2383, 2437), False, 'import pulumi\n'), ((2501, 2579), 'pulumi.set', 'pulumi.set', (['__self__', '"""model_bias_baseline_config"""', 'model_bias_baseline_config'], {}), "(__self__, 'model_bias_baseline_config', model_bias_baseline_config)\n", (2511, 2579), False, 'import pulumi\n'), ((2631, 2685), 'pulumi.set', 'pulumi.set', (['__self__', '"""network_config"""', 'network_config'], {}), "(__self__, 'network_config', network_config)\n", (2641, 2685), False, 'import pulumi\n'), ((2741, 2803), 'pulumi.set', 'pulumi.set', (['__self__', '"""stopping_condition"""', 'stopping_condition'], {}), "(__self__, 'stopping_condition', stopping_condition)\n", (2751, 
2803), False, 'import pulumi\n'), ((2845, 2879), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (2855, 2879), False, 'import pulumi\n'), ((11476, 11500), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (11498, 11500), False, 'import pulumi\n'), ((14420, 14449), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (14442, 14449), False, 'import pulumi\n')] |