code (string, lengths 22–1.05M) | apis (list, lengths 1–3.31k) | extract_api (string, lengths 75–3.25M)
---|---|---|
#!/usr/bin/env python3
"""
Base Client Class
This is the parent class of all client classes and holds the properties and functions they all depend on.
Author: <NAME>
"""
import src.util.debugger as Debugger
import src.util.configmaker as configmaker
class BaseClient(object):
"""Base-Client Class"""
def __init__(self, configpath, configtype, debugFlag = False):
self._Debug = Debugger.Debugger(debugFlag)
self._Debug.write("INIT BaseClient")
defaultPrompt = "-"
self._prompt = defaultPrompt
self._clientConfig = configmaker.getConfig(configpath, configtype)
self._Debug.write("INIT_END BaseClient")
@property
def prompt(self):
return self._prompt
def get_client_configuration(self):
"""Base method for getting the client configuration"""
def load_client_configuration(self):
"""Base method for loading the client configuration into memory"""
| [
"src.util.debugger.Debugger",
"src.util.configmaker.getConfig"
]
| [((392, 420), 'src.util.debugger.Debugger', 'Debugger.Debugger', (['debugFlag'], {}), '(debugFlag)\n', (409, 420), True, 'import src.util.debugger as Debugger\n'), ((562, 607), 'src.util.configmaker.getConfig', 'configmaker.getConfig', (['configpath', 'configtype'], {}), '(configpath, configtype)\n', (583, 607), True, 'import src.util.configmaker as configmaker\n')] |
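A minimal sketch of how a concrete client could build on BaseClient; the subclass name, prompt string, and config path below are made up, and it assumes the src.util helpers imported above are importable.
# Hypothetical subclass of BaseClient, overriding the two stub methods.
class EchoClient(BaseClient):
    """Example client built on BaseClient"""
    def __init__(self, configpath, configtype, debugFlag=False):
        super().__init__(configpath, configtype, debugFlag)
        self._prompt = "echo> "
    def get_client_configuration(self):
        # Return the config loaded by the base constructor.
        return self._clientConfig
    def load_client_configuration(self):
        self._Debug.write("LOAD EchoClient configuration")

# client = EchoClient("config/client.json", "json", debugFlag=True)
# print(client.prompt)  # "echo> "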
# -*- coding: utf-8 -*-
import sys
from cryptomon.common import Colors
if sys.version_info >= (3, 0):
import io
else:
import StringIO as io
ascii_title = """
/$$$$$$ /$$ /$$ /$$
/$$__ $$ | $$ | $$$ /$$$
| $$ \__/ /$$$$$$ /$$ /$$ /$$$$$$ /$$$$$$ /$$$$$$ | $$$$ /$$$$ /$$$$$$ /$$$$$$$
| $$ /$$__ $$| $$ | $$ /$$__ $$|_ $$_/ /$$__ $$| $$ $$/$$ $$ /$$__ $$| $$__ $$
| $$ | $$ \__/| $$ | $$| $$ \ $$ | $$ | $$ \ $$| $$ $$$| $$| $$ \ $$| $$ \ $$
| $$ $$| $$ | $$ | $$| $$ | $$ | $$ /$$| $$ | $$| $$\ $ | $$| $$ | $$| $$ | $$
| $$$$$$/| $$ | $$$$$$$| $$$$$$$/ | $$$$/| $$$$$$/| $$ \/ | $$| $$$$$$/| $$ | $$
\______/ |__/ \____ $$| $$____/ \___/ \______/ |__/ |__/ \______/ |__/ |__/
/$$ | $$| $$
| $$$$$$/| $$
\______/ |__/
"""
def process_title(title):
buf = io.StringIO(title)
lines = buf.readlines()
lines = lines[1:-1]
colored_lines = []
colored_title = ""
for line in lines:
colored_lines.append(Colors.BLUE + line[:13] + Colors.YELLOW + line[14:])
for line in colored_lines:
colored_title += line
return colored_title + Colors.ENDLINE
| [
"StringIO.StringIO"
]
| [((1247, 1265), 'StringIO.StringIO', 'io.StringIO', (['title'], {}), '(title)\n', (1258, 1265), True, 'import StringIO as io\n')] |
import cv2
video=cv2.VideoCapture(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\pedestrian.mp4')
#pre trained pedestrian and car classifier
car_tracker_file=(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\car.xml')
pedestrian_tracker_file=(r'C:\Users\ISHITA\Desktop\ML project\UEM_PROJECT_COM\pedestrian.xml')
#create car n pedestrian classifier
car_tracker=cv2.CascadeClassifier(car_tracker_file)
pedestrian_tracker=cv2.CascadeClassifier(pedestrian_tracker_file)
#run forever until the user stops it
while True:
(read_successful,frame)=video.read()
gr_frame=cv2.cvtColor(frame,cv2.COLOR_BGR2GRAY)
#detect cars n pedestrian
cars=car_tracker.detectMultiScale(gr_frame)
pedestrians=pedestrian_tracker.detectMultiScale(gr_frame)
#draw rectangle around cars
for(x,y,w,h) in cars:
cv2.rectangle(frame,(x+1,y+2),(x+w,y+h),(255,0,0),2)
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,0,255),2)
#draw rectangle around pedestrian
for(x,y,w,h) in pedestrians:
cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,255),2)
#display
cv2.imshow('car n pedestrians',frame)
key = cv2.waitKey(1)
#stopping condition
if key == 83 or key== 115:
break
# release the VideoCapture object
video.release()
print('Press "s" to stop')
print('Hey!')
| [
"cv2.rectangle",
"cv2.imshow",
"cv2.VideoCapture",
"cv2.cvtColor",
"cv2.CascadeClassifier",
"cv2.waitKey"
]
| [((18, 114), 'cv2.VideoCapture', 'cv2.VideoCapture', (['"""C:\\\\Users\\\\ISHITA\\\\Desktop\\\\ML project\\\\UEM_PROJECT_COM\\\\pedestrian.mp4"""'], {}), "(\n 'C:\\\\Users\\\\ISHITA\\\\Desktop\\\\ML project\\\\UEM_PROJECT_COM\\\\pedestrian.mp4')\n", (34, 114), False, 'import cv2\n'), ((374, 413), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['car_tracker_file'], {}), '(car_tracker_file)\n', (395, 413), False, 'import cv2\n'), ((433, 479), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['pedestrian_tracker_file'], {}), '(pedestrian_tracker_file)\n', (454, 479), False, 'import cv2\n'), ((584, 623), 'cv2.cvtColor', 'cv2.cvtColor', (['frame', 'cv2.COLOR_BGR2GRAY'], {}), '(frame, cv2.COLOR_BGR2GRAY)\n', (596, 623), False, 'import cv2\n'), ((1097, 1135), 'cv2.imshow', 'cv2.imshow', (['"""car n pedestrians"""', 'frame'], {}), "('car n pedestrians', frame)\n", (1107, 1135), False, 'import cv2\n'), ((1145, 1159), 'cv2.waitKey', 'cv2.waitKey', (['(1)'], {}), '(1)\n', (1156, 1159), False, 'import cv2\n'), ((834, 902), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x + 1, y + 2)', '(x + w, y + h)', '(255, 0, 0)', '(2)'], {}), '(frame, (x + 1, y + 2), (x + w, y + h), (255, 0, 0), 2)\n', (847, 902), False, 'import cv2\n'), ((895, 955), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 0, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 0, 255), 2)\n', (908, 955), False, 'import cv2\n'), ((1028, 1090), 'cv2.rectangle', 'cv2.rectangle', (['frame', '(x, y)', '(x + w, y + h)', '(0, 255, 255)', '(2)'], {}), '(frame, (x, y), (x + w, y + h), (0, 255, 255), 2)\n', (1041, 1090), False, 'import cv2\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
#
# Distributed under terms of the MIT license.
"""
Strategy base class
"""
from abc import ABCMeta, abstractmethod
from tinydb import TinyDB, Query
from node import Node
import json
class Strategy(object):
def __init__(self, this_controller, this_description=None):
self.description = this_description
self.controller = this_controller
self.ledger = TinyDB("ledger.json")
self.db = TinyDB("nodes.json")
self.nodes = []
@abstractmethod
def store_file(self, file_bytes, file_name):
pass
@abstractmethod
def retrieve_file(self, file_name, locations):
pass
@abstractmethod
def get_time(self):
pass
def getNodes(self):
self.nodes = []
for item in self.db:
node = Node(item['mac'],item['ip'],item['port'],item['units'])
self.nodes.append(node)
return self.nodes
def getNodesWithFile(self,filename):
macs = self.ledger.search(Query().file_name == filename)
self.nodes = []
for item in macs:
mac = item["location"]
dbnode = self.db.get(Query().mac == mac)
if(dbnode == None):
continue
node = Node(dbnode['mac'],dbnode['ip'],dbnode['port'],dbnode['units'])
self.nodes.append(node)
return self.nodes
def getFileSize(self, filename):
file = self.ledger.get(Query().file_name == filename)
return file['size']
| [
"tinydb.Query",
"node.Node",
"tinydb.TinyDB"
]
| [((449, 470), 'tinydb.TinyDB', 'TinyDB', (['"""ledger.json"""'], {}), "('ledger.json')\n", (455, 470), False, 'from tinydb import TinyDB, Query\n'), ((489, 509), 'tinydb.TinyDB', 'TinyDB', (['"""nodes.json"""'], {}), "('nodes.json')\n", (495, 509), False, 'from tinydb import TinyDB, Query\n'), ((857, 915), 'node.Node', 'Node', (["item['mac']", "item['ip']", "item['port']", "item['units']"], {}), "(item['mac'], item['ip'], item['port'], item['units'])\n", (861, 915), False, 'from node import Node\n'), ((1296, 1362), 'node.Node', 'Node', (["dbnode['mac']", "dbnode['ip']", "dbnode['port']", "dbnode['units']"], {}), "(dbnode['mac'], dbnode['ip'], dbnode['port'], dbnode['units'])\n", (1300, 1362), False, 'from node import Node\n'), ((1051, 1058), 'tinydb.Query', 'Query', ([], {}), '()\n', (1056, 1058), False, 'from tinydb import TinyDB, Query\n'), ((1491, 1498), 'tinydb.Query', 'Query', ([], {}), '()\n', (1496, 1498), False, 'from tinydb import TinyDB, Query\n'), ((1200, 1207), 'tinydb.Query', 'Query', ([], {}), '()\n', (1205, 1207), False, 'from tinydb import TinyDB, Query\n')] |
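The abstract methods above define the storage contract; a toy concrete strategy could look like the sketch below. The class name and placement policy are illustrative only, and it assumes the records in nodes.json carry the same mac/ip/port/units keys the base class reads.
import time

class SingleNodeStrategy(Strategy):
    """Toy strategy: always place files on the first registered node."""
    def store_file(self, file_bytes, file_name):
        targets = list(self.db)  # TinyDB documents from nodes.json
        if not targets:
            raise RuntimeError("no storage nodes registered")
        target = targets[0]
        # Use the same ledger keys that getNodesWithFile()/getFileSize() query.
        self.ledger.insert({"file_name": file_name,
                            "location": target["mac"],
                            "size": len(file_bytes)})
        return target["mac"]
    def retrieve_file(self, file_name, locations):
        # Transport is out of scope for this sketch.
        return None
    def get_time(self):
        return time.time()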
#-*- encoding:utf-8 -*-
from __future__ import print_function
import sys
try:
reload(sys)
sys.setdefaultencoding('utf-8')
except:
pass
import codecs
from textrank4zh import TextRank4Keyword, TextRank4Sentence
text = codecs.open('../test/doc/01.txt', 'r', 'utf-8').read()
tr4w = TextRank4Keyword()
tr4w.analyze(text=text, lower=True, window=2)   # In py2, text must be a utf-8 encoded str or a unicode object; in py3, a utf-8 encoded bytes or str object
print( '关键词:' )
for item in tr4w.get_keywords(20, word_min_len=1):
print(item.word, item.weight)
print()
print( '关键短语:' )
for phrase in tr4w.get_keyphrases(keywords_num=20, min_occur_num= 2):
print(phrase)
tr4s = TextRank4Sentence()
tr4s.analyze(text=text, lower=True, source = 'all_filters')
print()
print( '摘要:' )
for item in tr4s.get_key_sentences(num=3):
print(item.weight, item.sentence) | [
"codecs.open",
"sys.setdefaultencoding",
"textrank4zh.TextRank4Sentence",
"textrank4zh.TextRank4Keyword"
]
| [((293, 311), 'textrank4zh.TextRank4Keyword', 'TextRank4Keyword', ([], {}), '()\n', (309, 311), False, 'from textrank4zh import TextRank4Keyword, TextRank4Sentence\n'), ((647, 666), 'textrank4zh.TextRank4Sentence', 'TextRank4Sentence', ([], {}), '()\n', (664, 666), False, 'from textrank4zh import TextRank4Keyword, TextRank4Sentence\n'), ((99, 130), 'sys.setdefaultencoding', 'sys.setdefaultencoding', (['"""utf-8"""'], {}), "('utf-8')\n", (121, 130), False, 'import sys\n'), ((231, 278), 'codecs.open', 'codecs.open', (['"""../test/doc/01.txt"""', '"""r"""', '"""utf-8"""'], {}), "('../test/doc/01.txt', 'r', 'utf-8')\n", (242, 278), False, 'import codecs\n')] |
import random
import uuid
import sys
import json
from faker import Factory
from faker.providers.person.fi_FI import Provider as PersonProvider
fake = Factory.create('fi_FI')
email_by_user = {}
users_by_id = {}
def anonymize_users(users):
usernames = set()
emails = set()
for data in users:
if data['model'] != 'users.user':
continue
user = data['fields']
user['password'] = "!"
username = fake.user_name()
while username in usernames:
username = fake.user_name()
usernames.add(username)
user['username'] = username
user['uuid'] = str(uuid.uuid4())
if user['first_name']:
user['first_name'] = fake.first_name()
if user['last_name']:
user['last_name'] = fake.last_name()
user['email'] = fake.email()
email_by_user[data['pk']] = user['email']
users_by_id[data['pk']] = user
def remove_secrets(data):
for model in data:
fields = model['fields']
if model['model'] == 'socialaccount.socialapp':
fields['client_id'] = fake.md5()
fields['secret'] = fake.md5()
elif model['model'] == 'socialaccount.socialtoken':
fields['token_secret'] = fake.md5()
fields['token'] = fake.md5()
elif model['model'] == 'account.emailaddress':
fields['email'] = email_by_user[fields['user']]
elif model['model'] == 'socialaccount.socialaccount':
fields['extra_data'] = '{}'
fields['uid'] = users_by_id[fields['user']]['uuid']
elif model['model'] == 'sessions.session':
fields['session_data'] = "!"
model['pk'] = fake.md5()
data = json.load(sys.stdin)
anonymize_users(data)
remove_secrets(data)
json.dump(data, sys.stdout, indent=4)
| [
"json.load",
"faker.Factory.create",
"json.dump",
"uuid.uuid4"
]
| [((151, 174), 'faker.Factory.create', 'Factory.create', (['"""fi_FI"""'], {}), "('fi_FI')\n", (165, 174), False, 'from faker import Factory\n'), ((1731, 1751), 'json.load', 'json.load', (['sys.stdin'], {}), '(sys.stdin)\n', (1740, 1751), False, 'import json\n'), ((1795, 1832), 'json.dump', 'json.dump', (['data', 'sys.stdout'], {'indent': '(4)'}), '(data, sys.stdout, indent=4)\n', (1804, 1832), False, 'import json\n'), ((639, 651), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (649, 651), False, 'import uuid\n')] |
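The script is meant to be run as a filter over a Django fixture, e.g. python anonymize.py < dump.json > anonymized.json (the file name is an assumption). A self-contained sketch of the same flow with a made-up two-record fixture, in case the functions are imported elsewhere:
# Made-up fixture records, only to exercise anonymize_users() / remove_secrets().
fixture = [
    {"model": "users.user", "pk": 1,
     "fields": {"password": "pbkdf2_sha256$...", "username": "realuser",
                "first_name": "Real", "last_name": "User",
                "email": "real.user@example.com"}},
    {"model": "account.emailaddress", "pk": 7,
     "fields": {"user": 1, "email": "real.user@example.com", "verified": True}},
]
anonymize_users(fixture)
remove_secrets(fixture)
print(json.dumps(fixture, indent=2))  # names, emails and secrets are now fake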
import random
from typing import Optional, Tuple, Union
import numpy as np
import torch
from torch import Tensor
from torch_geometric.utils import coalesce, degree, remove_self_loops
from .num_nodes import maybe_num_nodes
def negative_sampling(edge_index: Tensor,
num_nodes: Optional[Union[int, Tuple[int, int]]] = None,
num_neg_samples: Optional[int] = None,
method: str = "sparse",
force_undirected: bool = False) -> Tensor:
r"""Samples random negative edges of a graph given by :attr:`edge_index`.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int or Tuple[int, int], optional): The number of nodes,
*i.e.* :obj:`max_val + 1` of :attr:`edge_index`.
If given as a tuple, then :obj:`edge_index` is interpreted as a
bipartite graph with shape :obj:`(num_src_nodes, num_dst_nodes)`.
(default: :obj:`None`)
num_neg_samples (int, optional): The (approximate) number of negative
samples to return.
If set to :obj:`None`, will try to return a negative edge for every
positive edge. (default: :obj:`None`)
method (string, optional): The method to use for negative sampling,
*i.e.*, :obj:`"sparse"` or :obj:`"dense"`.
This is a memory/runtime trade-off.
:obj:`"sparse"` will work on any graph of any size, while
:obj:`"dense"` can perform faster true-negative checks.
(default: :obj:`"sparse"`)
force_undirected (bool, optional): If set to :obj:`True`, sampled
negative edges will be undirected. (default: :obj:`False`)
:rtype: LongTensor
"""
assert method in ['sparse', 'dense']
size = num_nodes
bipartite = isinstance(size, (tuple, list))
size = maybe_num_nodes(edge_index) if size is None else size
size = (size, size) if not bipartite else size
force_undirected = False if bipartite else force_undirected
idx, population = edge_index_to_vector(edge_index, size, bipartite,
force_undirected)
if idx.numel() >= population:
return edge_index.new_empty((2, 0))
if num_neg_samples is None:
num_neg_samples = edge_index.size(1)
if force_undirected:
num_neg_samples = num_neg_samples // 2
prob = 1. - idx.numel() / population # Probability to sample a negative.
sample_size = int(1.1 * num_neg_samples / prob) # (Over)-sample size.
neg_idx = None
if method == 'dense':
# The dense version creates a mask of shape `population` to check for
# invalid samples.
mask = idx.new_ones(population, dtype=torch.bool)
mask[idx] = False
for _ in range(3): # Number of tries to sample negative indices.
rnd = sample(population, sample_size, idx.device)
rnd = rnd[mask[rnd]] # Filter true negatives.
neg_idx = rnd if neg_idx is None else torch.cat([neg_idx, rnd])
if neg_idx.numel() >= num_neg_samples:
neg_idx = neg_idx[:num_neg_samples]
break
mask[neg_idx] = False
else: # 'sparse'
# The sparse version checks for invalid samples via `np.isin`.
idx = idx.to('cpu')
for _ in range(3): # Number of tries to sample negative indices.
rnd = sample(population, sample_size, device='cpu')
mask = np.isin(rnd, idx)
if neg_idx is not None:
mask |= np.isin(rnd, neg_idx.to('cpu'))
mask = torch.from_numpy(mask).to(torch.bool)
rnd = rnd[~mask].to(edge_index.device)
neg_idx = rnd if neg_idx is None else torch.cat([neg_idx, rnd])
if neg_idx.numel() >= num_neg_samples:
neg_idx = neg_idx[:num_neg_samples]
break
return vector_to_edge_index(neg_idx, size, bipartite, force_undirected)
def batched_negative_sampling(
edge_index: Tensor,
batch: Union[Tensor, Tuple[Tensor, Tensor]],
num_neg_samples: Optional[int] = None,
method: str = "sparse",
force_undirected: bool = False,
) -> Tensor:
r"""Samples random negative edges of multiple graphs given by
:attr:`edge_index` and :attr:`batch`.
Args:
edge_index (LongTensor): The edge indices.
batch (LongTensor or Tuple[LongTensor, LongTensor]): Batch vector
:math:`\mathbf{b} \in {\{ 0, \ldots, B-1\}}^N`, which assigns each
node to a specific example.
If given as a tuple, then :obj:`edge_index` is interpreted as a
bipartite graph connecting two different node types.
num_neg_samples (int, optional): The number of negative samples to
return. If set to :obj:`None`, will try to return a negative edge
for every positive edge. (default: :obj:`None`)
method (string, optional): The method to use for negative sampling,
*i.e.*, :obj:`"sparse"` or :obj:`"dense"`.
This is a memory/runtime trade-off.
:obj:`"sparse"` will work on any graph of any size, while
:obj:`"dense"` can perform faster true-negative checks.
(default: :obj:`"sparse"`)
force_undirected (bool, optional): If set to :obj:`True`, sampled
negative edges will be undirected. (default: :obj:`False`)
:rtype: LongTensor
"""
if isinstance(batch, Tensor):
src_batch, dst_batch = batch, batch
else:
src_batch, dst_batch = batch[0], batch[1]
split = degree(src_batch[edge_index[0]], dtype=torch.long).tolist()
edge_indices = torch.split(edge_index, split, dim=1)
num_src = degree(src_batch, dtype=torch.long)
cum_src = torch.cat([src_batch.new_zeros(1), num_src.cumsum(0)[:-1]])
if isinstance(batch, Tensor):
num_nodes = num_src.tolist()
cumsum = cum_src
else:
num_dst = degree(dst_batch, dtype=torch.long)
cum_dst = torch.cat([dst_batch.new_zeros(1), num_dst.cumsum(0)[:-1]])
num_nodes = torch.stack([num_src, num_dst], dim=1).tolist()
cumsum = torch.stack([cum_src, cum_dst], dim=1).unsqueeze(-1)
neg_edge_indices = []
for i, edge_index in enumerate(edge_indices):
edge_index = edge_index - cumsum[i]
neg_edge_index = negative_sampling(edge_index, num_nodes[i],
num_neg_samples, method,
force_undirected)
neg_edge_index += cumsum[i]
neg_edge_indices.append(neg_edge_index)
return torch.cat(neg_edge_indices, dim=1)
def structured_negative_sampling(edge_index, num_nodes: Optional[int] = None,
contains_neg_self_loops: bool = True):
r"""Samples a negative edge :obj:`(i,k)` for every positive edge
:obj:`(i,j)` in the graph given by :attr:`edge_index`, and returns it as a
tuple of the form :obj:`(i,j,k)`.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int, optional): The number of nodes, *i.e.*
:obj:`max_val + 1` of :attr:`edge_index`. (default: :obj:`None`)
contains_neg_self_loops (bool, optional): If set to
:obj:`False`, sampled negative edges will not contain self loops.
(default: :obj:`True`)
:rtype: (LongTensor, LongTensor, LongTensor)
"""
num_nodes = maybe_num_nodes(edge_index, num_nodes)
row, col = edge_index.cpu()
pos_idx = row * num_nodes + col
if not contains_neg_self_loops:
loop_idx = torch.arange(num_nodes) * (num_nodes + 1)
pos_idx = torch.cat([pos_idx, loop_idx], dim=0)
rand = torch.randint(num_nodes, (row.size(0), ), dtype=torch.long)
neg_idx = row * num_nodes + rand
mask = torch.from_numpy(np.isin(neg_idx, pos_idx)).to(torch.bool)
rest = mask.nonzero(as_tuple=False).view(-1)
while rest.numel() > 0: # pragma: no cover
tmp = torch.randint(num_nodes, (rest.size(0), ), dtype=torch.long)
rand[rest] = tmp
neg_idx = row[rest] * num_nodes + tmp
mask = torch.from_numpy(np.isin(neg_idx, pos_idx)).to(torch.bool)
rest = rest[mask]
return edge_index[0], edge_index[1], rand.to(edge_index.device)
def structured_negative_sampling_feasible(
edge_index: Tensor, num_nodes: Optional[int] = None,
contains_neg_self_loops: bool = True) -> bool:
r"""Returns :obj:`True` if
:meth:`~torch_geometric.utils.structured_negative_sampling` is feasible
on the graph given by :obj:`edge_index`.
:obj:`~torch_geometric.utils.structured_negative_sampling` is infeasible
if at least one node is connected to all other nodes.
Args:
edge_index (LongTensor): The edge indices.
num_nodes (int, optional): The number of nodes, *i.e.*
:obj:`max_val + 1` of :attr:`edge_index`. (default: :obj:`None`)
contains_neg_self_loops (bool, optional): If set to
:obj:`False`, sampled negative edges will not contain self loops.
(default: :obj:`True`)
:rtype: bool
"""
num_nodes = maybe_num_nodes(edge_index, num_nodes)
max_num_neighbors = num_nodes
edge_index = coalesce(edge_index, num_nodes=num_nodes)
if not contains_neg_self_loops:
edge_index, _ = remove_self_loops(edge_index)
max_num_neighbors -= 1 # Reduce number of valid neighbors
deg = degree(edge_index[0], num_nodes)
# True if there exists no node that is connected to all other nodes.
return bool(torch.all(deg < max_num_neighbors))
###############################################################################
def sample(population: int, k: int, device=None) -> Tensor:
if population <= k:
return torch.arange(population, device=device)
else:
return torch.tensor(random.sample(range(population), k), device=device)
def edge_index_to_vector(
edge_index: Tensor,
size: Tuple[int, int],
bipartite: bool,
force_undirected: bool = False,
) -> Tuple[Tensor, int]:
row, col = edge_index
if bipartite: # No need to account for self-loops.
idx = (row * size[1]).add_(col)
population = size[0] * size[1]
return idx, population
elif force_undirected:
assert size[0] == size[1]
num_nodes = size[0]
# We only operate on the upper triangular matrix:
mask = row < col
row, col = row[mask], col[mask]
offset = torch.arange(1, num_nodes, device=row.device).cumsum(0)[row]
idx = row.mul_(num_nodes).add_(col).sub_(offset)
population = (num_nodes * (num_nodes + 1)) // 2 - num_nodes
return idx, population
else:
assert size[0] == size[1]
num_nodes = size[0]
# We remove self-loops as we do not want to take them into account
# when sampling negative values.
mask = row != col
row, col = row[mask], col[mask]
col[row < col] -= 1
idx = row.mul_(num_nodes - 1).add_(col)
population = num_nodes * num_nodes - num_nodes
return idx, population
def vector_to_edge_index(idx: Tensor, size: Tuple[int, int], bipartite: bool,
force_undirected: bool = False) -> Tensor:
if bipartite: # No need to account for self-loops.
row = idx.div(size[1], rounding_mode='floor')
col = idx % size[1]
return torch.stack([row, col], dim=0)
elif force_undirected:
assert size[0] == size[1]
num_nodes = size[0]
offset = torch.arange(1, num_nodes, device=idx.device).cumsum(0)
end = torch.arange(num_nodes, num_nodes * num_nodes, num_nodes,
device=idx.device)
row = torch.bucketize(idx, end.sub_(offset), right=True)
col = offset[row].add_(idx) % num_nodes
return torch.stack([torch.cat([row, col]), torch.cat([col, row])], 0)
else:
assert size[0] == size[1]
num_nodes = size[0]
row = idx.div(num_nodes - 1, rounding_mode='floor')
col = idx % (num_nodes - 1)
col[row <= col] += 1
return torch.stack([row, col], dim=0)
| [
"torch.split",
"torch_geometric.utils.degree",
"torch.all",
"torch.stack",
"numpy.isin",
"torch.from_numpy",
"torch_geometric.utils.remove_self_loops",
"torch.arange",
"torch_geometric.utils.coalesce",
"torch.cat"
]
| [((5711, 5748), 'torch.split', 'torch.split', (['edge_index', 'split'], {'dim': '(1)'}), '(edge_index, split, dim=1)\n', (5722, 5748), False, 'import torch\n'), ((5764, 5799), 'torch_geometric.utils.degree', 'degree', (['src_batch'], {'dtype': 'torch.long'}), '(src_batch, dtype=torch.long)\n', (5770, 5799), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((6667, 6701), 'torch.cat', 'torch.cat', (['neg_edge_indices'], {'dim': '(1)'}), '(neg_edge_indices, dim=1)\n', (6676, 6701), False, 'import torch\n'), ((9298, 9339), 'torch_geometric.utils.coalesce', 'coalesce', (['edge_index'], {'num_nodes': 'num_nodes'}), '(edge_index, num_nodes=num_nodes)\n', (9306, 9339), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((9509, 9541), 'torch_geometric.utils.degree', 'degree', (['edge_index[0]', 'num_nodes'], {}), '(edge_index[0], num_nodes)\n', (9515, 9541), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((5999, 6034), 'torch_geometric.utils.degree', 'degree', (['dst_batch'], {'dtype': 'torch.long'}), '(dst_batch, dtype=torch.long)\n', (6005, 6034), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((7712, 7749), 'torch.cat', 'torch.cat', (['[pos_idx, loop_idx]'], {'dim': '(0)'}), '([pos_idx, loop_idx], dim=0)\n', (7721, 7749), False, 'import torch\n'), ((9401, 9430), 'torch_geometric.utils.remove_self_loops', 'remove_self_loops', (['edge_index'], {}), '(edge_index)\n', (9418, 9430), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((9631, 9665), 'torch.all', 'torch.all', (['(deg < max_num_neighbors)'], {}), '(deg < max_num_neighbors)\n', (9640, 9665), False, 'import torch\n'), ((9850, 9889), 'torch.arange', 'torch.arange', (['population'], {'device': 'device'}), '(population, device=device)\n', (9862, 9889), False, 'import torch\n'), ((11503, 11533), 'torch.stack', 'torch.stack', (['[row, col]'], {'dim': '(0)'}), '([row, col], dim=0)\n', (11514, 11533), False, 'import torch\n'), ((3509, 3526), 'numpy.isin', 'np.isin', (['rnd', 'idx'], {}), '(rnd, idx)\n', (3516, 3526), True, 'import numpy as np\n'), ((5632, 5682), 'torch_geometric.utils.degree', 'degree', (['src_batch[edge_index[0]]'], {'dtype': 'torch.long'}), '(src_batch[edge_index[0]], dtype=torch.long)\n', (5638, 5682), False, 'from torch_geometric.utils import coalesce, degree, remove_self_loops\n'), ((7652, 7675), 'torch.arange', 'torch.arange', (['num_nodes'], {}), '(num_nodes)\n', (7664, 7675), False, 'import torch\n'), ((11712, 11788), 'torch.arange', 'torch.arange', (['num_nodes', '(num_nodes * num_nodes)', 'num_nodes'], {'device': 'idx.device'}), '(num_nodes, num_nodes * num_nodes, num_nodes, device=idx.device)\n', (11724, 11788), False, 'import torch\n'), ((12221, 12251), 'torch.stack', 'torch.stack', (['[row, col]'], {'dim': '(0)'}), '([row, col], dim=0)\n', (12232, 12251), False, 'import torch\n'), ((3045, 3070), 'torch.cat', 'torch.cat', (['[neg_idx, rnd]'], {}), '([neg_idx, rnd])\n', (3054, 3070), False, 'import torch\n'), ((3777, 3802), 'torch.cat', 'torch.cat', (['[neg_idx, rnd]'], {}), '([neg_idx, rnd])\n', (3786, 3802), False, 'import torch\n'), ((6134, 6172), 'torch.stack', 'torch.stack', (['[num_src, num_dst]'], {'dim': '(1)'}), '([num_src, num_dst], dim=1)\n', (6145, 6172), False, 'import torch\n'), ((6199, 6237), 'torch.stack', 'torch.stack', (['[cum_src, cum_dst]'], {'dim': '(1)'}), '([cum_src, cum_dst], dim=1)\n', (6210, 6237), False, 'import torch\n'), 
((7888, 7913), 'numpy.isin', 'np.isin', (['neg_idx', 'pos_idx'], {}), '(neg_idx, pos_idx)\n', (7895, 7913), True, 'import numpy as np\n'), ((3638, 3660), 'torch.from_numpy', 'torch.from_numpy', (['mask'], {}), '(mask)\n', (3654, 3660), False, 'import torch\n'), ((8206, 8231), 'numpy.isin', 'np.isin', (['neg_idx', 'pos_idx'], {}), '(neg_idx, pos_idx)\n', (8213, 8231), True, 'import numpy as np\n'), ((11642, 11687), 'torch.arange', 'torch.arange', (['(1)', 'num_nodes'], {'device': 'idx.device'}), '(1, num_nodes, device=idx.device)\n', (11654, 11687), False, 'import torch\n'), ((11957, 11978), 'torch.cat', 'torch.cat', (['[row, col]'], {}), '([row, col])\n', (11966, 11978), False, 'import torch\n'), ((11980, 12001), 'torch.cat', 'torch.cat', (['[col, row]'], {}), '([col, row])\n', (11989, 12001), False, 'import torch\n'), ((10566, 10611), 'torch.arange', 'torch.arange', (['(1)', 'num_nodes'], {'device': 'row.device'}), '(1, num_nodes, device=row.device)\n', (10578, 10611), False, 'import torch\n')] |
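The docstrings above spell out the parameters; for orientation, here is a tiny end-to-end call through the public torch_geometric.utils entry points, where these helpers are re-exported:
import torch
from torch_geometric.utils import negative_sampling, structured_negative_sampling

# A directed toy graph with 4 nodes and 4 edges.
edge_index = torch.tensor([[0, 0, 1, 2],
                           [1, 2, 3, 3]])

# Up to one negative (non-existing) edge per positive edge.
neg_edge_index = negative_sampling(edge_index, num_nodes=4)

# (i, j, k): (i, j) is a positive edge, (i, k) a sampled negative for the same i.
i, j, k = structured_negative_sampling(edge_index, num_nodes=4)
print(neg_edge_index)
print(torch.stack([i, j, k]))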
import sys
if sys.version_info[:2] >= (3, 0):
# pylint: disable=E0611,F0401,I0011
from urllib.request import build_opener
else:
from urllib2 import build_opener
from . import __version__
urls = {
'gdata': "https://www.googleapis.com/youtube/v3/",
'watchv': "http://www.youtube.com/watch?v=%s",
'playlist': ('http://www.youtube.com/list_ajax?'
'style=json&action_get_list=1&list=%s'),
'thumb': "http://i.ytimg.com/vi/%s/default.jpg",
'bigthumb': "http://i.ytimg.com/vi/%s/mqdefault.jpg",
'bigthumbhd': "http://i.ytimg.com/vi/%s/hqdefault.jpg",
# For internal backend
'vidinfo': ('https://www.youtube.com/get_video_info?video_id=%s&'
'eurl=https://youtube.googleapis.com/v/%s&sts=%s'),
'embed': "https://youtube.com/embed/%s"
}
api_key = "<KEY>"
user_agent = "pafy " + __version__
lifespan = 60 * 60 * 5 # 5 hours
opener = build_opener()
opener.addheaders = [('User-Agent', user_agent)]
cache = {}
def_ydl_opts = {'quiet': True, 'prefer_insecure': False, 'no_warnings': True}
# The following are specific to the internal backend
UEFSM = 'url_encoded_fmt_stream_map'
AF = 'adaptive_fmts'
jsplayer = r';ytplayer\.config\s*=\s*({.*?});'
itags = {
'5': ('320x240', 'flv', "normal", ''),
'17': ('176x144', '3gp', "normal", ''),
'18': ('640x360', 'mp4', "normal", ''),
'22': ('1280x720', 'mp4', "normal", ''),
'34': ('640x360', 'flv', "normal", ''),
'35': ('854x480', 'flv', "normal", ''),
'36': ('320x240', '3gp', "normal", ''),
'37': ('1920x1080', 'mp4', "normal", ''),
'38': ('4096x3072', 'mp4', "normal", '4:3 hi-res'),
'43': ('640x360', 'webm', "normal", ''),
'44': ('854x480', 'webm', "normal", ''),
'45': ('1280x720', 'webm', "normal", ''),
'46': ('1920x1080', 'webm', "normal", ''),
'82': ('640x360-3D', 'mp4', "normal", ''),
'83': ('640x480-3D', 'mp4', 'normal', ''),
'84': ('1280x720-3D', 'mp4', "normal", ''),
'100': ('640x360-3D', 'webm', "normal", ''),
'102': ('1280x720-3D', 'webm', "normal", ''),
'133': ('426x240', 'm4v', 'video', ''),
'134': ('640x360', 'm4v', 'video', ''),
'135': ('854x480', 'm4v', 'video', ''),
'136': ('1280x720', 'm4v', 'video', ''),
'137': ('1920x1080', 'm4v', 'video', ''),
'138': ('4096x3072', 'm4v', 'video', ''),
'139': ('48k', 'm4a', 'audio', ''),
'140': ('128k', 'm4a', 'audio', ''),
'141': ('256k', 'm4a', 'audio', ''),
'160': ('256x144', 'm4v', 'video', ''),
'167': ('640x480', 'webm', 'video', ''),
'168': ('854x480', 'webm', 'video', ''),
'169': ('1280x720', 'webm', 'video', ''),
'170': ('1920x1080', 'webm', 'video', ''),
'171': ('128k', 'ogg', 'audio', ''),
'172': ('192k', 'ogg', 'audio', ''),
'218': ('854x480', 'webm', 'video', 'VP8'),
'219': ('854x480', 'webm', 'video', 'VP8'),
'242': ('360x240', 'webm', 'video', 'VP9'),
'243': ('480x360', 'webm', 'video', 'VP9'),
'244': ('640x480', 'webm', 'video', 'VP9 low'),
'245': ('640x480', 'webm', 'video', 'VP9 med'),
'246': ('640x480', 'webm', 'video', 'VP9 high'),
'247': ('720x480', 'webm', 'video', 'VP9'),
'248': ('1920x1080', 'webm', 'video', 'VP9'),
'249': ('48k', 'opus', 'audio', 'Opus'),
'250': ('56k', 'opus', 'audio', 'Opus'),
'251': ('128k', 'opus', 'audio', 'Opus'),
'256': ('192k', 'm4a', 'audio', '6-channel'),
'258': ('320k', 'm4a', 'audio', '6-channel'),
'264': ('2560x1440', 'm4v', 'video', ''),
'266': ('3840x2160', 'm4v', 'video', 'AVC'),
'271': ('1920x1280', 'webm', 'video', 'VP9'),
'272': ('3414x1080', 'webm', 'video', 'VP9'),
'278': ('256x144', 'webm', 'video', 'VP9'),
'298': ('1280x720', 'm4v', 'video', '60fps'),
'299': ('1920x1080', 'm4v', 'video', '60fps'),
'302': ('1280x720', 'webm', 'video', 'VP9'),
'303': ('1920x1080', 'webm', 'video', 'VP9'),
}
| [
"urllib2.build_opener"
]
| [((906, 920), 'urllib2.build_opener', 'build_opener', ([], {}), '()\n', (918, 920), False, 'from urllib2 import build_opener\n')] |
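The itags table maps YouTube format codes to (resolution or bitrate, container, kind, note) tuples, so a stream's properties can be looked up directly:
# Simple lookup into the itags mapping defined above.
resolution, extension, kind, note = itags['22']
print(resolution, extension, kind)  # 1280x720 mp4 normal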
from tkinter import *
from PIL import ImageGrab
import numpy as np
import cv2
import time
import pyautogui as pg
import DirectInputRoutines as DIR
from LogKey import key_check
last_time = time.time()
one_hot = [0, 0, 0, 0, 0, 0]
hash_dict = {'w':0, 's':1, 'a':2, 'd':3, 'c':4, 'v':5}
X = []
y = []
def auto_canny(image, sigma=0.33):
# compute the median of the single channel pixel intensities
v = np.median(image)
# apply automatic Canny edge detection using the computed median
lower = int(max(0, (1.0 - sigma) * v))
upper = int(min(255, (1.0 + sigma) * v))
edged = cv2.Canny(image, lower, upper)
# return the edged image
return edged
def process_img(original_image):
processed_img = cv2.cvtColor(original_image, cv2.COLOR_BGR2GRAY)
processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
#processed_img = cv2.Canny(processed_img, threshold1=200, threshold2=300)
vertices = np.array([[10,500],[10,300],[300,200],[500,200],[800,300],[800,500],
], np.int32)
processed_img = cv2.GaussianBlur(processed_img,(5,5),0)
processed_img = roi(processed_img, [vertices])
# more info: http://docs.opencv.org/3.0-beta/doc/py_tutorials/py_imgproc/py_houghlines/py_houghlines.html
# edges rho theta thresh # min length, max gap:
#lines = cv2.HoughLinesP(processed_img, 1, np.pi/180, 180, 20, 15)
#draw_lines(processed_img,lines)
return processed_img
def roi(img, vertices):
#blank mask:
mask = np.zeros_like(img)
# fill the mask
cv2.fillPoly(mask, vertices, 255)
# now only show the area that is the mask
masked = cv2.bitwise_and(img, mask)
return masked
def draw_lines(img,lines):
for line in lines:
coords = line[0]
cv2.line(img, (coords[0], coords[1]), (coords[2], coords[3]), [255,255,255], 3)
def change_tab():
pg.hotkey("alt","tab")
def send_key(e):
hash = {"w":DIR.W, "a":DIR.A, "s":DIR.S, "d":DIR.D}
return hash[e.keysym]
def keyup(e):
if(e.keysym == "Alt_L" or e.keysym == "Tab"):
return
#print('down', e.keysym)
change_tab()
DIR.ReleaseKey(send_key(e))
change_tab()
global last_time
one_hot[hash_dict[e.keysym]] = 0
temp = list(one_hot)
printscreen = np.array(ImageGrab.grab(bbox=(0,40,800,640)))
printscreen = process_img(printscreen)
print('loop took {} seconds'.format(time.time()-last_time))
print([printscreen, temp])
last_time = time.time()
X.append(printscreen)
y.append(temp)
#cv2.imshow("image", printscreen)
def keydown(e):
#print('up', e.keysym)
if(e.keysym == "Alt_L" or e.keysym == "Tab"):
return
change_tab()
DIR.PressKey(send_key(e))
change_tab()
global last_time
one_hot[hash_dict[e.keysym]] = 1
temp = list(one_hot)
printscreen = np.array(ImageGrab.grab(bbox=(0,40,800,680)))
printscreen = process_img(printscreen)
print('loop took {} seconds'.format(time.time()-last_time))
print([printscreen,temp])
last_time = time.time()
X.append(printscreen)
y.append(temp)
root = Tk()
frame = Frame(root, width=100, height=100)
frame.bind("<KeyPress>", keydown)
frame.bind("<KeyRelease>", keyup)
frame.pack()
frame.focus_set()
root.mainloop()
np.save("X.npy", X)
np.save("y.npy", y) | [
"cv2.fillPoly",
"pyautogui.hotkey",
"numpy.median",
"cv2.GaussianBlur",
"PIL.ImageGrab.grab",
"cv2.line",
"numpy.zeros_like",
"cv2.bitwise_and",
"numpy.array",
"cv2.cvtColor",
"cv2.Canny",
"time.time",
"numpy.save"
]
| [((196, 207), 'time.time', 'time.time', ([], {}), '()\n', (205, 207), False, 'import time\n'), ((3513, 3532), 'numpy.save', 'np.save', (['"""X.npy"""', 'X'], {}), "('X.npy', X)\n", (3520, 3532), True, 'import numpy as np\n'), ((3534, 3553), 'numpy.save', 'np.save', (['"""y.npy"""', 'y'], {}), "('y.npy', y)\n", (3541, 3553), True, 'import numpy as np\n'), ((419, 435), 'numpy.median', 'np.median', (['image'], {}), '(image)\n', (428, 435), True, 'import numpy as np\n'), ((597, 627), 'cv2.Canny', 'cv2.Canny', (['image', 'lower', 'upper'], {}), '(image, lower, upper)\n', (606, 627), False, 'import cv2\n'), ((725, 773), 'cv2.cvtColor', 'cv2.cvtColor', (['original_image', 'cv2.COLOR_BGR2GRAY'], {}), '(original_image, cv2.COLOR_BGR2GRAY)\n', (737, 773), False, 'import cv2\n'), ((795, 851), 'cv2.Canny', 'cv2.Canny', (['processed_img'], {'threshold1': '(200)', 'threshold2': '(300)'}), '(processed_img, threshold1=200, threshold2=300)\n', (804, 851), False, 'import cv2\n'), ((873, 929), 'cv2.Canny', 'cv2.Canny', (['processed_img'], {'threshold1': '(200)', 'threshold2': '(300)'}), '(processed_img, threshold1=200, threshold2=300)\n', (882, 929), False, 'import cv2\n'), ((1031, 1126), 'numpy.array', 'np.array', (['[[10, 500], [10, 300], [300, 200], [500, 200], [800, 300], [800, 500]]', 'np.int32'], {}), '([[10, 500], [10, 300], [300, 200], [500, 200], [800, 300], [800, \n 500]], np.int32)\n', (1039, 1126), True, 'import numpy as np\n'), ((1162, 1204), 'cv2.GaussianBlur', 'cv2.GaussianBlur', (['processed_img', '(5, 5)', '(0)'], {}), '(processed_img, (5, 5), 0)\n', (1178, 1204), False, 'import cv2\n'), ((1679, 1697), 'numpy.zeros_like', 'np.zeros_like', (['img'], {}), '(img)\n', (1692, 1697), True, 'import numpy as np\n'), ((1724, 1757), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', 'vertices', '(255)'], {}), '(mask, vertices, 255)\n', (1736, 1757), False, 'import cv2\n'), ((1819, 1845), 'cv2.bitwise_and', 'cv2.bitwise_and', (['img', 'mask'], {}), '(img, mask)\n', (1834, 1845), False, 'import cv2\n'), ((2062, 2085), 'pyautogui.hotkey', 'pg.hotkey', (['"""alt"""', '"""tab"""'], {}), "('alt', 'tab')\n", (2071, 2085), True, 'import pyautogui as pg\n'), ((2684, 2695), 'time.time', 'time.time', ([], {}), '()\n', (2693, 2695), False, 'import time\n'), ((3274, 3285), 'time.time', 'time.time', ([], {}), '()\n', (3283, 3285), False, 'import time\n'), ((1956, 2042), 'cv2.line', 'cv2.line', (['img', '(coords[0], coords[1])', '(coords[2], coords[3])', '[255, 255, 255]', '(3)'], {}), '(img, (coords[0], coords[1]), (coords[2], coords[3]), [255, 255, \n 255], 3)\n', (1964, 2042), False, 'import cv2\n'), ((2489, 2527), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {'bbox': '(0, 40, 800, 640)'}), '(bbox=(0, 40, 800, 640))\n', (2503, 2527), False, 'from PIL import ImageGrab\n'), ((3080, 3118), 'PIL.ImageGrab.grab', 'ImageGrab.grab', ([], {'bbox': '(0, 40, 800, 680)'}), '(bbox=(0, 40, 800, 680))\n', (3094, 3118), False, 'from PIL import ImageGrab\n'), ((2611, 2622), 'time.time', 'time.time', ([], {}), '()\n', (2620, 2622), False, 'import time\n'), ((3202, 3213), 'time.time', 'time.time', ([], {}), '()\n', (3211, 3213), False, 'import time\n')] |
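A quick way to sanity-check the frame-processing helpers above outside the game loop; the image path is a placeholder, and the hard-coded ROI polygon still assumes a frame of roughly 800x600 pixels.
# Standalone check of auto_canny() / process_img() on one saved frame.
import cv2

frame = cv2.imread("sample_frame.png")  # placeholder path
if frame is None:
    raise SystemExit("save a test frame as sample_frame.png first")

edges = process_img(frame)  # ROI-masked Canny edges
auto = auto_canny(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))

cv2.imshow("process_img", edges)
cv2.imshow("auto_canny", auto)
cv2.waitKey(0)
cv2.destroyAllWindows()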
# Generated by Django 2.2.5 on 2019-10-05 23:22
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Password',
fields=[
('id', models.IntegerField(primary_key=True, serialize=False, unique=True)),
('website', models.CharField(max_length=128)),
('username', models.CharField(max_length=128)),
('pwd', models.CharField(max_length=128)),
('time_add', models.DateTimeField(auto_now_add=True, null=True)),
('time_modify', models.DateTimeField(auto_now=True)),
],
options={
'db_table': 'password_tab',
},
),
]
| [
"django.db.models.DateTimeField",
"django.db.models.CharField",
"django.db.models.IntegerField"
]
| [((304, 371), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'primary_key': '(True)', 'serialize': '(False)', 'unique': '(True)'}), '(primary_key=True, serialize=False, unique=True)\n', (323, 371), False, 'from django.db import migrations, models\n'), ((402, 434), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (418, 434), False, 'from django.db import migrations, models\n'), ((466, 498), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (482, 498), False, 'from django.db import migrations, models\n'), ((525, 557), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)'}), '(max_length=128)\n', (541, 557), False, 'from django.db import migrations, models\n'), ((589, 639), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'null': '(True)'}), '(auto_now_add=True, null=True)\n', (609, 639), False, 'from django.db import migrations, models\n'), ((674, 709), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (694, 709), False, 'from django.db import migrations, models\n')] |
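For readability, the model that this migration corresponds to looks roughly like the following, reconstructed from the field list above; the app's actual models.py may differ in minor options.
from django.db import models

class Password(models.Model):
    id = models.IntegerField(primary_key=True, unique=True)
    website = models.CharField(max_length=128)
    username = models.CharField(max_length=128)
    pwd = models.CharField(max_length=128)
    time_add = models.DateTimeField(auto_now_add=True, null=True)
    time_modify = models.DateTimeField(auto_now=True)

    class Meta:
        db_table = 'password_tab'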
import pytest
from onnx import TensorProto
from onnx import helper as oh
import finn.core.onnx_exec as oxe
from finn.core.modelwrapper import ModelWrapper
from finn.transformation.streamline.reorder import MoveTransposePastJoinAdd
from finn.util.basic import gen_finn_dt_tensor
def create_model(perm):
if perm == [0, 3, 1, 2]:
in_shape = [1, 128, 1, 256]
out_shape = [1, 256, 128, 1]
if perm == [0, 2, 3, 1]:
in_shape = [1, 256, 128, 1]
out_shape = [1, 128, 1, 256]
Transpose1_node = oh.make_node(
"Transpose", inputs=["in_transpose1"], outputs=["out_transpose1"], perm=perm
)
Transpose2_node = oh.make_node(
"Transpose", inputs=["in_transpose2"], outputs=["out_transpose2"], perm=perm
)
Join1_node = oh.make_node(
"Add", inputs=["out_transpose1", "out_transpose2"], outputs=["out_join1"]
)
in_transpose1 = oh.make_tensor_value_info(
"in_transpose1", TensorProto.FLOAT, in_shape
)
in_transpose2 = oh.make_tensor_value_info(
"in_transpose2", TensorProto.FLOAT, in_shape
)
out_transpose1 = oh.make_tensor_value_info(
"out_transpose1", TensorProto.FLOAT, out_shape
)
out_transpose2 = oh.make_tensor_value_info(
"out_transpose2", TensorProto.FLOAT, out_shape
)
out_join1 = oh.make_tensor_value_info("out_join1", TensorProto.FLOAT, out_shape)
graph = oh.make_graph(
nodes=[Transpose1_node, Transpose2_node, Join1_node],
name="test_graph",
inputs=[in_transpose1, in_transpose2],
outputs=[out_join1],
value_info=[
out_transpose1,
out_transpose2,
],
)
onnx_model = oh.make_model(graph, producer_name="test_model")
model = ModelWrapper(onnx_model)
return model
# Permutation of transpose node
@pytest.mark.parametrize("perm", [[0, 3, 1, 2], [0, 2, 3, 1]])
def test_move_identical_op_past_join_op(perm):
model = create_model(perm)
# Create input data
input0_tensor_name = model.graph.input[0].name
input1_tensor_name = model.graph.input[1].name
# Note: it is assumed that both tensors have the same shape and data type
input_shape = model.get_tensor_shape(input0_tensor_name)
input_dtype = model.get_tensor_datatype(input0_tensor_name)
input_val = gen_finn_dt_tensor(input_dtype, input_shape)
input_dict = {}
input_dict[input0_tensor_name] = input_val
input_dict[input1_tensor_name] = input_val
model_transformed = model.transform(MoveTransposePastJoinAdd())
assert oxe.compare_execution(model, model_transformed, input_dict)
# Check if order changed
node0_input0_model = model.find_consumers(model.graph.input[0].name)[0].op_type
node1_input1_model = model.find_consumers(model.graph.input[1].name)[0].op_type
node0_input0_model_transformed = model_transformed.find_consumers(
model_transformed.graph.input[0].name
)[0].op_type
node1_input1_model_transformed = model_transformed.find_consumers(
model_transformed.graph.input[1].name
)[0].op_type
assert node0_input0_model != node0_input0_model_transformed
assert node1_input1_model != node1_input1_model_transformed
| [
"onnx.helper.make_graph",
"onnx.helper.make_node",
"finn.core.onnx_exec.compare_execution",
"finn.util.basic.gen_finn_dt_tensor",
"onnx.helper.make_tensor_value_info",
"onnx.helper.make_model",
"pytest.mark.parametrize",
"finn.transformation.streamline.reorder.MoveTransposePastJoinAdd",
"finn.core.modelwrapper.ModelWrapper"
]
| [((1846, 1907), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""perm"""', '[[0, 3, 1, 2], [0, 2, 3, 1]]'], {}), "('perm', [[0, 3, 1, 2], [0, 2, 3, 1]])\n", (1869, 1907), False, 'import pytest\n'), ((533, 628), 'onnx.helper.make_node', 'oh.make_node', (['"""Transpose"""'], {'inputs': "['in_transpose1']", 'outputs': "['out_transpose1']", 'perm': 'perm'}), "('Transpose', inputs=['in_transpose1'], outputs=[\n 'out_transpose1'], perm=perm)\n", (545, 628), True, 'from onnx import helper as oh\n'), ((661, 756), 'onnx.helper.make_node', 'oh.make_node', (['"""Transpose"""'], {'inputs': "['in_transpose2']", 'outputs': "['out_transpose2']", 'perm': 'perm'}), "('Transpose', inputs=['in_transpose2'], outputs=[\n 'out_transpose2'], perm=perm)\n", (673, 756), True, 'from onnx import helper as oh\n'), ((784, 876), 'onnx.helper.make_node', 'oh.make_node', (['"""Add"""'], {'inputs': "['out_transpose1', 'out_transpose2']", 'outputs': "['out_join1']"}), "('Add', inputs=['out_transpose1', 'out_transpose2'], outputs=[\n 'out_join1'])\n", (796, 876), True, 'from onnx import helper as oh\n'), ((907, 978), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""in_transpose1"""', 'TensorProto.FLOAT', 'in_shape'], {}), "('in_transpose1', TensorProto.FLOAT, in_shape)\n", (932, 978), True, 'from onnx import helper as oh\n'), ((1013, 1084), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""in_transpose2"""', 'TensorProto.FLOAT', 'in_shape'], {}), "('in_transpose2', TensorProto.FLOAT, in_shape)\n", (1038, 1084), True, 'from onnx import helper as oh\n'), ((1120, 1193), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_transpose1"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_transpose1', TensorProto.FLOAT, out_shape)\n", (1145, 1193), True, 'from onnx import helper as oh\n'), ((1229, 1302), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_transpose2"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_transpose2', TensorProto.FLOAT, out_shape)\n", (1254, 1302), True, 'from onnx import helper as oh\n'), ((1333, 1401), 'onnx.helper.make_tensor_value_info', 'oh.make_tensor_value_info', (['"""out_join1"""', 'TensorProto.FLOAT', 'out_shape'], {}), "('out_join1', TensorProto.FLOAT, out_shape)\n", (1358, 1401), True, 'from onnx import helper as oh\n'), ((1415, 1616), 'onnx.helper.make_graph', 'oh.make_graph', ([], {'nodes': '[Transpose1_node, Transpose2_node, Join1_node]', 'name': '"""test_graph"""', 'inputs': '[in_transpose1, in_transpose2]', 'outputs': '[out_join1]', 'value_info': '[out_transpose1, out_transpose2]'}), "(nodes=[Transpose1_node, Transpose2_node, Join1_node], name=\n 'test_graph', inputs=[in_transpose1, in_transpose2], outputs=[out_join1\n ], value_info=[out_transpose1, out_transpose2])\n", (1428, 1616), True, 'from onnx import helper as oh\n'), ((1707, 1755), 'onnx.helper.make_model', 'oh.make_model', (['graph'], {'producer_name': '"""test_model"""'}), "(graph, producer_name='test_model')\n", (1720, 1755), True, 'from onnx import helper as oh\n'), ((1768, 1792), 'finn.core.modelwrapper.ModelWrapper', 'ModelWrapper', (['onnx_model'], {}), '(onnx_model)\n', (1780, 1792), False, 'from finn.core.modelwrapper import ModelWrapper\n'), ((2333, 2377), 'finn.util.basic.gen_finn_dt_tensor', 'gen_finn_dt_tensor', (['input_dtype', 'input_shape'], {}), '(input_dtype, input_shape)\n', (2351, 2377), False, 'from finn.util.basic import gen_finn_dt_tensor\n'), ((2573, 2632), 'finn.core.onnx_exec.compare_execution', 
'oxe.compare_execution', (['model', 'model_transformed', 'input_dict'], {}), '(model, model_transformed, input_dict)\n', (2594, 2632), True, 'import finn.core.onnx_exec as oxe\n'), ((2533, 2559), 'finn.transformation.streamline.reorder.MoveTransposePastJoinAdd', 'MoveTransposePastJoinAdd', ([], {}), '()\n', (2557, 2559), False, 'from finn.transformation.streamline.reorder import MoveTransposePastJoinAdd\n')] |
import quandl
import math
import numpy as np
from sklearn import preprocessing, cross_validation, svm
from sklearn.linear_model import LinearRegression
import pickle
import datetime
from matplotlib import style
import matplotlib.pyplot as plot
# Config
isLoadFromLocal = True
quandl.ApiConfig.api_key = '<KEY>'
style.use('ggplot')
# Loading data
if isLoadFromLocal:
df = pickle.load(open("DataFromQuandl_Stock_Chap2.pickle", "rb"))
else:
df = quandl.get('WIKI/GOOGL')
pickle.dump(df, open("DataFromQuandl_Stock_Chap2.pickle", "wb+"))
# Data pre-processing
df['HL_PCT'] = (df['Adj. High'] - df['Adj. Close']) / df['Adj. Close']
df['PCT_Change'] = (df['Adj. Close'] - df['Adj. Open']) / df['Adj. Open']
df = df[['Adj. Close', 'HL_PCT', 'PCT_Change', 'Adj. Volume']]
forecastCol = 'Adj. Close'
df.fillna(-99999, inplace=True)
forecastOut = int(math.ceil(0.01*len(df)))
df['label'] = df[forecastCol].shift(-forecastOut)
# df['label'].plot()
# df[forecastCol].plot()
# plot.legend(loc = 4)
# plot.show()
x = np.array(df.drop(['label'], 1))
print(x)
x = preprocessing.scale(x)
print(x)
xLately = x[-forecastOut:]
x = x[:-forecastOut]
df.dropna(inplace = True)
y = np.array(df['label'])
# Regression
x_train, x_test, y_train, y_test = cross_validation.train_test_split(x, y, test_size=0.1)
# classifier = svm.SVR(kernel='linear') # SVM SVR
classifier = LinearRegression(n_jobs=3) # Linear Regression
classifier.fit(x_train, y_train)
accuracy = classifier.score(x_test, y_test)
forecastSet = classifier.predict(xLately)
print('Accuracy is ', accuracy, '\nForecasted values are ', forecastSet, '\nNumber of values is ', forecastOut)
df['Forecast'] = np.nan
lastDate = df.iloc[-1].name
print(lastDate)
lastTime = lastDate.timestamp()
print(lastTime)
oneDay = 24 * 60 * 60 # seconds in a day
nextTime = lastTime + oneDay
for iter in forecastSet:
nextDate = datetime.datetime.fromtimestamp(nextTime)
nextTime += oneDay
df.loc[nextDate] = [np.nan for _ in range(len(df.columns) - 1)] + [iter]
df['Adj. Close'].plot()
df['Forecast'].plot()
plot.legend(loc = 4)
plot.xlabel('Date')
plot.ylabel('Price')
plot.show() | [
"datetime.datetime.fromtimestamp",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.xlabel",
"numpy.array",
"matplotlib.style.use",
"quandl.get",
"sklearn.cross_validation.train_test_split",
"sklearn.linear_model.LinearRegression",
"sklearn.preprocessing.scale",
"matplotlib.pyplot.show"
]
| [((312, 331), 'matplotlib.style.use', 'style.use', (['"""ggplot"""'], {}), "('ggplot')\n", (321, 331), False, 'from matplotlib import style\n'), ((1072, 1094), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['x'], {}), '(x)\n', (1091, 1094), False, 'from sklearn import preprocessing, cross_validation, svm\n'), ((1182, 1203), 'numpy.array', 'np.array', (["df['label']"], {}), "(df['label'])\n", (1190, 1203), True, 'import numpy as np\n'), ((1254, 1308), 'sklearn.cross_validation.train_test_split', 'cross_validation.train_test_split', (['x', 'y'], {'test_size': '(0.1)'}), '(x, y, test_size=0.1)\n', (1287, 1308), False, 'from sklearn import preprocessing, cross_validation, svm\n'), ((1374, 1400), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {'n_jobs': '(3)'}), '(n_jobs=3)\n', (1390, 1400), False, 'from sklearn.linear_model import LinearRegression\n'), ((2070, 2088), 'matplotlib.pyplot.legend', 'plot.legend', ([], {'loc': '(4)'}), '(loc=4)\n', (2081, 2088), True, 'import matplotlib.pyplot as plot\n'), ((2091, 2110), 'matplotlib.pyplot.xlabel', 'plot.xlabel', (['"""Date"""'], {}), "('Date')\n", (2102, 2110), True, 'import matplotlib.pyplot as plot\n'), ((2111, 2131), 'matplotlib.pyplot.ylabel', 'plot.ylabel', (['"""Price"""'], {}), "('Price')\n", (2122, 2131), True, 'import matplotlib.pyplot as plot\n'), ((2132, 2143), 'matplotlib.pyplot.show', 'plot.show', ([], {}), '()\n', (2141, 2143), True, 'import matplotlib.pyplot as plot\n'), ((454, 478), 'quandl.get', 'quandl.get', (['"""WIKI/GOOGL"""'], {}), "('WIKI/GOOGL')\n", (464, 478), False, 'import quandl\n'), ((1881, 1922), 'datetime.datetime.fromtimestamp', 'datetime.datetime.fromtimestamp', (['nextTime'], {}), '(nextTime)\n', (1912, 1922), False, 'import datetime\n')] |
import random
import sys
ntables = 100
ncols = 100
nrows = 10000
def printstderr(s):
sys.stderr.write(s + '\n')
sys.stderr.flush()
def get_value():
return random.randint(-99999999, 99999999)
for t in range(ntables):
printstderr(f'{t}/{ntables}')
print(f"create table x ({','.join(['x int'] * ncols)});")
for r in range(nrows):
print(f"insert into _last ({','.join(['x'] * ncols)}) values (", end='')
for c in range(ncols):
print(get_value(), end=('' if c==ncols-1 else ','))
print(');')
# 10 min to generate
# 3 min to process | [
"sys.stderr.write",
"sys.stderr.flush",
"random.randint"
]
| [((89, 115), 'sys.stderr.write', 'sys.stderr.write', (["(s + '\\n')"], {}), "(s + '\\n')\n", (105, 115), False, 'import sys\n'), ((118, 136), 'sys.stderr.flush', 'sys.stderr.flush', ([], {}), '()\n', (134, 136), False, 'import sys\n'), ((164, 199), 'random.randint', 'random.randint', (['(-99999999)', '(99999999)'], {}), '(-99999999, 99999999)\n', (178, 199), False, 'import random\n')] |
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from routes import items
import config
from constants import *
config.parse_args()
app = FastAPI(
title="API",
description="API boilerplate",
version="1.0.0",
openapi_tags=API_TAGS_METADATA,
)
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
app.include_router(items.router)
@app.get("/")
async def root():
return {
"docs": "api documentation at /docs or /redoc",
}
if __name__ == "__main__":
uvicorn.run("main:app", host=config.CONFIG.host, port=int(config.CONFIG.port))
| [
"fastapi.FastAPI",
"config.parse_args"
]
| [((161, 180), 'config.parse_args', 'config.parse_args', ([], {}), '()\n', (178, 180), False, 'import config\n'), ((187, 291), 'fastapi.FastAPI', 'FastAPI', ([], {'title': '"""API"""', 'description': '"""API boilerplate"""', 'version': '"""1.0.0"""', 'openapi_tags': 'API_TAGS_METADATA'}), "(title='API', description='API boilerplate', version='1.0.0',\n openapi_tags=API_TAGS_METADATA)\n", (194, 291), False, 'from fastapi import FastAPI\n')] |
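A minimal check of the root route using FastAPI's bundled test client; it assumes the file is saved as main.py (as the uvicorn.run target implies) and that the imported routes.items and config modules are present.
from fastapi.testclient import TestClient
from main import app

client = TestClient(app)
response = client.get("/")
assert response.status_code == 200
print(response.json())  # {"docs": "api documentation at /docs or /redoc"}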
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.html import mark_safe
# Create your models here.
class Gellifinsta(models.Model):
class Meta:
ordering = ['-taken_at_datetime']
shortcode = models.CharField(_("Shortcode"), max_length=20)
taken_at_datetime = models.DateTimeField(_("taken at"))
username = models.CharField(_("Username"), max_length=100)
is_active = models.BooleanField(_("Active"),default=True)
is_video = models.BooleanField(_("Video"),default=False)
file_path = models.CharField(_("File Path"), max_length=500)
url = models.CharField(_("URL"), max_length=500)
created_dt = models.DateTimeField(_("Created Date/Time"), auto_now_add=True, null=True)
updated_dt = models.DateTimeField(_("Updated Date/Time"), auto_now=True, null=True)
caption = models.TextField(_("Caption"), blank=True, null=True)
tags = models.TextField(_("Tags"), blank=True, null=True)
def __str__(self):
return self.shortcode + ':' + str(self.taken_at_datetime)
def image_tag(self):
return mark_safe('<img src="%s" width="250" />' % (self.url))
image_tag.short_description = 'Image'
def tags_spaced(self):
return self.tags.replace(',',' ')
tags_spaced.short_description = 'Tags'
class Products(models.Model):
class Meta:
ordering = ['name']
name = models.CharField(_("Name"), max_length=100, unique=True)
is_active = models.BooleanField(_("Active"),default=True)
def __str__(self):
return self.name
| [
"django.utils.translation.ugettext_lazy",
"django.utils.html.mark_safe"
]
| [((279, 293), 'django.utils.translation.ugettext_lazy', '_', (['"""Shortcode"""'], {}), "('Shortcode')\n", (280, 293), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((355, 368), 'django.utils.translation.ugettext_lazy', '_', (['"""taken at"""'], {}), "('taken at')\n", (356, 368), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((402, 415), 'django.utils.translation.ugettext_lazy', '_', (['"""Username"""'], {}), "('Username')\n", (403, 415), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((469, 480), 'django.utils.translation.ugettext_lazy', '_', (['"""Active"""'], {}), "('Active')\n", (470, 480), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((530, 540), 'django.utils.translation.ugettext_lazy', '_', (['"""Video"""'], {}), "('Video')\n", (531, 540), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((589, 603), 'django.utils.translation.ugettext_lazy', '_', (['"""File Path"""'], {}), "('File Path')\n", (590, 603), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((648, 656), 'django.utils.translation.ugettext_lazy', '_', (['"""URL"""'], {}), "('URL')\n", (649, 656), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((712, 734), 'django.utils.translation.ugettext_lazy', '_', (['"""Created Date/Time"""'], {}), "('Created Date/Time')\n", (713, 734), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((804, 826), 'django.utils.translation.ugettext_lazy', '_', (['"""Updated Date/Time"""'], {}), "('Updated Date/Time')\n", (805, 826), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((885, 897), 'django.utils.translation.ugettext_lazy', '_', (['"""Caption"""'], {}), "('Caption')\n", (886, 897), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((950, 959), 'django.utils.translation.ugettext_lazy', '_', (['"""Tags"""'], {}), "('Tags')\n", (951, 959), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1115, 1168), 'django.utils.html.mark_safe', 'mark_safe', (['(\'<img src="%s" width="250" />\' % self.url)'], {}), '(\'<img src="%s" width="250" />\' % self.url)\n', (1124, 1168), False, 'from django.utils.html import mark_safe\n'), ((1431, 1440), 'django.utils.translation.ugettext_lazy', '_', (['"""Name"""'], {}), "('Name')\n", (1432, 1440), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1507, 1518), 'django.utils.translation.ugettext_lazy', '_', (['"""Active"""'], {}), "('Active')\n", (1508, 1518), True, 'from django.utils.translation import ugettext_lazy as _\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2018-01-16 13:35
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('scanBase', '0002_auto_20180116_1321'),
]
operations = [
migrations.CreateModel(
name='IPSection',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ip_section', models.CharField(blank=True, max_length=30, null=True, unique=True, verbose_name='ip段')),
('ip_start', models.GenericIPAddressField(blank=True, null=True, verbose_name='开始ip')),
('ip_end', models.GenericIPAddressField(blank=True, null=True, verbose_name='结束ip')),
('total', models.IntegerField(blank=True, null=True, verbose_name='总量')),
('deal_time', models.DateTimeField(blank=True, null=True, verbose_name='处理时间')),
('country', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='scanBase.CountryInfo', verbose_name='所属国家')),
],
options={
'verbose_name_plural': 'ip段信息',
'verbose_name': 'ip段信息',
},
),
]
| [
"django.db.models.GenericIPAddressField",
"django.db.models.IntegerField",
"django.db.models.ForeignKey",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
]
| [((430, 523), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (446, 523), False, 'from django.db import migrations, models\n'), ((553, 644), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(30)', 'null': '(True)', 'unique': '(True)', 'verbose_name': '"""ip段"""'}), "(blank=True, max_length=30, null=True, unique=True,\n verbose_name='ip段')\n", (569, 644), False, 'from django.db import migrations, models\n'), ((672, 744), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""开始ip"""'}), "(blank=True, null=True, verbose_name='开始ip')\n", (700, 744), False, 'from django.db import migrations, models\n'), ((774, 846), 'django.db.models.GenericIPAddressField', 'models.GenericIPAddressField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""结束ip"""'}), "(blank=True, null=True, verbose_name='结束ip')\n", (802, 846), False, 'from django.db import migrations, models\n'), ((875, 936), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""总量"""'}), "(blank=True, null=True, verbose_name='总量')\n", (894, 936), False, 'from django.db import migrations, models\n'), ((969, 1033), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""处理时间"""'}), "(blank=True, null=True, verbose_name='处理时间')\n", (989, 1033), False, 'from django.db import migrations, models\n'), ((1064, 1179), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""scanBase.CountryInfo"""', 'verbose_name': '"""所属国家"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'scanBase.CountryInfo', verbose_name='所属国家')\n", (1081, 1179), False, 'from django.db import migrations, models\n')] |
import unittest
from nanoservice import Responder
from nanoservice import Requester
class BaseTestCase(unittest.TestCase):
def setUp(self):
addr = 'inproc://test'
self.client = Requester(addr)
self.service = Responder(addr)
self.service.register('divide', lambda x, y: x / y)
self.service.register('echo', lambda x: x)
def tearDown(self):
self.client.socket.close()
self.service.socket.close()
class TestClient(BaseTestCase):
def test_build_payload(self):
payload = self.client.build_payload('echo', 'My Name')
method, args, ref = payload
self.assertTrue(method == 'echo')
self.assertTrue(len(payload) == 3)
def test_encoder(self):
data = {'name': '<NAME>'}
encoded = self.client.encode(data)
decoded = self.client.decode(encoded)
self.assertEqual(data, decoded)
def test_call_wo_receive(self):
# Requester side ops
method, args = 'echo', 'hello world'
payload = self.client.build_payload(method, args)
self.client.socket.send(self.client.encode(payload))
# Responder side ops
method, args, ref = self.service.receive()
self.assertEqual(method, 'echo')
self.assertEqual(args, 'hello world')
self.assertEqual(ref, payload[2])
def test_basic_socket_operation(self):
msg = 'abc'
self.client.socket.send(msg)
res = self.service.socket.recv().decode('utf-8')
self.assertEqual(msg, res)
def test_timeout(self):
c = Requester('inproc://timeout', timeouts=(1, 1))
c.socket.send('hello')
self.assertRaises(Exception, c.socket.recv)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"nanoservice.Requester",
"nanoservice.Responder"
]
| [((1743, 1758), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1756, 1758), False, 'import unittest\n'), ((201, 216), 'nanoservice.Requester', 'Requester', (['addr'], {}), '(addr)\n', (210, 216), False, 'from nanoservice import Requester\n'), ((240, 255), 'nanoservice.Responder', 'Responder', (['addr'], {}), '(addr)\n', (249, 255), False, 'from nanoservice import Responder\n'), ((1581, 1627), 'nanoservice.Requester', 'Requester', (['"""inproc://timeout"""'], {'timeouts': '(1, 1)'}), "('inproc://timeout', timeouts=(1, 1))\n", (1590, 1627), False, 'from nanoservice import Requester\n')] |
import unittest
class LexerTestCase(unittest.TestCase):
def makeLexer(self, text):
from spi import Lexer
lexer = Lexer(text)
return lexer
def test_tokens(self):
from spi import TokenType
records = (
('234', TokenType.INTEGER_CONST, 234),
('3.14', TokenType.REAL_CONST, 3.14),
('*', TokenType.MUL, '*'),
('DIV', TokenType.INTEGER_DIV, 'DIV'),
('/', TokenType.FLOAT_DIV, '/'),
('+', TokenType.PLUS, '+'),
('-', TokenType.MINUS, '-'),
('(', TokenType.LPAREN, '('),
(')', TokenType.RPAREN, ')'),
(':=', TokenType.ASSIGN, ':='),
('.', TokenType.DOT, '.'),
('number', TokenType.ID, 'number'),
(';', TokenType.SEMI, ';'),
('BEGIN', TokenType.BEGIN, 'BEGIN'),
('END', TokenType.END, 'END'),
('PROCEDURE', TokenType.PROCEDURE, 'PROCEDURE'),
)
for text, tok_type, tok_val in records:
lexer = self.makeLexer(text)
token = lexer.get_next_token()
self.assertEqual(token.type, tok_type)
self.assertEqual(token.value, tok_val)
def test_lexer_exception(self):
from spi import LexerError
lexer = self.makeLexer('<')
with self.assertRaises(LexerError):
lexer.get_next_token()
class ParserTestCase(unittest.TestCase):
def makeParser(self, text):
from spi import Lexer, Parser
lexer = Lexer(text)
parser = Parser(lexer)
return parser
def test_expression_invalid_syntax_01(self):
from spi import ParserError, ErrorCode
parser = self.makeParser(
"""
PROGRAM Test;
VAR
a : INTEGER;
BEGIN
a := 10 * ; {Invalid syntax}
END.
"""
)
with self.assertRaises(ParserError) as cm:
parser.parse()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, ErrorCode.UNEXPECTED_TOKEN)
self.assertEqual(the_exception.token.value, ';')
self.assertEqual(the_exception.token.lineno, 6)
def test_expression_invalid_syntax_02(self):
from spi import ParserError, ErrorCode
parser = self.makeParser(
"""
PROGRAM Test;
VAR
a : INTEGER;
BEGIN
a := 1 (1 + 2); {Invalid syntax}
END.
"""
)
with self.assertRaises(ParserError) as cm:
parser.parse()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, ErrorCode.UNEXPECTED_TOKEN)
self.assertEqual(the_exception.token.value, '(')
self.assertEqual(the_exception.token.lineno, 6)
def test_maximum_one_VAR_block_is_allowed(self):
from spi import ParserError, ErrorCode
# zero VARs
parser = self.makeParser(
"""
PROGRAM Test;
BEGIN
END.
"""
)
parser.parse()
# one VAR
parser = self.makeParser(
"""
PROGRAM Test;
VAR
a : INTEGER;
BEGIN
END.
"""
)
parser.parse()
parser = self.makeParser(
"""
PROGRAM Test;
VAR
a : INTEGER;
VAR
b : INTEGER;
BEGIN
a := 5;
b := a + 10;
END.
"""
)
with self.assertRaises(ParserError) as cm:
parser.parse()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, ErrorCode.UNEXPECTED_TOKEN)
self.assertEqual(the_exception.token.value, 'VAR')
self.assertEqual(the_exception.token.lineno, 5) # second VAR
class SemanticAnalyzerTestCase(unittest.TestCase):
def runSemanticAnalyzer(self, text):
from spi import Lexer, Parser, SemanticAnalyzer
lexer = Lexer(text)
parser = Parser(lexer)
tree = parser.parse()
semantic_analyzer = SemanticAnalyzer()
semantic_analyzer.visit(tree)
return semantic_analyzer
def test_semantic_duplicate_id_error(self):
from spi import SemanticError, ErrorCode
with self.assertRaises(SemanticError) as cm:
self.runSemanticAnalyzer(
"""
PROGRAM Test;
VAR
a : INTEGER;
a : REAL; {Duplicate identifier}
BEGIN
a := 5;
END.
"""
)
the_exception = cm.exception
self.assertEqual(the_exception.error_code, ErrorCode.DUPLICATE_ID)
self.assertEqual(the_exception.token.value, 'a')
self.assertEqual(the_exception.token.lineno, 5)
def test_semantic_id_not_found_error(self):
from spi import SemanticError, ErrorCode
with self.assertRaises(SemanticError) as cm:
self.runSemanticAnalyzer(
"""
PROGRAM Test;
VAR
a : INTEGER;
BEGIN
a := 5 + b;
END.
"""
)
the_exception = cm.exception
self.assertEqual(the_exception.error_code, ErrorCode.ID_NOT_FOUND)
self.assertEqual(the_exception.token.value, 'b')
class TestCallStack:
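    # Stub call stack: pop() intentionally does nothing so tests can peek at the top activation record after interpret() returns.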
def __init__(self):
self._records = []
def push(self, ar):
self._records.append(ar)
def pop(self):
# do nothing
pass
def peek(self):
return self._records[-1]
class InterpreterTestCase(unittest.TestCase):
def makeInterpreter(self, text):
from spi import Lexer, Parser, SemanticAnalyzer, Interpreter
lexer = Lexer(text)
parser = Parser(lexer)
tree = parser.parse()
semantic_analyzer = SemanticAnalyzer()
semantic_analyzer.visit(tree)
interpreter = Interpreter(tree)
interpreter.call_stack = TestCallStack()
return interpreter
def test_integer_arithmetic_expressions(self):
for expr, result in (
('3', 3),
('2 + 7 * 4', 30),
('7 - 8 DIV 4', 5),
('14 + 2 * 3 - 6 DIV 2', 17),
('7 + 3 * (10 DIV (12 DIV (3 + 1) - 1))', 22),
('7 + 3 * (10 DIV (12 DIV (3 + 1) - 1)) DIV (2 + 3) - 5 - 3 + (8)', 10),
('7 + (((3 + 2)))', 12),
('- 3', -3),
('+ 3', 3),
('5 - - - + - 3', 8),
('5 - - - + - (3 + 4) - +2', 10),
):
interpreter = self.makeInterpreter(
"""PROGRAM Test;
VAR
a : INTEGER;
BEGIN
a := %s
END.
""" % expr
)
interpreter.interpret()
ar = interpreter.call_stack.peek()
self.assertEqual(ar['a'], result)
def test_float_arithmetic_expressions(self):
for expr, result in (
('3.14', 3.14),
('2.14 + 7 * 4', 30.14),
('7.14 - 8 / 4', 5.14),
):
interpreter = self.makeInterpreter(
"""PROGRAM Test;
VAR
a : REAL;
BEGIN
a := %s
END.
""" % expr
)
interpreter.interpret()
ar = interpreter.call_stack.peek()
self.assertEqual(ar['a'], result)
def test_procedure_call(self):
text = """\
program Main;
procedure Alpha(a : integer; b : integer);
var x : integer;
begin
x := (a + b ) * 2;
end;
begin { Main }
Alpha(3 + 5, 7);
end. { Main }
"""
interpreter = self.makeInterpreter(text)
interpreter.interpret()
ar = interpreter.call_stack.peek()
self.assertEqual(ar['a'], 8)
self.assertEqual(ar['b'], 7)
self.assertEqual(ar['x'], 30)
self.assertEqual(ar.nesting_level, 2)
def test_program(self):
text = """\
PROGRAM Part12;
VAR
number : INTEGER;
a, b : INTEGER;
y : REAL;
PROCEDURE P1;
VAR
a : REAL;
k : INTEGER;
PROCEDURE P2;
VAR
a, z : INTEGER;
BEGIN {P2}
z := 777;
END; {P2}
BEGIN {P1}
END; {P1}
BEGIN {Part12}
number := 2;
a := number ;
b := 10 * a + 10 * number DIV 4;
y := 20 / 7 + 3.14
END. {Part12}
"""
interpreter = self.makeInterpreter(text)
interpreter.interpret()
ar = interpreter.call_stack.peek()
self.assertEqual(len(ar.members.keys()), 4)
self.assertEqual(ar['number'], 2)
self.assertEqual(ar['a'], 2)
self.assertEqual(ar['b'], 25)
self.assertAlmostEqual(ar['y'], float(20) / 7 + 3.14) # 5.9971...
if __name__ == '__main__':
unittest.main()
| [
"spi.Lexer",
"spi.SemanticAnalyzer",
"spi.Interpreter",
"unittest.main",
"spi.Parser"
]
| [((9013, 9028), 'unittest.main', 'unittest.main', ([], {}), '()\n', (9026, 9028), False, 'import unittest\n'), ((135, 146), 'spi.Lexer', 'Lexer', (['text'], {}), '(text)\n', (140, 146), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((1535, 1546), 'spi.Lexer', 'Lexer', (['text'], {}), '(text)\n', (1540, 1546), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((1564, 1577), 'spi.Parser', 'Parser', (['lexer'], {}), '(lexer)\n', (1570, 1577), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((4138, 4149), 'spi.Lexer', 'Lexer', (['text'], {}), '(text)\n', (4143, 4149), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((4167, 4180), 'spi.Parser', 'Parser', (['lexer'], {}), '(lexer)\n', (4173, 4180), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((4240, 4258), 'spi.SemanticAnalyzer', 'SemanticAnalyzer', ([], {}), '()\n', (4256, 4258), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((5916, 5927), 'spi.Lexer', 'Lexer', (['text'], {}), '(text)\n', (5921, 5927), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((5945, 5958), 'spi.Parser', 'Parser', (['lexer'], {}), '(lexer)\n', (5951, 5958), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((6018, 6036), 'spi.SemanticAnalyzer', 'SemanticAnalyzer', ([], {}), '()\n', (6034, 6036), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n'), ((6098, 6115), 'spi.Interpreter', 'Interpreter', (['tree'], {}), '(tree)\n', (6109, 6115), False, 'from spi import Lexer, Parser, SemanticAnalyzer, Interpreter\n')] |
from pygame import Surface, font
from copy import copy
from random import randint, choice
import string
from lib.transactionButton import TransactionButton
SHOP_PREFIX = ["archer", "baker", "fisher", "miller", "rancher", "robber"]
SHOP_SUFFIX = ["cave", "creek", "desert", "farm", "field", "forest", "hill", "lake", "mountain", "pass", "valley", "woods"]
class Shop():
def __init__(self, name, inventory, priceModifier, groupInventory, groupMoney, itemPrices, position, blitPosition, money, resourcePath):
self.yValue = 40
self.groupInventory = groupInventory
self.groupMoney = groupMoney
self.priceModifier = priceModifier
self.itemPrices = itemPrices
self.inventory = inventory
self.position = position
self.blitPosition = blitPosition
self.resourcePath = resourcePath
self.buyButtonList = []
self.sellButtonList = []
self.xPos = (-self.position * 40) + 1280
self.shopSurface = Surface((500, 300)).convert()
self.sepLine = Surface((self.shopSurface.get_width(), 10)).convert()
self.sepLine.fill((0, 0, 0))
self.invContainer = Surface((self.shopSurface.get_width() - 20,
self.shopSurface.get_height() / 2 - 35)).convert()
self.invContainer.fill((255, 255, 255))
self.titleFont = font.Font("res/fonts/west.ttf", 17)
self.textFont = font.Font("res/fonts/west.ttf", 15)
if (name == ""):
self.name = (choice(SHOP_PREFIX) + "'s " + choice(SHOP_SUFFIX)).capitalize()
else:
self.name = name
if (self.inventory == {}):
inventoryRandom = copy(self.groupInventory)
for key in list(inventoryRandom.keys()):
inventoryRandom[key] = randint(0, 10)
inventoryRandom["Food"] *= 20
self.inventory = inventoryRandom
if (money is None):
self.money = randint(200, 500)
else:
            self.money = money
self.render()
def get_surface(self):
self.render()
return self.shopSurface
def update(self, groupInv, groupMoney):
self.groupInventory = groupInv
self.groupMoney = groupMoney
self.render()
def move(self, moveValue):
self.xPos += (2 * moveValue)
self.render()
def render(self):
self.yValue = 40
self.shopSurface.fill((133, 94, 66))
self.shopSurface.blit(self.titleFont.render(self.name + " - $" + str(self.money), 1, (0, 0, 255)), (10, 5))
self.shopSurface.blit(self.invContainer, (10, 25))
self.shopSurface.blit(self.invContainer, (10, self.shopSurface.get_height() / 2 + 30))
self.shopSurface.blit(self.textFont.render("Inventory", 1, (255, 0, 0)), (10, 25))
self.shopSurface.blit(self.textFont.render("Amount", 1, (255, 0, 0)), (130, 25))
self.shopSurface.blit(self.textFont.render("Price", 1, (255, 0, 0)), (200, 25))
for key in list(self.inventory.keys()):
self.shopSurface.blit(self.textFont.render(key + ":", 1, (0, 0, 0)), (10, self.yValue))
self.shopSurface.blit(self.textFont.render(str(self.inventory[key]), 1,
(0, 0, 0)), (150, self.yValue))
self.shopSurface.blit(self.textFont.render("$"+str(self.itemPrices[key] * self.priceModifier), 1,
(0, 0, 0)), (200, self.yValue))
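            # Buy buttons are created only on the first render pass; later renders reuse the cached list.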
if (len(self.buyButtonList) < len(self.inventory.keys())):
buttonPos = tuple(map(sum, zip(self.blitPosition, (250, self.yValue))))
self.buyButtonList.append(TransactionButton(transaction = "buy",
item = key,
imagePosition = (250, self.yValue),
rectPosition = buttonPos,
resourcePath = self.resourcePath))
self.yValue += 30
for button in self.buyButtonList:
self.shopSurface.blit(button.image, button.imagePosition)
self.shopSurface.blit(self.sepLine, (0, float(self.shopSurface.get_height()) / 2))
self.shopSurface.blit(self.titleFont.render("You - $" + str(self.groupMoney), 1, (0, 0, 255)),
(10, float(self.shopSurface.get_height()) / 2 + 10))
self.shopSurface.blit(self.titleFont.render("Inventory", 1, (255, 0, 0)),
(10, float(self.shopSurface.get_height()) / 2 + 30))
self.shopSurface.blit(self.titleFont.render("Amount", 1, (255, 0, 0)),
(130, float(self.shopSurface.get_height()) / 2 + 30))
self.shopSurface.blit(self.titleFont.render("Price", 1, (255, 0, 0)),
(200, float(self.shopSurface.get_height()) / 2 + 30))
self.yValue = (float(self.shopSurface.get_height()) / 2) + 45
for key in list(self.groupInventory.keys()):
self.shopSurface.blit(self.textFont.render(key + ":", 1, (0, 0, 0)), (10, self.yValue))
self.shopSurface.blit(self.textFont.render(str(self.groupInventory[key]), 1,
(0, 0, 0)), (150, self.yValue))
self.shopSurface.blit(self.textFont.render("$" + str(self.itemPrices[key] * self.priceModifier), 1,
(0, 0, 0)), (200, self.yValue))
if (len(self.sellButtonList) < len(self.inventory.keys())):
buttonPos = tuple(map(sum, zip(self.blitPosition, (250, self.yValue))))
self.sellButtonList.append(TransactionButton(transaction = "sell",
item = key,
imagePosition = (250, self.yValue),
rectPosition = buttonPos,
resourcePath = self.resourcePath))
self.yValue += 30
for button in self.sellButtonList:
self.shopSurface.blit(button.image, button.imagePosition)
| [
"random.choice",
"pygame.Surface",
"copy.copy",
"lib.transactionButton.TransactionButton",
"pygame.font.Font",
"random.randint"
]
| [((1236, 1271), 'pygame.font.Font', 'font.Font', (['"""res/fonts/west.ttf"""', '(17)'], {}), "('res/fonts/west.ttf', 17)\n", (1245, 1271), False, 'from pygame import Surface, font\n'), ((1290, 1325), 'pygame.font.Font', 'font.Font', (['"""res/fonts/west.ttf"""', '(15)'], {}), "('res/fonts/west.ttf', 15)\n", (1299, 1325), False, 'from pygame import Surface, font\n'), ((1510, 1535), 'copy.copy', 'copy', (['self.groupInventory'], {}), '(self.groupInventory)\n', (1514, 1535), False, 'from copy import copy\n'), ((1738, 1755), 'random.randint', 'randint', (['(200)', '(500)'], {}), '(200, 500)\n', (1745, 1755), False, 'from random import randint, choice\n'), ((913, 932), 'pygame.Surface', 'Surface', (['(500, 300)'], {}), '((500, 300))\n', (920, 932), False, 'from pygame import Surface, font\n'), ((1607, 1621), 'random.randint', 'randint', (['(0)', '(10)'], {}), '(0, 10)\n', (1614, 1621), False, 'from random import randint, choice\n'), ((3230, 3371), 'lib.transactionButton.TransactionButton', 'TransactionButton', ([], {'transaction': '"""buy"""', 'item': 'key', 'imagePosition': '(250, self.yValue)', 'rectPosition': 'buttonPos', 'resourcePath': 'self.resourcePath'}), "(transaction='buy', item=key, imagePosition=(250, self.\n yValue), rectPosition=buttonPos, resourcePath=self.resourcePath)\n", (3247, 3371), False, 'from lib.transactionButton import TransactionButton\n'), ((4905, 5047), 'lib.transactionButton.TransactionButton', 'TransactionButton', ([], {'transaction': '"""sell"""', 'item': 'key', 'imagePosition': '(250, self.yValue)', 'rectPosition': 'buttonPos', 'resourcePath': 'self.resourcePath'}), "(transaction='sell', item=key, imagePosition=(250, self.\n yValue), rectPosition=buttonPos, resourcePath=self.resourcePath)\n", (4922, 5047), False, 'from lib.transactionButton import TransactionButton\n'), ((1394, 1413), 'random.choice', 'choice', (['SHOP_SUFFIX'], {}), '(SHOP_SUFFIX)\n', (1400, 1413), False, 'from random import randint, choice\n'), ((1364, 1383), 'random.choice', 'choice', (['SHOP_PREFIX'], {}), '(SHOP_PREFIX)\n', (1370, 1383), False, 'from random import randint, choice\n')] |
import tkinter.messagebox
from tkinter import *
import tkinter as tk
from tkinter import filedialog
import numpy
import pytesseract  # Python wrapper for the Tesseract OCR engine.
import cv2
from PIL import Image, ImageTk
import os
root = tk.Tk()
root.title("Object Character Recognizer")
root.geometry("1280x720")
test_image = None
def browse_image():
fin = filedialog.askopenfilename(initialdir=os.getcwd(), title="Select Image File", filetypes=(("PNG Files", "*.png"), ("JPG Files", "*.jpg"), ("All Files", "*.*")))
global test_image
image = Image.open(fin)
test_image = image
img = ImageTk.PhotoImage(image.resize((650, 400)))
    lb = tk.Label(image=img)
    lb.image = img  # keep a reference so the PhotoImage is not garbage-collected
    lb.place(x=25, y=50)
def use_ocr_default():
try:
global test_image
messge = None
#OEM stands for OCR Engine Mode and PSM stands for Page Segmentation Mode.
#OEM defines what kind of OCR engine is to be used (this defines the dataset that would be used to cross-match
#the available data with the testing data).
#PSM defines how Tesseract will treat the image that supposedly contains characters and how it will extract the
#data from the image.
tess = pytesseract.image_to_string(test_image, config='-l eng --oem 1 --psm 3')
label = Label(messge, text='Result:')
label.place(x=850, y=320)
display_message = Text(messge, width=46, height=15)
display_message.insert(END, str(tess))
display_message.config(state=DISABLED)
display_message.delete(0, END)
display_message.place(x=890, y=330)
    except Exception: # Print an error message when the user selects an incompatible image.
        tkinter.messagebox.showinfo('Something\'s Wrong!', 'Your picture may not contain English characters or you may not have selected a picture. Please select a picture with detectable English characters.')
def use_ocr_handwriting():
try:
global test_image
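        # Pre-process the handwriting sample: median blur, grayscale, then a fixed binary threshold before OCR.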
opencv_img = numpy.array(test_image)
opencv_img = opencv_img[:, :, ::-1].copy() #This line is used to convert RGB PIL image file to BGR cv2 image file.
blurred_img = cv2.medianBlur(opencv_img, 5)
gray_img = cv2.cvtColor(blurred_img, cv2.COLOR_BGR2GRAY)
thresh, binary = cv2.threshold(gray_img, 122, 255, cv2.THRESH_BINARY)
messge = None
tess = pytesseract.image_to_string(binary, config='-l eng --oem 1 --psm 3')
label = Label(messge, text='Result:')
label.place(x=850, y=320)
display_message = Text(messge, width=46, height=15)
display_message.insert(END, str(tess))
display_message.config(state=DISABLED)
display_message.delete(0, END)
display_message.place(x=890, y=330)
    except Exception:
        tkinter.messagebox.showinfo('Something\'s Wrong!', 'Your picture may not contain English characters or you may not have selected a picture. Please select a picture with detectable English characters.')
def use_ocr_singletext():
try:
global test_image
messge = None
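        # --psm 7 makes Tesseract treat the image as a single line of text.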
tess = pytesseract.image_to_string(test_image, config='-l eng --oem 1 --psm 7')
label = Label(messge, text='Result:')
label.place(x=850, y=320)
display_message = Text(messge, width=46, height=15)
display_message.insert(END, str(tess))
display_message.config(state=DISABLED)
display_message.delete(0, END)
display_message.place(x=890, y=330)
    except Exception:
        tkinter.messagebox.showinfo('Something\'s Wrong!', 'Your picture may not contain English characters or you may not have selected a picture. Please select a picture with detectable English characters.')
w = tk.LabelFrame(root, text="Image:", width=768, height=600)
w.place(x=20, y=10)
w.pack_propagate(0)
w1 = tk.LabelFrame(root, text="Extracted Text:", width=500, height=310)
w1.place(x=800, y=300)
w2 = tk.LabelFrame(root, text="Operations:", width=350, height=280)
w2.place(x=800, y=10)
btn1 = tk.Button(w2, text="Load Image", padx=40, pady=10, command=browse_image)
btn1.place(x=22, y=20)
btn1 = tk.Button(w2, text="Run Handwritten OCR", padx=40, pady=10, command=use_ocr_handwriting)
btn1.place(x=22, y=80)
btn1 = tk.Button(w2, text="Run Default OCR", padx=40, pady=10, command=use_ocr_default)
btn1.place(x=22, y=140)
btn1 = tk.Button(w2, text="Run Single Text OCR", padx=40, pady=10, command=use_ocr_singletext)
btn1.place(x=22, y=200)
root.mainloop()
| [
"tkinter.LabelFrame",
"PIL.Image.open",
"cv2.threshold",
"cv2.medianBlur",
"tkinter.Button",
"os.getcwd",
"numpy.array",
"tkinter.Tk",
"tkinter.Label",
"pytesseract.image_to_string",
"cv2.cvtColor"
]
| [((268, 275), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (273, 275), True, 'import tkinter as tk\n'), ((3726, 3783), 'tkinter.LabelFrame', 'tk.LabelFrame', (['root'], {'text': '"""Image:"""', 'width': '(768)', 'height': '(600)'}), "(root, text='Image:', width=768, height=600)\n", (3739, 3783), True, 'import tkinter as tk\n'), ((3829, 3895), 'tkinter.LabelFrame', 'tk.LabelFrame', (['root'], {'text': '"""Extracted Text:"""', 'width': '(500)', 'height': '(310)'}), "(root, text='Extracted Text:', width=500, height=310)\n", (3842, 3895), True, 'import tkinter as tk\n'), ((3924, 3986), 'tkinter.LabelFrame', 'tk.LabelFrame', (['root'], {'text': '"""Operations:"""', 'width': '(350)', 'height': '(280)'}), "(root, text='Operations:', width=350, height=280)\n", (3937, 3986), True, 'import tkinter as tk\n'), ((4016, 4088), 'tkinter.Button', 'tk.Button', (['w2'], {'text': '"""Load Image"""', 'padx': '(40)', 'pady': '(10)', 'command': 'browse_image'}), "(w2, text='Load Image', padx=40, pady=10, command=browse_image)\n", (4025, 4088), True, 'import tkinter as tk\n'), ((4119, 4212), 'tkinter.Button', 'tk.Button', (['w2'], {'text': '"""Run Handwritten OCR"""', 'padx': '(40)', 'pady': '(10)', 'command': 'use_ocr_handwriting'}), "(w2, text='Run Handwritten OCR', padx=40, pady=10, command=\n use_ocr_handwriting)\n", (4128, 4212), True, 'import tkinter as tk\n'), ((4238, 4323), 'tkinter.Button', 'tk.Button', (['w2'], {'text': '"""Run Default OCR"""', 'padx': '(40)', 'pady': '(10)', 'command': 'use_ocr_default'}), "(w2, text='Run Default OCR', padx=40, pady=10, command=use_ocr_default\n )\n", (4247, 4323), True, 'import tkinter as tk\n'), ((4350, 4442), 'tkinter.Button', 'tk.Button', (['w2'], {'text': '"""Run Single Text OCR"""', 'padx': '(40)', 'pady': '(10)', 'command': 'use_ocr_singletext'}), "(w2, text='Run Single Text OCR', padx=40, pady=10, command=\n use_ocr_singletext)\n", (4359, 4442), True, 'import tkinter as tk\n'), ((589, 604), 'PIL.Image.open', 'Image.open', (['fin'], {}), '(fin)\n', (599, 604), False, 'from PIL import Image, ImageTk\n'), ((692, 711), 'tkinter.Label', 'tk.Label', ([], {'image': 'img'}), '(image=img)\n', (700, 711), True, 'import tkinter as tk\n'), ((1257, 1329), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['test_image'], {'config': '"""-l eng --oem 1 --psm 3"""'}), "(test_image, config='-l eng --oem 1 --psm 3')\n", (1284, 1329), False, 'import pytesseract\n'), ((2021, 2044), 'numpy.array', 'numpy.array', (['test_image'], {}), '(test_image)\n', (2032, 2044), False, 'import numpy\n'), ((2190, 2219), 'cv2.medianBlur', 'cv2.medianBlur', (['opencv_img', '(5)'], {}), '(opencv_img, 5)\n', (2204, 2219), False, 'import cv2\n'), ((2239, 2284), 'cv2.cvtColor', 'cv2.cvtColor', (['blurred_img', 'cv2.COLOR_BGR2GRAY'], {}), '(blurred_img, cv2.COLOR_BGR2GRAY)\n', (2251, 2284), False, 'import cv2\n'), ((2310, 2362), 'cv2.threshold', 'cv2.threshold', (['gray_img', '(122)', '(255)', 'cv2.THRESH_BINARY'], {}), '(gray_img, 122, 255, cv2.THRESH_BINARY)\n', (2323, 2362), False, 'import cv2\n'), ((2400, 2468), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['binary'], {'config': '"""-l eng --oem 1 --psm 3"""'}), "(binary, config='-l eng --oem 1 --psm 3')\n", (2427, 2468), False, 'import pytesseract\n'), ((3108, 3180), 'pytesseract.image_to_string', 'pytesseract.image_to_string', (['test_image'], {'config': '"""-l eng --oem 1 --psm 7"""'}), "(test_image, config='-l eng --oem 1 --psm 7')\n", (3135, 3180), False, 'import pytesseract\n'), ((432, 443), 'os.getcwd', 
'os.getcwd', ([], {}), '()\n', (441, 443), False, 'import os\n')] |
# Copyright (c) 2017, Lawrence Livermore National Security, LLC. Produced at
# the Lawrence Livermore National Laboratory. LLNL-CODE-734707. All Rights
# reserved. See files LICENSE and NOTICE for details.
#
# This file is part of CEED, a collection of benchmarks, miniapps, software
# libraries and APIs for efficient high-order finite element and spectral
# element discretizations for exascale applications. For more information and
# source code availability see http://github.com/ceed.
#
# The CEED research is supported by the Exascale Computing Project 17-SC-20-SC,
# a collaborative effort of two U.S. Department of Energy organizations (Office
# of Science and the National Nuclear Security Administration) responsible for
# the planning and preparation of a capable exascale ecosystem, including
# software, applications, hardware, advanced system engineering and early
# testbed platforms, in support of the nation's exascale computing imperative.
# @file
# Test Ceed Vector functionality
import os
import libceed
import numpy as np
import check
TOL = libceed.EPSILON * 256
# -------------------------------------------------------------------------------
# Utility
# -------------------------------------------------------------------------------
def check_values(ceed, x, value):
with x.array_read() as b:
for i in range(len(b)):
assert b[i] == value
# -------------------------------------------------------------------------------
# Test creation, setting, reading, restoring, and destroying of a vector
# -------------------------------------------------------------------------------
def test_100(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
with x.array_read() as b:
for i in range(n):
assert b[i] == 10 + i
# -------------------------------------------------------------------------------
# Test setValue
# -------------------------------------------------------------------------------
def test_101(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
value = 1
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
with x.array() as b:
for i in range(len(b)):
assert b[i] == 10 + i
x.set_value(3.0)
check_values(ceed, x, 3.0)
del x
x = ceed.Vector(n)
# Set value before setting or getting the array
x.set_value(5.0)
check_values(ceed, x, 5.0)
# -------------------------------------------------------------------------------
# Test getArrayRead state counter
# -------------------------------------------------------------------------------
def test_102(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
x.set_value(0)
# Two read accesses should not generate an error
a = x.get_array_read()
b = x.get_array_read()
x.restore_array_read()
x.restore_array_read()
# -------------------------------------------------------------------------------
# Test setting one vector from array of another vector
# -------------------------------------------------------------------------------
def test_103(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
y = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
with x.array() as x_array:
y.set_array(x_array, cmode=libceed.USE_POINTER)
with y.array_read() as y_array:
for i in range(n):
assert y_array[i] == 10 + i
# -------------------------------------------------------------------------------
# Test getArray to modify array
# -------------------------------------------------------------------------------
def test_104(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.zeros(n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
with x.array() as b:
b[3] = -3.14
if libceed.lib.CEED_SCALAR_TYPE == libceed.SCALAR_FP32:
assert a[3] == np.float32(-3.14)
else:
assert a[3] == -3.14
# -------------------------------------------------------------------------------
# Test creation, setting, reading, restoring, and destroying of a vector using
# CEED_MEM_DEVICE
# -------------------------------------------------------------------------------
def test_105(ceed_resource):
# Skip test for non-GPU backend
if 'gpu' in ceed_resource:
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
y = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
arr = x.get_array_read(memtype=libceed.MEM_DEVICE)
y.set_array(arr, memtype=libceed.MEM_DEVICE)
x.restore_array_read()
with y.array_read() as b:
for i in range(n):
assert b[i] == 10 + i
# -------------------------------------------------------------------------------
# Test view
# -------------------------------------------------------------------------------
def test_107(ceed_resource, capsys):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
print(x)
stdout, stderr, ref_stdout = check.output(capsys)
assert not stderr
assert stdout == ref_stdout
# -------------------------------------------------------------------------------
# Test norms
# -------------------------------------------------------------------------------
def test_108(ceed_resource, capsys):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.arange(0, n, dtype=ceed.scalar_type())
for i in range(n):
if (i % 2 == 0):
a[i] *= -1
x.set_array(a, cmode=libceed.USE_POINTER)
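    # a == [0, 1, -2, 3, -4, 5, -6, 7, -8, 9]: 1-norm 45, 2-norm sqrt(285), max-norm 9.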
norm = x.norm(normtype=libceed.NORM_1)
assert abs(norm - 45.) < TOL
norm = x.norm()
assert abs(norm - np.sqrt(285.)) < TOL
norm = x.norm(normtype=libceed.NORM_MAX)
assert abs(norm - 9.) < TOL
# -------------------------------------------------------------------------------
# Test taking the reciprocal of a vector
# -------------------------------------------------------------------------------
def test_119(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.USE_POINTER)
x.reciprocal()
with x.array_read() as b:
for i in range(n):
assert abs(b[i] - 1. / (10 + i)) < TOL
# -------------------------------------------------------------------------------
# Test AXPY
# -------------------------------------------------------------------------------
def test_121(ceed_resource, capsys):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
y = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.COPY_VALUES)
y.set_array(a, cmode=libceed.COPY_VALUES)
y.axpy(-0.5, x)
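    # axpy computes y := alpha * x + y; with alpha = -0.5 and x == y == a, y becomes 0.5 * a.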
with y.array() as b:
assert np.allclose(.5 * a, b)
# -------------------------------------------------------------------------------
# Test pointwise multiplication
# -------------------------------------------------------------------------------
def test_122(ceed_resource, capsys):
ceed = libceed.Ceed(ceed_resource)
n = 10
w = ceed.Vector(n)
x = ceed.Vector(n)
y = ceed.Vector(n)
a = np.arange(0, n, dtype=ceed.scalar_type())
w.set_array(a, cmode=libceed.COPY_VALUES)
x.set_array(a, cmode=libceed.COPY_VALUES)
y.set_array(a, cmode=libceed.COPY_VALUES)
w.pointwise_mult(x, y)
with w.array() as b:
for i in range(len(b)):
assert abs(b[i] - i * i) < 1e-14
w.pointwise_mult(w, y)
with w.array() as b:
for i in range(len(b)):
assert abs(b[i] - i * i * i) < 1e-14
w.pointwise_mult(x, w)
with w.array() as b:
for i in range(len(b)):
assert abs(b[i] - i * i * i * i) < 1e-14
y.pointwise_mult(y, y)
with y.array() as b:
for i in range(len(b)):
assert abs(b[i] - i * i) < 1e-14
# -------------------------------------------------------------------------------
# Test Scale
# -------------------------------------------------------------------------------
def test_123(ceed_resource, capsys):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
a = np.arange(10, 10 + n, dtype=ceed.scalar_type())
x.set_array(a, cmode=libceed.COPY_VALUES)
x.scale(-0.5)
with x.array() as b:
assert np.allclose(-.5 * a, b)
# -------------------------------------------------------------------------------
# Test getArrayWrite to modify array
# -------------------------------------------------------------------------------
def test_124(ceed_resource):
ceed = libceed.Ceed(ceed_resource)
n = 10
x = ceed.Vector(n)
with x.array_write() as a:
for i in range(len(a)):
a[i] = 3 * i
with x.array_read() as a:
for i in range(len(a)):
assert a[i] == 3 * i
# -------------------------------------------------------------------------------
# Test modification of reshaped array
# -------------------------------------------------------------------------------
def test_199(ceed_resource):
"""Modification of reshaped array"""
ceed = libceed.Ceed(ceed_resource)
vec = ceed.Vector(12)
vec.set_value(0.0)
with vec.array(4, 3) as x:
x[...] = np.eye(4, 3)
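    # The same 12-entry buffer is written with shape (4, 3) and read back with shape (3, 4).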
with vec.array_read(3, 4) as x:
assert np.all(x == np.eye(4, 3).reshape(3, 4))
# -------------------------------------------------------------------------------
| [
"numpy.eye",
"libceed.Ceed",
"numpy.allclose",
"numpy.sqrt",
"check.output",
"numpy.float32"
]
| [((1674, 1701), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (1686, 1701), False, 'import libceed\n'), ((2155, 2182), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (2167, 2182), False, 'import libceed\n'), ((2857, 2884), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (2869, 2884), False, 'import libceed\n'), ((3364, 3391), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (3376, 3391), False, 'import libceed\n'), ((3985, 4012), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (3997, 4012), False, 'import libceed\n'), ((5390, 5417), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (5402, 5417), False, 'import libceed\n'), ((5604, 5624), 'check.output', 'check.output', (['capsys'], {}), '(capsys)\n', (5616, 5624), False, 'import check\n'), ((5907, 5934), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (5919, 5934), False, 'import libceed\n'), ((6608, 6635), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (6620, 6635), False, 'import libceed\n'), ((7129, 7156), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (7141, 7156), False, 'import libceed\n'), ((7695, 7722), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (7707, 7722), False, 'import libceed\n'), ((8753, 8780), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (8765, 8780), False, 'import libceed\n'), ((9246, 9273), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (9258, 9273), False, 'import libceed\n'), ((9781, 9808), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (9793, 9808), False, 'import libceed\n'), ((4706, 4733), 'libceed.Ceed', 'libceed.Ceed', (['ceed_resource'], {}), '(ceed_resource)\n', (4718, 4733), False, 'import libceed\n'), ((7425, 7448), 'numpy.allclose', 'np.allclose', (['(0.5 * a)', 'b'], {}), '(0.5 * a, b)\n', (7436, 7448), True, 'import numpy as np\n'), ((8978, 9002), 'numpy.allclose', 'np.allclose', (['(-0.5 * a)', 'b'], {}), '(-0.5 * a, b)\n', (8989, 9002), True, 'import numpy as np\n'), ((9907, 9919), 'numpy.eye', 'np.eye', (['(4)', '(3)'], {}), '(4, 3)\n', (9913, 9919), True, 'import numpy as np\n'), ((4272, 4289), 'numpy.float32', 'np.float32', (['(-3.14)'], {}), '(-3.14)\n', (4282, 4289), True, 'import numpy as np\n'), ((6260, 6274), 'numpy.sqrt', 'np.sqrt', (['(285.0)'], {}), '(285.0)\n', (6267, 6274), True, 'import numpy as np\n'), ((9984, 9996), 'numpy.eye', 'np.eye', (['(4)', '(3)'], {}), '(4, 3)\n', (9990, 9996), True, 'import numpy as np\n')] |
from YouTubeFacesDB import generate_ytf_database
###############################################################################
# Create the dataset
###############################################################################
generate_ytf_database(
directory= '../data',#'/scratch/vitay/Datasets/YouTubeFaces', # Location of the YTF dataset
filename='ytfdb.h5', # Name of the HDF5 file to write to
labels=10, # Number of labels to randomly select
max_number=-1, # Maximum number of images to use
size=(100, 100), # Size of the images
color=False, # Black and white
bw_first=True, # Final shape is (1, w, h)
cropped=True # The original images are cropped to the faces
) | [
"YouTubeFacesDB.generate_ytf_database"
]
| [((231, 383), 'YouTubeFacesDB.generate_ytf_database', 'generate_ytf_database', ([], {'directory': '"""../data"""', 'filename': '"""ytfdb.h5"""', 'labels': '(10)', 'max_number': '(-1)', 'size': '(100, 100)', 'color': '(False)', 'bw_first': '(True)', 'cropped': '(True)'}), "(directory='../data', filename='ytfdb.h5', labels=10,\n max_number=-1, size=(100, 100), color=False, bw_first=True, cropped=True)\n", (252, 383), False, 'from YouTubeFacesDB import generate_ytf_database\n')] |
from freezegun import freeze_time
from rest_framework import test
from waldur_mastermind.billing.tests.utils import get_financial_report_url
from waldur_mastermind.invoices import models as invoice_models
from waldur_mastermind.invoices.tests import factories as invoice_factories
from waldur_mastermind.invoices.tests import fixtures as invoice_fixtures
@freeze_time('2017-01-10')
class PriceCurrentTest(test.APITransactionTestCase):
def setUp(self):
self.fixture = invoice_fixtures.InvoiceFixture()
invoice_factories.InvoiceItemFactory(
invoice=self.fixture.invoice,
project=self.fixture.project,
unit=invoice_models.InvoiceItem.Units.PER_MONTH,
unit_price=100,
quantity=1,
)
invoice_factories.InvoiceItemFactory(
invoice=self.fixture.invoice,
project=self.fixture.project,
unit=invoice_models.InvoiceItem.Units.PER_DAY,
unit_price=3,
quantity=31,
)
def test_current_price(self):
self.client.force_authenticate(self.fixture.staff)
url = get_financial_report_url(self.fixture.project.customer)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
data = response.json()
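        # Time is frozen at 2017-01-10: the monthly item contributes 100 and the daily item 9 elapsed days * 3; the remaining 22 days of January make up the difference from the total.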
self.assertEqual(data['billing_price_estimate']['current'], 100 + 9 * 3)
diff = (
data['billing_price_estimate']['total']
- data['billing_price_estimate']['current']
)
self.assertEqual(diff, 22 * 3)
| [
"waldur_mastermind.invoices.tests.fixtures.InvoiceFixture",
"freezegun.freeze_time",
"waldur_mastermind.invoices.tests.factories.InvoiceItemFactory",
"waldur_mastermind.billing.tests.utils.get_financial_report_url"
]
| [((359, 384), 'freezegun.freeze_time', 'freeze_time', (['"""2017-01-10"""'], {}), "('2017-01-10')\n", (370, 384), False, 'from freezegun import freeze_time\n'), ((482, 515), 'waldur_mastermind.invoices.tests.fixtures.InvoiceFixture', 'invoice_fixtures.InvoiceFixture', ([], {}), '()\n', (513, 515), True, 'from waldur_mastermind.invoices.tests import fixtures as invoice_fixtures\n'), ((525, 707), 'waldur_mastermind.invoices.tests.factories.InvoiceItemFactory', 'invoice_factories.InvoiceItemFactory', ([], {'invoice': 'self.fixture.invoice', 'project': 'self.fixture.project', 'unit': 'invoice_models.InvoiceItem.Units.PER_MONTH', 'unit_price': '(100)', 'quantity': '(1)'}), '(invoice=self.fixture.invoice, project=\n self.fixture.project, unit=invoice_models.InvoiceItem.Units.PER_MONTH,\n unit_price=100, quantity=1)\n', (561, 707), True, 'from waldur_mastermind.invoices.tests import factories as invoice_factories\n'), ((778, 957), 'waldur_mastermind.invoices.tests.factories.InvoiceItemFactory', 'invoice_factories.InvoiceItemFactory', ([], {'invoice': 'self.fixture.invoice', 'project': 'self.fixture.project', 'unit': 'invoice_models.InvoiceItem.Units.PER_DAY', 'unit_price': '(3)', 'quantity': '(31)'}), '(invoice=self.fixture.invoice, project=\n self.fixture.project, unit=invoice_models.InvoiceItem.Units.PER_DAY,\n unit_price=3, quantity=31)\n', (814, 957), True, 'from waldur_mastermind.invoices.tests import factories as invoice_factories\n'), ((1128, 1183), 'waldur_mastermind.billing.tests.utils.get_financial_report_url', 'get_financial_report_url', (['self.fixture.project.customer'], {}), '(self.fixture.project.customer)\n', (1152, 1183), False, 'from waldur_mastermind.billing.tests.utils import get_financial_report_url\n')] |
# -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
#
# Copyright 2018-2019 Fetch.AI Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ------------------------------------------------------------------------------
"""This test module contains the tests for aea.cli.utils module."""
from builtins import FileNotFoundError
from typing import cast
from unittest import TestCase, mock
from click import BadParameter, ClickException
from jsonschema import ValidationError
from yaml import YAMLError
from aea.cli.utils.click_utils import AEAJsonPathType, PublicIdParameter
from aea.cli.utils.config import (
_init_cli_config,
get_or_create_cli_config,
update_cli_config,
)
from aea.cli.utils.context import Context
from aea.cli.utils.decorators import _validate_config_consistency, clean_after
from aea.cli.utils.formatting import format_items
from aea.cli.utils.generic import is_readme_present
from aea.cli.utils.package_utils import (
find_item_in_distribution,
find_item_locally,
is_fingerprint_correct,
try_get_balance,
try_get_item_source_path,
try_get_item_target_path,
validate_author_name,
validate_package_name,
)
from tests.conftest import FETCHAI
from tests.test_cli.tools_for_testing import (
ConfigLoaderMock,
ContextMock,
PublicIdMock,
StopTest,
raise_stoptest,
)
AUTHOR = "author"
class FormatItemsTestCase(TestCase):
"""Test case for format_items method."""
def testformat_items_positive(self):
"""Test format_items positive result."""
items = [
{
"public_id": "author/name:version",
"name": "obj-name",
"description": "Some description",
"author": "author",
"version": "1.0",
}
]
result = format_items(items)
expected_result = (
"------------------------------\n"
"Public ID: author/name:version\n"
"Name: obj-name\n"
"Description: Some description\n"
"Author: author\n"
"Version: 1.0\n"
"------------------------------\n"
)
self.assertEqual(result, expected_result)
@mock.patch("aea.cli.utils.package_utils.os.path.join", return_value="some-path")
class TryGetItemSourcePathTestCase(TestCase):
"""Test case for try_get_item_source_path method."""
@mock.patch("aea.cli.utils.package_utils.os.path.exists", return_value=True)
def test_get_item_source_path_positive(self, exists_mock, join_mock):
"""Test for get_item_source_path positive result."""
result = try_get_item_source_path("cwd", AUTHOR, "skills", "skill-name")
expected_result = "some-path"
self.assertEqual(result, expected_result)
join_mock.assert_called_once_with("cwd", AUTHOR, "skills", "skill-name")
exists_mock.assert_called_once_with("some-path")
result = try_get_item_source_path("cwd", None, "skills", "skill-name")
self.assertEqual(result, expected_result)
@mock.patch("aea.cli.utils.package_utils.os.path.exists", return_value=False)
def test_get_item_source_path_not_exists(self, exists_mock, join_mock):
"""Test for get_item_source_path item already exists."""
with self.assertRaises(ClickException):
try_get_item_source_path("cwd", AUTHOR, "skills", "skill-name")
@mock.patch("aea.cli.utils.package_utils.os.path.join", return_value="some-path")
class TryGetItemTargetPathTestCase(TestCase):
"""Test case for try_get_item_target_path method."""
@mock.patch("aea.cli.utils.package_utils.os.path.exists", return_value=False)
def test_get_item_target_path_positive(self, exists_mock, join_mock):
"""Test for get_item_source_path positive result."""
result = try_get_item_target_path("packages", AUTHOR, "skills", "skill-name")
expected_result = "some-path"
self.assertEqual(result, expected_result)
join_mock.assert_called_once_with("packages", AUTHOR, "skills", "skill-name")
exists_mock.assert_called_once_with("some-path")
@mock.patch("aea.cli.utils.package_utils.os.path.exists", return_value=True)
def test_get_item_target_path_already_exists(self, exists_mock, join_mock):
"""Test for get_item_target_path item already exists."""
with self.assertRaises(ClickException):
try_get_item_target_path("skills", AUTHOR, "skill-name", "packages_path")
class PublicIdParameterTestCase(TestCase):
"""Test case for PublicIdParameter class."""
def test_get_metavar_positive(self):
"""Test for get_metavar positive result."""
result = PublicIdParameter.get_metavar("obj", "param")
expected_result = "PUBLIC_ID"
self.assertEqual(result, expected_result)
@mock.patch("aea.cli.utils.config.os.path.dirname", return_value="dir-name")
@mock.patch("aea.cli.utils.config.os.path.exists", return_value=False)
@mock.patch("aea.cli.utils.config.os.makedirs")
@mock.patch("builtins.open")
class InitConfigFolderTestCase(TestCase):
"""Test case for _init_cli_config method."""
def test_init_cli_config_positive(
self, open_mock, makedirs_mock, exists_mock, dirname_mock
):
"""Test for _init_cli_config method positive result."""
_init_cli_config()
dirname_mock.assert_called_once()
exists_mock.assert_called_once_with("dir-name")
makedirs_mock.assert_called_once_with("dir-name")
@mock.patch("aea.cli.utils.config.get_or_create_cli_config")
@mock.patch("aea.cli.utils.generic.yaml.dump")
@mock.patch("builtins.open", mock.mock_open())
class UpdateCLIConfigTestCase(TestCase):
"""Test case for update_cli_config method."""
def testupdate_cli_config_positive(self, dump_mock, icf_mock):
"""Test for update_cli_config method positive result."""
update_cli_config({"some": "config"})
icf_mock.assert_called_once()
dump_mock.assert_called_once()
def _raise_yamlerror(*args):
raise YAMLError()
def _raise_file_not_found_error(*args):
raise FileNotFoundError()
@mock.patch("builtins.open", mock.mock_open())
class GetOrCreateCLIConfigTestCase(TestCase):
"""Test case for read_cli_config method."""
@mock.patch(
"aea.cli.utils.generic.yaml.safe_load", return_value={"correct": "output"}
)
def testget_or_create_cli_config_positive(self, safe_load_mock):
"""Test for get_or_create_cli_config method positive result."""
result = get_or_create_cli_config()
expected_result = {"correct": "output"}
self.assertEqual(result, expected_result)
safe_load_mock.assert_called_once()
@mock.patch("aea.cli.utils.generic.yaml.safe_load", _raise_yamlerror)
def testget_or_create_cli_config_bad_yaml(self):
"""Test for rget_or_create_cli_config method bad yaml behavior."""
with self.assertRaises(ClickException):
get_or_create_cli_config()
class CleanAfterTestCase(TestCase):
"""Test case for clean_after decorator method."""
@mock.patch("aea.cli.utils.decorators.os.path.exists", return_value=True)
@mock.patch("aea.cli.utils.decorators._cast_ctx", lambda x: x)
@mock.patch("aea.cli.utils.decorators.shutil.rmtree")
def test_clean_after_positive(self, rmtree_mock, *mocks):
"""Test clean_after decorator method for positive result."""
@clean_after
def func(click_context):
ctx = cast(Context, click_context.obj)
ctx.clean_paths.append("clean/path")
raise ClickException("Message")
with self.assertRaises(ClickException):
func(ContextMock())
rmtree_mock.assert_called_once_with("clean/path")
@mock.patch("aea.cli.utils.package_utils.click.echo", raise_stoptest)
class ValidateAuthorNameTestCase(TestCase):
"""Test case for validate_author_name method."""
@mock.patch(
"aea.cli.utils.package_utils.click.prompt", return_value="correct_author"
)
def test_validate_author_name_positive(self, prompt_mock):
"""Test validate_author_name for positive result."""
author = "valid_author"
result = validate_author_name(author=author)
self.assertEqual(result, author)
result = validate_author_name()
self.assertEqual(result, "correct_author")
prompt_mock.assert_called_once()
@mock.patch(
"aea.cli.utils.package_utils.click.prompt", return_value="inv@l1d_@uth&r"
)
def test_validate_author_name_negative(self, prompt_mock):
"""Test validate_author_name for negative result."""
with self.assertRaises(StopTest):
validate_author_name()
prompt_mock.return_value = "skills"
with self.assertRaises(StopTest):
validate_author_name()
class ValidatePackageNameTestCase(TestCase):
"""Test case for validate_package_name method."""
def test_validate_package_name_positive(self):
"""Test validate_package_name for positive result."""
validate_package_name("correct_name")
def test_validate_package_name_negative(self):
"""Test validate_package_name for negative result."""
with self.assertRaises(BadParameter):
validate_package_name("incorrect-name")
def _raise_validation_error(*args, **kwargs):
raise ValidationError("Message.")
class FindItemLocallyTestCase(TestCase):
"""Test case for find_item_locally method."""
@mock.patch("aea.cli.utils.package_utils.Path.exists", return_value=True)
@mock.patch(
"aea.cli.utils.package_utils.ConfigLoader.from_configuration_type",
_raise_validation_error,
)
def test_find_item_locally_bad_config(self, *mocks):
"""Test find_item_locally for bad config result."""
public_id = PublicIdMock.from_str("fetchai/echo:0.5.0")
with self.assertRaises(ClickException) as cm:
find_item_locally(ContextMock(), "skill", public_id)
self.assertIn("configuration file not valid", cm.exception.message)
@mock.patch("aea.cli.utils.package_utils.Path.exists", return_value=True)
@mock.patch("aea.cli.utils.package_utils.Path.open", mock.mock_open())
@mock.patch(
"aea.cli.utils.package_utils.ConfigLoader.from_configuration_type",
return_value=ConfigLoaderMock(),
)
def test_find_item_locally_cant_find(self, from_conftype_mock, *mocks):
"""Test find_item_locally for can't find result."""
public_id = PublicIdMock.from_str("fetchai/echo:0.5.0")
with self.assertRaises(ClickException) as cm:
find_item_locally(ContextMock(), "skill", public_id)
self.assertEqual(
cm.exception.message, "Cannot find skill with author and version specified."
)
class FindItemInDistributionTestCase(TestCase):
"""Test case for find_item_in_distribution method."""
@mock.patch("aea.cli.utils.package_utils.Path.exists", return_value=True)
@mock.patch(
"aea.cli.utils.package_utils.ConfigLoader.from_configuration_type",
_raise_validation_error,
)
def testfind_item_in_distribution_bad_config(self, *mocks):
"""Test find_item_in_distribution for bad config result."""
public_id = PublicIdMock.from_str("fetchai/echo:0.5.0")
with self.assertRaises(ClickException) as cm:
find_item_in_distribution(ContextMock(), "skill", public_id)
self.assertIn("configuration file not valid", cm.exception.message)
@mock.patch("aea.cli.utils.package_utils.Path.exists", return_value=False)
def testfind_item_in_distribution_not_found(self, *mocks):
"""Test find_item_in_distribution for not found result."""
public_id = PublicIdMock.from_str("fetchai/echo:0.5.0")
with self.assertRaises(ClickException) as cm:
find_item_in_distribution(ContextMock(), "skill", public_id)
self.assertIn("Cannot find skill", cm.exception.message)
@mock.patch("aea.cli.utils.package_utils.Path.exists", return_value=True)
@mock.patch("aea.cli.utils.package_utils.Path.open", mock.mock_open())
@mock.patch(
"aea.cli.utils.package_utils.ConfigLoader.from_configuration_type",
return_value=ConfigLoaderMock(),
)
def testfind_item_in_distribution_cant_find(self, from_conftype_mock, *mocks):
"""Test find_item_locally for can't find result."""
public_id = PublicIdMock.from_str("fetchai/echo:0.5.0")
with self.assertRaises(ClickException) as cm:
find_item_in_distribution(ContextMock(), "skill", public_id)
self.assertEqual(
cm.exception.message, "Cannot find skill with author and version specified."
)
class ValidateConfigConsistencyTestCase(TestCase):
"""Test case for _validate_config_consistency method."""
@mock.patch("aea.cli.utils.config.Path.exists", _raise_validation_error)
def test__validate_config_consistency_cant_find(self, *mocks):
"""Test _validate_config_consistency can't find result"""
with self.assertRaises(ValueError) as cm:
_validate_config_consistency(ContextMock(protocols=["some"]))
self.assertIn("Cannot find", str(cm.exception))
@mock.patch(
"aea.cli.utils.package_utils._compute_fingerprint",
return_value={"correct": "fingerprint"},
)
class IsFingerprintCorrectTestCase(TestCase):
"""Test case for adding skill with invalid fingerprint."""
def test_is_fingerprint_correct_positive(self, *mocks):
"""Test is_fingerprint_correct method for positive result."""
item_config = mock.Mock()
item_config.fingerprint = {"correct": "fingerprint"}
item_config.fingerprint_ignore_patterns = []
result = is_fingerprint_correct("package_path", item_config)
self.assertTrue(result)
def test_is_fingerprint_correct_negative(self, *mocks):
"""Test is_fingerprint_correct method for negative result."""
item_config = mock.Mock()
item_config.fingerprint = {"incorrect": "fingerprint"}
item_config.fingerprint_ignore_patterns = []
package_path = "package_dir"
result = is_fingerprint_correct(package_path, item_config)
self.assertFalse(result)
@mock.patch("aea.cli.config.click.ParamType")
class AEAJsonPathTypeTestCase(TestCase):
"""Test case for AEAJsonPathType class."""
@mock.patch("aea.cli.utils.click_utils.Path.exists", return_value=True)
def test_convert_root_vendor_positive(self, *mocks):
"""Test for convert method with root "vendor" positive result."""
value = "vendor.author.protocols.package_name.attribute_name"
ctx_mock = ContextMock()
ctx_mock.obj = mock.Mock()
ctx_mock.obj.set_config = mock.Mock()
obj = AEAJsonPathType()
obj.convert(value, "param", ctx_mock)
@mock.patch("aea.cli.utils.click_utils.Path.exists", return_value=False)
def test_convert_root_vendor_path_not_exists(self, *mocks):
"""Test for convert method with root "vendor" path not exists."""
value = "vendor.author.protocols.package_name.attribute_name"
obj = AEAJsonPathType()
with self.assertRaises(BadParameter):
obj.convert(value, "param", "ctx")
@mock.patch("aea.cli.utils.package_utils.LedgerApis", mock.MagicMock())
class TryGetBalanceTestCase(TestCase):
"""Test case for try_get_balance method."""
def test_try_get_balance_positive(self):
"""Test for try_get_balance method positive result."""
agent_config = mock.Mock()
agent_config.default_ledger_config = FETCHAI
wallet_mock = mock.Mock()
wallet_mock.addresses = {FETCHAI: "some-adress"}
try_get_balance(agent_config, wallet_mock, FETCHAI)
@mock.patch("aea.cli.utils.generic.os.path.exists", return_value=True)
class IsReadmePresentTestCase(TestCase):
"""Test case for is_readme_present method."""
def test_is_readme_present_positive(self, *mocks):
"""Test is_readme_present for positive result."""
self.assertTrue(is_readme_present("readme/path"))
| [
"tests.test_cli.tools_for_testing.ConfigLoaderMock",
"click.ClickException",
"aea.cli.utils.package_utils.validate_package_name",
"jsonschema.ValidationError",
"unittest.mock.patch",
"aea.cli.utils.generic.is_readme_present",
"aea.cli.utils.package_utils.try_get_item_source_path",
"tests.test_cli.tools_for_testing.ContextMock",
"aea.cli.utils.config.get_or_create_cli_config",
"aea.cli.utils.package_utils.validate_author_name",
"aea.cli.utils.package_utils.try_get_balance",
"aea.cli.utils.click_utils.PublicIdParameter.get_metavar",
"unittest.mock.Mock",
"yaml.YAMLError",
"unittest.mock.MagicMock",
"aea.cli.utils.config.update_cli_config",
"builtins.FileNotFoundError",
"aea.cli.utils.package_utils.try_get_item_target_path",
"typing.cast",
"aea.cli.utils.formatting.format_items",
"aea.cli.utils.package_utils.is_fingerprint_correct",
"unittest.mock.mock_open",
"aea.cli.utils.click_utils.AEAJsonPathType",
"aea.cli.utils.config._init_cli_config",
"tests.test_cli.tools_for_testing.PublicIdMock.from_str"
]
| [((2790, 2875), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.os.path.join"""'], {'return_value': '"""some-path"""'}), "('aea.cli.utils.package_utils.os.path.join', return_value='some-path'\n )\n", (2800, 2875), False, 'from unittest import TestCase, mock\n'), ((3979, 4064), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.os.path.join"""'], {'return_value': '"""some-path"""'}), "('aea.cli.utils.package_utils.os.path.join', return_value='some-path'\n )\n", (3989, 4064), False, 'from unittest import TestCase, mock\n'), ((5401, 5476), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.config.os.path.dirname"""'], {'return_value': '"""dir-name"""'}), "('aea.cli.utils.config.os.path.dirname', return_value='dir-name')\n", (5411, 5476), False, 'from unittest import TestCase, mock\n'), ((5478, 5547), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.config.os.path.exists"""'], {'return_value': '(False)'}), "('aea.cli.utils.config.os.path.exists', return_value=False)\n", (5488, 5547), False, 'from unittest import TestCase, mock\n'), ((5549, 5595), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.config.os.makedirs"""'], {}), "('aea.cli.utils.config.os.makedirs')\n", (5559, 5595), False, 'from unittest import TestCase, mock\n'), ((5597, 5624), 'unittest.mock.patch', 'mock.patch', (['"""builtins.open"""'], {}), "('builtins.open')\n", (5607, 5624), False, 'from unittest import TestCase, mock\n'), ((6079, 6138), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.config.get_or_create_cli_config"""'], {}), "('aea.cli.utils.config.get_or_create_cli_config')\n", (6089, 6138), False, 'from unittest import TestCase, mock\n'), ((6140, 6185), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.generic.yaml.dump"""'], {}), "('aea.cli.utils.generic.yaml.dump')\n", (6150, 6185), False, 'from unittest import TestCase, mock\n'), ((8344, 8412), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.click.echo"""', 'raise_stoptest'], {}), "('aea.cli.utils.package_utils.click.echo', raise_stoptest)\n", (8354, 8412), False, 'from unittest import TestCase, mock\n'), ((13856, 13964), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils._compute_fingerprint"""'], {'return_value': "{'correct': 'fingerprint'}"}), "('aea.cli.utils.package_utils._compute_fingerprint', return_value\n ={'correct': 'fingerprint'})\n", (13866, 13964), False, 'from unittest import TestCase, mock\n'), ((14881, 14925), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.config.click.ParamType"""'], {}), "('aea.cli.config.click.ParamType')\n", (14891, 14925), False, 'from unittest import TestCase, mock\n'), ((16408, 16477), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.generic.os.path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.generic.os.path.exists', return_value=True)\n", (16418, 16477), False, 'from unittest import TestCase, mock\n'), ((2980, 3055), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.os.path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.os.path.exists', return_value=True)\n", (2990, 3055), False, 'from unittest import TestCase, mock\n'), ((3634, 3710), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.os.path.exists"""'], {'return_value': '(False)'}), "('aea.cli.utils.package_utils.os.path.exists', return_value=False)\n", (3644, 3710), False, 'from unittest import TestCase, mock\n'), ((4169, 4245), 'unittest.mock.patch', 
'mock.patch', (['"""aea.cli.utils.package_utils.os.path.exists"""'], {'return_value': '(False)'}), "('aea.cli.utils.package_utils.os.path.exists', return_value=False)\n", (4179, 4245), False, 'from unittest import TestCase, mock\n'), ((4704, 4779), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.os.path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.os.path.exists', return_value=True)\n", (4714, 4779), False, 'from unittest import TestCase, mock\n'), ((6215, 6231), 'unittest.mock.mock_open', 'mock.mock_open', ([], {}), '()\n', (6229, 6231), False, 'from unittest import TestCase, mock\n'), ((6621, 6632), 'yaml.YAMLError', 'YAMLError', ([], {}), '()\n', (6630, 6632), False, 'from yaml import YAMLError\n'), ((6685, 6704), 'builtins.FileNotFoundError', 'FileNotFoundError', ([], {}), '()\n', (6702, 6704), False, 'from builtins import FileNotFoundError\n'), ((6854, 6944), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.generic.yaml.safe_load"""'], {'return_value': "{'correct': 'output'}"}), "('aea.cli.utils.generic.yaml.safe_load', return_value={'correct':\n 'output'})\n", (6864, 6944), False, 'from unittest import TestCase, mock\n'), ((7288, 7356), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.generic.yaml.safe_load"""', '_raise_yamlerror'], {}), "('aea.cli.utils.generic.yaml.safe_load', _raise_yamlerror)\n", (7298, 7356), False, 'from unittest import TestCase, mock\n'), ((6736, 6752), 'unittest.mock.mock_open', 'mock.mock_open', ([], {}), '()\n', (6750, 6752), False, 'from unittest import TestCase, mock\n'), ((7670, 7742), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.decorators.os.path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.decorators.os.path.exists', return_value=True)\n", (7680, 7742), False, 'from unittest import TestCase, mock\n'), ((7748, 7809), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.decorators._cast_ctx"""', '(lambda x: x)'], {}), "('aea.cli.utils.decorators._cast_ctx', lambda x: x)\n", (7758, 7809), False, 'from unittest import TestCase, mock\n'), ((7815, 7867), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.decorators.shutil.rmtree"""'], {}), "('aea.cli.utils.decorators.shutil.rmtree')\n", (7825, 7867), False, 'from unittest import TestCase, mock\n'), ((8516, 8606), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.click.prompt"""'], {'return_value': '"""correct_author"""'}), "('aea.cli.utils.package_utils.click.prompt', return_value=\n 'correct_author')\n", (8526, 8606), False, 'from unittest import TestCase, mock\n'), ((9005, 9095), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.click.prompt"""'], {'return_value': '"""inv@l1d_@uth&r"""'}), "('aea.cli.utils.package_utils.click.prompt', return_value=\n 'inv@l1d_@uth&r')\n", (9015, 9095), False, 'from unittest import TestCase, mock\n'), ((9959, 9986), 'jsonschema.ValidationError', 'ValidationError', (['"""Message."""'], {}), "('Message.')\n", (9974, 9986), False, 'from jsonschema import ValidationError\n'), ((10086, 10158), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.Path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.Path.exists', return_value=True)\n", (10096, 10158), False, 'from unittest import TestCase, mock\n'), ((10164, 10271), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.ConfigLoader.from_configuration_type"""', '_raise_validation_error'], {}), 
"('aea.cli.utils.package_utils.ConfigLoader.from_configuration_type',\n _raise_validation_error)\n", (10174, 10271), False, 'from unittest import TestCase, mock\n'), ((10674, 10746), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.Path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.Path.exists', return_value=True)\n", (10684, 10746), False, 'from unittest import TestCase, mock\n'), ((11521, 11593), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.Path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.Path.exists', return_value=True)\n", (11531, 11593), False, 'from unittest import TestCase, mock\n'), ((11599, 11706), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.ConfigLoader.from_configuration_type"""', '_raise_validation_error'], {}), "('aea.cli.utils.package_utils.ConfigLoader.from_configuration_type',\n _raise_validation_error)\n", (11609, 11706), False, 'from unittest import TestCase, mock\n'), ((12132, 12205), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.Path.exists"""'], {'return_value': '(False)'}), "('aea.cli.utils.package_utils.Path.exists', return_value=False)\n", (12142, 12205), False, 'from unittest import TestCase, mock\n'), ((12599, 12671), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.package_utils.Path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.package_utils.Path.exists', return_value=True)\n", (12609, 12671), False, 'from unittest import TestCase, mock\n'), ((13467, 13538), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.config.Path.exists"""', '_raise_validation_error'], {}), "('aea.cli.utils.config.Path.exists', _raise_validation_error)\n", (13477, 13538), False, 'from unittest import TestCase, mock\n'), ((15020, 15090), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.click_utils.Path.exists"""'], {'return_value': '(True)'}), "('aea.cli.utils.click_utils.Path.exists', return_value=True)\n", (15030, 15090), False, 'from unittest import TestCase, mock\n'), ((15490, 15561), 'unittest.mock.patch', 'mock.patch', (['"""aea.cli.utils.click_utils.Path.exists"""'], {'return_value': '(False)'}), "('aea.cli.utils.click_utils.Path.exists', return_value=False)\n", (15500, 15561), False, 'from unittest import TestCase, mock\n'), ((15951, 15967), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (15965, 15967), False, 'from unittest import TestCase, mock\n'), ((2401, 2420), 'aea.cli.utils.formatting.format_items', 'format_items', (['items'], {}), '(items)\n', (2413, 2420), False, 'from aea.cli.utils.formatting import format_items\n'), ((3208, 3271), 'aea.cli.utils.package_utils.try_get_item_source_path', 'try_get_item_source_path', (['"""cwd"""', 'AUTHOR', '"""skills"""', '"""skill-name"""'], {}), "('cwd', AUTHOR, 'skills', 'skill-name')\n", (3232, 3271), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((3516, 3577), 'aea.cli.utils.package_utils.try_get_item_source_path', 'try_get_item_source_path', (['"""cwd"""', 'None', '"""skills"""', '"""skill-name"""'], {}), "('cwd', None, 'skills', 'skill-name')\n", (3540, 3577), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, 
validate_author_name, validate_package_name\n'), ((4398, 4466), 'aea.cli.utils.package_utils.try_get_item_target_path', 'try_get_item_target_path', (['"""packages"""', 'AUTHOR', '"""skills"""', '"""skill-name"""'], {}), "('packages', AUTHOR, 'skills', 'skill-name')\n", (4422, 4466), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((5264, 5309), 'aea.cli.utils.click_utils.PublicIdParameter.get_metavar', 'PublicIdParameter.get_metavar', (['"""obj"""', '"""param"""'], {}), "('obj', 'param')\n", (5293, 5309), False, 'from aea.cli.utils.click_utils import AEAJsonPathType, PublicIdParameter\n'), ((5901, 5919), 'aea.cli.utils.config._init_cli_config', '_init_cli_config', ([], {}), '()\n', (5917, 5919), False, 'from aea.cli.utils.config import _init_cli_config, get_or_create_cli_config, update_cli_config\n'), ((6465, 6502), 'aea.cli.utils.config.update_cli_config', 'update_cli_config', (["{'some': 'config'}"], {}), "({'some': 'config'})\n", (6482, 6502), False, 'from aea.cli.utils.config import _init_cli_config, get_or_create_cli_config, update_cli_config\n'), ((7113, 7139), 'aea.cli.utils.config.get_or_create_cli_config', 'get_or_create_cli_config', ([], {}), '()\n', (7137, 7139), False, 'from aea.cli.utils.config import _init_cli_config, get_or_create_cli_config, update_cli_config\n'), ((8789, 8824), 'aea.cli.utils.package_utils.validate_author_name', 'validate_author_name', ([], {'author': 'author'}), '(author=author)\n', (8809, 8824), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((8884, 8906), 'aea.cli.utils.package_utils.validate_author_name', 'validate_author_name', ([], {}), '()\n', (8904, 8906), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((9651, 9688), 'aea.cli.utils.package_utils.validate_package_name', 'validate_package_name', (['"""correct_name"""'], {}), "('correct_name')\n", (9672, 9688), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((10428, 10471), 'tests.test_cli.tools_for_testing.PublicIdMock.from_str', 'PublicIdMock.from_str', (['"""fetchai/echo:0.5.0"""'], {}), "('fetchai/echo:0.5.0')\n", (10449, 10471), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((11118, 11161), 'tests.test_cli.tools_for_testing.PublicIdMock.from_str', 'PublicIdMock.from_str', (['"""fetchai/echo:0.5.0"""'], {}), "('fetchai/echo:0.5.0')\n", (11139, 11161), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((10804, 10820), 'unittest.mock.mock_open', 'mock.mock_open', ([], {}), '()\n', (10818, 10820), False, 'from unittest import TestCase, mock\n'), ((11878, 11921), 'tests.test_cli.tools_for_testing.PublicIdMock.from_str', 'PublicIdMock.from_str', (['"""fetchai/echo:0.5.0"""'], {}), 
"('fetchai/echo:0.5.0')\n", (11899, 11921), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((12356, 12399), 'tests.test_cli.tools_for_testing.PublicIdMock.from_str', 'PublicIdMock.from_str', (['"""fetchai/echo:0.5.0"""'], {}), "('fetchai/echo:0.5.0')\n", (12377, 12399), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((13050, 13093), 'tests.test_cli.tools_for_testing.PublicIdMock.from_str', 'PublicIdMock.from_str', (['"""fetchai/echo:0.5.0"""'], {}), "('fetchai/echo:0.5.0')\n", (13071, 13093), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((12729, 12745), 'unittest.mock.mock_open', 'mock.mock_open', ([], {}), '()\n', (12743, 12745), False, 'from unittest import TestCase, mock\n'), ((14233, 14244), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (14242, 14244), False, 'from unittest import TestCase, mock\n'), ((14376, 14427), 'aea.cli.utils.package_utils.is_fingerprint_correct', 'is_fingerprint_correct', (['"""package_path"""', 'item_config'], {}), "('package_path', item_config)\n", (14398, 14427), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((14613, 14624), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (14622, 14624), False, 'from unittest import TestCase, mock\n'), ((14795, 14844), 'aea.cli.utils.package_utils.is_fingerprint_correct', 'is_fingerprint_correct', (['package_path', 'item_config'], {}), '(package_path, item_config)\n', (14817, 14844), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((15311, 15324), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (15322, 15324), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((15348, 15359), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (15357, 15359), False, 'from unittest import TestCase, mock\n'), ((15394, 15405), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (15403, 15405), False, 'from unittest import TestCase, mock\n'), ((15420, 15437), 'aea.cli.utils.click_utils.AEAJsonPathType', 'AEAJsonPathType', ([], {}), '()\n', (15435, 15437), False, 'from aea.cli.utils.click_utils import AEAJsonPathType, PublicIdParameter\n'), ((15784, 15801), 'aea.cli.utils.click_utils.AEAJsonPathType', 'AEAJsonPathType', ([], {}), '()\n', (15799, 15801), False, 'from aea.cli.utils.click_utils import AEAJsonPathType, PublicIdParameter\n'), ((16188, 16199), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (16197, 16199), False, 'from unittest import TestCase, mock\n'), ((16276, 16287), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (16285, 16287), False, 'from unittest import TestCase, mock\n'), ((16353, 16404), 'aea.cli.utils.package_utils.try_get_balance', 'try_get_balance', (['agent_config', 'wallet_mock', 'FETCHAI'], {}), '(agent_config, wallet_mock, FETCHAI)\n', (16368, 16404), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, 
try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((3912, 3975), 'aea.cli.utils.package_utils.try_get_item_source_path', 'try_get_item_source_path', (['"""cwd"""', 'AUTHOR', '"""skills"""', '"""skill-name"""'], {}), "('cwd', AUTHOR, 'skills', 'skill-name')\n", (3936, 3975), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((4985, 5058), 'aea.cli.utils.package_utils.try_get_item_target_path', 'try_get_item_target_path', (['"""skills"""', 'AUTHOR', '"""skill-name"""', '"""packages_path"""'], {}), "('skills', AUTHOR, 'skill-name', 'packages_path')\n", (5009, 5058), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((7545, 7571), 'aea.cli.utils.config.get_or_create_cli_config', 'get_or_create_cli_config', ([], {}), '()\n', (7569, 7571), False, 'from aea.cli.utils.config import _init_cli_config, get_or_create_cli_config, update_cli_config\n'), ((8072, 8104), 'typing.cast', 'cast', (['Context', 'click_context.obj'], {}), '(Context, click_context.obj)\n', (8076, 8104), False, 'from typing import cast\n'), ((8172, 8197), 'click.ClickException', 'ClickException', (['"""Message"""'], {}), "('Message')\n", (8186, 8197), False, 'from click import BadParameter, ClickException\n'), ((9283, 9305), 'aea.cli.utils.package_utils.validate_author_name', 'validate_author_name', ([], {}), '()\n', (9303, 9305), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((9405, 9427), 'aea.cli.utils.package_utils.validate_author_name', 'validate_author_name', ([], {}), '()\n', (9425, 9427), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((9861, 9900), 'aea.cli.utils.package_utils.validate_package_name', 'validate_package_name', (['"""incorrect-name"""'], {}), "('incorrect-name')\n", (9882, 9900), False, 'from aea.cli.utils.package_utils import find_item_in_distribution, find_item_locally, is_fingerprint_correct, try_get_balance, try_get_item_source_path, try_get_item_target_path, validate_author_name, validate_package_name\n'), ((10936, 10954), 'tests.test_cli.tools_for_testing.ConfigLoaderMock', 'ConfigLoaderMock', ([], {}), '()\n', (10952, 10954), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((12861, 12879), 'tests.test_cli.tools_for_testing.ConfigLoaderMock', 'ConfigLoaderMock', ([], {}), '()\n', (12877, 12879), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((16707, 16739), 'aea.cli.utils.generic.is_readme_present', 'is_readme_present', (['"""readme/path"""'], {}), "('readme/path')\n", (16724, 16739), False, 'from aea.cli.utils.generic import is_readme_present\n'), ((8264, 8277), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), 
'()\n', (8275, 8277), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((10556, 10569), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (10567, 10569), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((11246, 11259), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (11257, 11259), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((12014, 12027), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (12025, 12027), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((12492, 12505), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (12503, 12505), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((13186, 13199), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {}), '()\n', (13197, 13199), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n'), ((13763, 13794), 'tests.test_cli.tools_for_testing.ContextMock', 'ContextMock', ([], {'protocols': "['some']"}), "(protocols=['some'])\n", (13774, 13794), False, 'from tests.test_cli.tools_for_testing import ConfigLoaderMock, ContextMock, PublicIdMock, StopTest, raise_stoptest\n')] |
from rest_framework import serializers
from punkweb_boards.conf.settings import SHOUTBOX_DISABLED_TAGS
from punkweb_boards.models import (
BoardProfile,
Category,
Subcategory,
Thread,
Post,
Conversation,
Message,
Report,
Shout,
)
class BoardProfileSerializer(serializers.ModelSerializer):
post_count = serializers.ReadOnlyField()
can_shout = serializers.ReadOnlyField()
rendered_username = serializers.ReadOnlyField()
rendered_rank = serializers.ReadOnlyField()
class Meta:
model = BoardProfile
fields = "__all__"
class CategorySerializer(serializers.ModelSerializer):
class Meta:
model = Category
exclude = ("auth_req",)
class SubcategorySerializer(serializers.ModelSerializer):
last_thread = serializers.ReadOnlyField(source="last_thread.id")
last_thread_title = serializers.ReadOnlyField(source="last_thread.title")
last_thread_created = serializers.ReadOnlyField(
source="last_thread.created"
)
last_thread_user = serializers.ReadOnlyField(
source="last_thread.user.profile.rendered_username"
)
parent_name = serializers.ReadOnlyField(source="parent.name")
thread_count = serializers.ReadOnlyField()
post_count = serializers.ReadOnlyField()
can_post = serializers.SerializerMethodField()
def get_can_post(self, obj):
return obj.can_post(self.context.get("request").user)
class Meta:
model = Subcategory
exclude = ("auth_req",)
class ThreadSerializer(serializers.ModelSerializer):
last_post = serializers.ReadOnlyField(source="last_post.id")
last_post_created = serializers.ReadOnlyField(source="last_post.created")
last_post_username = serializers.ReadOnlyField(
source="last_post.user.username"
)
last_post_rendered_username = serializers.ReadOnlyField(
source="last_post.user.profile.rendered_username"
)
user_username = serializers.ReadOnlyField(source="user.username")
user_rendered_username = serializers.ReadOnlyField(
source="user.profile.rendered_username"
)
user_image = serializers.ReadOnlyField(source="user.profile.avatar")
user_post_count = serializers.ReadOnlyField(
source="user.profile.post_count"
)
user_join_date = serializers.ReadOnlyField(source="user.created")
flagged = serializers.ReadOnlyField(source="reported")
posts_count = serializers.ReadOnlyField()
can_edit = serializers.SerializerMethodField()
def get_can_edit(self, obj):
return obj.can_edit(self.context.get("request").user)
class Meta:
model = Thread
fields = "__all__"
read_only_fields = (
"pinned",
"closed",
"user",
"upvoted_by",
"downvoted_by",
)
class PostSerializer(serializers.ModelSerializer):
flagged = serializers.ReadOnlyField(source="reported")
can_edit = serializers.SerializerMethodField()
def get_can_edit(self, obj):
return obj.can_edit(self.context.get("request").user)
class Meta:
model = Post
fields = "__all__"
read_only_fields = ("user", "upvoted_by", "downvoted_by")
class ConversationSerializer(serializers.ModelSerializer):
last_message = serializers.ReadOnlyField(source="last_message.id")
last_message_title = serializers.ReadOnlyField(source="last_message.title")
last_message_created = serializers.ReadOnlyField(
source="last_message.created"
)
last_message_user = serializers.ReadOnlyField(
source="last_message.user.profile.rendered_username"
)
message_count = serializers.ReadOnlyField()
class Meta:
model = Conversation
fields = "__all__"
read_only_fields = ("unread_by",)
class MessageSerializer(serializers.ModelSerializer):
class Meta:
model = Message
fields = "__all__"
read_only_fields = ("user",)
class ShoutSerializer(serializers.ModelSerializer):
username = serializers.ReadOnlyField(source="user.username")
rendered_username = serializers.ReadOnlyField(
source="user.profile.rendered_username"
)
class Meta:
model = Shout
fields = (
"id",
"user",
"username",
"rendered_username",
"content",
"_content_rendered",
"created",
"modified",
)
read_only_fields = ("user",)
def create(self, validated_data):
for key in SHOUTBOX_DISABLED_TAGS:
key_tag = "[{}]".format(key).lower()
if (
key_tag[: len(key_tag) - 1]
in validated_data.get("content").lower()
):
raise serializers.ValidationError(
{
"notAllowed": "{} is not allowed in the shoutbox".format(
key_tag
)
}
)
return Shout.objects.create(**validated_data)
| [
"punkweb_boards.models.Shout.objects.create",
"rest_framework.serializers.SerializerMethodField",
"rest_framework.serializers.ReadOnlyField"
]
| [((344, 371), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (369, 371), False, 'from rest_framework import serializers\n'), ((388, 415), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (413, 415), False, 'from rest_framework import serializers\n'), ((440, 467), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (465, 467), False, 'from rest_framework import serializers\n'), ((488, 515), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (513, 515), False, 'from rest_framework import serializers\n'), ((797, 847), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_thread.id"""'}), "(source='last_thread.id')\n", (822, 847), False, 'from rest_framework import serializers\n'), ((872, 925), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_thread.title"""'}), "(source='last_thread.title')\n", (897, 925), False, 'from rest_framework import serializers\n'), ((952, 1007), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_thread.created"""'}), "(source='last_thread.created')\n", (977, 1007), False, 'from rest_framework import serializers\n'), ((1045, 1123), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_thread.user.profile.rendered_username"""'}), "(source='last_thread.user.profile.rendered_username')\n", (1070, 1123), False, 'from rest_framework import serializers\n'), ((1156, 1203), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""parent.name"""'}), "(source='parent.name')\n", (1181, 1203), False, 'from rest_framework import serializers\n'), ((1223, 1250), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (1248, 1250), False, 'from rest_framework import serializers\n'), ((1268, 1295), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (1293, 1295), False, 'from rest_framework import serializers\n'), ((1311, 1346), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (1344, 1346), False, 'from rest_framework import serializers\n'), ((1591, 1639), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_post.id"""'}), "(source='last_post.id')\n", (1616, 1639), False, 'from rest_framework import serializers\n'), ((1664, 1717), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_post.created"""'}), "(source='last_post.created')\n", (1689, 1717), False, 'from rest_framework import serializers\n'), ((1743, 1802), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_post.user.username"""'}), "(source='last_post.user.username')\n", (1768, 1802), False, 'from rest_framework import serializers\n'), ((1851, 1927), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_post.user.profile.rendered_username"""'}), "(source='last_post.user.profile.rendered_username')\n", (1876, 1927), False, 'from rest_framework import serializers\n'), ((1962, 2011), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.username"""'}), "(source='user.username')\n", (1987, 2011), 
False, 'from rest_framework import serializers\n'), ((2041, 2107), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.profile.rendered_username"""'}), "(source='user.profile.rendered_username')\n", (2066, 2107), False, 'from rest_framework import serializers\n'), ((2139, 2194), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.profile.avatar"""'}), "(source='user.profile.avatar')\n", (2164, 2194), False, 'from rest_framework import serializers\n'), ((2217, 2276), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.profile.post_count"""'}), "(source='user.profile.post_count')\n", (2242, 2276), False, 'from rest_framework import serializers\n'), ((2312, 2360), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.created"""'}), "(source='user.created')\n", (2337, 2360), False, 'from rest_framework import serializers\n'), ((2375, 2419), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""reported"""'}), "(source='reported')\n", (2400, 2419), False, 'from rest_framework import serializers\n'), ((2438, 2465), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (2463, 2465), False, 'from rest_framework import serializers\n'), ((2481, 2516), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2514, 2516), False, 'from rest_framework import serializers\n'), ((2904, 2948), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""reported"""'}), "(source='reported')\n", (2929, 2948), False, 'from rest_framework import serializers\n'), ((2964, 2999), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {}), '()\n', (2997, 2999), False, 'from rest_framework import serializers\n'), ((3307, 3358), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_message.id"""'}), "(source='last_message.id')\n", (3332, 3358), False, 'from rest_framework import serializers\n'), ((3384, 3438), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_message.title"""'}), "(source='last_message.title')\n", (3409, 3438), False, 'from rest_framework import serializers\n'), ((3466, 3522), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_message.created"""'}), "(source='last_message.created')\n", (3491, 3522), False, 'from rest_framework import serializers\n'), ((3561, 3640), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""last_message.user.profile.rendered_username"""'}), "(source='last_message.user.profile.rendered_username')\n", (3586, 3640), False, 'from rest_framework import serializers\n'), ((3675, 3702), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {}), '()\n', (3700, 3702), False, 'from rest_framework import serializers\n'), ((4047, 4096), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.username"""'}), "(source='user.username')\n", (4072, 4096), False, 'from rest_framework import serializers\n'), ((4121, 4187), 'rest_framework.serializers.ReadOnlyField', 'serializers.ReadOnlyField', ([], {'source': '"""user.profile.rendered_username"""'}), 
"(source='user.profile.rendered_username')\n", (4146, 4187), False, 'from rest_framework import serializers\n'), ((5042, 5080), 'punkweb_boards.models.Shout.objects.create', 'Shout.objects.create', ([], {}), '(**validated_data)\n', (5062, 5080), False, 'from punkweb_boards.models import BoardProfile, Category, Subcategory, Thread, Post, Conversation, Message, Report, Shout\n')] |
import cv2
import ezdxf
import numpy as np
def draw_hatch(img, entity, color, mask):
for poly_path in entity.paths.paths:
# print(poly_path.path_type_flags)
polygon = np.array([vertex[:-1] for vertex in poly_path.vertices]).astype(int)
if poly_path.path_type_flags & 1 == 1:
cv2.fillPoly(img, [polygon], color)
cv2.fillPoly(mask, [polygon], (255, 255, 255))
else:
cv2.fillPoly(img, [polygon], (255, 255, 255))
return color
def draw_line(img, entity, color, mask):
p1 = entity.dxf.start[:-1]
p2 = entity.dxf.end[:-1]
cv2.line(img, (int(p1[0]), int(p1[1])), (int(p2[0]), int(p2[1])), color, 1)
cv2.line(mask, (int(p1[0]), int(p1[1])), (int(p2[0]), int(p2[1])), (255, 255, 255), 2)
return color
def draw_lwpolyline(img, entity, color, mask):
polyline = []
a = np.array(entity.lwpoints.values).astype(int)
while len(a) > 0:
polyline.append((a[0], a[1]))
a = a[5:]
cv2.polylines(img, [np.array(polyline)], entity.closed, color, 1)
cv2.polylines(mask, [np.array(polyline)], entity.closed, (255, 255, 255), 2)
return color
def draw_arc(img, entity, color, mask):
s = entity.dxf.start_angle * np.pi / 180
e = entity.dxf.end_angle * np.pi / 180
if s > e:
s -= 2 * np.pi
d = (e - s) / (int((e - s) * 180 / np.pi) + 1)
r = entity.dxf.radius
cx, cy = entity.dxf.center.xyz[:-1]
angles = np.arange(s, e + d / 2, d)
x = cx + r * np.cos(angles)
y = cy + r * np.sin(angles)
points = np.column_stack((x, y)).astype(int)
cv2.polylines(img, [points], abs(s - e) < 1e-9, color, 1)
cv2.polylines(mask, [points], abs(s - e) < 1e-9, (255, 255, 255), 2)
return color
def draw_circle(img, entity, color, mask):
r = entity.dxf.radius
cx, cy = entity.dxf.center.xyz[:-1]
cv2.circle(img, (int(cx), int(cy)), int(r), color, 1)
cv2.circle(mask, (int(cx), int(cy)), int(r), (255, 255, 255), -1)
return color
def draw_ellipse(img, entity, color, mask):
cx, cy = entity.dxf.center.xyz[:-1]
ma = entity.dxf.major_axis.magnitude
angle = entity.dxf.major_axis.angle_deg
mi = ma * entity.dxf.ratio
s = entity.dxf.start_param * 180 / np.pi
e = entity.dxf.end_param * 180 / np.pi
if entity.dxf.extrusion.z == -1:
s = 360 - s
e = 360 - e
cv2.ellipse(img, (int(cx), int(cy)), (int(ma), int(mi)), angle, s, e, color, 1)
cv2.ellipse(mask, (int(cx), int(cy)), (int(ma), int(mi)), angle, s, e, (255, 255, 255), 1)
return color
def draw_point(img, entity, color, mask):
cx, cy = entity.dxf.location.xyz[:-1]
cv2.circle(img, (int(cx), int(cy)), 0, color, 1)
cv2.circle(mask, (int(cx), int(cy)), 0, (255, 255, 255), -1)
return color
draw_map = {
'HATCH': draw_hatch,
'LINE': draw_line,
'LWPOLYLINE': draw_lwpolyline,
'ARC': draw_arc,
'CIRCLE': draw_circle,
'ELLIPSE': draw_ellipse,
'POINT': draw_point,
}
def paint(in_path, out_path, config):
doc = ezdxf.readfile(in_path)
extmax, extmin = doc.header['$EXTMAX'], doc.header['$EXTMIN']
xmin, ymin = np.floor(extmin[:-1]).astype(int)
xmax, ymax = np.ceil(extmax[:-1]).astype(int)
img = np.ones((ymax + ymin, xmax + xmin, 3), np.uint8) * 255
mask = np.zeros_like(img)
msp = doc.modelspace()
layers = config.get('layers', {})
colors = config.get('colors', {})
# print(doc.layers.entries.keys())
for layer_name, names in layers.items():
color = tuple(colors.get(layer_name, [0, 0, 0]))
for name in names:
if name not in doc.layers:
continue
entities = msp.query('*[layer=="%s"]' % name)
tmp = np.zeros((ymax + ymin, xmax + xmin), np.uint8)
for entity in entities:
if entity.DXFTYPE in draw_map:
draw_map[entity.DXFTYPE](img, entity, color, tmp)
else:
print("%s: %s" % (name, entity.DXFTYPE))
contours, hierarchy = cv2.findContours(tmp, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(mask, contours, -1, color, -1)
res, img_png = cv2.imencode('.png', cv2.flip(img, 0))
res, mask_png = cv2.imencode('.png', cv2.flip(mask, 0))
with open(out_path, 'wb') as f:
f.write(img_png.tobytes())
with open(out_path[:-4] + "_mask.png", 'wb') as f:
f.write(mask_png.tobytes())
| [
"cv2.fillPoly",
"numpy.ceil",
"cv2.drawContours",
"cv2.flip",
"numpy.ones",
"numpy.floor",
"numpy.column_stack",
"cv2.findContours",
"ezdxf.readfile",
"numpy.array",
"numpy.zeros",
"numpy.cos",
"numpy.sin",
"numpy.zeros_like",
"numpy.arange"
]
| [((1455, 1481), 'numpy.arange', 'np.arange', (['s', '(e + d / 2)', 'd'], {}), '(s, e + d / 2, d)\n', (1464, 1481), True, 'import numpy as np\n'), ((3039, 3062), 'ezdxf.readfile', 'ezdxf.readfile', (['in_path'], {}), '(in_path)\n', (3053, 3062), False, 'import ezdxf\n'), ((3306, 3324), 'numpy.zeros_like', 'np.zeros_like', (['img'], {}), '(img)\n', (3319, 3324), True, 'import numpy as np\n'), ((3240, 3288), 'numpy.ones', 'np.ones', (['(ymax + ymin, xmax + xmin, 3)', 'np.uint8'], {}), '((ymax + ymin, xmax + xmin, 3), np.uint8)\n', (3247, 3288), True, 'import numpy as np\n'), ((4220, 4236), 'cv2.flip', 'cv2.flip', (['img', '(0)'], {}), '(img, 0)\n', (4228, 4236), False, 'import cv2\n'), ((4279, 4296), 'cv2.flip', 'cv2.flip', (['mask', '(0)'], {}), '(mask, 0)\n', (4287, 4296), False, 'import cv2\n'), ((317, 352), 'cv2.fillPoly', 'cv2.fillPoly', (['img', '[polygon]', 'color'], {}), '(img, [polygon], color)\n', (329, 352), False, 'import cv2\n'), ((365, 411), 'cv2.fillPoly', 'cv2.fillPoly', (['mask', '[polygon]', '(255, 255, 255)'], {}), '(mask, [polygon], (255, 255, 255))\n', (377, 411), False, 'import cv2\n'), ((438, 483), 'cv2.fillPoly', 'cv2.fillPoly', (['img', '[polygon]', '(255, 255, 255)'], {}), '(img, [polygon], (255, 255, 255))\n', (450, 483), False, 'import cv2\n'), ((867, 899), 'numpy.array', 'np.array', (['entity.lwpoints.values'], {}), '(entity.lwpoints.values)\n', (875, 899), True, 'import numpy as np\n'), ((1014, 1032), 'numpy.array', 'np.array', (['polyline'], {}), '(polyline)\n', (1022, 1032), True, 'import numpy as np\n'), ((1085, 1103), 'numpy.array', 'np.array', (['polyline'], {}), '(polyline)\n', (1093, 1103), True, 'import numpy as np\n'), ((1499, 1513), 'numpy.cos', 'np.cos', (['angles'], {}), '(angles)\n', (1505, 1513), True, 'import numpy as np\n'), ((1531, 1545), 'numpy.sin', 'np.sin', (['angles'], {}), '(angles)\n', (1537, 1545), True, 'import numpy as np\n'), ((1559, 1582), 'numpy.column_stack', 'np.column_stack', (['(x, y)'], {}), '((x, y))\n', (1574, 1582), True, 'import numpy as np\n'), ((3146, 3167), 'numpy.floor', 'np.floor', (['extmin[:-1]'], {}), '(extmin[:-1])\n', (3154, 3167), True, 'import numpy as np\n'), ((3197, 3217), 'numpy.ceil', 'np.ceil', (['extmax[:-1]'], {}), '(extmax[:-1])\n', (3204, 3217), True, 'import numpy as np\n'), ((3736, 3782), 'numpy.zeros', 'np.zeros', (['(ymax + ymin, xmax + xmin)', 'np.uint8'], {}), '((ymax + ymin, xmax + xmin), np.uint8)\n', (3744, 3782), True, 'import numpy as np\n'), ((4053, 4118), 'cv2.findContours', 'cv2.findContours', (['tmp', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(tmp, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (4069, 4118), False, 'import cv2\n'), ((4131, 4178), 'cv2.drawContours', 'cv2.drawContours', (['mask', 'contours', '(-1)', 'color', '(-1)'], {}), '(mask, contours, -1, color, -1)\n', (4147, 4178), False, 'import cv2\n'), ((189, 245), 'numpy.array', 'np.array', (['[vertex[:-1] for vertex in poly_path.vertices]'], {}), '([vertex[:-1] for vertex in poly_path.vertices])\n', (197, 245), True, 'import numpy as np\n')] |
# Generated by Django 2.0.5 on 2019-07-26 06:45
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('TCS', '0041_auto_20190726_0030'),
]
operations = [
migrations.AlterModelOptions(
name='modelo',
options={'default_permissions': [], 'ordering': ['-id'], 'permissions': [('Can_View__Modelo', 'Ve modelos'), ('Can_Create__Modelo', 'Crea modelos'), ('Can_Update__Modelo', 'Modifica modelos'), ('Can_Delete__Modelo', 'Elimina modelos'), ('Can_Change__ModelTCS', 'Modifica modelos de equipo')], 'verbose_name': 'Modelo', 'verbose_name_plural': 'Modelos'},
),
]
| [
"django.db.migrations.AlterModelOptions"
]
| [((223, 645), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""modelo"""', 'options': "{'default_permissions': [], 'ordering': ['-id'], 'permissions': [(\n 'Can_View__Modelo', 'Ve modelos'), ('Can_Create__Modelo',\n 'Crea modelos'), ('Can_Update__Modelo', 'Modifica modelos'), (\n 'Can_Delete__Modelo', 'Elimina modelos'), ('Can_Change__ModelTCS',\n 'Modifica modelos de equipo')], 'verbose_name': 'Modelo',\n 'verbose_name_plural': 'Modelos'}"}), "(name='modelo', options={'default_permissions':\n [], 'ordering': ['-id'], 'permissions': [('Can_View__Modelo',\n 'Ve modelos'), ('Can_Create__Modelo', 'Crea modelos'), (\n 'Can_Update__Modelo', 'Modifica modelos'), ('Can_Delete__Modelo',\n 'Elimina modelos'), ('Can_Change__ModelTCS',\n 'Modifica modelos de equipo')], 'verbose_name': 'Modelo',\n 'verbose_name_plural': 'Modelos'})\n", (251, 645), False, 'from django.db import migrations\n')] |
import torch
from kornia.geometry.linalg import transform_points
from kornia.geometry.transform import remap
from kornia.utils import create_meshgrid
from .distort import distort_points, tilt_projection
# Based on https://github.com/opencv/opencv/blob/master/modules/calib3d/src/undistort.dispatch.cpp#L384
def undistort_points(points: torch.Tensor, K: torch.Tensor, dist: torch.Tensor) -> torch.Tensor:
r"""Compensate for lens distortion a set of 2D image points.
Radial :math:`(k_1, k_2, k_3, k_4, k_4, k_6)`,
tangential :math:`(p_1, p_2)`, thin prism :math:`(s_1, s_2, s_3, s_4)`, and tilt :math:`(\tau_x, \tau_y)`
distortion models are considered in this function.
Args:
points: Input image points with shape :math:`(*, N, 2)`.
K: Intrinsic camera matrix with shape :math:`(*, 3, 3)`.
dist: Distortion coefficients
:math:`(k_1,k_2,p_1,p_2[,k_3[,k_4,k_5,k_6[,s_1,s_2,s_3,s_4[,\tau_x,\tau_y]]]])`. This is
a vector with 4, 5, 8, 12 or 14 elements with shape :math:`(*, n)`.
Returns:
Undistorted 2D points with shape :math:`(*, N, 2)`.
Example:
>>> _ = torch.manual_seed(0)
>>> x = torch.rand(1, 4, 2)
>>> K = torch.eye(3)[None]
>>> dist = torch.rand(1, 4)
>>> undistort_points(x, K, dist)
tensor([[[-0.1513, -0.1165],
[ 0.0711, 0.1100],
[-0.0697, 0.0228],
[-0.1843, -0.1606]]])
"""
if points.dim() < 2 and points.shape[-1] != 2:
raise ValueError(f'points shape is invalid. Got {points.shape}.')
if K.shape[-2:] != (3, 3):
raise ValueError(f'K matrix shape is invalid. Got {K.shape}.')
if dist.shape[-1] not in [4, 5, 8, 12, 14]:
raise ValueError(f"Invalid number of distortion coefficients. Got {dist.shape[-1]}")
# Adding zeros to obtain vector with 14 coeffs.
if dist.shape[-1] < 14:
dist = torch.nn.functional.pad(dist, [0, 14 - dist.shape[-1]])
# Convert 2D points from pixels to normalized camera coordinates
cx: torch.Tensor = K[..., 0:1, 2] # princial point in x (Bx1)
cy: torch.Tensor = K[..., 1:2, 2] # princial point in y (Bx1)
fx: torch.Tensor = K[..., 0:1, 0] # focal in x (Bx1)
fy: torch.Tensor = K[..., 1:2, 1] # focal in y (Bx1)
# This is equivalent to K^-1 [u,v,1]^T
x: torch.Tensor = (points[..., 0] - cx) / fx # (BxN - Bx1)/Bx1 -> BxN
y: torch.Tensor = (points[..., 1] - cy) / fy # (BxN - Bx1)/Bx1 -> BxN
# Compensate for tilt distortion
if torch.any(dist[..., 12] != 0) or torch.any(dist[..., 13] != 0):
inv_tilt = tilt_projection(dist[..., 12], dist[..., 13], True)
# Transposed untilt points (instead of [x,y,1]^T, we obtain [x,y,1])
x, y = transform_points(inv_tilt, torch.stack([x, y], dim=-1)).unbind(-1)
# Iteratively undistort points
x0, y0 = x, y
for _ in range(5):
r2 = x * x + y * y
inv_rad_poly = (1 + dist[..., 5:6] * r2 + dist[..., 6:7] * r2 * r2 + dist[..., 7:8] * r2 ** 3) / (
1 + dist[..., 0:1] * r2 + dist[..., 1:2] * r2 * r2 + dist[..., 4:5] * r2 ** 3
)
deltaX = (
2 * dist[..., 2:3] * x * y
+ dist[..., 3:4] * (r2 + 2 * x * x)
+ dist[..., 8:9] * r2
+ dist[..., 9:10] * r2 * r2
)
deltaY = (
dist[..., 2:3] * (r2 + 2 * y * y)
+ 2 * dist[..., 3:4] * x * y
+ dist[..., 10:11] * r2
+ dist[..., 11:12] * r2 * r2
)
x = (x0 - deltaX) * inv_rad_poly
y = (y0 - deltaY) * inv_rad_poly
# Convert points from normalized camera coordinates to pixel coordinates
x = fx * x + cx
y = fy * y + cy
return torch.stack([x, y], -1)
# Based on https://github.com/opencv/opencv/blob/master/modules/calib3d/src/undistort.dispatch.cpp#L287
def undistort_image(image: torch.Tensor, K: torch.Tensor, dist: torch.Tensor) -> torch.Tensor:
r"""Compensate an image for lens distortion.
Radial :math:`(k_1, k_2, k_3, k_4, k_4, k_6)`,
tangential :math:`(p_1, p_2)`, thin prism :math:`(s_1, s_2, s_3, s_4)`, and tilt :math:`(\tau_x, \tau_y)`
distortion models are considered in this function.
Args:
image: Input image with shape :math:`(*, C, H, W)`.
K: Intrinsic camera matrix with shape :math:`(*, 3, 3)`.
dist: Distortion coefficients
:math:`(k_1,k_2,p_1,p_2[,k_3[,k_4,k_5,k_6[,s_1,s_2,s_3,s_4[,\tau_x,\tau_y]]]])`. This is
a vector with 4, 5, 8, 12 or 14 elements with shape :math:`(*, n)`.
Returns:
Undistorted image with shape :math:`(*, C, H, W)`.
Example:
>>> img = torch.rand(1, 3, 5, 5)
>>> K = torch.eye(3)[None]
>>> dist_coeff = torch.rand(4)
>>> out = undistort_image(img, K, dist_coeff)
>>> out.shape
torch.Size([1, 3, 5, 5])
"""
if len(image.shape) < 2:
raise ValueError(f"Image shape is invalid. Got: {image.shape}.")
if K.shape[-2:] != (3, 3):
raise ValueError(f'K matrix shape is invalid. Got {K.shape}.')
if dist.shape[-1] not in [4, 5, 8, 12, 14]:
raise ValueError(f'Invalid number of distortion coefficients. Got {dist.shape[-1]}.')
if not image.is_floating_point():
raise ValueError(f'Invalid input image data type. Input should be float. Got {image.dtype}.')
B, _, rows, cols = image.shape
# Create point coordinates for each pixel of the image
xy_grid: torch.Tensor = create_meshgrid(rows, cols, False, image.device, image.dtype)
pts = xy_grid.reshape(-1, 2) # (rows*cols)x2 matrix of pixel coordinates
# Distort points and define maps
ptsd: torch.Tensor = distort_points(pts, K, dist) # Bx(rows*cols)x2
mapx: torch.Tensor = ptsd[..., 0].reshape(B, rows, cols) # B x rows x cols, float
mapy: torch.Tensor = ptsd[..., 1].reshape(B, rows, cols) # B x rows x cols, float
# Remap image to undistort
out = remap(image, mapx, mapy, align_corners=True)
return out
| [
"torch.any",
"torch.stack",
"kornia.geometry.transform.remap",
"torch.nn.functional.pad",
"kornia.utils.create_meshgrid"
]
| [((3764, 3787), 'torch.stack', 'torch.stack', (['[x, y]', '(-1)'], {}), '([x, y], -1)\n', (3775, 3787), False, 'import torch\n'), ((5543, 5604), 'kornia.utils.create_meshgrid', 'create_meshgrid', (['rows', 'cols', '(False)', 'image.device', 'image.dtype'], {}), '(rows, cols, False, image.device, image.dtype)\n', (5558, 5604), False, 'from kornia.utils import create_meshgrid\n'), ((6010, 6054), 'kornia.geometry.transform.remap', 'remap', (['image', 'mapx', 'mapy'], {'align_corners': '(True)'}), '(image, mapx, mapy, align_corners=True)\n', (6015, 6054), False, 'from kornia.geometry.transform import remap\n'), ((1947, 2002), 'torch.nn.functional.pad', 'torch.nn.functional.pad', (['dist', '[0, 14 - dist.shape[-1]]'], {}), '(dist, [0, 14 - dist.shape[-1]])\n', (1970, 2002), False, 'import torch\n'), ((2561, 2590), 'torch.any', 'torch.any', (['(dist[..., 12] != 0)'], {}), '(dist[..., 12] != 0)\n', (2570, 2590), False, 'import torch\n'), ((2594, 2623), 'torch.any', 'torch.any', (['(dist[..., 13] != 0)'], {}), '(dist[..., 13] != 0)\n', (2603, 2623), False, 'import torch\n'), ((2816, 2843), 'torch.stack', 'torch.stack', (['[x, y]'], {'dim': '(-1)'}), '([x, y], dim=-1)\n', (2827, 2843), False, 'import torch\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
import numpy as np
from os import path as op
from ..util import load_data_file
# This is the package data dir, not the dir for config, etc.
DATA_DIR = op.join(op.dirname(__file__), '_data')
def load_iris():
"""Load the iris dataset
Returns
-------
iris : NpzFile
data['data'] : a (150, 4) NumPy array with the iris' features
data['group'] : a (150,) NumPy array with the iris' group
"""
return np.load(load_data_file('iris/iris.npz',
force_download='2014-09-04'))
def load_crate():
"""Load an image of a crate
Returns
-------
crate : array
256x256x3 crate image.
"""
return np.load(load_data_file('orig/crate.npz'))['crate']
def pack_unit(value):
"""Packs float values between [0,1] into 4 unsigned int8
Returns
-------
pack: array
packed interpolation kernel
"""
pack = np.zeros(value.shape + (4,), dtype=np.ubyte)
for i in range(4):
value, pack[..., i] = np.modf(value * 256.)
return pack
def pack_ieee(value):
"""Packs float ieee binary representation into 4 unsigned int8
Returns
-------
pack: array
packed interpolation kernel
"""
return np.fromstring(value.tobytes(),
np.ubyte).reshape((value.shape + (4,)))
def load_spatial_filters(packed=True):
"""Load spatial-filters kernel
Parameters
----------
packed : bool
Whether or not the data should be in "packed" representation
for use in GLSL code.
Returns
-------
kernel : array
16x1024x4 (packed float in rgba) or
16x1024 (unpacked float)
16 interpolation kernel with length 1024 each.
names : tuple of strings
Respective interpolation names, plus "Nearest" which does
not require a filter but can still be used
"""
names = ("Bilinear", "Hanning", "Hamming", "Hermite",
"Kaiser", "Quadric", "Bicubic", "CatRom",
"Mitchell", "Spline16", "Spline36", "Gaussian",
"Bessel", "Sinc", "Lanczos", "Blackman", "Nearest")
kernel = np.load(op.join(DATA_DIR, 'spatial-filters.npy'))
if packed:
# convert the kernel to a packed representation
kernel = pack_unit(kernel)
return kernel, names
| [
"os.path.dirname",
"numpy.zeros",
"os.path.join",
"numpy.modf"
]
| [((321, 341), 'os.path.dirname', 'op.dirname', (['__file__'], {}), '(__file__)\n', (331, 341), True, 'from os import path as op\n'), ((1080, 1124), 'numpy.zeros', 'np.zeros', (['(value.shape + (4,))'], {'dtype': 'np.ubyte'}), '(value.shape + (4,), dtype=np.ubyte)\n', (1088, 1124), True, 'import numpy as np\n'), ((1178, 1200), 'numpy.modf', 'np.modf', (['(value * 256.0)'], {}), '(value * 256.0)\n', (1185, 1200), True, 'import numpy as np\n'), ((2315, 2355), 'os.path.join', 'op.join', (['DATA_DIR', '"""spatial-filters.npy"""'], {}), "(DATA_DIR, 'spatial-filters.npy')\n", (2322, 2355), True, 'from os import path as op\n')] |
#! @@Author : <NAME>
#! @@Create : 18 January 2019
#! @@Modify : 19 January 2019
#! Images are taken from reddit.
#! Use a VPN because reddit's DNS is blocked in Indonesia.
import os
import json
import requests
import progressbar
from PIL import Image
from lxml import html
from time import sleep
from ImageDeleter import delete_png
from InstagramAPI import InstagramAPI
InstagramAPI = InstagramAPI(input("Username: "), input("Password: "))
while True:
if (InstagramAPI.login()):
break
else:
for x in range(300):
os.system('cls')
print(300-x)
sleep(1)
global useable
useable = []
os.system('pause')
def get_image():
print("Memulai mendapatkan gambar ..")
json_raw = requests.get('https://www.reddit.com/r/me_irl/new/.json', headers = {'User-agent': 'Image_Testing_V3'}).json()
json_data = json_raw['data']
json_children = json_data['children']
for x in range(len(json_children)):
json_current = json_children[x]
json_current_data = json_current['data']
json_current_url = json_current_data['url']
if "https://i.redd.it/" not in json_current_url:
pass
else:
if json_current_url not in useable:
useable.append(json_current_url)
download()
else:
pass
def download():
print("Memulai download ..")
global filename
new_filename = ""
filename = useable[-1]
filename = filename.replace("https://i.redd.it/", "")
print(filename)
f = open(filename, 'wb')
f.write(requests.get(useable[-1]).content)
f.close()
if (filename[-3] + filename[-2] + filename[-1]) != 'jpg':
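        # Anything that is not already a JPEG gets converted to RGB and re-saved as a JPEG before uploading.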
im = Image.open(filename)
for x in range(len(filename)-3):
new_filename = new_filename + filename[x]
im = im.convert("RGB")
im.save("edit" + new_filename + 'jpg')
new_filename = "edit" + new_filename + "jpg"
print(new_filename)
else:
new_filename = filename
upload(new_filename)
def delete_image(bad_file):
print("Memulai menghapus gambar ..")
if (bad_file[0] + bad_file[1] + bad_file[2] + bad_file[3]) == "edit":
png_bad_file = ''
for x in range(len(bad_file)-3):
png_bad_file = png_bad_file + bad_file[x]
png_bad_file = png_bad_file + "png"
try:
os.remove(png_bad_file)
except Exception as e:
pass
os.remove(bad_file)
delete_png()
print("Selesai.")
wait()
def upload(file):
print("Memulai upload ..")
caption = ""
InstagramAPI.uploadPhoto(file, caption=caption)
delete_image(file)
def wait():
for i in progressbar.progressbar(range(1800)):
sleep(1)
while True:
get_image()
print("Gambar sukses di upload.")
sleep(5)
os.system('pause')
| [
"InstagramAPI.InstagramAPI.uploadPhoto",
"PIL.Image.open",
"time.sleep",
"ImageDeleter.delete_png",
"requests.get",
"os.system",
"InstagramAPI.InstagramAPI.login",
"os.remove"
]
| [((651, 669), 'os.system', 'os.system', (['"""pause"""'], {}), "('pause')\n", (660, 669), False, 'import os\n'), ((472, 492), 'InstagramAPI.InstagramAPI.login', 'InstagramAPI.login', ([], {}), '()\n', (490, 492), False, 'from InstagramAPI import InstagramAPI\n'), ((2478, 2497), 'os.remove', 'os.remove', (['bad_file'], {}), '(bad_file)\n', (2487, 2497), False, 'import os\n'), ((2502, 2514), 'ImageDeleter.delete_png', 'delete_png', ([], {}), '()\n', (2512, 2514), False, 'from ImageDeleter import delete_png\n'), ((2619, 2666), 'InstagramAPI.InstagramAPI.uploadPhoto', 'InstagramAPI.uploadPhoto', (['file'], {'caption': 'caption'}), '(file, caption=caption)\n', (2643, 2666), False, 'from InstagramAPI import InstagramAPI\n'), ((2842, 2850), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (2847, 2850), False, 'from time import sleep\n'), ((2855, 2873), 'os.system', 'os.system', (['"""pause"""'], {}), "('pause')\n", (2864, 2873), False, 'import os\n'), ((1726, 1746), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (1736, 1746), False, 'from PIL import Image\n'), ((2762, 2770), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (2767, 2770), False, 'from time import sleep\n'), ((560, 576), 'os.system', 'os.system', (['"""cls"""'], {}), "('cls')\n", (569, 576), False, 'import os\n'), ((614, 622), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (619, 622), False, 'from time import sleep\n'), ((746, 852), 'requests.get', 'requests.get', (['"""https://www.reddit.com/r/me_irl/new/.json"""'], {'headers': "{'User-agent': 'Image_Testing_V3'}"}), "('https://www.reddit.com/r/me_irl/new/.json', headers={\n 'User-agent': 'Image_Testing_V3'})\n", (758, 852), False, 'import requests\n'), ((1602, 1627), 'requests.get', 'requests.get', (['useable[-1]'], {}), '(useable[-1])\n', (1614, 1627), False, 'import requests\n'), ((2402, 2425), 'os.remove', 'os.remove', (['png_bad_file'], {}), '(png_bad_file)\n', (2411, 2425), False, 'import os\n')] |
from collections.abc import MutableMapping, Container
from datetime import datetime, timedelta
from pyvalid import accepts
class LimitedTimeTable(MutableMapping, Container):
def __init__(self, time_span):
self.__storage = dict()
self.__time_span = None
self.time_span = time_span
@property
def time_span(self):
return self.__time_span
@time_span.setter
@accepts(object, timedelta)
def time_span(self, value):
self.__time_span = value
@property
def oldest(self):
value = None
if self.__len__() > 0:
value = min(self.__storage.keys())
return value
@property
def newest(self):
value = None
if self.__len__() > 0:
value = max(self.__storage.keys())
return value
def oldest_keys(self, size):
for key in self.__get_slice(0, size):
yield key
def oldest_values(self, size):
for key in self.oldest_keys(size):
yield self.__storage.get(key)
def oldest_items(self, size):
for key in self.oldest_keys(size):
yield (key, self.__storage.get(key))
def newest_keys(self, size):
for key in self.__get_slice(-size, None):
yield key
def newest_values(self, size):
for key in self.newest_keys(size):
yield self.__storage.get(key)
def newest_items(self, size):
for key in self.newest_keys(size):
yield (key, self.__storage.get(key))
def __get_slice(self, start, end):
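        # Keys are datetime stamps, so sorting them gives chronological order for the oldest/newest helpers.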
keys = sorted(self.keys())
return keys[start:end]
def __getitem__(self, item):
return self.__storage.__getitem__(item)
@accepts(object, datetime, object)
def __setitem__(self, key, value):
now = datetime.now()
if key > now:
raise ValueError('Can\'t set item from future!')
oldest = self.oldest
if (oldest is not None) and (oldest != key):
longest_time_span = now - oldest
# Item is too old for current timetable
if longest_time_span >= self.time_span:
self.__delitem__(oldest)
return self.__storage.__setitem__(key, value)
def __delitem__(self, key):
return self.__storage.__delitem__(key)
def __len__(self):
return self.__storage.__len__()
def __iter__(self):
return self.__storage.__iter__()
def __contains__(self, item):
return self.__storage.__contains__(item)
__all__ = ['LimitedTimeTable']
| [
"datetime.datetime.now",
"pyvalid.accepts"
]
| [((407, 433), 'pyvalid.accepts', 'accepts', (['object', 'timedelta'], {}), '(object, timedelta)\n', (414, 433), False, 'from pyvalid import accepts\n'), ((1711, 1744), 'pyvalid.accepts', 'accepts', (['object', 'datetime', 'object'], {}), '(object, datetime, object)\n', (1718, 1744), False, 'from pyvalid import accepts\n'), ((1798, 1812), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1810, 1812), False, 'from datetime import datetime, timedelta\n')] |
#!/usr/bin/python
# Copyright (C) 2015 Ion Torrent Systems, Inc. All Rights Reserved
import subprocess
import re
pluginName = 'DataExport'
pluginDir = ""
networkFS = ["nfs", "cifs"]
localFS = ["ext4", "ext3", "xfs", "ntfs", "exfat", "vboxsf"]
supportedFS = ",".join(localFS + networkFS)
def test(bucket):
return bucket
def runProcess(exe):
p = subprocess.Popen(exe, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return iter(p.stdout.readline, b'')
def runProcessAndReturnLastLine(exe):
p = subprocess.Popen(exe, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return p.stdout.readlines()[-1]
def backupDevices(bucket):
devices = ""
cmd = "mount -l -t " + supportedFS
for line in runProcess(cmd.split()):
line_arr = line.split()
folder = line_arr[2]
fstype = line_arr[4]
perms = line_arr[5]
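        # Only writable mounts are offered; local filesystems must additionally live under /media or /mnt.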
if perms.find('w') != -1:
use = True
if fstype in localFS:
m = re.match('^(/media|/mnt)', folder)
if not m:
use = False
if use:
cmd2 = "df -h %s " % folder
df = runProcessAndReturnLastLine(cmd2.split())
avail = df.split()[2]
devices = devices + "<OPTION VALUE=\"" + folder + "\">" + folder + " (" + avail + " free, " + fstype + ")</option>"
return devices
| [
"subprocess.Popen",
"re.match"
]
| [((359, 430), 'subprocess.Popen', 'subprocess.Popen', (['exe'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(exe, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n', (375, 430), False, 'import subprocess\n'), ((519, 590), 'subprocess.Popen', 'subprocess.Popen', (['exe'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(exe, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)\n', (535, 590), False, 'import subprocess\n'), ((982, 1016), 're.match', 're.match', (['"""^(/media|/mnt)"""', 'folder'], {}), "('^(/media|/mnt)', folder)\n", (990, 1016), False, 'import re\n')] |
# Ported from the JavaScript version to Python and Pygame Zero
# Designed to work well with mu-editor environment.
#
# The original JavaScript version was done by <NAME>
# at https://github.com/beneater/boids (MIT License)
# No endorsement implied.
#
# Complex numbers are used as vectors to integrate the x and y positions and velocities
# MIT licesense (details in parent directory)
import random
import time
HEIGHT = 500 # window height
WIDTH = 900 # window width
MARGIN = 150 # distance at which to start avoiding the edge
NUM_BOIDS = 75
VISUAL_RANGE = 70 # radius of influence for most of the algorithms
SPEED_LIMIT_UPPER = 13 # boids can only fly so fast
SPEED_LIMIT_LOWER = 3 # a boid will fall if it flies too slowly
SPEED_INIT = 20 # range for random velocity
MIN_DISTANCE = 10 # the distance to stay away from other boids
AVOID_FACTOR = 0.05 # % location change if too close
CENTERING_FACTOR = 0.050 # % location change to pull to center
MATCHING_FACTOR = 0.015 # % velocity change if close
MARGIN_FACTOR = 0.25+0.0j # rate of turning away from edge
HISTORY_LENGTH = 30
BACK_COLOR = (0, 0, 90)
BOID_COLOR = (255, 128, 128)
BOID_SIZE = 8
TRAIL_COLOR = (255, 255, 64)
g_boids = []
class Boid:
def __init__(boid) :
boid.loc = complex(
(random.randint(0, WIDTH)),
(random.randint(0, HEIGHT)))
boid.vel = complex(
(random.randint(-SPEED_INIT, SPEED_INIT)),
(random.randint(-SPEED_INIT, SPEED_INIT)))
boid.history = []
def keep_within_bounds(boid) :
# Constrain a boid to within the window. If it gets too close to an edge,
# nudge it back in and reverse its direction.
if (boid.loc.real < MARGIN):
boid.vel += MARGIN_FACTOR * 1.0
if (boid.loc.real > WIDTH - MARGIN) :
boid.vel += MARGIN_FACTOR * -1.0
if (boid.loc.imag < MARGIN) :
boid.vel += MARGIN_FACTOR * 1.0j
if (boid.loc.imag > HEIGHT - MARGIN) :
boid.vel += MARGIN_FACTOR * -1.0j
def fly_towards_center(boid):
# Find the center of mass of the other boids and
# adjust velocity slightly to point towards the
# center of mass.
center = 0+0j
num_neighbors = 0
for other_boid in g_boids :
if abs(boid.loc - other_boid.loc) < VISUAL_RANGE :
center += other_boid.loc
num_neighbors += 1
if num_neighbors > 0 :
center = center / num_neighbors
boid.loc += (center - boid.loc) * CENTERING_FACTOR
def avoid_others(boid):
# Move away from other boids that are too close to avoid colliding
move = 0+0j
for other_boid in g_boids :
if not (other_boid is boid) :
if abs(boid.loc - other_boid.loc) < MIN_DISTANCE :
move += boid.loc - other_boid.loc
boid.vel += move * AVOID_FACTOR
def match_velocity(boid):
# Find the average velocity (speed and direction)
# of the other boids and adjust velocity slightly to match.
avg_vel = 0+0j
num_neighbors = 0
for otherBoid in g_boids:
if abs(boid.loc - otherBoid.loc) < VISUAL_RANGE :
avg_vel += otherBoid.vel
num_neighbors += 1
if num_neighbors > 0:
avg_vel /= num_neighbors
boid.vel += (avg_vel - boid.vel) * MATCHING_FACTOR
def limit_speed(boid):
# Speed will naturally vary in flocking behavior,
# but real animals can't go arbitrarily fast (or slow)
speed = abs(boid.vel)
if (speed > SPEED_LIMIT_UPPER) :
boid.vel = boid.vel / speed * SPEED_LIMIT_UPPER
if (speed < SPEED_LIMIT_LOWER) :
boid.vel = boid.vel / speed * SPEED_LIMIT_LOWER
return
def draw(boid):
screen.draw.filled_circle((boid.loc.real, boid.loc.imag), BOID_SIZE, BOID_COLOR)
tail = boid.loc + boid.vel * -1.8
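        # The tail extends opposite to the velocity, so the drawn line shows which way the boid is heading.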
screen.draw.line(
(boid.loc.real, boid.loc.imag),
(tail.real, tail.imag),
BOID_COLOR)
def draw_trail(boid):
pt_from = (boid.loc.real, boid.loc.imag)
for p in boid.history:
pt_to = (p.real, p.imag)
screen.draw.line(pt_from, pt_to, TRAIL_COLOR)
pt_from = pt_to
def draw():
screen.fill(BACK_COLOR)
if keyboard.space:
for boid in g_boids:
boid.draw_trail()
for boid in g_boids:
boid.draw()
screen.draw.text("space:tails r:restart", (20, 20))
def update():
for boid in g_boids:
# Apply rules
boid.fly_towards_center()
boid.avoid_others()
boid.match_velocity()
boid.limit_speed()
boid.keep_within_bounds()
# Update the position based on the current velocity
boid.loc += boid.vel
boid.history.insert(0, boid.loc)
boid.history = boid.history[:HISTORY_LENGTH]
def init():
global g_boids
g_boids = [Boid() for _ in range(NUM_BOIDS)]
def on_key_down(key, mod, unicode):
if (key == keys.R):
init()
init()
| [
"random.randint"
]
| [((1346, 1370), 'random.randint', 'random.randint', (['(0)', 'WIDTH'], {}), '(0, WIDTH)\n', (1360, 1370), False, 'import random\n'), ((1386, 1411), 'random.randint', 'random.randint', (['(0)', 'HEIGHT'], {}), '(0, HEIGHT)\n', (1400, 1411), False, 'import random\n'), ((1455, 1494), 'random.randint', 'random.randint', (['(-SPEED_INIT)', 'SPEED_INIT'], {}), '(-SPEED_INIT, SPEED_INIT)\n', (1469, 1494), False, 'import random\n'), ((1510, 1549), 'random.randint', 'random.randint', (['(-SPEED_INIT)', 'SPEED_INIT'], {}), '(-SPEED_INIT, SPEED_INIT)\n', (1524, 1549), False, 'import random\n')] |
import warnings
import numpy as np
import torch
import torch.nn.functional as F
from sklearn import metrics
from torch.utils.data import DataLoader, SequentialSampler, TensorDataset
from tqdm import tqdm
from datasets.bert_processors.abstract_processor import convert_examples_to_features_with_emotion, \
convert_examples_to_hierarchical_features
from utils.preprocessing import pad_input_matrix
from utils.tokenization import BertTokenizer
from utils.emotion import Emotion
# Suppress warnings from sklearn.metrics
warnings.filterwarnings('ignore')
class BertEvaluator(object):
def __init__(self, model, processor, args, split='dev'):
self.args = args
self.model = model
self.processor = processor
self.tokenizer = BertTokenizer.from_pretrained(args.model, is_lowercase=args.is_lowercase)
self.emotioner = Emotion(args.nrc_path, args.max_em_len, args.emotion_filters)
if split == 'test':
self.eval_examples = self.processor.get_test_examples(args.data_dir, args.test_name)
elif split == 'dev':
self.eval_examples = self.processor.get_dev_examples(args.data_dir, args.dev_name)
else:
self.eval_examples = self.processor.get_any_examples(args.data_dir, split)
def get_scores(self, silent=False, return_indices=False):
all_indices = []
if self.args.is_hierarchical:
eval_features = convert_examples_to_hierarchical_features(
self.eval_examples, self.args.max_seq_length, self.tokenizer)
else:
eval_features = convert_examples_to_features_with_emotion(
self.eval_examples, self.args.max_seq_length, self.tokenizer, self.emotioner)
unpadded_input_ids = [f.input_ids for f in eval_features]
unpadded_input_mask = [f.input_mask for f in eval_features]
unpadded_segment_ids = [f.segment_ids for f in eval_features]
unpadded_emotion_scores = [f.sentiment_scores for f in eval_features]
if self.args.is_hierarchical:
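            # Hierarchical inputs are padded to max_doc_length rows so every example stacks into fixed-size tensors below.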
pad_input_matrix(unpadded_input_ids, self.args.max_doc_length)
pad_input_matrix(unpadded_input_mask, self.args.max_doc_length)
pad_input_matrix(unpadded_segment_ids, self.args.max_doc_length)
padded_input_ids = torch.tensor(unpadded_input_ids, dtype=torch.long)
padded_input_mask = torch.tensor(unpadded_input_mask, dtype=torch.long)
padded_segment_ids = torch.tensor(unpadded_segment_ids, dtype=torch.long)
padded_emotion_ids = torch.tensor(unpadded_emotion_scores, dtype=torch.long)
label_ids = torch.tensor([f.label_id for f in eval_features], dtype=torch.long)
eval_data = TensorDataset(padded_input_ids, padded_input_mask, padded_segment_ids, padded_emotion_ids, label_ids)
eval_sampler = SequentialSampler(eval_data)
eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=self.args.batch_size)
self.model.eval()
total_loss = 0
nb_eval_steps, nb_eval_examples = 0, 0
predicted_labels, target_labels = list(), list()
for input_ids, input_mask, segment_ids, emotion_ids, label_ids in tqdm(eval_dataloader, desc="Evaluating", disable=silent):
input_ids = input_ids.to(self.args.device)
input_mask = input_mask.to(self.args.device)
segment_ids = segment_ids.to(self.args.device)
emotion_ids = emotion_ids.to(self.args.device)
label_ids = label_ids.to(self.args.device)
with torch.no_grad():
if return_indices:
outs = self.model(input_ids, segment_ids, input_mask, emotion_ids=emotion_ids, return_indices=return_indices)
else:
outs = self.model(input_ids, segment_ids, input_mask, emotion_ids=emotion_ids)
if isinstance(outs, tuple):
outs, _ = outs
if return_indices:
logits, indices = outs
all_indices.extend(indices.cpu().detach().numpy())
else:
logits = outs
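            # Multi-label: per-class sigmoid + BCE-with-logits; single-label: class argmax + cross-entropy.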
if self.args.is_multilabel:
predicted_labels.extend(F.sigmoid(logits).round().long().cpu().detach().numpy())
target_labels.extend(label_ids.cpu().detach().numpy())
loss = F.binary_cross_entropy_with_logits(logits, label_ids.float(), size_average=False)
average, average_mac = 'micro', 'macro'
else:
predicted_labels.extend(torch.argmax(logits, dim=1).cpu().detach().numpy())
target_labels.extend(torch.argmax(label_ids, dim=1).cpu().detach().numpy())
loss = F.cross_entropy(logits, torch.argmax(label_ids, dim=1))
average, average_mac = 'binary', 'binary'
if self.args.n_gpu > 1:
loss = loss.mean()
if self.args.gradient_accumulation_steps > 1:
loss = loss / self.args.gradient_accumulation_steps
total_loss += loss.item()
nb_eval_examples += input_ids.size(0)
nb_eval_steps += 1
predicted_labels, target_labels = np.array(predicted_labels), np.array(target_labels)
accuracy = metrics.accuracy_score(target_labels, predicted_labels)
precision = metrics.precision_score(target_labels, predicted_labels, average=average)
recall = metrics.recall_score(target_labels, predicted_labels, average=average)
avg_loss = total_loss / nb_eval_steps
hamming_loss = metrics.hamming_loss(target_labels, predicted_labels)
jaccard_score = metrics.jaccard_score(target_labels, predicted_labels, average=average)
f1_micro = metrics.f1_score(target_labels, predicted_labels, average=average)
f1_macro = metrics.f1_score(target_labels, predicted_labels, average=average_mac)
if return_indices:
return [accuracy, precision, recall, f1_micro, avg_loss, f1_macro, hamming_loss, jaccard_score, predicted_labels, target_labels, all_indices],\
['accuracy', 'precision', 'recall', 'f1_micro', 'avg_loss', 'f1_macro', 'hamming_loss', 'jaccard', 'predicted_labels', 'target_labels', 'all_indices']
else:
return [accuracy, precision, recall, f1_micro, avg_loss, f1_macro, hamming_loss, jaccard_score, predicted_labels, target_labels],\
['accuracy', 'precision', 'recall', 'f1_micro', 'avg_loss', 'f1_macro', 'hamming_loss', 'jaccard', 'predicted_labels', 'target_labels']
def get_bert_layers(self, silent=False, last_bert_layers=-1):
if self.args.is_hierarchical:
eval_features = convert_examples_to_hierarchical_features(
self.eval_examples, self.args.max_seq_length, self.tokenizer)
else:
eval_features = convert_examples_to_features_with_emotion(
self.eval_examples, self.args.max_seq_length, self.tokenizer, self.emotioner)
unpadded_input_ids = [f.input_ids for f in eval_features]
unpadded_input_mask = [f.input_mask for f in eval_features]
unpadded_segment_ids = [f.segment_ids for f in eval_features]
        unpadded_emotion_ids = [f.sentiment_scores for f in eval_features]
if self.args.is_hierarchical:
pad_input_matrix(unpadded_input_ids, self.args.max_doc_length)
pad_input_matrix(unpadded_input_mask, self.args.max_doc_length)
pad_input_matrix(unpadded_segment_ids, self.args.max_doc_length)
padded_input_ids = torch.tensor(unpadded_input_ids, dtype=torch.long)
padded_input_mask = torch.tensor(unpadded_input_mask, dtype=torch.long)
padded_segment_ids = torch.tensor(unpadded_segment_ids, dtype=torch.long)
padded_emotion_ids = torch.tensor(unpadded_emotion_ids, dtype=torch.long)
label_ids = torch.tensor([f.label_id for f in eval_features], dtype=torch.long)
eval_data = TensorDataset(padded_input_ids, padded_input_mask, padded_segment_ids, padded_emotion_ids, label_ids)
eval_sampler = SequentialSampler(eval_data)
eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=self.args.batch_size)
self.model.eval()
bert_layers_l, label_ids_l = [], []
for input_ids, input_mask, segment_ids, emotion_ids, label_ids in tqdm(eval_dataloader, desc="Evaluating", disable=silent):
input_ids = input_ids.to(self.args.device)
input_mask = input_mask.to(self.args.device)
segment_ids = segment_ids.to(self.args.device)
emotion_ids = emotion_ids.to(self.args.device)
label_ids = label_ids.to(self.args.device)
with torch.no_grad():
bert_layers = self.model.get_bert_embedding(input_ids, segment_ids, input_mask, emotion_ids=emotion_ids, last_bert_layers=last_bert_layers)
label_ids = torch.argmax(label_ids, dim=1).cpu().detach().numpy()
bert_layers_l.extend(bert_layers)
label_ids_l.extend(label_ids)
bert_layers_l = torch.stack(bert_layers_l, dim=0)
return bert_layers_l, label_ids_l
| [
"datasets.bert_processors.abstract_processor.convert_examples_to_hierarchical_features",
"torch.nn.functional.sigmoid",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_score",
"numpy.array",
"sklearn.metrics.jaccard_score",
"utils.emotion.Emotion",
"sklearn.metrics.hamming_loss",
"datasets.bert_processors.abstract_processor.convert_examples_to_features_with_emotion",
"torch.argmax",
"torch.utils.data.SequentialSampler",
"torch.utils.data.TensorDataset",
"sklearn.metrics.accuracy_score",
"warnings.filterwarnings",
"utils.preprocessing.pad_input_matrix",
"sklearn.metrics.f1_score",
"tqdm.tqdm",
"torch.stack",
"torch.tensor",
"torch.utils.data.DataLoader",
"torch.no_grad",
"utils.tokenization.BertTokenizer.from_pretrained"
]
| [((539, 572), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (562, 572), False, 'import warnings\n'), ((785, 858), 'utils.tokenization.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['args.model'], {'is_lowercase': 'args.is_lowercase'}), '(args.model, is_lowercase=args.is_lowercase)\n', (814, 858), False, 'from utils.tokenization import BertTokenizer\n'), ((885, 946), 'utils.emotion.Emotion', 'Emotion', (['args.nrc_path', 'args.max_em_len', 'args.emotion_filters'], {}), '(args.nrc_path, args.max_em_len, args.emotion_filters)\n', (892, 946), False, 'from utils.emotion import Emotion\n'), ((2358, 2408), 'torch.tensor', 'torch.tensor', (['unpadded_input_ids'], {'dtype': 'torch.long'}), '(unpadded_input_ids, dtype=torch.long)\n', (2370, 2408), False, 'import torch\n'), ((2438, 2489), 'torch.tensor', 'torch.tensor', (['unpadded_input_mask'], {'dtype': 'torch.long'}), '(unpadded_input_mask, dtype=torch.long)\n', (2450, 2489), False, 'import torch\n'), ((2520, 2572), 'torch.tensor', 'torch.tensor', (['unpadded_segment_ids'], {'dtype': 'torch.long'}), '(unpadded_segment_ids, dtype=torch.long)\n', (2532, 2572), False, 'import torch\n'), ((2603, 2658), 'torch.tensor', 'torch.tensor', (['unpadded_emotion_scores'], {'dtype': 'torch.long'}), '(unpadded_emotion_scores, dtype=torch.long)\n', (2615, 2658), False, 'import torch\n'), ((2680, 2747), 'torch.tensor', 'torch.tensor', (['[f.label_id for f in eval_features]'], {'dtype': 'torch.long'}), '([f.label_id for f in eval_features], dtype=torch.long)\n', (2692, 2747), False, 'import torch\n'), ((2771, 2876), 'torch.utils.data.TensorDataset', 'TensorDataset', (['padded_input_ids', 'padded_input_mask', 'padded_segment_ids', 'padded_emotion_ids', 'label_ids'], {}), '(padded_input_ids, padded_input_mask, padded_segment_ids,\n padded_emotion_ids, label_ids)\n', (2784, 2876), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((2897, 2925), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['eval_data'], {}), '(eval_data)\n', (2914, 2925), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((2953, 3029), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_data'], {'sampler': 'eval_sampler', 'batch_size': 'self.args.batch_size'}), '(eval_data, sampler=eval_sampler, batch_size=self.args.batch_size)\n', (2963, 3029), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((3268, 3324), 'tqdm.tqdm', 'tqdm', (['eval_dataloader'], {'desc': '"""Evaluating"""', 'disable': 'silent'}), "(eval_dataloader, desc='Evaluating', disable=silent)\n", (3272, 3324), False, 'from tqdm import tqdm\n'), ((5390, 5445), 'sklearn.metrics.accuracy_score', 'metrics.accuracy_score', (['target_labels', 'predicted_labels'], {}), '(target_labels, predicted_labels)\n', (5412, 5445), False, 'from sklearn import metrics\n'), ((5467, 5540), 'sklearn.metrics.precision_score', 'metrics.precision_score', (['target_labels', 'predicted_labels'], {'average': 'average'}), '(target_labels, predicted_labels, average=average)\n', (5490, 5540), False, 'from sklearn import metrics\n'), ((5559, 5629), 'sklearn.metrics.recall_score', 'metrics.recall_score', (['target_labels', 'predicted_labels'], {'average': 'average'}), '(target_labels, predicted_labels, average=average)\n', (5579, 5629), False, 'from sklearn import metrics\n'), ((5703, 5756), 'sklearn.metrics.hamming_loss', 'metrics.hamming_loss', (['target_labels', 
'predicted_labels'], {}), '(target_labels, predicted_labels)\n', (5723, 5756), False, 'from sklearn import metrics\n'), ((5782, 5853), 'sklearn.metrics.jaccard_score', 'metrics.jaccard_score', (['target_labels', 'predicted_labels'], {'average': 'average'}), '(target_labels, predicted_labels, average=average)\n', (5803, 5853), False, 'from sklearn import metrics\n'), ((5874, 5940), 'sklearn.metrics.f1_score', 'metrics.f1_score', (['target_labels', 'predicted_labels'], {'average': 'average'}), '(target_labels, predicted_labels, average=average)\n', (5890, 5940), False, 'from sklearn import metrics\n'), ((5961, 6031), 'sklearn.metrics.f1_score', 'metrics.f1_score', (['target_labels', 'predicted_labels'], {'average': 'average_mac'}), '(target_labels, predicted_labels, average=average_mac)\n', (5977, 6031), False, 'from sklearn import metrics\n'), ((7734, 7784), 'torch.tensor', 'torch.tensor', (['unpadded_input_ids'], {'dtype': 'torch.long'}), '(unpadded_input_ids, dtype=torch.long)\n', (7746, 7784), False, 'import torch\n'), ((7814, 7865), 'torch.tensor', 'torch.tensor', (['unpadded_input_mask'], {'dtype': 'torch.long'}), '(unpadded_input_mask, dtype=torch.long)\n', (7826, 7865), False, 'import torch\n'), ((7896, 7948), 'torch.tensor', 'torch.tensor', (['unpadded_segment_ids'], {'dtype': 'torch.long'}), '(unpadded_segment_ids, dtype=torch.long)\n', (7908, 7948), False, 'import torch\n'), ((7979, 8031), 'torch.tensor', 'torch.tensor', (['unpadded_emotion_ids'], {'dtype': 'torch.long'}), '(unpadded_emotion_ids, dtype=torch.long)\n', (7991, 8031), False, 'import torch\n'), ((8053, 8120), 'torch.tensor', 'torch.tensor', (['[f.label_id for f in eval_features]'], {'dtype': 'torch.long'}), '([f.label_id for f in eval_features], dtype=torch.long)\n', (8065, 8120), False, 'import torch\n'), ((8144, 8249), 'torch.utils.data.TensorDataset', 'TensorDataset', (['padded_input_ids', 'padded_input_mask', 'padded_segment_ids', 'padded_emotion_ids', 'label_ids'], {}), '(padded_input_ids, padded_input_mask, padded_segment_ids,\n padded_emotion_ids, label_ids)\n', (8157, 8249), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((8270, 8298), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['eval_data'], {}), '(eval_data)\n', (8287, 8298), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((8326, 8402), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_data'], {'sampler': 'eval_sampler', 'batch_size': 'self.args.batch_size'}), '(eval_data, sampler=eval_sampler, batch_size=self.args.batch_size)\n', (8336, 8402), False, 'from torch.utils.data import DataLoader, SequentialSampler, TensorDataset\n'), ((8556, 8612), 'tqdm.tqdm', 'tqdm', (['eval_dataloader'], {'desc': '"""Evaluating"""', 'disable': 'silent'}), "(eval_dataloader, desc='Evaluating', disable=silent)\n", (8560, 8612), False, 'from tqdm import tqdm\n'), ((9304, 9337), 'torch.stack', 'torch.stack', (['bert_layers_l'], {'dim': '(0)'}), '(bert_layers_l, dim=0)\n', (9315, 9337), False, 'import torch\n'), ((1462, 1570), 'datasets.bert_processors.abstract_processor.convert_examples_to_hierarchical_features', 'convert_examples_to_hierarchical_features', (['self.eval_examples', 'self.args.max_seq_length', 'self.tokenizer'], {}), '(self.eval_examples, self.args.\n max_seq_length, self.tokenizer)\n', (1503, 1570), False, 'from datasets.bert_processors.abstract_processor import convert_examples_to_features_with_emotion, convert_examples_to_hierarchical_features\n'), ((1628, 1752), 
'datasets.bert_processors.abstract_processor.convert_examples_to_features_with_emotion', 'convert_examples_to_features_with_emotion', (['self.eval_examples', 'self.args.max_seq_length', 'self.tokenizer', 'self.emotioner'], {}), '(self.eval_examples, self.args.\n max_seq_length, self.tokenizer, self.emotioner)\n', (1669, 1752), False, 'from datasets.bert_processors.abstract_processor import convert_examples_to_features_with_emotion, convert_examples_to_hierarchical_features\n'), ((2110, 2172), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_input_ids', 'self.args.max_doc_length'], {}), '(unpadded_input_ids, self.args.max_doc_length)\n', (2126, 2172), False, 'from utils.preprocessing import pad_input_matrix\n'), ((2186, 2249), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_input_mask', 'self.args.max_doc_length'], {}), '(unpadded_input_mask, self.args.max_doc_length)\n', (2202, 2249), False, 'from utils.preprocessing import pad_input_matrix\n'), ((2263, 2327), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_segment_ids', 'self.args.max_doc_length'], {}), '(unpadded_segment_ids, self.args.max_doc_length)\n', (2279, 2327), False, 'from utils.preprocessing import pad_input_matrix\n'), ((5318, 5344), 'numpy.array', 'np.array', (['predicted_labels'], {}), '(predicted_labels)\n', (5326, 5344), True, 'import numpy as np\n'), ((5346, 5369), 'numpy.array', 'np.array', (['target_labels'], {}), '(target_labels)\n', (5354, 5369), True, 'import numpy as np\n'), ((6838, 6946), 'datasets.bert_processors.abstract_processor.convert_examples_to_hierarchical_features', 'convert_examples_to_hierarchical_features', (['self.eval_examples', 'self.args.max_seq_length', 'self.tokenizer'], {}), '(self.eval_examples, self.args.\n max_seq_length, self.tokenizer)\n', (6879, 6946), False, 'from datasets.bert_processors.abstract_processor import convert_examples_to_features_with_emotion, convert_examples_to_hierarchical_features\n'), ((7004, 7128), 'datasets.bert_processors.abstract_processor.convert_examples_to_features_with_emotion', 'convert_examples_to_features_with_emotion', (['self.eval_examples', 'self.args.max_seq_length', 'self.tokenizer', 'self.emotioner'], {}), '(self.eval_examples, self.args.\n max_seq_length, self.tokenizer, self.emotioner)\n', (7045, 7128), False, 'from datasets.bert_processors.abstract_processor import convert_examples_to_features_with_emotion, convert_examples_to_hierarchical_features\n'), ((7486, 7548), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_input_ids', 'self.args.max_doc_length'], {}), '(unpadded_input_ids, self.args.max_doc_length)\n', (7502, 7548), False, 'from utils.preprocessing import pad_input_matrix\n'), ((7562, 7625), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_input_mask', 'self.args.max_doc_length'], {}), '(unpadded_input_mask, self.args.max_doc_length)\n', (7578, 7625), False, 'from utils.preprocessing import pad_input_matrix\n'), ((7639, 7703), 'utils.preprocessing.pad_input_matrix', 'pad_input_matrix', (['unpadded_segment_ids', 'self.args.max_doc_length'], {}), '(unpadded_segment_ids, self.args.max_doc_length)\n', (7655, 7703), False, 'from utils.preprocessing import pad_input_matrix\n'), ((3636, 3651), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3649, 3651), False, 'import torch\n'), ((8924, 8939), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (8937, 8939), False, 'import torch\n'), ((4855, 4885), 'torch.argmax', 
'torch.argmax', (['label_ids'], {'dim': '(1)'}), '(label_ids, dim=1)\n', (4867, 4885), False, 'import torch\n'), ((9127, 9157), 'torch.argmax', 'torch.argmax', (['label_ids'], {'dim': '(1)'}), '(label_ids, dim=1)\n', (9139, 9157), False, 'import torch\n'), ((4662, 4689), 'torch.argmax', 'torch.argmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (4674, 4689), False, 'import torch\n'), ((4752, 4782), 'torch.argmax', 'torch.argmax', (['label_ids'], {'dim': '(1)'}), '(label_ids, dim=1)\n', (4764, 4782), False, 'import torch\n'), ((4308, 4325), 'torch.nn.functional.sigmoid', 'F.sigmoid', (['logits'], {}), '(logits)\n', (4317, 4325), True, 'import torch.nn.functional as F\n')] |
import torch.nn as nn
import torch.nn.functional as F
class Model(nn.Module):
def __init__(self, config):
super(Model, self).__init__()
self.drop = nn.Dropout(config['dropout'])
self.fc1 = nn.Linear(784, 2000)
self.fc2 = nn.Linear(2000, 2000)
self.fc3 = nn.Linear(2000, 2000)
self.fc4 = nn.Linear(2000, 2000)
self.fc5 = nn.Linear(2000, 10)
def forward(self, x):
# 784 -> 2000
x = F.relu(self.drop(self.fc1(x)))
# 2000 -> 2000
x = F.relu(self.drop(self.fc2(x)))
# 2000 -> 2000
x = F.relu(self.drop(self.fc3(x)))
# 2000 -> 2000
x = F.relu(self.drop(self.fc4(x)))
        # 2000 -> 10
x = self.fc5(x)
        return x
| [
"torch.nn.Dropout",
"torch.nn.Linear"
]
| [((177, 206), 'torch.nn.Dropout', 'nn.Dropout', (["config['dropout']"], {}), "(config['dropout'])\n", (187, 206), True, 'import torch.nn as nn\n'), ((229, 249), 'torch.nn.Linear', 'nn.Linear', (['(784)', '(2000)'], {}), '(784, 2000)\n', (238, 249), True, 'import torch.nn as nn\n'), ((270, 291), 'torch.nn.Linear', 'nn.Linear', (['(2000)', '(2000)'], {}), '(2000, 2000)\n', (279, 291), True, 'import torch.nn as nn\n'), ((312, 333), 'torch.nn.Linear', 'nn.Linear', (['(2000)', '(2000)'], {}), '(2000, 2000)\n', (321, 333), True, 'import torch.nn as nn\n'), ((354, 375), 'torch.nn.Linear', 'nn.Linear', (['(2000)', '(2000)'], {}), '(2000, 2000)\n', (363, 375), True, 'import torch.nn as nn\n'), ((396, 415), 'torch.nn.Linear', 'nn.Linear', (['(2000)', '(10)'], {}), '(2000, 10)\n', (405, 415), True, 'import torch.nn as nn\n')] |
#!/usr/bin/env python
# coding: utf-8
""" Learning Koopman Invariant Subspace
(c) <NAME>, 2017.
<EMAIL>
"""
import numpy as np
np.random.seed(1234567890)
from argparse import ArgumentParser
from os import path
import time
from lkis import TimeSeriesBatchMaker, KoopmanInvariantSubspaceLearner
from losses import combined_loss
from torch import device, save, manual_seed
from torch.optim import SGD
import matplotlib.pyplot as plt
import seaborn as sns
# -- Parse arguments
t = time.time()
parser = ArgumentParser(description='Learning Koopman Invariant Subspace (Now with PyTorch!)')
parser.add_argument("--name", "-n", type=str, default=f"lkis-{int(time.time())}", help="name of experiment")
parser.add_argument("--data-path", type=str, default="./train.npy", help="time-series data to model")
parser.add_argument("--epochs", "-e", type=int, default=1000, help="number of epochs to train for")
parser.add_argument("--num-batches", "-b", type=int, default=1, help="how many batchs for break the data up into")
parser.add_argument("--gpu", action="store_true", default=False, help="use a GPU or no")
parser.add_argument("--intermediate-observable", "-i", type=int, default=-1, help="intermediate dimensional observation space")
parser.add_argument("--save-model", "-m", action="store_true", default=False, help="whether or not you want the model saved to $name$.torch.mdl")
parser.add_argument("--save-training-plot", "-p", action="store_true", default=False, help="where to save plotting")
parser.add_argument("--max-lag", "-l", type=int, default=-1, help="maximum_lag")
parser.add_argument("--state-space", "-s", type=int, default=1, help="dimensionality of the underlying state space")
parser.add_argument("--alpha", "-a", type=float, default=1.0, help="value to score the reconstruction loss by")
parser.add_argument("--learning-rate", "-r", type=float, default=0.001, help="Optimizer learning rate")
parser.add_argument("--validation-data-path", "-v", type=str, default="")
#ToDo: Implement
parser.add_argument("--dmd", action="store_true", default=False, help="Execute and save the DMD on the training set")
if __name__ == "__main__":
# grab the command line arguments
cli_args = parser.parse_args()
manual_seed(216)
# find and load the training data
data_path = cli_args.data_path
print(f"Loading training data from {data_path}")
data_train = np.load(data_path)
if len(data_train.shape) == 1:
data_train = data_train.reshape(-1, 1)
print(f"Loaded a dataset with dimension: {data_train.shape}")
validate = cli_args.validation_data_path != ""
data_val = None
if validate:
data_path = cli_args.validation_data_path
print(f"Loading validation data from {data_path}")
data_val = np.load(data_path)
    # the delay is either set by the user or defaults to one tenth of the data length
delay = cli_args.max_lag if cli_args.max_lag > 0 else (data_train.shape[0] // 10)
# based on the number of batches, delay, and size of the data compute the samples per batch
samples_per_batch = (data_train.shape[0] - delay) // cli_args.num_batches
# construct the data preparer
batch_iterator = TimeSeriesBatchMaker(
y=data_train,
batch_size=samples_per_batch,
max_lag=delay
)
if validate:
val_batch_iterator = TimeSeriesBatchMaker(
y=data_val,
max_lag=delay
)
# construct the end-to-end model
lkis = KoopmanInvariantSubspaceLearner(
observable_dim=data_train.shape[1],
latent_dim=cli_args.state_space,
intermediate_observable=cli_args.intermediate_observable,
delay=delay
)
if cli_args.gpu:
device = device("cuda")
# initialize the optimizer
optimizer = SGD(lkis.parameters(), lr=cli_args.learning_rate)
losses = []
val_losses = []
for epoch in range(cli_args.epochs):
loss = 0
for b in range(cli_args.num_batches):
optimizer.zero_grad()
time_delayed_ys, y_true = next(batch_iterator)
if cli_args.gpu:
time_delayed_ys.to(device)
y_true.to(device)
g_pred, y_pred = lkis(time_delayed_ys)
g_0 = g_pred[:-1]
g_1 = g_pred[1:]
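            # g_0/g_1 hold the latent observables at consecutive time steps; combined_loss receives them together with y_pred/y_true.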
batch_loss = combined_loss(y_pred=y_pred, y_true=y_true, g_0=g_0, g_1=g_1)
batch_loss.backward()
optimizer.step()
loss += batch_loss.item()
# display the epoch training loss
print(f"epoch : {epoch + 1}/{cli_args.epochs}, loss = {loss:.6f}")
losses.append(loss)
if validate:
y_time_delayed_val, y_true = next(val_batch_iterator)
if cli_args.gpu:
y_time_delayed_val.to(device)
y_true.to(device)
g_pred, y_pred = lkis(y_time_delayed_val)
g_0 = g_pred[:-1]
g_1 = g_pred[1:]
batch_loss = combined_loss(y_pred=y_pred, y_true=y_true, g_0=g_0, g_1=g_1)
val_loss = batch_loss.item()
print(f"\tval-loss = {val_loss:.6f}")
val_losses.append(val_loss)
if cli_args.save_model:
save(lkis, f"{cli_args.name}.torch.mdl")
if cli_args.save_training_plot:
sns.lineplot(x=list(range(cli_args.epochs)), y=losses, label="training loss")
if validate:
sns.lineplot(x=list(range(cli_args.epochs)), y=val_losses, label="validation loss")
plt.xlabel("Epochs")
plt.ylabel("Combined Reconstruction and DMD Loss")
plt.title(f"Training Loss for {cli_args.name}")
plt.savefig(f"{cli_args.name}-training-loss.png")
| [
"torch.manual_seed",
"matplotlib.pyplot.savefig",
"argparse.ArgumentParser",
"lkis.TimeSeriesBatchMaker",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"lkis.KoopmanInvariantSubspaceLearner",
"losses.combined_loss",
"numpy.random.seed",
"torch.save",
"matplotlib.pyplot.title",
"numpy.load",
"time.time",
"torch.device"
]
| [((131, 157), 'numpy.random.seed', 'np.random.seed', (['(1234567890)'], {}), '(1234567890)\n', (145, 157), True, 'import numpy as np\n'), ((485, 496), 'time.time', 'time.time', ([], {}), '()\n', (494, 496), False, 'import time\n'), ((506, 596), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Learning Koopman Invariant Subspace (Now with PyTorch!)"""'}), "(description=\n 'Learning Koopman Invariant Subspace (Now with PyTorch!)')\n", (520, 596), False, 'from argparse import ArgumentParser\n'), ((2227, 2243), 'torch.manual_seed', 'manual_seed', (['(216)'], {}), '(216)\n', (2238, 2243), False, 'from torch import device, save, manual_seed\n'), ((2388, 2406), 'numpy.load', 'np.load', (['data_path'], {}), '(data_path)\n', (2395, 2406), True, 'import numpy as np\n'), ((3189, 3268), 'lkis.TimeSeriesBatchMaker', 'TimeSeriesBatchMaker', ([], {'y': 'data_train', 'batch_size': 'samples_per_batch', 'max_lag': 'delay'}), '(y=data_train, batch_size=samples_per_batch, max_lag=delay)\n', (3209, 3268), False, 'from lkis import TimeSeriesBatchMaker, KoopmanInvariantSubspaceLearner\n'), ((3476, 3656), 'lkis.KoopmanInvariantSubspaceLearner', 'KoopmanInvariantSubspaceLearner', ([], {'observable_dim': 'data_train.shape[1]', 'latent_dim': 'cli_args.state_space', 'intermediate_observable': 'cli_args.intermediate_observable', 'delay': 'delay'}), '(observable_dim=data_train.shape[1],\n latent_dim=cli_args.state_space, intermediate_observable=cli_args.\n intermediate_observable, delay=delay)\n', (3507, 3656), False, 'from lkis import TimeSeriesBatchMaker, KoopmanInvariantSubspaceLearner\n'), ((2771, 2789), 'numpy.load', 'np.load', (['data_path'], {}), '(data_path)\n', (2778, 2789), True, 'import numpy as np\n'), ((3345, 3392), 'lkis.TimeSeriesBatchMaker', 'TimeSeriesBatchMaker', ([], {'y': 'data_val', 'max_lag': 'delay'}), '(y=data_val, max_lag=delay)\n', (3365, 3392), False, 'from lkis import TimeSeriesBatchMaker, KoopmanInvariantSubspaceLearner\n'), ((3725, 3739), 'torch.device', 'device', (['"""cuda"""'], {}), "('cuda')\n", (3731, 3739), False, 'from torch import device, save, manual_seed\n'), ((5195, 5235), 'torch.save', 'save', (['lkis', 'f"""{cli_args.name}.torch.mdl"""'], {}), "(lkis, f'{cli_args.name}.torch.mdl')\n", (5199, 5235), False, 'from torch import device, save, manual_seed\n'), ((5484, 5504), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Epochs"""'], {}), "('Epochs')\n", (5494, 5504), True, 'import matplotlib.pyplot as plt\n'), ((5513, 5563), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Combined Reconstruction and DMD Loss"""'], {}), "('Combined Reconstruction and DMD Loss')\n", (5523, 5563), True, 'import matplotlib.pyplot as plt\n'), ((5572, 5619), 'matplotlib.pyplot.title', 'plt.title', (['f"""Training Loss for {cli_args.name}"""'], {}), "(f'Training Loss for {cli_args.name}')\n", (5581, 5619), True, 'import matplotlib.pyplot as plt\n'), ((5628, 5677), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""{cli_args.name}-training-loss.png"""'], {}), "(f'{cli_args.name}-training-loss.png')\n", (5639, 5677), True, 'import matplotlib.pyplot as plt\n'), ((4315, 4376), 'losses.combined_loss', 'combined_loss', ([], {'y_pred': 'y_pred', 'y_true': 'y_true', 'g_0': 'g_0', 'g_1': 'g_1'}), '(y_pred=y_pred, y_true=y_true, g_0=g_0, g_1=g_1)\n', (4328, 4376), False, 'from losses import combined_loss\n'), ((4964, 5025), 'losses.combined_loss', 'combined_loss', ([], {'y_pred': 'y_pred', 'y_true': 'y_true', 'g_0': 'g_0', 'g_1': 'g_1'}), '(y_pred=y_pred, y_true=y_true, g_0=g_0, g_1=g_1)\n', 
(4977, 5025), False, 'from losses import combined_loss\n'), ((658, 669), 'time.time', 'time.time', ([], {}), '()\n', (667, 669), False, 'import time\n')] |
from telethon.tl.functions.photos import DeletePhotosRequest, GetUserPhotosRequest
from telethon.tl.types import InputPhoto
from userbot.cmdhelp import CmdHelp
from userbot.utils import admin_cmd, edit_or_reply, sudo_cmd
CmdHelp("delfp").add_command("delpfp", None, "delete ur currnt profile picture").add()
@borg.on(admin_cmd(pattern="delpfp ?(.*)"))
@borg.on(sudo_cmd(pattern="delpfp ?(.*)", allow_sudo=True))
async def remove_profilepic(delpfp):
"""For .delpfp command, delete your current profile picture in Telegram."""
group = delpfp.text[8:]
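    # The argument after ".delpfp" selects how many pictures to remove: "all", an explicit number, or just the latest one by default.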
if group == "all":
lim = 0
elif group.isdigit():
lim = int(group)
else:
lim = 1
pfplist = await delpfp.client(
GetUserPhotosRequest(user_id=delpfp.from_id, offset=0, max_id=0, limit=lim)
)
input_photos = [InputPhoto(
id=sep.id,
access_hash=sep.access_hash,
file_reference=sep.file_reference,
) for sep in pfplist.photos]
await delpfp.client(DeletePhotosRequest(id=input_photos))
await edit_or_reply(
delpfp, f"`Successfully deleted {len(input_photos)} profile picture(s).`"
)
| [
"userbot.utils.admin_cmd",
"userbot.cmdhelp.CmdHelp",
"telethon.tl.functions.photos.DeletePhotosRequest",
"telethon.tl.functions.photos.GetUserPhotosRequest",
"telethon.tl.types.InputPhoto",
"userbot.utils.sudo_cmd"
]
| [((321, 354), 'userbot.utils.admin_cmd', 'admin_cmd', ([], {'pattern': '"""delpfp ?(.*)"""'}), "(pattern='delpfp ?(.*)')\n", (330, 354), False, 'from userbot.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((365, 414), 'userbot.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""delpfp ?(.*)"""', 'allow_sudo': '(True)'}), "(pattern='delpfp ?(.*)', allow_sudo=True)\n", (373, 414), False, 'from userbot.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((823, 913), 'telethon.tl.types.InputPhoto', 'InputPhoto', ([], {'id': 'sep.id', 'access_hash': 'sep.access_hash', 'file_reference': 'sep.file_reference'}), '(id=sep.id, access_hash=sep.access_hash, file_reference=sep.\n file_reference)\n', (833, 913), False, 'from telethon.tl.types import InputPhoto\n'), ((721, 796), 'telethon.tl.functions.photos.GetUserPhotosRequest', 'GetUserPhotosRequest', ([], {'user_id': 'delpfp.from_id', 'offset': '(0)', 'max_id': '(0)', 'limit': 'lim'}), '(user_id=delpfp.from_id, offset=0, max_id=0, limit=lim)\n', (741, 796), False, 'from telethon.tl.functions.photos import DeletePhotosRequest, GetUserPhotosRequest\n'), ((1023, 1059), 'telethon.tl.functions.photos.DeletePhotosRequest', 'DeletePhotosRequest', ([], {'id': 'input_photos'}), '(id=input_photos)\n', (1042, 1059), False, 'from telethon.tl.functions.photos import DeletePhotosRequest, GetUserPhotosRequest\n'), ((223, 239), 'userbot.cmdhelp.CmdHelp', 'CmdHelp', (['"""delfp"""'], {}), "('delfp')\n", (230, 239), False, 'from userbot.cmdhelp import CmdHelp\n')] |
# Copyright (C) 2006, 2008 Canonical Ltd
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Tests for the lru_cache module."""
from dulwich import (
lru_cache,
)
from dulwich.tests import (
TestCase,
)
class TestLRUCache(TestCase):
"""Test that LRU cache properly keeps track of entries."""
def test_cache_size(self):
cache = lru_cache.LRUCache(max_cache=10)
self.assertEqual(10, cache.cache_size())
cache = lru_cache.LRUCache(max_cache=256)
self.assertEqual(256, cache.cache_size())
cache.resize(512)
self.assertEqual(512, cache.cache_size())
def test_missing(self):
cache = lru_cache.LRUCache(max_cache=10)
self.assertFalse('foo' in cache)
self.assertRaises(KeyError, cache.__getitem__, 'foo')
cache['foo'] = 'bar'
self.assertEqual('bar', cache['foo'])
self.assertTrue('foo' in cache)
self.assertFalse('bar' in cache)
def test_map_None(self):
# Make sure that we can properly map None as a key.
cache = lru_cache.LRUCache(max_cache=10)
self.assertFalse(None in cache)
cache[None] = 1
self.assertEqual(1, cache[None])
cache[None] = 2
self.assertEqual(2, cache[None])
# Test the various code paths of __getitem__, to make sure that we can
# handle when None is the key for the LRU and the MRU
cache[1] = 3
cache[None] = 1
cache[None]
cache[1]
cache[None]
self.assertEqual([None, 1], [n.key for n in cache._walk_lru()])
def test_add__null_key(self):
cache = lru_cache.LRUCache(max_cache=10)
self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
def test_overflow(self):
"""Adding extra entries will pop out old ones."""
cache = lru_cache.LRUCache(max_cache=1, after_cleanup_count=1)
cache['foo'] = 'bar'
# With a max cache of 1, adding 'baz' should pop out 'foo'
cache['baz'] = 'biz'
self.assertFalse('foo' in cache)
self.assertTrue('baz' in cache)
self.assertEqual('biz', cache['baz'])
def test_by_usage(self):
"""Accessing entries bumps them up in priority."""
cache = lru_cache.LRUCache(max_cache=2)
cache['baz'] = 'biz'
cache['foo'] = 'bar'
self.assertEqual('biz', cache['baz'])
# This must kick out 'foo' because it was the last accessed
cache['nub'] = 'in'
self.assertFalse('foo' in cache)
def test_cleanup(self):
"""Test that we can use a cleanup function."""
cleanup_called = []
def cleanup_func(key, val):
cleanup_called.append((key, val))
cache = lru_cache.LRUCache(max_cache=2, after_cleanup_count=2)
cache.add('baz', '1', cleanup=cleanup_func)
cache.add('foo', '2', cleanup=cleanup_func)
cache.add('biz', '3', cleanup=cleanup_func)
self.assertEqual([('baz', '1')], cleanup_called)
# 'foo' is now most recent, so final cleanup will call it last
cache['foo']
cache.clear()
self.assertEqual([('baz', '1'), ('biz', '3'), ('foo', '2')],
cleanup_called)
def test_cleanup_on_replace(self):
"""Replacing an object should cleanup the old value."""
cleanup_called = []
def cleanup_func(key, val):
cleanup_called.append((key, val))
cache = lru_cache.LRUCache(max_cache=2)
cache.add(1, 10, cleanup=cleanup_func)
cache.add(2, 20, cleanup=cleanup_func)
cache.add(2, 25, cleanup=cleanup_func)
self.assertEqual([(2, 20)], cleanup_called)
self.assertEqual(25, cache[2])
# Even __setitem__ should make sure cleanup() is called
cache[2] = 26
self.assertEqual([(2, 20), (2, 25)], cleanup_called)
def test_len(self):
cache = lru_cache.LRUCache(max_cache=10, after_cleanup_count=10)
cache[1] = 10
cache[2] = 20
cache[3] = 30
cache[4] = 40
self.assertEqual(4, len(cache))
cache[5] = 50
cache[6] = 60
cache[7] = 70
cache[8] = 80
self.assertEqual(8, len(cache))
cache[1] = 15 # replacement
self.assertEqual(8, len(cache))
cache[9] = 90
cache[10] = 100
cache[11] = 110
# We hit the max
self.assertEqual(10, len(cache))
self.assertEqual([11, 10, 9, 1, 8, 7, 6, 5, 4, 3],
[n.key for n in cache._walk_lru()])
def test_cleanup_shrinks_to_after_clean_count(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=3)
cache.add(1, 10)
cache.add(2, 20)
cache.add(3, 25)
cache.add(4, 30)
cache.add(5, 35)
self.assertEqual(5, len(cache))
# This will bump us over the max, which causes us to shrink down to
# after_cleanup_cache size
cache.add(6, 40)
self.assertEqual(3, len(cache))
def test_after_cleanup_larger_than_max(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=10)
self.assertEqual(5, cache._after_cleanup_count)
def test_after_cleanup_none(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=None)
# By default _after_cleanup_size is 80% of the normal size
self.assertEqual(4, cache._after_cleanup_count)
def test_cleanup_2(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=2)
# Add these in order
cache.add(1, 10)
cache.add(2, 20)
cache.add(3, 25)
cache.add(4, 30)
cache.add(5, 35)
self.assertEqual(5, len(cache))
# Force a compaction
cache.cleanup()
self.assertEqual(2, len(cache))
def test_preserve_last_access_order(self):
cache = lru_cache.LRUCache(max_cache=5)
# Add these in order
cache.add(1, 10)
cache.add(2, 20)
cache.add(3, 25)
cache.add(4, 30)
cache.add(5, 35)
self.assertEqual([5, 4, 3, 2, 1], [n.key for n in cache._walk_lru()])
# Now access some randomly
cache[2]
cache[5]
cache[3]
cache[2]
self.assertEqual([2, 3, 5, 4, 1], [n.key for n in cache._walk_lru()])
def test_get(self):
cache = lru_cache.LRUCache(max_cache=5)
cache.add(1, 10)
cache.add(2, 20)
self.assertEqual(20, cache.get(2))
self.assertEqual(None, cache.get(3))
obj = object()
self.assertTrue(obj is cache.get(3, obj))
self.assertEqual([2, 1], [n.key for n in cache._walk_lru()])
self.assertEqual(10, cache.get(1))
self.assertEqual([1, 2], [n.key for n in cache._walk_lru()])
def test_keys(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=5)
cache[1] = 2
cache[2] = 3
cache[3] = 4
self.assertEqual([1, 2, 3], sorted(cache.keys()))
cache[4] = 5
cache[5] = 6
cache[6] = 7
self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
def test_resize_smaller(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
cache[1] = 2
cache[2] = 3
cache[3] = 4
cache[4] = 5
cache[5] = 6
self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
cache[6] = 7
self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
# Now resize to something smaller, which triggers a cleanup
cache.resize(max_cache=3, after_cleanup_count=2)
self.assertEqual([5, 6], sorted(cache.keys()))
# Adding something will use the new size
cache[7] = 8
self.assertEqual([5, 6, 7], sorted(cache.keys()))
cache[8] = 9
self.assertEqual([7, 8], sorted(cache.keys()))
def test_resize_larger(self):
cache = lru_cache.LRUCache(max_cache=5, after_cleanup_count=4)
cache[1] = 2
cache[2] = 3
cache[3] = 4
cache[4] = 5
cache[5] = 6
self.assertEqual([1, 2, 3, 4, 5], sorted(cache.keys()))
cache[6] = 7
self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
cache.resize(max_cache=8, after_cleanup_count=6)
self.assertEqual([3, 4, 5, 6], sorted(cache.keys()))
cache[7] = 8
cache[8] = 9
cache[9] = 10
cache[10] = 11
self.assertEqual([3, 4, 5, 6, 7, 8, 9, 10], sorted(cache.keys()))
cache[11] = 12 # triggers cleanup back to new after_cleanup_count
self.assertEqual([6, 7, 8, 9, 10, 11], sorted(cache.keys()))
class TestLRUSizeCache(TestCase):
def test_basic_init(self):
cache = lru_cache.LRUSizeCache()
self.assertEqual(2048, cache._max_cache)
self.assertEqual(int(cache._max_size*0.8), cache._after_cleanup_size)
self.assertEqual(0, cache._value_size)
def test_add__null_key(self):
cache = lru_cache.LRUSizeCache()
self.assertRaises(ValueError, cache.add, lru_cache._null_key, 1)
def test_add_tracks_size(self):
cache = lru_cache.LRUSizeCache()
self.assertEqual(0, cache._value_size)
cache.add('my key', 'my value text')
self.assertEqual(13, cache._value_size)
def test_remove_tracks_size(self):
cache = lru_cache.LRUSizeCache()
self.assertEqual(0, cache._value_size)
cache.add('my key', 'my value text')
self.assertEqual(13, cache._value_size)
node = cache._cache['my key']
cache._remove_node(node)
self.assertEqual(0, cache._value_size)
def test_no_add_over_size(self):
"""Adding a large value may not be cached at all."""
cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
self.assertEqual(0, cache._value_size)
self.assertEqual({}, cache.items())
cache.add('test', 'key')
self.assertEqual(3, cache._value_size)
self.assertEqual({'test': 'key'}, cache.items())
cache.add('test2', 'key that is too big')
self.assertEqual(3, cache._value_size)
self.assertEqual({'test':'key'}, cache.items())
# If we would add a key, only to cleanup and remove all cached entries,
# then obviously that value should not be stored
cache.add('test3', 'bigkey')
self.assertEqual(3, cache._value_size)
self.assertEqual({'test':'key'}, cache.items())
cache.add('test4', 'bikey')
self.assertEqual(3, cache._value_size)
self.assertEqual({'test':'key'}, cache.items())
def test_no_add_over_size_cleanup(self):
"""If a large value is not cached, we will call cleanup right away."""
cleanup_calls = []
def cleanup(key, value):
cleanup_calls.append((key, value))
cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=5)
self.assertEqual(0, cache._value_size)
self.assertEqual({}, cache.items())
cache.add('test', 'key that is too big', cleanup=cleanup)
# key was not added
self.assertEqual(0, cache._value_size)
self.assertEqual({}, cache.items())
# and cleanup was called
self.assertEqual([('test', 'key that is too big')], cleanup_calls)
def test_adding_clears_cache_based_on_size(self):
"""The cache is cleared in LRU order until small enough"""
cache = lru_cache.LRUSizeCache(max_size=20)
cache.add('key1', 'value') # 5 chars
cache.add('key2', 'value2') # 6 chars
cache.add('key3', 'value23') # 7 chars
self.assertEqual(5+6+7, cache._value_size)
cache['key2'] # reference key2 so it gets a newer reference time
cache.add('key4', 'value234') # 8 chars, over limit
# We have to remove 2 keys to get back under limit
self.assertEqual(6+8, cache._value_size)
self.assertEqual({'key2':'value2', 'key4':'value234'},
cache.items())
def test_adding_clears_to_after_cleanup_size(self):
cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
cache.add('key1', 'value') # 5 chars
cache.add('key2', 'value2') # 6 chars
cache.add('key3', 'value23') # 7 chars
self.assertEqual(5+6+7, cache._value_size)
cache['key2'] # reference key2 so it gets a newer reference time
cache.add('key4', 'value234') # 8 chars, over limit
# We have to remove 3 keys to get back under limit
self.assertEqual(8, cache._value_size)
self.assertEqual({'key4':'value234'}, cache.items())
def test_custom_sizes(self):
def size_of_list(lst):
return sum(len(x) for x in lst)
cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10,
compute_size=size_of_list)
cache.add('key1', ['val', 'ue']) # 5 chars
cache.add('key2', ['val', 'ue2']) # 6 chars
cache.add('key3', ['val', 'ue23']) # 7 chars
self.assertEqual(5+6+7, cache._value_size)
cache['key2'] # reference key2 so it gets a newer reference time
cache.add('key4', ['value', '234']) # 8 chars, over limit
# We have to remove 3 keys to get back under limit
self.assertEqual(8, cache._value_size)
self.assertEqual({'key4':['value', '234']}, cache.items())
def test_cleanup(self):
cache = lru_cache.LRUSizeCache(max_size=20, after_cleanup_size=10)
# Add these in order
cache.add('key1', 'value') # 5 chars
cache.add('key2', 'value2') # 6 chars
cache.add('key3', 'value23') # 7 chars
self.assertEqual(5+6+7, cache._value_size)
cache.cleanup()
# Only the most recent fits after cleaning up
self.assertEqual(7, cache._value_size)
def test_keys(self):
cache = lru_cache.LRUSizeCache(max_size=10)
cache[1] = 'a'
cache[2] = 'b'
cache[3] = 'cdef'
self.assertEqual([1, 2, 3], sorted(cache.keys()))
def test_resize_smaller(self):
cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
cache[1] = 'abc'
cache[2] = 'def'
cache[3] = 'ghi'
cache[4] = 'jkl'
# Triggers a cleanup
self.assertEqual([2, 3, 4], sorted(cache.keys()))
# Resize should also cleanup again
cache.resize(max_size=6, after_cleanup_size=4)
self.assertEqual([4], sorted(cache.keys()))
# Adding should use the new max size
cache[5] = 'mno'
self.assertEqual([4, 5], sorted(cache.keys()))
cache[6] = 'pqr'
self.assertEqual([6], sorted(cache.keys()))
def test_resize_larger(self):
cache = lru_cache.LRUSizeCache(max_size=10, after_cleanup_size=9)
cache[1] = 'abc'
cache[2] = 'def'
cache[3] = 'ghi'
cache[4] = 'jkl'
# Triggers a cleanup
self.assertEqual([2, 3, 4], sorted(cache.keys()))
cache.resize(max_size=15, after_cleanup_size=12)
self.assertEqual([2, 3, 4], sorted(cache.keys()))
cache[5] = 'mno'
cache[6] = 'pqr'
self.assertEqual([2, 3, 4, 5, 6], sorted(cache.keys()))
cache[7] = 'stu'
self.assertEqual([4, 5, 6, 7], sorted(cache.keys()))
| [
"dulwich.lru_cache.LRUSizeCache",
"dulwich.lru_cache.LRUCache"
]
| [((1169, 1201), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(10)'}), '(max_cache=10)\n', (1187, 1201), False, 'from dulwich import lru_cache\n'), ((1268, 1301), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(256)'}), '(max_cache=256)\n', (1286, 1301), False, 'from dulwich import lru_cache\n'), ((1474, 1506), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(10)'}), '(max_cache=10)\n', (1492, 1506), False, 'from dulwich import lru_cache\n'), ((1874, 1906), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(10)'}), '(max_cache=10)\n', (1892, 1906), False, 'from dulwich import lru_cache\n'), ((2443, 2475), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(10)'}), '(max_cache=10)\n', (2461, 2475), False, 'from dulwich import lru_cache\n'), ((2653, 2707), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(1)', 'after_cleanup_count': '(1)'}), '(max_cache=1, after_cleanup_count=1)\n', (2671, 2707), False, 'from dulwich import lru_cache\n'), ((3068, 3099), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(2)'}), '(max_cache=2)\n', (3086, 3099), False, 'from dulwich import lru_cache\n'), ((3556, 3610), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(2)', 'after_cleanup_count': '(2)'}), '(max_cache=2, after_cleanup_count=2)\n', (3574, 3610), False, 'from dulwich import lru_cache\n'), ((4282, 4313), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(2)'}), '(max_cache=2)\n', (4300, 4313), False, 'from dulwich import lru_cache\n'), ((4736, 4792), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(10)', 'after_cleanup_count': '(10)'}), '(max_cache=10, after_cleanup_count=10)\n', (4754, 4792), False, 'from dulwich import lru_cache\n'), ((5463, 5517), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(3)'}), '(max_cache=5, after_cleanup_count=3)\n', (5481, 5517), False, 'from dulwich import lru_cache\n'), ((5928, 5983), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(10)'}), '(max_cache=5, after_cleanup_count=10)\n', (5946, 5983), False, 'from dulwich import lru_cache\n'), ((6096, 6153), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': 'None'}), '(max_cache=5, after_cleanup_count=None)\n', (6114, 6153), False, 'from dulwich import lru_cache\n'), ((6324, 6378), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(2)'}), '(max_cache=5, after_cleanup_count=2)\n', (6342, 6378), False, 'from dulwich import lru_cache\n'), ((6732, 6763), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)'}), '(max_cache=5)\n', (6750, 6763), False, 'from dulwich import lru_cache\n'), ((7221, 7252), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)'}), '(max_cache=5)\n', (7239, 7252), False, 'from dulwich import lru_cache\n'), ((7688, 7742), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(5)'}), '(max_cache=5, after_cleanup_count=5)\n', (7706, 7742), False, 'from dulwich import lru_cache\n'), ((8044, 8098), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(4)'}), '(max_cache=5, after_cleanup_count=4)\n', (8062, 8098), False, 'from 
dulwich import lru_cache\n'), ((8785, 8839), 'dulwich.lru_cache.LRUCache', 'lru_cache.LRUCache', ([], {'max_cache': '(5)', 'after_cleanup_count': '(4)'}), '(max_cache=5, after_cleanup_count=4)\n', (8803, 8839), False, 'from dulwich import lru_cache\n'), ((9597, 9621), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {}), '()\n', (9619, 9621), False, 'from dulwich import lru_cache\n'), ((9847, 9871), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {}), '()\n', (9869, 9871), False, 'from dulwich import lru_cache\n'), ((9998, 10022), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {}), '()\n', (10020, 10022), False, 'from dulwich import lru_cache\n'), ((10219, 10243), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {}), '()\n', (10241, 10243), False, 'from dulwich import lru_cache\n'), ((10617, 10674), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(10)', 'after_cleanup_size': '(5)'}), '(max_size=10, after_cleanup_size=5)\n', (10639, 10674), False, 'from dulwich import lru_cache\n'), ((11722, 11779), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(10)', 'after_cleanup_size': '(5)'}), '(max_size=10, after_cleanup_size=5)\n', (11744, 11779), False, 'from dulwich import lru_cache\n'), ((12302, 12337), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(20)'}), '(max_size=20)\n', (12324, 12337), False, 'from dulwich import lru_cache\n'), ((12944, 13002), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(20)', 'after_cleanup_size': '(10)'}), '(max_size=20, after_cleanup_size=10)\n', (12966, 13002), False, 'from dulwich import lru_cache\n'), ((13617, 13707), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(20)', 'after_cleanup_size': '(10)', 'compute_size': 'size_of_list'}), '(max_size=20, after_cleanup_size=10, compute_size=\n size_of_list)\n', (13639, 13707), False, 'from dulwich import lru_cache\n'), ((14307, 14365), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(20)', 'after_cleanup_size': '(10)'}), '(max_size=20, after_cleanup_size=10)\n', (14329, 14365), False, 'from dulwich import lru_cache\n'), ((14753, 14788), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(10)'}), '(max_size=10)\n', (14775, 14788), False, 'from dulwich import lru_cache\n'), ((14972, 15029), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(10)', 'after_cleanup_size': '(9)'}), '(max_size=10, after_cleanup_size=9)\n', (14994, 15029), False, 'from dulwich import lru_cache\n'), ((15620, 15677), 'dulwich.lru_cache.LRUSizeCache', 'lru_cache.LRUSizeCache', ([], {'max_size': '(10)', 'after_cleanup_size': '(9)'}), '(max_size=10, after_cleanup_size=9)\n', (15642, 15677), False, 'from dulwich import lru_cache\n')] |
# coding: utf-8
import logging
import requests
import mimetypes
from io import BytesIO
from urllib.parse import urlparse
from datetime import datetime, timedelta
from collections import OrderedDict
from flask_babelex import gettext as _
from flask import (
render_template,
abort,
current_app,
request,
session,
redirect,
jsonify,
url_for,
Response,
send_from_directory,
g,
make_response,
)
from werkzeug.contrib.atom import AtomFeed
from urllib.parse import urljoin
from legendarium.formatter import descriptive_short_format
from . import main
from webapp import babel
from webapp import cache
from webapp import controllers
from webapp.choices import STUDY_AREAS
from webapp.utils import utils
from webapp.utils.caching import cache_key_with_lang, cache_key_with_lang_with_qs
from webapp import forms
from webapp.config.lang_names import display_original_lang_name
from opac_schema.v1.models import Journal, Issue, Article, Collection
from lxml import etree
from packtools import HTMLGenerator
logger = logging.getLogger(__name__)
JOURNAL_UNPUBLISH = _("O periódico está indisponível por motivo de: ")
ISSUE_UNPUBLISH = _("O número está indisponível por motivo de: ")
ARTICLE_UNPUBLISH = _("O artigo está indisponível por motivo de: ")
IAHX_LANGS = dict(
p='pt',
e='es',
i='en',
)
def url_external(endpoint, **kwargs):
url = url_for(endpoint, **kwargs)
return urljoin(request.url_root, url)
class RetryableError(Exception):
"""Erro recuperável sem que seja necessário modificar o estado dos dados
na parte cliente, e.g., timeouts, erros advindos de particionamento de rede
etc.
"""
class NonRetryableError(Exception):
"""Erro do qual não pode ser recuperado sem modificar o estado dos dados
na parte cliente, e.g., recurso solicitado não exite, URI inválida etc.
"""
def fetch_data(url: str, timeout: float = 2) -> bytes:
try:
response = requests.get(url, timeout=timeout)
except (requests.ConnectionError, requests.Timeout) as exc:
raise RetryableError(exc) from exc
except (requests.InvalidSchema, requests.MissingSchema, requests.InvalidURL) as exc:
raise NonRetryableError(exc) from exc
else:
try:
response.raise_for_status()
except requests.HTTPError as exc:
if 400 <= exc.response.status_code < 500:
raise NonRetryableError(exc) from exc
elif 500 <= exc.response.status_code < 600:
raise RetryableError(exc) from exc
else:
raise
return response.content
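# --- Illustrative sketch (not part of the original module) -------------------
# The Retryable/NonRetryable split above is what makes a plain retry loop safe:
# only failures that leave client-side state untouched are retried. The helper
# name, attempt count and backoff policy below are assumptions for illustration.
def fetch_data_with_retry(url: str, attempts: int = 3, base_delay: float = 0.5) -> bytes:
    import time
    for attempt in range(1, attempts + 1):
        try:
            return fetch_data(url)
        except RetryableError:
            if attempt == attempts:
                raise
            # simple linear backoff before the next attempt
            time.sleep(base_delay * attempt)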
@main.before_app_request
def add_collection_to_g():
if not hasattr(g, 'collection'):
try:
collection = controllers.get_current_collection()
setattr(g, 'collection', collection)
except Exception:
            # TODO: decide what to do here
setattr(g, 'collection', {})
@main.after_request
def add_header(response):
response.headers['x-content-type-options'] = 'nosniff'
return response
@main.after_request
def add_language_code(response):
language = session.get('lang', get_locale())
response.set_cookie('language', language)
return response
@main.before_app_request
def add_forms_to_g():
setattr(g, 'email_share', forms.EmailShareForm())
setattr(g, 'email_contact', forms.ContactForm())
setattr(g, 'error', forms.ErrorForm())
@main.before_app_request
def add_scielo_org_config_to_g():
language = session.get('lang', get_locale())
scielo_org_links = {
key: url[language]
for key, url in current_app.config.get('SCIELO_ORG_URIS', {}).items()
}
setattr(g, 'scielo_org', scielo_org_links)
@babel.localeselector
def get_locale():
langs = current_app.config.get('LANGUAGES')
lang_from_headers = request.accept_languages.best_match(list(langs.keys()))
if 'lang' not in list(session.keys()):
session['lang'] = lang_from_headers
if not lang_from_headers and not session['lang']:
        # If the language cannot be detected and the session has no 'lang'
        # key, fall back to the default locale.
session['lang'] = current_app.config.get('BABEL_DEFAULT_LOCALE')
return session['lang']
@main.route('/set_locale/<string:lang_code>/')
def set_locale(lang_code):
langs = current_app.config.get('LANGUAGES')
if lang_code not in list(langs.keys()):
abort(400, _('Código de idioma inválido'))
referrer = request.referrer
hash = request.args.get('hash')
if hash:
referrer += "#" + hash
    # store the language code in the session
session['lang'] = lang_code
return redirect(referrer)
def get_lang_from_session():
"""
    Try to return the session language; if that is not possible, return
    BABEL_DEFAULT_LOCALE.
"""
try:
return session['lang']
except KeyError:
return current_app.config.get('BABEL_DEFAULT_LOCALE')
@main.route('/')
@cache.cached(key_prefix=cache_key_with_lang)
def index():
language = session.get('lang', get_locale())
news = controllers.get_latest_news_by_lang(language)
tweets = controllers.get_collection_tweets()
press_releases = controllers.get_press_releases({'language': language})
urls = {
'downloads': '{0}/w/accesses?collection={1}'.format(
current_app.config['METRICS_URL'],
current_app.config['OPAC_COLLECTION']),
'references': '{0}/w/publication/size?collection={1}'.format(
current_app.config['METRICS_URL'],
current_app.config['OPAC_COLLECTION']),
'other': '{0}/?collection={1}'.format(
current_app.config['METRICS_URL'],
current_app.config['OPAC_COLLECTION'])
}
if (
g.collection is not None
and isinstance(g.collection, Collection)
and g.collection.metrics is not None
and current_app.config['USE_HOME_METRICS']
):
g.collection.metrics.total_journal = Journal.objects.filter(
is_public=True, current_status="current"
).count()
g.collection.metrics.total_article = Article.objects.filter(
is_public=True
).count()
context = {
'news': news,
'urls': urls,
'tweets': tweets,
'press_releases': press_releases,
}
return render_template("collection/index.html", **context)
# ##################################Collection###################################
@main.route('/journals/alpha')
@cache.cached(key_prefix=cache_key_with_lang)
def collection_list():
allowed_filters = ["current", "no-current", ""]
query_filter = request.args.get("status", "")
if not query_filter in allowed_filters:
query_filter = ""
journals_list = [
controllers.get_journal_json_data(journal)
for journal in controllers.get_journals(query_filter=query_filter)
]
return render_template("collection/list_journal.html",
**{'journals_list': journals_list, 'query_filter': query_filter})
@main.route("/journals/thematic")
@cache.cached(key_prefix=cache_key_with_lang)
def collection_list_thematic():
allowed_query_filters = ["current", "no-current", ""]
allowed_thematic_filters = ["areas", "wos", "publisher"]
thematic_table = {
"areas": "study_areas",
"wos": "subject_categories",
"publisher": "publisher_name",
}
query_filter = request.args.get("status", "")
title_query = request.args.get("query", "")
thematic_filter = request.args.get("filter", "areas")
if not query_filter in allowed_query_filters:
query_filter = ""
if not thematic_filter in allowed_thematic_filters:
thematic_filter = "areas"
lang = get_lang_from_session()[:2].lower()
objects = controllers.get_journals_grouped_by(
thematic_table[thematic_filter],
title_query,
query_filter=query_filter,
lang=lang,
)
return render_template(
"collection/list_thematic.html",
**{"objects": objects, "query_filter": query_filter, "filter": thematic_filter}
)
@main.route('/journals/feed/')
@cache.cached(key_prefix=cache_key_with_lang)
def collection_list_feed():
language = session.get('lang', get_locale())
collection = controllers.get_current_collection()
title = 'SciELO - %s - %s' % (collection.name, _('Últimos periódicos inseridos na coleção'))
subtitle = _('10 últimos periódicos inseridos na coleção %s' % collection.name)
feed = AtomFeed(title,
subtitle=subtitle,
feed_url=request.url, url=request.url_root)
journals = controllers.get_journals_paginated(
title_query='', page=1, order_by='-created', per_page=10)
if not journals.items:
feed.add('Nenhum periódico encontrado',
url=request.url,
updated=datetime.now())
for journal in journals.items:
issues = controllers.get_issues_by_jid(journal.jid, is_public=True)
last_issue = issues[0] if issues else None
articles = []
if last_issue:
articles = controllers.get_articles_by_iid(last_issue.iid,
is_public=True)
result_dict = OrderedDict()
for article in articles:
section = article.get_section_by_lang(language[:2])
result_dict.setdefault(section, [])
result_dict[section].append(article)
context = {
'journal': journal,
'articles': result_dict,
'language': language,
'last_issue': last_issue
}
feed.add(journal.title,
render_template("collection/list_feed_content.html", **context),
content_type='html',
author=journal.publisher_name,
url=url_external('main.journal_detail', url_seg=journal.url_segment),
updated=journal.updated,
published=journal.created)
return feed.get_response()
@main.route("/about/", methods=['GET'])
@main.route('/about/<string:slug_name>', methods=['GET'])
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def about_collection(slug_name=None):
language = session.get('lang', get_locale())
context = {}
page = None
if slug_name:
        # a specific page was requested
page = controllers.get_page_by_slug_name(slug_name, language)
if not page:
abort(404, _('Página não encontrada'))
context['page'] = page
else:
        # when no page is given, show the list of pages
pages = controllers.get_pages_by_lang(language)
context['pages'] = pages
return render_template("collection/about.html", **context)
# ###################################Journal#####################################
@main.route('/scielo.php/')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def router_legacy():
script_php = request.args.get('script', None)
pid = request.args.get('pid', None)
tlng = request.args.get('tlng', None)
allowed_scripts = [
'sci_serial', 'sci_issuetoc', 'sci_arttext', 'sci_abstract', 'sci_issues', 'sci_pdf'
]
if (script_php is not None) and (script_php in allowed_scripts) and not pid:
        # a valid script_php was given without a pid: invalid request
abort(400, _(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid))
elif script_php and pid:
if script_php == 'sci_serial':
# pid = issn
journal = controllers.get_journal_by_issn(pid)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
return redirect(url_for('main.journal_detail',
url_seg=journal.url_segment), code=301)
elif script_php == 'sci_issuetoc':
issue = controllers.get_issue_by_pid(pid)
if not issue:
abort(404, _('Número não encontrado'))
if not issue.is_public:
abort(404, ISSUE_UNPUBLISH + _(issue.unpublish_reason))
if not issue.journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(issue.journal.unpublish_reason))
if issue.url_segment and "ahead" in issue.url_segment:
return redirect(
                    url_for('main.aop_toc', url_seg=issue.journal.url_segment), code=301)
return redirect(
url_for(
"main.issue_toc",
url_seg=issue.journal.url_segment,
url_seg_issue=issue.url_segment),
301
)
elif script_php == 'sci_arttext' or script_php == 'sci_abstract':
article = controllers.get_article_by_pid_v2(pid)
if not article:
abort(404, _('Artigo não encontrado'))
            # 'abstract' or None (not False, because False would be converted to the string 'False')
part = (script_php == 'sci_abstract' and 'abstract') or None
if tlng not in article.languages:
tlng = article.original_language
return redirect(url_for('main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article.aid,
part=part,
lang=tlng),
code=301)
elif script_php == 'sci_issues':
journal = controllers.get_journal_by_issn(pid)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
return redirect(url_for('main.issue_grid',
url_seg=journal.url_segment), 301)
elif script_php == 'sci_pdf':
            # access to the article PDF:
article = controllers.get_article_by_pid_v2(pid)
if not article:
abort(404, _('Artigo não encontrado'))
return redirect(
url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article.aid,
format='pdf',
),
code=301
)
else:
abort(400, _(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid))
else:
return redirect('/')
@main.route('/<string:journal_seg>')
@main.route('/journal/<string:journal_seg>')
def journal_detail_legacy_url(journal_seg):
return redirect(url_for('main.journal_detail',
url_seg=journal_seg), code=301)
@main.route('/j/<string:url_seg>/')
@cache.cached(key_prefix=cache_key_with_lang)
def journal_detail(url_seg):
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
utils.fix_journal_last_issue(journal)
    # TODO: adjust so that only news related to this journal are shown
language = session.get('lang', get_locale())
news = controllers.get_latest_news_by_lang(language)
# Press releases
press_releases = controllers.get_press_releases({
'journal': journal,
'language': language})
    # Section list
    # English is always used for the sections on the journal home page
if journal.last_issue and journal.current_status == "current":
sections = [section for section in journal.last_issue.sections if section.language == 'en']
recent_articles = controllers.get_recent_articles_of_issue(journal.last_issue.iid, is_public=True)
else:
sections = []
recent_articles = []
latest_issue = journal.last_issue
if latest_issue:
latest_issue_legend = descriptive_short_format(
title=journal.title, short_title=journal.short_title,
pubdate=str(latest_issue.year), volume=latest_issue.volume, number=latest_issue.number,
suppl=latest_issue.suppl_text, language=language[:2].lower())
else:
latest_issue_legend = ''
journal_metrics = controllers.get_journal_metrics(journal)
context = {
'journal': journal,
'press_releases': press_releases,
'recent_articles': recent_articles,
'journal_study_areas': [
STUDY_AREAS.get(study_area.upper()) for study_area in journal.study_areas
],
        # the first item in the list is the latest issue.
        # conditional to check whether issues contains any items
'last_issue': latest_issue,
'latest_issue_legend': latest_issue_legend,
'sections': sections if sections else None,
'news': news,
'journal_metrics': journal_metrics
}
return render_template("journal/detail.html", **context)
@main.route('/journal/<string:url_seg>/feed/')
@cache.cached(key_prefix=cache_key_with_lang)
def journal_feed(url_seg):
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
issues = controllers.get_issues_by_jid(journal.jid, is_public=True)
last_issue = issues[0] if issues else None
articles = controllers.get_articles_by_iid(last_issue.iid, is_public=True)
feed = AtomFeed(journal.title,
feed_url=request.url,
url=request.url_root,
subtitle=utils.get_label_issue(last_issue))
feed_language = session.get('lang', get_locale())
feed_language = feed_language[:2].lower()
for article in articles:
        # ######### TODO: review #########
article_lang = feed_language
if feed_language not in article.languages:
article_lang = article.original_language
feed.add(article.title or _('Artigo sem título'),
render_template("issue/feed_content.html", article=article),
content_type='html',
id=article.doi or article.pid,
author=article.authors,
url=url_external('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang=article_lang),
updated=journal.updated,
published=journal.created)
return feed.get_response()
@main.route("/journal/<string:url_seg>/about/", methods=['GET'])
@cache.cached(key_prefix=cache_key_with_lang)
def about_journal(url_seg):
language = session.get('lang', get_locale())
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
latest_issue = utils.fix_journal_last_issue(journal)
if latest_issue:
latest_issue_legend = descriptive_short_format(
title=journal.title, short_title=journal.short_title,
pubdate=str(latest_issue.year), volume=latest_issue.volume, number=latest_issue.number,
suppl=latest_issue.suppl_text, language=language[:2].lower())
else:
latest_issue_legend = None
page = controllers.get_page_by_journal_acron_lang(journal.acronym, language)
context = {
'journal': journal,
'latest_issue_legend': latest_issue_legend,
'last_issue': latest_issue,
'journal_study_areas': [
STUDY_AREAS.get(study_area.upper()) for study_area in journal.study_areas
],
}
if page:
context['content'] = page.content
if page.updated_at:
context['page_updated_at'] = page.updated_at
return render_template("journal/about.html", **context)
@main.route("/journals/search/alpha/ajax/", methods=['GET', ])
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def journals_search_alpha_ajax():
if not request.is_xhr:
abort(400, _('Requisição inválida. Deve ser por ajax'))
query = request.args.get('query', '', type=str)
query_filter = request.args.get('query_filter', '', type=str)
page = request.args.get('page', 1, type=int)
lang = get_lang_from_session()[:2].lower()
response_data = controllers.get_alpha_list_from_paginated_journals(
title_query=query,
query_filter=query_filter,
page=page,
lang=lang)
return jsonify(response_data)
@main.route("/journals/search/group/by/filter/ajax/", methods=['GET'])
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def journals_search_by_theme_ajax():
if not request.is_xhr:
abort(400, _('Requisição inválida. Deve ser por ajax'))
query = request.args.get('query', '', type=str)
query_filter = request.args.get('query_filter', '', type=str)
filter = request.args.get('filter', 'areas', type=str)
lang = get_lang_from_session()[:2].lower()
if filter == 'areas':
objects = controllers.get_journals_grouped_by('study_areas', query, query_filter=query_filter, lang=lang)
elif filter == 'wos':
objects = controllers.get_journals_grouped_by('subject_categories', query, query_filter=query_filter, lang=lang)
elif filter == 'publisher':
objects = controllers.get_journals_grouped_by('publisher_name', query, query_filter=query_filter, lang=lang)
else:
return jsonify({
'error': 401,
'message': _('Parámetro "filter" é inválido, deve ser "areas", "wos" ou "publisher".')
})
return jsonify(objects)
@main.route("/journals/download/<string:list_type>/<string:extension>/", methods=['GET', ])
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def download_journal_list(list_type, extension):
if extension.lower() not in ['csv', 'xls']:
abort(401, _('Parámetro "extension" é inválido, deve ser "csv" ou "xls".'))
elif list_type.lower() not in ['alpha', 'areas', 'wos', 'publisher']:
abort(401, _('Parámetro "list_type" é inválido, deve ser: "alpha", "areas", "wos" ou "publisher".'))
else:
if extension.lower() == 'xls':
mimetype = 'application/vnd.ms-excel'
else:
mimetype = 'text/csv'
query = request.args.get('query', '', type=str)
data = controllers.get_journal_generator_for_csv(list_type=list_type,
title_query=query,
extension=extension.lower())
timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
filename = 'journals_%s_%s.%s' % (list_type, timestamp, extension)
response = Response(data, mimetype=mimetype)
response.headers['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
@main.route("/<string:url_seg>/contact", methods=['POST'])
def contact(url_seg):
if not request.is_xhr:
abort(403, _('Requisição inválida, deve ser ajax.'))
if utils.is_recaptcha_valid(request):
form = forms.ContactForm(request.form)
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal.enable_contact:
abort(403, _('Periódico não permite envio de email.'))
recipients = journal.editor_email
if form.validate():
sent, message = controllers.send_email_contact(recipients,
form.data['name'],
form.data['your_email'],
form.data['message'])
return jsonify({'sent': sent, 'message': str(message),
'fields': [key for key in form.data.keys()]})
else:
return jsonify({'sent': False, 'message': form.errors,
'fields': [key for key in form.data.keys()]})
else:
abort(400, _('Requisição inválida, captcha inválido.'))
@main.route("/form_contact/<string:url_seg>/", methods=['GET'])
def form_contact(url_seg):
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal:
abort(404, _('Periódico não encontrado'))
context = {
'journal': journal
}
return render_template("journal/includes/contact_form.html", **context)
# ###################################Issue#######################################
@main.route('/grid/<string:url_seg>/')
def issue_grid_legacy(url_seg):
return redirect(url_for('main.issue_grid', url_seg=url_seg), 301)
@main.route('/j/<string:url_seg>/grid')
@cache.cached(key_prefix=cache_key_with_lang)
def issue_grid(url_seg):
journal = controllers.get_journal_by_url_seg(url_seg)
if not journal:
abort(404, _('Periódico não encontrado'))
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
    # session language
language = session.get('lang', get_locale())
    # Default ordering used by ``get_issues_by_jid``: "-year", "-volume", "-order"
issues_data = controllers.get_issues_for_grid_by_jid(journal.id, is_public=True)
latest_issue = issues_data['last_issue']
if latest_issue:
latest_issue_legend = descriptive_short_format(
title=journal.title, short_title=journal.short_title,
pubdate=str(latest_issue.year), volume=latest_issue.volume, number=latest_issue.number,
suppl=latest_issue.suppl_text, language=language[:2].lower())
else:
latest_issue_legend = None
context = {
'journal': journal,
'last_issue': issues_data['last_issue'],
'latest_issue_legend': latest_issue_legend,
'volume_issue': issues_data['volume_issue'],
'ahead': issues_data['ahead'],
'result_dict': issues_data['ordered_for_grid'],
'journal_study_areas': [
STUDY_AREAS.get(study_area.upper()) for study_area in journal.study_areas
],
}
return render_template("issue/grid.html", **context)
@main.route('/toc/<string:url_seg>/<string:url_seg_issue>/')
def issue_toc_legacy(url_seg, url_seg_issue):
if url_seg_issue and "ahead" in url_seg_issue:
return redirect(url_for('main.aop_toc', url_seg=url_seg), code=301)
return redirect(
url_for('main.issue_toc',
url_seg=url_seg,
url_seg_issue=url_seg_issue),
code=301)
@main.route('/j/<string:url_seg>/i/<string:url_seg_issue>/')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def issue_toc(url_seg, url_seg_issue):
section_filter = None
goto = request.args.get("goto", None, type=str)
if goto not in ("previous", "next"):
goto = None
if goto in (None, "next") and "ahead" in url_seg_issue:
        # redirect to `aop_toc`
return redirect(url_for('main.aop_toc', url_seg=url_seg), code=301)
    # session language
language = session.get('lang', get_locale())
if current_app.config["FILTER_SECTION_ENABLE"]:
        # document section, if one was selected
section_filter = request.args.get('section', '', type=str).upper()
    # fetch the issue
issue = controllers.get_issue_by_url_seg(url_seg, url_seg_issue)
if not issue:
abort(404, _('Número não encontrado'))
if not issue.is_public:
abort(404, ISSUE_UNPUBLISH + _(issue.unpublish_reason))
    # fetch the journal
journal = issue.journal
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
    # fill in the url_segment of last_issue
utils.fix_journal_last_issue(journal)
    # goto_next_or_previous_issue (redirects)
goto_url = goto_next_or_previous_issue(
issue, request.args.get('goto', None, type=str))
if goto_url:
return redirect(goto_url, code=301)
    # fetch the documents
articles = controllers.get_articles_by_iid(issue.iid, is_public=True)
if articles:
        # collect ALL document sections of this table of contents
sections = sorted({a.section.upper() for a in articles if a.section})
else:
        # no documents, so there are no sections
sections = []
if current_app.config["FILTER_SECTION_ENABLE"] and section_filter != '':
        # keep only the documents of the selected section
articles = [a for a in articles if a.section.upper() == section_filter]
    # collect PDF and TEXT languages of each document
has_math_content = False
for article in articles:
article_text_languages = [doc['lang'] for doc in article.htmls]
article_pdf_languages = [(doc['lang'], doc['url']) for doc in article.pdfs]
setattr(article, "article_text_languages", article_text_languages)
setattr(article, "article_pdf_languages", article_pdf_languages)
if 'mml:' in article.title:
has_math_content = True
    # build the bibliographic strip
issue_bibliographic_strip = descriptive_short_format(
title=journal.title, short_title=journal.short_title,
pubdate=str(issue.year), volume=issue.volume, number=issue.number,
suppl=issue.suppl_text, language=language[:2].lower())
context = {
'this_page_url': url_for(
'main.issue_toc',
url_seg=url_seg,
url_seg_issue=url_seg_issue),
'has_math_content': has_math_content,
'journal': journal,
'issue': issue,
'issue_bibliographic_strip': issue_bibliographic_strip,
'articles': articles,
'sections': sections,
'section_filter': section_filter,
'journal_study_areas': [
STUDY_AREAS.get(study_area.upper()) for study_area in journal.study_areas
],
'last_issue': journal.last_issue
}
return render_template("issue/toc.html", **context)
def goto_next_or_previous_issue(current_issue, goto_param):
if goto_param not in ["next", "previous"]:
return None
all_issues = list(
controllers.get_issues_by_jid(current_issue.journal.id, is_public=True))
if goto_param == "next":
selected_issue = utils.get_next_issue(all_issues, current_issue)
elif goto_param == "previous":
selected_issue = utils.get_prev_issue(all_issues, current_issue)
if selected_issue in (None, current_issue):
        # no redirect needed
return None
try:
url_seg_issue = selected_issue.url_segment
except AttributeError:
return None
else:
return url_for('main.issue_toc',
url_seg=selected_issue.journal.url_segment,
url_seg_issue=url_seg_issue)
def get_next_or_previous_issue(current_issue, goto_param):
if goto_param not in ["next", "previous"]:
return current_issue
all_issues = list(
controllers.get_issues_by_jid(current_issue.journal.id, is_public=True))
if goto_param == "next":
return utils.get_next_issue(all_issues, current_issue)
return utils.get_prev_issue(all_issues, current_issue)
@main.route('/j/<string:url_seg>/aop')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def aop_toc(url_seg):
section_filter = request.args.get('section', '', type=str).upper()
aop_issues = controllers.get_aop_issues(url_seg) or []
if not aop_issues:
abort(404, _('Artigos ahead of print não encontrados'))
goto = request.args.get("goto", None, type=str)
if goto == "previous":
url = goto_next_or_previous_issue(aop_issues[-1], goto)
if url:
            return redirect(url, code=301)
journal = aop_issues[0].journal
if not journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(journal.unpublish_reason))
utils.fix_journal_last_issue(journal)
articles = []
for aop_issue in aop_issues:
_articles = controllers.get_articles_by_iid(
aop_issue.iid, is_public=True)
if _articles:
articles.extend(_articles)
if not articles:
abort(404, _('Artigos ahead of print não encontrados'))
sections = sorted({a.section.upper() for a in articles if a.section})
if section_filter != '':
articles = [a for a in articles if a.section.upper() == section_filter]
for article in articles:
article_text_languages = [doc['lang'] for doc in article.htmls]
article_pdf_languages = [(doc['lang'], doc['url']) for doc in article.pdfs]
setattr(article, "article_text_languages", article_text_languages)
setattr(article, "article_pdf_languages", article_pdf_languages)
context = {
'this_page_url': url_for("main.aop_toc", url_seg=url_seg),
'journal': journal,
'issue': aop_issues[0],
'issue_bibliographic_strip': "ahead of print",
'articles': articles,
'sections': sections,
'section_filter': section_filter,
'journal_study_areas': [
STUDY_AREAS.get(study_area.upper())
for study_area in journal.study_areas
],
        # the first item in the list is the latest issue.
'last_issue': journal.last_issue
}
return render_template("issue/toc.html", **context)
@main.route('/feed/<string:url_seg>/<string:url_seg_issue>/')
@cache.cached(key_prefix=cache_key_with_lang)
def issue_feed(url_seg, url_seg_issue):
issue = controllers.get_issue_by_url_seg(url_seg, url_seg_issue)
if not issue:
abort(404, _('Número não encontrado'))
if not issue.is_public:
abort(404, ISSUE_UNPUBLISH + _(issue.unpublish_reason))
if not issue.journal.is_public:
abort(404, JOURNAL_UNPUBLISH + _(issue.journal.unpublish_reason))
journal = issue.journal
articles = controllers.get_articles_by_iid(issue.iid, is_public=True)
feed = AtomFeed(journal.title or "",
feed_url=request.url,
url=request.url_root,
subtitle=utils.get_label_issue(issue))
feed_language = session.get('lang', get_locale())
for article in articles:
        # ######### TODO: review #########
article_lang = feed_language
if feed_language not in article.languages:
article_lang = article.original_language
        feed.add(article.title or 'Unknown title',
render_template("issue/feed_content.html", article=article),
content_type='html',
author=article.authors,
id=article.doi or article.pid,
url=url_external('main.article_detail_v3',
url_seg=journal.url_segment,
article_pid_v3=article.aid,
lang=article_lang),
updated=journal.updated,
published=journal.created)
return feed.get_response()
# ##################################Article######################################
@main.route('/article/<regex("S\d{4}-\d{3}[0-9xX][0-2][0-9]{3}\d{4}\d{5}"):pid>/')
@cache.cached(key_prefix=cache_key_with_lang)
def article_detail_pid(pid):
article = controllers.get_article_by_pid(pid)
if not article:
article = controllers.get_article_by_oap_pid(pid)
if not article:
abort(404, _('Artigo não encontrado'))
return redirect(url_for('main.article_detail_v3',
url_seg=article.journal.acronym,
article_pid_v3=article.aid))
def render_html_from_xml(article, lang, gs_abstract=False):
logger.debug("Get XML: %s", article.xml)
if current_app.config["SSM_XML_URL_REWRITE"]:
result = fetch_data(use_ssm_url(article.xml))
else:
result = fetch_data(article.xml)
xml = etree.parse(BytesIO(result))
generator = HTMLGenerator.parse(
xml, valid_only=False, gs_abstract=gs_abstract, output_style="website")
return generator.generate(lang), generator.languages
def render_html_from_html(article, lang):
html_url = [html
for html in article.htmls
if html['lang'] == lang]
try:
html_url = html_url[0]['url']
except IndexError:
raise ValueError('Artigo não encontrado') from None
result = fetch_data(use_ssm_url(html_url))
html = result.decode('utf8')
text_languages = [html['lang'] for html in article.htmls]
return html, text_languages
def render_html_abstract(article, lang):
abstract_text = ''
for abstract in article.abstracts:
if abstract['language'] == lang:
abstract_text = abstract["text"]
break
return abstract_text, article.abstract_languages
def render_html(article, lang, gs_abstract=False):
if article.xml:
return render_html_from_xml(article, lang, gs_abstract)
elif article.htmls:
if gs_abstract:
return render_html_abstract(article, lang)
return render_html_from_html(article, lang)
else:
        # TODO: fix the tests that expect the ``htmls`` attribute
        # ideally this would raise a ValueError.
return '', []
# TODO: remove as soon as the Article.xml value is consistent in the
# database
def use_ssm_url(url):
"""Normaliza a string `url` de acordo com os valores das diretivas de
configuração OPAC_SSM_SCHEME, OPAC_SSM_DOMAIN e OPAC_SSM_PORT.
A normalização busca obter uma URL absoluta em função de uma relativa, ou
uma absoluta em função de uma absoluta, mas com as partes *scheme* e
*authority* trocadas pelas definidas nas diretivas citadas anteriormente.
Este código deve ser removido assim que o valor de Article.xml estiver
consistente, i.e., todos os registros possuirem apenas URLs absolutas.
"""
if url.startswith("http"):
parsed_url = urlparse(url)
return current_app.config["SSM_BASE_URI"] + parsed_url.path
else:
return current_app.config["SSM_BASE_URI"] + url
@main.route('/article/<string:url_seg>/<string:url_seg_issue>/<string:url_seg_article>/')
@main.route('/article/<string:url_seg>/<string:url_seg_issue>/<string:url_seg_article>/<regex("(?:\w{2})"):lang_code>/')
@main.route('/article/<string:url_seg>/<string:url_seg_issue>/<regex("(.*)"):url_seg_article>/')
@main.route('/article/<string:url_seg>/<string:url_seg_issue>/<regex("(.*)"):url_seg_article>/<regex("(?:\w{2})"):lang_code>/')
@cache.cached(key_prefix=cache_key_with_lang)
def article_detail(url_seg, url_seg_issue, url_seg_article, lang_code=''):
issue = controllers.get_issue_by_url_seg(url_seg, url_seg_issue)
if not issue:
abort(404, _('Issue não encontrado'))
article = controllers.get_article_by_issue_article_seg(issue.iid, url_seg_article)
if article is None:
article = controllers.get_article_by_aop_url_segs(
issue.journal, url_seg_issue, url_seg_article
)
if article is None:
abort(404, _('Artigo não encontrado'))
req_params = {
"url_seg": article.journal.acronym,
"article_pid_v3": article.aid,
}
if lang_code:
req_params["lang"] = lang_code
return redirect(url_for('main.article_detail_v3', **req_params))
@main.route('/j/<string:url_seg>/a/<string:article_pid_v3>/')
@main.route('/j/<string:url_seg>/a/<string:article_pid_v3>/<string:part>/')
@cache.cached(key_prefix=cache_key_with_lang)
def article_detail_v3(url_seg, article_pid_v3, part=None):
qs_lang = request.args.get('lang', type=str) or None
qs_goto = request.args.get('goto', type=str) or None
qs_stop = request.args.get('stop', type=str) or None
qs_format = request.args.get('format', 'html', type=str)
gs_abstract = (part == "abstract")
if part and not gs_abstract:
abort(404,
_("Não existe '{}'. No seu lugar use '{}'"
).format(part, 'abstract'))
try:
qs_lang, article = controllers.get_article(
article_pid_v3, url_seg, qs_lang, gs_abstract, qs_goto)
if qs_goto:
return redirect(
url_for(
'main.article_detail_v3',
url_seg=url_seg,
article_pid_v3=article.aid,
part=part,
format=qs_format,
lang=qs_lang,
stop=getattr(article, 'stop', None),
),
code=301
)
except (controllers.PreviousOrNextArticleNotFoundError) as e:
if gs_abstract:
abort(404, _('Resumo inexistente'))
abort(404, _('Artigo inexistente'))
except (controllers.ArticleNotFoundError,
controllers.ArticleJournalNotFoundError):
abort(404, _('Artigo não encontrado'))
except controllers.ArticleLangNotFoundError:
return redirect(
url_for(
'main.article_detail_v3',
url_seg=url_seg,
article_pid_v3=article_pid_v3,
format=qs_format,
),
code=301
)
except controllers.ArticleAbstractNotFoundError:
abort(404, _('Recurso não encontrado'))
except controllers.ArticleIsNotPublishedError as e:
abort(404, "{}{}".format(ARTICLE_UNPUBLISH, e))
except controllers.IssueIsNotPublishedError as e:
abort(404, "{}{}".format(ISSUE_UNPUBLISH, e))
except controllers.JournalIsNotPublishedError as e:
abort(404, "{}{}".format(JOURNAL_UNPUBLISH, e))
except ValueError as e:
abort(404, str(e))
def _handle_html():
citation_pdf_url = None
for pdf_data in article.pdfs:
if pdf_data.get("lang") == qs_lang:
citation_pdf_url = url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article_pid_v3,
lang=qs_lang,
format="pdf",
)
break
website = request.url
if website:
parsed_url = urlparse(request.url)
if current_app.config["FORCE_USE_HTTPS_GOOGLE_TAGS"]:
website = "{}://{}".format('https', parsed_url.netloc)
else:
website = "{}://{}".format(parsed_url.scheme, parsed_url.netloc)
if citation_pdf_url:
citation_pdf_url = "{}{}".format(website, citation_pdf_url)
try:
html, text_languages = render_html(article, qs_lang, gs_abstract)
except (ValueError, NonRetryableError):
abort(404, _('HTML do Artigo não encontrado ou indisponível'))
except RetryableError:
abort(500, _('Erro inesperado'))
text_versions = sorted(
[
(
lang,
display_original_lang_name(lang),
url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article_pid_v3,
lang=lang
)
)
for lang in text_languages
]
)
citation_xml_url = "{}{}".format(
website,
url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article_pid_v3,
format="xml",
lang=article.original_language,
)
)
context = {
'next_article': qs_stop != 'next',
'previous_article': qs_stop != 'previous',
'article': article,
'journal': article.journal,
'issue': article.issue,
'html': html,
'citation_pdf_url': citation_pdf_url,
'citation_xml_url': citation_xml_url,
'article_lang': qs_lang,
'text_versions': text_versions,
'related_links': controllers.related_links(article),
'gs_abstract': gs_abstract,
'part': part,
}
return render_template("article/detail.html", **context)
def _handle_pdf():
if not article.pdfs:
abort(404, _('PDF do Artigo não encontrado'))
pdf_info = [pdf for pdf in article.pdfs if pdf['lang'] == qs_lang]
if len(pdf_info) != 1:
abort(404, _('PDF do Artigo não encontrado'))
try:
pdf_url = pdf_info[0]['url']
except (IndexError, KeyError, ValueError, TypeError):
abort(404, _('PDF do Artigo não encontrado'))
if pdf_url:
return get_pdf_content(pdf_url)
raise abort(404, _('Recurso do Artigo não encontrado. Caminho inválido!'))
def _handle_xml():
if current_app.config["SSM_XML_URL_REWRITE"]:
result = fetch_data(use_ssm_url(article.xml))
else:
result = fetch_data(article.xml)
response = make_response(result)
response.headers['Content-Type'] = 'application/xml'
return response
if 'html' == qs_format:
return _handle_html()
elif 'pdf' == qs_format:
return _handle_pdf()
elif 'xml' == qs_format:
return _handle_xml()
else:
abort(400, _('Formato não suportado'))
@main.route('/readcube/epdf/')
@main.route('/readcube/epdf.php')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def article_epdf():
doi = request.args.get('doi', None, type=str)
pid = request.args.get('pid', None, type=str)
pdf_path = request.args.get('pdf_path', None, type=str)
lang = request.args.get('lang', None, type=str)
if not all([doi, pid, pdf_path, lang]):
abort(400, _('Parâmetros insuficientes para obter o EPDF do artigo'))
else:
context = {
'doi': doi,
'pid': pid,
'pdf_path': pdf_path,
'lang': lang,
}
return render_template("article/epdf.html", **context)
def get_pdf_content(url):
logger.debug("Get PDF: %s", url)
if current_app.config["SSM_ARTICLE_ASSETS_OR_RENDITIONS_URL_REWRITE"]:
url = use_ssm_url(url)
try:
response = fetch_data(url)
except NonRetryableError:
abort(404, _('PDF não encontrado'))
except RetryableError:
abort(500, _('Erro inesperado'))
else:
mimetype, __ = mimetypes.guess_type(url)
return Response(response, mimetype=mimetype)
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def get_content_from_ssm(resource_ssm_media_path):
resource_ssm_full_url = current_app.config['SSM_BASE_URI'] + resource_ssm_media_path
url = resource_ssm_full_url.strip()
mimetype, __ = mimetypes.guess_type(url)
try:
ssm_response = fetch_data(url)
except NonRetryableError:
abort(404, _('Recurso não encontrado'))
except RetryableError:
abort(500, _('Erro inesperado'))
else:
return Response(ssm_response, mimetype=mimetype)
@main.route('/media/assets/<regex("(.*)"):relative_media_path>')
@cache.cached(key_prefix=cache_key_with_lang)
def media_assets_proxy(relative_media_path):
resource_ssm_path = '{ssm_media_path}{resource_path}'.format(
ssm_media_path=current_app.config['SSM_MEDIA_PATH'],
resource_path=relative_media_path)
return get_content_from_ssm(resource_ssm_path)
@main.route('/article/ssm/content/raw/')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def article_ssm_content_raw():
resource_ssm_path = request.args.get('resource_ssm_path', None)
if not resource_ssm_path:
raise abort(404, _('Recurso do Artigo não encontrado. Caminho inválido!'))
else:
return get_content_from_ssm(resource_ssm_path)
@main.route('/pdf/<string:url_seg>/<string:url_seg_issue>/<string:url_seg_article>')
@main.route('/pdf/<string:url_seg>/<string:url_seg_issue>/<string:url_seg_article>/<regex("(?:\w{2})"):lang_code>')
@main.route('/pdf/<string:url_seg>/<string:url_seg_issue>/<regex("(.*)"):url_seg_article>')
@main.route('/pdf/<string:url_seg>/<string:url_seg_issue>/<regex("(.*)"):url_seg_article>/<regex("(?:\w{2})"):lang_code>')
@cache.cached(key_prefix=cache_key_with_lang)
def article_detail_pdf(url_seg, url_seg_issue, url_seg_article, lang_code=''):
"""
    Expected patterns:
`/pdf/csc/2021.v26suppl1/2557-2558`
`/pdf/csc/2021.v26suppl1/2557-2558/en`
"""
if not lang_code and "." not in url_seg_issue:
return router_legacy_pdf(url_seg, url_seg_issue, url_seg_article)
issue = controllers.get_issue_by_url_seg(url_seg, url_seg_issue)
if not issue:
abort(404, _('Issue não encontrado'))
article = controllers.get_article_by_issue_article_seg(issue.iid, url_seg_article)
if not article:
abort(404, _('Artigo não encontrado'))
req_params = {
'url_seg': article.journal.url_segment,
'article_pid_v3': article.aid,
'format': 'pdf',
}
if lang_code:
req_params['lang'] = lang_code
return redirect(url_for('main.article_detail_v3', **req_params), code=301)
@main.route('/pdf/<string:journal_acron>/<string:issue_info>/<string:pdf_filename>.pdf')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def router_legacy_pdf(journal_acron, issue_info, pdf_filename):
pdf_filename = '%s.pdf' % pdf_filename
journal = controllers.get_journal_by_url_seg(journal_acron)
if not journal:
abort(404, _('Este PDF não existe em http://www.scielo.br. Consulte http://search.scielo.org'))
article = controllers.get_article_by_pdf_filename(
journal_acron, issue_info, pdf_filename)
if not article:
abort(404, _('PDF do artigo não foi encontrado'))
return redirect(
url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article.aid,
format='pdf',
lang=article._pdf_lang,
),
code=301
)
@main.route('/cgi-bin/fbpe/<string:text_or_abstract>/')
@cache.cached(key_prefix=cache_key_with_lang_with_qs)
def router_legacy_article(text_or_abstract):
pid = request.args.get('pid', None)
lng = request.args.get('lng', None)
if not (text_or_abstract in ['fbtext', 'fbabs'] and pid):
        # invalid script name or missing pid
abort(400, _('Requsição inválida ao tentar acessar o artigo com pid: %s' % pid))
article = controllers.get_article_by_pid_v1(pid)
if not article:
abort(404, _('Artigo não encontrado'))
return redirect(
url_for(
'main.article_detail_v3',
url_seg=article.journal.url_segment,
article_pid_v3=article.aid,
),
code=301
)
# ###############################E-mail share##################################
@main.route("/email_share_ajax/", methods=['POST'])
def email_share_ajax():
if not request.is_xhr:
abort(400, _('Requisição inválida.'))
form = forms.EmailShareForm(request.form)
if form.validate():
recipients = [email.strip() for email in form.data['recipients'].split(';') if email.strip() != '']
sent, message = controllers.send_email_share(form.data['your_email'],
recipients,
form.data['share_url'],
form.data['subject'],
form.data['comment'])
return jsonify({'sent': sent, 'message': str(message),
'fields': [key for key in form.data.keys()]})
else:
return jsonify({'sent': False, 'message': form.errors,
'fields': [key for key in form.data.keys()]})
@main.route("/form_mail/", methods=['GET'])
def email_form():
context = {'url': request.args.get('url')}
return render_template("email/email_form.html", **context)
@main.route("/email_error_ajax/", methods=['POST'])
def email_error_ajax():
if not request.is_xhr:
abort(400, _('Requisição inválida.'))
form = forms.ErrorForm(request.form)
if form.validate():
recipients = [email.strip() for email in current_app.config.get('EMAIL_ACCOUNTS_RECEIVE_ERRORS') if email.strip() != '']
sent, message = controllers.send_email_error(form.data['name'],
form.data['your_email'],
recipients,
form.data['url'],
form.data['error_type'],
form.data['message'],
form.data['page_title'])
return jsonify({'sent': sent, 'message': str(message),
'fields': [key for key in form.data.keys()]})
else:
return jsonify({'sent': False, 'message': form.errors,
'fields': [key for key in form.data.keys()]})
@main.route("/error_mail/", methods=['GET'])
def error_form():
context = {'url': request.args.get('url')}
return render_template("includes/error_form.html", **context)
# ###############################Others########################################
@main.route("/media/<path:filename>/", methods=['GET'])
@cache.cached(key_prefix=cache_key_with_lang)
def download_file_by_filename(filename):
media_root = current_app.config['MEDIA_ROOT']
return send_from_directory(media_root, filename)
@main.route("/img/scielo.gif", methods=['GET'])
def full_text_image():
return send_from_directory('static', 'img/full_text_scielo_img.gif')
@main.route("/robots.txt", methods=['GET'])
def get_robots_txt_file():
return send_from_directory('static', 'robots.txt')
@main.route("/revistas/<path:journal_seg>/<string:page>.htm", methods=['GET'])
def router_legacy_info_pages(journal_seg, page):
"""
    This view function redirects the old URLs to the new ones.
    It keeps a dictionary that maps legacy page names to anchors:
        Page                                         anchor
        [iaboutj.htm, eaboutj.htm, paboutj.htm] -> #about
        [iedboard.htm, eedboard.htm, pedboard.htm] -> #editors
        [iinstruc.htm, einstruc.htm, pinstruc.htm] -> #instructions
        isubscrp.htm -> no anchor
"""
page_anchor = {
'iaboutj': '#about',
'eaboutj': '#about',
'paboutj': '#about',
'eedboard': '#editors',
'iedboard': '#editors',
'pedboard': '#editors',
'iinstruc': '#instructions',
'pinstruc': '#instructions',
'einstruc': '#instructions'
}
return redirect('%s%s' % (url_for('main.about_journal',
url_seg=journal_seg), page_anchor.get(page, '')), code=301)
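# Hypothetical example of the redirect above (the journal acronym "rsp" is an
# assumption used only for illustration):
#   GET /revistas/rsp/iinstruc.htm  ->  301 redirect to /journal/rsp/about/#instructions
#   GET /revistas/rsp/isubscrp.htm  ->  301 redirect to /journal/rsp/about/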
@main.route("/api/v1/counter_dict", methods=['GET'])
def router_counter_dicts():
"""
    This view function returns a dictionary, in JSON format, that maps PIDs to the
    inputs required by the Matomo & COUNTER & SUSHI applications.
"""
end_date = request.args.get('end_date', '', type=str)
try:
end_date = datetime.strptime(end_date, '%Y-%m-%d')
except ValueError:
end_date = datetime.now()
begin_date = end_date - timedelta(days=30)
page = request.args.get('page', type=int)
if not page:
page = 1
limit = request.args.get('limit', type=int)
if not limit or limit > 100 or limit < 0:
limit = 100
results = {'dictionary_date': end_date,
'end_date': end_date.strftime('%Y-%m-%d %H-%M-%S'),
'begin_date': begin_date.strftime('%Y-%m-%d %H-%M-%S'),
'documents': {},
'collection': current_app.config['OPAC_COLLECTION']}
articles = controllers.get_articles_by_date_range(begin_date, end_date, page, limit)
for a in articles.items:
results['documents'].update(get_article_counter_data(a))
results['total'] = articles.total
results['pages'] = articles.pages
results['limit'] = articles.per_page
results['page'] = articles.page
return jsonify(results)
def get_article_counter_data(article):
return {
article.aid: {
"journal_acronym": article.journal.acronym,
"pid": article.pid if article.pid else '',
"aop_pid": article.aop_pid if article.aop_pid else '',
"pid_v1": article.scielo_pids.get('v1', ''),
"pid_v2": article.scielo_pids.get('v2', ''),
"pid_v3": article.scielo_pids.get('v3', ''),
"publication_date": article.publication_date,
"default_language": article.original_language,
"create": article.created,
"update": article.updated
}
}
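# Hypothetical, abridged shape of the JSON returned by /api/v1/counter_dict (all
# values are invented for illustration; field names follow the two functions above):
# {
#   "collection": "scl",
#   "begin_date": "2021-05-01 00-00-00",
#   "end_date": "2021-05-31 00-00-00",
#   "documents": {
#     "JHVmpLMMhdsK7yyffRRtW": {
#       "journal_acronym": "rsp",
#       "pid": "S0034-89102021000100100",
#       "aop_pid": "",
#       "pid_v1": "",
#       "pid_v2": "S0034-89102021000100100",
#       "pid_v3": "JHVmpLMMhdsK7yyffRRtW",
#       "publication_date": "2021-03-10",
#       "default_language": "en"
#     }
#   },
#   "total": 1, "pages": 1, "limit": 100, "page": 1
# }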
@main.route('/cgi-bin/wxis.exe/iah/')
def author_production():
# http://www.scielo.br/cgi-bin/wxis.exe/iah/
# ?IsisScript=iah/iah.xis&base=article%5Edlibrary&format=iso.pft&
# lang=p&nextAction=lnk&
# indexSearch=AU&exprSearch=MEIERHOFFER,+LILIAN+KOZSLOWSKI
# ->
# //search.scielo.org/?lang=pt&q=au:MEIERHOFFER,+LILIAN+KOZSLOWSKI
search_url = current_app.config.get('URL_SEARCH')
if not search_url:
abort(404, "URL_SEARCH: {}".format(_('Página não encontrada')))
qs_exprSearch = request.args.get('exprSearch', type=str) or ''
qs_indexSearch = request.args.get('indexSearch', type=str) or ''
qs_lang = request.args.get('lang', type=str) or ''
_lang = IAHX_LANGS.get(qs_lang) or ''
_lang = _lang and "lang={}".format(_lang)
_expr = "{}{}".format(
qs_indexSearch == "AU" and "au:" or '', qs_exprSearch)
_expr = _expr and "q={}".format(_expr.replace(" ", "+"))
_and = _lang and _expr and "&" or ''
_question_mark = (_lang or _expr) and "?" or ""
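    # Normalize the scheme: keep protocol-relative ("//") and absolute ("http...") search
    # URLs as configured, otherwise default to https.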
if search_url.startswith("//"):
protocol = "https:"
elif search_url.startswith("http"):
protocol = ""
else:
protocol = "https://"
url = "{}{}{}{}{}{}".format(
protocol, search_url, _question_mark, _lang, _and, _expr)
return redirect(url, code=301)
| [
"logging.getLogger",
"flask.request.args.get",
"webapp.controllers.get_press_releases",
"flask.render_template",
"webapp.controllers.get_article_by_oap_pid",
"webapp.utils.utils.get_next_issue",
"webapp.controllers.send_email_error",
"webapp.controllers.get_page_by_journal_acron_lang",
"webapp.controllers.get_article_by_pdf_filename",
"webapp.controllers.get_recent_articles_of_issue",
"io.BytesIO",
"webapp.controllers.get_alpha_list_from_paginated_journals",
"mimetypes.guess_type",
"datetime.timedelta",
"webapp.controllers.get_issue_by_pid",
"webapp.controllers.get_page_by_slug_name",
"werkzeug.contrib.atom.AtomFeed",
"flask.current_app.config.get",
"flask.jsonify",
"webapp.controllers.get_journal_by_url_seg",
"webapp.controllers.get_issue_by_url_seg",
"webapp.cache.cached",
"flask.send_from_directory",
"webapp.controllers.get_aop_issues",
"flask.session.keys",
"webapp.controllers.get_current_collection",
"webapp.utils.utils.is_recaptcha_valid",
"webapp.controllers.get_journal_by_issn",
"webapp.controllers.get_article_by_pid",
"flask_babelex.gettext",
"webapp.controllers.get_journal_json_data",
"webapp.controllers.send_email_share",
"webapp.controllers.get_pages_by_lang",
"webapp.controllers.get_journal_metrics",
"webapp.controllers.get_articles_by_iid",
"webapp.controllers.get_article_by_aop_url_segs",
"webapp.controllers.get_article",
"webapp.controllers.send_email_contact",
"collections.OrderedDict",
"packtools.HTMLGenerator.parse",
"webapp.controllers.get_collection_tweets",
"requests.get",
"flask.redirect",
"webapp.config.lang_names.display_original_lang_name",
"webapp.forms.ContactForm",
"webapp.forms.EmailShareForm",
"opac_schema.v1.models.Article.objects.filter",
"flask.Response",
"urllib.parse.urljoin",
"opac_schema.v1.models.Journal.objects.filter",
"flask.make_response",
"webapp.forms.ErrorForm",
"webapp.controllers.get_issues_by_jid",
"webapp.controllers.get_journals",
"webapp.utils.utils.get_prev_issue",
"urllib.parse.urlparse",
"webapp.controllers.get_article_by_issue_article_seg",
"datetime.datetime.strptime",
"webapp.controllers.get_article_by_pid_v2",
"flask.url_for",
"webapp.controllers.get_journals_grouped_by",
"webapp.controllers.get_article_by_pid_v1",
"webapp.controllers.get_issues_for_grid_by_jid",
"webapp.controllers.get_articles_by_date_range",
"webapp.controllers.related_links",
"webapp.controllers.get_latest_news_by_lang",
"webapp.utils.utils.fix_journal_last_issue",
"webapp.utils.utils.get_label_issue",
"datetime.datetime.now",
"webapp.controllers.get_journals_paginated"
]
| [((1059, 1086), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1076, 1086), False, 'import logging\n'), ((1108, 1158), 'flask_babelex.gettext', '_', (['"""O periódico está indisponível por motivo de: """'], {}), "('O periódico está indisponível por motivo de: ')\n", (1109, 1158), True, 'from flask_babelex import gettext as _\n'), ((1177, 1224), 'flask_babelex.gettext', '_', (['"""O número está indisponível por motivo de: """'], {}), "('O número está indisponível por motivo de: ')\n", (1178, 1224), True, 'from flask_babelex import gettext as _\n'), ((1245, 1292), 'flask_babelex.gettext', '_', (['"""O artigo está indisponível por motivo de: """'], {}), "('O artigo está indisponível por motivo de: ')\n", (1246, 1292), True, 'from flask_babelex import gettext as _\n'), ((4989, 5033), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (5001, 5033), False, 'from webapp import cache\n'), ((6539, 6583), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (6551, 6583), False, 'from webapp import cache\n'), ((7125, 7169), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (7137, 7169), False, 'from webapp import cache\n'), ((8201, 8245), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (8213, 8245), False, 'from webapp import cache\n'), ((10219, 10271), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (10231, 10271), False, 'from webapp import cache\n'), ((10940, 10992), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (10952, 10992), False, 'from webapp import cache\n'), ((14994, 15038), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (15006, 15038), False, 'from webapp import cache\n'), ((17256, 17300), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (17268, 17300), False, 'from webapp import cache\n'), ((18945, 18989), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (18957, 18989), False, 'from webapp import cache\n'), ((20336, 20388), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (20348, 20388), False, 'from webapp import cache\n'), ((21076, 21128), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (21088, 21128), False, 'from webapp import cache\n'), ((22214, 22266), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (22226, 22266), False, 'from webapp import cache\n'), ((25187, 25231), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (25199, 25231), False, 'from webapp import cache\n'), ((27079, 27131), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (27091, 27131), 
False, 'from webapp import cache\n'), ((31711, 31763), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (31723, 31763), False, 'from webapp import cache\n'), ((33858, 33902), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (33870, 33902), False, 'from webapp import cache\n'), ((35622, 35666), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (35634, 35666), False, 'from webapp import cache\n'), ((38986, 39030), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (38998, 39030), False, 'from webapp import cache\n'), ((39934, 39978), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (39946, 39978), False, 'from webapp import cache\n'), ((46053, 46105), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (46065, 46105), False, 'from webapp import cache\n'), ((47144, 47196), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (47156, 47196), False, 'from webapp import cache\n'), ((47753, 47797), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (47765, 47797), False, 'from webapp import cache\n'), ((48108, 48160), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (48120, 48160), False, 'from webapp import cache\n'), ((48857, 48901), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (48869, 48901), False, 'from webapp import cache\n'), ((49892, 49944), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (49904, 49944), False, 'from webapp import cache\n'), ((50746, 50798), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang_with_qs'}), '(key_prefix=cache_key_with_lang_with_qs)\n', (50758, 50798), False, 'from webapp import cache\n'), ((54118, 54162), 'webapp.cache.cached', 'cache.cached', ([], {'key_prefix': 'cache_key_with_lang'}), '(key_prefix=cache_key_with_lang)\n', (54130, 54162), False, 'from webapp import cache\n'), ((1401, 1428), 'flask.url_for', 'url_for', (['endpoint'], {}), '(endpoint, **kwargs)\n', (1408, 1428), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((1440, 1470), 'urllib.parse.urljoin', 'urljoin', (['request.url_root', 'url'], {}), '(request.url_root, url)\n', (1447, 1470), False, 'from urllib.parse import urljoin\n'), ((3795, 3830), 'flask.current_app.config.get', 'current_app.config.get', (['"""LANGUAGES"""'], {}), "('LANGUAGES')\n", (3817, 3830), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((4367, 4402), 'flask.current_app.config.get', 'current_app.config.get', (['"""LANGUAGES"""'], {}), "('LANGUAGES')\n", (4389, 4402), False, 'from flask import render_template, 
abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((4543, 4567), 'flask.request.args.get', 'request.args.get', (['"""hash"""'], {}), "('hash')\n", (4559, 4567), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((4691, 4709), 'flask.redirect', 'redirect', (['referrer'], {}), '(referrer)\n', (4699, 4709), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((5107, 5152), 'webapp.controllers.get_latest_news_by_lang', 'controllers.get_latest_news_by_lang', (['language'], {}), '(language)\n', (5142, 5152), False, 'from webapp import controllers\n'), ((5167, 5202), 'webapp.controllers.get_collection_tweets', 'controllers.get_collection_tweets', ([], {}), '()\n', (5200, 5202), False, 'from webapp import controllers\n'), ((5224, 5278), 'webapp.controllers.get_press_releases', 'controllers.get_press_releases', (["{'language': language}"], {}), "({'language': language})\n", (5254, 5278), False, 'from webapp import controllers\n'), ((6369, 6420), 'flask.render_template', 'render_template', (['"""collection/index.html"""'], {}), "('collection/index.html', **context)\n", (6384, 6420), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((6678, 6708), 'flask.request.args.get', 'request.args.get', (['"""status"""', '""""""'], {}), "('status', '')\n", (6694, 6708), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((6947, 7064), 'flask.render_template', 'render_template', (['"""collection/list_journal.html"""'], {}), "('collection/list_journal.html', **{'journals_list':\n journals_list, 'query_filter': query_filter})\n", (6962, 7064), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((7477, 7507), 'flask.request.args.get', 'request.args.get', (['"""status"""', '""""""'], {}), "('status', '')\n", (7493, 7507), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((7526, 7555), 'flask.request.args.get', 'request.args.get', (['"""query"""', '""""""'], {}), "('query', '')\n", (7542, 7555), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((7578, 7613), 'flask.request.args.get', 'request.args.get', (['"""filter"""', '"""areas"""'], {}), "('filter', 'areas')\n", (7594, 7613), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((7844, 7967), 'webapp.controllers.get_journals_grouped_by', 'controllers.get_journals_grouped_by', (['thematic_table[thematic_filter]', 'title_query'], {'query_filter': 'query_filter', 'lang': 'lang'}), '(thematic_table[thematic_filter],\n title_query, query_filter=query_filter, lang=lang)\n', (7879, 7967), False, 'from webapp import controllers\n'), ((8015, 8148), 'flask.render_template', 'render_template', 
(['"""collection/list_thematic.html"""'], {}), "('collection/list_thematic.html', **{'objects': objects,\n 'query_filter': query_filter, 'filter': thematic_filter})\n", (8030, 8148), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((8340, 8376), 'webapp.controllers.get_current_collection', 'controllers.get_current_collection', ([], {}), '()\n', (8374, 8376), False, 'from webapp import controllers\n'), ((8490, 8558), 'flask_babelex.gettext', '_', (["('10 últimos periódicos inseridos na coleção %s' % collection.name)"], {}), "('10 últimos periódicos inseridos na coleção %s' % collection.name)\n", (8491, 8558), True, 'from flask_babelex import gettext as _\n'), ((8571, 8649), 'werkzeug.contrib.atom.AtomFeed', 'AtomFeed', (['title'], {'subtitle': 'subtitle', 'feed_url': 'request.url', 'url': 'request.url_root'}), '(title, subtitle=subtitle, feed_url=request.url, url=request.url_root)\n', (8579, 8649), False, 'from werkzeug.contrib.atom import AtomFeed\n'), ((8706, 8803), 'webapp.controllers.get_journals_paginated', 'controllers.get_journals_paginated', ([], {'title_query': '""""""', 'page': '(1)', 'order_by': '"""-created"""', 'per_page': '(10)'}), "(title_query='', page=1, order_by=\n '-created', per_page=10)\n", (8740, 8803), False, 'from webapp import controllers\n'), ((10773, 10824), 'flask.render_template', 'render_template', (['"""collection/about.html"""'], {}), "('collection/about.html', **context)\n", (10788, 10824), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((11032, 11064), 'flask.request.args.get', 'request.args.get', (['"""script"""', 'None'], {}), "('script', None)\n", (11048, 11064), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((11075, 11104), 'flask.request.args.get', 'request.args.get', (['"""pid"""', 'None'], {}), "('pid', None)\n", (11091, 11104), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((11116, 11146), 'flask.request.args.get', 'request.args.get', (['"""tlng"""', 'None'], {}), "('tlng', None)\n", (11132, 11146), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((15082, 15125), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (15116, 15125), False, 'from webapp import controllers\n'), ((15301, 15338), 'webapp.utils.utils.fix_journal_last_issue', 'utils.fix_journal_last_issue', (['journal'], {}), '(journal)\n', (15329, 15338), False, 'from webapp.utils import utils\n'), ((15472, 15517), 'webapp.controllers.get_latest_news_by_lang', 'controllers.get_latest_news_by_lang', (['language'], {}), '(language)\n', (15507, 15517), False, 'from webapp import controllers\n'), ((15561, 15635), 'webapp.controllers.get_press_releases', 'controllers.get_press_releases', (["{'journal': journal, 'language': language}"], {}), "({'journal': journal, 'language': language})\n", (15591, 15635), False, 'from webapp import controllers\n'), ((16517, 16557), 'webapp.controllers.get_journal_metrics', 'controllers.get_journal_metrics', (['journal'], {}), 
'(journal)\n', (16548, 16557), False, 'from webapp import controllers\n'), ((17156, 17205), 'flask.render_template', 'render_template', (['"""journal/detail.html"""'], {}), "('journal/detail.html', **context)\n", (17171, 17205), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((17342, 17385), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (17376, 17385), False, 'from webapp import controllers\n'), ((17570, 17628), 'webapp.controllers.get_issues_by_jid', 'controllers.get_issues_by_jid', (['journal.jid'], {'is_public': '(True)'}), '(journal.jid, is_public=True)\n', (17599, 17628), False, 'from webapp import controllers\n'), ((17691, 17754), 'webapp.controllers.get_articles_by_iid', 'controllers.get_articles_by_iid', (['last_issue.iid'], {'is_public': '(True)'}), '(last_issue.iid, is_public=True)\n', (17722, 17754), False, 'from webapp import controllers\n'), ((19082, 19125), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (19116, 19125), False, 'from webapp import controllers\n'), ((19316, 19353), 'webapp.utils.utils.fix_journal_last_issue', 'utils.fix_journal_last_issue', (['journal'], {}), '(journal)\n', (19344, 19353), False, 'from webapp.utils import utils\n'), ((19729, 19798), 'webapp.controllers.get_page_by_journal_acron_lang', 'controllers.get_page_by_journal_acron_lang', (['journal.acronym', 'language'], {}), '(journal.acronym, language)\n', (19771, 19798), False, 'from webapp import controllers\n'), ((20221, 20269), 'flask.render_template', 'render_template', (['"""journal/about.html"""'], {}), "('journal/about.html', **context)\n", (20236, 20269), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((20528, 20567), 'flask.request.args.get', 'request.args.get', (['"""query"""', '""""""'], {'type': 'str'}), "('query', '', type=str)\n", (20544, 20567), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((20587, 20633), 'flask.request.args.get', 'request.args.get', (['"""query_filter"""', '""""""'], {'type': 'str'}), "('query_filter', '', type=str)\n", (20603, 20633), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((20645, 20682), 'flask.request.args.get', 'request.args.get', (['"""page"""', '(1)'], {'type': 'int'}), "('page', 1, type=int)\n", (20661, 20682), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((20751, 20873), 'webapp.controllers.get_alpha_list_from_paginated_journals', 'controllers.get_alpha_list_from_paginated_journals', ([], {'title_query': 'query', 'query_filter': 'query_filter', 'page': 'page', 'lang': 'lang'}), '(title_query=query,\n query_filter=query_filter, page=page, lang=lang)\n', (20801, 20873), False, 'from webapp import controllers\n'), ((20979, 21001), 'flask.jsonify', 'jsonify', (['response_data'], {}), '(response_data)\n', (20986, 21001), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, 
send_from_directory, g, make_response\n'), ((21271, 21310), 'flask.request.args.get', 'request.args.get', (['"""query"""', '""""""'], {'type': 'str'}), "('query', '', type=str)\n", (21287, 21310), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((21330, 21376), 'flask.request.args.get', 'request.args.get', (['"""query_filter"""', '""""""'], {'type': 'str'}), "('query_filter', '', type=str)\n", (21346, 21376), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((21390, 21435), 'flask.request.args.get', 'request.args.get', (['"""filter"""', '"""areas"""'], {'type': 'str'}), "('filter', 'areas', type=str)\n", (21406, 21435), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((22102, 22118), 'flask.jsonify', 'jsonify', (['objects'], {}), '(objects)\n', (22109, 22118), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((23558, 23591), 'webapp.utils.utils.is_recaptcha_valid', 'utils.is_recaptcha_valid', (['request'], {}), '(request)\n', (23582, 23591), False, 'from webapp.utils import utils\n'), ((24677, 24720), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (24711, 24720), False, 'from webapp import controllers\n'), ((24852, 24916), 'flask.render_template', 'render_template', (['"""journal/includes/contact_form.html"""'], {}), "('journal/includes/contact_form.html', **context)\n", (24867, 24916), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((25271, 25314), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (25305, 25314), False, 'from webapp import controllers\n'), ((25664, 25730), 'webapp.controllers.get_issues_for_grid_by_jid', 'controllers.get_issues_for_grid_by_jid', (['journal.id'], {'is_public': '(True)'}), '(journal.id, is_public=True)\n', (25702, 25730), False, 'from webapp import controllers\n'), ((26580, 26625), 'flask.render_template', 'render_template', (['"""issue/grid.html"""'], {}), "('issue/grid.html', **context)\n", (26595, 26625), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((27208, 27248), 'flask.request.args.get', 'request.args.get', (['"""goto"""', 'None'], {'type': 'str'}), "('goto', None, type=str)\n", (27224, 27248), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((27765, 27821), 'webapp.controllers.get_issue_by_url_seg', 'controllers.get_issue_by_url_seg', (['url_seg', 'url_seg_issue'], {}), '(url_seg, url_seg_issue)\n', (27797, 27821), False, 'from webapp import controllers\n'), ((28174, 28211), 'webapp.utils.utils.fix_journal_last_issue', 'utils.fix_journal_last_issue', (['journal'], {}), '(journal)\n', (28202, 28211), False, 'from webapp.utils import utils\n'), ((28465, 28523), 'webapp.controllers.get_articles_by_iid', 
'controllers.get_articles_by_iid', (['issue.iid'], {'is_public': '(True)'}), '(issue.iid, is_public=True)\n', (28496, 28523), False, 'from webapp import controllers\n'), ((30407, 30451), 'flask.render_template', 'render_template', (['"""issue/toc.html"""'], {}), "('issue/toc.html', **context)\n", (30422, 30451), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((31621, 31668), 'webapp.utils.utils.get_prev_issue', 'utils.get_prev_issue', (['all_issues', 'current_issue'], {}), '(all_issues, current_issue)\n', (31641, 31668), False, 'from webapp.utils import utils\n'), ((32017, 32057), 'flask.request.args.get', 'request.args.get', (['"""goto"""', 'None'], {'type': 'str'}), "('goto', None, type=str)\n", (32033, 32057), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((32341, 32378), 'webapp.utils.utils.fix_journal_last_issue', 'utils.fix_journal_last_issue', (['journal'], {}), '(journal)\n', (32369, 32378), False, 'from webapp.utils import utils\n'), ((33748, 33792), 'flask.render_template', 'render_template', (['"""issue/toc.html"""'], {}), "('issue/toc.html', **context)\n", (33763, 33792), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((33955, 34011), 'webapp.controllers.get_issue_by_url_seg', 'controllers.get_issue_by_url_seg', (['url_seg', 'url_seg_issue'], {}), '(url_seg, url_seg_issue)\n', (33987, 34011), False, 'from webapp import controllers\n'), ((34326, 34384), 'webapp.controllers.get_articles_by_iid', 'controllers.get_articles_by_iid', (['issue.iid'], {'is_public': '(True)'}), '(issue.iid, is_public=True)\n', (34357, 34384), False, 'from webapp import controllers\n'), ((35711, 35746), 'webapp.controllers.get_article_by_pid', 'controllers.get_article_by_pid', (['pid'], {}), '(pid)\n', (35741, 35746), False, 'from webapp import controllers\n'), ((36387, 36482), 'packtools.HTMLGenerator.parse', 'HTMLGenerator.parse', (['xml'], {'valid_only': '(False)', 'gs_abstract': 'gs_abstract', 'output_style': '"""website"""'}), "(xml, valid_only=False, gs_abstract=gs_abstract,\n output_style='website')\n", (36406, 36482), False, 'from packtools import HTMLGenerator\n'), ((39118, 39174), 'webapp.controllers.get_issue_by_url_seg', 'controllers.get_issue_by_url_seg', (['url_seg', 'url_seg_issue'], {}), '(url_seg, url_seg_issue)\n', (39150, 39174), False, 'from webapp import controllers\n'), ((39254, 39326), 'webapp.controllers.get_article_by_issue_article_seg', 'controllers.get_article_by_issue_article_seg', (['issue.iid', 'url_seg_article'], {}), '(issue.iid, url_seg_article)\n', (39298, 39326), False, 'from webapp import controllers\n'), ((40225, 40269), 'flask.request.args.get', 'request.args.get', (['"""format"""', '"""html"""'], {'type': 'str'}), "('format', 'html', type=str)\n", (40241, 40269), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((46136, 46175), 'flask.request.args.get', 'request.args.get', (['"""doi"""', 'None'], {'type': 'str'}), "('doi', None, type=str)\n", (46152, 46175), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, 
make_response\n'), ((46186, 46225), 'flask.request.args.get', 'request.args.get', (['"""pid"""', 'None'], {'type': 'str'}), "('pid', None, type=str)\n", (46202, 46225), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((46241, 46285), 'flask.request.args.get', 'request.args.get', (['"""pdf_path"""', 'None'], {'type': 'str'}), "('pdf_path', None, type=str)\n", (46257, 46285), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((46297, 46337), 'flask.request.args.get', 'request.args.get', (['"""lang"""', 'None'], {'type': 'str'}), "('lang', None, type=str)\n", (46313, 46337), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((47397, 47422), 'mimetypes.guess_type', 'mimetypes.guess_type', (['url'], {}), '(url)\n', (47417, 47422), False, 'import mimetypes\n'), ((48216, 48259), 'flask.request.args.get', 'request.args.get', (['"""resource_ssm_path"""', 'None'], {}), "('resource_ssm_path', None)\n", (48232, 48259), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((49249, 49305), 'webapp.controllers.get_issue_by_url_seg', 'controllers.get_issue_by_url_seg', (['url_seg', 'url_seg_issue'], {}), '(url_seg, url_seg_issue)\n', (49281, 49305), False, 'from webapp import controllers\n'), ((49385, 49457), 'webapp.controllers.get_article_by_issue_article_seg', 'controllers.get_article_by_issue_article_seg', (['issue.iid', 'url_seg_article'], {}), '(issue.iid, url_seg_article)\n', (49429, 49457), False, 'from webapp import controllers\n'), ((50067, 50116), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['journal_acron'], {}), '(journal_acron)\n', (50101, 50116), False, 'from webapp import controllers\n'), ((50257, 50342), 'webapp.controllers.get_article_by_pdf_filename', 'controllers.get_article_by_pdf_filename', (['journal_acron', 'issue_info', 'pdf_filename'], {}), '(journal_acron, issue_info, pdf_filename\n )\n', (50296, 50342), False, 'from webapp import controllers\n'), ((50854, 50883), 'flask.request.args.get', 'request.args.get', (['"""pid"""', 'None'], {}), "('pid', None)\n", (50870, 50883), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((50894, 50923), 'flask.request.args.get', 'request.args.get', (['"""lng"""', 'None'], {}), "('lng', None)\n", (50910, 50923), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((51111, 51149), 'webapp.controllers.get_article_by_pid_v1', 'controllers.get_article_by_pid_v1', (['pid'], {}), '(pid)\n', (51144, 51149), False, 'from webapp import controllers\n'), ((51663, 51697), 'webapp.forms.EmailShareForm', 'forms.EmailShareForm', (['request.form'], {}), '(request.form)\n', (51683, 51697), False, 'from webapp import forms\n'), ((52602, 52653), 'flask.render_template', 'render_template', (['"""email/email_form.html"""'], {}), "('email/email_form.html', **context)\n", (52617, 52653), False, 'from flask import render_template, abort, current_app, request, session, 
redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((52818, 52847), 'webapp.forms.ErrorForm', 'forms.ErrorForm', (['request.form'], {}), '(request.form)\n', (52833, 52847), False, 'from webapp import forms\n'), ((53922, 53976), 'flask.render_template', 'render_template', (['"""includes/error_form.html"""'], {}), "('includes/error_form.html', **context)\n", (53937, 53976), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((54265, 54306), 'flask.send_from_directory', 'send_from_directory', (['media_root', 'filename'], {}), '(media_root, filename)\n', (54284, 54306), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((54391, 54452), 'flask.send_from_directory', 'send_from_directory', (['"""static"""', '"""img/full_text_scielo_img.gif"""'], {}), "('static', 'img/full_text_scielo_img.gif')\n", (54410, 54452), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((54537, 54580), 'flask.send_from_directory', 'send_from_directory', (['"""static"""', '"""robots.txt"""'], {}), "('static', 'robots.txt')\n", (54556, 54580), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((55907, 55949), 'flask.request.args.get', 'request.args.get', (['"""end_date"""', '""""""'], {'type': 'str'}), "('end_date', '', type=str)\n", (55923, 55949), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((56134, 56168), 'flask.request.args.get', 'request.args.get', (['"""page"""'], {'type': 'int'}), "('page', type=int)\n", (56150, 56168), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((56216, 56251), 'flask.request.args.get', 'request.args.get', (['"""limit"""'], {'type': 'int'}), "('limit', type=int)\n", (56232, 56251), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((56617, 56690), 'webapp.controllers.get_articles_by_date_range', 'controllers.get_articles_by_date_range', (['begin_date', 'end_date', 'page', 'limit'], {}), '(begin_date, end_date, page, limit)\n', (56655, 56690), False, 'from webapp import controllers\n'), ((56951, 56967), 'flask.jsonify', 'jsonify', (['results'], {}), '(results)\n', (56958, 56967), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((57978, 58014), 'flask.current_app.config.get', 'current_app.config.get', (['"""URL_SEARCH"""'], {}), "('URL_SEARCH')\n", (58000, 58014), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((58915, 58938), 'flask.redirect', 'redirect', (['url'], {'code': '(301)'}), '(url, code=301)\n', (58923, 58938), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, 
make_response\n'), ((1964, 1998), 'requests.get', 'requests.get', (['url'], {'timeout': 'timeout'}), '(url, timeout=timeout)\n', (1976, 1998), False, 'import requests\n'), ((3328, 3350), 'webapp.forms.EmailShareForm', 'forms.EmailShareForm', ([], {}), '()\n', (3348, 3350), False, 'from webapp import forms\n'), ((3384, 3403), 'webapp.forms.ContactForm', 'forms.ContactForm', ([], {}), '()\n', (3401, 3403), False, 'from webapp import forms\n'), ((3429, 3446), 'webapp.forms.ErrorForm', 'forms.ErrorForm', ([], {}), '()\n', (3444, 3446), False, 'from webapp import forms\n'), ((4204, 4250), 'flask.current_app.config.get', 'current_app.config.get', (['"""BABEL_DEFAULT_LOCALE"""'], {}), "('BABEL_DEFAULT_LOCALE')\n", (4226, 4250), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((6811, 6853), 'webapp.controllers.get_journal_json_data', 'controllers.get_journal_json_data', (['journal'], {}), '(journal)\n', (6844, 6853), False, 'from webapp import controllers\n'), ((9012, 9070), 'webapp.controllers.get_issues_by_jid', 'controllers.get_issues_by_jid', (['journal.jid'], {'is_public': '(True)'}), '(journal.jid, is_public=True)\n', (9041, 9070), False, 'from webapp import controllers\n'), ((9333, 9346), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9344, 9346), False, 'from collections import OrderedDict\n'), ((10457, 10511), 'webapp.controllers.get_page_by_slug_name', 'controllers.get_page_by_slug_name', (['slug_name', 'language'], {}), '(slug_name, language)\n', (10490, 10511), False, 'from webapp import controllers\n'), ((10688, 10727), 'webapp.controllers.get_pages_by_lang', 'controllers.get_pages_by_lang', (['language'], {}), '(language)\n', (10717, 10727), False, 'from webapp import controllers\n'), ((14864, 14915), 'flask.url_for', 'url_for', (['"""main.journal_detail"""'], {'url_seg': 'journal_seg'}), "('main.journal_detail', url_seg=journal_seg)\n", (14871, 14915), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((15952, 16037), 'webapp.controllers.get_recent_articles_of_issue', 'controllers.get_recent_articles_of_issue', (['journal.last_issue.iid'], {'is_public': '(True)'}), '(journal.last_issue.iid, is_public=True\n )\n', (15992, 16037), False, 'from webapp import controllers\n'), ((21528, 21628), 'webapp.controllers.get_journals_grouped_by', 'controllers.get_journals_grouped_by', (['"""study_areas"""', 'query'], {'query_filter': 'query_filter', 'lang': 'lang'}), "('study_areas', query, query_filter=\n query_filter, lang=lang)\n", (21563, 21628), False, 'from webapp import controllers\n'), ((23609, 23640), 'webapp.forms.ContactForm', 'forms.ContactForm', (['request.form'], {}), '(request.form)\n', (23626, 23640), False, 'from webapp import forms\n'), ((23660, 23703), 'webapp.controllers.get_journal_by_url_seg', 'controllers.get_journal_by_url_seg', (['url_seg'], {}), '(url_seg)\n', (23694, 23703), False, 'from webapp import controllers\n'), ((25094, 25137), 'flask.url_for', 'url_for', (['"""main.issue_grid"""'], {'url_seg': 'url_seg'}), "('main.issue_grid', url_seg=url_seg)\n", (25101, 25137), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((26892, 26963), 'flask.url_for', 'url_for', (['"""main.issue_toc"""'], {'url_seg': 'url_seg', 
'url_seg_issue': 'url_seg_issue'}), "('main.issue_toc', url_seg=url_seg, url_seg_issue=url_seg_issue)\n", (26899, 26963), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((28320, 28360), 'flask.request.args.get', 'request.args.get', (['"""goto"""', 'None'], {'type': 'str'}), "('goto', None, type=str)\n", (28336, 28360), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((28394, 28422), 'flask.redirect', 'redirect', (['goto_url'], {'code': '(301)'}), '(goto_url, code=301)\n', (28402, 28422), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((29797, 29868), 'flask.url_for', 'url_for', (['"""main.issue_toc"""'], {'url_seg': 'url_seg', 'url_seg_issue': 'url_seg_issue'}), "('main.issue_toc', url_seg=url_seg, url_seg_issue=url_seg_issue)\n", (29804, 29868), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((30613, 30684), 'webapp.controllers.get_issues_by_jid', 'controllers.get_issues_by_jid', (['current_issue.journal.id'], {'is_public': '(True)'}), '(current_issue.journal.id, is_public=True)\n', (30642, 30684), False, 'from webapp import controllers\n'), ((30740, 30787), 'webapp.utils.utils.get_next_issue', 'utils.get_next_issue', (['all_issues', 'current_issue'], {}), '(all_issues, current_issue)\n', (30760, 30787), False, 'from webapp.utils import utils\n'), ((31131, 31233), 'flask.url_for', 'url_for', (['"""main.issue_toc"""'], {'url_seg': 'selected_issue.journal.url_segment', 'url_seg_issue': 'url_seg_issue'}), "('main.issue_toc', url_seg=selected_issue.journal.url_segment,\n url_seg_issue=url_seg_issue)\n", (31138, 31233), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((31445, 31516), 'webapp.controllers.get_issues_by_jid', 'controllers.get_issues_by_jid', (['current_issue.journal.id'], {'is_public': '(True)'}), '(current_issue.journal.id, is_public=True)\n', (31474, 31516), False, 'from webapp import controllers\n'), ((31562, 31609), 'webapp.utils.utils.get_next_issue', 'utils.get_next_issue', (['all_issues', 'current_issue'], {}), '(all_issues, current_issue)\n', (31582, 31609), False, 'from webapp.utils import utils\n'), ((31876, 31911), 'webapp.controllers.get_aop_issues', 'controllers.get_aop_issues', (['url_seg'], {}), '(url_seg)\n', (31902, 31911), False, 'from webapp import controllers\n'), ((32451, 32513), 'webapp.controllers.get_articles_by_iid', 'controllers.get_articles_by_iid', (['aop_issue.iid'], {'is_public': '(True)'}), '(aop_issue.iid, is_public=True)\n', (32482, 32513), False, 'from webapp import controllers\n'), ((33234, 33274), 'flask.url_for', 'url_for', (['"""main.aop_toc"""'], {'url_seg': 'url_seg'}), "('main.aop_toc', url_seg=url_seg)\n", (33241, 33274), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((35786, 35825), 'webapp.controllers.get_article_by_oap_pid', 'controllers.get_article_by_oap_pid', (['pid'], {}), '(pid)\n', (35820, 35825), False, 'from webapp import controllers\n'), ((35915, 
36013), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.acronym', 'article_pid_v3': 'article.aid'}), "('main.article_detail_v3', url_seg=article.journal.acronym,\n article_pid_v3=article.aid)\n", (35922, 36013), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((36353, 36368), 'io.BytesIO', 'BytesIO', (['result'], {}), '(result)\n', (36360, 36368), False, 'from io import BytesIO\n'), ((38399, 38412), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (38407, 38412), False, 'from urllib.parse import urlparse\n'), ((39369, 39459), 'webapp.controllers.get_article_by_aop_url_segs', 'controllers.get_article_by_aop_url_segs', (['issue.journal', 'url_seg_issue', 'url_seg_article'], {}), '(issue.journal, url_seg_issue,\n url_seg_article)\n', (39408, 39459), False, 'from webapp import controllers\n'), ((39744, 39791), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {}), "('main.article_detail_v3', **req_params)\n", (39751, 39791), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((40052, 40086), 'flask.request.args.get', 'request.args.get', (['"""lang"""'], {'type': 'str'}), "('lang', type=str)\n", (40068, 40086), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((40109, 40143), 'flask.request.args.get', 'request.args.get', (['"""goto"""'], {'type': 'str'}), "('goto', type=str)\n", (40125, 40143), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((40166, 40200), 'flask.request.args.get', 'request.args.get', (['"""stop"""'], {'type': 'str'}), "('stop', type=str)\n", (40182, 40200), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((40501, 40580), 'webapp.controllers.get_article', 'controllers.get_article', (['article_pid_v3', 'url_seg', 'qs_lang', 'gs_abstract', 'qs_goto'], {}), '(article_pid_v3, url_seg, qs_lang, gs_abstract, qs_goto)\n', (40524, 40580), False, 'from webapp import controllers\n'), ((44783, 44832), 'flask.render_template', 'render_template', (['"""article/detail.html"""'], {}), "('article/detail.html', **context)\n", (44798, 44832), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((45646, 45667), 'flask.make_response', 'make_response', (['result'], {}), '(result)\n', (45659, 45667), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((46624, 46671), 'flask.render_template', 'render_template', (['"""article/epdf.html"""'], {}), "('article/epdf.html', **context)\n", (46639, 46671), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((47062, 47087), 'mimetypes.guess_type', 'mimetypes.guess_type', (['url'], {}), '(url)\n', (47082, 47087), False, 'import mimetypes\n'), ((47103, 47140), 'flask.Response', 'Response', (['response'], 
{'mimetype': 'mimetype'}), '(response, mimetype=mimetype)\n', (47111, 47140), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((47643, 47684), 'flask.Response', 'Response', (['ssm_response'], {'mimetype': 'mimetype'}), '(ssm_response, mimetype=mimetype)\n', (47651, 47684), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((49741, 49788), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {}), "('main.article_detail_v3', **req_params)\n", (49748, 49788), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((50456, 50596), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article.aid', 'format': '"""pdf"""', 'lang': 'article._pdf_lang'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article.aid, format='pdf', lang=article._pdf_lang)\n", (50463, 50596), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((51247, 51349), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article.aid'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article.aid)\n", (51254, 51349), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((51856, 51994), 'webapp.controllers.send_email_share', 'controllers.send_email_share', (["form.data['your_email']", 'recipients', "form.data['share_url']", "form.data['subject']", "form.data['comment']"], {}), "(form.data['your_email'], recipients, form.data\n ['share_url'], form.data['subject'], form.data['comment'])\n", (51884, 51994), False, 'from webapp import controllers\n'), ((52566, 52589), 'flask.request.args.get', 'request.args.get', (['"""url"""'], {}), "('url')\n", (52582, 52589), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((53028, 53211), 'webapp.controllers.send_email_error', 'controllers.send_email_error', (["form.data['name']", "form.data['your_email']", 'recipients', "form.data['url']", "form.data['error_type']", "form.data['message']", "form.data['page_title']"], {}), "(form.data['name'], form.data['your_email'],\n recipients, form.data['url'], form.data['error_type'], form.data[\n 'message'], form.data['page_title'])\n", (53056, 53211), False, 'from webapp import controllers\n'), ((53886, 53909), 'flask.request.args.get', 'request.args.get', (['"""url"""'], {}), "('url')\n", (53902, 53909), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((55978, 56017), 'datetime.datetime.strptime', 'datetime.strptime', (['end_date', '"""%Y-%m-%d"""'], {}), "(end_date, '%Y-%m-%d')\n", (55995, 56017), False, 'from datetime import datetime, timedelta\n'), ((56103, 56121), 'datetime.timedelta', 'timedelta', ([], {'days': '(30)'}), '(days=30)\n', (56112, 56121), False, 'from 
datetime import datetime, timedelta\n'), ((58131, 58171), 'flask.request.args.get', 'request.args.get', (['"""exprSearch"""'], {'type': 'str'}), "('exprSearch', type=str)\n", (58147, 58171), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((58199, 58240), 'flask.request.args.get', 'request.args.get', (['"""indexSearch"""'], {'type': 'str'}), "('indexSearch', type=str)\n", (58215, 58240), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((58261, 58295), 'flask.request.args.get', 'request.args.get', (['"""lang"""'], {'type': 'str'}), "('lang', type=str)\n", (58277, 58295), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((2760, 2796), 'webapp.controllers.get_current_collection', 'controllers.get_current_collection', ([], {}), '()\n', (2794, 2796), False, 'from webapp import controllers\n'), ((3938, 3952), 'flask.session.keys', 'session.keys', ([], {}), '()\n', (3950, 3952), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((4467, 4497), 'flask_babelex.gettext', '_', (['"""Código de idioma inválido"""'], {}), "('Código de idioma inválido')\n", (4468, 4497), True, 'from flask_babelex import gettext as _\n'), ((4922, 4968), 'flask.current_app.config.get', 'current_app.config.get', (['"""BABEL_DEFAULT_LOCALE"""'], {}), "('BABEL_DEFAULT_LOCALE')\n", (4944, 4968), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((6877, 6928), 'webapp.controllers.get_journals', 'controllers.get_journals', ([], {'query_filter': 'query_filter'}), '(query_filter=query_filter)\n', (6901, 6928), False, 'from webapp import controllers\n'), ((8429, 8473), 'flask_babelex.gettext', '_', (['"""Últimos periódicos inseridos na coleção"""'], {}), "('Últimos periódicos inseridos na coleção')\n", (8430, 8473), True, 'from flask_babelex import gettext as _\n'), ((9191, 9254), 'webapp.controllers.get_articles_by_iid', 'controllers.get_articles_by_iid', (['last_issue.iid'], {'is_public': '(True)'}), '(last_issue.iid, is_public=True)\n', (9222, 9254), False, 'from webapp import controllers\n'), ((9762, 9825), 'flask.render_template', 'render_template', (['"""collection/list_feed_content.html"""'], {}), "('collection/list_feed_content.html', **context)\n", (9777, 9825), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((11426, 11495), 'flask_babelex.gettext', '_', (["(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)"], {}), "(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)\n", (11427, 11495), True, 'from flask_babelex import gettext as _\n'), ((14702, 14715), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (14710, 14715), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((15166, 15195), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (15167, 15195), 
True, 'from flask_babelex import gettext as _\n'), ((17426, 17455), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (17427, 17455), True, 'from flask_babelex import gettext as _\n'), ((17904, 17937), 'webapp.utils.utils.get_label_issue', 'utils.get_label_issue', (['last_issue'], {}), '(last_issue)\n', (17925, 17937), False, 'from webapp.utils import utils\n'), ((18332, 18391), 'flask.render_template', 'render_template', (['"""issue/feed_content.html"""'], {'article': 'article'}), "('issue/feed_content.html', article=article)\n", (18347, 18391), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((19166, 19195), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (19167, 19195), True, 'from flask_babelex import gettext as _\n'), ((20470, 20513), 'flask_babelex.gettext', '_', (['"""Requisição inválida. Deve ser por ajax"""'], {}), "('Requisição inválida. Deve ser por ajax')\n", (20471, 20513), True, 'from flask_babelex import gettext as _\n'), ((21213, 21256), 'flask_babelex.gettext', '_', (['"""Requisição inválida. Deve ser por ajax"""'], {}), "('Requisição inválida. Deve ser por ajax')\n", (21214, 21256), True, 'from flask_babelex import gettext as _\n'), ((21668, 21774), 'webapp.controllers.get_journals_grouped_by', 'controllers.get_journals_grouped_by', (['"""subject_categories"""', 'query'], {'query_filter': 'query_filter', 'lang': 'lang'}), "('subject_categories', query,\n query_filter=query_filter, lang=lang)\n", (21703, 21774), False, 'from webapp import controllers\n'), ((22383, 22446), 'flask_babelex.gettext', '_', (['"""Parámetro "extension" é inválido, deve ser "csv" ou "xls"."""'], {}), '(\'Parámetro "extension" é inválido, deve ser "csv" ou "xls".\')\n', (22384, 22446), True, 'from flask_babelex import gettext as _\n'), ((22794, 22833), 'flask.request.args.get', 'request.args.get', (['"""query"""', '""""""'], {'type': 'str'}), "('query', '', type=str)\n", (22810, 22833), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((23233, 23266), 'flask.Response', 'Response', (['data'], {'mimetype': 'mimetype'}), '(data, mimetype=mimetype)\n', (23241, 23266), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((23508, 23548), 'flask_babelex.gettext', '_', (['"""Requisição inválida, deve ser ajax."""'], {}), "('Requisição inválida, deve ser ajax.')\n", (23509, 23548), True, 'from flask_babelex import gettext as _\n'), ((23911, 24024), 'webapp.controllers.send_email_contact', 'controllers.send_email_contact', (['recipients', "form.data['name']", "form.data['your_email']", "form.data['message']"], {}), "(recipients, form.data['name'], form.data[\n 'your_email'], form.data['message'])\n", (23941, 24024), False, 'from webapp import controllers\n'), ((24525, 24568), 'flask_babelex.gettext', '_', (['"""Requisição inválida, captcha inválido."""'], {}), "('Requisição inválida, captcha inválido.')\n", (24526, 24568), True, 'from flask_babelex import gettext as _\n'), ((24760, 24789), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (24761, 24789), True, 'from flask_babelex import gettext as _\n'), 
((25355, 25384), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (25356, 25384), True, 'from flask_babelex import gettext as _\n'), ((26810, 26850), 'flask.url_for', 'url_for', (['"""main.aop_toc"""'], {'url_seg': 'url_seg'}), "('main.aop_toc', url_seg=url_seg)\n", (26817, 26850), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((27432, 27472), 'flask.url_for', 'url_for', (['"""main.aop_toc"""'], {'url_seg': 'url_seg'}), "('main.aop_toc', url_seg=url_seg)\n", (27439, 27472), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((27859, 27885), 'flask_babelex.gettext', '_', (['"""Número não encontrado"""'], {}), "('Número não encontrado')\n", (27860, 27885), True, 'from flask_babelex import gettext as _\n'), ((30848, 30895), 'webapp.utils.utils.get_prev_issue', 'utils.get_prev_issue', (['all_issues', 'current_issue'], {}), '(all_issues, current_issue)\n', (30868, 30895), False, 'from webapp.utils import utils\n'), ((31808, 31849), 'flask.request.args.get', 'request.args.get', (['"""section"""', '""""""'], {'type': 'str'}), "('section', '', type=str)\n", (31824, 31849), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((31960, 32003), 'flask_babelex.gettext', '_', (['"""Artigos ahead of print não encontrados"""'], {}), "('Artigos ahead of print não encontrados')\n", (31961, 32003), True, 'from flask_babelex import gettext as _\n'), ((32177, 32200), 'flask.redirect', 'redirect', (['url'], {'code': '(301)'}), '(url, code=301)\n', (32185, 32200), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((32628, 32671), 'flask_babelex.gettext', '_', (['"""Artigos ahead of print não encontrados"""'], {}), "('Artigos ahead of print não encontrados')\n", (32629, 32671), True, 'from flask_babelex import gettext as _\n'), ((34050, 34076), 'flask_babelex.gettext', '_', (['"""Número não encontrado"""'], {}), "('Número não encontrado')\n", (34051, 34076), True, 'from flask_babelex import gettext as _\n'), ((34540, 34568), 'webapp.utils.utils.get_label_issue', 'utils.get_label_issue', (['issue'], {}), '(issue)\n', (34561, 34568), False, 'from webapp.utils import utils\n'), ((34908, 34967), 'flask.render_template', 'render_template', (['"""issue/feed_content.html"""'], {'article': 'article'}), "('issue/feed_content.html', article=article)\n", (34923, 34967), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((35866, 35892), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (35867, 35892), True, 'from flask_babelex import gettext as _\n'), ((39212, 39237), 'flask_babelex.gettext', '_', (['"""Issue não encontrado"""'], {}), "('Issue não encontrado')\n", (39213, 39237), True, 'from flask_babelex import gettext as _\n'), ((39521, 39547), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (39522, 39547), True, 'from flask_babelex import gettext as _\n'), ((42694, 42715), 'urllib.parse.urlparse', 'urlparse', 
(['request.url'], {}), '(request.url)\n', (42702, 42715), False, 'from urllib.parse import urlparse\n'), ((43937, 44093), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article_pid_v3', 'format': '"""xml"""', 'lang': 'article.original_language'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article_pid_v3, format='xml', lang=article.original_language\n )\n", (43944, 44093), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((44656, 44690), 'webapp.controllers.related_links', 'controllers.related_links', (['article'], {}), '(article)\n', (44681, 44690), False, 'from webapp import controllers\n'), ((45374, 45430), 'flask_babelex.gettext', '_', (['"""Recurso do Artigo não encontrado. Caminho inválido!"""'], {}), "('Recurso do Artigo não encontrado. Caminho inválido!')\n", (45375, 45430), True, 'from flask_babelex import gettext as _\n'), ((46402, 46459), 'flask_babelex.gettext', '_', (['"""Parâmetros insuficientes para obter o EPDF do artigo"""'], {}), "('Parâmetros insuficientes para obter o EPDF do artigo')\n", (46403, 46459), True, 'from flask_babelex import gettext as _\n'), ((48315, 48371), 'flask_babelex.gettext', '_', (['"""Recurso do Artigo não encontrado. Caminho inválido!"""'], {}), "('Recurso do Artigo não encontrado. Caminho inválido!')\n", (48316, 48371), True, 'from flask_babelex import gettext as _\n'), ((49343, 49368), 'flask_babelex.gettext', '_', (['"""Issue não encontrado"""'], {}), "('Issue não encontrado')\n", (49344, 49368), True, 'from flask_babelex import gettext as _\n'), ((49497, 49523), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (49498, 49523), True, 'from flask_babelex import gettext as _\n'), ((50157, 50245), 'flask_babelex.gettext', '_', (['"""Este PDF não existe em http://www.scielo.br. Consulte http://search.scielo.org"""'], {}), "('Este PDF não existe em http://www.scielo.br. 
Consulte http://search.scielo.org'\n )\n", (50158, 50245), True, 'from flask_babelex import gettext as _\n'), ((50387, 50424), 'flask_babelex.gettext', '_', (['"""PDF do artigo não foi encontrado"""'], {}), "('PDF do artigo não foi encontrado')\n", (50388, 50424), True, 'from flask_babelex import gettext as _\n'), ((51026, 51094), 'flask_babelex.gettext', '_', (["('Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)"], {}), "('Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)\n", (51027, 51094), True, 'from flask_babelex import gettext as _\n'), ((51189, 51215), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (51190, 51215), True, 'from flask_babelex import gettext as _\n'), ((51624, 51649), 'flask_babelex.gettext', '_', (['"""Requisição inválida."""'], {}), "('Requisição inválida.')\n", (51625, 51649), True, 'from flask_babelex import gettext as _\n'), ((52779, 52804), 'flask_babelex.gettext', '_', (['"""Requisição inválida."""'], {}), "('Requisição inválida.')\n", (52780, 52804), True, 'from flask_babelex import gettext as _\n'), ((56060, 56074), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (56072, 56074), False, 'from datetime import datetime, timedelta\n'), ((6013, 6077), 'opac_schema.v1.models.Journal.objects.filter', 'Journal.objects.filter', ([], {'is_public': '(True)', 'current_status': '"""current"""'}), "(is_public=True, current_status='current')\n", (6035, 6077), False, 'from opac_schema.v1.models import Journal, Issue, Article, Collection\n'), ((6153, 6191), 'opac_schema.v1.models.Article.objects.filter', 'Article.objects.filter', ([], {'is_public': '(True)'}), '(is_public=True)\n', (6175, 6191), False, 'from opac_schema.v1.models import Journal, Issue, Article, Collection\n'), ((8943, 8957), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (8955, 8957), False, 'from datetime import datetime, timedelta\n'), ((10556, 10582), 'flask_babelex.gettext', '_', (['"""Página não encontrada"""'], {}), "('Página não encontrada')\n", (10557, 10582), True, 'from flask_babelex import gettext as _\n'), ((11613, 11649), 'webapp.controllers.get_journal_by_issn', 'controllers.get_journal_by_issn', (['pid'], {}), '(pid)\n', (11644, 11649), False, 'from webapp import controllers\n'), ((15267, 15294), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (15268, 15294), True, 'from flask_babelex import gettext as _\n'), ((17527, 17554), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (17528, 17554), True, 'from flask_babelex import gettext as _\n'), ((18291, 18313), 'flask_babelex.gettext', '_', (['"""Artigo sem título"""'], {}), "('Artigo sem título')\n", (18292, 18313), True, 'from flask_babelex import gettext as _\n'), ((19267, 19294), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (19268, 19294), True, 'from flask_babelex import gettext as _\n'), ((21821, 21924), 'webapp.controllers.get_journals_grouped_by', 'controllers.get_journals_grouped_by', (['"""publisher_name"""', 'query'], {'query_filter': 'query_filter', 'lang': 'lang'}), "('publisher_name', query, query_filter=\n query_filter, lang=lang)\n", (21856, 21924), False, 'from webapp import controllers\n'), ((22541, 22634), 'flask_babelex.gettext', '_', (['"""Parámetro "list_type" é inválido, deve ser: "alpha", "areas", "wos" ou "publisher"."""'], {}), '(\'Parámetro "list_type" é inválido, deve 
ser: "alpha", "areas", "wos" ou "publisher".\'\n )\n', (22542, 22634), True, 'from flask_babelex import gettext as _\n'), ((23767, 23809), 'flask_babelex.gettext', '_', (['"""Periódico não permite envio de email."""'], {}), "('Periódico não permite envio de email.')\n", (23768, 23809), True, 'from flask_babelex import gettext as _\n'), ((25456, 25483), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (25457, 25483), True, 'from flask_babelex import gettext as _\n'), ((27682, 27723), 'flask.request.args.get', 'request.args.get', (['"""section"""', '""""""'], {'type': 'str'}), "('section', '', type=str)\n", (27698, 27723), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((27952, 27977), 'flask_babelex.gettext', '_', (['issue.unpublish_reason'], {}), '(issue.unpublish_reason)\n', (27953, 27977), True, 'from flask_babelex import gettext as _\n'), ((28099, 28126), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (28100, 28126), True, 'from flask_babelex import gettext as _\n'), ((32307, 32334), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (32308, 32334), True, 'from flask_babelex import gettext as _\n'), ((34144, 34169), 'flask_babelex.gettext', '_', (['issue.unpublish_reason'], {}), '(issue.unpublish_reason)\n', (34145, 34169), True, 'from flask_babelex import gettext as _\n'), ((34247, 34280), 'flask_babelex.gettext', '_', (['issue.journal.unpublish_reason'], {}), '(issue.journal.unpublish_reason)\n', (34248, 34280), True, 'from flask_babelex import gettext as _\n'), ((41174, 41197), 'flask_babelex.gettext', '_', (['"""Artigo inexistente"""'], {}), "('Artigo inexistente')\n", (41175, 41197), True, 'from flask_babelex import gettext as _\n'), ((41318, 41344), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (41319, 41344), True, 'from flask_babelex import gettext as _\n'), ((41432, 41536), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'url_seg', 'article_pid_v3': 'article_pid_v3', 'format': 'qs_format'}), "('main.article_detail_v3', url_seg=url_seg, article_pid_v3=\n article_pid_v3, format=qs_format)\n", (41439, 41536), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((41715, 41742), 'flask_babelex.gettext', '_', (['"""Recurso não encontrado"""'], {}), "('Recurso não encontrado')\n", (41716, 41742), True, 'from flask_babelex import gettext as _\n'), ((42310, 42443), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article_pid_v3', 'lang': 'qs_lang', 'format': '"""pdf"""'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article_pid_v3, lang=qs_lang, format='pdf')\n", (42317, 42443), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((44909, 44942), 'flask_babelex.gettext', '_', (['"""PDF do Artigo não encontrado"""'], {}), "('PDF do Artigo não encontrado')\n", (44910, 44942), True, 'from flask_babelex import gettext as _\n'), ((45074, 45107), 'flask_babelex.gettext', '_', (['"""PDF do Artigo não encontrado"""'], {}), "('PDF 
do Artigo não encontrado')\n", (45075, 45107), True, 'from flask_babelex import gettext as _\n'), ((46936, 46959), 'flask_babelex.gettext', '_', (['"""PDF não encontrado"""'], {}), "('PDF não encontrado')\n", (46937, 46959), True, 'from flask_babelex import gettext as _\n'), ((47007, 47027), 'flask_babelex.gettext', '_', (['"""Erro inesperado"""'], {}), "('Erro inesperado')\n", (47008, 47027), True, 'from flask_babelex import gettext as _\n'), ((47521, 47548), 'flask_babelex.gettext', '_', (['"""Recurso não encontrado"""'], {}), "('Recurso não encontrado')\n", (47522, 47548), True, 'from flask_babelex import gettext as _\n'), ((47596, 47616), 'flask_babelex.gettext', '_', (['"""Erro inesperado"""'], {}), "('Erro inesperado')\n", (47597, 47616), True, 'from flask_babelex import gettext as _\n'), ((52923, 52978), 'flask.current_app.config.get', 'current_app.config.get', (['"""EMAIL_ACCOUNTS_RECEIVE_ERRORS"""'], {}), "('EMAIL_ACCOUNTS_RECEIVE_ERRORS')\n", (52945, 52978), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((55498, 55548), 'flask.url_for', 'url_for', (['"""main.about_journal"""'], {'url_seg': 'journal_seg'}), "('main.about_journal', url_seg=journal_seg)\n", (55505, 55548), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((58081, 58107), 'flask_babelex.gettext', '_', (['"""Página não encontrada"""'], {}), "('Página não encontrada')\n", (58082, 58107), True, 'from flask_babelex import gettext as _\n'), ((3634, 3679), 'flask.current_app.config.get', 'current_app.config.get', (['"""SCIELO_ORG_URIS"""', '{}'], {}), "('SCIELO_ORG_URIS', {})\n", (3656, 3679), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((11881, 11940), 'flask.url_for', 'url_for', (['"""main.journal_detail"""'], {'url_seg': 'journal.url_segment'}), "('main.journal_detail', url_seg=journal.url_segment)\n", (11888, 11940), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((12045, 12078), 'webapp.controllers.get_issue_by_pid', 'controllers.get_issue_by_pid', (['pid'], {}), '(pid)\n', (12073, 12078), False, 'from webapp import controllers\n'), ((23094, 23108), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (23106, 23108), False, 'from datetime import datetime, timedelta\n'), ((40377, 40420), 'flask_babelex.gettext', '_', (['"""Não existe \'{}\'. No seu lugar use \'{}\'"""'], {}), '("Não existe \'{}\'. 
No seu lugar use \'{}\'")\n', (40378, 40420), True, 'from flask_babelex import gettext as _\n'), ((41130, 41153), 'flask_babelex.gettext', '_', (['"""Resumo inexistente"""'], {}), "('Resumo inexistente')\n", (41131, 41153), True, 'from flask_babelex import gettext as _\n'), ((43215, 43265), 'flask_babelex.gettext', '_', (['"""HTML do Artigo não encontrado ou indisponível"""'], {}), "('HTML do Artigo não encontrado ou indisponível')\n", (43216, 43265), True, 'from flask_babelex import gettext as _\n'), ((43321, 43341), 'flask_babelex.gettext', '_', (['"""Erro inesperado"""'], {}), "('Erro inesperado')\n", (43322, 43341), True, 'from flask_babelex import gettext as _\n'), ((43466, 43498), 'webapp.config.lang_names.display_original_lang_name', 'display_original_lang_name', (['lang'], {}), '(lang)\n', (43492, 43498), False, 'from webapp.config.lang_names import display_original_lang_name\n'), ((43523, 43639), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article_pid_v3', 'lang': 'lang'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article_pid_v3, lang=lang)\n", (43530, 43639), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((45249, 45282), 'flask_babelex.gettext', '_', (['"""PDF do Artigo não encontrado"""'], {}), "('PDF do Artigo não encontrado')\n", (45250, 45282), True, 'from flask_babelex import gettext as _\n'), ((45957, 45983), 'flask_babelex.gettext', '_', (['"""Formato não suportado"""'], {}), "('Formato não suportado')\n", (45958, 45983), True, 'from flask_babelex import gettext as _\n'), ((11706, 11735), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (11707, 11735), True, 'from flask_babelex import gettext as _\n'), ((12616, 12714), 'flask.url_for', 'url_for', (['"""main.issue_toc"""'], {'url_seg': 'issue.journal.url_segment', 'url_seg_issue': 'issue.url_segment'}), "('main.issue_toc', url_seg=issue.journal.url_segment, url_seg_issue=\n issue.url_segment)\n", (12623, 12714), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((12903, 12941), 'webapp.controllers.get_article_by_pid_v2', 'controllers.get_article_by_pid_v2', (['pid'], {}), '(pid)\n', (12936, 12941), False, 'from webapp import controllers\n'), ((22004, 22079), 'flask_babelex.gettext', '_', (['"""Parámetro "filter" é inválido, deve ser "areas", "wos" ou "publisher"."""'], {}), '(\'Parámetro "filter" é inválido, deve ser "areas", "wos" ou "publisher".\')\n', (22005, 22079), True, 'from flask_babelex import gettext as _\n'), ((11823, 11850), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (11824, 11850), True, 'from flask_babelex import gettext as _\n'), ((12133, 12159), 'flask_babelex.gettext', '_', (['"""Número não encontrado"""'], {}), "('Número não encontrado')\n", (12134, 12159), True, 'from flask_babelex import gettext as _\n'), ((12518, 12558), 'flask.url_for', 'url_for', (['"""main.aop_toc"""'], {'url_seg': 'url_seg'}), "('main.aop_toc', url_seg=url_seg)\n", (12525, 12558), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((13311, 13435), 'flask.url_for', 
'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article.aid', 'part': 'part', 'lang': 'tlng'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article.aid, part=part, lang=tlng)\n", (13318, 13435), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((13680, 13716), 'webapp.controllers.get_journal_by_issn', 'controllers.get_journal_by_issn', (['pid'], {}), '(pid)\n', (13711, 13716), False, 'from webapp import controllers\n'), ((12243, 12268), 'flask_babelex.gettext', '_', (['issue.unpublish_reason'], {}), '(issue.unpublish_reason)\n', (12244, 12268), True, 'from flask_babelex import gettext as _\n'), ((12362, 12395), 'flask_babelex.gettext', '_', (['issue.journal.unpublish_reason'], {}), '(issue.journal.unpublish_reason)\n', (12363, 12395), True, 'from flask_babelex import gettext as _\n'), ((12997, 13023), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (12998, 13023), True, 'from flask_babelex import gettext as _\n'), ((13948, 14003), 'flask.url_for', 'url_for', (['"""main.issue_grid"""'], {'url_seg': 'journal.url_segment'}), "('main.issue_grid', url_seg=journal.url_segment)\n", (13955, 14003), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((14147, 14185), 'webapp.controllers.get_article_by_pid_v2', 'controllers.get_article_by_pid_v2', (['pid'], {}), '(pid)\n', (14180, 14185), False, 'from webapp import controllers\n'), ((13773, 13802), 'flask_babelex.gettext', '_', (['"""Periódico não encontrado"""'], {}), "('Periódico não encontrado')\n", (13774, 13802), True, 'from flask_babelex import gettext as _\n'), ((14315, 14431), 'flask.url_for', 'url_for', (['"""main.article_detail_v3"""'], {'url_seg': 'article.journal.url_segment', 'article_pid_v3': 'article.aid', 'format': '"""pdf"""'}), "('main.article_detail_v3', url_seg=article.journal.url_segment,\n article_pid_v3=article.aid, format='pdf')\n", (14322, 14431), False, 'from flask import render_template, abort, current_app, request, session, redirect, jsonify, url_for, Response, send_from_directory, g, make_response\n'), ((14605, 14674), 'flask_babelex.gettext', '_', (["(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)"], {}), "(u'Requsição inválida ao tentar acessar o artigo com pid: %s' % pid)\n", (14606, 14674), True, 'from flask_babelex import gettext as _\n'), ((13890, 13917), 'flask_babelex.gettext', '_', (['journal.unpublish_reason'], {}), '(journal.unpublish_reason)\n', (13891, 13917), True, 'from flask_babelex import gettext as _\n'), ((14241, 14267), 'flask_babelex.gettext', '_', (['"""Artigo não encontrado"""'], {}), "('Artigo não encontrado')\n", (14242, 14267), True, 'from flask_babelex import gettext as _\n')] |
"""
Convert a DataFrame to CSV
"""
import pandas as pd
dataset = pd.DataFrame({'Frutas': ["Abacaxi", "Mamão"],
"Nomes": ["Éverton", "Márcia"]},
index=["Linha 1", "Linha 2"])
dataset.to_csv("dataset.csv") | [
"pandas.DataFrame"
]
| [((71, 184), 'pandas.DataFrame', 'pd.DataFrame', (["{'Frutas': ['Abacaxi', 'Mamão'], 'Nomes': ['Éverton', 'Márcia']}"], {'index': "['Linha 1', 'Linha 2']"}), "({'Frutas': ['Abacaxi', 'Mamão'], 'Nomes': ['Éverton', 'Márcia'\n ]}, index=['Linha 1', 'Linha 2'])\n", (83, 184), True, 'import pandas as pd\n')] |
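A minimal round-trip check for the DataFrame-to-CSV snippet above; reading the file back is not part of the original record, and the index_col choice is an assumption about how the saved row labels should be restored.

import pandas as pd

# Read back the file written by dataset.to_csv("dataset.csv"); the first
# column holds the "Linha 1"/"Linha 2" row labels, so use it as the index.
restored = pd.read_csv("dataset.csv", index_col=0)
print(restored.loc["Linha 1", "Frutas"])  # -> Abacaxi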
import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DEBUG = True
ALLOWED_HOSTS = []
ROOT_URLCONF = 'groups.tests.urls'
STATIC_URL = '/static/'
SECRET_KEY = '<KEY>'
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
DATABASES = {
'default': dj_database_url.config(default='postgres://localhost/groups')
}
DEFAULT_FILE_STORAGE = 'inmemorystorage.InMemoryStorage'
INSTALLED_APPS = (
'groups',
'crispy_forms',
'pagination',
'polymorphic',
# Put contenttypes before auth to work around test issue.
# See: https://code.djangoproject.com/ticket/10827#comment:12
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.admin',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'groups', 'tests', 'templates')
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.request',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
CRISPY_TEMPLATE_PACK = 'bootstrap3'
TEST_RUNNER = 'test_project.test_runner.Runner'
| [
"os.path.abspath",
"dj_database_url.config",
"os.path.join"
]
| [((321, 382), 'dj_database_url.config', 'dj_database_url.config', ([], {'default': '"""postgres://localhost/groups"""'}), "(default='postgres://localhost/groups')\n", (343, 382), False, 'import dj_database_url\n'), ((79, 104), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (94, 104), False, 'import os\n'), ((1120, 1174), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""groups"""', '"""tests"""', '"""templates"""'], {}), "(BASE_DIR, 'groups', 'tests', 'templates')\n", (1132, 1174), False, 'import os\n')] |
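A short sketch of pointing Django at a settings module like the one above before touching the ORM or test client; the dotted path "test_project.settings" is an assumption about where the module actually lives.

import os

import django

# Hypothetical module path -- adjust to wherever this settings file sits in the project.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'test_project.settings')
django.setup()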
import bluetooth
import time
bt = bluetooth.BLE() # singleton
bt.active(True) # activate BT stack
UART_UUID = bluetooth.UUID('6E400001-B5A3-F393-E0A9-E50E24DCCA9E')
UART_TX = (bluetooth.UUID('6E400003-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_READ | bluetooth.FLAG_NOTIFY,)
UART_RX = (bluetooth.UUID('6E400002-B5A3-F393-E0A9-E50E24DCCA9E'), bluetooth.FLAG_WRITE,)
UART_SERVICE = (UART_UUID, (UART_TX, UART_RX,),)
SERVICES = (UART_SERVICE,)
( (tx, rx,), ) = bt.gatts_register_services(SERVICES)
bt.gap_advertise(100) | [
"bluetooth.BLE",
"bluetooth.UUID"
]
| [((34, 49), 'bluetooth.BLE', 'bluetooth.BLE', ([], {}), '()\n', (47, 49), False, 'import bluetooth\n'), ((149, 203), 'bluetooth.UUID', 'bluetooth.UUID', (['"""6E400001-B5A3-F393-E0A9-E50E24DCCA9E"""'], {}), "('6E400001-B5A3-F393-E0A9-E50E24DCCA9E')\n", (163, 203), False, 'import bluetooth\n'), ((215, 269), 'bluetooth.UUID', 'bluetooth.UUID', (['"""6E400003-B5A3-F393-E0A9-E50E24DCCA9E"""'], {}), "('6E400003-B5A3-F393-E0A9-E50E24DCCA9E')\n", (229, 269), False, 'import bluetooth\n'), ((328, 382), 'bluetooth.UUID', 'bluetooth.UUID', (['"""6E400002-B5A3-F393-E0A9-E50E24DCCA9E"""'], {}), "('6E400002-B5A3-F393-E0A9-E50E24DCCA9E')\n", (342, 382), False, 'import bluetooth\n')] |
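A hedged continuation of the BLE UART snippet above: once a central connects and subscribes, data can be pushed over the TX characteristic. The conn_handle is assumed to arrive via an _IRQ_CENTRAL_CONNECT event delivered to bt.irq(); the payload is arbitrary.

# Sketch only: `tx` is the TX value handle registered above.
def uart_send(conn_handle, payload):
    bt.gatts_write(tx, payload)       # update the local characteristic value
    bt.gatts_notify(conn_handle, tx)  # notify the subscribed central of the new value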
import optparse
import sys
def make_set(data, s, e_vocab, f_vocab, aligned, reverse):
for pair in data.split():
cur = pair.split('-')
if reverse:
e_vocab.add(int(cur[1]))
f_vocab.add(int(cur[0]))
aligned.add(int(cur[0]))
s.add((int(cur[1]), int(cur[0])))
else:
e_vocab.add(int(cur[0]))
f_vocab.add(int(cur[1]))
aligned.add(int(cur[0]))
s.add((int(cur[0]), int(cur[1])))
def grow_diag_final_and(e2f_data, f2e_data):
directions = [(-1,0),(0,-1),(1,0),(0,1),(-1,-1),(-1,1),(1,-1),(1,1)]
for (i, (e2f, f2e)) in enumerate(zip(open(e2f_data), open(f2e_data))):
e2f_set, f2e_set, e_vocab, f_vocab, e_aligned, f_aligned = set(), set(), set(), set(), set(), set()
make_set(e2f, e2f_set, e_vocab, f_vocab, e_aligned, False)
make_set(f2e, f2e_set, e_vocab, f_vocab, f_aligned, True)
alignment = e2f_set & f2e_set
union_alignment = e2f_set | f2e_set
grow_diag(e_vocab, f_vocab, e_aligned, f_aligned, alignment, union_alignment, directions)
final(e_vocab, f_vocab, e_aligned, f_aligned, alignment, union_alignment, True)
for e, f in alignment:
sys.stdout.write("%i-%i " % (e,f))
sys.stdout.write("\n")
def grow_diag(e_vocab, f_vocab, e_alignment, f_alignment, alignment, union_alignment, directions):
prev_len = 0
while prev_len != len(alignment):
prev_len = len(alignment)
for e in e_vocab:
for f in f_vocab:
if (e, f) in alignment:
for d in directions:
en, fn = e + d[0], f + d[1]
if (en not in e_alignment or fn not in f_alignment) and (en, fn) in union_alignment:
alignment.add((en, fn))
e_alignment.add(en)
f_alignment.add(fn)
def final(e_vocab, f_vocab, e_alignment, f_alignment, alignment, union_alignment, final_and):
for e in e_vocab:
for f in f_vocab:
c = False
if final_and:
c = e not in e_alignment and f not in f_alignment
else:
c = e not in e_alignment or f not in f_alignment
if c and (e, f) in union_alignment:
alignment.add((e, f))
e_alignment.add(e)
f_alignment.add(f)
def main():
optparser = optparse.OptionParser()
optparser.add_option("-d", "--data", dest="train", default="data/alignment", help="Data filename prefix (default=data)")
optparser.add_option("-e", "--e2f", dest="e2f", default="ef", help="Suffix of English to French filename (default=ef)")
optparser.add_option("-f", "--f2e", dest="f2e", default="fe", help="Suffix of French to English filename (default=fe)")
optparser.add_option("-a", "--final_and", dest="final_and", action="store_true", help="Whether to use Final-And version of the algorithm")
(opts, args) = optparser.parse_args()
e2f_data = "%s.%s" % (opts.train, opts.e2f)
f2e_data = "%s.%s" % (opts.train, opts.f2e)
grow_diag_final_and(e2f_data, f2e_data)
if __name__ == "__main__":
main()
| [
"optparse.OptionParser",
"sys.stdout.write"
]
| [((2468, 2491), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (2489, 2491), False, 'import optparse\n'), ((1285, 1307), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1301, 1307), False, 'import sys\n'), ((1242, 1277), 'sys.stdout.write', 'sys.stdout.write', (["('%i-%i ' % (e, f))"], {}), "('%i-%i ' % (e, f))\n", (1258, 1277), False, 'import sys\n')] |
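A tiny illustration of the input format consumed by make_set above -- each line of the alignment files is a whitespace-separated list of "e-f" index pairs; the sample line here is made up.

e2f_line = "0-0 1-2 2-1"
e2f_set, e_vocab, f_vocab, e_aligned = set(), set(), set(), set()
make_set(e2f_line, e2f_set, e_vocab, f_vocab, e_aligned, False)
print(sorted(e2f_set))  # [(0, 0), (1, 2), (2, 1)]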
# -*- coding: utf-8 -*-
# Copyright 2017-2018 ICON Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import unittest
from tbears.block_manager.tbears_db import TbearsDB
DIRECTORY_PATH = os.path.abspath((os.path.dirname(__file__)))
DB_PATH = os.path.join(DIRECTORY_PATH, './.tbears_db')
class TestTBearsDB(unittest.TestCase):
def setUp(self):
self.TBEARS_DB = TbearsDB(TbearsDB.make_db(DB_PATH))
self.test_key = b'test_key'
self.test_value = b'test_value'
def tearDown(self):
self.TBEARS_DB.close()
shutil.rmtree(DB_PATH)
def test_put_and_get(self):
# Put and get
self.TBEARS_DB.put(self.test_key, self.test_value)
ret = self.TBEARS_DB.get(self.test_key)
self.assertEqual(ret, self.test_value)
# overwrite
overwrite_value = b'test_value_overwrite'
self.TBEARS_DB.put(self.test_key, overwrite_value)
ret = self.TBEARS_DB.get(self.test_key)
self.assertEqual(ret, overwrite_value)
# get invalid key
ret = self.TBEARS_DB.get(b'invalid_key')
self.assertIsNone(ret)
# put invalid type
self.assertRaises(TypeError, self.TBEARS_DB.put, 'test_key', self.test_value)
self.assertRaises(TypeError, self.TBEARS_DB.put, self.test_key, 123)
def test_delete(self):
self.TBEARS_DB.put(self.test_key, self.test_value)
ret = self.TBEARS_DB.get(self.test_key)
self.assertEqual(ret, self.test_value)
self.TBEARS_DB.delete(self.test_key)
ret = self.TBEARS_DB.get(self.test_key)
self.assertIsNone(ret)
def test_iterator(self):
self.TBEARS_DB.put(b'key1', b'value1')
self.TBEARS_DB.put(b'key2', b'value2')
self.TBEARS_DB.put(b'key3', b'value3')
self.TBEARS_DB.put(b'key4', b'value4')
i = 1
for _, actual_value in self.TBEARS_DB.iterator():
expected_value = ('value' + str(i)).encode()
self.assertEqual(expected_value, actual_value)
i += 1
| [
"tbears.block_manager.tbears_db.TbearsDB.make_db",
"os.path.dirname",
"os.path.join",
"shutil.rmtree"
]
| [((771, 815), 'os.path.join', 'os.path.join', (['DIRECTORY_PATH', '"""./.tbears_db"""'], {}), "(DIRECTORY_PATH, './.tbears_db')\n", (783, 815), False, 'import os\n'), ((733, 758), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (748, 758), False, 'import os\n'), ((1080, 1102), 'shutil.rmtree', 'shutil.rmtree', (['DB_PATH'], {}), '(DB_PATH)\n', (1093, 1102), False, 'import shutil\n'), ((913, 938), 'tbears.block_manager.tbears_db.TbearsDB.make_db', 'TbearsDB.make_db', (['DB_PATH'], {}), '(DB_PATH)\n', (929, 938), False, 'from tbears.block_manager.tbears_db import TbearsDB\n')] |
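A minimal usage sketch assembled only from the TbearsDB calls the test above exercises; the database path is arbitrary.

from tbears.block_manager.tbears_db import TbearsDB

db = TbearsDB(TbearsDB.make_db('./.tbears_db'))  # arbitrary on-disk path
db.put(b'block_height', b'42')
assert db.get(b'block_height') == b'42'
db.delete(b'block_height')
db.close()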
# encoding: utf-8
"""
mplsmask.py
Created by <NAME> on 2016-12-01.
Copyright (c) 2014-2017 Exa Networks. All rights reserved.
"""
from exabgp.bgp.message.notification import Notify
from exabgp.bgp.message.update.attribute.bgpls.linkstate import LinkState
from exabgp.bgp.message.update.attribute.bgpls.linkstate import FlagLS
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Type | Length |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |L|R| Reserved |
# +-+-+-+-+-+-+-+-+
# https://tools.ietf.org/html/rfc7752#section-3.3.2.2 MPLS Protocol Mask
#
# +------------+------------------------------------------+-----------+
# | Bit | Description | Reference |
# +------------+------------------------------------------+-----------+
# | 'L' | Label Distribution Protocol (LDP) | [RFC5036] |
# | 'R' | Extension to RSVP for LSP Tunnels | [RFC3209] |
# | | (RSVP-TE) | |
# | 'Reserved' | Reserved for future use | |
# +------------+------------------------------------------+-----------+
# RFC 7752 3.3.2.2. MPLS Protocol Mask TLV
@LinkState.register()
class MplsMask(FlagLS):
REPR = 'MPLS Protocol mask'
JSON = 'mpls-mask'
TLV = 1094
FLAGS = ['LDP', 'RSVP-TE', 'RSV', 'RSV', 'RSV', 'RSV', 'RSV', 'RSV']
LEN = 1
| [
"exabgp.bgp.message.update.attribute.bgpls.linkstate.LinkState.register"
]
| [((1453, 1473), 'exabgp.bgp.message.update.attribute.bgpls.linkstate.LinkState.register', 'LinkState.register', ([], {}), '()\n', (1471, 1473), False, 'from exabgp.bgp.message.update.attribute.bgpls.linkstate import LinkState\n')] |
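An illustrative decode of the one-byte MPLS Protocol Mask laid out in the figure above -- not exabgp's actual unpack path. Bit 0 ('L', LDP) is taken as the most significant bit and bit 1 ('R', RSVP-TE) as the next one, following the RFC 7752 diagram.

def decode_mpls_mask(octet):
    # The remaining six bits are reserved.
    return {'LDP': bool(octet & 0x80), 'RSVP-TE': bool(octet & 0x40)}

print(decode_mpls_mask(0xC0))  # {'LDP': True, 'RSVP-TE': True}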
import unittest
from opencmiss.utils.zinc.finiteelement import evaluateFieldNodesetRange
from opencmiss.utils.zinc.general import ChangeManager
from opencmiss.zinc.context import Context
from opencmiss.zinc.element import Element
from opencmiss.zinc.field import Field
from opencmiss.zinc.result import RESULT_OK
from scaffoldmaker.meshtypes.meshtype_3d_cecum1 import MeshType_3d_cecum1
from scaffoldmaker.utils.zinc_utils import createFaceMeshGroupExteriorOnFace
from testutils import assertAlmostEqualList
class CecumScaffoldTestCase(unittest.TestCase):
def test_cecum1(self):
"""
Test creation of cecum scaffold.
"""
parameterSetNames = MeshType_3d_cecum1.getParameterSetNames()
self.assertEqual(parameterSetNames, ["Default", "Pig 1"])
options = MeshType_3d_cecum1.getDefaultOptions("Pig 1")
self.assertEqual(30, len(options))
self.assertEqual(5, options.get("Number of segments"))
self.assertEqual(2, options.get("Number of elements around tenia coli"))
self.assertEqual(8, options.get("Number of elements along segment"))
self.assertEqual(1, options.get("Number of elements through wall"))
self.assertEqual(35.0, options.get("Start inner radius"))
self.assertEqual(3.0, options.get("Start inner radius derivative"))
self.assertEqual(38.0, options.get("End inner radius"))
self.assertEqual(3.0, options.get("End inner radius derivative"))
self.assertEqual(0.5, options.get("Corner inner radius factor"))
self.assertEqual(0.25, options.get("Haustrum inner radius factor"))
self.assertEqual(4.0, options.get("Segment length mid derivative factor"))
self.assertEqual(3, options.get("Number of tenia coli"))
self.assertEqual(5.0, options.get("Start tenia coli width"))
self.assertEqual(0.0, options.get("End tenia coli width derivative"))
self.assertEqual(2.0, options.get("Wall thickness"))
ostiumOptions = options['Ileocecal junction']
ostiumSettings = ostiumOptions.getScaffoldSettings()
self.assertEqual(1, ostiumSettings.get("Number of vessels"))
self.assertEqual(8, ostiumSettings.get("Number of elements around ostium"))
self.assertEqual(1, ostiumSettings.get("Number of elements through wall"))
self.assertEqual(20.0, ostiumSettings.get("Ostium diameter"))
self.assertEqual(10.0, ostiumSettings.get("Vessel inner diameter"))
self.assertEqual(60, options.get("Ileocecal junction angular position degrees"))
self.assertEqual(0.5, options.get("Ileocecal junction position along factor"))
context = Context("Test")
region = context.getDefaultRegion()
self.assertTrue(region.isValid())
annotationGroups = MeshType_3d_cecum1.generateBaseMesh(region, options)
self.assertEqual(2, len(annotationGroups))
fieldmodule = region.getFieldmodule()
self.assertEqual(RESULT_OK, fieldmodule.defineAllFaces())
mesh3d = fieldmodule.findMeshByDimension(3)
self.assertEqual(1492, mesh3d.getSize())
mesh2d = fieldmodule.findMeshByDimension(2)
self.assertEqual(5617, mesh2d.getSize())
mesh1d = fieldmodule.findMeshByDimension(1)
self.assertEqual(6767, mesh1d.getSize())
nodes = fieldmodule.findNodesetByFieldDomainType(Field.DOMAIN_TYPE_NODES)
self.assertEqual(2642, nodes.getSize())
datapoints = fieldmodule.findNodesetByFieldDomainType(Field.DOMAIN_TYPE_DATAPOINTS)
self.assertEqual(0, datapoints.getSize())
coordinates = fieldmodule.findFieldByName("coordinates").castFiniteElement()
self.assertTrue(coordinates.isValid())
minimums, maximums = evaluateFieldNodesetRange(coordinates, nodes)
assertAlmostEqualList(self, minimums, [-49.01658984455258, -46.89686037622053, -2.343256155753525], 1.0E-6)
assertAlmostEqualList(self, maximums, [42.18085849205387, 54.89264119402881, 180.0], 1.0E-6)
with ChangeManager(fieldmodule):
one = fieldmodule.createFieldConstant(1.0)
faceMeshGroup = createFaceMeshGroupExteriorOnFace(fieldmodule, Element.FACE_TYPE_XI3_1)
surfaceAreaField = fieldmodule.createFieldMeshIntegral(one, coordinates, faceMeshGroup)
surfaceAreaField.setNumbersOfPoints(4)
volumeField = fieldmodule.createFieldMeshIntegral(one, coordinates, mesh3d)
volumeField.setNumbersOfPoints(3)
fieldcache = fieldmodule.createFieldcache()
result, surfaceArea = surfaceAreaField.evaluateReal(fieldcache, 1)
self.assertEqual(result, RESULT_OK)
self.assertAlmostEqual(surfaceArea, 65960.20655074248, delta=1.0E-6)
result, volume = volumeField.evaluateReal(fieldcache, 1)
self.assertEqual(result, RESULT_OK)
self.assertAlmostEqual(volume, 127905.28250502056, delta=1.0E-6)
if __name__ == "__main__":
unittest.main()
| [
"opencmiss.zinc.context.Context",
"scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.getParameterSetNames",
"scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.generateBaseMesh",
"testutils.assertAlmostEqualList",
"scaffoldmaker.utils.zinc_utils.createFaceMeshGroupExteriorOnFace",
"scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.getDefaultOptions",
"opencmiss.utils.zinc.general.ChangeManager",
"unittest.main",
"opencmiss.utils.zinc.finiteelement.evaluateFieldNodesetRange"
]
| [((4961, 4976), 'unittest.main', 'unittest.main', ([], {}), '()\n', (4974, 4976), False, 'import unittest\n'), ((681, 722), 'scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.getParameterSetNames', 'MeshType_3d_cecum1.getParameterSetNames', ([], {}), '()\n', (720, 722), False, 'from scaffoldmaker.meshtypes.meshtype_3d_cecum1 import MeshType_3d_cecum1\n'), ((807, 852), 'scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.getDefaultOptions', 'MeshType_3d_cecum1.getDefaultOptions', (['"""Pig 1"""'], {}), "('Pig 1')\n", (843, 852), False, 'from scaffoldmaker.meshtypes.meshtype_3d_cecum1 import MeshType_3d_cecum1\n'), ((2670, 2685), 'opencmiss.zinc.context.Context', 'Context', (['"""Test"""'], {}), "('Test')\n", (2677, 2685), False, 'from opencmiss.zinc.context import Context\n'), ((2799, 2851), 'scaffoldmaker.meshtypes.meshtype_3d_cecum1.MeshType_3d_cecum1.generateBaseMesh', 'MeshType_3d_cecum1.generateBaseMesh', (['region', 'options'], {}), '(region, options)\n', (2834, 2851), False, 'from scaffoldmaker.meshtypes.meshtype_3d_cecum1 import MeshType_3d_cecum1\n'), ((3753, 3798), 'opencmiss.utils.zinc.finiteelement.evaluateFieldNodesetRange', 'evaluateFieldNodesetRange', (['coordinates', 'nodes'], {}), '(coordinates, nodes)\n', (3778, 3798), False, 'from opencmiss.utils.zinc.finiteelement import evaluateFieldNodesetRange\n'), ((3807, 3918), 'testutils.assertAlmostEqualList', 'assertAlmostEqualList', (['self', 'minimums', '[-49.01658984455258, -46.89686037622053, -2.343256155753525]', '(1e-06)'], {}), '(self, minimums, [-49.01658984455258, -\n 46.89686037622053, -2.343256155753525], 1e-06)\n', (3828, 3918), False, 'from testutils import assertAlmostEqualList\n'), ((3923, 4018), 'testutils.assertAlmostEqualList', 'assertAlmostEqualList', (['self', 'maximums', '[42.18085849205387, 54.89264119402881, 180.0]', '(1e-06)'], {}), '(self, maximums, [42.18085849205387, 54.89264119402881,\n 180.0], 1e-06)\n', (3944, 4018), False, 'from testutils import assertAlmostEqualList\n'), ((4030, 4056), 'opencmiss.utils.zinc.general.ChangeManager', 'ChangeManager', (['fieldmodule'], {}), '(fieldmodule)\n', (4043, 4056), False, 'from opencmiss.utils.zinc.general import ChangeManager\n'), ((4141, 4212), 'scaffoldmaker.utils.zinc_utils.createFaceMeshGroupExteriorOnFace', 'createFaceMeshGroupExteriorOnFace', (['fieldmodule', 'Element.FACE_TYPE_XI3_1'], {}), '(fieldmodule, Element.FACE_TYPE_XI3_1)\n', (4174, 4212), False, 'from scaffoldmaker.utils.zinc_utils import createFaceMeshGroupExteriorOnFace\n')] |
import sys
import pygame
class Screen:
def __init__(self) -> None:
pass
def handle_events(self, events):
for event in events:
            if event.type == pygame.QUIT:  # window close requested
sys.exit()
def draw(self, screen):
pass | [
"sys.exit"
]
| [((201, 211), 'sys.exit', 'sys.exit', ([], {}), '()\n', (209, 211), False, 'import sys\n')] |
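A minimal driver loop for the Screen class above, assuming that class is in scope; the window size and frame rate are arbitrary choices, not part of the original snippet.

import pygame

pygame.init()
window = pygame.display.set_mode((640, 480))
clock = pygame.time.Clock()
screen = Screen()
while True:
    screen.handle_events(pygame.event.get())  # exits via sys.exit() on QUIT
    screen.draw(window)
    pygame.display.flip()
    clock.tick(60)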
import numpy as np
from math import pi,exp
def static_stability(height,area,theta,s_et=None,n_et=None):
"""
The function "static_stability" computes the vertical gradient (z-derivative)
of hemispheric-averaged potential temperature, i.e. d\tilde{theta}/dz in the def-
inition of QGPV in eq.(3) of Huang and Nakamura (2016), by central differencing.
At the boundary, the static stability is estimated by forward/backward differen-
cing involving two adjacent z-grid points:
i.e. stat_n[0] = (t0_n[1]-t0_n[0])/(height[1]-height[0])
stat_n[-1] = (t0_n[-2]-t0_n[-1])/(height[-2]-height[-1])
Please make inquiries and report issues via Github: https://github.com/csyhuang/hn2016_falwa/issues
Parameters
----------
height : sequence or array_like
Array of z-coordinate [in meters] with dimension = (kmax), equally spaced
area : ndarray
Two-dimension numpy array specifying differential areal element of each grid point;
dimension = (nlat, nlon).
theta : ndarray
Matrix of potential temperature [K] with dimension (kmax,nlat,nlon) or (kmax,nlat)
s_et : int, optional
Index of the latitude that defines the boundary of the Southern hemispheric domain;
initialized as nlat/2 if not input
n_et : int, optional
        Index of the latitude that defines the boundary of the Northern hemispheric domain;
initialized as nlat/2 if not input
Returns
-------
t0_n : sequence or array_like
Area-weighted average of potential temperature (\tilde{\theta} in HN16)
in the Northern hemispheric domain with dimension = (kmax)
t0_s : sequence or array_like
Area-weighted average of potential temperature (\tilde{\theta} in HN16)
in the Southern hemispheric domain with dimension = (kmax)
stat_n : sequence or array_like
Static stability (d\tilde{\theta}/dz in HN16) in the Northern hemispheric
domain with dimension = (kmax)
stat_s : sequence or array_like
Static stability (d\tilde{\theta}/dz in HN16) in the Southern hemispheric
domain with dimension = (kmax)
"""
nlat = theta.shape[1]
if s_et==None:
s_et = nlat//2
if n_et==None:
n_et = nlat//2
stat_n = np.zeros(theta.shape[0])
stat_s = np.zeros(theta.shape[0])
if theta.ndim==3:
zonal_mean = np.mean(theta,axis=-1)
elif theta.ndim==2:
zonal_mean = theta
if area.ndim==2:
area_zonal_mean = np.mean(area,axis=-1)
elif area.ndim==1:
area_zonal_mean = area
csm_n_et = np.sum(area_zonal_mean[-n_et:])
csm_s_et = np.sum(area_zonal_mean[:s_et])
t0_n = np.sum(zonal_mean[:,-n_et:]*area_zonal_mean[np.newaxis,-n_et:],axis=-1)/csm_n_et
t0_s = np.sum(zonal_mean[:,:s_et]*area_zonal_mean[np.newaxis,:s_et],axis=-1)/csm_s_et
stat_n[1:-1] = (t0_n[2:]-t0_n[:-2])/(height[2:]-height[:-2])
stat_s[1:-1] = (t0_s[2:]-t0_s[:-2])/(height[2:]-height[:-2])
stat_n[0] = (t0_n[1]-t0_n[0])/(height[1]-height[0])
stat_n[-1] = (t0_n[-2]-t0_n[-1])/(height[-2]-height[-1])
stat_s[0] = (t0_s[1]-t0_s[0])/(height[1]-height[0])
stat_s[-1] = (t0_s[-2]-t0_s[-1])/(height[-2]-height[-1])
return t0_n,t0_s,stat_n,stat_s
def compute_qgpv_givenvort(omega,nlat,nlon,kmax,unih,ylat,avort,potential_temp,
t0_cn,t0_cs,stat_cn,stat_cs,nlat_s=None,scale_height=7000.):
"""
The function "compute_qgpv_givenvort" computes the quasi-geostrophic potential
vorticity based on the absolute vorticity, potential temperature and static
stability given.
Please make inquiries and report issues via Github: https://github.com/csyhuang/hn2016_falwa/issues
Parameters
----------
omega : float, optional
Rotation rate of the planet.
nlat : int
Latitudinal dimension of the latitude grid.
nlon : int
Longitudinal dimension of the longitude grid.
kmax : int
Vertical dimension of the height grid.
unih : sequence or array_like
Numpy array of height in [meters]; dimension = (kmax)
ylat : sequence or array_like
Numpy array of latitudes in [degrees]; dimension = (nlat)
avort : ndarray
Three-dimension numpy array of absolute vorticity (i.e. relative vorticity
+ 2*Omega*sin(lat)) in [1/s]; dimension = (kmax x nlat x nlon)
potential_temp : ndarray
Three-dimension numpy array of potential temperature in [K];
dimension = (kmax x nlat x nlon)
t0_cn : sequence or array_like
Area-weighted average of potential temperature (\tilde{\theta} in HN16)
in the Northern hemispheric domain with dimension = (kmax)
t0_cs : sequence or array_like
Area-weighted average of potential temperature (\tilde{\theta} in HN16)
in the Southern hemispheric domain with dimension = (kmax)
stat_cn : sequence or array_like
Static stability (d\tilde{\theta}/dz in HN16) in the Northern hemispheric
domain with dimension = (kmax)
stat_cs : sequence or array_like
Static stability (d\tilde{\theta}/dz in HN16) in the Southern hemispheric
domain with dimension = (kmax)
scale_height : float
Scale height of the atmosphere in [m] with default value 7000.
Returns
-------
QGPV : ndarray
Three-dimension numpy array of quasi-geostrophic potential vorticity;
dimension = (kmax x nlat x nlon)
dzdiv : ndarray
Three-dimension numpy array of the stretching term in QGPV;
dimension = (kmax x nlat x nlon)
"""
if nlat_s==None:
nlat_s=nlat//2
clat = np.cos(ylat*pi/180.)
clat = np.abs(clat) # Just to avoid the negative value at poles
# --- Next, calculate PV ---
av2 = np.empty_like(potential_temp) # dv/d(lon)
av3 = np.empty_like(potential_temp) # du/d(lat)
qgpv = np.empty_like(potential_temp) # av1+av2+av3+dzdiv
av1 = np.ones((kmax,nlat,nlon)) * 2*omega*np.sin(ylat[np.newaxis,:,np.newaxis]*pi/180.)
# Calculate the z-divergence term
zdiv = np.empty_like(potential_temp)
dzdiv = np.empty_like(potential_temp)
for kk in range(kmax): # This is more efficient
zdiv[kk,:nlat_s,:] = exp(-unih[kk]/scale_height)*(potential_temp[kk,:nlat_s,:]-t0_cs[kk])/stat_cs[kk]
zdiv[kk,-nlat_s:,:] = exp(-unih[kk]/scale_height)*(potential_temp[kk,-nlat_s:,:]-t0_cn[kk])/stat_cn[kk]
dzdiv[1:kmax-1,:,:] = np.exp(unih[1:kmax-1,np.newaxis,np.newaxis]/scale_height)* \
(zdiv[2:kmax,:,:]-zdiv[0:kmax-2,:,:]) \
/(unih[2:kmax,np.newaxis,np.newaxis]-unih[0:kmax-2,np.newaxis,np.newaxis])
dzdiv[0,:,:] = exp(unih[0]/scale_height)*(zdiv[1,:,:]-zdiv[0,:,:])/ \
(unih[1,np.newaxis,np.newaxis]-unih[0,np.newaxis,np.newaxis])
dzdiv[kmax-1,:,:] = exp(unih[kmax-1]/scale_height)*(zdiv[kmax-1,:,:]-zdiv[kmax-2,:,:])/ \
(unih[kmax-1,np.newaxis,np.newaxis]-unih[kmax-2,np.newaxis,np.newaxis])
qgpv = avort+dzdiv * av1
return qgpv, dzdiv
| [
"numpy.abs",
"numpy.mean",
"numpy.ones",
"numpy.exp",
"numpy.sum",
"numpy.zeros",
"numpy.empty_like",
"numpy.cos",
"numpy.sin",
"math.exp"
]
| [((2300, 2324), 'numpy.zeros', 'np.zeros', (['theta.shape[0]'], {}), '(theta.shape[0])\n', (2308, 2324), True, 'import numpy as np\n'), ((2338, 2362), 'numpy.zeros', 'np.zeros', (['theta.shape[0]'], {}), '(theta.shape[0])\n', (2346, 2362), True, 'import numpy as np\n'), ((2621, 2652), 'numpy.sum', 'np.sum', (['area_zonal_mean[-n_et:]'], {}), '(area_zonal_mean[-n_et:])\n', (2627, 2652), True, 'import numpy as np\n'), ((2668, 2698), 'numpy.sum', 'np.sum', (['area_zonal_mean[:s_et]'], {}), '(area_zonal_mean[:s_et])\n', (2674, 2698), True, 'import numpy as np\n'), ((5689, 5714), 'numpy.cos', 'np.cos', (['(ylat * pi / 180.0)'], {}), '(ylat * pi / 180.0)\n', (5695, 5714), True, 'import numpy as np\n'), ((5721, 5733), 'numpy.abs', 'np.abs', (['clat'], {}), '(clat)\n', (5727, 5733), True, 'import numpy as np\n'), ((5822, 5851), 'numpy.empty_like', 'np.empty_like', (['potential_temp'], {}), '(potential_temp)\n', (5835, 5851), True, 'import numpy as np\n'), ((5874, 5903), 'numpy.empty_like', 'np.empty_like', (['potential_temp'], {}), '(potential_temp)\n', (5887, 5903), True, 'import numpy as np\n'), ((5927, 5956), 'numpy.empty_like', 'np.empty_like', (['potential_temp'], {}), '(potential_temp)\n', (5940, 5956), True, 'import numpy as np\n'), ((6120, 6149), 'numpy.empty_like', 'np.empty_like', (['potential_temp'], {}), '(potential_temp)\n', (6133, 6149), True, 'import numpy as np\n'), ((6162, 6191), 'numpy.empty_like', 'np.empty_like', (['potential_temp'], {}), '(potential_temp)\n', (6175, 6191), True, 'import numpy as np\n'), ((2407, 2430), 'numpy.mean', 'np.mean', (['theta'], {'axis': '(-1)'}), '(theta, axis=-1)\n', (2414, 2430), True, 'import numpy as np\n'), ((2529, 2551), 'numpy.mean', 'np.mean', (['area'], {'axis': '(-1)'}), '(area, axis=-1)\n', (2536, 2551), True, 'import numpy as np\n'), ((2711, 2787), 'numpy.sum', 'np.sum', (['(zonal_mean[:, -n_et:] * area_zonal_mean[np.newaxis, -n_et:])'], {'axis': '(-1)'}), '(zonal_mean[:, -n_et:] * area_zonal_mean[np.newaxis, -n_et:], axis=-1)\n', (2717, 2787), True, 'import numpy as np\n'), ((2803, 2877), 'numpy.sum', 'np.sum', (['(zonal_mean[:, :s_et] * area_zonal_mean[np.newaxis, :s_et])'], {'axis': '(-1)'}), '(zonal_mean[:, :s_et] * area_zonal_mean[np.newaxis, :s_et], axis=-1)\n', (2809, 2877), True, 'import numpy as np\n'), ((6024, 6076), 'numpy.sin', 'np.sin', (['(ylat[np.newaxis, :, np.newaxis] * pi / 180.0)'], {}), '(ylat[np.newaxis, :, np.newaxis] * pi / 180.0)\n', (6030, 6076), True, 'import numpy as np\n'), ((6493, 6556), 'numpy.exp', 'np.exp', (['(unih[1:kmax - 1, np.newaxis, np.newaxis] / scale_height)'], {}), '(unih[1:kmax - 1, np.newaxis, np.newaxis] / scale_height)\n', (6499, 6556), True, 'import numpy as np\n'), ((6697, 6724), 'math.exp', 'exp', (['(unih[0] / scale_height)'], {}), '(unih[0] / scale_height)\n', (6700, 6724), False, 'from math import pi, exp\n'), ((6842, 6876), 'math.exp', 'exp', (['(unih[kmax - 1] / scale_height)'], {}), '(unih[kmax - 1] / scale_height)\n', (6845, 6876), False, 'from math import pi, exp\n'), ((5988, 6015), 'numpy.ones', 'np.ones', (['(kmax, nlat, nlon)'], {}), '((kmax, nlat, nlon))\n', (5995, 6015), True, 'import numpy as np\n'), ((6273, 6302), 'math.exp', 'exp', (['(-unih[kk] / scale_height)'], {}), '(-unih[kk] / scale_height)\n', (6276, 6302), False, 'from math import pi, exp\n'), ((6384, 6413), 'math.exp', 'exp', (['(-unih[kk] / scale_height)'], {}), '(-unih[kk] / scale_height)\n', (6387, 6413), False, 'from math import pi, exp\n')] |
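A synthetic worked call of static_stability from the snippet above, assuming the function is in scope; the grid sizes and the linear 0.005 K/m theta profile are invented purely to make the expected output easy to check.

import numpy as np

kmax, nlat, nlon = 5, 4, 8
height = np.linspace(0., 4000., kmax)             # equally spaced z-levels in meters
area = np.ones((nlat, nlon))                     # uniform areal weights
theta = 300. + 0.005 * height[:, None, None] + np.zeros((kmax, nlat, nlon))
t0_n, t0_s, stat_n, stat_s = static_stability(height, area, theta)
print(stat_n)  # ~0.005 K/m at every level for this linear profile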
import numpy as np
import scipy.interpolate
import scipy.ndimage
from sklearn.feature_extraction.image import extract_patches_2d, reconstruct_from_patches_2d
def _calc_patch_grid_dims(shape, patch_size, patch_stride):
x_w, x_h, x_c = shape
num_rows = 1 + (x_h - patch_size) // patch_stride
num_cols = 1 + (x_w - patch_size) // patch_stride
return num_rows, num_cols
def make_patch_grid(x, patch_size, patch_stride=1):
'''x shape: (num_channels, rows, cols)'''
x = x.transpose(2, 1, 0)
patches = extract_patches_2d(x, (patch_size, patch_size))
x_w, x_h, x_c = x.shape
num_rows, num_cols = _calc_patch_grid_dims(x.shape, patch_size, patch_stride)
patches = patches.reshape((num_rows, num_cols, patch_size, patch_size, x_c))
patches = patches.transpose((0, 1, 4, 2, 3))
#patches = np.rollaxis(patches, -1, 2)
return patches
def combine_patches_grid(in_patches, out_shape):
'''Reconstruct an image from these `patches`
input shape: (rows, cols, channels, patch_row, patch_col)
'''
num_rows, num_cols = in_patches.shape[:2]
num_channels = in_patches.shape[-3]
patch_size = in_patches.shape[-1]
num_patches = num_rows * num_cols
in_patches = np.reshape(in_patches, (num_patches, num_channels, patch_size, patch_size)) # (patches, channels, pr, pc)
in_patches = np.transpose(in_patches, (0, 2, 3, 1)) # (patches, p, p, channels)
recon = reconstruct_from_patches_2d(in_patches, out_shape)
return recon.transpose(2, 1, 0).astype(np.float32)
class PatchMatcher(object):
'''A matcher of image patches inspired by the PatchMatch algorithm.
image shape: (width, height, channels)
'''
def __init__(self, input_shape, target_img, patch_size=1, patch_stride=1, jump_size=0.5,
num_propagation_steps=5, num_random_steps=5, random_max_radius=1.0, random_scale=0.5):
self.input_shape = input_shape
self.patch_size = patch_size
self.patch_stride = patch_stride
self.jump_size = jump_size
self.num_propagation_steps = num_propagation_steps
self.num_random_steps = num_random_steps
self.random_max_radius = random_max_radius
self.random_scale = random_scale
self.num_input_rows, self.num_input_cols = _calc_patch_grid_dims(input_shape, patch_size, patch_stride)
self.target_patches = make_patch_grid(target_img, patch_size)
self.target_patches_normed = self.normalize_patches(self.target_patches)
self.coords = np.random.uniform(0.0, 1.0, # TODO: switch to pixels
(2, self.num_input_rows, self.num_input_cols))# * [[[self.num_input_rows]],[[self.num_input_cols]]]
self.similarity = np.zeros(input_shape[:2:-1], dtype=np.float32)
self.min_propagration_row = 1.0 / self.num_input_rows
self.min_propagration_col = 1.0 / self.num_input_cols
self.delta_row = np.array([[[self.min_propagration_row]], [[0.0]]])
self.delta_col = np.array([[[0.0]], [[self.min_propagration_col]]])
def update(self, input_img, reverse_propagation=False):
input_patches = self.get_patches_for(input_img)
self.update_with_patches(self.normalize_patches(input_patches), reverse_propagation=reverse_propagation)
def update_with_patches(self, input_patches, reverse_propagation=False):
self._propagate(input_patches, reverse_propagation=reverse_propagation)
self._random_update(input_patches)
def get_patches_for(self, img):
        return make_patch_grid(img, self.patch_size)
def normalize_patches(self, patches):
norm = np.sqrt(np.sum(np.square(patches), axis=(2, 3, 4), keepdims=True))
return patches / norm
def _propagate(self, input_patches, reverse_propagation=False):
if reverse_propagation:
roll_direction = 1
else:
roll_direction = -1
sign = float(roll_direction)
for step_i in range(self.num_propagation_steps):
new_coords = self.clip_coords(np.roll(self.coords, roll_direction, 1) + self.delta_row * sign)
coords_row, similarity_row = self.eval_state(new_coords, input_patches)
new_coords = self.clip_coords(np.roll(self.coords, roll_direction, 2) + self.delta_col * sign)
coords_col, similarity_col = self.eval_state(new_coords, input_patches)
self.coords, self.similarity = self.take_best(coords_row, similarity_row, coords_col, similarity_col)
def _random_update(self, input_patches):
for alpha in range(1, self.num_random_steps + 1): # NOTE this should actually stop when the move is < 1
new_coords = self.clip_coords(self.coords + np.random.uniform(-self.random_max_radius, self.random_max_radius, self.coords.shape) * self.random_scale ** alpha)
self.coords, self.similarity = self.eval_state(new_coords, input_patches)
def eval_state(self, new_coords, input_patches):
new_similarity = self.patch_similarity(input_patches, new_coords)
delta_similarity = new_similarity - self.similarity
coords = np.where(delta_similarity > 0, new_coords, self.coords)
best_similarity = np.where(delta_similarity > 0, new_similarity, self.similarity)
return coords, best_similarity
def take_best(self, coords_a, similarity_a, coords_b, similarity_b):
delta_similarity = similarity_a - similarity_b
best_coords = np.where(delta_similarity > 0, coords_a, coords_b)
best_similarity = np.where(delta_similarity > 0, similarity_a, similarity_b)
return best_coords, best_similarity
def patch_similarity(self, source, coords):
'''Check the similarity of the patches specified in coords.'''
target_vals = self.lookup_coords(self.target_patches_normed, coords)
err = source * target_vals
return np.sum(err, axis=(2, 3, 4))
def clip_coords(self, coords):
# TODO: should this all be in pixel space?
coords = np.clip(coords, 0.0, 1.0)
return coords
def lookup_coords(self, x, coords):
x_shape = np.expand_dims(np.expand_dims(x.shape, -1), -1)
i_coords = np.round(coords * (x_shape[:2] - 1)).astype('int32')
return x[i_coords[0], i_coords[1]]
def get_reconstruction(self, patches=None, combined=None):
if combined is not None:
patches = make_patch_grid(combined, self.patch_size)
if patches is None:
patches = self.target_patches
patches = self.lookup_coords(patches, self.coords)
recon = combine_patches_grid(patches, self.input_shape)
return recon
def scale(self, new_shape, new_target_img):
'''Create a new matcher of the given shape and replace its
state with a scaled up version of the current matcher's state.
'''
new_matcher = PatchMatcher(new_shape, new_target_img, patch_size=self.patch_size,
patch_stride=self.patch_stride, jump_size=self.jump_size,
num_propagation_steps=self.num_propagation_steps,
num_random_steps=self.num_random_steps,
random_max_radius=self.random_max_radius,
random_scale=self.random_scale)
new_matcher.coords = congrid(self.coords, new_matcher.coords.shape, method='neighbour')
new_matcher.similarity = congrid(self.similarity, new_matcher.coords.shape, method='neighbour')
return new_matcher
def congrid(a, newdims, method='linear', centre=False, minusone=False):
'''Arbitrary resampling of source array to new dimension sizes.
Currently only supports maintaining the same number of dimensions.
To use 1-D arrays, first promote them to shape (x,1).
Uses the same parameters and creates the same co-ordinate lookup points
    as IDL's congrid routine, which apparently originally came from a VAX/VMS
routine of the same name.
method:
neighbour - closest value from original data
nearest and linear - uses n x 1-D interpolations using
scipy.interpolate.interp1d
(see Numerical Recipes for validity of use of n 1-D interpolations)
spline - uses ndimage.map_coordinates
centre:
True - interpolation points are at the centres of the bins
False - points are at the front edge of the bin
minusone:
For example- inarray.shape = (i,j) & new dimensions = (x,y)
False - inarray is resampled by factors of (i/x) * (j/y)
True - inarray is resampled by(i-1)/(x-1) * (j-1)/(y-1)
This prevents extrapolation one element beyond bounds of input array.
'''
    if a.dtype not in [np.float64, np.float32]:
        a = a.astype(float)
    m1 = int(minusone)
    ofs = int(centre) * 0.5
old = np.array( a.shape )
ndims = len( a.shape )
if len( newdims ) != ndims:
print("[congrid] dimensions error. " \
"This routine currently only support " \
"rebinning to the same number of dimensions.")
return None
newdims = np.asarray( newdims, dtype=float )
dimlist = []
if method == 'neighbour':
for i in range( ndims ):
            base = np.indices(newdims.astype(int))[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = np.array( dimlist ).round().astype(int)
        newa = a[tuple(cd)]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range( ndims ):
base = np.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
# specify old dims
        olddims = [np.arange(i, dtype=float) for i in list(a.shape)]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d( olddims[-1], a, kind=method )
newa = mint( dimlist[-1] )
        trorder = [ndims - 1] + list(range(ndims - 1))
for i in range( ndims - 2, -1, -1 ):
newa = newa.transpose( trorder )
mint = scipy.interpolate.interp1d( olddims[i], newa, kind=method )
newa = mint( dimlist[i] )
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose( trorder )
return newa
elif method in ['spline']:
oslices = [ slice(0,j) for j in old ]
oldcoords = np.ogrid[oslices]
nslices = [ slice(0,j) for j in list(newdims) ]
newcoords = np.mgrid[nslices]
        newcoords_dims = list(range(np.ndim(newcoords)))  # np.rank was removed; a list is needed for the reorder below
        #make first index last
        newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
deltas = (np.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print("Congrid error: Unrecognized interpolation type.\n", \
"Currently only \'neighbour\', \'nearest\',\'linear\',", \
"and \'spline\' are supported.")
return None
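# A minimal smoke-test sketch for congrid(); the shapes are arbitrary. minusone=True
# keeps the resampling coordinates inside the source array (see the docstring above),
# which the default minusone=False does not guarantee when upscaling.
def _congrid_smoke_test():
    src = np.arange(12, dtype=np.float64).reshape(3, 4)
    up_nn = congrid(src, (6, 8), method='neighbour', minusone=True)
    up_lin = congrid(src, (6, 8), method='linear', minusone=True)
    assert up_nn.shape == (6, 8) and up_lin.shape == (6, 8)
    return up_nn, up_lin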
if __name__ == '__main__':
import sys
import time
from scipy.misc import imsave
from image_analogy.img_utils import load_image, preprocess_image, deprocess_image
content_image_path, style_image_path, output_prefix = sys.argv[1:]
jump_size = 1.0
num_steps = 7
patch_size = 1
patch_stride = 1
feat_chans = 512
feat_style_shape = (feat_chans, 12, 18)
feat_style = np.random.uniform(0.0, 1.0, feat_style_shape)
feat_in_shape = (feat_chans, 17, 10)
feat_in = np.random.uniform(0.0, 1.0, feat_in_shape)
matcher = PatchMatcher(feat_in_shape[::-1], feat_style, patch_size=patch_size)
feat_in_normed = matcher.normalize_patches(matcher.get_patches_for(feat_in))
for i in range(num_steps):
matcher.update_with_patches(feat_in_normed)
r = matcher.get_reconstruction()
content_img_img = load_image(content_image_path)
content_n_channels, content_n_rows, content_n_cols = content_img_img.shape[::-1]
content_img = preprocess_image(content_img_img, content_n_cols, content_n_rows)[0]#.transpose((2,1,0))
style_img = load_image(style_image_path)
style_n_channels, style_n_rows, style_n_cols = content_img_img.shape[::-1]
style_img = preprocess_image(
load_image(style_image_path), style_n_cols, style_n_rows)[0]#.transpose((2,1,0))
pg = make_patch_grid(content_img, patch_size)
result = combine_patches_grid(pg, content_img.shape[::-1])
outimg = deprocess_image(result, contrast_percent=0)
imsave(output_prefix + '_bestre.png', outimg)
# # #
matcher = PatchMatcher((content_n_cols, content_n_rows, content_n_channels), style_img, patch_size=patch_size)
for i in range(num_steps):
start = time.time()
matcher.update(content_img, reverse_propagation=bool(i % 2))
print(matcher.similarity.min(), matcher.similarity.max(), matcher.similarity.mean())
end = time.time()
#print end-start
start = time.time()
result = matcher.get_reconstruction(patches=matcher.target_patches)
print(result.shape)
end = time.time()
print(end-start)
outimg = deprocess_image(result, contrast_percent=0)
# # imsave takes (rows, cols, channels)
imsave(output_prefix + '_best.png', outimg)
| [
"numpy.clip",
"sklearn.feature_extraction.image.extract_patches_2d",
"numpy.array",
"image_analogy.img_utils.preprocess_image",
"numpy.arange",
"numpy.rank",
"numpy.reshape",
"numpy.where",
"scipy.misc.imsave",
"numpy.asarray",
"image_analogy.img_utils.load_image",
"image_analogy.img_utils.deprocess_image",
"numpy.round",
"numpy.indices",
"numpy.square",
"numpy.transpose",
"time.time",
"numpy.roll",
"sklearn.feature_extraction.image.reconstruct_from_patches_2d",
"numpy.sum",
"numpy.zeros",
"numpy.expand_dims",
"numpy.random.uniform"
]
| [((527, 574), 'sklearn.feature_extraction.image.extract_patches_2d', 'extract_patches_2d', (['x', '(patch_size, patch_size)'], {}), '(x, (patch_size, patch_size))\n', (545, 574), False, 'from sklearn.feature_extraction.image import extract_patches_2d, reconstruct_from_patches_2d\n'), ((1228, 1303), 'numpy.reshape', 'np.reshape', (['in_patches', '(num_patches, num_channels, patch_size, patch_size)'], {}), '(in_patches, (num_patches, num_channels, patch_size, patch_size))\n', (1238, 1303), True, 'import numpy as np\n'), ((1352, 1390), 'numpy.transpose', 'np.transpose', (['in_patches', '(0, 2, 3, 1)'], {}), '(in_patches, (0, 2, 3, 1))\n', (1364, 1390), True, 'import numpy as np\n'), ((1431, 1481), 'sklearn.feature_extraction.image.reconstruct_from_patches_2d', 'reconstruct_from_patches_2d', (['in_patches', 'out_shape'], {}), '(in_patches, out_shape)\n', (1458, 1481), False, 'from sklearn.feature_extraction.image import extract_patches_2d, reconstruct_from_patches_2d\n'), ((8773, 8790), 'numpy.array', 'np.array', (['a.shape'], {}), '(a.shape)\n', (8781, 8790), True, 'import numpy as np\n'), ((9049, 9081), 'numpy.asarray', 'np.asarray', (['newdims'], {'dtype': 'float'}), '(newdims, dtype=float)\n', (9059, 9081), True, 'import numpy as np\n'), ((11676, 11721), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)', 'feat_style_shape'], {}), '(0.0, 1.0, feat_style_shape)\n', (11693, 11721), True, 'import numpy as np\n'), ((11777, 11819), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)', 'feat_in_shape'], {}), '(0.0, 1.0, feat_in_shape)\n', (11794, 11819), True, 'import numpy as np\n'), ((12127, 12157), 'image_analogy.img_utils.load_image', 'load_image', (['content_image_path'], {}), '(content_image_path)\n', (12137, 12157), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((12366, 12394), 'image_analogy.img_utils.load_image', 'load_image', (['style_image_path'], {}), '(style_image_path)\n', (12376, 12394), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((12724, 12767), 'image_analogy.img_utils.deprocess_image', 'deprocess_image', (['result'], {'contrast_percent': '(0)'}), '(result, contrast_percent=0)\n', (12739, 12767), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((12772, 12817), 'scipy.misc.imsave', 'imsave', (["(output_prefix + '_bestre.png')", 'outimg'], {}), "(output_prefix + '_bestre.png', outimg)\n", (12778, 12817), False, 'from scipy.misc import imsave\n'), ((13228, 13239), 'time.time', 'time.time', ([], {}), '()\n', (13237, 13239), False, 'import time\n'), ((13346, 13357), 'time.time', 'time.time', ([], {}), '()\n', (13355, 13357), False, 'import time\n'), ((13392, 13435), 'image_analogy.img_utils.deprocess_image', 'deprocess_image', (['result'], {'contrast_percent': '(0)'}), '(result, contrast_percent=0)\n', (13407, 13435), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((13484, 13527), 'scipy.misc.imsave', 'imsave', (["(output_prefix + '_best.png')", 'outimg'], {}), "(output_prefix + '_best.png', outimg)\n", (13490, 13527), False, 'from scipy.misc import imsave\n'), ((2520, 2594), 'numpy.random.uniform', 'np.random.uniform', (['(0.0)', '(1.0)', '(2, self.num_input_rows, self.num_input_cols)'], {}), '(0.0, 1.0, (2, self.num_input_rows, self.num_input_cols))\n', (2537, 2594), True, 'import numpy as np\n'), ((2712, 2758), 'numpy.zeros', 'np.zeros', (['input_shape[:2:-1]'], 
{'dtype': 'np.float32'}), '(input_shape[:2:-1], dtype=np.float32)\n', (2720, 2758), True, 'import numpy as np\n'), ((2908, 2958), 'numpy.array', 'np.array', (['[[[self.min_propagration_row]], [[0.0]]]'], {}), '([[[self.min_propagration_row]], [[0.0]]])\n', (2916, 2958), True, 'import numpy as np\n'), ((2984, 3034), 'numpy.array', 'np.array', (['[[[0.0]], [[self.min_propagration_col]]]'], {}), '([[[0.0]], [[self.min_propagration_col]]])\n', (2992, 3034), True, 'import numpy as np\n'), ((5102, 5157), 'numpy.where', 'np.where', (['(delta_similarity > 0)', 'new_coords', 'self.coords'], {}), '(delta_similarity > 0, new_coords, self.coords)\n', (5110, 5157), True, 'import numpy as np\n'), ((5184, 5247), 'numpy.where', 'np.where', (['(delta_similarity > 0)', 'new_similarity', 'self.similarity'], {}), '(delta_similarity > 0, new_similarity, self.similarity)\n', (5192, 5247), True, 'import numpy as np\n'), ((5438, 5488), 'numpy.where', 'np.where', (['(delta_similarity > 0)', 'coords_a', 'coords_b'], {}), '(delta_similarity > 0, coords_a, coords_b)\n', (5446, 5488), True, 'import numpy as np\n'), ((5515, 5573), 'numpy.where', 'np.where', (['(delta_similarity > 0)', 'similarity_a', 'similarity_b'], {}), '(delta_similarity > 0, similarity_a, similarity_b)\n', (5523, 5573), True, 'import numpy as np\n'), ((5865, 5892), 'numpy.sum', 'np.sum', (['err'], {'axis': '(2, 3, 4)'}), '(err, axis=(2, 3, 4))\n', (5871, 5892), True, 'import numpy as np\n'), ((5997, 6022), 'numpy.clip', 'np.clip', (['coords', '(0.0)', '(1.0)'], {}), '(coords, 0.0, 1.0)\n', (6004, 6022), True, 'import numpy as np\n'), ((12261, 12326), 'image_analogy.img_utils.preprocess_image', 'preprocess_image', (['content_img_img', 'content_n_cols', 'content_n_rows'], {}), '(content_img_img, content_n_cols, content_n_rows)\n', (12277, 12326), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((12991, 13002), 'time.time', 'time.time', ([], {}), '()\n', (13000, 13002), False, 'import time\n'), ((13179, 13190), 'time.time', 'time.time', ([], {}), '()\n', (13188, 13190), False, 'import time\n'), ((6119, 6146), 'numpy.expand_dims', 'np.expand_dims', (['x.shape', '(-1)'], {}), '(x.shape, -1)\n', (6133, 6146), True, 'import numpy as np\n'), ((12517, 12545), 'image_analogy.img_utils.load_image', 'load_image', (['style_image_path'], {}), '(style_image_path)\n', (12527, 12545), False, 'from image_analogy.img_utils import load_image, preprocess_image, deprocess_image\n'), ((3630, 3648), 'numpy.square', 'np.square', (['patches'], {}), '(patches)\n', (3639, 3648), True, 'import numpy as np\n'), ((6171, 6207), 'numpy.round', 'np.round', (['(coords * (x_shape[:2] - 1))'], {}), '(coords * (x_shape[:2] - 1))\n', (6179, 6207), True, 'import numpy as np\n'), ((9184, 9203), 'numpy.indices', 'np.indices', (['newdims'], {}), '(newdims)\n', (9194, 9203), True, 'import numpy as np\n'), ((9547, 9568), 'numpy.arange', 'np.arange', (['newdims[i]'], {}), '(newdims[i])\n', (9556, 9568), True, 'import numpy as np\n'), ((9732, 9760), 'numpy.arange', 'np.arange', (['i'], {'dtype': 'np.float'}), '(i, dtype=np.float)\n', (9741, 9760), True, 'import numpy as np\n'), ((4026, 4065), 'numpy.roll', 'np.roll', (['self.coords', 'roll_direction', '(1)'], {}), '(self.coords, roll_direction, 1)\n', (4033, 4065), True, 'import numpy as np\n'), ((4217, 4256), 'numpy.roll', 'np.roll', (['self.coords', 'roll_direction', '(2)'], {}), '(self.coords, roll_direction, 2)\n', (4224, 4256), True, 'import numpy as np\n'), ((10607, 10625), 'numpy.rank', 
'np.rank', (['newcoords'], {}), '(newcoords)\n', (10614, 10625), True, 'import numpy as np\n'), ((4695, 4785), 'numpy.random.uniform', 'np.random.uniform', (['(-self.random_max_radius)', 'self.random_max_radius', 'self.coords.shape'], {}), '(-self.random_max_radius, self.random_max_radius, self.\n coords.shape)\n', (4712, 4785), True, 'import numpy as np\n'), ((9335, 9352), 'numpy.array', 'np.array', (['dimlist'], {}), '(dimlist)\n', (9343, 9352), True, 'import numpy as np\n'), ((10864, 10879), 'numpy.asarray', 'np.asarray', (['old'], {}), '(old)\n', (10874, 10879), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
import socket, threading
from queue import Queue
import sys, struct
# NOTE: Use this path to create the UDS Server socket
SERVER_SOCKET_PATH = "./socket";
class Result:
def __init__(self):
self._evt = threading.Event()
self._result = None
def set_result(self, value):
self._result = value
self._evt.set()
def result(self):
self._evt.wait()
return self._result
class ActorExit(Exception):
pass
class Actor(object):
def __init__(self):
self._mailbox = Queue()
def send(self, msg):
self._mailbox.put(msg)
def recv(self):
msg = self._mailbox.get()
if msg is ActorExit:
raise ActorExit()
return msg
def close(self):
self.send(ActorExit)
def start(self):
self._terminated = threading.Event()
t = threading.Thread(target=self._bootstrap)
t.daemon = True
t.start()
def _bootstrap(self):
try:
self.run()
except ActorExit:
pass
finally:
self._terminated.set()
def join(self):
self._terminated.wait()
def run(self):
while True:
msg = self.recv()
class Worker(Actor):
def __init__(self):
super().__init__()
self.db = {}
def submit(self, values):
r = Result()
self.send((values, r))
return r
def run(self):
while True:
values, r = self.recv()
r.set_result(self.execute(values))
def execute(self, values):
cmd, *opts = values
print('[*]', cmd, opts)
if cmd == 1: #add
s, k, v = opts
self.db.setdefault(s, {})
self.db[s][k] = v
return [0]
elif cmd == 2: #remove
s, k = opts
if s in self.db and k in self.db[s]:
self.db[s].pop(k)
return [0]
elif cmd == 3: #get size
s = opts[0]
size = len(self.db[s]) if s in self.db else 0
return [1, size]
elif cmd == 4: #get value
s, k = opts
if s in self.db and k in self.db[s]:
score = self.db[s][k]
else:
score = 0
return [1, score]
elif cmd == 5: #range
*sets, _, lower, upper = opts
res = []
for s in sets:
if s not in self.db:
continue
for k,v in self.db[s].items():
if lower <= v <= upper:
res.append((k,v))
res.sort()
return [len(res)*2] + [e for kv in res for e in kv]
elif cmd == 6: #disconnect
return None
else:
raise Exception("Not supported CMD(%s)" % (cmd))
FMT = "!L"
def read_number_from_socket(connection):
return struct.unpack(FMT, connection.recv(4))[0]
def write_number_to_socket(connection, number):
connection.send(struct.pack(FMT, number))
def process_client_connection(connection, worker):
while True:
value_num = read_number_from_socket(connection)
values = []
for _ in range(value_num):
values.append(read_number_from_socket(connection))
res = worker.submit(values)
if res.result() == None:
break
for num in res.result():
write_number_to_socket(connection, num)
connection.close()
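# A minimal client sketch for the wire protocol above: each request is a count
# followed by that many unsigned 32-bit integers (format FMT); each reply starts
# with the number of payload values that follow. Command numbers mirror
# Worker.execute (1 = add, 3 = get size, 6 = disconnect). Assumes the server is
# already listening on SERVER_SOCKET_PATH.
def example_client():
    c = socket.socket(socket.AF_UNIX)
    c.connect(SERVER_SOCKET_PATH)
    def send(values):
        c.send(struct.pack(FMT, len(values)))
        for v in values:
            c.send(struct.pack(FMT, v))
    def read():
        return struct.unpack(FMT, c.recv(4))[0]
    send([1, 7, 42, 5])                  # add key 42 with score 5 to set 7
    read()                               # add replies with a zero payload count
    send([3, 7])                         # ask for the size of set 7
    size = [read() for _ in range(read())][0]
    send([6])                            # disconnect cleanly; the server sends no reply
    c.close()
    return size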
def main():
worker = Worker()
worker.start()
s = socket.socket(socket.AF_UNIX)
s.bind(SERVER_SOCKET_PATH)
s.listen(1)
while True:
cl, addr = s.accept()
t = threading.Thread(target = process_client_connection, args=(cl, worker))
t.start()
#worker.close()
s.close()
if __name__ == '__main__':
main()
| [
"socket.socket",
"struct.pack",
"threading.Event",
"threading.Thread",
"queue.Queue"
]
| [((3692, 3721), 'socket.socket', 'socket.socket', (['socket.AF_UNIX'], {}), '(socket.AF_UNIX)\n', (3705, 3721), False, 'import socket, threading\n'), ((239, 256), 'threading.Event', 'threading.Event', ([], {}), '()\n', (254, 256), False, 'import socket, threading\n'), ((564, 571), 'queue.Queue', 'Queue', ([], {}), '()\n', (569, 571), False, 'from queue import Queue\n'), ((886, 903), 'threading.Event', 'threading.Event', ([], {}), '()\n', (901, 903), False, 'import socket, threading\n'), ((916, 956), 'threading.Thread', 'threading.Thread', ([], {'target': 'self._bootstrap'}), '(target=self._bootstrap)\n', (932, 956), False, 'import socket, threading\n'), ((3114, 3138), 'struct.pack', 'struct.pack', (['FMT', 'number'], {}), '(FMT, number)\n', (3125, 3138), False, 'import sys, struct\n'), ((3827, 3896), 'threading.Thread', 'threading.Thread', ([], {'target': 'process_client_connection', 'args': '(cl, worker)'}), '(target=process_client_connection, args=(cl, worker))\n', (3843, 3896), False, 'import socket, threading\n')] |
"""
Calibrate with the ROS package aruco_detect
"""
import rospy
import roslib
from geometry_msgs.msg import Transform
class ROSArUcoCalibrate:
def __init__(self, aruco_tag_len=0.0795):
print("Please roslaunch roslaunch aruco_detect aruco_detect.launch before you run!")
self.aruco_tf_topic = "/fiducial_transforms"
self._aruco_tf_info_sub = rospy.Subscriber(self.aruco_tf_topic, Transform, self._tfCb)
self.aruco_tf = None
def _tfCb(self, tf_msg):
if tf_msg is None:
rospy.logwarn("_tfCb: tf_msg is None!")
self.aruco_tf = tf_msg
def get_tf(self):
aruco_tf = self.aruco_tf
return aruco_tf
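# A minimal polling sketch, assuming a running roscore and aruco_detect. Note that
# recent aruco_detect versions publish /fiducial_transforms as a
# fiducial_msgs/FiducialTransformArray, so the Transform message type used above may
# need to be adapted to the installed version.
if __name__ == '__main__':
    rospy.init_node('aruco_calibrate_example', anonymous=True)
    calib = ROSArUcoCalibrate()
    rate = rospy.Rate(1)  # poll once per second
    while not rospy.is_shutdown():
        tf_msg = calib.get_tf()
        if tf_msg is not None:
            rospy.loginfo("Latest fiducial transform: %s", tf_msg)
        rate.sleep()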
| [
"rospy.Subscriber",
"rospy.logwarn"
]
| [((374, 434), 'rospy.Subscriber', 'rospy.Subscriber', (['self.aruco_tf_topic', 'Transform', 'self._tfCb'], {}), '(self.aruco_tf_topic, Transform, self._tfCb)\n', (390, 434), False, 'import rospy\n'), ((533, 572), 'rospy.logwarn', 'rospy.logwarn', (['"""_tfCb: tf_msg is None!"""'], {}), "('_tfCb: tf_msg is None!')\n", (546, 572), False, 'import rospy\n')] |
from __future__ import absolute_import
__author__ = 'marafi'
def SolutionAlgorithim(OData, Dt, Tol, Steps):
#Insert within the While loop, make sure parameter "ok" is defined
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Lower Dt: %f and Tol: %f ... "'%(Dt,Tol)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch(Tolerance=0.8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton with Initial Tangent ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.Newton(Initial=True))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Broyden ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.Broyden(8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
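# These helpers only append TCL fallback blocks; they are meant to be called from a
# transient-analysis driver that has already defined `ok`. A hypothetical call
# pattern (the real driver script varies by project):
#
#   OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f]' % (Steps, Dt)))
#   SolutionAlgorithim(OData, Dt / 10., Tol, Steps * 10)
#   SolutionAlgorithimKrylovOnly(OData, Dt / 100., Tol * 10., Steps * 100)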
def SolutionAlgorithimV2(OData, Dt, Tol, Steps):
#Insert within the While loop, make sure parameter "ok" is defined
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Lower Dt: %f and Tol: %f ... "'%(Dt,Tol)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Krylov... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton(MaxDim = 6))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying NewtonLineSearch... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch(Tolerance=0.8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying NewtonLineSearch Bisection... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Bisection'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying NewtonLineSearch Secant... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Secant'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying NewtonLineSearch RegulaFalsi... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('RegulaFalsi'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %f ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def SolutionAlgorithimKrylovOnly(OData, Dt, Tol, Steps, MaxDim = 6):
#Insert within the While loop, make sure parameter "ok" is defined
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Lower Dt: %e and Tol: %e ... "'%(Dt,Tol)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Krylov... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol, 1000, 2))
# OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton(MaxDim = MaxDim))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze %d %e ]'%(Steps,Dt)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def SenSolutionAlgorithim(OData, Dt, Steps, Tol = 1e-12, KrylovMaxDim = 12, MinDt = 1e-12, NoOfIterations=3000):
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set conv_tol %e'%Tol))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set max_iter %d;'%NoOfIterations))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol, 3000, 0))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('test EnergyIncr $conv_tol $max_iter;'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('algorithm Newton;'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('integrator Newmark 0.5 0.25;'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('analysis Transient;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set dt %e;'%Dt))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set min_dt %e;'%MinDt))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set n_steps %d;'%Steps))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set cur_step 1;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set div 10.0;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set tol 1.0e-12;'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('set eigenvalue [eigen 9];'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('modalDamping 0.02;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('while {$cur_step < $n_steps} {'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol, NoOfIterations, 0))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript(' test EnergyIncr $conv_tol $max_iter;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' algorithm Newton;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set ok [analyze 1 $dt];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set dt_temp [expr $dt];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' puts "> analysis failed to converge at step $cur_step";'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' puts "> trying KrylovNewton";'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' algorithm KrylovNewton -maxDim %d;'%KrylovMaxDim))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set ok [analyze 1 $dt];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set t 0.0;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set mini_t 0.0;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set dt_temp [expr round($dt/$div/$tol)*$tol];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set mini_dt_temp 0.0;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' while {$t < $dt} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$dt_temp < $min_dt} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' puts "<< model did not converge (reason: time step less than $min_dt)";'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' puts "<< exiting safely";'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' wipe;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' exit;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$dt_temp < [expr $dt/pow($div, 2)]} {'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol*10, NoOfIterations, 0))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript(' test EnergyIncr [expr $conv_tol*10.0] $max_iter;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set ok [analyze 1 $dt_temp];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$ok == 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set t [expr round(($t + $dt_temp)/$tol)*$tol];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set mini_t [expr round(($mini_t + $dt_temp)/$tol)*$tol];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$mini_t >= $mini_dt_temp} {set dt_temp [expr round($dt_temp*$div/$tol)*$tol]};'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' } else {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set mini_t 0.0;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set mini_dt_temp [expr round($dt_temp/$tol)*$tol];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' set dt_temp [expr round($dt_temp/$div/$tol)*$tol];'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' if {$cur_step % 1 == 0} {'))
    OData.AddObject(OpenSeesAPI.TCL.TCLScript('		puts "Running Time History Step: $cur_step out of %d (Sen Algo.)";'%Steps))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' };'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript(' incr cur_step;'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('};'))
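# Summary of the script block generated above: the analysis advances one $dt step at
# a time with Newton; a failed step falls back to KrylovNewton, and if that also
# fails the step is re-run in sub-steps of $dt/$div, shrinking the sub-step by $div
# after each failure (relaxing the tolerance by 10x once it drops below $dt/$div^2),
# growing it back after sustained convergence, and exiting once it falls below $min_dt.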
def PushOverSolutionAlgorithim(OData, StepSize, Tol, ControlNode):
#Insert within the While loop, make sure parameter "ok" is defined
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Smaller Step: %f and Tol: %f ... "'%(StepSize,Tol)))
OData.AddObject(OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl(ControlNode, 1, StepSize))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch(Tolerance=0.8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton with Initial Tangent ... "'))
# OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
# OData.AddObject(OpenSeesAPI.Analysis.Algorithm.Newton(Initial=True))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
#
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Broyden ... "'))
# OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
# OData.AddObject(OpenSeesAPI.Analysis.Algorithm.Broyden(8))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
# OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search BiSection ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Bisection'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search Secant... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Secant'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search RegulaFalsi ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('RegulaFalsi'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def PushOverSolutionAlgorithimDispIncr(OData, StepSize, Tol, ControlNode):
#Insert within the While loop, make sure parameter "ok" is defined
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Smaller Step: %f and Tol: %f ... "'%(StepSize,Tol)))
OData.AddObject(OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl(ControlNode, 1, StepSize))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch(Tolerance=0.8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search BiSection ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Bisection'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search Secant... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Secant'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search RegulaFalsi ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('RegulaFalsi'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def PushOverSolutionAlgorithimConstantAlgorithm(OData, StepSize, Tol, ControlNode, Iter=1000):
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Smaller Step: %f and Tol: %f ... "'%(StepSize,Tol)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl(ControlNode, 1, StepSize))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,1000,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def PushOverSolutionAlgorithimConstantAlgorithmDispIncr(OData, StepSize, Tol, ControlNode, NoOfIterations=1000):
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Smaller Step: %f and Tol: %f ... "'%(StepSize,Tol)))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl(ControlNode, 1, StepSize))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.NormDispIncr(Tol,NoOfIterations,2))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
def PushOverSolutionAlgorithimConstantTol(OData, Tol, Iter=1000):
import OpenSeesAPI
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying KrylovNewton ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,Iter,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.KrylovNewton())
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,Iter,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch(Tolerance=0.8))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search BiSection ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,Iter,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Bisection'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search Secant... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,Iter,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('Secant'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('if {$ok != 0} {'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('puts "Trying Newton Line Search RegulaFalsi ... "'))
OData.AddObject(OpenSeesAPI.Analysis.Test.EnergyIncr(Tol,Iter,0))
OData.AddObject(OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch('RegulaFalsi'))
OData.AddObject(OpenSeesAPI.TCL.TCLScript('set ok [analyze 1]'))
    OData.AddObject(OpenSeesAPI.TCL.TCLScript('}'))
| [
"OpenSeesAPI.TCL.TCLScript",
"OpenSeesAPI.Analysis.Algorithm.KrylovNewton",
"OpenSeesAPI.Analysis.Algorithm.Newton",
"OpenSeesAPI.Analysis.Algorithm.Broyden",
"OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch",
"OpenSeesAPI.Analysis.Test.EnergyIncr",
"OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl",
"OpenSeesAPI.Analysis.Test.NormDispIncr"
]
| [((224, 268), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (249, 268), False, 'import OpenSeesAPI\n'), ((290, 379), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Lower Dt: %f and Tol: %f ... "\' % (Dt, Tol))'], {}), '(\'puts "Trying Lower Dt: %f and Tol: %f ... "\' % (\n Dt, Tol))\n', (315, 379), False, 'import OpenSeesAPI\n'), ((393, 459), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search ... \\""""'], {}), '(\'puts "Trying Newton Line Search ... "\')\n', (418, 459), False, 'import OpenSeesAPI\n'), ((481, 531), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (517, 531), False, 'import OpenSeesAPI\n'), ((551, 613), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', ([], {'Tolerance': '(0.8)'}), '(Tolerance=0.8)\n', (598, 613), False, 'import OpenSeesAPI\n'), ((635, 701), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (660, 701), False, 'import OpenSeesAPI\n'), ((720, 750), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (745, 750), False, 'import OpenSeesAPI\n'), ((773, 817), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (798, 817), False, 'import OpenSeesAPI\n'), ((839, 914), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton with Initial Tangent ... \\""""'], {}), '(\'puts "Trying Newton with Initial Tangent ... "\')\n', (864, 914), False, 'import OpenSeesAPI\n'), ((936, 988), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (974, 988), False, 'import OpenSeesAPI\n'), ((1008, 1059), 'OpenSeesAPI.Analysis.Algorithm.Newton', 'OpenSeesAPI.Analysis.Algorithm.Newton', ([], {'Initial': '(True)'}), '(Initial=True)\n', (1045, 1059), False, 'import OpenSeesAPI\n'), ((1081, 1147), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (1106, 1147), False, 'import OpenSeesAPI\n'), ((1166, 1196), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (1191, 1196), False, 'import OpenSeesAPI\n'), ((1219, 1263), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (1244, 1263), False, 'import OpenSeesAPI\n'), ((1285, 1340), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Broyden ... \\""""'], {}), '(\'puts "Trying Broyden ... 
"\')\n', (1310, 1340), False, 'import OpenSeesAPI\n'), ((1362, 1412), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (1398, 1412), False, 'import OpenSeesAPI\n'), ((1432, 1473), 'OpenSeesAPI.Analysis.Algorithm.Broyden', 'OpenSeesAPI.Analysis.Algorithm.Broyden', (['(8)'], {}), '(8)\n', (1470, 1473), False, 'import OpenSeesAPI\n'), ((1495, 1561), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (1520, 1561), False, 'import OpenSeesAPI\n'), ((1580, 1610), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (1605, 1610), False, 'import OpenSeesAPI\n'), ((1633, 1677), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (1658, 1677), False, 'import OpenSeesAPI\n'), ((1699, 1759), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (1724, 1759), False, 'import OpenSeesAPI\n'), ((1781, 1831), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (1817, 1831), False, 'import OpenSeesAPI\n'), ((1851, 1896), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (1894, 1896), False, 'import OpenSeesAPI\n'), ((1918, 1984), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (1943, 1984), False, 'import OpenSeesAPI\n'), ((2003, 2033), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (2028, 2033), False, 'import OpenSeesAPI\n'), ((2199, 2243), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (2224, 2243), False, 'import OpenSeesAPI\n'), ((2265, 2354), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Lower Dt: %f and Tol: %f ... "\' % (Dt, Tol))'], {}), '(\'puts "Trying Lower Dt: %f and Tol: %f ... "\' % (\n Dt, Tol))\n', (2290, 2354), False, 'import OpenSeesAPI\n'), ((2368, 2421), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Krylov... \\""""'], {}), '(\'puts "Trying Krylov... 
"\')\n', (2393, 2421), False, 'import OpenSeesAPI\n'), ((2443, 2493), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (2479, 2493), False, 'import OpenSeesAPI\n'), ((2513, 2566), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {'MaxDim': '(6)'}), '(MaxDim=6)\n', (2556, 2566), False, 'import OpenSeesAPI\n'), ((2590, 2656), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (2615, 2656), False, 'import OpenSeesAPI\n'), ((2675, 2705), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (2700, 2705), False, 'import OpenSeesAPI\n'), ((2728, 2772), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (2753, 2772), False, 'import OpenSeesAPI\n'), ((2794, 2857), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying NewtonLineSearch... \\""""'], {}), '(\'puts "Trying NewtonLineSearch... "\')\n', (2819, 2857), False, 'import OpenSeesAPI\n'), ((2879, 2931), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (2917, 2931), False, 'import OpenSeesAPI\n'), ((2951, 3013), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', ([], {'Tolerance': '(0.8)'}), '(Tolerance=0.8)\n', (2998, 3013), False, 'import OpenSeesAPI\n'), ((3035, 3101), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (3060, 3101), False, 'import OpenSeesAPI\n'), ((3120, 3150), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (3145, 3150), False, 'import OpenSeesAPI\n'), ((3173, 3217), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (3198, 3217), False, 'import OpenSeesAPI\n'), ((3239, 3312), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying NewtonLineSearch Bisection... \\""""'], {}), '(\'puts "Trying NewtonLineSearch Bisection... "\')\n', (3264, 3312), False, 'import OpenSeesAPI\n'), ((3334, 3384), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (3370, 3384), False, 'import OpenSeesAPI\n'), ((3404, 3464), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Bisection"""'], {}), "('Bisection')\n", (3451, 3464), False, 'import OpenSeesAPI\n'), ((3486, 3552), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (3511, 3552), False, 'import OpenSeesAPI\n'), ((3571, 3601), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (3596, 3601), False, 'import OpenSeesAPI\n'), ((3624, 3668), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (3649, 3668), False, 'import OpenSeesAPI\n'), ((3690, 3760), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying NewtonLineSearch Secant... \\""""'], {}), '(\'puts "Trying NewtonLineSearch Secant... 
"\')\n', (3715, 3760), False, 'import OpenSeesAPI\n'), ((3782, 3832), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (3818, 3832), False, 'import OpenSeesAPI\n'), ((3852, 3909), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Secant"""'], {}), "('Secant')\n", (3899, 3909), False, 'import OpenSeesAPI\n'), ((3931, 3997), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (3956, 3997), False, 'import OpenSeesAPI\n'), ((4016, 4046), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (4041, 4046), False, 'import OpenSeesAPI\n'), ((4069, 4113), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (4094, 4113), False, 'import OpenSeesAPI\n'), ((4135, 4210), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying NewtonLineSearch RegulaFalsi... \\""""'], {}), '(\'puts "Trying NewtonLineSearch RegulaFalsi... "\')\n', (4160, 4210), False, 'import OpenSeesAPI\n'), ((4232, 4282), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (4268, 4282), False, 'import OpenSeesAPI\n'), ((4302, 4364), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""RegulaFalsi"""'], {}), "('RegulaFalsi')\n", (4349, 4364), False, 'import OpenSeesAPI\n'), ((4386, 4452), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %f ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %f ]' % (Steps, Dt))\n", (4411, 4452), False, 'import OpenSeesAPI\n'), ((4471, 4501), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (4496, 4501), False, 'import OpenSeesAPI\n'), ((4687, 4731), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (4712, 4731), False, 'import OpenSeesAPI\n'), ((4753, 4842), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Lower Dt: %e and Tol: %e ... "\' % (Dt, Tol))'], {}), '(\'puts "Trying Lower Dt: %e and Tol: %e ... "\' % (\n Dt, Tol))\n', (4778, 4842), False, 'import OpenSeesAPI\n'), ((4856, 4909), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Krylov... \\""""'], {}), '(\'puts "Trying Krylov... 
"\')\n', (4881, 4909), False, 'import OpenSeesAPI\n'), ((4931, 4983), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(2)'], {}), '(Tol, 1000, 2)\n', (4969, 4983), False, 'import OpenSeesAPI\n'), ((5077, 5135), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {'MaxDim': 'MaxDim'}), '(MaxDim=MaxDim)\n', (5120, 5135), False, 'import OpenSeesAPI\n'), ((5159, 5225), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set ok [analyze %d %e ]' % (Steps, Dt))"], {}), "('set ok [analyze %d %e ]' % (Steps, Dt))\n", (5184, 5225), False, 'import OpenSeesAPI\n'), ((5244, 5274), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (5269, 5274), False, 'import OpenSeesAPI\n'), ((5433, 5483), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set conv_tol %e' % Tol)"], {}), "('set conv_tol %e' % Tol)\n", (5458, 5483), False, 'import OpenSeesAPI\n'), ((5503, 5565), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set max_iter %d;' % NoOfIterations)"], {}), "('set max_iter %d;' % NoOfIterations)\n", (5528, 5565), False, 'import OpenSeesAPI\n'), ((5585, 5637), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(3000)', '(0)'], {}), '(Tol, 3000, 0)\n', (5623, 5637), False, 'import OpenSeesAPI\n'), ((5971, 6015), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set dt %e;' % Dt)"], {}), "('set dt %e;' % Dt)\n", (5996, 6015), False, 'import OpenSeesAPI\n'), ((6035, 6086), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set min_dt %e;' % MinDt)"], {}), "('set min_dt %e;' % MinDt)\n", (6060, 6086), False, 'import OpenSeesAPI\n'), ((6106, 6158), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('set n_steps %d;' % Steps)"], {}), "('set n_steps %d;' % Steps)\n", (6131, 6158), False, 'import OpenSeesAPI\n'), ((6178, 6222), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set cur_step 1;"""'], {}), "('set cur_step 1;')\n", (6203, 6222), False, 'import OpenSeesAPI\n'), ((6244, 6286), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set div 10.0;"""'], {}), "('set div 10.0;')\n", (6269, 6286), False, 'import OpenSeesAPI\n'), ((6308, 6353), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set tol 1.0e-12;"""'], {}), "('set tol 1.0e-12;')\n", (6333, 6353), False, 'import OpenSeesAPI\n'), ((6524, 6583), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""while {$cur_step < $n_steps} {"""'], {}), "('while {$cur_step < $n_steps} {')\n", (6549, 6583), False, 'import OpenSeesAPI\n'), ((6605, 6667), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', 'NoOfIterations', '(0)'], {}), '(Tol, NoOfIterations, 0)\n', (6643, 6667), False, 'import OpenSeesAPI\n'), ((6779, 6827), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\talgorithm Newton;"""'], {}), "('\\talgorithm Newton;')\n", (6804, 6827), False, 'import OpenSeesAPI\n'), ((6848, 6902), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\tset ok [analyze 1 $dt];"""'], {}), "('\\tset ok [analyze 1 $dt];')\n", (6873, 6902), False, 'import OpenSeesAPI\n'), ((6923, 6969), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\tif {$ok != 0} {"""'], {}), "('\\tif {$ok != 0} {')\n", (6948, 6969), False, 'import OpenSeesAPI\n'), ((6990, 7046), 'OpenSeesAPI.TCL.TCLScript', 
'OpenSeesAPI.TCL.TCLScript', (['"""\t\tset dt_temp [expr $dt];"""'], {}), "('\\t\\tset dt_temp [expr $dt];')\n", (7015, 7046), False, 'import OpenSeesAPI\n'), ((7066, 7159), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\tputs "> analysis failed to converge at step $cur_step";"""'], {}), '(\n \'\\t\\tputs "> analysis failed to converge at step $cur_step";\')\n', (7091, 7159), False, 'import OpenSeesAPI\n'), ((7174, 7236), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\tputs "> trying KrylovNewton";"""'], {}), '(\'\\t\\tputs "> trying KrylovNewton";\')\n', (7199, 7236), False, 'import OpenSeesAPI\n'), ((7256, 7342), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (["('\\t\\talgorithm KrylovNewton -maxDim %d;' % KrylovMaxDim)"], {}), "('\\t\\talgorithm KrylovNewton -maxDim %d;' %\n KrylovMaxDim)\n", (7281, 7342), False, 'import OpenSeesAPI\n'), ((7356, 7412), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\tset ok [analyze 1 $dt];"""'], {}), "('\\t\\tset ok [analyze 1 $dt];')\n", (7381, 7412), False, 'import OpenSeesAPI\n'), ((7432, 7480), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\tif {$ok != 0} {"""'], {}), "('\\t\\tif {$ok != 0} {')\n", (7457, 7480), False, 'import OpenSeesAPI\n'), ((7500, 7545), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\tset t 0.0;"""'], {}), "('\\t\\t\\tset t 0.0;')\n", (7525, 7545), False, 'import OpenSeesAPI\n'), ((7564, 7614), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\tset mini_t 0.0;"""'], {}), "('\\t\\t\\tset mini_t 0.0;')\n", (7589, 7614), False, 'import OpenSeesAPI\n'), ((7633, 7718), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\tset dt_temp [expr round($dt/$div/$tol)*$tol];"""'], {}), "('\\t\\t\\tset dt_temp [expr round($dt/$div/$tol)*$tol];'\n )\n", (7658, 7718), False, 'import OpenSeesAPI\n'), ((7732, 7788), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\tset mini_dt_temp 0.0;"""'], {}), "('\\t\\t\\tset mini_dt_temp 0.0;')\n", (7757, 7788), False, 'import OpenSeesAPI\n'), ((7807, 7860), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\twhile {$t < $dt} {"""'], {}), "('\\t\\t\\twhile {$t < $dt} {')\n", (7832, 7860), False, 'import OpenSeesAPI\n'), ((7879, 7941), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\tif {$dt_temp < $min_dt} {"""'], {}), "('\\t\\t\\t\\tif {$dt_temp < $min_dt} {')\n", (7904, 7941), False, 'import OpenSeesAPI\n'), ((7959, 8079), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tputs "<< model did not converge (reason: time step less than $min_dt)";"""'], {}), '(\n \'\\t\\t\\t\\t\\tputs "<< model did not converge (reason: time step less than $min_dt)";\'\n )\n', (7984, 8079), False, 'import OpenSeesAPI\n'), ((8086, 8150), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tputs "<< exiting safely";"""'], {}), '(\'\\t\\t\\t\\t\\tputs "<< exiting safely";\')\n', (8111, 8150), False, 'import OpenSeesAPI\n'), ((8167, 8211), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\twipe;"""'], {}), "('\\t\\t\\t\\t\\twipe;')\n", (8192, 8211), False, 'import OpenSeesAPI\n'), ((8228, 8272), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\texit;"""'], {}), "('\\t\\t\\t\\t\\texit;')\n", (8253, 8272), False, 'import OpenSeesAPI\n'), ((8289, 8328), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t};"""'], {}), 
"('\\t\\t\\t\\t};')\n", (8314, 8328), False, 'import OpenSeesAPI\n'), ((8346, 8424), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\tif {$dt_temp < [expr $dt/pow($div, 2)]} {"""'], {}), "('\\t\\t\\t\\tif {$dt_temp < [expr $dt/pow($div, 2)]} {')\n", (8371, 8424), False, 'import OpenSeesAPI\n'), ((8442, 8509), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['(Tol * 10)', 'NoOfIterations', '(0)'], {}), '(Tol * 10, NoOfIterations, 0)\n', (8480, 8509), False, 'import OpenSeesAPI\n'), ((8635, 8674), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t};"""'], {}), "('\\t\\t\\t\\t};')\n", (8660, 8674), False, 'import OpenSeesAPI\n'), ((8692, 8757), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\tset ok [analyze 1 $dt_temp];"""'], {}), "('\\t\\t\\t\\tset ok [analyze 1 $dt_temp];')\n", (8717, 8757), False, 'import OpenSeesAPI\n'), ((8775, 8827), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\tif {$ok == 0} {"""'], {}), "('\\t\\t\\t\\tif {$ok == 0} {')\n", (8800, 8827), False, 'import OpenSeesAPI\n'), ((8845, 8935), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tset t [expr round(($t + $dt_temp)/$tol)*$tol];"""'], {}), "(\n '\\t\\t\\t\\t\\tset t [expr round(($t + $dt_temp)/$tol)*$tol];')\n", (8870, 8935), False, 'import OpenSeesAPI\n'), ((8947, 9047), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tset mini_t [expr round(($mini_t + $dt_temp)/$tol)*$tol];"""'], {}), "(\n '\\t\\t\\t\\t\\tset mini_t [expr round(($mini_t + $dt_temp)/$tol)*$tol];')\n", (8972, 9047), False, 'import OpenSeesAPI\n'), ((9059, 9190), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tif {$mini_t >= $mini_dt_temp} {set dt_temp [expr round($dt_temp*$div/$tol)*$tol]};"""'], {}), "(\n '\\t\\t\\t\\t\\tif {$mini_t >= $mini_dt_temp} {set dt_temp [expr round($dt_temp*$div/$tol)*$tol]};'\n )\n", (9084, 9190), False, 'import OpenSeesAPI\n'), ((9197, 9242), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t} else {"""'], {}), "('\\t\\t\\t\\t} else {')\n", (9222, 9242), False, 'import OpenSeesAPI\n'), ((9260, 9314), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tset mini_t 0.0;"""'], {}), "('\\t\\t\\t\\t\\tset mini_t 0.0;')\n", (9285, 9314), False, 'import OpenSeesAPI\n'), ((9331, 9425), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tset mini_dt_temp [expr round($dt_temp/$tol)*$tol];"""'], {}), "(\n '\\t\\t\\t\\t\\tset mini_dt_temp [expr round($dt_temp/$tol)*$tol];')\n", (9356, 9425), False, 'import OpenSeesAPI\n'), ((9437, 9531), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t\tset dt_temp [expr round($dt_temp/$div/$tol)*$tol];"""'], {}), "(\n '\\t\\t\\t\\t\\tset dt_temp [expr round($dt_temp/$div/$tol)*$tol];')\n", (9462, 9531), False, 'import OpenSeesAPI\n'), ((9543, 9582), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t\t};"""'], {}), "('\\t\\t\\t\\t};')\n", (9568, 9582), False, 'import OpenSeesAPI\n'), ((9600, 9637), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t\t};"""'], {}), "('\\t\\t\\t};')\n", (9625, 9637), False, 'import OpenSeesAPI\n'), ((9656, 9691), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t\t};"""'], {}), "('\\t\\t};')\n", (9681, 9691), False, 'import OpenSeesAPI\n'), ((9711, 9744), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', 
(['"""\t};"""'], {}), "('\\t};')\n", (9736, 9744), False, 'import OpenSeesAPI\n'), ((9765, 9821), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\tif {$cur_step % 1 == 0} {"""'], {}), "('\\tif {$cur_step % 1 == 0} {')\n", (9790, 9821), False, 'import OpenSeesAPI\n'), ((9842, 9957), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'\\t\\tputs "Running Tim History Step: $cur_step out of %d (Sen Algo.)";\' %\n Steps)'], {}), '(\n \'\\t\\tputs "Running Tim History Step: $cur_step out of %d (Sen Algo.)";\' %\n Steps)\n', (9867, 9957), False, 'import OpenSeesAPI\n'), ((9966, 9999), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\t};"""'], {}), "('\\t};')\n", (9991, 9999), False, 'import OpenSeesAPI\n'), ((10020, 10065), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""\tincr cur_step;"""'], {}), "('\\tincr cur_step;')\n", (10045, 10065), False, 'import OpenSeesAPI\n'), ((10086, 10117), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""};"""'], {}), "('};')\n", (10111, 10117), False, 'import OpenSeesAPI\n'), ((10302, 10346), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (10327, 10346), False, 'import OpenSeesAPI\n'), ((10368, 10466), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' % (StepSize, Tol))'], {}), '(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' %\n (StepSize, Tol))\n', (10393, 10466), False, 'import OpenSeesAPI\n'), ((10482, 10570), 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', (['ControlNode', '(1)', 'StepSize'], {}), '(ControlNode, 1,\n StepSize)\n', (10540, 10570), False, 'import OpenSeesAPI\n'), ((10589, 10619), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (10614, 10619), False, 'import OpenSeesAPI\n'), ((10642, 10686), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (10667, 10686), False, 'import OpenSeesAPI\n'), ((10708, 10768), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (10733, 10768), False, 'import OpenSeesAPI\n'), ((10790, 10840), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (10826, 10840), False, 'import OpenSeesAPI\n'), ((10860, 10905), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (10903, 10905), False, 'import OpenSeesAPI\n'), ((10927, 10974), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (10952, 10974), False, 'import OpenSeesAPI\n'), ((10996, 11026), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (11021, 11026), False, 'import OpenSeesAPI\n'), ((11049, 11093), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (11074, 11093), False, 'import OpenSeesAPI\n'), ((11115, 11181), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search ... \\""""'], {}), '(\'puts "Trying Newton Line Search ... 
"\')\n', (11140, 11181), False, 'import OpenSeesAPI\n'), ((11203, 11253), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (11239, 11253), False, 'import OpenSeesAPI\n'), ((11273, 11335), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', ([], {'Tolerance': '(0.8)'}), '(Tolerance=0.8)\n', (11320, 11335), False, 'import OpenSeesAPI\n'), ((11357, 11404), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (11382, 11404), False, 'import OpenSeesAPI\n'), ((11426, 11456), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (11451, 11456), False, 'import OpenSeesAPI\n'), ((12336, 12380), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (12361, 12380), False, 'import OpenSeesAPI\n'), ((12402, 12478), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search BiSection ... \\""""'], {}), '(\'puts "Trying Newton Line Search BiSection ... "\')\n', (12427, 12478), False, 'import OpenSeesAPI\n'), ((12500, 12550), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (12536, 12550), False, 'import OpenSeesAPI\n'), ((12570, 12630), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Bisection"""'], {}), "('Bisection')\n", (12617, 12630), False, 'import OpenSeesAPI\n'), ((12652, 12699), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (12677, 12699), False, 'import OpenSeesAPI\n'), ((12721, 12751), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (12746, 12751), False, 'import OpenSeesAPI\n'), ((12774, 12818), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (12799, 12818), False, 'import OpenSeesAPI\n'), ((12840, 12912), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search Secant... \\""""'], {}), '(\'puts "Trying Newton Line Search Secant... "\')\n', (12865, 12912), False, 'import OpenSeesAPI\n'), ((12934, 12984), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (12970, 12984), False, 'import OpenSeesAPI\n'), ((13004, 13061), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Secant"""'], {}), "('Secant')\n", (13051, 13061), False, 'import OpenSeesAPI\n'), ((13083, 13130), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (13108, 13130), False, 'import OpenSeesAPI\n'), ((13152, 13182), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (13177, 13182), False, 'import OpenSeesAPI\n'), ((13205, 13249), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (13230, 13249), False, 'import OpenSeesAPI\n'), ((13271, 13349), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search RegulaFalsi ... \\""""'], {}), '(\'puts "Trying Newton Line Search RegulaFalsi ... 
"\')\n', (13296, 13349), False, 'import OpenSeesAPI\n'), ((13371, 13421), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (13407, 13421), False, 'import OpenSeesAPI\n'), ((13441, 13503), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""RegulaFalsi"""'], {}), "('RegulaFalsi')\n", (13488, 13503), False, 'import OpenSeesAPI\n'), ((13525, 13572), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (13550, 13572), False, 'import OpenSeesAPI\n'), ((13594, 13624), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (13619, 13624), False, 'import OpenSeesAPI\n'), ((13816, 13860), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (13841, 13860), False, 'import OpenSeesAPI\n'), ((13882, 13980), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' % (StepSize, Tol))'], {}), '(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' %\n (StepSize, Tol))\n', (13907, 13980), False, 'import OpenSeesAPI\n'), ((13996, 14084), 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', (['ControlNode', '(1)', 'StepSize'], {}), '(ControlNode, 1,\n StepSize)\n', (14054, 14084), False, 'import OpenSeesAPI\n'), ((14103, 14133), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (14128, 14133), False, 'import OpenSeesAPI\n'), ((14156, 14200), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (14181, 14200), False, 'import OpenSeesAPI\n'), ((14222, 14282), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (14247, 14282), False, 'import OpenSeesAPI\n'), ((14304, 14356), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (14342, 14356), False, 'import OpenSeesAPI\n'), ((14376, 14421), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (14419, 14421), False, 'import OpenSeesAPI\n'), ((14443, 14490), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (14468, 14490), False, 'import OpenSeesAPI\n'), ((14512, 14542), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (14537, 14542), False, 'import OpenSeesAPI\n'), ((14565, 14609), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (14590, 14609), False, 'import OpenSeesAPI\n'), ((14631, 14697), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search ... \\""""'], {}), '(\'puts "Trying Newton Line Search ... 
"\')\n', (14656, 14697), False, 'import OpenSeesAPI\n'), ((14719, 14771), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (14757, 14771), False, 'import OpenSeesAPI\n'), ((14791, 14853), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', ([], {'Tolerance': '(0.8)'}), '(Tolerance=0.8)\n', (14838, 14853), False, 'import OpenSeesAPI\n'), ((14875, 14922), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (14900, 14922), False, 'import OpenSeesAPI\n'), ((14944, 14974), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (14969, 14974), False, 'import OpenSeesAPI\n'), ((14997, 15041), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (15022, 15041), False, 'import OpenSeesAPI\n'), ((15063, 15139), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search BiSection ... \\""""'], {}), '(\'puts "Trying Newton Line Search BiSection ... "\')\n', (15088, 15139), False, 'import OpenSeesAPI\n'), ((15161, 15213), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (15199, 15213), False, 'import OpenSeesAPI\n'), ((15233, 15293), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Bisection"""'], {}), "('Bisection')\n", (15280, 15293), False, 'import OpenSeesAPI\n'), ((15315, 15362), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (15340, 15362), False, 'import OpenSeesAPI\n'), ((15384, 15414), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (15409, 15414), False, 'import OpenSeesAPI\n'), ((15437, 15481), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (15462, 15481), False, 'import OpenSeesAPI\n'), ((15503, 15575), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search Secant... \\""""'], {}), '(\'puts "Trying Newton Line Search Secant... "\')\n', (15528, 15575), False, 'import OpenSeesAPI\n'), ((15597, 15649), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (15635, 15649), False, 'import OpenSeesAPI\n'), ((15669, 15726), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Secant"""'], {}), "('Secant')\n", (15716, 15726), False, 'import OpenSeesAPI\n'), ((15748, 15795), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (15773, 15795), False, 'import OpenSeesAPI\n'), ((15817, 15847), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (15842, 15847), False, 'import OpenSeesAPI\n'), ((15870, 15914), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (15895, 15914), False, 'import OpenSeesAPI\n'), ((15936, 16014), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search RegulaFalsi ... \\""""'], {}), '(\'puts "Trying Newton Line Search RegulaFalsi ... 
"\')\n', (15961, 16014), False, 'import OpenSeesAPI\n'), ((16036, 16088), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (16074, 16088), False, 'import OpenSeesAPI\n'), ((16108, 16170), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""RegulaFalsi"""'], {}), "('RegulaFalsi')\n", (16155, 16170), False, 'import OpenSeesAPI\n'), ((16192, 16239), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (16217, 16239), False, 'import OpenSeesAPI\n'), ((16261, 16291), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (16286, 16291), False, 'import OpenSeesAPI\n'), ((16432, 16476), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (16457, 16476), False, 'import OpenSeesAPI\n'), ((16498, 16596), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' % (StepSize, Tol))'], {}), '(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' %\n (StepSize, Tol))\n', (16523, 16596), False, 'import OpenSeesAPI\n'), ((16611, 16641), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (16636, 16641), False, 'import OpenSeesAPI\n'), ((16664, 16752), 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', (['ControlNode', '(1)', 'StepSize'], {}), '(ControlNode, 1,\n StepSize)\n', (16722, 16752), False, 'import OpenSeesAPI\n'), ((16771, 16815), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (16796, 16815), False, 'import OpenSeesAPI\n'), ((16837, 16897), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (16862, 16897), False, 'import OpenSeesAPI\n'), ((16919, 16969), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', '(1000)', '(0)'], {}), '(Tol, 1000, 0)\n', (16955, 16969), False, 'import OpenSeesAPI\n'), ((16989, 17034), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (17032, 17034), False, 'import OpenSeesAPI\n'), ((17056, 17103), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (17081, 17103), False, 'import OpenSeesAPI\n'), ((17125, 17155), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (17150, 17155), False, 'import OpenSeesAPI\n'), ((17314, 17358), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (17339, 17358), False, 'import OpenSeesAPI\n'), ((17380, 17478), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['(\'puts "Trying Smaller Step: %f and Tol: %f ... "\' % (StepSize, Tol))'], {}), '(\'puts "Trying Smaller Step: %f and Tol: %f ... 
"\' %\n (StepSize, Tol))\n', (17405, 17478), False, 'import OpenSeesAPI\n'), ((17493, 17523), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (17518, 17523), False, 'import OpenSeesAPI\n'), ((17546, 17634), 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', 'OpenSeesAPI.Analysis.Integrator.Static.DisplacementControl', (['ControlNode', '(1)', 'StepSize'], {}), '(ControlNode, 1,\n StepSize)\n', (17604, 17634), False, 'import OpenSeesAPI\n'), ((17653, 17697), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (17678, 17697), False, 'import OpenSeesAPI\n'), ((17719, 17779), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (17744, 17779), False, 'import OpenSeesAPI\n'), ((17801, 17863), 'OpenSeesAPI.Analysis.Test.NormDispIncr', 'OpenSeesAPI.Analysis.Test.NormDispIncr', (['Tol', 'NoOfIterations', '(2)'], {}), '(Tol, NoOfIterations, 2)\n', (17839, 17863), False, 'import OpenSeesAPI\n'), ((17883, 17928), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (17926, 17928), False, 'import OpenSeesAPI\n'), ((17950, 17997), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (17975, 17997), False, 'import OpenSeesAPI\n'), ((18019, 18049), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (18044, 18049), False, 'import OpenSeesAPI\n'), ((18161, 18205), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (18186, 18205), False, 'import OpenSeesAPI\n'), ((18227, 18287), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying KrylovNewton ... \\""""'], {}), '(\'puts "Trying KrylovNewton ... "\')\n', (18252, 18287), False, 'import OpenSeesAPI\n'), ((18309, 18359), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', 'Iter', '(0)'], {}), '(Tol, Iter, 0)\n', (18345, 18359), False, 'import OpenSeesAPI\n'), ((18379, 18424), 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', 'OpenSeesAPI.Analysis.Algorithm.KrylovNewton', ([], {}), '()\n', (18422, 18424), False, 'import OpenSeesAPI\n'), ((18446, 18493), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (18471, 18493), False, 'import OpenSeesAPI\n'), ((18515, 18545), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (18540, 18545), False, 'import OpenSeesAPI\n'), ((18568, 18612), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (18593, 18612), False, 'import OpenSeesAPI\n'), ((18634, 18700), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search ... \\""""'], {}), '(\'puts "Trying Newton Line Search ... 
"\')\n', (18659, 18700), False, 'import OpenSeesAPI\n'), ((18722, 18772), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', 'Iter', '(0)'], {}), '(Tol, Iter, 0)\n', (18758, 18772), False, 'import OpenSeesAPI\n'), ((18792, 18854), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', ([], {'Tolerance': '(0.8)'}), '(Tolerance=0.8)\n', (18839, 18854), False, 'import OpenSeesAPI\n'), ((18876, 18923), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (18901, 18923), False, 'import OpenSeesAPI\n'), ((18945, 18975), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (18970, 18975), False, 'import OpenSeesAPI\n'), ((18998, 19042), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (19023, 19042), False, 'import OpenSeesAPI\n'), ((19064, 19140), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search BiSection ... \\""""'], {}), '(\'puts "Trying Newton Line Search BiSection ... "\')\n', (19089, 19140), False, 'import OpenSeesAPI\n'), ((19162, 19212), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', 'Iter', '(0)'], {}), '(Tol, Iter, 0)\n', (19198, 19212), False, 'import OpenSeesAPI\n'), ((19232, 19292), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Bisection"""'], {}), "('Bisection')\n", (19279, 19292), False, 'import OpenSeesAPI\n'), ((19314, 19361), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (19339, 19361), False, 'import OpenSeesAPI\n'), ((19383, 19413), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (19408, 19413), False, 'import OpenSeesAPI\n'), ((19436, 19480), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (19461, 19480), False, 'import OpenSeesAPI\n'), ((19502, 19574), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search Secant... \\""""'], {}), '(\'puts "Trying Newton Line Search Secant... "\')\n', (19527, 19574), False, 'import OpenSeesAPI\n'), ((19596, 19646), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', 'Iter', '(0)'], {}), '(Tol, Iter, 0)\n', (19632, 19646), False, 'import OpenSeesAPI\n'), ((19666, 19723), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""Secant"""'], {}), "('Secant')\n", (19713, 19723), False, 'import OpenSeesAPI\n'), ((19745, 19792), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (19770, 19792), False, 'import OpenSeesAPI\n'), ((19814, 19844), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (19839, 19844), False, 'import OpenSeesAPI\n'), ((19867, 19911), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""if {$ok != 0} {"""'], {}), "('if {$ok != 0} {')\n", (19892, 19911), False, 'import OpenSeesAPI\n'), ((19933, 20011), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""puts "Trying Newton Line Search RegulaFalsi ... \\""""'], {}), '(\'puts "Trying Newton Line Search RegulaFalsi ... 
"\')\n', (19958, 20011), False, 'import OpenSeesAPI\n'), ((20033, 20083), 'OpenSeesAPI.Analysis.Test.EnergyIncr', 'OpenSeesAPI.Analysis.Test.EnergyIncr', (['Tol', 'Iter', '(0)'], {}), '(Tol, Iter, 0)\n', (20069, 20083), False, 'import OpenSeesAPI\n'), ((20103, 20165), 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', 'OpenSeesAPI.Analysis.Algorithm.NewtonLineSearch', (['"""RegulaFalsi"""'], {}), "('RegulaFalsi')\n", (20150, 20165), False, 'import OpenSeesAPI\n'), ((20187, 20234), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""set ok [analyze 1]"""'], {}), "('set ok [analyze 1]')\n", (20212, 20234), False, 'import OpenSeesAPI\n'), ((20256, 20286), 'OpenSeesAPI.TCL.TCLScript', 'OpenSeesAPI.TCL.TCLScript', (['"""}"""'], {}), "('}')\n", (20281, 20286), False, 'import OpenSeesAPI\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) Polyconseil SAS. All rights reserved.
import hashlib
import json
import logging
import os
import re
from .html import html_config, HtmlHarvester # pylint: disable=unused-import
from .sphinx import ( # pylint: disable=unused-import
sphinx_config, sphinx_rtd_config,
SphinxHarvester, ReadTheDocsSphinxHarvester
)
logger = logging.getLogger(__name__)
def _must_process_path(path, include, exclude):
for exp in include:
if exp.match(path):
return True
for exp in exclude:
if exp.match(path):
return False
return True
def _compute_hash(path):
h = hashlib.md5()
with open(path, 'rb') as fp:
while 1:
buff = fp.read(8192)
if not buff:
break
h.update(buff)
return h.hexdigest()
def harvest_set(base_dir, doc_set, config, hashes, force):
"""Harvest a document set and return documents as dictionaries.
``config`` is the harvester configuration. It should contain a key
for each supported file extensions. ``hashes`` is a dictionary
that links the path of each indexed file to its hash. It is used
to decide whether the document should be indexed again. ``force``
    indicates whether to reindex a document even if it has not been
modified since the last indexation.
This function is a generator. It yields dictionaries. Each
dictionary should represent a document and contain the following
keys in addition to the keys returned by the harvester itself.
Each text-like value should be a string (in Python 3) or a unicode
object (in Python 2).
path
The path of the document relative to the root of the document
set.
set
The id of the document set. It should be ``doc_set``.
"""
config_copy = config.copy()
include = [re.compile(exp) for exp in config_copy.pop('include') or ()]
exclude = [re.compile(exp) for exp in config_copy.pop('exclude') or ()]
extensions = config_copy
for dir_path, _dir_names, file_names in os.walk(base_dir):
for filename in file_names:
path = os.path.join(dir_path, filename)
relative_path = os.path.relpath(path, base_dir)
if not _must_process_path(relative_path, include, exclude):
logger.debug('Excluded file "%s": include/exclude rules.', relative_path)
continue
_, extension = os.path.splitext(filename)
extension = extension.lstrip('.') # remove leading dot
harvester_class = extensions.get(extension)
if harvester_class is None:
logger.debug('Excluded file "%s": no harvester found for %s.', relative_path, extension)
continue
current_hash = _compute_hash(path)
indexed_hash = hashes.get(relative_path)
if not force and (indexed_hash == current_hash):
logger.debug('Excluded file: "%s": not modified since last indexation.', relative_path)
continue
try:
logger.debug('Indexing file "%s"', relative_path)
doc = harvester_class().harvest_file(path)
except Exception: # pylint: disable=broad-except
logger.exception("Could not index document %s", path)
else:
if doc:
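                    # Remember the set's title (taken from its index page) in a .dokang marker file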
if relative_path == 'index.html':
with open(os.path.join(base_dir, '.dokang'), 'w') as fp:
json.dump({'title': doc['title']}, fp)
doc['path'] = relative_path
doc['set'] = doc_set
doc['hash'] = current_hash
yield doc
| [
"logging.getLogger",
"hashlib.md5",
"re.compile",
"json.dump",
"os.path.join",
"os.path.splitext",
"os.walk",
"os.path.relpath"
]
| [((374, 401), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (391, 401), False, 'import logging\n'), ((656, 669), 'hashlib.md5', 'hashlib.md5', ([], {}), '()\n', (667, 669), False, 'import hashlib\n'), ((2091, 2108), 'os.walk', 'os.walk', (['base_dir'], {}), '(base_dir)\n', (2098, 2108), False, 'import os\n'), ((1881, 1896), 're.compile', 're.compile', (['exp'], {}), '(exp)\n', (1891, 1896), False, 'import re\n'), ((1957, 1972), 're.compile', 're.compile', (['exp'], {}), '(exp)\n', (1967, 1972), False, 'import re\n'), ((2165, 2197), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (2177, 2197), False, 'import os\n'), ((2226, 2257), 'os.path.relpath', 'os.path.relpath', (['path', 'base_dir'], {}), '(path, base_dir)\n', (2241, 2257), False, 'import os\n'), ((2472, 2498), 'os.path.splitext', 'os.path.splitext', (['filename'], {}), '(filename)\n', (2488, 2498), False, 'import os\n'), ((3562, 3600), 'json.dump', 'json.dump', (["{'title': doc['title']}", 'fp'], {}), "({'title': doc['title']}, fp)\n", (3571, 3600), False, 'import json\n'), ((3487, 3520), 'os.path.join', 'os.path.join', (['base_dir', '""".dokang"""'], {}), "(base_dir, '.dokang')\n", (3499, 3520), False, 'import os\n')] |
# Generated by Django 2.0.4 on 2019-05-21 16:51
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('carPooling', '0017_carpoolingrecunbook'),
]
operations = [
migrations.AlterField(
model_name='carpoolinguserconf',
name='c_name',
field=models.CharField(max_length=128, null=True, verbose_name='真实姓名'),
),
migrations.AlterField(
model_name='carpoolinguserconf',
name='c_phone',
field=models.CharField(db_index=True, max_length=11, verbose_name='电话号码'),
),
migrations.AlterField(
model_name='carpoolinguserconf',
name='c_weixin_id',
field=models.CharField(db_index=True, max_length=128, null=True, verbose_name='微信id'),
),
]
| [
"django.db.models.CharField"
]
| [((352, 416), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(128)', 'null': '(True)', 'verbose_name': '"""真实姓名"""'}), "(max_length=128, null=True, verbose_name='真实姓名')\n", (368, 416), False, 'from django.db import migrations, models\n'), ((551, 618), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(11)', 'verbose_name': '"""电话号码"""'}), "(db_index=True, max_length=11, verbose_name='电话号码')\n", (567, 618), False, 'from django.db import migrations, models\n'), ((757, 836), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(128)', 'null': '(True)', 'verbose_name': '"""微信id"""'}), "(db_index=True, max_length=128, null=True, verbose_name='微信id')\n", (773, 836), False, 'from django.db import migrations, models\n')] |
import bz2
import csv
import collections.abc
import math
from enum import Enum
class Select(Enum):
FIRST = 'first'
RANGE_KEY = 'range_key'
RANGE_VALUE = 'range_value'
class SelectPolicy:
def __init__(self, policy, field=None):
self.policy = policy
self.field = field
class StateSet:
""" Wrapper for set of episode val/test states """
def __init__(self, scenes_file=None, states_files=None,
scene_filter=None, episode_filter=None, max_states_per_scene=None,
select_policy=SelectPolicy(Select.FIRST)):
self.states = []
self.scenes = []
self.scenes_by_id = {}
self.states_by_scene = {}
self.select_policy = select_policy
if scenes_file:
self._load_scenes(scenes_file, scene_filter)
if states_files:
if type(states_files) is str:
self._load_states(states_files, max_states_per_scene, episode_filter)
            elif isinstance(states_files, collections.abc.Iterable):
for states_file in states_files:
self._load_states(states_file, max_states_per_scene, episode_filter)
self._embed_states_in_scenes()
def get_splits(self, max_states_per_scene=None):
"""Get dictionary of StateSets keyed by scene 'set' i.e. dataset split"""
scenes_by_split = {}
for scene in self.scenes:
scenes_by_split.setdefault(scene['set'], []).append(scene)
state_sets_dict = {}
for split, scenes in scenes_by_split.items():
ss = StateSet()
ss._populate_from_lists(scenes, self.states_by_scene, max_states_per_scene)
state_sets_dict[split] = ss
return state_sets_dict
def get_scenes(self):
return self.scenes
def get_states(self):
return self.states
def get_states_by_scene_id(self, scene_id):
return self.states_by_scene[scene_id]
def _select_n_states(self, states, n):
# Select n states from big list of states
policy = self.select_policy.policy
field = self.select_policy.field
if n is not None and n < len(states):
if policy == Select.FIRST:
if field is not None:
# sort by field
states = sorted(states, key=lambda x: x[field])
return states[:n]
elif policy == Select.RANGE_KEY:
# sort by field
states = sorted(states, key=lambda x: x[field])
# select by evenly dividing indices
r = len(states)/float(n)
selected = []
for i in range(n):
si = int(math.floor(math.ceil(r*i)/2))
selected.append(states[si])
return selected
elif policy == Select.RANGE_VALUE:
# sort by field and get range (value)
states = sorted(states, key=lambda x: x[field])
fmin = states[0][field]
fmax = states[-1][field]
# print('Range is %f to %f' % (fmin,fmax))
# from range, divide up into n buckets
r = (fmax-fmin)/float(n)
buckets = []
for i in range(n):
buckets.append([])
for state in states:
bi = int(min(math.ceil((state[field] - fmin)/r), n-1))
buckets[bi].append(state)
# make sure all buckets have something
for i, bucket in enumerate(buckets):
if len(bucket) == 0:
# print('Nothing in bucket %d' % i)
# still some from other buckets
pi = max(i-1, 0)
ni = min(i+1, n-1)
nlen = len(buckets[ni])
plen = len(buckets[pi])
if nlen > plen:
# take half from bucket[ni] and put in current bucket
k = math.floor(nlen/2)
buckets[i] = buckets[ni][:k]
buckets[ni] = buckets[ni][k:]
else:
k = math.floor(plen/2)
buckets[i] = buckets[pi][:k]
buckets[pi] = buckets[pi][k:]
selected = []
for bucket in buckets:
bii = math.floor(len(bucket)/2)
selected.append(bucket[bii])
return selected
else:
raise ValueError('Unsupported select_policy ' + policy)
else:
return states
def _populate_from_lists(self, my_scenes, my_states_by_scene, max_states_per_scene):
self.scenes = my_scenes
for scene in my_scenes:
scene_id = scene['id']
self.scenes_by_id[scene_id] = scene
if scene_id in my_states_by_scene:
my_states = self._select_n_states(my_states_by_scene[scene_id], max_states_per_scene)
self.states_by_scene[scene_id] = my_states
self.states += my_states
def _load_scenes(self, filename, scene_filter):
with bz2.open(filename, 'rt') if filename.endswith('bz2') else open(filename) as f:
reader = csv.DictReader(f)
self.scenes = []
for r in reader:
for v in ['nrooms', 'nobjects', 'nlevels']:
if v in r:
r[v] = int(r[v])
for v in ['dimX', 'dimY', 'dimZ', 'floorArea']:
if v in r:
r[v] = float(r[v])
if scene_filter and not scene_filter(r):
continue
self.scenes.append(r)
self.scenes_by_id[r['id']] = r
self.scenes.sort(key=lambda x: x['nobjects'])
def _load_states(self, filename, max_states_per_scene, state_filter):
with bz2.open(filename, 'rt') if filename.endswith('bz2') else open(filename) as f:
reader = csv.DictReader(f)
all_states = [r for r in reader]
# Convert scene state and group by sceneId
counter = 0
for r in all_states:
for v in ['startX', 'startY', 'startZ', 'startAngle', 'goalX', 'goalY', 'goalZ', 'dist', 'pathDist']:
r[v] = float(r[v]) if v in r else None
for v in ['episodeId', 'pathNumDoors', 'pathNumRooms', 'level']:
r[v] = int(r[v]) if v in r else None
scene_id = r['sceneId']
scene_states = self.states_by_scene.setdefault(scene_id, [])
rec = {
'episode_id': counter,
'scene_id': r['sceneId'],
'room_id': r['roomId'],
'start': {'position': [r['startX'], r['startY'], r['startZ']], 'angle': r['startAngle']},
'goal': {'id': r['goalObjectId'], 'position': [r['goalX'], r['goalY'], r['goalZ']]},
'dist': r['dist']
}
for k in ['pathDist', 'pathNumRooms', 'pathRoomIds', 'pathNumDoors', 'pathDoorIds', 'level']:
if k in r:
rec[k] = r[k]
if not state_filter or state_filter(rec):
scene_states.append(rec)
counter = counter + 1
# Filter down to states per scene and create big list of all scenes
states = []
for scene_id, scene_states in self.states_by_scene.items():
self.states_by_scene[scene_id] = self._select_n_states(scene_states, max_states_per_scene)
states += self.states_by_scene[scene_id]
self.states = states
def _embed_states_in_scenes(self):
for state in self.states:
scene_id = state['scene_id']
if scene_id in self.scenes_by_id:
self.scenes_by_id[scene_id].setdefault('states', []).append(state)
scenes_with_no_states = []
for i, scene in enumerate(self.scenes):
if 'states' not in scene or len(scene['states']) == 0:
scenes_with_no_states.append(scene['id'])
del self.scenes_by_id[scene['id']]
self.scenes = [s for s in self.scenes if s['id'] not in scenes_with_no_states]
#print('Removed scenes with no episode states: ' + ','.join(scenes_with_no_states))
def main():
import argparse
# Argument processing
parser = argparse.ArgumentParser(description='Load state set')
parser.add_argument('-n', '--limit',
type=int,
help='Number of states per scene')
parser.add_argument('--select',
default=Select.FIRST,
type=Select,
help='Number of states per scene')
parser.add_argument('--field',
default=None,
help='Field to use for selection')
parser.add_argument('--scenes',
type=str,
default=None,
help='Scenes file to load')
parser.add_argument('input',
help='Input file to load')
args = parser.parse_args()
state_set = StateSet(scenes_file=args.scenes,
states_files=args.input,
max_states_per_scene=args.limit,
select_policy=SelectPolicy(args.select, args.field))
for state in state_set.states:
print(state)
if __name__ == "__main__":
main()
| [
"csv.DictReader",
"math.ceil",
"argparse.ArgumentParser",
"math.floor",
"bz2.open"
]
| [((8663, 8716), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Load state set"""'}), "(description='Load state set')\n", (8686, 8716), False, 'import argparse\n'), ((5410, 5427), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (5424, 5427), False, 'import csv\n'), ((6173, 6190), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (6187, 6190), False, 'import csv\n'), ((5310, 5334), 'bz2.open', 'bz2.open', (['filename', '"""rt"""'], {}), "(filename, 'rt')\n", (5318, 5334), False, 'import bz2\n'), ((6073, 6097), 'bz2.open', 'bz2.open', (['filename', '"""rt"""'], {}), "(filename, 'rt')\n", (6081, 6097), False, 'import bz2\n'), ((2736, 2752), 'math.ceil', 'math.ceil', (['(r * i)'], {}), '(r * i)\n', (2745, 2752), False, 'import math\n'), ((3409, 3445), 'math.ceil', 'math.ceil', (['((state[field] - fmin) / r)'], {}), '((state[field] - fmin) / r)\n', (3418, 3445), False, 'import math\n'), ((4096, 4116), 'math.floor', 'math.floor', (['(nlen / 2)'], {}), '(nlen / 2)\n', (4106, 4116), False, 'import math\n'), ((4292, 4312), 'math.floor', 'math.floor', (['(plen / 2)'], {}), '(plen / 2)\n', (4302, 4312), False, 'import math\n')] |
from hpcrocket.core.filesystem import Filesystem, FilesystemFactory
from hpcrocket.core.launchoptions import Options
from hpcrocket.pyfilesystem.localfilesystem import LocalFilesystem
from hpcrocket.pyfilesystem.sshfilesystem import SSHFilesystem
class PyFilesystemFactory(FilesystemFactory):
def __init__(self, options: Options) -> None:
self._options = options
def create_local_filesystem(self) -> Filesystem:
return LocalFilesystem(".")
def create_ssh_filesystem(self) -> Filesystem:
connection = self._options.connection
proxyjumps = self._options.proxyjumps
return SSHFilesystem(connection, proxyjumps)
| [
"hpcrocket.pyfilesystem.localfilesystem.LocalFilesystem",
"hpcrocket.pyfilesystem.sshfilesystem.SSHFilesystem"
]
| [((447, 467), 'hpcrocket.pyfilesystem.localfilesystem.LocalFilesystem', 'LocalFilesystem', (['"""."""'], {}), "('.')\n", (462, 467), False, 'from hpcrocket.pyfilesystem.localfilesystem import LocalFilesystem\n'), ((627, 664), 'hpcrocket.pyfilesystem.sshfilesystem.SSHFilesystem', 'SSHFilesystem', (['connection', 'proxyjumps'], {}), '(connection, proxyjumps)\n', (640, 664), False, 'from hpcrocket.pyfilesystem.sshfilesystem import SSHFilesystem\n')] |
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
from mock import Mock, patch
from packaging import version
import pytest
from sagemaker.tensorflow import TensorFlow
REGION = "us-west-2"
ENV_INPUT = {"env_key1": "env_val1", "env_key2": "env_val2", "env_key3": "env_val3"}
@pytest.fixture()
def sagemaker_session():
return Mock(name="sagemaker_session", boto_region_name=REGION)
def _build_tf(sagemaker_session, **kwargs):
return TensorFlow(
sagemaker_session=sagemaker_session,
entry_point="dummy.py",
role="dummy-role",
instance_count=1,
instance_type="ml.c4.xlarge",
**kwargs,
)
@patch("sagemaker.fw_utils.python_deprecation_warning")
def test_estimator_py2_deprecation_warning(warning, sagemaker_session):
estimator = _build_tf(sagemaker_session, framework_version="2.1.1", py_version="py2")
assert estimator.py_version == "py2"
warning.assert_called_with("tensorflow", "2.1.1")
def test_py2_version_deprecated(sagemaker_session):
with pytest.raises(AttributeError) as e:
_build_tf(sagemaker_session, framework_version="2.1.2", py_version="py2")
msg = (
"Python 2 containers are only available with 2.1.1 and lower versions. "
"Please use a Python 3 container."
)
assert msg in str(e.value)
def test_py2_version_is_not_deprecated(sagemaker_session):
estimator = _build_tf(sagemaker_session, framework_version="1.15.0", py_version="py2")
assert estimator.py_version == "py2"
estimator = _build_tf(sagemaker_session, framework_version="2.0.0", py_version="py2")
assert estimator.py_version == "py2"
def test_framework_name(sagemaker_session):
tf = _build_tf(sagemaker_session, framework_version="1.15.2", py_version="py3")
assert tf._framework_name == "tensorflow"
def test_tf_add_environment_variables(sagemaker_session):
tf = _build_tf(
sagemaker_session,
framework_version="1.15.2",
py_version="py3",
environment=ENV_INPUT,
)
assert tf.environment == ENV_INPUT
def test_tf_miss_environment_variables(sagemaker_session):
tf = _build_tf(
sagemaker_session,
framework_version="1.15.2",
py_version="py3",
environment=None,
)
assert not tf.environment
def test_enable_sm_metrics(sagemaker_session):
tf = _build_tf(
sagemaker_session,
framework_version="1.15.2",
py_version="py3",
enable_sagemaker_metrics=True,
)
assert tf.enable_sagemaker_metrics
def test_disable_sm_metrics(sagemaker_session):
tf = _build_tf(
sagemaker_session,
framework_version="1.15.2",
py_version="py3",
enable_sagemaker_metrics=False,
)
assert not tf.enable_sagemaker_metrics
def test_disable_sm_metrics_if_fw_ver_is_less_than_1_15(
sagemaker_session, tensorflow_training_version, tensorflow_training_py_version
):
if version.Version(tensorflow_training_version) > version.Version("1.14"):
pytest.skip("This test is for TF 1.14 and lower.")
tf = _build_tf(
sagemaker_session,
framework_version=tensorflow_training_version,
py_version=tensorflow_training_py_version,
image_uri="old-image",
)
assert tf.enable_sagemaker_metrics is None
def test_enable_sm_metrics_if_fw_ver_is_at_least_1_15(
sagemaker_session, tensorflow_training_version, tensorflow_training_py_version
):
if version.Version(tensorflow_training_version) < version.Version("1.15"):
pytest.skip("This test is for TF 1.15 and higher.")
tf = _build_tf(
sagemaker_session,
framework_version=tensorflow_training_version,
py_version=tensorflow_training_py_version,
)
assert tf.enable_sagemaker_metrics
def test_require_image_uri_if_fw_ver_is_less_than_1_11(
sagemaker_session, tensorflow_training_version, tensorflow_training_py_version
):
if version.Version(tensorflow_training_version) > version.Version("1.10"):
pytest.skip("This test is for TF 1.10 and lower.")
with pytest.raises(ValueError) as e:
_build_tf(
sagemaker_session,
framework_version=tensorflow_training_version,
py_version=tensorflow_training_py_version,
)
expected_msg = (
"TF {version} supports only legacy mode. Please supply the image URI directly with "
"'image_uri=520713654638.dkr.ecr.{region}.amazonaws.com/"
"sagemaker-tensorflow:{version}-cpu-py2' and set 'model_dir=False'. If you are using any "
"legacy parameters (training_steps, evaluation_steps, checkpoint_path, requirements_file), "
"make sure to pass them directly as hyperparameters instead."
).format(version=tensorflow_training_version, region=REGION)
assert expected_msg in str(e.value)
| [
"mock.patch",
"sagemaker.tensorflow.TensorFlow",
"mock.Mock",
"pytest.raises",
"packaging.version.Version",
"pytest.fixture",
"pytest.skip"
]
| [((829, 845), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (843, 845), False, 'import pytest\n'), ((1202, 1256), 'mock.patch', 'patch', (['"""sagemaker.fw_utils.python_deprecation_warning"""'], {}), "('sagemaker.fw_utils.python_deprecation_warning')\n", (1207, 1256), False, 'from mock import Mock, patch\n'), ((882, 937), 'mock.Mock', 'Mock', ([], {'name': '"""sagemaker_session"""', 'boto_region_name': 'REGION'}), "(name='sagemaker_session', boto_region_name=REGION)\n", (886, 937), False, 'from mock import Mock, patch\n'), ((995, 1152), 'sagemaker.tensorflow.TensorFlow', 'TensorFlow', ([], {'sagemaker_session': 'sagemaker_session', 'entry_point': '"""dummy.py"""', 'role': '"""dummy-role"""', 'instance_count': '(1)', 'instance_type': '"""ml.c4.xlarge"""'}), "(sagemaker_session=sagemaker_session, entry_point='dummy.py',\n role='dummy-role', instance_count=1, instance_type='ml.c4.xlarge', **kwargs\n )\n", (1005, 1152), False, 'from sagemaker.tensorflow import TensorFlow\n'), ((1578, 1607), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (1591, 1607), False, 'import pytest\n'), ((3489, 3533), 'packaging.version.Version', 'version.Version', (['tensorflow_training_version'], {}), '(tensorflow_training_version)\n', (3504, 3533), False, 'from packaging import version\n'), ((3536, 3559), 'packaging.version.Version', 'version.Version', (['"""1.14"""'], {}), "('1.14')\n", (3551, 3559), False, 'from packaging import version\n'), ((3569, 3619), 'pytest.skip', 'pytest.skip', (['"""This test is for TF 1.14 and lower."""'], {}), "('This test is for TF 1.14 and lower.')\n", (3580, 3619), False, 'import pytest\n'), ((4008, 4052), 'packaging.version.Version', 'version.Version', (['tensorflow_training_version'], {}), '(tensorflow_training_version)\n', (4023, 4052), False, 'from packaging import version\n'), ((4055, 4078), 'packaging.version.Version', 'version.Version', (['"""1.15"""'], {}), "('1.15')\n", (4070, 4078), False, 'from packaging import version\n'), ((4088, 4139), 'pytest.skip', 'pytest.skip', (['"""This test is for TF 1.15 and higher."""'], {}), "('This test is for TF 1.15 and higher.')\n", (4099, 4139), False, 'import pytest\n'), ((4490, 4534), 'packaging.version.Version', 'version.Version', (['tensorflow_training_version'], {}), '(tensorflow_training_version)\n', (4505, 4534), False, 'from packaging import version\n'), ((4537, 4560), 'packaging.version.Version', 'version.Version', (['"""1.10"""'], {}), "('1.10')\n", (4552, 4560), False, 'from packaging import version\n'), ((4570, 4620), 'pytest.skip', 'pytest.skip', (['"""This test is for TF 1.10 and lower."""'], {}), "('This test is for TF 1.10 and lower.')\n", (4581, 4620), False, 'import pytest\n'), ((4631, 4656), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (4644, 4656), False, 'import pytest\n')] |
import subprocess
proc = subprocess.Popen(['python3', 'articlekeywords.py', 'aih.txt', '5'], stdout=subprocess.PIPE)
#print(type(proc.communicate()[0]))
# path = '/opt/mycroft/skills/mycroft-bitcoinprice-skill/'
text = proc.stdout.read().decode()  # decode so the fields below are plain strings, not bytes
rows = text.splitlines()
#print(text.splitlines())
count = 0
s = ""
for row in rows:
divide = row.split()
wordCount = len(divide)
if wordCount > 1:
count = count + 1
s += str(count)
s += " "
s += str(divide[1])
s += " "
print(s)
# with open(path + 'out.csv', 'r') as content_file:
# text = content_file.read()
# self.speak_dialog("bitcoin.price", data={'price': str(text)})
#file_path = '/opt/mycroft/skills/mycroft-bitcoinprice-skill/out.csv'
#wordCount = 10
#
# text = Path(file_path).read_text()
# #print(exit_code) | [
"subprocess.Popen"
]
| [((27, 123), 'subprocess.Popen', 'subprocess.Popen', (["['python3', 'articlekeywords.py', 'aih.txt', '5']"], {'stdout': 'subprocess.PIPE'}), "(['python3', 'articlekeywords.py', 'aih.txt', '5'], stdout=\n subprocess.PIPE)\n", (43, 123), False, 'import subprocess\n')] |
# Python 2.7.1
import RPi.GPIO as GPIO
from twython import Twython
import time
import sys
import os
import pygame
APP_KEY='zmmlyAJzMDIntLpDYmSH98gbw'
APP_SECRET='<KEY>'
OAUTH_TOKEN='<KEY>'
OAUTH_TOKEN_SECRET='<KEY>'
applepislcy = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
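# Twython client built from the credentials above; pass it as the 'twit' argument of the tweet helpers below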
### GENERAL ###
def Cleanup():
GPIO.cleanup()
def Sleep(seconds):
"""Puts the program to sleep"""
time.sleep(seconds)
def Alert(channel):
"""Simple alert function for testing event interrupts"""
print('Alert on channel',channel)
def TimeString():
"""Returns the current time"""
t = time.localtime()
return str(t[0])+'.'+str(t[1])+'.'+str(t[2])+'.'+str(t[3])+'.'+str(t[4])+'.'+str(t[5])
def LoadPins(mapping,inp):
"""Organizes an input into a pin mapping dict
mapping <list>, ['IA','IB']
inp <dict>, <list>, <int> {'IA':1,'IB':2}, [1,2]
"""
if type(inp) is int and len(mapping) == 1:
return {mapping[0]:inp}
elif type(inp) is list and len(mapping) == len(inp):
o = {}
for i in range(len(inp)):
o[mapping[i]] = inp[i]
return o
elif type(inp) is dict:
return inp
else:
print('Invalid input for pins:',inp,type(inp))
print('Expected:',mapping)
return {}
def BoolToSign(inp):
"""Converts boolean bits into signed bits
0 -> -1
1 -> 1"""
return (inp * 2) - 1
def SignToBool(inp):
"""Converts signed bits into boolean bits
-1 -> 0
1 -> 1"""
return (inp + 1) / 2
### PYGAME ###
def WindowSetup(size=(300,50),caption='',text='',background=(0,0,0),foreground=(255,255,255)):
"""Sets up a pygame window to take keyboard input
size <tuple>, width by height
caption <str>, window title bar
text <str>, text to display in window, accepts \n
background <tuple>, foreground <tuple>, (r,g,b) color
"""
pygame.init()
screen = pygame.display.set_mode(size,0,32)
pygame.display.set_caption(caption)
myfont = pygame.font.SysFont('Monospace',15)
labels = []
lines = text.split('\n')
for line in lines:
labels.append(myfont.render(line,1,foreground))
screen.fill(background)
y = 0
for label in labels:
screen.blit(label, (0,y))
y += 15
pygame.display.update()
def InputLoop(eventmap):
"""Begins a pygame loop, mapping key inputs to functions
eventmap <dict>, {pygame.K_t:myfunction}
"""
index = 0
while True:
events = pygame.event.get()
for event in events:
if event.type == pygame.KEYDOWN:
#print("{0}: You pressed {1:c}".format ( index , event.key ))
if event.key in eventmap:
eventmap[event.key]()
elif event.type == pygame.QUIT:
pygame.quit()
sys.exit()
def InputLoopDemo():
def dog():
print('woof')
def cat():
print('meow')
def fish():
print('blub')
WindowSetup(caption='pet simulator',text='d for dog\nc for cat\nf for fish')
InputLoop({pygame.K_d:dog, pygame.K_c:cat, pygame.K_f:fish})
### TWITTER ###
def Tweet(twit,statustext):
"""Tweets a message
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
statustext <str>, must be <= 140 characters
"""
if len(statustext) > 140:
print('ERROR: Character limit 140 exceeded:',len(statustext))
else:
twit.update_status(status=statustext)
def TweetPicture(twit,file,statustext):
"""Tweets a message with a picture
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
file <str>, path and filename to picture
statustext <str>, must be <= 140 characters
"""
photo = open(file, 'rb')
    response = twit.upload_media(media=photo)
twit.update_status(status=statustext, media_ids=[response['media_id']])
def TweetVideo(twit,file,statustext):
"""Tweets a message with a video
twit <Twython>, create with Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
file <str>, path and filename to video
statustext <str>, must be <= 140 characters
"""
video = open(file, 'rb')
    response = twit.upload_video(media=video, media_type='video/mp4')
twit.update_status(status=statustext, media_ids=[response['media_id']])
| [
"RPi.GPIO.cleanup",
"sys.exit",
"pygame.init",
"twython.Twython",
"pygame.event.get",
"pygame.quit",
"pygame.display.set_mode",
"time.sleep",
"pygame.display.set_caption",
"time.localtime",
"pygame.display.update",
"pygame.font.SysFont"
]
| [((232, 293), 'twython.Twython', 'Twython', (['APP_KEY', 'APP_SECRET', 'OAUTH_TOKEN', 'OAUTH_TOKEN_SECRET'], {}), '(APP_KEY, APP_SECRET, OAUTH_TOKEN, OAUTH_TOKEN_SECRET)\n', (239, 293), False, 'from twython import Twython\n'), ((331, 345), 'RPi.GPIO.cleanup', 'GPIO.cleanup', ([], {}), '()\n', (343, 345), True, 'import RPi.GPIO as GPIO\n'), ((407, 426), 'time.sleep', 'time.sleep', (['seconds'], {}), '(seconds)\n', (417, 426), False, 'import time\n'), ((609, 625), 'time.localtime', 'time.localtime', ([], {}), '()\n', (623, 625), False, 'import time\n'), ((1888, 1901), 'pygame.init', 'pygame.init', ([], {}), '()\n', (1899, 1901), False, 'import pygame\n'), ((1915, 1951), 'pygame.display.set_mode', 'pygame.display.set_mode', (['size', '(0)', '(32)'], {}), '(size, 0, 32)\n', (1938, 1951), False, 'import pygame\n'), ((1954, 1989), 'pygame.display.set_caption', 'pygame.display.set_caption', (['caption'], {}), '(caption)\n', (1980, 1989), False, 'import pygame\n'), ((2003, 2039), 'pygame.font.SysFont', 'pygame.font.SysFont', (['"""Monospace"""', '(15)'], {}), "('Monospace', 15)\n", (2022, 2039), False, 'import pygame\n'), ((2280, 2303), 'pygame.display.update', 'pygame.display.update', ([], {}), '()\n', (2301, 2303), False, 'import pygame\n'), ((2491, 2509), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2507, 2509), False, 'import pygame\n'), ((2806, 2819), 'pygame.quit', 'pygame.quit', ([], {}), '()\n', (2817, 2819), False, 'import pygame\n'), ((2836, 2846), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2844, 2846), False, 'import sys\n')] |
"""plerr entrypoint"""
from plerr import cli
if __name__ == '__main__':
cli.main()
| [
"plerr.cli.main"
]
| [((77, 87), 'plerr.cli.main', 'cli.main', ([], {}), '()\n', (85, 87), False, 'from plerr import cli\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from tempest.lib import exceptions
from senlin_tempest_plugin.api import base
from senlin_tempest_plugin.common import utils
class TestPolicyUpdateNegativeNotFound(base.BaseSenlinAPITest):
@decorators.attr(type=['negative'])
@decorators.idempotent_id('5df90d82-9889-4c6f-824c-30272bcfa767')
def test_policy_update_policy_not_found(self):
ex = self.assertRaises(exceptions.NotFound,
self.client.update_obj, 'policies',
'5df90d82-9889-4c6f-824c-30272bcfa767',
{'policy': {'name': 'new-name'}})
message = ex.resp_body['error']['message']
self.assertEqual(
"The policy '5df90d82-9889-4c6f-824c-30272bcfa767' "
"could not be found.", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('29414add-9cba-4b72-a7bb-36718671dcab')
def test_policy_update_policy_invalid_param(self):
ex = self.assertRaises(exceptions.BadRequest,
self.client.update_obj, 'policies',
'5df90d82-9889-4c6f-824c-30272bcfa767',
{'policy': {'boo': 'foo'}})
message = ex.resp_body['error']['message']
self.assertEqual(
"Additional properties are not allowed (u'boo' was "
"unexpected)", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('bf26ed1e-1d26-4472-b4c8-0bcca1c0a838')
def test_policy_update_policy_empty_param(self):
ex = self.assertRaises(exceptions.BadRequest,
self.client.update_obj, 'policies',
'5df90d82-9889-4c6f-824c-30272bcfa767',
{})
message = ex.resp_body['error']['message']
self.assertEqual(
"Malformed request data, missing 'policy' key in "
"request body.", str(message))
class TestPolicyUpdateNegativeBadRequest(base.BaseSenlinAPITest):
def setUp(self):
super(TestPolicyUpdateNegativeBadRequest, self).setUp()
# Create a policy
policy_id = utils.create_a_policy(self)
self.addCleanup(utils.delete_a_policy, self, policy_id)
self.policy_id = policy_id
@decorators.attr(type=['negative'])
@decorators.idempotent_id('31242de5-55ac-4589-87a1-a9940e4beca2')
def test_policy_update_no_property_updated(self):
# No property is updated.
params = {
'policy': {}
}
# Verify badrequest exception(400) is raised.
ex = self.assertRaises(exceptions.BadRequest,
self.client.update_obj,
'policies', self.policy_id, params)
message = ex.resp_body['error']['message']
self.assertEqual(
"'name' is a required property", str(message))
@decorators.attr(type=['negative'])
@decorators.idempotent_id('d2ca7de6-0069-48c9-b3de-ee975a2428dc')
def test_policy_update_spec_not_updatable(self):
# Try to update spec of policy.
# Note: name is the only property that can be updated
# after policy is created.
params = {
'policy': {
'name': 'new-name',
'spec': {'k1': 'v1'}
}
}
# Verify badrequest exception(400) is raised.
ex = self.assertRaises(exceptions.BadRequest,
self.client.update_obj,
'policies', self.policy_id, params)
message = ex.resp_body['error']['message']
self.assertEqual(
"Additional properties are not allowed (u'spec' was "
"unexpected)", str(message))
| [
"tempest.lib.decorators.attr",
"tempest.lib.decorators.idempotent_id",
"senlin_tempest_plugin.common.utils.create_a_policy"
]
| [((779, 813), 'tempest.lib.decorators.attr', 'decorators.attr', ([], {'type': "['negative']"}), "(type=['negative'])\n", (794, 813), False, 'from tempest.lib import decorators\n'), ((819, 883), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""5df90d82-9889-4c6f-824c-30272bcfa767"""'], {}), "('5df90d82-9889-4c6f-824c-30272bcfa767')\n", (843, 883), False, 'from tempest.lib import decorators\n'), ((1388, 1422), 'tempest.lib.decorators.attr', 'decorators.attr', ([], {'type': "['negative']"}), "(type=['negative'])\n", (1403, 1422), False, 'from tempest.lib import decorators\n'), ((1428, 1492), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""29414add-9cba-4b72-a7bb-36718671dcab"""'], {}), "('29414add-9cba-4b72-a7bb-36718671dcab')\n", (1452, 1492), False, 'from tempest.lib import decorators\n'), ((1989, 2023), 'tempest.lib.decorators.attr', 'decorators.attr', ([], {'type': "['negative']"}), "(type=['negative'])\n", (2004, 2023), False, 'from tempest.lib import decorators\n'), ((2029, 2093), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""bf26ed1e-1d26-4472-b4c8-0bcca1c0a838"""'], {}), "('bf26ed1e-1d26-4472-b4c8-0bcca1c0a838')\n", (2053, 2093), False, 'from tempest.lib import decorators\n'), ((2891, 2925), 'tempest.lib.decorators.attr', 'decorators.attr', ([], {'type': "['negative']"}), "(type=['negative'])\n", (2906, 2925), False, 'from tempest.lib import decorators\n'), ((2931, 2995), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""31242de5-55ac-4589-87a1-a9940e4beca2"""'], {}), "('31242de5-55ac-4589-87a1-a9940e4beca2')\n", (2955, 2995), False, 'from tempest.lib import decorators\n'), ((3511, 3545), 'tempest.lib.decorators.attr', 'decorators.attr', ([], {'type': "['negative']"}), "(type=['negative'])\n", (3526, 3545), False, 'from tempest.lib import decorators\n'), ((3551, 3615), 'tempest.lib.decorators.idempotent_id', 'decorators.idempotent_id', (['"""d2ca7de6-0069-48c9-b3de-ee975a2428dc"""'], {}), "('d2ca7de6-0069-48c9-b3de-ee975a2428dc')\n", (3575, 3615), False, 'from tempest.lib import decorators\n'), ((2758, 2785), 'senlin_tempest_plugin.common.utils.create_a_policy', 'utils.create_a_policy', (['self'], {}), '(self)\n', (2779, 2785), False, 'from senlin_tempest_plugin.common import utils\n')] |
from datasette import hookimpl
from datasette.utils import detect_spatialite
from shapely import wkt
def get_spatial_tables(conn):
if not detect_spatialite(conn):
return {}
spatial_tables = {}
c = conn.cursor()
c.execute(
"""SELECT f_table_name, f_geometry_column, srid, spatial_index_enabled
FROM geometry_columns"""
)
for row in c.fetchall():
if row[3] != 1:
print(
"Column {column} in table {table} has no spatial index; datasette-geo will ignore it.".format(
column=row[1], table=row[0]
)
)
continue
spatial_tables[row[0]] = row[1]
return spatial_tables
def get_bounds(conn, spatial_tables):
c = conn.cursor()
res = {}
for table, column in spatial_tables.items():
c.execute(
"SELECT AsText(Envelope(GUnion({column}))) FROM {table}".format(
table=table, column=column
)
)
data = c.fetchone()[0]
if data is None:
continue
bbox = wkt.loads(data)
res[table] = bbox.bounds
return res
| [
"shapely.wkt.loads",
"datasette.utils.detect_spatialite"
]
| [((144, 167), 'datasette.utils.detect_spatialite', 'detect_spatialite', (['conn'], {}), '(conn)\n', (161, 167), False, 'from datasette.utils import detect_spatialite\n'), ((1106, 1121), 'shapely.wkt.loads', 'wkt.loads', (['data'], {}), '(data)\n', (1115, 1121), False, 'from shapely import wkt\n')] |
"""
Message context.
"""
from typing import Dict
from microcosm.api import defaults, typed
from microcosm.config.types import boolean
from microcosm_logging.decorators import logger
from microcosm_pubsub.constants import TTL_KEY, URI_KEY
from microcosm_pubsub.message import SQSMessage
@defaults(
enable_ttl=typed(boolean, default_value=True),
initial_ttl=typed(int, default_value=32),
)
@logger
class SQSMessageContext:
"""
Factory for per-message contexts.
"""
def __init__(self, graph):
self.enable_ttl = graph.config.sqs_message_context.enable_ttl
self.initial_ttl = graph.config.sqs_message_context.initial_ttl
def __call__(self, context: SQSMessage, **kwargs) -> Dict[str, str]:
"""
Create a new context from a message.
"""
return self.from_sqs_message(context, **kwargs)
def from_sqs_message(self, message: SQSMessage, **kwargs):
context: Dict = dict(message.opaque_data)
context.update(
# include the message id
message_id=message.message_id,
**kwargs,
)
# include the TTL (if enabled)
if self.enable_ttl:
ttl = message.ttl if message.ttl is not None else self.initial_ttl
context[TTL_KEY] = str(ttl - 1)
# include the URI (if there is one)
if message.uri:
context[URI_KEY] = message.uri
return context
| [
"microcosm.api.typed"
]
| [((317, 351), 'microcosm.api.typed', 'typed', (['boolean'], {'default_value': '(True)'}), '(boolean, default_value=True)\n', (322, 351), False, 'from microcosm.api import defaults, typed\n'), ((369, 397), 'microcosm.api.typed', 'typed', (['int'], {'default_value': '(32)'}), '(int, default_value=32)\n', (374, 397), False, 'from microcosm.api import defaults, typed\n')] |
import pytest
from app.db import session_scope
pytestmark = pytest.mark.asyncio
async def test_engine_configured(env):
async with session_scope() as session:
assert str(session.bind.engine.url) == env("SQLALCHEMY_DATABASE_URI")
| [
"app.db.session_scope"
]
| [((138, 153), 'app.db.session_scope', 'session_scope', ([], {}), '()\n', (151, 153), False, 'from app.db import session_scope\n')] |
#!/usr/bin/env python3
"""
Description: Python script that appends the common columns from one sheet to another using fuzzy matching.
"""
import pip
def import_or_install(package):
try:
__import__(package)
except ImportError:
pip.main(['install', package])
import os
import sys
import argparse
import_or_install('numpy')
import_or_install('pandas')
import_or_install('fuzzywuzzy')
import numpy as np
import pandas as pd
from fuzzywuzzy import process, fuzz
class FuzzyMatcher:
"""
FuzzyMatcher class to perform the fuzzy matching.
"""
def __init__(self, df_1, df_2, columns_1, columns_2, append_in='second'):
"""
The constructor takes five arguments. The last argument 'append_in' is optional.
Parameters:
df_1: the first table in pandas.DataFrame format or the name of the CSV file for the first table
df_2: the second table in pandas.DataFrame format or the name of the CSV file for the second table
columns_1: list of common columns in the first table
columns_2: list of common columns in the second table
append_in (optional):
'first' if the common columns are to be appended in the first table
'second' if the common columns are to be appended in the second table
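
        Example (illustrative only; the CSV names and column lists are hypothetical):
            matcher = FuzzyMatcher('table_a.csv', 'table_b.csv',
                                   ['country', 'state', 'city'],
                                   ['country', 'state', 'city'])
            result = matcher.fuzzy_match  # second table with the matched '_t' columns appended
            matcher.save('output.csv')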
"""
if type(df_1) == str:
df_1 = pd.read_csv(df_1)
if type(df_2) == str:
df_2 = pd.read_csv(df_2)
df_1.columns = df_1.columns.str.lower().str.strip()
df_2.columns = df_2.columns.str.lower().str.strip()
columns_1 = [i.lower().strip() for i in columns_1]
columns_2 = [i.lower().strip() for i in columns_2]
if append_in == 'first':
temp = df_1
df_1 = df_2
df_2 = temp
temp = columns_1
columns_1 = columns_2
columns_2 = temp
self.df_1 = df_1.rename(columns=dict(zip(columns_1, columns_2)))
self.columns = columns_2
self.df_2 = self._fuzzy_match(self.df_1, df_2, self.columns[0])
@staticmethod
def _string_matching(name, collection, mapping_):
"""
Returns similar name using fuzzy matching.
"""
if name in collection:
return name
if name in mapping_:
return mapping_[name]
similar = process.extractOne(name, collection, scorer=fuzz.ratio)[0]
mapping_[name] = similar
return similar
def _fuzzy_match(self, df_1_t, df_2_t, common_column_t):
"""
Returns dataframe with the common column appended.
Notice that the appended columns end with '_t'.
"""
collection = set(df_1_t[common_column_t])
mapping_ = {}
df_2_t[common_column_t + '_t'] = df_2_t[common_column_t].apply(self._string_matching, args=(collection, mapping_))
return df_2_t
@property
def fuzzy_match(self):
"""
Returns the dataframe consisting of all the appended columns.
"""
for i_t, common_column in enumerate(self.columns[1:], start=1):
self.df_2[common_column + '_t'] = np.nan
group_1 = self.df_1.groupby(self.columns[:i_t])
group_2 = self.df_2.groupby([i + '_t' for i in self.columns[:i_t]])
for key, df_slice_2 in group_2:
df_slice_1 = group_1.get_group(key)
df_slice_2 = self._fuzzy_match(df_slice_1, df_slice_2, common_column)
self.df_2.loc[df_slice_2.index, common_column + '_t'] = df_slice_2.loc[:, common_column + '_t']
return self.df_2
def save(self, filename):
"""
Saves the result dataframe to a CSV file, filename.
"""
self.df_2.to_csv(filename)
def parse_args(parser):
"""
Parsing and configuration of the command line arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument('--firstcsv', type=str, required=True, help='CSV file for first table.')
parser.add_argument('--secondcsv', type=str, required=True, help='CSV file for second table.')
parser.add_argument('--destination', type=str, default='output.csv', help='Destination filename.')
parser.add_argument('--commoncolumns1', type=str, required=True, help='Common columns for first table.')
parser.add_argument('--commoncolumns2', type=str, required=True, help='Common columns for second table in the same order.')
parser.add_argument("--in", dest="_in", default='second', choices=['second', 'first'], help='Table to append the columns. ')
return check_args(parser.parse_args())
def check_args(args):
"""
Checking the arguments if they are entered properly.
Validations performed:
1. Compulsory arguments are entered.
2. The entered filenames are present in the current folder.
3. The entered column names are present in the corresponding files.
4. If the destination filename is already present in the directory, ask the user if it can be overwritten.
"""
# for --firstcsv and --secondcsv
for filename in [args.firstcsv, args.secondcsv]:
if not os.path.isfile(filename):
raise Exception("File {} is not present in the currrent folder.".format(filename))
# --commoncolumns1
commoncolumns1 = [i.strip().lower() for i in args.commoncolumns1.split(',')]
temp = set(commoncolumns1) - set(pd.read_csv(args.firstcsv, nrows=1).columns.str.lower().str.strip())
if temp:
raise Exception("The following columns are not present in the file, {}:\n{}".format(args.firstcsv, temp))
# --commoncolumns2
commoncolumns2 = [i.strip().lower() for i in args.commoncolumns2.split(',')]
temp = set(commoncolumns2) - set(pd.read_csv(args.secondcsv, nrows=1).columns.str.lower().str.strip())
if temp:
raise Exception("The following columns are not present in the file, {}:\n{}".format(args.secondcsv, temp))
# --destination
if os.path.isfile(args.destination):
print("The file {} already exists. Do you want to overwrite it? y/n".format(args.destination))
ans = input().strip().lower()
if ans == 'n':
print("Please enter different destination filename and run the script again.")
sys.exit()
return args
if __name__ == "__main__":
# instantiate the ArgumentParser class and parse the arguments
parser = argparse.ArgumentParser()
arguments = parse_args(parser)
# save the arguments as some variables which later would be passed to FuzzyMatcher class
filename_1 = arguments.firstcsv
filename_2 = arguments.secondcsv
result_filename = arguments.destination
# clean and lowercase-ize the columns names
common_columns_1 = [i.strip().lower() for i in arguments.commoncolumns1.split(',')]
common_columns_2 = [i.strip().lower() for i in arguments.commoncolumns2.split(',')]
# instantiate the FuzzyMatcher object, perform the fuzzy match, and save the result to the destination CSV file
fuzzy_matcher = FuzzyMatcher(filename_1, filename_2, common_columns_1, common_columns_2, append_in=arguments._in)
fuzzy_matcher.fuzzy_match
fuzzy_matcher.save(result_filename)
| [
"argparse.ArgumentParser",
"pandas.read_csv",
"os.path.isfile",
"fuzzywuzzy.process.extractOne",
"sys.exit",
"pip.main"
]
| [((4202, 4227), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4225, 4227), False, 'import argparse\n'), ((6315, 6347), 'os.path.isfile', 'os.path.isfile', (['args.destination'], {}), '(args.destination)\n', (6329, 6347), False, 'import os\n'), ((6779, 6804), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (6802, 6804), False, 'import argparse\n'), ((255, 285), 'pip.main', 'pip.main', (["['install', package]"], {}), "(['install', package])\n", (263, 285), False, 'import pip\n'), ((1402, 1419), 'pandas.read_csv', 'pd.read_csv', (['df_1'], {}), '(df_1)\n', (1413, 1419), True, 'import pandas as pd\n'), ((1482, 1499), 'pandas.read_csv', 'pd.read_csv', (['df_2'], {}), '(df_2)\n', (1493, 1499), True, 'import pandas as pd\n'), ((2509, 2564), 'fuzzywuzzy.process.extractOne', 'process.extractOne', (['name', 'collection'], {'scorer': 'fuzz.ratio'}), '(name, collection, scorer=fuzz.ratio)\n', (2527, 2564), False, 'from fuzzywuzzy import process, fuzz\n'), ((5480, 5504), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (5494, 5504), False, 'import os\n'), ((6625, 6635), 'sys.exit', 'sys.exit', ([], {}), '()\n', (6633, 6635), False, 'import sys\n'), ((5743, 5778), 'pandas.read_csv', 'pd.read_csv', (['args.firstcsv'], {'nrows': '(1)'}), '(args.firstcsv, nrows=1)\n', (5754, 5778), True, 'import pandas as pd\n'), ((6085, 6121), 'pandas.read_csv', 'pd.read_csv', (['args.secondcsv'], {'nrows': '(1)'}), '(args.secondcsv, nrows=1)\n', (6096, 6121), True, 'import pandas as pd\n')] |
import os
from twisted.internet.defer import succeed
class Load(object):
def register(self, sysinfo):
self._sysinfo = sysinfo
def run(self):
self._sysinfo.add_header("System load", str(os.getloadavg()[0]))
return succeed(None)
| [
"os.getloadavg",
"twisted.internet.defer.succeed"
]
| [((250, 263), 'twisted.internet.defer.succeed', 'succeed', (['None'], {}), '(None)\n', (257, 263), False, 'from twisted.internet.defer import succeed\n'), ((214, 229), 'os.getloadavg', 'os.getloadavg', ([], {}), '()\n', (227, 229), False, 'import os\n')] |
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot
import agentframework
import csv
import matplotlib.animation
#create environment in which agents will operate
environment=[]
#read csv downloaded file
f = open('in.txt', newline='')
reader = csv.reader(f, quoting=csv.QUOTE_NONNUMERIC)
for row in reader:
    rowlist=[] # Values for the current row
    environment.append(rowlist)
    for value in row: # Each value in the row
        #print(value) # Floats
        rowlist.append(value)
f.close() # The csv reader loads data on request, so the file is only closed
# after the loop above has consumed every row.
#def distance_between(agents_row_a, agents_row_b):
# return (((agents_row_a.x - agents_row_b.x)**2) +
# ((agents_row_a.y - agents_row_b.y)**2))**0.5
num_of_agents = 10
num_of_iterations = 10
neighbourhood = 20
fig = matplotlib.pyplot.figure(figsize=(7, 7))
ax = fig.add_axes([0, 0, 1, 1])
# Make the agents and connecting with the environment.
agents = []
def update(frame_number):
fig.clear()
for i in range(num_of_agents):
agents.append(agentframework.Agent(environment,agents))
# Move and eat agents with every move or iteration.
for j in range(num_of_iterations):
for i in range(num_of_agents):
agents[i].move()
agents[i].eat()
agents[i].share_with_neighbours(neighbourhood)
# Loop through the agents in self.agents .
# Calculate the distance between self and the current other agent:
# distance = self.distance_between(agent)
# If distance is less than or equal to the neighbourhood
# Sum self.store and agent.store .
# Divide sum by two to calculate average.
# self.store = average
# agent.store = average
# End if
# End loop
# plot
matplotlib.pyplot.xlim(0, 299)
matplotlib.pyplot.ylim(0, 299)
for i in range(num_of_agents):
matplotlib.pyplot.scatter(agents[i].x,agents[i].y)
matplotlib.pyplot.imshow(environment)
animation = matplotlib.animation.FuncAnimation(fig, update, interval=1)
matplotlib.pyplot.show()
| [
"matplotlib.pyplot.imshow",
"matplotlib.use",
"matplotlib.animation.FuncAnimation",
"agentframework.Agent",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.scatter",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.xlim",
"csv.reader",
"matplotlib.pyplot.show"
]
| [((18, 41), 'matplotlib.use', 'matplotlib.use', (['"""TkAgg"""'], {}), "('TkAgg')\n", (32, 41), False, 'import matplotlib\n'), ((265, 308), 'csv.reader', 'csv.reader', (['f'], {'quoting': 'csv.QUOTE_NONNUMERIC'}), '(f, quoting=csv.QUOTE_NONNUMERIC)\n', (275, 308), False, 'import csv\n'), ((837, 877), 'matplotlib.pyplot.figure', 'matplotlib.pyplot.figure', ([], {'figsize': '(7, 7)'}), '(figsize=(7, 7))\n', (861, 877), False, 'import matplotlib\n'), ((2119, 2178), 'matplotlib.animation.FuncAnimation', 'matplotlib.animation.FuncAnimation', (['fig', 'update'], {'interval': '(1)'}), '(fig, update, interval=1)\n', (2153, 2178), False, 'import matplotlib\n'), ((2179, 2203), 'matplotlib.pyplot.show', 'matplotlib.pyplot.show', ([], {}), '()\n', (2201, 2203), False, 'import matplotlib\n'), ((1892, 1922), 'matplotlib.pyplot.xlim', 'matplotlib.pyplot.xlim', (['(0)', '(299)'], {}), '(0, 299)\n', (1914, 1922), False, 'import matplotlib\n'), ((1927, 1957), 'matplotlib.pyplot.ylim', 'matplotlib.pyplot.ylim', (['(0)', '(299)'], {}), '(0, 299)\n', (1949, 1957), False, 'import matplotlib\n'), ((2060, 2097), 'matplotlib.pyplot.imshow', 'matplotlib.pyplot.imshow', (['environment'], {}), '(environment)\n', (2084, 2097), False, 'import matplotlib\n'), ((2001, 2052), 'matplotlib.pyplot.scatter', 'matplotlib.pyplot.scatter', (['agents[i].x', 'agents[i].y'], {}), '(agents[i].x, agents[i].y)\n', (2026, 2052), False, 'import matplotlib\n'), ((1091, 1132), 'agentframework.Agent', 'agentframework.Agent', (['environment', 'agents'], {}), '(environment, agents)\n', (1111, 1132), False, 'import agentframework\n')] |
"""Validation for UDFs.
Warning: This is an experimental module and API here can change without notice.
DO NOT USE DIRECTLY.
"""
from inspect import Parameter, Signature, signature
from typing import Any, Callable, List
import ibis.common.exceptions as com
from ibis.expr.datatypes import DataType
def _parameter_count(funcsig: Signature) -> int:
"""Get the number of positional-or-keyword or position-only parameters in a
function signature.
Parameters
----------
funcsig : inspect.Signature
A UDF signature
Returns
-------
int
The number of parameters
"""
return sum(
param.kind in {param.POSITIONAL_OR_KEYWORD, param.POSITIONAL_ONLY}
for param in funcsig.parameters.values()
if param.default is Parameter.empty
)
def validate_input_type(
input_type: List[DataType], func: Callable
) -> Signature:
"""Check that the declared number of inputs (the length of `input_type`)
and the number of inputs to `func` are equal.
If the signature of `func` uses *args, then no check is done (since no
check can be done).
Parameters
----------
input_type : List[DataType]
func : callable
Returns
-------
inspect.Signature
"""
funcsig = signature(func)
params = funcsig.parameters.values()
# We can only do validation if all the positional arguments are explicit
# (i.e. no *args)
if not any(param.kind is Parameter.VAR_POSITIONAL for param in params):
declared_parameter_count = len(input_type)
function_parameter_count = _parameter_count(funcsig)
if declared_parameter_count != function_parameter_count:
raise TypeError(
'Function signature {!r} has {:d} parameters, '
'input_type has {:d}. These must match. Non-column '
'parameters must be defined as keyword only, i.e., '
'def foo(col, *, function_param).'.format(
func.__name__,
function_parameter_count,
declared_parameter_count,
)
)
return funcsig
def validate_output_type(output_type: Any) -> None:
"""Check that the output type is a single datatype."""
if isinstance(output_type, list):
raise com.IbisTypeError(
'The output type of a UDF must be a single datatype.'
)
| [
"ibis.common.exceptions.IbisTypeError",
"inspect.signature"
]
| [((1277, 1292), 'inspect.signature', 'signature', (['func'], {}), '(func)\n', (1286, 1292), False, 'from inspect import Parameter, Signature, signature\n'), ((2323, 2395), 'ibis.common.exceptions.IbisTypeError', 'com.IbisTypeError', (['"""The output type of a UDF must be a single datatype."""'], {}), "('The output type of a UDF must be a single datatype.')\n", (2340, 2395), True, 'import ibis.common.exceptions as com\n')] |
import inspect
from ariadne import make_executable_schema, QueryType, MutationType, SubscriptionType
from .resolver import *
#
# Schema
#
class GrammarError(Exception):
pass
keywords = ['query', 'mutation', 'subscription', 'source']
class SchemaMetaDict(dict):
'''
Dictionary that allows decorated schema entry functions to be overloaded
'''
def __setitem__(self, key, value):
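        # If a decorated entry reuses an existing key, chain the previous function
        # onto the new one via next_func instead of silently overwriting it.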
if key in self and callable(value) and hasattr(value, 'name'):
value.next_func = self[key]
if not hasattr(value.next_func, 'name'):
raise GrammarError(f'Redefinition of {key}. Perhaps an earlier {key} is missing @_')
super().__setitem__(key, value)
def __getitem__(self, key):
#if key not in self and key.isupper() and key[:1] != '_':
if key not in self and key.isupper() and not key[:1] in keywords:
return key.upper()
else:
return super().__getitem__(key)
def _query_decorator(name):
def decorate(func):
func.tag = 'query'
func.name = name
return func
return decorate
def _mutation_decorator(name):
def decorate(func):
func.tag = 'mutation'
func.name = name
return func
return decorate
def _subscription_decorator(name):
def decorate(func):
func.tag = 'subscription'
func.name = name
return func
return decorate
def _source_decorator(name):
def decorate(func):
func.tag = 'source'
func.name = name
return func
return decorate
class SchemaMeta(type):
@classmethod
def __prepare__(meta, *args, **kwargs):
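        # Pre-seed the class namespace so the query/mutation/subscription/source
        # decorators can be used directly inside Schema class bodies.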
d = SchemaMetaDict()
d['query'] = _query_decorator
d['mutation'] = _mutation_decorator
d['subscription'] = _subscription_decorator
d['source'] = _source_decorator
return d
def __new__(meta, selfname, bases, attributes):
#del attributes['_']
for key in keywords:
del attributes[key]
self = super().__new__(meta, selfname, bases, attributes)
self._build(list(attributes.items()))
return self
class Schema(metaclass=SchemaMeta):
def __init__(self, parent=None):
self.parent = parent
self.children = []
if parent:
parent.add_child(self)
self.db = parent.db
else:
self.db = self
self.entries = self.__class__.entries
@classmethod
def produce(self, parent=None):
schema = self(parent)
return schema
def add_child(self, schema):
self.children.append(schema)
def get_gql(self):
gql = [inspect.getdoc(self)]
for child in self.children:
gql.append(child.get_gql())
return "\n".join(gql)
def register(self):
for entry in self.entries:
entry.register(self)
for child in self.children:
child.register()
def add(self, r):
self.entries.append(r)
@classmethod
def __collect_functions(self, definitions):
'''
Collect all of the tagged grammar entries
'''
entries = [ (name, value) for name, value in definitions
if callable(value) and hasattr(value, 'name') ]
return entries
@classmethod
def _build(self, definitions):
if vars(self).get('_build', False):
return
# Collect all of the entry functions from the class definition
functions = self.__collect_functions(definitions)
self.entries = self.__build_entries(functions)
@classmethod
def __build_entries(self, functions):
entries = []
errors = ''
for name, func in functions:
entry = self._build_entry(func)
entries.append(entry)
return entries
@classmethod
def _build_entry(self, func):
tag = func.tag
name = func.name
prodname = func.__name__
unwrapped = inspect.unwrap(func)
filename = unwrapped.__code__.co_filename
lineno = unwrapped.__code__.co_firstlineno
logger.debug(f"_build_entry:tag: {tag}")
logger.debug(f"_build_entry:name: {name}")
logger.debug(f"_build_entry:prodname: {prodname}")
logger.debug(f"_build_entry:unwrapped: {unwrapped}")
#entry = Resolver(name, func, prodname=prodname, filename=filename, lineno=lineno)
entry = entry_factories[tag](self, name, func, prodname=prodname, filename=filename, lineno=lineno)
logger.debug(f"_build_entry:entry: {entry}")
return entry
# This is for testing or in case you don't want a database as the root schema
class RootSchema(Schema):
"""
type Query {
dummy: Int!
}
type Mutation {
setDummy(val: Int!): Int
}
type Subscription {
dummy: Int
}
"""
instance = None
def __init__(self, parent=None):
super().__init__(parent)
Schema.instance = self
self.query_type = QueryType()
self.mutation_type = MutationType()
self.subscription_type = SubscriptionType()
@classmethod
def produce(self):
if self.instance:
return self.instance
self.instance = schema = self()
return schema
def make_executable(self):
self.register()
#return make_executable_schema(type_defs, self.query)
return make_executable_schema(
self.get_gql(),
self.query_type,
self.mutation_type,
self.subscription_type
        )
 | [
"ariadne.SubscriptionType",
"ariadne.QueryType",
"ariadne.MutationType",
"inspect.unwrap",
"inspect.getdoc"
]
| [((4025, 4045), 'inspect.unwrap', 'inspect.unwrap', (['func'], {}), '(func)\n', (4039, 4045), False, 'import inspect\n'), ((5069, 5080), 'ariadne.QueryType', 'QueryType', ([], {}), '()\n', (5078, 5080), False, 'from ariadne import make_executable_schema, QueryType, MutationType, SubscriptionType\n'), ((5110, 5124), 'ariadne.MutationType', 'MutationType', ([], {}), '()\n', (5122, 5124), False, 'from ariadne import make_executable_schema, QueryType, MutationType, SubscriptionType\n'), ((5158, 5176), 'ariadne.SubscriptionType', 'SubscriptionType', ([], {}), '()\n', (5174, 5176), False, 'from ariadne import make_executable_schema, QueryType, MutationType, SubscriptionType\n'), ((2697, 2717), 'inspect.getdoc', 'inspect.getdoc', (['self'], {}), '(self)\n', (2711, 2717), False, 'import inspect\n')] |
import random
from otp.ai.AIBase import *
from direct.distributed.ClockDelta import *
from toontown.battle.BattleBase import *
from toontown.battle.BattleCalculatorAI import *
from toontown.toonbase.ToontownBattleGlobals import *
from toontown.battle.SuitBattleGlobals import *
from pandac.PandaModules import *
from toontown.battle import BattleExperienceAI
from direct.distributed import DistributedObjectAI
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from direct.task import Task
from direct.directnotify import DirectNotifyGlobal
from toontown.ai import DatabaseObject
from toontown.toon import DistributedToonAI
from toontown.toon import InventoryBase
from toontown.toonbase import ToontownGlobals
from toontown.toon import NPCToons
from otp.ai.MagicWordGlobal import *
from toontown.pets import DistributedPetProxyAI
class DistributedBattleBaseAI(DistributedObjectAI.DistributedObjectAI, BattleBase):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedBattleBaseAI')
def __init__(self, air, zoneId, finishCallback = None, maxSuits = 4, bossBattle = 0, tutorialFlag = 0, interactivePropTrackBonus = -1):
DistributedObjectAI.DistributedObjectAI.__init__(self, air)
self.serialNum = 0
self.zoneId = zoneId
self.maxSuits = maxSuits
self.setBossBattle(bossBattle)
self.tutorialFlag = tutorialFlag
self.interactivePropTrackBonus = interactivePropTrackBonus
self.finishCallback = finishCallback
self.avatarExitEvents = []
self.responses = {}
self.adjustingResponses = {}
self.joinResponses = {}
self.adjustingSuits = []
self.adjustingToons = []
self.numSuitsEver = 0
BattleBase.__init__(self)
self.streetBattle = 1
self.pos = Point3(0, 0, 0)
self.initialSuitPos = Point3(0, 0, 0)
self.toonExp = {}
self.toonOrigQuests = {}
self.toonItems = {}
self.toonOrigMerits = {}
self.toonMerits = {}
self.toonParts = {}
self.battleCalc = BattleCalculatorAI(self, tutorialFlag)
if self.air.suitInvasionManager.getInvading():
mult = getInvasionMultiplier()
self.battleCalc.setSkillCreditMultiplier(mult)
if self.air.holidayManager.isMoreXpHolidayRunning():
mult = getMoreXpHolidayMultiplier()
self.battleCalc.setSkillCreditMultiplier(mult)
self.fsm = None
self.clearAttacks()
self.ignoreFaceOffDone = 0
self.needAdjust = 0
self.movieHasBeenMade = 0
self.movieHasPlayed = 0
self.rewardHasPlayed = 0
self.movieRequested = 0
self.ignoreResponses = 0
self.ignoreAdjustingResponses = 0
self.taskNames = []
self.exitedToons = []
self.suitsKilled = []
self.suitsKilledThisBattle = []
self.suitsKilledPerFloor = []
self.suitsEncountered = []
self.newToons = []
self.newSuits = []
self.numNPCAttacks = 0
self.npcAttacks = {}
self.pets = {}
self.fireCount = 0
self.fsm = ClassicFSM.ClassicFSM('DistributedBattleAI', [State.State('FaceOff', self.enterFaceOff, self.exitFaceOff, ['WaitForInput', 'Resume']),
State.State('WaitForJoin', self.enterWaitForJoin, self.exitWaitForJoin, ['WaitForInput', 'Resume']),
State.State('WaitForInput', self.enterWaitForInput, self.exitWaitForInput, ['MakeMovie', 'Resume']),
State.State('MakeMovie', self.enterMakeMovie, self.exitMakeMovie, ['PlayMovie', 'Resume']),
State.State('PlayMovie', self.enterPlayMovie, self.exitPlayMovie, ['WaitForJoin', 'Reward', 'Resume']),
State.State('Reward', self.enterReward, self.exitReward, ['Resume']),
State.State('Resume', self.enterResume, self.exitResume, []),
State.State('Off', self.enterOff, self.exitOff, ['FaceOff', 'WaitForJoin'])], 'Off', 'Off')
self.joinableFsm = ClassicFSM.ClassicFSM('Joinable', [State.State('Joinable', self.enterJoinable, self.exitJoinable, ['Unjoinable']), State.State('Unjoinable', self.enterUnjoinable, self.exitUnjoinable, ['Joinable'])], 'Unjoinable', 'Unjoinable')
self.joinableFsm.enterInitialState()
self.runableFsm = ClassicFSM.ClassicFSM('Runable', [State.State('Runable', self.enterRunable, self.exitRunable, ['Unrunable']), State.State('Unrunable', self.enterUnrunable, self.exitUnrunable, ['Runable'])], 'Unrunable', 'Unrunable')
self.runableFsm.enterInitialState()
self.adjustFsm = ClassicFSM.ClassicFSM('Adjust', [State.State('Adjusting', self.enterAdjusting, self.exitAdjusting, ['NotAdjusting', 'Adjusting']), State.State('NotAdjusting', self.enterNotAdjusting, self.exitNotAdjusting, ['Adjusting'])], 'NotAdjusting', 'NotAdjusting')
self.adjustFsm.enterInitialState()
self.fsm.enterInitialState()
self.startTime = globalClock.getRealTime()
self.adjustingTimer = Timer()
def clearAttacks(self):
self.toonAttacks = {}
self.suitAttacks = getDefaultSuitAttacks()
def requestDelete(self):
if hasattr(self, 'fsm'):
self.fsm.request('Off')
self.__removeTaskName(self.uniqueName('make-movie'))
DistributedObjectAI.DistributedObjectAI.requestDelete(self)
def delete(self):
self.notify.debug('deleting battle')
self.fsm.request('Off')
self.ignoreAll()
self.__removeAllTasks()
del self.fsm
del self.joinableFsm
del self.runableFsm
del self.adjustFsm
self.__cleanupJoinResponses()
self.timer.stop()
del self.timer
self.adjustingTimer.stop()
del self.adjustingTimer
self.battleCalc.cleanup()
del self.battleCalc
for suit in self.suits:
del suit.battleTrap
del self.finishCallback
for petProxy in self.pets.values():
petProxy.requestDelete()
DistributedObjectAI.DistributedObjectAI.delete(self)
def pause(self):
self.timer.stop()
self.adjustingTimer.stop()
def unpause(self):
self.timer.resume()
self.adjustingTimer.resume()
def abortBattle(self):
self.notify.debug('%s.abortBattle() called.' % self.doId)
toonsCopy = self.toons[:]
for toonId in toonsCopy:
self.__removeToon(toonId)
if self.fsm.getCurrentState().getName() == 'PlayMovie' or self.fsm.getCurrentState().getName() == 'MakeMovie':
self.exitedToons.append(toonId)
self.d_setMembers()
self.b_setState('Resume')
self.__removeAllTasks()
self.timer.stop()
self.adjustingTimer.stop()
def __removeSuit(self, suit):
self.notify.debug('__removeSuit(%d)' % suit.doId)
self.suits.remove(suit)
self.activeSuits.remove(suit)
if self.luredSuits.count(suit) == 1:
self.luredSuits.remove(suit)
self.suitGone = 1
del suit.battleTrap
def findSuit(self, id):
for s in self.suits:
if s.doId == id:
return s
return None
def __removeTaskName(self, name):
if self.taskNames.count(name):
self.taskNames.remove(name)
self.notify.debug('removeTaskName() - %s' % name)
taskMgr.remove(name)
def __removeAllTasks(self):
for n in self.taskNames:
self.notify.debug('removeAllTasks() - %s' % n)
taskMgr.remove(n)
self.taskNames = []
def __removeToonTasks(self, toonId):
name = self.taskName('running-toon-%d' % toonId)
self.__removeTaskName(name)
name = self.taskName('to-pending-av-%d' % toonId)
self.__removeTaskName(name)
def getLevelDoId(self):
return 0
def getBattleCellId(self):
return 0
def getPosition(self):
self.notify.debug('getPosition() - %s' % self.pos)
return [self.pos[0], self.pos[1], self.pos[2]]
def getInitialSuitPos(self):
p = []
p.append(self.initialSuitPos[0])
p.append(self.initialSuitPos[1])
p.append(self.initialSuitPos[2])
return p
def setBossBattle(self, bossBattle):
self.bossBattle = bossBattle
def getBossBattle(self):
return self.bossBattle
def b_setState(self, state):
self.notify.debug('network:setState(%s)' % state)
stime = globalClock.getRealTime() + SERVER_BUFFER_TIME
self.sendUpdate('setState', [state, globalClockDelta.localToNetworkTime(stime)])
self.setState(state)
def setState(self, state):
self.fsm.request(state)
def getState(self):
return [self.fsm.getCurrentState().getName(), globalClockDelta.getRealNetworkTime()]
def d_setMembers(self):
self.notify.debug('network:setMembers()')
self.sendUpdate('setMembers', self.getMembers())
def getMembers(self):
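        # Each membership list is encoded as a string of indices into the suits/toons
        # arrays so the client can reconstruct the battle roster compactly.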
suits = []
for s in self.suits:
suits.append(s.doId)
joiningSuits = ''
for s in self.joiningSuits:
joiningSuits += str(suits.index(s.doId))
pendingSuits = ''
for s in self.pendingSuits:
pendingSuits += str(suits.index(s.doId))
activeSuits = ''
for s in self.activeSuits:
activeSuits += str(suits.index(s.doId))
luredSuits = ''
for s in self.luredSuits:
luredSuits += str(suits.index(s.doId))
suitTraps = ''
for s in self.suits:
if s.battleTrap == NO_TRAP:
suitTraps += '9'
elif s.battleTrap == BattleCalculatorAI.TRAP_CONFLICT:
suitTraps += '9'
else:
suitTraps += str(s.battleTrap)
toons = []
for t in self.toons:
toons.append(t)
joiningToons = ''
for t in self.joiningToons:
joiningToons += str(toons.index(t))
pendingToons = ''
for t in self.pendingToons:
pendingToons += str(toons.index(t))
activeToons = ''
for t in self.activeToons:
activeToons += str(toons.index(t))
runningToons = ''
for t in self.runningToons:
runningToons += str(toons.index(t))
self.notify.debug('getMembers() - suits: %s joiningSuits: %s pendingSuits: %s activeSuits: %s luredSuits: %s suitTraps: %s toons: %s joiningToons: %s pendingToons: %s activeToons: %s runningToons: %s' % (suits,
joiningSuits,
pendingSuits,
activeSuits,
luredSuits,
suitTraps,
toons,
joiningToons,
pendingToons,
activeToons,
runningToons))
return [suits,
joiningSuits,
pendingSuits,
activeSuits,
luredSuits,
suitTraps,
toons,
joiningToons,
pendingToons,
activeToons,
runningToons,
globalClockDelta.getRealNetworkTime()]
def d_adjust(self):
self.notify.debug('network:adjust()')
self.sendUpdate('adjust', [globalClockDelta.getRealNetworkTime()])
def getInteractivePropTrackBonus(self):
return self.interactivePropTrackBonus
def getZoneId(self):
return self.zoneId
def getTaskZoneId(self):
return self.zoneId
def d_setMovie(self):
self.notify.debug('network:setMovie()')
self.sendUpdate('setMovie', self.getMovie())
self.__updateEncounteredCogs()
def getMovie(self):
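        # Flatten the chosen toon attacks and the pending suit attacks into the flat
        # parameter list that the client-side movie expects.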
suitIds = []
for s in self.activeSuits:
suitIds.append(s.doId)
p = [self.movieHasBeenMade]
p.append(self.activeToons)
p.append(suitIds)
for t in self.activeToons:
if t in self.toonAttacks:
ta = self.toonAttacks[t]
index = -1
id = ta[TOON_ID_COL]
if id != -1:
index = self.activeToons.index(id)
track = ta[TOON_TRACK_COL]
if (track == NO_ATTACK or attackAffectsGroup(track, ta[TOON_LVL_COL])) and track != NPCSOS and track != PETSOS:
target = -1
if track == HEAL:
if ta[TOON_LVL_COL] == 1:
ta[TOON_HPBONUS_COL] = random.randint(0, 10000)
elif track == SOS or track == NPCSOS or track == PETSOS:
target = ta[TOON_TGT_COL]
elif track == HEAL:
if self.activeToons.count(ta[TOON_TGT_COL]) != 0:
target = self.activeToons.index(ta[TOON_TGT_COL])
else:
target = -1
elif suitIds.count(ta[TOON_TGT_COL]) != 0:
target = suitIds.index(ta[TOON_TGT_COL])
else:
target = -1
p = p + [index,
track,
ta[TOON_LVL_COL],
target]
p = p + ta[4:]
else:
index = self.activeToons.index(t)
attack = getToonAttack(index)
p = p + attack
for i in range(4 - len(self.activeToons)):
p = p + getToonAttack(-1)
for sa in self.suitAttacks:
index = -1
id = sa[SUIT_ID_COL]
if id != -1:
index = suitIds.index(id)
if sa[SUIT_ATK_COL] == -1:
targetIndex = -1
else:
targetIndex = sa[SUIT_TGT_COL]
if targetIndex == -1:
self.notify.debug('suit attack: %d must be group' % sa[SUIT_ATK_COL])
else:
toonId = self.activeToons[targetIndex]
p = p + [index, sa[SUIT_ATK_COL], targetIndex]
sa[SUIT_TAUNT_COL] = 0
if sa[SUIT_ATK_COL] != -1:
suit = self.findSuit(id)
sa[SUIT_TAUNT_COL] = getAttackTauntIndexFromIndex(suit, sa[SUIT_ATK_COL])
p = p + sa[3:]
return p
def d_setChosenToonAttacks(self):
self.notify.debug('network:setChosenToonAttacks()')
self.sendUpdate('setChosenToonAttacks', self.getChosenToonAttacks())
def getChosenToonAttacks(self):
ids = []
tracks = []
levels = []
targets = []
for t in self.activeToons:
if t in self.toonAttacks:
ta = self.toonAttacks[t]
else:
ta = getToonAttack(t)
ids.append(t)
tracks.append(ta[TOON_TRACK_COL])
levels.append(ta[TOON_LVL_COL])
targets.append(ta[TOON_TGT_COL])
return [ids,
tracks,
levels,
targets]
def d_setBattleExperience(self):
self.notify.debug('network:setBattleExperience()')
self.sendUpdate('setBattleExperience', self.getBattleExperience())
def getBattleExperience(self):
returnValue = BattleExperienceAI.getBattleExperience(4, self.activeToons, self.toonExp, self.battleCalc.toonSkillPtsGained, self.toonOrigQuests, self.toonItems, self.toonOrigMerits, self.toonMerits, self.toonParts, self.suitsKilled, self.helpfulToons)
return returnValue
def getToonUberStatus(self):
fieldList = []
uberIndex = LAST_REGULAR_GAG_LEVEL + 1
        for toonId in self.activeToons:
            # activeToons holds avatar ids, so fetch the toon object before reading its inventory.
            toon = self.getToon(toonId)
            toonList = []
            for trackIndex in range(MAX_TRACK_INDEX):
                toonList.append(toon.inventory.numItem(trackIndex, uberIndex))
fieldList.append(encodeUber(toonList))
return fieldList
def addSuit(self, suit):
self.notify.debug('addSuit(%d)' % suit.doId)
self.newSuits.append(suit)
self.suits.append(suit)
suit.battleTrap = NO_TRAP
self.numSuitsEver += 1
def __joinSuit(self, suit):
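        # Give the suit a join window; if the clients have not all acknowledged the
        # join before the timeout, __serverJoinDone forces it into the pending list.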
self.joiningSuits.append(suit)
toPendingTime = MAX_JOIN_T + SERVER_BUFFER_TIME
taskName = self.taskName('to-pending-av-%d' % suit.doId)
self.__addJoinResponse(suit.doId, taskName)
self.taskNames.append(taskName)
taskMgr.doMethodLater(toPendingTime, self.__serverJoinDone, taskName, extraArgs=(suit.doId, taskName))
def __serverJoinDone(self, avId, taskName):
self.notify.debug('join for av: %d timed out on server' % avId)
self.__removeTaskName(taskName)
self.__makeAvPending(avId)
return Task.done
def __makeAvPending(self, avId):
self.notify.debug('__makeAvPending(%d)' % avId)
self.__removeJoinResponse(avId)
self.__removeTaskName(self.taskName('to-pending-av-%d' % avId))
if self.toons.count(avId) > 0:
self.joiningToons.remove(avId)
self.pendingToons.append(avId)
else:
suit = self.findSuit(avId)
if suit != None:
if not suit.isEmpty():
if not self.joiningSuits.count(suit) == 1:
self.notify.warning('__makeAvPending(%d) in zone: %d' % (avId, self.zoneId))
self.notify.warning('toons: %s' % self.toons)
self.notify.warning('joining toons: %s' % self.joiningToons)
self.notify.warning('pending toons: %s' % self.pendingToons)
self.notify.warning('suits: %s' % self.suits)
self.notify.warning('joining suits: %s' % self.joiningSuits)
self.notify.warning('pending suits: %s' % self.pendingSuits)
self.joiningSuits.remove(suit)
self.pendingSuits.append(suit)
else:
self.notify.warning('makeAvPending() %d not in toons or suits' % avId)
return
self.d_setMembers()
self.needAdjust = 1
self.__requestAdjust()
def suitRequestJoin(self, suit):
self.notify.debug('suitRequestJoin(%d)' % suit.getDoId())
if self.suitCanJoin():
self.addSuit(suit)
self.__joinSuit(suit)
self.d_setMembers()
suit.prepareToJoinBattle()
return 1
else:
self.notify.warning('suitRequestJoin() - not joinable - joinable state: %s max suits: %d' % (self.joinableFsm.getCurrentState().getName(), self.maxSuits))
return 0
def addToon(self, avId):
self.notify.debug('addToon(%d)' % avId)
toon = self.getToon(avId)
if toon == None:
return 0
toon.stopToonUp()
event = simbase.air.getAvatarExitEvent(avId)
self.avatarExitEvents.append(event)
self.accept(event, self.__handleUnexpectedExit, extraArgs=[avId])
event = 'inSafezone-%s' % avId
self.avatarExitEvents.append(event)
self.accept(event, self.__handleSuddenExit, extraArgs=[avId, 0])
self.newToons.append(avId)
self.toons.append(avId)
toon = simbase.air.doId2do.get(avId)
if toon:
if hasattr(self, 'doId'):
toon.b_setBattleId(self.doId)
else:
toon.b_setBattleId(-1)
messageToonAdded = 'Battle adding toon %s' % avId
messenger.send(messageToonAdded, [avId])
if self.fsm != None and self.fsm.getCurrentState().getName() == 'PlayMovie':
self.responses[avId] = 1
else:
self.responses[avId] = 0
self.adjustingResponses[avId] = 0
if avId not in self.toonExp:
p = []
for t in Tracks:
p.append(toon.experience.getExp(t))
self.toonExp[avId] = p
if avId not in self.toonOrigMerits:
self.toonOrigMerits[avId] = toon.cogMerits[:]
if avId not in self.toonMerits:
self.toonMerits[avId] = [0,
0,
0,
0,
0]
if avId not in self.toonOrigQuests:
flattenedQuests = []
for quest in toon.quests:
flattenedQuests.extend(quest)
self.toonOrigQuests[avId] = flattenedQuests
if avId not in self.toonItems:
self.toonItems[avId] = ([], [])
return 1
def __joinToon(self, avId, pos):
self.joiningToons.append(avId)
toPendingTime = MAX_JOIN_T + SERVER_BUFFER_TIME
taskName = self.taskName('to-pending-av-%d' % avId)
self.__addJoinResponse(avId, taskName, toon=1)
taskMgr.doMethodLater(toPendingTime, self.__serverJoinDone, taskName, extraArgs=(avId, taskName))
self.taskNames.append(taskName)
def __updateEncounteredCogs(self):
for toon in self.activeToons:
if toon in self.newToons:
for suit in self.activeSuits:
if hasattr(suit, 'dna'):
self.suitsEncountered.append({'type': suit.dna.name,
'activeToons': self.activeToons[:]})
else:
self.notify.warning('Suit has no DNA in zone %s: toons involved = %s' % (self.zoneId, self.activeToons))
return
self.newToons.remove(toon)
for suit in self.activeSuits:
if suit in self.newSuits:
if hasattr(suit, 'dna'):
self.suitsEncountered.append({'type': suit.dna.name,
'activeToons': self.activeToons[:]})
else:
self.notify.warning('Suit has no DNA in zone %s: toons involved = %s' % (self.zoneId, self.activeToons))
return
self.newSuits.remove(suit)
def __makeToonRun(self, toonId, updateAttacks):
self.activeToons.remove(toonId)
self.toonGone = 1
self.runningToons.append(toonId)
taskName = self.taskName('running-toon-%d' % toonId)
taskMgr.doMethodLater(TOON_RUN_T, self.__serverRunDone, taskName, extraArgs=(toonId, updateAttacks, taskName))
self.taskNames.append(taskName)
def __serverRunDone(self, toonId, updateAttacks, taskName):
self.notify.debug('run for toon: %d timed out on server' % toonId)
self.__removeTaskName(taskName)
self.__removeToon(toonId)
self.d_setMembers()
if len(self.toons) == 0:
self.notify.debug('last toon is gone - battle is finished')
self.b_setState('Resume')
else:
if updateAttacks == 1:
self.d_setChosenToonAttacks()
self.needAdjust = 1
self.__requestAdjust()
return Task.done
def __requestAdjust(self):
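        # Only start an adjustment while waiting for input or joins, and only when
        # no adjustment is already in progress.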
if not self.fsm:
return
cstate = self.fsm.getCurrentState().getName()
if cstate == 'WaitForInput' or cstate == 'WaitForJoin':
if self.adjustFsm.getCurrentState().getName() == 'NotAdjusting':
if self.needAdjust == 1:
self.d_adjust()
self.adjustingSuits = []
for s in self.pendingSuits:
self.adjustingSuits.append(s)
self.adjustingToons = []
for t in self.pendingToons:
self.adjustingToons.append(t)
self.adjustFsm.request('Adjusting')
else:
self.notify.debug('requestAdjust() - dont need to')
else:
self.notify.debug('requestAdjust() - already adjusting')
else:
self.notify.debug('requestAdjust() - in state: %s' % cstate)
def __handleUnexpectedExit(self, avId):
#TODO: fixme
#disconnectCode = self.air.getAvatarDisconnectReason(avId)
disconnectCode = "placeHolder dc code, need self.air.getAvatarDisconnectReason(avId)"
self.notify.warning('toon: %d exited unexpectedly, reason %s' % (avId, disconnectCode))
#userAborted = disconnectCode == ToontownGlobals.DisconnectCloseWindow
#TODO: fixme
userAborted = False
self.__handleSuddenExit(avId, userAborted)
def __handleSuddenExit(self, avId, userAborted):
self.__removeToon(avId, userAborted=userAborted)
if self.fsm.getCurrentState().getName() == 'PlayMovie' or self.fsm.getCurrentState().getName() == 'MakeMovie':
self.exitedToons.append(avId)
self.d_setMembers()
if len(self.toons) == 0:
self.notify.debug('last toon is gone - battle is finished')
self.__removeAllTasks()
self.timer.stop()
self.adjustingTimer.stop()
self.b_setState('Resume')
else:
self.needAdjust = 1
self.__requestAdjust()
def __removeSuit(self, suit):
self.notify.debug('__removeSuit(%d)' % suit.doId)
self.suits.remove(suit)
self.activeSuits.remove(suit)
if self.luredSuits.count(suit) == 1:
self.luredSuits.remove(suit)
self.suitGone = 1
del suit.battleTrap
def __removeToon(self, toonId, userAborted = 0):
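        # Pull the toon out of every membership list, cancel its pending tasks and
        # responses, and release its battle-related state.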
self.notify.debug('__removeToon(%d)' % toonId)
if self.toons.count(toonId) == 0:
return
self.battleCalc.toonLeftBattle(toonId)
self.__removeToonTasks(toonId)
self.toons.remove(toonId)
if self.joiningToons.count(toonId) == 1:
self.joiningToons.remove(toonId)
if self.pendingToons.count(toonId) == 1:
self.pendingToons.remove(toonId)
if self.activeToons.count(toonId) == 1:
activeToonIdx = self.activeToons.index(toonId)
self.notify.debug('removing activeToons[%d], updating suitAttacks SUIT_HP_COL to match' % activeToonIdx)
for i in range(len(self.suitAttacks)):
if activeToonIdx < len(self.suitAttacks[i][SUIT_HP_COL]):
del self.suitAttacks[i][SUIT_HP_COL][activeToonIdx]
else:
self.notify.warning("suitAttacks %d doesn't have an HP column for active toon index %d" % (i, activeToonIdx))
self.activeToons.remove(toonId)
if self.runningToons.count(toonId) == 1:
self.runningToons.remove(toonId)
if self.adjustingToons.count(toonId) == 1:
self.notify.warning('removeToon() - toon: %d was adjusting!' % toonId)
self.adjustingToons.remove(toonId)
self.toonGone = 1
if toonId in self.pets:
self.pets[toonId].requestDelete()
del self.pets[toonId]
self.__removeResponse(toonId)
self.__removeAdjustingResponse(toonId)
self.__removeJoinResponses(toonId)
event = simbase.air.getAvatarExitEvent(toonId)
self.avatarExitEvents.remove(event)
self.ignore(event)
event = 'inSafezone-%s' % toonId
self.avatarExitEvents.remove(event)
self.ignore(event)
toon = simbase.air.doId2do.get(toonId)
if toon:
toon.b_setBattleId(0)
messageToonReleased = 'Battle releasing toon %s' % toon.doId
messenger.send(messageToonReleased, [toon.doId])
if not userAborted:
toon = self.getToon(toonId)
if toon != None:
toon.hpOwnedByBattle = 0
toon.d_setHp(toon.hp)
toon.d_setInventory(toon.inventory.makeNetString())
self.air.cogPageManager.toonEncounteredCogs(toon, self.suitsEncountered, self.getTaskZoneId())
elif len(self.suits) > 0 and not self.streetBattle:
self.notify.info('toon %d aborted non-street battle; clearing inventory and hp.' % toonId)
toon = DistributedToonAI.DistributedToonAI(self.air)
toon.doId = toonId
empty = InventoryBase.InventoryBase(toon)
toon.b_setInventory(empty.makeNetString())
toon.b_setHp(0)
db = DatabaseObject.DatabaseObject(self.air, toonId)
db.storeObject(toon, ['setInventory', 'setHp'])
self.notify.info('killing mem leak from temporary DistributedToonAI %d' % toonId)
toon.deleteDummy()
def getToon(self, toonId):
if toonId in self.air.doId2do:
return self.air.doId2do[toonId]
else:
self.notify.warning('getToon() - toon: %d not in repository!' % toonId)
return
def toonRequestRun(self):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreResponses == 1:
self.notify.debug('ignoring response from toon: %d' % toonId)
return
self.notify.debug('toonRequestRun(%d)' % toonId)
if not self.isRunable():
self.notify.warning('toonRequestRun() - not runable')
return
updateAttacks = 0
if self.activeToons.count(toonId) == 0:
self.notify.warning('toon tried to run, but not found in activeToons: %d' % toonId)
return
for toon in self.activeToons:
if toon in self.toonAttacks:
ta = self.toonAttacks[toon]
track = ta[TOON_TRACK_COL]
level = ta[TOON_LVL_COL]
if ta[TOON_TGT_COL] == toonId or track == HEAL and attackAffectsGroup(track, level) and len(self.activeToons) <= 2:
healerId = ta[TOON_ID_COL]
self.notify.debug('resetting toon: %ds attack' % healerId)
self.toonAttacks[toon] = getToonAttack(toon, track=UN_ATTACK)
self.responses[healerId] = 0
updateAttacks = 1
self.__makeToonRun(toonId, updateAttacks)
self.d_setMembers()
self.needAdjust = 1
self.__requestAdjust()
def toonRequestJoin(self, x, y, z):
toonId = self.air.getAvatarIdFromSender()
self.notify.debug('toonRequestJoin(%d)' % toonId)
self.signupToon(toonId, x, y, z)
def toonDied(self):
toonId = self.air.getAvatarIdFromSender()
self.notify.debug('toonDied(%d)' % toonId)
if toonId in self.toons:
toon = self.getToon(toonId)
if toon:
toon.hp = -1
toon.inventory.zeroInv(1)
self.__handleSuddenExit(toonId, 0)
def signupToon(self, toonId, x, y, z):
if self.toons.count(toonId):
return
if self.toonCanJoin():
if self.addToon(toonId):
self.__joinToon(toonId, Point3(x, y, z))
self.d_setMembers()
else:
self.notify.warning('toonRequestJoin() - not joinable')
self.d_denyLocalToonJoin(toonId)
def d_denyLocalToonJoin(self, toonId):
self.notify.debug('network: denyLocalToonJoin(%d)' % toonId)
self.sendUpdateToAvatarId(toonId, 'denyLocalToonJoin', [])
def resetResponses(self):
self.responses = {}
for t in self.toons:
self.responses[t] = 0
self.ignoreResponses = 0
def allToonsResponded(self):
for t in self.toons:
if self.responses[t] == 0:
return 0
self.ignoreResponses = 1
return 1
def __allPendingActiveToonsResponded(self):
for t in self.pendingToons + self.activeToons:
if self.responses[t] == 0:
return 0
self.ignoreResponses = 1
return 1
def __allActiveToonsResponded(self):
for t in self.activeToons:
if self.responses[t] == 0:
return 0
self.ignoreResponses = 1
return 1
def __removeResponse(self, toonId):
del self.responses[toonId]
if self.ignoreResponses == 0 and len(self.toons) > 0:
currStateName = self.fsm.getCurrentState().getName()
if currStateName == 'WaitForInput':
if self.__allActiveToonsResponded():
self.notify.debug('removeResponse() - dont wait for movie')
self.__requestMovie()
elif currStateName == 'PlayMovie':
if self.__allPendingActiveToonsResponded():
self.notify.debug('removeResponse() - surprise movie done')
self.__movieDone()
elif currStateName == 'Reward' or currStateName == 'BuildingReward':
if self.__allActiveToonsResponded():
self.notify.debug('removeResponse() - surprise reward done')
self.handleRewardDone()
def __resetAdjustingResponses(self):
self.adjustingResponses = {}
for t in self.toons:
self.adjustingResponses[t] = 0
self.ignoreAdjustingResponses = 0
def __allAdjustingToonsResponded(self):
for t in self.toons:
if self.adjustingResponses[t] == 0:
return 0
self.ignoreAdjustingResponses = 1
return 1
def __removeAdjustingResponse(self, toonId):
if toonId in self.adjustingResponses:
del self.adjustingResponses[toonId]
if self.ignoreAdjustingResponses == 0 and len(self.toons) > 0:
if self.__allAdjustingToonsResponded():
self.__adjustDone()
def __addJoinResponse(self, avId, taskName, toon = 0):
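        # Track which clients still need to acknowledge this avatar's join; the
        # timeout task name is kept so it can be cancelled later.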
if toon == 1:
for jr in self.joinResponses.values():
jr[avId] = 0
self.joinResponses[avId] = {}
for t in self.toons:
self.joinResponses[avId][t] = 0
self.joinResponses[avId]['taskName'] = taskName
def __removeJoinResponses(self, avId):
self.__removeJoinResponse(avId)
removedOne = 0
for j in self.joinResponses.values():
if avId in j:
del j[avId]
removedOne = 1
if removedOne == 1:
for t in self.joiningToons:
if self.__allToonsRespondedJoin(t):
self.__makeAvPending(t)
def __removeJoinResponse(self, avId):
if avId in self.joinResponses:
taskMgr.remove(self.joinResponses[avId]['taskName'])
del self.joinResponses[avId]
def __allToonsRespondedJoin(self, avId):
jr = self.joinResponses[avId]
for t in self.toons:
if jr[t] == 0:
return 0
return 1
def __cleanupJoinResponses(self):
for jr in self.joinResponses.values():
taskMgr.remove(jr['taskName'])
del jr
def adjustDone(self):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreAdjustingResponses == 1:
self.notify.debug('adjustDone() - ignoring toon: %d' % toonId)
return
elif self.adjustFsm.getCurrentState().getName() != 'Adjusting':
self.notify.warning('adjustDone() - in state %s' % self.fsm.getCurrentState().getName())
return
elif self.toons.count(toonId) == 0:
self.notify.warning('adjustDone() - toon: %d not in toon list' % toonId)
return
self.adjustingResponses[toonId] += 1
self.notify.debug('toon: %d done adjusting' % toonId)
if self.__allAdjustingToonsResponded():
self.__adjustDone()
def timeout(self):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreResponses == 1:
self.notify.debug('timeout() - ignoring toon: %d' % toonId)
return
elif self.fsm.getCurrentState().getName() != 'WaitForInput':
self.notify.warning('timeout() - in state: %s' % self.fsm.getCurrentState().getName())
return
elif self.toons.count(toonId) == 0:
self.notify.warning('timeout() - toon: %d not in toon list' % toonId)
return
self.toonAttacks[toonId] = getToonAttack(toonId)
self.d_setChosenToonAttacks()
self.responses[toonId] += 1
self.notify.debug('toon: %d timed out' % toonId)
if self.__allActiveToonsResponded():
self.__requestMovie(timeout=1)
def movieDone(self):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreResponses == 1:
self.notify.debug('movieDone() - ignoring toon: %d' % toonId)
return
elif self.fsm.getCurrentState().getName() != 'PlayMovie':
self.notify.warning('movieDone() - in state %s' % self.fsm.getCurrentState().getName())
return
elif self.toons.count(toonId) == 0:
self.notify.warning('movieDone() - toon: %d not in toon list' % toonId)
return
self.responses[toonId] += 1
self.notify.debug('toon: %d done with movie' % toonId)
if self.__allPendingActiveToonsResponded():
self.__movieDone()
else:
self.timer.stop()
self.timer.startCallback(TIMEOUT_PER_USER, self.__serverMovieDone)
def rewardDone(self):
toonId = self.air.getAvatarIdFromSender()
stateName = self.fsm.getCurrentState().getName()
if self.ignoreResponses == 1:
self.notify.debug('rewardDone() - ignoring toon: %d' % toonId)
return
elif stateName not in ('Reward', 'BuildingReward', 'FactoryReward', 'MintReward', 'StageReward', 'CountryClubReward'):
self.notify.warning('rewardDone() - in state %s' % stateName)
return
elif self.toons.count(toonId) == 0:
self.notify.warning('rewardDone() - toon: %d not in toon list' % toonId)
return
self.responses[toonId] += 1
self.notify.debug('toon: %d done with reward' % toonId)
if self.__allActiveToonsResponded():
self.handleRewardDone()
else:
self.timer.stop()
self.timer.startCallback(TIMEOUT_PER_USER, self.serverRewardDone)
def assignRewards(self):
if self.rewardHasPlayed == 1:
self.notify.debug('handleRewardDone() - reward has already played')
return
self.rewardHasPlayed = 1
BattleExperienceAI.assignRewards(self.activeToons, self.battleCalc.toonSkillPtsGained, self.suitsKilled, self.getTaskZoneId(), self.helpfulToons)
def joinDone(self, avId):
toonId = self.air.getAvatarIdFromSender()
if self.toons.count(toonId) == 0:
self.notify.warning('joinDone() - toon: %d not in toon list' % toonId)
return
if avId not in self.joinResponses:
self.notify.debug('joinDone() - no entry for: %d - ignoring: %d' % (avId, toonId))
return
jr = self.joinResponses[avId]
if toonId in jr:
jr[toonId] += 1
self.notify.debug('client with localToon: %d done joining av: %d' % (toonId, avId))
if self.__allToonsRespondedJoin(avId):
self.__makeAvPending(avId)
def requestAttack(self, track, level, av):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreResponses == 1:
self.notify.debug('requestAttack() - ignoring toon: %d' % toonId)
return
elif self.fsm.getCurrentState().getName() != 'WaitForInput':
self.notify.warning('requestAttack() - in state: %s' % self.fsm.getCurrentState().getName())
return
elif self.activeToons.count(toonId) == 0:
self.notify.warning('requestAttack() - toon: %d not in toon list' % toonId)
return
self.notify.debug('requestAttack(%d, %d, %d, %d)' % (toonId,
track,
level,
av))
toon = self.getToon(toonId)
if toon == None:
self.notify.warning('requestAttack() - no toon: %d' % toonId)
return
validResponse = 1
if track == SOS:
self.notify.debug('toon: %d calls for help' % toonId)
self.air.writeServerEvent('friendSOS', toonId, '%s' % av)
self.toonAttacks[toonId] = getToonAttack(toonId, track=SOS, target=av)
elif track == NPCSOS:
self.notify.debug('toon: %d calls for help' % toonId)
self.air.writeServerEvent('NPCSOS', toonId, '%s' % av)
toon = self.getToon(toonId)
if toon == None:
return
if av in toon.NPCFriendsDict:
npcCollision = 0
if av in self.npcAttacks:
callingToon = self.npcAttacks[av]
if self.activeToons.count(callingToon) == 1:
self.toonAttacks[toonId] = getToonAttack(toonId, track=PASS)
npcCollision = 1
if npcCollision == 0:
self.toonAttacks[toonId] = getToonAttack(toonId, track=NPCSOS, level=5, target=av)
self.numNPCAttacks += 1
self.npcAttacks[av] = toonId
elif track == PETSOS:
self.notify.debug('toon: %d calls for pet: %d' % (toonId, av))
self.air.writeServerEvent('PETSOS', toonId, '%s' % av)
toon = self.getToon(toonId)
if toon == None:
return
if not self.validate(toonId, level in toon.petTrickPhrases, 'requestAttack: invalid pet trickId: %s' % level):
return
self.toonAttacks[toonId] = getToonAttack(toonId, track=PETSOS, level=level, target=av)
elif track == UN_ATTACK:
self.notify.debug('toon: %d changed its mind' % toonId)
self.toonAttacks[toonId] = getToonAttack(toonId, track=UN_ATTACK)
if toonId in self.responses:
self.responses[toonId] = 0
validResponse = 0
elif track == PASS:
self.toonAttacks[toonId] = getToonAttack(toonId, track=PASS)
elif track == FIRE:
if simbase.air.doId2do[toonId].getPinkSlips() < self.getFireCount() + 1:
#Not allowed to fire, force them to pass >:D
self.toonAttacks[toonId] = getToonAttack(toonId, track=PASS)
else:
#Allowed to fire
self.setFireCount(self.fireCount + 1)
self.toonAttacks[toonId] = getToonAttack(toonId, track=FIRE, target=av)
else:
if not self.validate(toonId, track >= 0 and track <= MAX_TRACK_INDEX, 'requestAttack: invalid track %s' % track):
return
if not self.validate(toonId, level >= 0 and level <= MAX_LEVEL_INDEX, 'requestAttack: invalid level %s' % level):
return
if toon.inventory.numItem(track, level) == 0:
self.notify.warning('requestAttack() - toon has no item track: %d level: %d' % (track, level))
self.toonAttacks[toonId] = getToonAttack(toonId)
return
if track == HEAL:
if self.runningToons.count(av) == 1 or attackAffectsGroup(track, level) and len(self.activeToons) < 2:
self.toonAttacks[toonId] = getToonAttack(toonId, track=UN_ATTACK)
validResponse = 0
else:
self.toonAttacks[toonId] = getToonAttack(toonId, track=track, level=level, target=av)
else:
self.toonAttacks[toonId] = getToonAttack(toonId, track=track, level=level, target=av)
if av == -1 and not attackAffectsGroup(track, level):
validResponse = 0
self.d_setChosenToonAttacks()
if validResponse == 1:
self.responses[toonId] += 1
self.notify.debug('toon: %d chose an attack' % toonId)
if self.__allActiveToonsResponded():
self.__requestMovie()
def requestPetProxy(self, av):
toonId = self.air.getAvatarIdFromSender()
if self.ignoreResponses == 1:
self.notify.debug('requestPetProxy() - ignoring toon: %d' % toonId)
return
elif self.fsm.getCurrentState().getName() != 'WaitForInput':
self.notify.warning('requestPetProxy() - in state: %s' % self.fsm.getCurrentState().getName())
return
elif self.activeToons.count(toonId) == 0:
self.notify.warning('requestPetProxy() - toon: %d not in toon list' % toonId)
return
self.notify.debug('requestPetProxy(%s, %s)' % (toonId, av))
toon = self.getToon(toonId)
if toon == None:
self.notify.warning('requestPetProxy() - no toon: %d' % toonId)
return
petId = toon.getPetId()
zoneId = self.zoneId
if petId == av:
if not toonId in self.pets:
def handleGetPetProxy(success, pet, petId = petId, zoneId = zoneId, toonId = toonId):
if success:
petProxy = DistributedPetProxyAI.DistributedPetProxyAI(self.air)
petProxy.setOwnerId(pet.getOwnerId())
petProxy.setPetName(pet.getPetName())
petProxy.setTraitSeed(pet.getTraitSeed())
petProxy.setSafeZone(pet.getSafeZone())
petProxy.setForgetfulness(pet.getForgetfulness())
petProxy.setBoredomThreshold(pet.getBoredomThreshold())
petProxy.setRestlessnessThreshold(pet.getRestlessnessThreshold())
petProxy.setPlayfulnessThreshold(pet.getPlayfulnessThreshold())
petProxy.setLonelinessThreshold(pet.getLonelinessThreshold())
petProxy.setSadnessThreshold(pet.getSadnessThreshold())
petProxy.setFatigueThreshold(pet.getFatigueThreshold())
petProxy.setHungerThreshold(pet.getHungerThreshold())
petProxy.setConfusionThreshold(pet.getConfusionThreshold())
petProxy.setExcitementThreshold(pet.getExcitementThreshold())
petProxy.setAngerThreshold(pet.getAngerThreshold())
petProxy.setSurpriseThreshold(pet.getSurpriseThreshold())
petProxy.setAffectionThreshold(pet.getAffectionThreshold())
petProxy.setHead(pet.getHead())
petProxy.setEars(pet.getEars())
petProxy.setNose(pet.getNose())
petProxy.setTail(pet.getTail())
petProxy.setBodyTexture(pet.getBodyTexture())
petProxy.setColor(pet.getColor())
petProxy.setColorScale(pet.getColorScale())
petProxy.setEyeColor(pet.getEyeColor())
petProxy.setGender(pet.getGender())
petProxy.setLastSeenTimestamp(pet.getLastSeenTimestamp())
petProxy.setBoredom(pet.getBoredom())
petProxy.setRestlessness(pet.getRestlessness())
petProxy.setPlayfulness(pet.getPlayfulness())
petProxy.setLoneliness(pet.getLoneliness())
petProxy.setSadness(pet.getSadness())
petProxy.setAffection(pet.getAffection())
petProxy.setHunger(pet.getHunger())
petProxy.setConfusion(pet.getConfusion())
petProxy.setExcitement(pet.getExcitement())
petProxy.setFatigue(pet.getFatigue())
petProxy.setAnger(pet.getAnger())
petProxy.setSurprise(pet.getSurprise())
petProxy.setTrickAptitudes(pet.getTrickAptitudes())
pet.requestDelete()
def deleted(task):
petProxy.doNotDeallocateChannel = True
petProxy.generateWithRequiredAndId(petId, self.air.districtId, self.zoneId)
petProxy.broadcastDominantMood()
self.pets[toonId] = petProxy
return task.done
self.acceptOnce(self.air.getAvatarExitEvent(petId),
lambda: taskMgr.doMethodLater(0,
deleted, self.uniqueName('petdel-%d' % petId)))
else:
self.notify.warning('error generating petProxy: %s' % petId)
self.getPetProxyObject(petId, handleGetPetProxy)
def suitCanJoin(self):
return len(self.suits) < self.maxSuits and self.isJoinable()
def toonCanJoin(self):
return len(self.toons) < 4 and self.isJoinable()
def __requestMovie(self, timeout = 0):
if self.adjustFsm.getCurrentState().getName() == 'Adjusting':
self.notify.debug('__requestMovie() - in Adjusting')
self.movieRequested = 1
else:
movieDelay = 0
if len(self.activeToons) == 0:
self.notify.warning('only pending toons left in battle %s, toons = %s' % (self.doId, self.toons))
elif len(self.activeSuits) == 0:
self.notify.warning('only pending suits left in battle %s, suits = %s' % (self.doId, self.suits))
elif len(self.activeToons) > 1 and not timeout:
movieDelay = 1
self.fsm.request('MakeMovie')
if movieDelay:
taskMgr.doMethodLater(0.8, self.__makeMovie, self.uniqueName('make-movie'))
self.taskNames.append(self.uniqueName('make-movie'))
else:
self.__makeMovie()
def __makeMovie(self, task = None):
self.notify.debug('makeMovie()')
if self._DOAI_requestedDelete:
self.notify.warning('battle %s requested delete, then __makeMovie was called!' % self.doId)
if hasattr(self, 'levelDoId'):
self.notify.warning('battle %s in level %s' % (self.doId, self.levelDoId))
return
self.__removeTaskName(self.uniqueName('make-movie'))
if self.movieHasBeenMade == 1:
self.notify.debug('__makeMovie() - movie has already been made')
return
self.movieRequested = 0
self.movieHasBeenMade = 1
self.movieHasPlayed = 0
self.rewardHasPlayed = 0
for t in self.activeToons:
if t not in self.toonAttacks:
self.toonAttacks[t] = getToonAttack(t)
attack = self.toonAttacks[t]
if attack[TOON_TRACK_COL] == PASS or attack[TOON_TRACK_COL] == UN_ATTACK:
self.toonAttacks[t] = getToonAttack(t)
if self.toonAttacks[t][TOON_TRACK_COL] != NO_ATTACK:
self.addHelpfulToon(t)
self.battleCalc.calculateRound()
for t in self.activeToons:
self.sendEarnedExperience(t)
toon = self.getToon(t)
if toon != None:
toon.hpOwnedByBattle = 1
if toon.immortalMode:
toon.toonUp(toon.maxHp)
self.d_setMovie()
self.b_setState('PlayMovie')
return Task.done
def sendEarnedExperience(self, toonId):
toon = self.getToon(toonId)
if toon != None:
expList = self.battleCalc.toonSkillPtsGained.get(toonId, None)
if expList == None:
toon.d_setEarnedExperience([])
else:
roundList = []
for exp in expList:
roundList.append(int(exp + 0.5))
toon.d_setEarnedExperience(roundList)
def enterOff(self):
return
def exitOff(self):
return
def enterFaceOff(self):
return
def exitFaceOff(self):
return
def enterWaitForJoin(self):
self.notify.debug('enterWaitForJoin()')
if len(self.activeSuits) > 0:
self.b_setState('WaitForInput')
else:
self.notify.debug('enterWaitForJoin() - no active suits')
self.runableFsm.request('Runable')
self.resetResponses()
self.__requestAdjust()
def exitWaitForJoin(self):
pass
def enterWaitForInput(self):
self.notify.debug('enterWaitForInput()')
self.joinableFsm.request('Joinable')
self.runableFsm.request('Runable')
self.resetResponses()
self.__requestAdjust()
if not self.tutorialFlag:
self.timer.startCallback(SERVER_INPUT_TIMEOUT, self.__serverTimedOut)
self.npcAttacks = {}
for toonId in self.toons:
if bboard.get('autoRestock-%s' % toonId, False):
toon = self.air.doId2do.get(toonId)
if toon is not None:
toon.doRestock(0)
def exitWaitForInput(self):
self.npcAttacks = {}
self.timer.stop()
def __serverTimedOut(self):
self.notify.debug('wait for input timed out on server')
self.ignoreResponses = 1
self.__requestMovie(timeout=1)
def enterMakeMovie(self):
self.notify.debug('enterMakeMovie()')
self.runableFsm.request('Unrunable')
self.resetResponses()
def exitMakeMovie(self):
pass
def enterPlayMovie(self):
self.notify.debug('enterPlayMovie()')
self.joinableFsm.request('Joinable')
self.runableFsm.request('Unrunable')
self.resetResponses()
movieTime = TOON_ATTACK_TIME * (len(self.activeToons) + self.numNPCAttacks) + SUIT_ATTACK_TIME * len(self.activeSuits) + SERVER_BUFFER_TIME
self.numNPCAttacks = 0
self.notify.debug('estimated upper bound of movie time: %f' % movieTime)
self.timer.startCallback(movieTime, self.__serverMovieDone)
def __serverMovieDone(self):
self.notify.debug('movie timed out on server')
self.ignoreResponses = 1
self.__movieDone()
def serverRewardDone(self):
self.notify.debug('reward timed out on server')
self.ignoreResponses = 1
self.handleRewardDone()
def handleRewardDone(self):
self.b_setState('Resume')
def exitPlayMovie(self):
self.timer.stop()
def __movieDone(self):
self.notify.debug('__movieDone() - movie is finished')
if self.movieHasPlayed == 1:
self.notify.debug('__movieDone() - movie had already finished')
return
self.movieHasBeenMade = 0
self.movieHasPlayed = 1
self.ignoreResponses = 1
needUpdate = 0
toonHpDict = {}
for toon in self.activeToons:
toonHpDict[toon] = [0, 0, 0]
actualToon = self.getToon(toon)
self.notify.debug('BEFORE ROUND: toon: %d hp: %d' % (toon, actualToon.hp))
deadSuits = []
trapDict = {}
suitsLuredOntoTraps = []
npcTrapAttacks = []
for activeToon in self.activeToons + self.exitedToons:
if activeToon in self.toonAttacks:
attack = self.toonAttacks[activeToon]
track = attack[TOON_TRACK_COL]
npc_level = None
if track == NPCSOS:
track, npc_level, npc_hp = NPCToons.getNPCTrackLevelHp(attack[TOON_TGT_COL])
if track == None:
track = NPCSOS
elif track == TRAP:
npcTrapAttacks.append(attack)
toon = self.getToon(attack[TOON_ID_COL])
av = attack[TOON_TGT_COL]
if toon != None and av in toon.NPCFriendsDict:
toon.NPCFriendsDict[av] -= 1
if toon.NPCFriendsDict[av] <= 0:
del toon.NPCFriendsDict[av]
toon.d_setNPCFriendsDict(toon.NPCFriendsDict)
continue
if track != NO_ATTACK:
toonId = attack[TOON_ID_COL]
level = attack[TOON_LVL_COL]
if npc_level != None:
level = npc_level
if attack[TOON_TRACK_COL] == NPCSOS:
toon = self.getToon(toonId)
av = attack[TOON_TGT_COL]
if toon != None and av in toon.NPCFriendsDict:
toon.NPCFriendsDict[av] -= 1
if toon.NPCFriendsDict[av] <= 0:
del toon.NPCFriendsDict[av]
toon.d_setNPCFriendsDict(toon.NPCFriendsDict)
elif track == PETSOS:
pass
elif track == FIRE:
pass
elif track != SOS:
toon = self.getToon(toonId)
if toon != None:
check = toon.inventory.useItem(track, level)
if check == -1:
                                self.air.writeServerEvent('suspicious', toonId, 'Toon generating movie for non-existent gag track %s level %s' % (track, level))
                                self.notify.warning('generating movie for non-existent gag track %s level %s! avId: %s' % (track, level, toonId))
toon.d_setInventory(toon.inventory.makeNetString())
hps = attack[TOON_HP_COL]
if track == SOS:
self.notify.debug('toon: %d called for help' % toonId)
elif track == NPCSOS:
self.notify.debug('toon: %d called for help' % toonId)
elif track == PETSOS:
self.notify.debug('toon: %d called for pet' % toonId)
for i in range(len(self.activeToons)):
toon = self.getToon(self.activeToons[i])
if toon != None:
if i < len(hps):
hp = hps[i]
if hp > 0:
toonHpDict[toon.doId][0] += hp
self.notify.debug('pet heal: toon: %d healed for hp: %d' % (toon.doId, hp))
else:
self.notify.warning('Invalid targetIndex %s in hps %s.' % (i, hps))
elif track == NPC_RESTOCK_GAGS:
for at in self.activeToons:
toon = self.getToon(at)
if toon != None:
toon.inventory.NPCMaxOutInv(npc_level)
toon.d_setInventory(toon.inventory.makeNetString())
elif track == HEAL:
if levelAffectsGroup(HEAL, level):
for i in range(len(self.activeToons)):
at = self.activeToons[i]
if at != toonId or attack[TOON_TRACK_COL] == NPCSOS:
toon = self.getToon(at)
if toon != None:
if i < len(hps):
hp = hps[i]
else:
self.notify.warning('Invalid targetIndex %s in hps %s.' % (i, hps))
hp = 0
toonHpDict[toon.doId][0] += hp
self.notify.debug('HEAL: toon: %d healed for hp: %d' % (toon.doId, hp))
else:
targetId = attack[TOON_TGT_COL]
toon = self.getToon(targetId)
if toon != None and targetId in self.activeToons:
targetIndex = self.activeToons.index(targetId)
if targetIndex < len(hps):
hp = hps[targetIndex]
else:
self.notify.warning('Invalid targetIndex %s in hps %s.' % (targetIndex, hps))
hp = 0
toonHpDict[toon.doId][0] += hp
elif attackAffectsGroup(track, level, attack[TOON_TRACK_COL]):
for suit in self.activeSuits:
targetIndex = self.activeSuits.index(suit)
if targetIndex < 0 or targetIndex >= len(hps):
self.notify.warning('Got attack (%s, %s) on target suit %s, but hps has only %s entries: %s' % (track,
level,
targetIndex,
len(hps),
hps))
else:
hp = hps[targetIndex]
if hp > 0 and track == LURE:
if suit.battleTrap == UBER_GAG_LEVEL_INDEX:
pass
suit.battleTrap = NO_TRAP
needUpdate = 1
if suit.doId in trapDict:
del trapDict[suit.doId]
if suitsLuredOntoTraps.count(suit) == 0:
suitsLuredOntoTraps.append(suit)
if track == TRAP:
targetId = suit.doId
if targetId in trapDict:
trapDict[targetId].append(attack)
else:
trapDict[targetId] = [attack]
needUpdate = 1
died = attack[SUIT_DIED_COL] & 1 << targetIndex
if died != 0:
if deadSuits.count(suit) == 0:
deadSuits.append(suit)
else:
targetId = attack[TOON_TGT_COL]
target = self.findSuit(targetId)
if target != None:
targetIndex = self.activeSuits.index(target)
if targetIndex < 0 or targetIndex >= len(hps):
self.notify.warning('Got attack (%s, %s) on target suit %s, but hps has only %s entries: %s' % (track,
level,
targetIndex,
len(hps),
hps))
else:
hp = hps[targetIndex]
if track == TRAP:
if targetId in trapDict:
trapDict[targetId].append(attack)
else:
trapDict[targetId] = [attack]
if hp > 0 and track == LURE:
oldBattleTrap = target.battleTrap
if oldBattleTrap == UBER_GAG_LEVEL_INDEX:
pass
target.battleTrap = NO_TRAP
needUpdate = 1
if target.doId in trapDict:
del trapDict[target.doId]
if suitsLuredOntoTraps.count(target) == 0:
suitsLuredOntoTraps.append(target)
if oldBattleTrap == UBER_GAG_LEVEL_INDEX:
for otherSuit in self.activeSuits:
if not otherSuit == target:
otherSuit.battleTrap = NO_TRAP
if otherSuit.doId in trapDict:
del trapDict[otherSuit.doId]
died = attack[SUIT_DIED_COL] & 1 << targetIndex
if died != 0:
if deadSuits.count(target) == 0:
deadSuits.append(target)
self.exitedToons = []
for suitKey in trapDict.keys():
attackList = trapDict[suitKey]
attack = attackList[0]
target = self.findSuit(attack[TOON_TGT_COL])
if attack[TOON_LVL_COL] == UBER_GAG_LEVEL_INDEX:
targetId = suitKey
target = self.findSuit(targetId)
if len(attackList) == 1:
if suitsLuredOntoTraps.count(target) == 0:
self.notify.debug('movieDone() - trap set')
target.battleTrap = attack[TOON_LVL_COL]
needUpdate = 1
else:
target.battleTrap = NO_TRAP
else:
self.notify.debug('movieDone() - traps collided')
if target != None:
target.battleTrap = NO_TRAP
if self.battleCalc.trainTrapTriggered:
self.notify.debug('Train trap triggered, clearing all traps')
for otherSuit in self.activeSuits:
self.notify.debug('suit =%d, oldBattleTrap=%d' % (otherSuit.doId, otherSuit.battleTrap))
otherSuit.battleTrap = NO_TRAP
currLuredSuits = self.battleCalc.getLuredSuits()
if len(self.luredSuits) == len(currLuredSuits):
for suit in self.luredSuits:
if currLuredSuits.count(suit.doId) == 0:
needUpdate = 1
break
else:
needUpdate = 1
self.luredSuits = []
for i in currLuredSuits:
suit = self.air.doId2do[i]
self.luredSuits.append(suit)
self.notify.debug('movieDone() - suit: %d is lured' % i)
for attack in npcTrapAttacks:
track, level, hp = NPCToons.getNPCTrackLevelHp(attack[TOON_TGT_COL])
for suit in self.activeSuits:
if self.luredSuits.count(suit) == 0 and suit.battleTrap == NO_TRAP:
suit.battleTrap = level
needUpdate = 1
for suit in deadSuits:
self.notify.debug('removing dead suit: %d' % suit.doId)
if suit.isDeleted():
self.notify.debug('whoops, suit %d is deleted.' % suit.doId)
else:
self.notify.debug('suit had revives? %d' % suit.getMaxSkeleRevives())
encounter = {'type': suit.dna.name,
'level': suit.getActualLevel(),
'track': suit.dna.dept,
'isSkelecog': suit.getSkelecog(),
'isForeman': suit.isForeman(),
'isVP': 0,
'isCFO': 0,
'isSupervisor': suit.isSupervisor(),
'isVirtual': suit.isVirtual(),
'hasRevives': suit.getMaxSkeleRevives(),
'activeToons': self.activeToons[:]}
self.suitsKilled.append(encounter)
self.suitsKilledThisBattle.append(encounter)
self.air.suitInvasionManager.handleSuitDefeated()
self.__removeSuit(suit)
needUpdate = 1
suit.resume()
lastActiveSuitDied = 0
if len(self.activeSuits) == 0 and len(self.pendingSuits) == 0:
lastActiveSuitDied = 1
for i in range(4):
attack = self.suitAttacks[i][SUIT_ATK_COL]
if attack != NO_ATTACK:
suitId = self.suitAttacks[i][SUIT_ID_COL]
suit = self.findSuit(suitId)
if suit == None:
self.notify.warning('movieDone() - suit: %d is gone!' % suitId)
continue
if not (hasattr(suit, 'dna') and suit.dna):
toonId = self.air.getAvatarIdFromSender()
self.notify.warning('_movieDone avoiding crash, sender=%s but suit has no dna' % toonId)
self.air.writeServerEvent('suspicious', toonId, '_movieDone avoiding crash, suit has no dna')
continue
adict = getSuitAttack(suit.getStyleName(), suit.getLevel(), attack)
hps = self.suitAttacks[i][SUIT_HP_COL]
if adict['group'] == ATK_TGT_GROUP:
for activeToon in self.activeToons:
toon = self.getToon(activeToon)
if toon != None:
targetIndex = self.activeToons.index(activeToon)
toonDied = self.suitAttacks[i][TOON_DIED_COL] & 1 << targetIndex
if targetIndex >= len(hps):
self.notify.warning('DAMAGE: toon %s is no longer in battle!' % activeToon)
else:
hp = hps[targetIndex]
if hp > 0:
self.notify.debug('DAMAGE: toon: %d hit for dmg: %d' % (activeToon, hp))
if toonDied != 0:
toonHpDict[toon.doId][2] = 1
toonHpDict[toon.doId][1] += hp
elif adict['group'] == ATK_TGT_SINGLE:
targetIndex = self.suitAttacks[i][SUIT_TGT_COL]
if targetIndex >= len(self.activeToons):
self.notify.warning('movieDone() - toon: %d gone!' % targetIndex)
break
toonId = self.activeToons[targetIndex]
toon = self.getToon(toonId)
toonDied = self.suitAttacks[i][TOON_DIED_COL] & 1 << targetIndex
if targetIndex >= len(hps):
self.notify.warning('DAMAGE: toon %s is no longer in battle!' % toonId)
else:
hp = hps[targetIndex]
if hp > 0:
self.notify.debug('DAMAGE: toon: %d hit for dmg: %d' % (toonId, hp))
if toonDied != 0:
toonHpDict[toon.doId][2] = 1
toonHpDict[toon.doId][1] += hp
deadToons = []
for activeToon in self.activeToons:
hp = toonHpDict[activeToon]
toon = self.getToon(activeToon)
if toon != None:
                self.notify.debug('AFTER ROUND: currtoonHP: %d toonMAX: %d heal: %d damage: %d' % (toon.hp,
toon.maxHp,
hp[0],
hp[1]))
toon.hpOwnedByBattle = 0
hpDelta = hp[0] - hp[1]
if hpDelta >= 0:
toon.toonUp(hpDelta, quietly=1)
else:
toon.takeDamage(-hpDelta, quietly=1)
if toon.hp <= 0:
self.notify.debug('movieDone() - toon: %d was killed' % activeToon)
toon.inventory.zeroInv(1)
deadToons.append(activeToon)
self.notify.debug('AFTER ROUND: toon: %d setHp: %d' % (toon.doId, toon.hp))
if toon.unlimitedGags:
toon.doRestock(noUber=0, noPaid=0)
for deadToon in deadToons:
self.__removeToon(deadToon)
needUpdate = 1
self.clearAttacks()
self.d_setMovie()
self.d_setChosenToonAttacks()
self.localMovieDone(needUpdate, deadToons, deadSuits, lastActiveSuitDied)
def enterResume(self):
for suit in self.suits:
self.notify.info('battle done, resuming suit: %d' % suit.doId)
if suit.isDeleted():
self.notify.info('whoops, suit %d is deleted.' % suit.doId)
else:
suit.resume()
self.suits = []
self.joiningSuits = []
self.pendingSuits = []
self.adjustingSuits = []
self.activeSuits = []
self.luredSuits = []
for toonId in self.toons:
toon = simbase.air.doId2do.get(toonId)
if toon:
toon.b_setBattleId(0)
messageToonReleased = 'Battle releasing toon %s' % toon.doId
messenger.send(messageToonReleased, [toon.doId])
for exitEvent in self.avatarExitEvents:
self.ignore(exitEvent)
eventMsg = {}
for encounter in self.suitsKilledThisBattle:
cog = encounter['type']
level = encounter['level']
msgName = '%s%s' % (cog, level)
if encounter['isSkelecog']:
msgName += '+'
if msgName in eventMsg:
eventMsg[msgName] += 1
else:
eventMsg[msgName] = 1
msgText = ''
for msgName, count in eventMsg.items():
if msgText != '':
msgText += ','
msgText += '%s%s' % (count, msgName)
self.air.writeServerEvent('battleCogsDefeated', self.doId, '%s|%s' % (msgText, self.getTaskZoneId()))
def exitResume(self):
pass
def isJoinable(self):
return self.joinableFsm.getCurrentState().getName() == 'Joinable'
def enterJoinable(self):
self.notify.debug('enterJoinable()')
def exitJoinable(self):
pass
def enterUnjoinable(self):
self.notify.debug('enterUnjoinable()')
def exitUnjoinable(self):
pass
def isRunable(self):
return self.runableFsm.getCurrentState().getName() == 'Runable'
def enterRunable(self):
self.notify.debug('enterRunable()')
def exitRunable(self):
pass
def enterUnrunable(self):
self.notify.debug('enterUnrunable()')
def exitUnrunable(self):
pass
def __estimateAdjustTime(self):
self.needAdjust = 0
adjustTime = 0
if len(self.pendingSuits) > 0 or self.suitGone == 1:
self.suitGone = 0
pos0 = self.suitPendingPoints[0][0]
pos1 = self.suitPoints[0][0][0]
adjustTime = self.calcSuitMoveTime(pos0, pos1)
if len(self.pendingToons) > 0 or self.toonGone == 1:
self.toonGone = 0
if adjustTime == 0:
pos0 = self.toonPendingPoints[0][0]
pos1 = self.toonPoints[0][0][0]
adjustTime = self.calcToonMoveTime(pos0, pos1)
return adjustTime
def enterAdjusting(self):
self.notify.debug('enterAdjusting()')
self.timer.stop()
self.__resetAdjustingResponses()
self.adjustingTimer.startCallback(self.__estimateAdjustTime() + SERVER_BUFFER_TIME, self.__serverAdjustingDone)
def __serverAdjustingDone(self):
if self.needAdjust == 1:
self.adjustFsm.request('NotAdjusting')
self.__requestAdjust()
else:
self.notify.debug('adjusting timed out on the server')
self.ignoreAdjustingResponses = 1
self.__adjustDone()
def exitAdjusting(self):
currStateName = self.fsm.getCurrentState().getName()
if currStateName == 'WaitForInput':
self.timer.restart()
elif currStateName == 'WaitForJoin':
self.b_setState('WaitForInput')
self.adjustingTimer.stop()
def __addTrainTrapForNewSuits(self):
hasTrainTrap = False
trapInfo = None
for otherSuit in self.activeSuits:
if otherSuit.battleTrap == UBER_GAG_LEVEL_INDEX:
hasTrainTrap = True
if hasTrainTrap:
for curSuit in self.activeSuits:
if not curSuit.battleTrap == UBER_GAG_LEVEL_INDEX:
oldBattleTrap = curSuit.battleTrap
curSuit.battleTrap = UBER_GAG_LEVEL_INDEX
self.battleCalc.addTrainTrapForJoiningSuit(curSuit.doId)
self.notify.debug('setting traintrack trap for joining suit %d oldTrap=%s' % (curSuit.doId, oldBattleTrap))
def __adjustDone(self):
for s in self.adjustingSuits:
self.pendingSuits.remove(s)
self.activeSuits.append(s)
self.adjustingSuits = []
for toon in self.adjustingToons:
if self.pendingToons.count(toon) == 1:
self.pendingToons.remove(toon)
else:
self.notify.warning('adjustDone() - toon: %d not pending!' % toon.doId)
if self.activeToons.count(toon) == 0:
self.activeToons.append(toon)
self.ignoreResponses = 0
self.sendEarnedExperience(toon)
else:
self.notify.warning('adjustDone() - toon: %d already active!' % toon.doId)
self.adjustingToons = []
self.__addTrainTrapForNewSuits()
self.d_setMembers()
self.adjustFsm.request('NotAdjusting')
if self.needAdjust == 1:
self.notify.debug('__adjustDone() - need to adjust again')
self.__requestAdjust()
def enterNotAdjusting(self):
self.notify.debug('enterNotAdjusting()')
if self.movieRequested == 1:
if len(self.activeToons) > 0 and self.__allActiveToonsResponded():
self.__requestMovie()
def exitNotAdjusting(self):
pass
def getPetProxyObject(self, petId, callback):
doneEvent = 'generate-%d' % petId
def handlePetProxyRead(pet):
callback(1, pet)
self.air.sendActivate(petId, self.air.districtId, 0)
self.acceptOnce(doneEvent, handlePetProxyRead)
def _getNextSerialNum(self):
num = self.serialNum
self.serialNum += 1
return num
def setFireCount(self, amount):
self.fireCount = amount
def getFireCount(self):
return self.fireCount
@magicWord(category=CATEGORY_PROGRAMMER)
def skipMovie():
invoker = spellbook.getInvoker()
battleId = invoker.getBattleId()
if not battleId:
return 'You are not currently in a battle!'
battle = simbase.air.doId2do.get(battleId)
battle._DistributedBattleBaseAI__movieDone()
return 'Battle movie skipped.'
| [
"toontown.pets.DistributedPetProxyAI.DistributedPetProxyAI",
"direct.distributed.DistributedObjectAI.DistributedObjectAI.requestDelete",
"direct.fsm.State.State",
"toontown.ai.DatabaseObject.DatabaseObject",
"direct.distributed.DistributedObjectAI.DistributedObjectAI.delete",
"toontown.toon.DistributedToonAI.DistributedToonAI",
"toontown.battle.BattleExperienceAI.getBattleExperience",
"toontown.toon.InventoryBase.InventoryBase",
"toontown.toon.NPCToons.getNPCTrackLevelHp",
"direct.directnotify.DirectNotifyGlobal.directNotify.newCategory",
"direct.distributed.DistributedObjectAI.DistributedObjectAI.__init__",
"random.randint"
]
| [((947, 1017), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', (['"""DistributedBattleBaseAI"""'], {}), "('DistributedBattleBaseAI')\n", (990, 1017), False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((1167, 1226), 'direct.distributed.DistributedObjectAI.DistributedObjectAI.__init__', 'DistributedObjectAI.DistributedObjectAI.__init__', (['self', 'air'], {}), '(self, air)\n', (1215, 1226), False, 'from direct.distributed import DistributedObjectAI\n'), ((5287, 5346), 'direct.distributed.DistributedObjectAI.DistributedObjectAI.requestDelete', 'DistributedObjectAI.DistributedObjectAI.requestDelete', (['self'], {}), '(self)\n', (5340, 5346), False, 'from direct.distributed import DistributedObjectAI\n'), ((6012, 6064), 'direct.distributed.DistributedObjectAI.DistributedObjectAI.delete', 'DistributedObjectAI.DistributedObjectAI.delete', (['self'], {}), '(self)\n', (6058, 6064), False, 'from direct.distributed import DistributedObjectAI\n'), ((15093, 15342), 'toontown.battle.BattleExperienceAI.getBattleExperience', 'BattleExperienceAI.getBattleExperience', (['(4)', 'self.activeToons', 'self.toonExp', 'self.battleCalc.toonSkillPtsGained', 'self.toonOrigQuests', 'self.toonItems', 'self.toonOrigMerits', 'self.toonMerits', 'self.toonParts', 'self.suitsKilled', 'self.helpfulToons'], {}), '(4, self.activeToons, self.toonExp,\n self.battleCalc.toonSkillPtsGained, self.toonOrigQuests, self.toonItems,\n self.toonOrigMerits, self.toonMerits, self.toonParts, self.suitsKilled,\n self.helpfulToons)\n', (15131, 15342), False, 'from toontown.battle import BattleExperienceAI\n'), ((66394, 66443), 'toontown.toon.NPCToons.getNPCTrackLevelHp', 'NPCToons.getNPCTrackLevelHp', (['attack[TOON_TGT_COL]'], {}), '(attack[TOON_TGT_COL])\n', (66421, 66443), False, 'from toontown.toon import NPCToons\n'), ((3199, 3290), 'direct.fsm.State.State', 'State.State', (['"""FaceOff"""', 'self.enterFaceOff', 'self.exitFaceOff', "['WaitForInput', 'Resume']"], {}), "('FaceOff', self.enterFaceOff, self.exitFaceOff, ['WaitForInput',\n 'Resume'])\n", (3210, 3290), False, 'from direct.fsm import State\n'), ((3297, 3401), 'direct.fsm.State.State', 'State.State', (['"""WaitForJoin"""', 'self.enterWaitForJoin', 'self.exitWaitForJoin', "['WaitForInput', 'Resume']"], {}), "('WaitForJoin', self.enterWaitForJoin, self.exitWaitForJoin, [\n 'WaitForInput', 'Resume'])\n", (3308, 3401), False, 'from direct.fsm import State\n'), ((3407, 3510), 'direct.fsm.State.State', 'State.State', (['"""WaitForInput"""', 'self.enterWaitForInput', 'self.exitWaitForInput', "['MakeMovie', 'Resume']"], {}), "('WaitForInput', self.enterWaitForInput, self.exitWaitForInput,\n ['MakeMovie', 'Resume'])\n", (3418, 3510), False, 'from direct.fsm import State\n'), ((3517, 3612), 'direct.fsm.State.State', 'State.State', (['"""MakeMovie"""', 'self.enterMakeMovie', 'self.exitMakeMovie', "['PlayMovie', 'Resume']"], {}), "('MakeMovie', self.enterMakeMovie, self.exitMakeMovie, [\n 'PlayMovie', 'Resume'])\n", (3528, 3612), False, 'from direct.fsm import State\n'), ((3618, 3725), 'direct.fsm.State.State', 'State.State', (['"""PlayMovie"""', 'self.enterPlayMovie', 'self.exitPlayMovie', "['WaitForJoin', 'Reward', 'Resume']"], {}), "('PlayMovie', self.enterPlayMovie, self.exitPlayMovie, [\n 'WaitForJoin', 'Reward', 'Resume'])\n", (3629, 3725), False, 'from direct.fsm import State\n'), ((3731, 3799), 'direct.fsm.State.State', 'State.State', (['"""Reward"""', 'self.enterReward', 'self.exitReward', 
"['Resume']"], {}), "('Reward', self.enterReward, self.exitReward, ['Resume'])\n", (3742, 3799), False, 'from direct.fsm import State\n'), ((3810, 3870), 'direct.fsm.State.State', 'State.State', (['"""Resume"""', 'self.enterResume', 'self.exitResume', '[]'], {}), "('Resume', self.enterResume, self.exitResume, [])\n", (3821, 3870), False, 'from direct.fsm import State\n'), ((3881, 3956), 'direct.fsm.State.State', 'State.State', (['"""Off"""', 'self.enterOff', 'self.exitOff', "['FaceOff', 'WaitForJoin']"], {}), "('Off', self.enterOff, self.exitOff, ['FaceOff', 'WaitForJoin'])\n", (3892, 3956), False, 'from direct.fsm import State\n'), ((4035, 4113), 'direct.fsm.State.State', 'State.State', (['"""Joinable"""', 'self.enterJoinable', 'self.exitJoinable', "['Unjoinable']"], {}), "('Joinable', self.enterJoinable, self.exitJoinable, ['Unjoinable'])\n", (4046, 4113), False, 'from direct.fsm import State\n'), ((4115, 4202), 'direct.fsm.State.State', 'State.State', (['"""Unjoinable"""', 'self.enterUnjoinable', 'self.exitUnjoinable', "['Joinable']"], {}), "('Unjoinable', self.enterUnjoinable, self.exitUnjoinable, [\n 'Joinable'])\n", (4126, 4202), False, 'from direct.fsm import State\n'), ((4333, 4407), 'direct.fsm.State.State', 'State.State', (['"""Runable"""', 'self.enterRunable', 'self.exitRunable', "['Unrunable']"], {}), "('Runable', self.enterRunable, self.exitRunable, ['Unrunable'])\n", (4344, 4407), False, 'from direct.fsm import State\n'), ((4409, 4487), 'direct.fsm.State.State', 'State.State', (['"""Unrunable"""', 'self.enterUnrunable', 'self.exitUnrunable', "['Runable']"], {}), "('Unrunable', self.enterUnrunable, self.exitUnrunable, ['Runable'])\n", (4420, 4487), False, 'from direct.fsm import State\n'), ((4618, 4719), 'direct.fsm.State.State', 'State.State', (['"""Adjusting"""', 'self.enterAdjusting', 'self.exitAdjusting', "['NotAdjusting', 'Adjusting']"], {}), "('Adjusting', self.enterAdjusting, self.exitAdjusting, [\n 'NotAdjusting', 'Adjusting'])\n", (4629, 4719), False, 'from direct.fsm import State\n'), ((4716, 4809), 'direct.fsm.State.State', 'State.State', (['"""NotAdjusting"""', 'self.enterNotAdjusting', 'self.exitNotAdjusting', "['Adjusting']"], {}), "('NotAdjusting', self.enterNotAdjusting, self.exitNotAdjusting,\n ['Adjusting'])\n", (4727, 4809), False, 'from direct.fsm import State\n'), ((27757, 27802), 'toontown.toon.DistributedToonAI.DistributedToonAI', 'DistributedToonAI.DistributedToonAI', (['self.air'], {}), '(self.air)\n', (27792, 27802), False, 'from toontown.toon import DistributedToonAI\n'), ((27854, 27887), 'toontown.toon.InventoryBase.InventoryBase', 'InventoryBase.InventoryBase', (['toon'], {}), '(toon)\n', (27881, 27887), False, 'from toontown.toon import InventoryBase\n'), ((27988, 28035), 'toontown.ai.DatabaseObject.DatabaseObject', 'DatabaseObject.DatabaseObject', (['self.air', 'toonId'], {}), '(self.air, toonId)\n', (28017, 28035), False, 'from toontown.ai import DatabaseObject\n'), ((55141, 55190), 'toontown.toon.NPCToons.getNPCTrackLevelHp', 'NPCToons.getNPCTrackLevelHp', (['attack[TOON_TGT_COL]'], {}), '(attack[TOON_TGT_COL])\n', (55168, 55190), False, 'from toontown.toon import NPCToons\n'), ((44749, 44802), 'toontown.pets.DistributedPetProxyAI.DistributedPetProxyAI', 'DistributedPetProxyAI.DistributedPetProxyAI', (['self.air'], {}), '(self.air)\n', (44792, 44802), False, 'from toontown.pets import DistributedPetProxyAI\n'), ((12410, 12434), 'random.randint', 'random.randint', (['(0)', '(10000)'], {}), '(0, 10000)\n', (12424, 12434), False, 'import 
random\n')] |
import gym
from gym import spaces, error, utils
from gym.utils import seeding
import numpy as np
from scipy.spatial.distance import pdist, squareform
import configparser
from os import path
import matplotlib.pyplot as plt
from matplotlib.pyplot import gca
font = {'family' : 'sans-serif',
'weight' : 'bold',
'size' : 14}
class FlockingEnv(gym.Env):
def __init__(self):
config_file = path.join(path.dirname(__file__), "params_flock.cfg")
config = configparser.ConfigParser()
config.read(config_file)
config = config['flock']
self.fig = None
self.line1 = None
self.filter_len = int(config['filter_length'])
self.nx_system = 4
self.n_nodes = int(config['network_size'])
self.comm_radius = float(config['comm_radius'])
self.dt = float(config['system_dt'])
self.v_max = float(config['max_vel_init'])
self.v_bias = self.v_max # 0.5 * self.v_max
self.r_max = float(config['max_rad_init'])
self.std_dev = float(config['std_dev']) * self.dt
self.pooling = []
if config.getboolean('sum_pooling'):
self.pooling.append(np.nansum)
if config.getboolean('min_pooling'):
self.pooling.append(np.nanmin)
if config.getboolean('max_pooling'):
self.pooling.append(np.nanmax)
self.n_pools = len(self.pooling)
# number of features and outputs
self.n_features = int(config['N_features'])
self.nx = int(self.n_features / self.n_pools / self.filter_len)
self.nu = int(config['N_outputs']) # outputs
self.x_agg = np.zeros((self.n_nodes, self.nx * self.filter_len, self.n_pools))
self.x = np.zeros((self.n_nodes, self.nx_system))
self.u = np.zeros((self.n_nodes, self.nu))
self.mean_vel = np.zeros((self.n_nodes, self.nu))
# TODO
self.max_accel = 40
self.max_z = 200
# self.b = np.ones((self.n_nodes,1))
# self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(self.n_nodes, 2), dtype=np.float32 )
# self.observation_space = spaces.Box(low=-self.max_z, high=self.max_z, shape=(
# self.n_nodes, self.nx * self.filter_len * self.n_pools) , dtype=np.float32)
self.action_space = spaces.Box(low=-self.max_accel, high=self.max_accel, shape=(2,) , dtype=np.float32 )
self.observation_space = spaces.Box(low=-self.max_z, high=self.max_z, shape=(self.n_features, ), dtype=np.float32)
self.seed()
def render(self, mode='human'):
if self.fig is None:
plt.ion()
fig = plt.figure()
ax = fig.add_subplot(111)
line1, = ax.plot(self.x[:, 0], self.x[:, 1], 'bo') # Returns a tuple of line objects, thus the comma
ax.plot([0], [0], 'kx')
plt.ylim(-1.0 * self.r_max, 1.0 * self.r_max)
plt.xlim(-1.0 * self.r_max, 1.0 * self.r_max)
a = gca()
a.set_xticklabels(a.get_xticks(), font)
a.set_yticklabels(a.get_yticks(), font)
plt.title('GNN Controller')
self.fig = fig
self.line1 = line1
self.line1.set_xdata(self.x[:, 0])
self.line1.set_ydata(self.x[:, 1])
self.fig.canvas.draw()
self.fig.canvas.flush_events()
def seed(self, seed=None):
self.np_random, seed = seeding.np_random(seed)
return [seed]
def step(self, u):
x = self.x
x_ = np.zeros((self.n_nodes, self.nx_system))
#u = np.vstack((np.zeros((self.n_leaders, 2)), u))
# x position
x_[:, 0] = x[:, 0] + x[:, 2] * self.dt
# y position
x_[:, 1] = x[:, 1] + x[:, 3] * self.dt
# x velocity
x_[:, 2] = x[:, 2] + 0.1 * u[:, 0] * self.dt + np.random.normal(0, self.std_dev,(self.n_nodes,))
# y velocity
x_[:, 3] = x[:, 3] + 0.1 * u[:, 1] * self.dt + np.random.normal(0, self.std_dev,(self.n_nodes,))
# TODO - check the 0.1
self.x = x_
self.x_agg = self.aggregate(self.x, self.x_agg)
self.u = u
return self._get_obs(), -self.instant_cost(), False, {}
    def instant_cost(self):  # sum of the per-component velocity variances across agents
return np.sum(np.var(self.x[:, 2:4], axis=0)) #+ np.sum(np.square(self.u)) * 0.00001
#return np.sum(np.square(self.x[:,2:4] - self.mean_vel))
def _get_obs(self):
reshaped = self.x_agg.reshape((self.n_nodes, self.n_features))
clipped = np.clip(reshaped, a_min=-self.max_z, a_max=self.max_z)
return clipped #[self.n_leaders:, :]
def reset(self):
x = np.zeros((self.n_nodes, self.nx_system))
degree = 0
min_dist = 0
while degree < 2 or min_dist < 0.1: # < 0.25: # 0.25: #0.5: #min_dist < 0.25:
# randomly initialize the state of all agents
length = np.sqrt(np.random.uniform(0, self.r_max, size=(self.n_nodes,)))
angle = np.pi * np.random.uniform(0, 2, size=(self.n_nodes,))
x[:, 0] = length * np.cos(angle)
x[:, 1] = length * np.sin(angle)
bias = np.random.uniform(low=-self.v_bias, high=self.v_bias, size=(2,))
x[:, 2] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_nodes,)) + bias[0]
x[:, 3] = np.random.uniform(low=-self.v_max, high=self.v_max, size=(self.n_nodes,)) + bias[1]
# compute distances between agents
x_t_loc = x[:, 0:2] # x,y location determines connectivity
a_net = squareform(pdist(x_t_loc.reshape((self.n_nodes, 2)), 'euclidean'))
# no self loops
a_net = a_net + 2 * self.comm_radius * np.eye(self.n_nodes)
# compute minimum distance between agents and degree of network
min_dist = np.min(np.min(a_net))
a_net = a_net < self.comm_radius
degree = np.min(np.sum(a_net.astype(int), axis=1))
self.mean_vel = np.mean(x[:,2:4],axis=0)
self.x = x
self.x_agg = np.zeros((self.n_nodes, self.nx * self.filter_len, self.n_pools))
self.x_agg = self.aggregate(self.x, self.x_agg)
return self._get_obs()
# def render(self, mode='human'):
# pass
def close(self):
pass
def aggregate(self, xt, x_agg):
"""
        Perform the aggregation operation for all configured pooling operations, using the helper functions get_pool and get_comms.
Args:
x_agg (): Last time step's aggregated info
xt (): Current state of all agents
Returns:
Aggregated state values
"""
x_features = self.get_x_features(xt)
a_net = self.get_connectivity(xt)
for k in range(0, self.n_pools):
comm_data = self.get_comms(np.dstack((x_features, self.get_features(x_agg[:, :, k]))), a_net)
x_agg[:, :, k] = self.get_pool(comm_data, self.pooling[k])
return x_agg
def get_connectivity(self, x):
"""
Get the adjacency matrix of the network based on agent locations by computing pairwise distances using pdist
Args:
x (): current states of all agents
Returns: adjacency matrix of network
"""
x_t_loc = x[:, 0:2] # x,y location determines connectivity
a_net = squareform(pdist(x_t_loc.reshape((self.n_nodes, 2)), 'euclidean'))
a_net = (a_net < self.comm_radius).astype(float)
np.fill_diagonal(a_net, 0)
return a_net
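    # Illustration (added; not part of the original class). With comm_radius=1.0
    # and three agents at x-positions 0.0, 0.5 and 2.0 (same y), pdist gives
    # pairwise distances [0.5, 2.0, 1.5], so only agents 0 and 1 are connected:
    #
    #     a_net = [[0., 1., 0.],
    #              [1., 0., 0.],
    #              [0., 0., 0.]]   # diagonal forced to 0 (no self-loops)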
def get_x_features(self, xt): # TODO
"""
Compute the non-linear features necessary for implementing Turner 2003
Args:
xt (): current state of all agents
Returns: matrix of features for each agent
"""
diff = xt.reshape((self.n_nodes, 1, self.nx_system)) - xt.reshape((1, self.n_nodes, self.nx_system))
r2 = np.multiply(diff[:, :, 0], diff[:, :, 0]) + np.multiply(diff[:, :, 1], diff[:, :, 1]) + np.eye(
self.n_nodes)
return np.dstack((diff[:, :, 2], np.divide(diff[:, :, 0], np.multiply(r2, r2)), np.divide(diff[:, :, 0], r2),
diff[:, :, 3], np.divide(diff[:, :, 1], np.multiply(r2, r2)), np.divide(diff[:, :, 1], r2)))
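    # Note (added for clarity; not in the original code): per agent pair the
    # stacked features are [dvx, dx / r**4, dx / r**2, dvy, dy / r**4, dy / r**2],
    # i.e. the relative velocity plus the relative position scaled by two
    # inverse powers of the (regularised) squared distance r2.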
def get_features(self, agg):
"""
        Build the matrix of delayed aggregated features, tiled across all nodes for use as neighbour information.
Args:
agg (): the aggregated matrix from the last time step
Returns: matrix of aggregated features from all nodes at current time
"""
return np.tile(agg[:, :-self.nx].reshape((self.n_nodes, 1, -1)), (1, self.n_nodes, 1)) # TODO check indexing
def get_comms(self, mat, a_net):
"""
Enforces that agents who are not connected in the network cannot observe each others' states
Args:
mat (): matrix of state information for the whole graph
a_net (): adjacency matrix for flock network (weighted networks unsupported for now)
Returns:
mat (): sparse matrix with NaN values where agents can't communicate
"""
a_net[a_net == 0] = np.nan
return mat * a_net.reshape(self.n_nodes, self.n_nodes, 1)
def get_pool(self, mat, func):
"""
Perform pooling operations on the matrix of state information. The replacement of values with NaNs for agents who
can't communicate must already be enforced.
Args:
mat (): matrix of state information
func (): pooling function (np.nansum(), np.nanmin() or np.nanmax()). Must ignore NaNs.
Returns:
information pooled from neighbors for each agent
"""
return func(mat, axis=1).reshape((self.n_nodes, self.n_features)) # TODO check this axis = 1
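    # Illustration (added; not part of the original class): get_comms/get_pool
    # implement neighbourhood aggregation by NaN-masking. If agent 0 only sees
    # agent 1, its row of a_net becomes [nan, 1., nan]; multiplying the feature
    # tensor by that row and reducing with np.nansum / np.nanmin / np.nanmax
    # over axis 1 therefore pools over connected neighbours only.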
def controller(self):
"""
The controller for flocking from Turner 2003.
Args:
            (no arguments - the current state is read from self.x)
Returns: the optimal action
"""
x = self.x
s_diff = x.reshape((self.n_nodes, 1, self.nx_system)) - x.reshape((1, self.n_nodes, self.nx_system))
r2 = np.multiply(s_diff[:, :, 0], s_diff[:, :, 0]) + np.multiply(s_diff[:, :, 1], s_diff[:, :, 1]) + np.eye(
self.n_nodes)
p = np.dstack((s_diff, self.potential_grad(s_diff[:, :, 0], r2), self.potential_grad(s_diff[:, :, 1], r2)))
p_sum = np.nansum(p, axis=1).reshape((self.n_nodes, self.nx_system + 2))
return np.hstack(((- p_sum[:, 4] - p_sum[:, 2]).reshape((-1, 1)), (- p_sum[:, 3] - p_sum[:, 5]).reshape(-1, 1)))
def potential_grad(self, pos_diff, r2):
"""
Computes the gradient of the potential function for flocking proposed in Turner 2003.
Args:
pos_diff (): difference in a component of position among all agents
r2 (): distance squared between agents
Returns: corresponding component of the gradient of the potential
"""
grad = -2.0 * np.divide(pos_diff, np.multiply(r2, r2)) + 2 * np.divide(pos_diff, r2)
grad[r2 > self.comm_radius] = 0
return grad
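# ----------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# environment). It assumes the package layout above, i.e. that
# params_flock.cfg sits next to this module, and simply rolls out the
# hand-designed Turner-2003 controller for a few steps.
if __name__ == '__main__':
    env = FlockingEnv()
    obs = env.reset()
    for _ in range(10):
        u = env.controller()                    # (n_nodes, 2) accelerations
        obs, reward, done, info = env.step(u)   # reward = -sum of velocity variances
        print('instantaneous cost:', -reward)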
| [
"numpy.clip",
"configparser.ConfigParser",
"numpy.sin",
"numpy.divide",
"gym.utils.seeding.np_random",
"numpy.mean",
"numpy.multiply",
"numpy.min",
"matplotlib.pyplot.ylim",
"numpy.random.normal",
"numpy.eye",
"matplotlib.pyplot.gca",
"numpy.fill_diagonal",
"os.path.dirname",
"numpy.cos",
"matplotlib.pyplot.ion",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"numpy.nansum",
"gym.spaces.Box",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.random.uniform",
"numpy.var"
]
| [((488, 515), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (513, 515), False, 'import configparser\n'), ((1655, 1720), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nx * self.filter_len, self.n_pools)'], {}), '((self.n_nodes, self.nx * self.filter_len, self.n_pools))\n', (1663, 1720), True, 'import numpy as np\n'), ((1738, 1778), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nx_system)'], {}), '((self.n_nodes, self.nx_system))\n', (1746, 1778), True, 'import numpy as np\n'), ((1796, 1829), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nu)'], {}), '((self.n_nodes, self.nu))\n', (1804, 1829), True, 'import numpy as np\n'), ((1854, 1887), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nu)'], {}), '((self.n_nodes, self.nu))\n', (1862, 1887), True, 'import numpy as np\n'), ((2336, 2423), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-self.max_accel)', 'high': 'self.max_accel', 'shape': '(2,)', 'dtype': 'np.float32'}), '(low=-self.max_accel, high=self.max_accel, shape=(2,), dtype=np.\n float32)\n', (2346, 2423), False, 'from gym import spaces, error, utils\n'), ((2454, 2546), 'gym.spaces.Box', 'spaces.Box', ([], {'low': '(-self.max_z)', 'high': 'self.max_z', 'shape': '(self.n_features,)', 'dtype': 'np.float32'}), '(low=-self.max_z, high=self.max_z, shape=(self.n_features,),\n dtype=np.float32)\n', (2464, 2546), False, 'from gym import spaces, error, utils\n'), ((3434, 3457), 'gym.utils.seeding.np_random', 'seeding.np_random', (['seed'], {}), '(seed)\n', (3451, 3457), False, 'from gym.utils import seeding\n'), ((3536, 3576), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nx_system)'], {}), '((self.n_nodes, self.nx_system))\n', (3544, 3576), True, 'import numpy as np\n'), ((4556, 4610), 'numpy.clip', 'np.clip', (['reshaped'], {'a_min': '(-self.max_z)', 'a_max': 'self.max_z'}), '(reshaped, a_min=-self.max_z, a_max=self.max_z)\n', (4563, 4610), True, 'import numpy as np\n'), ((4690, 4730), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nx_system)'], {}), '((self.n_nodes, self.nx_system))\n', (4698, 4730), True, 'import numpy as np\n'), ((6098, 6163), 'numpy.zeros', 'np.zeros', (['(self.n_nodes, self.nx * self.filter_len, self.n_pools)'], {}), '((self.n_nodes, self.nx * self.filter_len, self.n_pools))\n', (6106, 6163), True, 'import numpy as np\n'), ((7520, 7546), 'numpy.fill_diagonal', 'np.fill_diagonal', (['a_net', '(0)'], {}), '(a_net, 0)\n', (7536, 7546), True, 'import numpy as np\n'), ((427, 449), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (439, 449), False, 'from os import path\n'), ((2644, 2653), 'matplotlib.pyplot.ion', 'plt.ion', ([], {}), '()\n', (2651, 2653), True, 'import matplotlib.pyplot as plt\n'), ((2672, 2684), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2682, 2684), True, 'import matplotlib.pyplot as plt\n'), ((2885, 2930), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-1.0 * self.r_max)', '(1.0 * self.r_max)'], {}), '(-1.0 * self.r_max, 1.0 * self.r_max)\n', (2893, 2930), True, 'import matplotlib.pyplot as plt\n'), ((2943, 2988), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-1.0 * self.r_max)', '(1.0 * self.r_max)'], {}), '(-1.0 * self.r_max, 1.0 * self.r_max)\n', (2951, 2988), True, 'import matplotlib.pyplot as plt\n'), ((3005, 3010), 'matplotlib.pyplot.gca', 'gca', ([], {}), '()\n', (3008, 3010), False, 'from matplotlib.pyplot import gca\n'), ((3127, 3154), 'matplotlib.pyplot.title', 'plt.title', (['"""GNN Controller"""'], {}), "('GNN Controller')\n", (3136, 3154), True, 'import 
matplotlib.pyplot as plt\n'), ((3850, 3900), 'numpy.random.normal', 'np.random.normal', (['(0)', 'self.std_dev', '(self.n_nodes,)'], {}), '(0, self.std_dev, (self.n_nodes,))\n', (3866, 3900), True, 'import numpy as np\n'), ((3977, 4027), 'numpy.random.normal', 'np.random.normal', (['(0)', 'self.std_dev', '(self.n_nodes,)'], {}), '(0, self.std_dev, (self.n_nodes,))\n', (3993, 4027), True, 'import numpy as np\n'), ((4306, 4336), 'numpy.var', 'np.var', (['self.x[:, 2:4]'], {'axis': '(0)'}), '(self.x[:, 2:4], axis=0)\n', (4312, 4336), True, 'import numpy as np\n'), ((5188, 5252), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.v_bias)', 'high': 'self.v_bias', 'size': '(2,)'}), '(low=-self.v_bias, high=self.v_bias, size=(2,))\n', (5205, 5252), True, 'import numpy as np\n'), ((6032, 6058), 'numpy.mean', 'np.mean', (['x[:, 2:4]'], {'axis': '(0)'}), '(x[:, 2:4], axis=0)\n', (6039, 6058), True, 'import numpy as np\n'), ((8038, 8058), 'numpy.eye', 'np.eye', (['self.n_nodes'], {}), '(self.n_nodes)\n', (8044, 8058), True, 'import numpy as np\n'), ((10212, 10232), 'numpy.eye', 'np.eye', (['self.n_nodes'], {}), '(self.n_nodes)\n', (10218, 10232), True, 'import numpy as np\n'), ((4948, 5002), 'numpy.random.uniform', 'np.random.uniform', (['(0)', 'self.r_max'], {'size': '(self.n_nodes,)'}), '(0, self.r_max, size=(self.n_nodes,))\n', (4965, 5002), True, 'import numpy as np\n'), ((5032, 5077), 'numpy.random.uniform', 'np.random.uniform', (['(0)', '(2)'], {'size': '(self.n_nodes,)'}), '(0, 2, size=(self.n_nodes,))\n', (5049, 5077), True, 'import numpy as np\n'), ((5109, 5122), 'numpy.cos', 'np.cos', (['angle'], {}), '(angle)\n', (5115, 5122), True, 'import numpy as np\n'), ((5154, 5167), 'numpy.sin', 'np.sin', (['angle'], {}), '(angle)\n', (5160, 5167), True, 'import numpy as np\n'), ((5275, 5348), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.v_max)', 'high': 'self.v_max', 'size': '(self.n_nodes,)'}), '(low=-self.v_max, high=self.v_max, size=(self.n_nodes,))\n', (5292, 5348), True, 'import numpy as np\n'), ((5381, 5454), 'numpy.random.uniform', 'np.random.uniform', ([], {'low': '(-self.v_max)', 'high': 'self.v_max', 'size': '(self.n_nodes,)'}), '(low=-self.v_max, high=self.v_max, size=(self.n_nodes,))\n', (5398, 5454), True, 'import numpy as np\n'), ((5880, 5893), 'numpy.min', 'np.min', (['a_net'], {}), '(a_net)\n', (5886, 5893), True, 'import numpy as np\n'), ((7950, 7991), 'numpy.multiply', 'np.multiply', (['diff[:, :, 0]', 'diff[:, :, 0]'], {}), '(diff[:, :, 0], diff[:, :, 0])\n', (7961, 7991), True, 'import numpy as np\n'), ((7994, 8035), 'numpy.multiply', 'np.multiply', (['diff[:, :, 1]', 'diff[:, :, 1]'], {}), '(diff[:, :, 1], diff[:, :, 1])\n', (8005, 8035), True, 'import numpy as np\n'), ((8160, 8188), 'numpy.divide', 'np.divide', (['diff[:, :, 0]', 'r2'], {}), '(diff[:, :, 0], r2)\n', (8169, 8188), True, 'import numpy as np\n'), ((8278, 8306), 'numpy.divide', 'np.divide', (['diff[:, :, 1]', 'r2'], {}), '(diff[:, :, 1], r2)\n', (8287, 8306), True, 'import numpy as np\n'), ((10116, 10161), 'numpy.multiply', 'np.multiply', (['s_diff[:, :, 0]', 's_diff[:, :, 0]'], {}), '(s_diff[:, :, 0], s_diff[:, :, 0])\n', (10127, 10161), True, 'import numpy as np\n'), ((10164, 10209), 'numpy.multiply', 'np.multiply', (['s_diff[:, :, 1]', 's_diff[:, :, 1]'], {}), '(s_diff[:, :, 1], s_diff[:, :, 1])\n', (10175, 10209), True, 'import numpy as np\n'), ((10378, 10398), 'numpy.nansum', 'np.nansum', (['p'], {'axis': '(1)'}), '(p, axis=1)\n', (10387, 10398), True, 'import numpy as 
np\n'), ((11017, 11040), 'numpy.divide', 'np.divide', (['pos_diff', 'r2'], {}), '(pos_diff, r2)\n', (11026, 11040), True, 'import numpy as np\n'), ((5752, 5772), 'numpy.eye', 'np.eye', (['self.n_nodes'], {}), '(self.n_nodes)\n', (5758, 5772), True, 'import numpy as np\n'), ((8138, 8157), 'numpy.multiply', 'np.multiply', (['r2', 'r2'], {}), '(r2, r2)\n', (8149, 8157), True, 'import numpy as np\n'), ((8256, 8275), 'numpy.multiply', 'np.multiply', (['r2', 'r2'], {}), '(r2, r2)\n', (8267, 8275), True, 'import numpy as np\n'), ((10990, 11009), 'numpy.multiply', 'np.multiply', (['r2', 'r2'], {}), '(r2, r2)\n', (11001, 11009), True, 'import numpy as np\n')] |
# -*-coding:utf-8-*-
# from functools import reduce
from functools import reduce
SANCAI_jixiang = [1, 3, 5, 7, 8, 11, 13, 15, 16, 18, 21, 23, 24, 25, 31, 32, 33, 35, 37, 39, 41, 45, 47, 48, 52, 57, 61,
63,
                  65, 67, 68, 81]  # auspicious numbers (suggesting soundness, happiness, honour, etc.)
SANCAI_xiaoji = [6, 17, 26, 27, 29, 30, 38, 49, 51, 55, 58, 71, 73, 75]  # moderately auspicious numbers (some obstacles, but good fortune can still be obtained)
SANCAI_xiong = [2, 4, 9, 10, 12, 14, 19, 20, 22, 28, 34, 36, 40, 42, 43, 44, 46, 50, 53, 54, 56, 59, 60, 62, 64, 66, 69,
70,
                72, 74, 76, 77, 78, 79, 80]  # inauspicious numbers (suggesting adversity, instability, weakness, illness, hardship, frequent misfortune, etc.)
SANCAI_wise = [3, 13, 16, 21, 23, 29, 31, 37, 39, 41, 45, 47]  # leadership numbers (wisdom; benevolence and courage combined, rises to a high position and can lead others)
SANCAI_wealth = [15, 16, 24, 29, 32, 33, 41, 52]  # wealth numbers (plentiful money, riches and honour; can build a large fortune from nothing)
SANCAI_artist = [13, 14, 18, 26, 29, 33, 35, 38, 48]  # artistic-talent numbers (rich artistic gift; aptitude for aesthetics, art, performing and sport)
SANCAI_goodwife = [5, 6, 11, 13, 15, 16, 24, 32, 35]  # feminine-virtue numbers (womanly virtue, gentle character, supports husband and loves children)
SANCAI_death = [21, 23, 26, 28, 29, 33, 39]  # widowhood numbers for women (hard to find a husband, family discord, spouses fight, divorce, or in severe cases early death of one spouse)
SANCAI_alone = [4, 10, 12, 14, 22, 28, 34]  # loneliness numbers (wife dominates husband, or husband is at odds with wife)
SANCAI_merry = [5, 6, 15, 16, 32, 39, 41]  # double-marriage numbers
SANCAI_stubbon = [7, 17, 18, 25, 27, 28, 37, 47]  # obstinacy numbers (rigid, stubborn temperament, acts on impulse)
SANCAI_gentle = [5, 6, 11, 15, 16, 24, 31, 32, 35]  # gentleness numbers (mild temperament, wins the trust of superiors and subordinates)
# You can configure which numbers you consider good yourself
# Reference 'good' groupings
refer_good_num_list = [SANCAI_jixiang, SANCAI_xiaoji, SANCAI_wise, SANCAI_wealth, SANCAI_artist, SANCAI_goodwife,
SANCAI_merry, SANCAI_gentle]
# Your own 'good' groupings
good_num_list = [SANCAI_jixiang, SANCAI_xiaoji, SANCAI_wise, SANCAI_wealth, SANCAI_artist, SANCAI_goodwife,
SANCAI_merry, SANCAI_gentle]
# Reference 'bad' groupings
refer_bad_num_list = [SANCAI_xiong, SANCAI_death, SANCAI_alone, SANCAI_stubbon]
# Your own 'bad' groupings
bad_num_list = [SANCAI_xiong, SANCAI_death, SANCAI_alone]
good_num_set = set(reduce((lambda x, y: x + y), good_num_list, []))
bad_num_set = set(reduce((lambda x, y: x + y), bad_num_list, []))
print('Good five-grid values:', good_num_set)
print('Bad five-grid values:', bad_num_set)
# Filter for Sancai/Wuge (Three Talents and Five Grids) values that are good and not bad
best_num_set = [x for x in good_num_set if x not in bad_num_set]
print('Desired Sancai/Wuge numbers:', best_num_set)
RESULT_UNKNOWN = 'result unknown'
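# ----------------------------------------------------------------------
# Hedged usage sketch (added for illustration; not part of the original
# script). `classify_grid` is a hypothetical helper: given a grid value
# computed elsewhere (e.g. the "personality" grid of a candidate name),
# it classifies the value against the sets built above.
def classify_grid(n):
    if n in best_num_set:
        return 'good and not bad'
    if n in bad_num_set and n in good_num_set:
        return 'mixed (listed as both good and bad)'
    if n in bad_num_set:
        return 'bad'
    return RESULT_UNKNOWN
print(classify_grid(16))  # auspicious + wealth + gentle, in no bad list -> 'good and not bad'
print(classify_grid(34))  # inauspicious and lonely -> 'bad'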
| [
"functools.reduce"
]
| [((1814, 1859), 'functools.reduce', 'reduce', (['(lambda x, y: x + y)', 'good_num_list', '[]'], {}), '(lambda x, y: x + y, good_num_list, [])\n', (1820, 1859), False, 'from functools import reduce\n'), ((1881, 1925), 'functools.reduce', 'reduce', (['(lambda x, y: x + y)', 'bad_num_list', '[]'], {}), '(lambda x, y: x + y, bad_num_list, [])\n', (1887, 1925), False, 'from functools import reduce\n')] |
"""
Losses that assume an underlying spatial organization
(gradients, curvature, etc.)
"""
import torch
import torch.nn as tnn
from nitorch.core.pyutils import make_list, prod
from nitorch.core.utils import slice_tensor
from nitorch.spatial import diff1d
from ._base import Loss
class LocalFeatures(tnn.Module):
"""Base class for feature extractors.
Is it really useful?
"""
def __init__(self, bound='dct2', voxel_size=1, *args, **kwargs):
"""
Parameters
----------
bound : BoundType, default='dct2'
Boundary conditions, used to compute derivatives at the edges.
voxel_size : float or list[float], default=1
Voxel size
"""
super().__init__(*args, **kwargs)
self.bound = bound
self.voxel_size = voxel_size
class Diff(LocalFeatures):
"""Finite differences."""
def __init__(self, order=1, side='c', dim=None, *args, **kwargs):
"""
Parameters
----------
order : int, default=1
Finite differences order
side : {'c', 'f', 'b'} or list[{'c', 'f', 'b'}], default='c'
            Type of finite differences to extract about each voxel:
* 'c' : central -> `g[i] = (x[i+1] - x[i-1])/2`
* 'f' : forward -> `g[i] = (x[i+1] - x[i])`
* 'b' : backward -> `g[i] = (x[i] - x[i-1])`
dim : int or list[int], optional
Dimensions along which to compute the finite differences.
By default, all except the first two (batch and channel).
bound : BoundType or list[BoundType], default='dct2'
Boundary conditions, used to compute derivatives at the edges.
voxel_size : float or list[float], default=1
Voxel size
reduction : {'mean', 'sum'} or callable, default='mean'
Type of reduction to apply.
"""
super().__init__(*args, **kwargs)
self.order = order
self.side = side
self.dim = dim
def forward(self, x, **overload):
"""
Parameters
----------
x : tensor
Input tensor with shape (batch, channel, *spatial)
overload : dict
All parameters defined at build time can be overridden
at call time.
Returns
-------
g : tensor
Finite differences with shape
(batch, channel, *spatial, len(dim), len(side))
If `dim` or `side` are scalars, not lists, their respective
dimension is dropped in the output tensor.
E.g., if `side='c'`, the output shape is
(batch, channel, *spatial, len(dim))
"""
order = overload.get('order', self.order)
        side = overload.get('side', self.side)
drop_side_dim = not isinstance(side, (tuple, list))
side = make_list(side)
dim = overload.get('dim', self.dim)
dim = list(range(2, x.dim())) if dim is None else dim
drop_dim_dim = not isinstance(dim, (tuple, list))
dim = make_list(dim)
nb_dim = len(dim)
voxel_size = overload.get('voxel_size', self.voxel_size)
voxel_size = make_list(voxel_size, nb_dim)
bound = make_list(overload.get('bound', self.bound), nb_dim)
diffs = []
for d, vx, bnd in zip(dim, voxel_size, bound):
sides = []
for s in side:
grad = diff1d(x, order=order, dim=d, voxel_size=vx,
side=s, bound=bnd)
sides.append(grad)
sides = torch.stack(sides, dim=-1)
diffs.append(sides)
diffs = torch.stack(diffs, dim=-2)
if drop_dim_dim:
diffs = slice_tensor(diffs, 0, dim=-2)
if drop_side_dim:
diffs = slice_tensor(diffs, 0, dim=-1)
return diffs
class MembraneLoss(Loss):
"""Compute the membrane energy (squared gradients) of a tensor.
The membrane energy of a field is the integral of its squared
gradient magnitude (l2 norm). This class extends this concept to
other norms of the gradient (l1, l{1,2}).
In the l2 case, if we name "f" the unit of the field and "m" the
spatial unit of a voxel, the output loss has unit `(f/m)**2`.
If `factor` is used to weight each voxel by its volume (as should
be done in a proper integration) the unit becomes
`(f/m)**2 * m**d = f**2 * m**(d-2)`.
In the l1 case, it is `f/m` in the absence of weighting and
`f * m**(d-1)` with volume weighting.
"""
def __init__(self, voxel_size=1, factor=1, bound='dct2', l1=None,
*args, **kwargs):
"""
Parameters
----------
voxel_size : float or list[float], default=1
Voxel size. Useful for anisotropic tensors (where the
sampling rate is higher in some directions than others).
factor : float or list[float], default=1
Scale the loss by a per-dimension factor. Useful when
working with resized tensor to compensate for different
number of voxels.
bound : BoundType, default='dct2'
Boundary conditions, used to compute derivatives at the edges.
l1 : bool or int or list[int], default=None
Dimensions along which to apply a square root reduction
('l1 norm'), after taking the square. Dimensions are
those of the gradient map with shape
(batch, channel, *spatial, direction, side)
* False: nowhere == (squared) l2 norm
* True: everywhere == l1 norm
* Otherwise: l_{1,2} norm (group sparsity)
"""
super().__init__(*args, **kwargs)
self.voxel_size = voxel_size
self.factor = factor
self.bound = bound
self.l1 = l1
def forward(self, x, **overload):
"""
Parameters
----------
x : tensor
Input tensor
overload : dict
All parameters defined at build time can be overridden
at call time.
Returns
-------
loss : scalar or tensor
The output shape depends on the type of reduction used.
If 'mean' or 'sum', this function returns a scalar.
"""
nb_dim = x.dim() - 2
voxel_size = make_list(overload.get('voxel_size', self.voxel_size), nb_dim)
factor = make_list(overload.get('factor', self.factor), nb_dim)
bound = make_list(overload.get('bound', self.bound), nb_dim)
l1 = overload.get('l1', self.l1)
# Compute spatial gradients
#
# TODO: when penalty == 'l2', for some boundary conditions, there's no
# need to compute both forward and backward gradients as they are
# the same (but shifted). For now, to avoid having to detect which
# cases can be accelerated, I always compute both (more general).
loss = Diff(side=['f', 'b'], bound=bound, voxel_size=voxel_size)(x)
loss = loss.square()
# Apply l1
if l1 not in (None, False):
if l1 is True:
loss = loss.sqrt()
else:
l1 = make_list(l1)
loss = loss.sum(dim=l1).sqrt() # TODO: use self.reduction instead of sum?
# Reduce
loss = super().forward(loss)
# Scale
factor = prod(factor)
if factor != 1:
loss = loss * factor
return loss
class BendingLoss(Loss):
"""Compute the bending energy (squared gradients) of a tensor.
The bending energy of a field is the integral of its squared
second-order derivatives magnitude (l2 norm).
This class extends this concept to other norms of the gradient
(l1, l{1,2}).
In the l2 case, if we name "f" the unit of the field and "m" the
spatial unit of a voxel, the output loss has unit `(f/m**2)**2`.
If `factor` is used to weight each voxel by its volume (as should
be done in a proper integration) the unit becomes
`(f/m**2)**2 * m**d = f**2 * m**(d-4)`.
In the l1 case, it is `f/m**2` in the absence of weighting and
`f * m**(d-2)` with volume weighting.
"""
def __init__(self, voxel_size=1, factor=1, bound='dct2', l1=None,
*args, **kwargs):
"""
Parameters
----------
voxel_size : float or list[float], default=1
Voxel size. Useful for anisotropic tensors (where the
sampling rate is higher in some directions than others).
factor : float or list[float], default=1
Scale the loss by a per-dimension factor. Useful when
working with resized tensor to compensate for different
number of voxels.
bound : BoundType, default='dct2'
Boundary conditions, used to compute derivatives at the edges.
l1 : bool or int or list[int], default=None
Dimensions along which to apply a square root reduction
('l1 norm'), after taking the square. Dimensions are
those of the gradient map with shape
(batch, channel, *spatial, direction)
* False: nowhere == (squared) l2 norm
* True: everywhere == l1 norm
* Otherwise: l_{1,2} norm (group sparsity)
"""
super().__init__(*args, **kwargs)
self.voxel_size = voxel_size
self.factor = factor
self.bound = bound
self.l1 = l1
def forward(self, x, **overload):
"""
Parameters
----------
x : tensor
Input tensor
overload : dict
All parameters defined at build time can be overridden
at call time.
Returns
-------
loss : scalar or tensor
The output shape depends on the type of reduction used.
If 'mean' or 'sum', this function returns a scalar.
"""
nb_dim = x.dim() - 2
voxel_size = make_list(overload.get('voxel_size', self.voxel_size), nb_dim)
factor = make_list(overload.get('factor', self.factor), nb_dim)
bound = make_list(overload.get('bound', self.bound), nb_dim)
l1 = overload.get('l1', self.l1)
# Compute spatial gradients
loss = Diff(order=2, side='c', bound=bound, voxel_size=voxel_size)(x)
loss = loss.square()
# Apply l1
if l1 not in (None, False):
if l1 is True:
loss = loss.sqrt()
else:
l1 = make_list(l1)
loss = loss.sum(dim=l1).sqrt()
# Reduce
loss = super().forward(loss)
# Scale
factor = prod(factor)
if factor != 1:
loss = loss * factor
return loss
class LameShearLoss(Loss):
"""Strain-part of the (Linear)-Elastic energy (penalty on shears).
    mu = second Lame constant = shear modulus
    The shear energy of a deformation field is the integral of the square
    magnitude (l2 norm) of the off-diagonal terms of the symmetric part of its Jacobian.
This class extends this concept to other norms of the gradient
(l1, l{1,2}).
In the l2 case, E = sum_{i != j} (dv[i]/dx[j]) ** 2.
"""
def __init__(self, voxel_size=1, factor=1, bound='dct2', l1=None,
exclude_zooms=False, *args, **kwargs):
"""
Parameters
----------
voxel_size : float or list[float], default=1
Voxel size. Useful for anisotropic tensors (where the
sampling rate is higher in some directions than others).
factor : float or list[float], default=1
Scale the loss by a per-dimension factor. Useful when
working with resized tensor to compensate for different
number of voxels.
bound : BoundType, default='dct2'
Boundary conditions, used to compute derivatives at the edges.
l1 : bool or int or list[int], default=None
Dimensions along which to apply a square root reduction
('l1 norm'), after taking the square. Dimensions are
those of the gradient map with shape
(batch, channel, *spatial, side)
* False: nowhere == (squared) l2 norm
* True: everywhere == l1 norm
* Otherwise: l_{1,2} norm (group sparsity)
            Here, `channel` maps to elements of the Jacobian matrix, while
            `side` maps to the combination of sides (forward/backward)
used when extracting finite differences. Therefore, the
number of channels is dim*(dim+1)//2 and the number of sides
is 4.
exclude_zooms : bool, default=False
Do not include diagonal elements of the Jacobian in the
penalty (i.e., penalize only shears)
"""
super().__init__(*args, **kwargs)
self.voxel_size = voxel_size
self.factor = factor
self.bound = bound
self.l1 = l1
self.exclude_zooms = exclude_zooms
def forward(self, x, **overload):
"""
Parameters
----------
x : (batch, ndim, *spatial) tensor
Input displacement tensor (in channel first order)
overload : dict
All parameters defined at build time can be overridden
at call time.
Returns
-------
loss : scalar or tensor
The output shape depends on the type of reduction used.
If 'mean' or 'sum', this function returns a scalar.
"""
nb_dim = x.dim() - 2
voxel_size = make_list(overload.get('voxel_size', self.voxel_size), nb_dim)
factor = make_list(overload.get('factor', self.factor), nb_dim)
bound = make_list(overload.get('bound', self.bound), nb_dim)
l1 = overload.get('l1', self.l1)
exclude_zooms = overload.get('exclude_zooms', self.exclude_zooms)
# Compute spatial gradients
loss_diag = [] # diagonal elements of the Jacobian
        loss_offdiag = [] # off-diagonal elements of the (symmetric) Jacobian
for i in range(nb_dim):
# symmetric part
x_i = x[:, i:i+1, ...]
subloss_diag = []
subloss_offdiag = []
for j in range(nb_dim):
for side_i in ('f', 'b'):
diff = Diff(dim=[j+2], side=side_i, bound=bound,
voxel_size=voxel_size)
diff_ij = diff(x_i)
if i == j:
# diagonal elements
if not exclude_zooms:
subloss_diag.append(diff_ij)
else:
# off diagonal elements
x_j = x[:, j:j+1, ...]
for side_j in ('f', 'b'):
diff = Diff(dim=[i+2], side=side_j, bound=bound,
voxel_size=voxel_size)
diff_ji = diff(x_j)
subloss_offdiag.append((diff_ij + diff_ji)/2)
if not exclude_zooms:
loss_diag.append(torch.stack(subloss_diag, dim=-1))
loss_offdiag.append(torch.stack(subloss_offdiag, dim=-1))
if not exclude_zooms:
loss_diag = torch.cat(loss_diag, dim=1)
loss_offdiag = torch.cat(loss_offdiag, dim=1)
if l1 not in (None, False):
# Apply l1 reduction
if l1 is True:
if not exclude_zooms:
loss_diag = loss_diag.abs()
loss_offdiag = loss_offdiag.abs()
else:
l1 = make_list(l1)
if not exclude_zooms:
loss_diag = loss_diag.square().sum(dim=l1, keepdim=True).sqrt()
loss_offdiag = loss_offdiag.square().sum(dim=l1, keepdim=True).sqrt()
else:
# Apply l2 reduction
if not exclude_zooms:
loss_diag = loss_diag.square()
loss_offdiag = loss_offdiag.square()
# Mean reduction across sides
if not exclude_zooms:
loss_diag = loss_diag.mean(dim=-1)
loss_offdiag = loss_offdiag.mean(dim=-1)
# Weighted reduction across elements
if not exclude_zooms:
if loss_diag.shape[1] == 1:
# element dimension already reduced -> we need a small hack
loss = (loss_diag.square() + 2*loss_offdiag.square()) / (nb_dim**2)
loss = loss.sum(dim=1, keepdim=True).sqrt()
else:
# simple weighted average
loss = (loss_diag.sum(dim=1, keepdim=True) +
loss_offdiag.sum(dim=1, keepdim=True)*2) / (nb_dim**2)
else:
loss = loss_offdiag.sum(dim=1, keepdim=True)*2 / (nb_dim**2)
# Reduce
loss = super().forward(loss)
# Scale
factor = prod(factor)
if factor != 1:
loss = loss * factor
return loss
class LameZoomLoss(Loss):
"""Compression-part of the (Linear)-Elastic energy (penalty on volume change).
    lambda = first Lame constant
    The compression energy of a deformation field is the integral of the square
    magnitude (l2 norm) of the trace of its Jacobian.
This class extends this concept to other norms of the gradient
(l1, l{1,2}).
In the l2 case, E = sum_{ij} (dv[i]/dx[j] + dv[j]/dx[i]) ** 2.
"""
def __init__(self, voxel_size=1, factor=1, bound='dct2', l1=None,
*args, **kwargs):
"""
Parameters
----------
voxel_size : float or list[float], default=1
Voxel size. Useful for anisotropic tensors (where the
sampling rate is higher in some directions than others).
factor : float or list[float], default=1
Scale the loss by a per-dimension factor. Useful when
working with resized tensor to compensate for different
number of voxels.
bound : BoundType, default='dct2'
Boundary conditions, used to compute derivatives at the edges.
l1 : bool or int or list[int], default=None
Dimensions along which to apply a square root reduction
('l1 norm'), after taking the square. Dimensions are
those of the gradient map with shape
(batch, channel, *spatial, direction, side)
* False: nowhere == (squared) l2 norm
* True: everywhere == l1 norm
* Otherwise: l_{1,2} norm (group sparsity)
"""
super().__init__(*args, **kwargs)
self.voxel_size = voxel_size
self.factor = factor
self.bound = bound
self.l1 = l1
def forward(self, x, **overload):
"""
Parameters
----------
x : tensor
Input tensor
overload : dict
All parameters defined at build time can be overridden
at call time.
Returns
-------
loss : scalar or tensor
The output shape depends on the type of reduction used.
If 'mean' or 'sum', this function returns a scalar.
"""
nb_dim = x.dim() - 2
voxel_size = make_list(overload.get('voxel_size', self.voxel_size), nb_dim)
factor = make_list(overload.get('factor', self.factor), nb_dim)
bound = make_list(overload.get('bound', self.bound), nb_dim)
l1 = overload.get('l1', self.l1)
# Compute spatial gradients
loss = []
for i in range(nb_dim):
x_i = x[:, i:i+1, ...]
diff = Diff(dim=[i], side=['f', 'b'], bound=bound,
voxel_size=voxel_size)
loss.append(diff(x_i))
loss = torch.cat(loss, dim=1)
loss = loss.square()
# Apply l1
if l1 not in (None, False):
if l1 is True:
loss = loss.sqrt()
else:
l1 = make_list(l1)
loss = loss.sum(dim=l1, keepdim=True).sqrt()
# Mean reduction across sides
loss = loss.mean(dim=-1)
# Reduce
loss = super().forward(loss)
# Scale
factor = prod(factor)
if factor != 1:
loss = loss * factor
return loss
| [
"nitorch.core.utils.slice_tensor",
"torch.stack",
"nitorch.core.pyutils.make_list",
"nitorch.spatial.diff1d",
"nitorch.core.pyutils.prod",
"torch.cat"
]
| [((2889, 2904), 'nitorch.core.pyutils.make_list', 'make_list', (['side'], {}), '(side)\n', (2898, 2904), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((3083, 3097), 'nitorch.core.pyutils.make_list', 'make_list', (['dim'], {}), '(dim)\n', (3092, 3097), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((3211, 3240), 'nitorch.core.pyutils.make_list', 'make_list', (['voxel_size', 'nb_dim'], {}), '(voxel_size, nb_dim)\n', (3220, 3240), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((3682, 3708), 'torch.stack', 'torch.stack', (['diffs'], {'dim': '(-2)'}), '(diffs, dim=-2)\n', (3693, 3708), False, 'import torch\n'), ((7441, 7453), 'nitorch.core.pyutils.prod', 'prod', (['factor'], {}), '(factor)\n', (7445, 7453), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((10749, 10761), 'nitorch.core.pyutils.prod', 'prod', (['factor'], {}), '(factor)\n', (10753, 10761), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((15463, 15493), 'torch.cat', 'torch.cat', (['loss_offdiag'], {'dim': '(1)'}), '(loss_offdiag, dim=1)\n', (15472, 15493), False, 'import torch\n'), ((17042, 17054), 'nitorch.core.pyutils.prod', 'prod', (['factor'], {}), '(factor)\n', (17046, 17054), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((19898, 19920), 'torch.cat', 'torch.cat', (['loss'], {'dim': '(1)'}), '(loss, dim=1)\n', (19907, 19920), False, 'import torch\n'), ((20343, 20355), 'nitorch.core.pyutils.prod', 'prod', (['factor'], {}), '(factor)\n', (20347, 20355), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((3607, 3633), 'torch.stack', 'torch.stack', (['sides'], {'dim': '(-1)'}), '(sides, dim=-1)\n', (3618, 3633), False, 'import torch\n'), ((3755, 3785), 'nitorch.core.utils.slice_tensor', 'slice_tensor', (['diffs', '(0)'], {'dim': '(-2)'}), '(diffs, 0, dim=-2)\n', (3767, 3785), False, 'from nitorch.core.utils import slice_tensor\n'), ((3832, 3862), 'nitorch.core.utils.slice_tensor', 'slice_tensor', (['diffs', '(0)'], {'dim': '(-1)'}), '(diffs, 0, dim=-1)\n', (3844, 3862), False, 'from nitorch.core.utils import slice_tensor\n'), ((15412, 15439), 'torch.cat', 'torch.cat', (['loss_diag'], {'dim': '(1)'}), '(loss_diag, dim=1)\n', (15421, 15439), False, 'import torch\n'), ((3458, 3521), 'nitorch.spatial.diff1d', 'diff1d', (['x'], {'order': 'order', 'dim': 'd', 'voxel_size': 'vx', 'side': 's', 'bound': 'bnd'}), '(x, order=order, dim=d, voxel_size=vx, side=s, bound=bnd)\n', (3464, 3521), False, 'from nitorch.spatial import diff1d\n'), ((7247, 7260), 'nitorch.core.pyutils.make_list', 'make_list', (['l1'], {}), '(l1)\n', (7256, 7260), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((10599, 10612), 'nitorch.core.pyutils.make_list', 'make_list', (['l1'], {}), '(l1)\n', (10608, 10612), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((15320, 15356), 'torch.stack', 'torch.stack', (['subloss_offdiag'], {'dim': '(-1)'}), '(subloss_offdiag, dim=-1)\n', (15331, 15356), False, 'import torch\n'), ((15766, 15779), 'nitorch.core.pyutils.make_list', 'make_list', (['l1'], {}), '(l1)\n', (15775, 15779), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((20107, 20120), 'nitorch.core.pyutils.make_list', 'make_list', (['l1'], {}), '(l1)\n', (20116, 20120), False, 'from nitorch.core.pyutils import make_list, prod\n'), ((15253, 15286), 'torch.stack', 'torch.stack', (['subloss_diag'], {'dim': '(-1)'}), '(subloss_diag, dim=-1)\n', (15264, 15286), False, 'import torch\n')] |
#!/usr/bin/env python3
# Copyright 2020 Gaitech Korea Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: <NAME>
import os
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import DeclareLaunchArgument
from launch.substitutions import LaunchConfiguration
from launch_ros.actions import Node
def generate_launch_description():
default_config_locks = os.path.join(get_package_share_directory('twist_mux'),
'config', 'twist_mux_locks.yaml')
default_config_topics = os.path.join(get_package_share_directory('twist_mux'),
'config', 'twist_mux_topics.yaml')
default_config_joystick = os.path.join(get_package_share_directory('twist_mux'),
'config', 'joystick.yaml')
return LaunchDescription([
DeclareLaunchArgument(
'config_locks',
default_value=default_config_locks,
description='Default locks config file'),
DeclareLaunchArgument(
'config_topics',
default_value=default_config_topics,
description='Default topics config file'),
DeclareLaunchArgument(
'config_joy',
default_value=default_config_joystick,
description='Default joystick config file'),
DeclareLaunchArgument(
'cmd_vel_out',
default_value='twist_mux/cmd_vel',
description='cmd vel output topic'),
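        # twist_mux node: multiplexes the configured cmd_vel input topics into a
        # single output topic according to the topic/lock priority config files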
Node(
package='twist_mux',
executable='twist_mux',
output='screen',
remappings={('/cmd_vel_out', LaunchConfiguration('cmd_vel_out'))},
parameters=[
LaunchConfiguration('config_locks'),
LaunchConfiguration('config_topics'),
LaunchConfiguration('config_joy')]
),
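        # twist_marker node: publishes a visualization marker for the selected twist
        # (frame, scale and vertical position are set by the parameters below)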
Node(
package='twist_mux',
executable='twist_marker',
output='screen',
remappings={('/twist', LaunchConfiguration('cmd_vel_out'))},
parameters=[{
'frame_id': 'base_link',
'scale': 1.0,
'vertical_position': 2.0}])
])
| [
"ament_index_python.packages.get_package_share_directory",
"launch.substitutions.LaunchConfiguration",
"launch.actions.DeclareLaunchArgument"
]
| [((959, 999), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""twist_mux"""'], {}), "('twist_mux')\n", (986, 999), False, 'from ament_index_python.packages import get_package_share_directory\n'), ((1116, 1156), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""twist_mux"""'], {}), "('twist_mux')\n", (1143, 1156), False, 'from ament_index_python.packages import get_package_share_directory\n'), ((1277, 1317), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""twist_mux"""'], {}), "('twist_mux')\n", (1304, 1317), False, 'from ament_index_python.packages import get_package_share_directory\n'), ((1429, 1547), 'launch.actions.DeclareLaunchArgument', 'DeclareLaunchArgument', (['"""config_locks"""'], {'default_value': 'default_config_locks', 'description': '"""Default locks config file"""'}), "('config_locks', default_value=default_config_locks,\n description='Default locks config file')\n", (1450, 1547), False, 'from launch.actions import DeclareLaunchArgument\n'), ((1590, 1711), 'launch.actions.DeclareLaunchArgument', 'DeclareLaunchArgument', (['"""config_topics"""'], {'default_value': 'default_config_topics', 'description': '"""Default topics config file"""'}), "('config_topics', default_value=default_config_topics,\n description='Default topics config file')\n", (1611, 1711), False, 'from launch.actions import DeclareLaunchArgument\n'), ((1754, 1876), 'launch.actions.DeclareLaunchArgument', 'DeclareLaunchArgument', (['"""config_joy"""'], {'default_value': 'default_config_joystick', 'description': '"""Default joystick config file"""'}), "('config_joy', default_value=default_config_joystick,\n description='Default joystick config file')\n", (1775, 1876), False, 'from launch.actions import DeclareLaunchArgument\n'), ((1919, 2030), 'launch.actions.DeclareLaunchArgument', 'DeclareLaunchArgument', (['"""cmd_vel_out"""'], {'default_value': '"""twist_mux/cmd_vel"""', 'description': '"""cmd vel output topic"""'}), "('cmd_vel_out', default_value='twist_mux/cmd_vel',\n description='cmd vel output topic')\n", (1940, 2030), False, 'from launch.actions import DeclareLaunchArgument\n'), ((2297, 2332), 'launch.substitutions.LaunchConfiguration', 'LaunchConfiguration', (['"""config_locks"""'], {}), "('config_locks')\n", (2316, 2332), False, 'from launch.substitutions import LaunchConfiguration\n'), ((2350, 2386), 'launch.substitutions.LaunchConfiguration', 'LaunchConfiguration', (['"""config_topics"""'], {}), "('config_topics')\n", (2369, 2386), False, 'from launch.substitutions import LaunchConfiguration\n'), ((2404, 2437), 'launch.substitutions.LaunchConfiguration', 'LaunchConfiguration', (['"""config_joy"""'], {}), "('config_joy')\n", (2423, 2437), False, 'from launch.substitutions import LaunchConfiguration\n'), ((2218, 2252), 'launch.substitutions.LaunchConfiguration', 'LaunchConfiguration', (['"""cmd_vel_out"""'], {}), "('cmd_vel_out')\n", (2237, 2252), False, 'from launch.substitutions import LaunchConfiguration\n'), ((2601, 2635), 'launch.substitutions.LaunchConfiguration', 'LaunchConfiguration', (['"""cmd_vel_out"""'], {}), "('cmd_vel_out')\n", (2620, 2635), False, 'from launch.substitutions import LaunchConfiguration\n')] |
import sys
import unittest
import requests_mock
from mock import patch
sys.path.append('services/LiveService')
from LiveService import LiveService
L = LiveService()
baseURL = "https://yanexx65s8e1.live.elementalclouddev.com/api"
class LiveServiceTest(unittest.TestCase):
'''@patch('services.LiveService.LiveService.time', return_value=1502345833)
def testSetHeaders(self, mock_time):
headers = L.setHeaders("/schedules")
self.assertEqual(headers, {'X-Auth-Expires': '1502345863',
'X-Auth-Key': '9c9a72cd3a8feec48539f1943afbef8d',
'Content-type': 'application/xml',
'X-Auth-User': '',
'Accept': 'application/xml'})'''
@requests_mock.Mocker()
def testGetStatus(self, m):
m.get(baseURL + "/live_events/150/status", status_code=200)
resp = L.getLiveEventStatus(150)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testGetEvents(self, m):
m.get(baseURL + "/live_events", status_code=200)
m.get(baseURL + "/live_events?filter=running", status_code=200)
resp = L.getLiveEvents(None)
self.assertEqual(resp.status_code, 200)
resp = L.getLiveEvents("running")
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testGetEvent(self, m):
m.get(baseURL + "/live_events/164", status_code=200)
resp = L.getLiveEvent(164)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testGetSchedules(self, m):
m.get(baseURL + "/schedules", status_code=200)
resp = L.getSchedules()
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testGetLiveProfiles(self, m):
m.get(baseURL + "/live_event_profiles", status_code=200)
resp = L.getLiveProfiles()
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testGetLiveProfile(self, m):
m.get(baseURL + "/live_event_profiles/11", status_code=200)
resp = L.getLiveProfile(11)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testCreateLiveEvent(self, m):
with open('Tests/test_XML/live_event.xml', 'r') as infile:
xml = infile.read()
m.post(baseURL + "/live_events", status_code=201)
resp = L.createEvent(xml)
self.assertEqual(resp.status_code, 201)
@requests_mock.Mocker()
def testCreateSchedule(self, m):
with open('Tests/test_XML/schedule.xml', 'r') as infile:
xml = infile.read()
m.post(baseURL + "/schedules", status_code=201)
resp = L.createSchedule(xml)
self.assertEqual(resp.status_code, 201)
@requests_mock.Mocker()
def testCreateProfile(self, m):
with open('Tests/test_XML/schedule.xml', 'r') as infile:
xml = infile.read()
m.post(baseURL + "/schedules", status_code=201)
resp = L.createSchedule(xml)
self.assertEqual(resp.status_code, 201)
@requests_mock.Mocker()
def testUpdateEvent(self, m):
with open('Tests/test_XML/live_event.xml', 'r') as infile:
xml = infile.read()
m.put(baseURL + "/live_events/50", status_code=200)
resp = L.updateLiveEvent(50, xml)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testUpdatePlaylist(self, m):
with open('Tests/test_XML/live_event.xml', 'r') as infile:
xml = infile.read()
m.post(baseURL + "/live_events/92/playlist", status_code=200)
resp = L.updatePlaylist(92, xml)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testUpdateSchedule(self, m):
with open('Tests/test_XML/schedule.xml', 'r') as infile:
xml = infile.read()
m.put(baseURL + "/schedules/13", status_code=200)
resp = L.updateSchedule(13, xml)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testUpdateProfile(self, m):
with open('Tests/test_XML/live_profile.xml', 'r') as infile:
xml = infile.read()
m.put(baseURL + "/live_event_profiles/33", status_code=200)
resp = L.updateProfile(33, xml)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testRemoveLiveEvent(self, m):
m.delete(baseURL + "/live_events/191", status_code=200)
resp = L.removeEvent(191)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testRemoveSchedule(self, m):
m.delete(baseURL + "/schedules/13", status_code=200)
resp = L.removeSchedule(13)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testRemoveProfile(self, m):
m.delete(baseURL + "/live_event_profiles/33", status_code=200)
resp = L.removeProfile(33)
self.assertEqual(resp.status_code, 200)
@requests_mock.Mocker()
def testStartEvent(self, m):
m.post(baseURL + "/live_events/50/start", status_code=200)
resp = L.startLiveEvent(50)
self.assertEqual(resp.status_code, 200)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"requests_mock.Mocker",
"sys.path.append",
"LiveService.LiveService"
]
| [((71, 110), 'sys.path.append', 'sys.path.append', (['"""services/LiveService"""'], {}), "('services/LiveService')\n", (86, 110), False, 'import sys\n'), ((154, 167), 'LiveService.LiveService', 'LiveService', ([], {}), '()\n', (165, 167), False, 'from LiveService import LiveService\n'), ((798, 820), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (818, 820), False, 'import requests_mock\n'), ((1016, 1038), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (1036, 1038), False, 'import requests_mock\n'), ((1381, 1403), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (1401, 1403), False, 'import requests_mock\n'), ((1585, 1607), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (1605, 1607), False, 'import requests_mock\n'), ((1784, 1806), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (1804, 1806), False, 'import requests_mock\n'), ((1999, 2021), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (2019, 2021), False, 'import requests_mock\n'), ((2217, 2239), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (2237, 2239), False, 'import requests_mock\n'), ((2523, 2545), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (2543, 2545), False, 'import requests_mock\n'), ((2827, 2849), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (2847, 2849), False, 'import requests_mock\n'), ((3130, 3152), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (3150, 3152), False, 'import requests_mock\n'), ((3442, 3464), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (3462, 3464), False, 'import requests_mock\n'), ((3766, 3788), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (3786, 3788), False, 'import requests_mock\n'), ((4076, 4098), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (4096, 4098), False, 'import requests_mock\n'), ((4398, 4420), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (4418, 4420), False, 'import requests_mock\n'), ((4611, 4633), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (4631, 4633), False, 'import requests_mock\n'), ((4822, 4844), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (4842, 4844), False, 'import requests_mock\n'), ((5041, 5063), 'requests_mock.Mocker', 'requests_mock.Mocker', ([], {}), '()\n', (5061, 5063), False, 'import requests_mock\n'), ((5282, 5297), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5295, 5297), False, 'import unittest\n')] |
import pandas as pd
import numpy as np
import os
import logging
# suppress warnings
import warnings;
warnings.filterwarnings('ignore');
from tqdm.autonotebook import tqdm
# register `pandas.progress_apply` and `pandas.Series.map_apply` with `tqdm`
tqdm.pandas()
# https://pandas.pydata.org/pandas-docs/stable/user_guide/options.html#available-options
# adjust pandas display
pd.options.display.max_columns = 30 # default 20
pd.options.display.max_rows = 200 # default 60
pd.options.display.float_format = '{:.2f}'.format
# pd.options.display.precision = 2
pd.options.display.max_colwidth = 200 # default 50; None = all
# Number of array items in summary at beginning and end of each dimension
# np.set_printoptions(edgeitems=3) # default 3
np.set_printoptions(suppress=True) # no scientific notation for small numbers
# IPython (Jupyter) setting:
# Print out every value instead of just "last_expr" (default)
from IPython.core.interactiveshell import InteractiveShell
InteractiveShell.ast_node_interactivity = "all"
import matplotlib as mpl
from matplotlib import pyplot as plt
# defaults: mpl.rcParamsDefault
rc_params = {'figure.figsize': (8, 4),
'axes.labelsize': 'large',
'axes.titlesize': 'large',
'xtick.labelsize': 'large',
'ytick.labelsize': 'large',
'savefig.dpi': 100,
'figure.dpi': 100 }
# adjust matplotlib defaults
mpl.rcParams.update(rc_params)
import seaborn as sns
sns.set_style("darkgrid")
# sns.set()
| [
"tqdm.autonotebook.tqdm.pandas",
"matplotlib.rcParams.update",
"seaborn.set_style",
"warnings.filterwarnings",
"numpy.set_printoptions"
]
| [((103, 136), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (126, 136), False, 'import warnings\n'), ((252, 265), 'tqdm.autonotebook.tqdm.pandas', 'tqdm.pandas', ([], {}), '()\n', (263, 265), False, 'from tqdm.autonotebook import tqdm\n'), ((746, 780), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'suppress': '(True)'}), '(suppress=True)\n', (765, 780), True, 'import numpy as np\n'), ((1429, 1459), 'matplotlib.rcParams.update', 'mpl.rcParams.update', (['rc_params'], {}), '(rc_params)\n', (1448, 1459), True, 'import matplotlib as mpl\n'), ((1483, 1508), 'seaborn.set_style', 'sns.set_style', (['"""darkgrid"""'], {}), "('darkgrid')\n", (1496, 1508), True, 'import seaborn as sns\n')] |
import sys
import soundcard
import numpy
import pytest
ones = numpy.ones(1024)
signal = numpy.concatenate([[ones], [-ones]]).T
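# stereo test signal: constant +1 on channel 0 (left), constant -1 on channel 1 (right)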
def test_speakers():
for speaker in soundcard.all_speakers():
assert isinstance(speaker.name, str)
assert hasattr(speaker, 'id')
assert isinstance(speaker.channels, int)
assert speaker.channels > 0
def test_microphones():
for microphone in soundcard.all_microphones():
assert isinstance(microphone.name, str)
assert hasattr(microphone, 'id')
assert isinstance(microphone.channels, int)
assert microphone.channels > 0
def test_default_playback():
soundcard.default_speaker().play(signal, 44100, channels=2)
def test_default_record():
recording = soundcard.default_microphone().record(1024, 44100)
    assert len(recording) == 1024
def test_default_blockless_record():
recording = soundcard.default_microphone().record(None, 44100)
@pytest.fixture
def loopback_speaker():
import sys
if sys.platform == 'win32':
# must install https://www.vb-audio.com/Cable/index.htm
return soundcard.get_speaker('Cable')
elif sys.platform == 'darwin':
# must install soundflower
return soundcard.get_speaker('Soundflower64')
elif sys.platform == 'linux':
# pacmd load-module module-null-sink channels=6 rate=48000
return soundcard.get_speaker('Null')
else:
raise RuntimeError('Unknown platform {}'.format(sys.platform))
@pytest.fixture
def loopback_player(loopback_speaker):
with loopback_speaker.player(48000, channels=2, blocksize=512) as player:
yield player
@pytest.fixture
def loopback_microphone():
if sys.platform == 'win32':
# must install https://www.vb-audio.com/Cable/index.htm
return soundcard.get_microphone('Cable')
elif sys.platform == 'darwin':
# must install soundflower
return soundcard.get_microphone('Soundflower64')
elif sys.platform == 'linux':
return soundcard.get_microphone('Null', include_loopback=True)
else:
raise RuntimeError('Unknown platform {}'.format(sys.platform))
@pytest.fixture
def loopback_recorder(loopback_microphone):
with loopback_microphone.recorder(48000, channels=2, blocksize=512) as recorder:
yield recorder
def test_loopback_playback(loopback_player, loopback_recorder):
loopback_player.play(signal)
recording = loopback_recorder.record(1024*10)
assert recording.shape[1] == 2
left, right = recording.T
assert left.mean() > 0
assert right.mean() < 0
assert (left > 0.5).sum() == len(signal)
assert (right < -0.5).sum() == len(signal)
def test_loopback_reverse_recorder_channelmap(loopback_player, loopback_microphone):
with loopback_microphone.recorder(48000, channels=[1, 0], blocksize=512) as loopback_recorder:
loopback_player.play(signal)
recording = loopback_recorder.record(1024*12)
assert recording.shape[1] == 2
left, right = recording.T
assert right.mean() > 0
assert left.mean() < 0
assert (right > 0.5).sum() == len(signal)
assert (left < -0.5).sum() == len(signal)
def test_loopback_reverse_player_channelmap(loopback_speaker, loopback_recorder):
with loopback_speaker.player(48000, channels=[1, 0], blocksize=512) as loopback_player:
loopback_player.play(signal)
recording = loopback_recorder.record(1024*12)
assert recording.shape[1] == 2
left, right = recording.T
assert right.mean() > 0
assert left.mean() < 0
assert (right > 0.5).sum() == len(signal)
assert (left < -0.5).sum() == len(signal)
def test_loopback_mono_player_channelmap(loopback_speaker, loopback_recorder):
with loopback_speaker.player(48000, channels=[0], blocksize=512) as loopback_player:
loopback_player.play(signal[:,0])
recording = loopback_recorder.record(1024*12)
assert recording.shape[1] == 2
left, right = recording.T
assert left.mean() > 0
if sys.platform == 'linux':
# unmapped channels on linux are filled with the mean of other channels
assert right.mean() < left.mean()
else:
assert abs(right.mean()) < 0.01 # something like zero
assert (left > 0.5).sum() == len(signal)
def test_loopback_mono_recorder_channelmap(loopback_player, loopback_microphone):
with loopback_microphone.recorder(48000, channels=[0], blocksize=512) as loopback_recorder:
loopback_player.play(signal)
recording = loopback_recorder.record(1024*12)
assert len(recording.shape) == 1 or recording.shape[1] == 1
assert recording.mean() > 0
assert (recording > 0.5).sum() == len(signal)
def test_loopback_multichannel_channelmap(loopback_speaker, loopback_microphone):
with loopback_speaker.player(48000, channels=[2, 0], blocksize=512) as loopback_player:
with loopback_microphone.recorder(48000, channels=[2, 0], blocksize=512) as loopback_recorder:
loopback_player.play(signal)
recording = loopback_recorder.record(1024*12)
assert len(recording.shape) == 2
left, right = recording.T
assert left.mean() > 0
assert right.mean() < 0
assert (left > 0.5).sum() == len(signal)
assert (right < -0.5).sum() == len(signal)
| [
"soundcard.get_microphone",
"soundcard.all_speakers",
"soundcard.all_microphones",
"numpy.ones",
"soundcard.default_microphone",
"soundcard.default_speaker",
"soundcard.get_speaker",
"numpy.concatenate"
]
| [((63, 79), 'numpy.ones', 'numpy.ones', (['(1024)'], {}), '(1024)\n', (73, 79), False, 'import numpy\n'), ((89, 125), 'numpy.concatenate', 'numpy.concatenate', (['[[ones], [-ones]]'], {}), '([[ones], [-ones]])\n', (106, 125), False, 'import numpy\n'), ((169, 193), 'soundcard.all_speakers', 'soundcard.all_speakers', ([], {}), '()\n', (191, 193), False, 'import soundcard\n'), ((410, 437), 'soundcard.all_microphones', 'soundcard.all_microphones', ([], {}), '()\n', (435, 437), False, 'import soundcard\n'), ((1114, 1144), 'soundcard.get_speaker', 'soundcard.get_speaker', (['"""Cable"""'], {}), "('Cable')\n", (1135, 1144), False, 'import soundcard\n'), ((1806, 1839), 'soundcard.get_microphone', 'soundcard.get_microphone', (['"""Cable"""'], {}), "('Cable')\n", (1830, 1839), False, 'import soundcard\n'), ((653, 680), 'soundcard.default_speaker', 'soundcard.default_speaker', ([], {}), '()\n', (678, 680), False, 'import soundcard\n'), ((757, 787), 'soundcard.default_microphone', 'soundcard.default_microphone', ([], {}), '()\n', (785, 787), False, 'import soundcard\n'), ((896, 926), 'soundcard.default_microphone', 'soundcard.default_microphone', ([], {}), '()\n', (924, 926), False, 'import soundcard\n'), ((1230, 1268), 'soundcard.get_speaker', 'soundcard.get_speaker', (['"""Soundflower64"""'], {}), "('Soundflower64')\n", (1251, 1268), False, 'import soundcard\n'), ((1925, 1966), 'soundcard.get_microphone', 'soundcard.get_microphone', (['"""Soundflower64"""'], {}), "('Soundflower64')\n", (1949, 1966), False, 'import soundcard\n'), ((1385, 1414), 'soundcard.get_speaker', 'soundcard.get_speaker', (['"""Null"""'], {}), "('Null')\n", (1406, 1414), False, 'import soundcard\n'), ((2016, 2071), 'soundcard.get_microphone', 'soundcard.get_microphone', (['"""Null"""'], {'include_loopback': '(True)'}), "('Null', include_loopback=True)\n", (2040, 2071), False, 'import soundcard\n')] |
import numpy as np
import h5py
import os
from devito.logger import info
from devito import TimeFunction, clear_cache
from examples.seismic.acoustic import AcousticWaveSolver
from examples.seismic import Model, RickerSource, Receiver, TimeAxis
from math import floor
from scipy.interpolate import griddata
import argparse
parser = argparse.ArgumentParser(description='')
parser.add_argument('--data_path', dest='data_path', type=str, default='/home/ec2-user/data', help='raw data path')
parser.add_argument('--save_dir', dest='save_dir', type=str, default='/home/ec2-user/data', help='saving directory')
args = parser.parse_args()
data_path = args.data_path
save_dir = args.save_dir
origin = (0., 0.)
spacing=(7.5, 7.5)
tn=1100.
nbpml=40
# Define your vp in km/sec (x, z)
vp = np.fromfile(os.path.join(data_path, 'vp_marmousi_bi'),
dtype='float32', sep="")
vp = np.reshape(vp, (1601, 401))
# vp = vp[400:1401, 0:401]
shape=[401, 301]
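# flatten the velocity model into (point, value) pairs so a sub-window can be
# re-gridded below with scipy's scattered-data (cubic) interpolation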
values = np.zeros([vp.shape[0]*vp.shape[1], ])
points = np.zeros([vp.shape[0]*vp.shape[1], 2])
k = 0
for indx in range(0, vp.shape[0]):
for indy in range(0, vp.shape[1]):
values[k] = vp[indx, indy]
points[k, 0] = indx
points[k, 1] = indy
k = k + 1
# nx, ny = shape[0], shape[1]
X, Y = np.meshgrid(np.array(np.linspace(1000, 1287, shape[0])), np.array(np.linspace(120, 232, shape[1])))
int_vp = griddata(points, values, (X, Y), method='cubic')
int_vp = np.transpose(int_vp)
vp = int_vp
# create model
model = Model(origin, spacing, shape, 2, vp, nbpml=nbpml)
# Derive timestepping from model spacing
dt = model.critical_dt
t0 = 0.0
nt = int(1 + (tn-t0) / dt) # Number of timesteps
time = np.linspace(t0, tn, nt) # Discretized time axis
datasize0 = int(np.shape(range(0, shape[0], 4))[0])
datasize1 = int(np.shape(range(100, nt, 20))[0])
datasize = datasize0*datasize1
strTrainA = os.path.join(save_dir, 'Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_A_train.hdf5')
strTrainB = os.path.join(save_dir, 'Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_B_train.hdf5')
dataset_train = "train_dataset"
file_trainA = h5py.File(strTrainA, 'w-')
datasetA = file_trainA.create_dataset(dataset_train, (datasize, shape[0]+2*nbpml, shape[1]+2*nbpml))
file_trainB = h5py.File(strTrainB, 'w-')
datasetB = file_trainB.create_dataset(dataset_train, (datasize, shape[0]+2*nbpml, shape[1]+2*nbpml))
num_rec = 601
rec_samp = np.linspace(0., model.domain_size[0], num=num_rec);
rec_samp = rec_samp[1]-rec_samp[0]
time_range = TimeAxis(start=t0, stop=tn, step=dt)
src = RickerSource(name='src', grid=model.grid, f0=0.025, time_range=time_range, space_order=1, npoint=1)
src.coordinates.data[0, :] = np.array([1*spacing[0], 2*spacing[1]]).astype(np.float32)
rec = Receiver(name='rec', grid=model.grid, time_range=time_range, npoint=num_rec)
rec.coordinates.data[:, 0] = np.linspace(0., model.domain_size[0], num=num_rec)
rec.coordinates.data[:, 1:] = src.coordinates.data[0, 1:]
solverbad = AcousticWaveSolver(model, source=src, receiver=rec, kernel='OT2', isic=True,
space_order=2, freesurface=False)
solvergood = AcousticWaveSolver(model, source=src, receiver=rec, kernel='OT2', isic=True,
space_order=20, freesurface=False)
ulocgood = TimeFunction(name="u", grid=model.grid, time_order=2, space_order=20, save=nt)
ulocbad = TimeFunction(name="u", grid=model.grid, time_order=2, space_order=2, save=nt)
kk = 0
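# sweep the source across the model (every 4th grid point); for each shot, run an
# accurate (20th-order) and an inaccurate (2nd-order) forward solve and store the
# subsampled wavefield snapshots as the A/B training pairs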
for xsrc in range(0, shape[0], 4):
clear_cache()
ulocgood.data.fill(0.)
ulocbad.data.fill(0.)
src.coordinates.data[0, :] = np.array([xsrc*spacing[0], 2*spacing[1]]).astype(np.float32)
rec.coordinates.data[:, 0] = np.linspace(0., model.domain_size[0], num=num_rec)
rec.coordinates.data[:, 1:] = src.coordinates.data[0, 1:]
_, ulocgood, _ = solvergood.forward(m=model.m, src=src, time=nt-1, save=True)
_, ulocbad, _ = solverbad.forward(m=model.m, src=src, time=nt-1, save=True)
datasetA[kk:(kk+datasize1), :, :] = np.array(ulocgood.data[range(100, nt, 20), :, :])
datasetB[kk:(kk+datasize1), :, :] = np.array(ulocbad.data[range(100, nt, 20), :, :])
kk = kk + datasize1
file_trainA.close()
file_trainB.close()
| [
"examples.seismic.TimeAxis",
"numpy.reshape",
"argparse.ArgumentParser",
"scipy.interpolate.griddata",
"devito.TimeFunction",
"os.path.join",
"h5py.File",
"examples.seismic.RickerSource",
"numpy.zeros",
"examples.seismic.Model",
"numpy.linspace",
"examples.seismic.Receiver",
"devito.clear_cache",
"numpy.array",
"examples.seismic.acoustic.AcousticWaveSolver",
"numpy.transpose"
]
| [((331, 370), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '""""""'}), "(description='')\n", (354, 370), False, 'import argparse\n'), ((877, 904), 'numpy.reshape', 'np.reshape', (['vp', '(1601, 401)'], {}), '(vp, (1601, 401))\n', (887, 904), True, 'import numpy as np\n'), ((959, 996), 'numpy.zeros', 'np.zeros', (['[vp.shape[0] * vp.shape[1]]'], {}), '([vp.shape[0] * vp.shape[1]])\n', (967, 996), True, 'import numpy as np\n'), ((1006, 1046), 'numpy.zeros', 'np.zeros', (['[vp.shape[0] * vp.shape[1], 2]'], {}), '([vp.shape[0] * vp.shape[1], 2])\n', (1014, 1046), True, 'import numpy as np\n'), ((1385, 1433), 'scipy.interpolate.griddata', 'griddata', (['points', 'values', '(X, Y)'], {'method': '"""cubic"""'}), "(points, values, (X, Y), method='cubic')\n", (1393, 1433), False, 'from scipy.interpolate import griddata\n'), ((1443, 1463), 'numpy.transpose', 'np.transpose', (['int_vp'], {}), '(int_vp)\n', (1455, 1463), True, 'import numpy as np\n'), ((1500, 1549), 'examples.seismic.Model', 'Model', (['origin', 'spacing', 'shape', '(2)', 'vp'], {'nbpml': 'nbpml'}), '(origin, spacing, shape, 2, vp, nbpml=nbpml)\n', (1505, 1549), False, 'from examples.seismic import Model, RickerSource, Receiver, TimeAxis\n'), ((1680, 1703), 'numpy.linspace', 'np.linspace', (['t0', 'tn', 'nt'], {}), '(t0, tn, nt)\n', (1691, 1703), True, 'import numpy as np\n'), ((1877, 1979), 'os.path.join', 'os.path.join', (['save_dir', '"""Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_A_train.hdf5"""'], {}), "(save_dir,\n 'Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_A_train.hdf5')\n", (1889, 1979), False, 'import os\n'), ((1988, 2090), 'os.path.join', 'os.path.join', (['save_dir', '"""Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_B_train.hdf5"""'], {}), "(save_dir,\n 'Wavefield_Marmousi_pml_401x301_1000-1287_120-232_4k_20kp100_B_train.hdf5')\n", (2000, 2090), False, 'import os\n'), ((2134, 2160), 'h5py.File', 'h5py.File', (['strTrainA', '"""w-"""'], {}), "(strTrainA, 'w-')\n", (2143, 2160), False, 'import h5py\n'), ((2277, 2303), 'h5py.File', 'h5py.File', (['strTrainB', '"""w-"""'], {}), "(strTrainB, 'w-')\n", (2286, 2303), False, 'import h5py\n'), ((2431, 2482), 'numpy.linspace', 'np.linspace', (['(0.0)', 'model.domain_size[0]'], {'num': 'num_rec'}), '(0.0, model.domain_size[0], num=num_rec)\n', (2442, 2482), True, 'import numpy as np\n'), ((2533, 2569), 'examples.seismic.TimeAxis', 'TimeAxis', ([], {'start': 't0', 'stop': 'tn', 'step': 'dt'}), '(start=t0, stop=tn, step=dt)\n', (2541, 2569), False, 'from examples.seismic import Model, RickerSource, Receiver, TimeAxis\n'), ((2576, 2679), 'examples.seismic.RickerSource', 'RickerSource', ([], {'name': '"""src"""', 'grid': 'model.grid', 'f0': '(0.025)', 'time_range': 'time_range', 'space_order': '(1)', 'npoint': '(1)'}), "(name='src', grid=model.grid, f0=0.025, time_range=time_range,\n space_order=1, npoint=1)\n", (2588, 2679), False, 'from examples.seismic import Model, RickerSource, Receiver, TimeAxis\n'), ((2770, 2846), 'examples.seismic.Receiver', 'Receiver', ([], {'name': '"""rec"""', 'grid': 'model.grid', 'time_range': 'time_range', 'npoint': 'num_rec'}), "(name='rec', grid=model.grid, time_range=time_range, npoint=num_rec)\n", (2778, 2846), False, 'from examples.seismic import Model, RickerSource, Receiver, TimeAxis\n'), ((2876, 2927), 'numpy.linspace', 'np.linspace', (['(0.0)', 'model.domain_size[0]'], {'num': 'num_rec'}), '(0.0, model.domain_size[0], num=num_rec)\n', (2887, 2927), True, 'import numpy as 
np\n'), ((2998, 3112), 'examples.seismic.acoustic.AcousticWaveSolver', 'AcousticWaveSolver', (['model'], {'source': 'src', 'receiver': 'rec', 'kernel': '"""OT2"""', 'isic': '(True)', 'space_order': '(2)', 'freesurface': '(False)'}), "(model, source=src, receiver=rec, kernel='OT2', isic=True,\n space_order=2, freesurface=False)\n", (3016, 3112), False, 'from examples.seismic.acoustic import AcousticWaveSolver\n'), ((3130, 3245), 'examples.seismic.acoustic.AcousticWaveSolver', 'AcousticWaveSolver', (['model'], {'source': 'src', 'receiver': 'rec', 'kernel': '"""OT2"""', 'isic': '(True)', 'space_order': '(20)', 'freesurface': '(False)'}), "(model, source=src, receiver=rec, kernel='OT2', isic=True,\n space_order=20, freesurface=False)\n", (3148, 3245), False, 'from examples.seismic.acoustic import AcousticWaveSolver\n'), ((3262, 3340), 'devito.TimeFunction', 'TimeFunction', ([], {'name': '"""u"""', 'grid': 'model.grid', 'time_order': '(2)', 'space_order': '(20)', 'save': 'nt'}), "(name='u', grid=model.grid, time_order=2, space_order=20, save=nt)\n", (3274, 3340), False, 'from devito import TimeFunction, clear_cache\n'), ((3351, 3428), 'devito.TimeFunction', 'TimeFunction', ([], {'name': '"""u"""', 'grid': 'model.grid', 'time_order': '(2)', 'space_order': '(2)', 'save': 'nt'}), "(name='u', grid=model.grid, time_order=2, space_order=2, save=nt)\n", (3363, 3428), False, 'from devito import TimeFunction, clear_cache\n'), ((792, 833), 'os.path.join', 'os.path.join', (['data_path', '"""vp_marmousi_bi"""'], {}), "(data_path, 'vp_marmousi_bi')\n", (804, 833), False, 'import os\n'), ((3478, 3491), 'devito.clear_cache', 'clear_cache', ([], {}), '()\n', (3489, 3491), False, 'from devito import TimeFunction, clear_cache\n'), ((3674, 3725), 'numpy.linspace', 'np.linspace', (['(0.0)', 'model.domain_size[0]'], {'num': 'num_rec'}), '(0.0, model.domain_size[0], num=num_rec)\n', (3685, 3725), True, 'import numpy as np\n'), ((1296, 1329), 'numpy.linspace', 'np.linspace', (['(1000)', '(1287)', 'shape[0]'], {}), '(1000, 1287, shape[0])\n', (1307, 1329), True, 'import numpy as np\n'), ((1341, 1372), 'numpy.linspace', 'np.linspace', (['(120)', '(232)', 'shape[1]'], {}), '(120, 232, shape[1])\n', (1352, 1372), True, 'import numpy as np\n'), ((2705, 2747), 'numpy.array', 'np.array', (['[1 * spacing[0], 2 * spacing[1]]'], {}), '([1 * spacing[0], 2 * spacing[1]])\n', (2713, 2747), True, 'import numpy as np\n'), ((3580, 3625), 'numpy.array', 'np.array', (['[xsrc * spacing[0], 2 * spacing[1]]'], {}), '([xsrc * spacing[0], 2 * spacing[1]])\n', (3588, 3625), True, 'import numpy as np\n')] |
from setuptools import setup
setup(name="pykinematicskineticstoolbox",
version="0.0",
description="Installable python package which collects useful kinematics and kinetics functions",
author="<NAME>",
author_email="<EMAIL>",
license="MIT",
packages=["pykinematicskineticstoolbox"],
install_requires=["numpy"],
)
| [
"setuptools.setup"
]
| [((30, 325), 'setuptools.setup', 'setup', ([], {'name': '"""pykinematicskineticstoolbox"""', 'version': '"""0.0"""', 'description': '"""Installable python package which collects useful kinematics and kinetics functions"""', 'author': '"""<NAME>"""', 'author_email': '"""<EMAIL>"""', 'license': '"""MIT"""', 'packages': "['pykinematicskineticstoolbox']", 'install_requires': "['numpy']"}), "(name='pykinematicskineticstoolbox', version='0.0', description=\n 'Installable python package which collects useful kinematics and kinetics functions'\n , author='<NAME>', author_email='<EMAIL>', license='MIT', packages=[\n 'pykinematicskineticstoolbox'], install_requires=['numpy'])\n", (35, 325), False, 'from setuptools import setup\n')] |
from datetime import datetime
# ensure an rpc peer is added
def addpeer(p, rpcpeer):
pid = rpcpeer['id']
if pid not in p.persist['peerstate']:
p.persist['peerstate'][pid] = {
'connected': rpcpeer['connected'],
'last_seen': datetime.now() if rpcpeer['connected'] else None,
'avail': 1.0 if rpcpeer['connected'] else 0.0
}
# exponentially smooth online/offline states of peers
def trace_availability(p, rpcpeers):
p.persist['availcount'] += 1
leadwin = max(min(p.avail_window, p.persist['availcount'] * p.avail_interval), p.avail_interval)
samples = leadwin / p.avail_interval
alpha = 1.0 / samples
beta = 1.0 - alpha
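    # alpha/beta weights implement an exponential moving average whose effective
    # memory is roughly `samples` polling intervals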
for rpcpeer in rpcpeers['peers']:
pid = rpcpeer['id']
addpeer(p, rpcpeer)
if rpcpeer['connected']:
p.persist['peerstate'][pid]['last_seen'] = datetime.now()
p.persist['peerstate'][pid]['connected'] = True
p.persist['peerstate'][pid]['avail'] = 1.0 * alpha + p.persist['peerstate'][pid]['avail'] * beta
else:
p.persist['peerstate'][pid]['connected'] = False
p.persist['peerstate'][pid]['avail'] = 0.0 * alpha + p.persist['peerstate'][pid]['avail'] * beta
| [
"datetime.datetime.now"
]
| [((883, 897), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (895, 897), False, 'from datetime import datetime\n'), ((265, 279), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (277, 279), False, 'from datetime import datetime\n')] |
"""
Functions for reading Magritek Spinsolve binary (dx/1d) files and
parameter (acqu.par/proc.par) files.
"""
import os
from warnings import warn
import numpy as np
from . import fileiobase
from . import jcampdx
__developer_info__ = """
Spinsolve is the software used on the Magritek benchtop NMR devices.
A spectrum is saved in a folder with several files. The spectral data is
stored in these files: 'data.1d' (FID), 'spectrum.1d' (Fourier transformed)
and 'spectrum_processed.1d' (FT + processed by spinsolve)
Optional spectral data (System->Prefs->Setup->Global data storage):
'nmr_fid.dx' (FID stored in `JCAMP-DX standard <http://www.jcamp-dx.org/>`),
'spectrum.csv' and 'spectrum_processed.csv' (FT + processed by Spinsovle with ppm for each
point and intensity delimited by ';')
Other files:
'acqu.par' - all parameters that are used for acquisition
'Protocol.par' - text file used to reload data back into the Spinsolve software
'processing.script' - text file to transfer Spinsolve software protocol settings
into MNOVA
The Spinsolve Expert software has a slightly different output:
[Needs to be double checked as I do not have access to this software -LCageman]
- Output into JCAMP-DX is not possible
- 'spectrum_processed.1d' is not generated
- (new) 'fid.1d' - seems to be the same as 'data.1d'
- (new) 'proc.par' - contains processing parameters in the same style as 'acqu.par'
- (new) .pt1 files - seem to be plot files specific for the expert software, cannot
be read by NMRglue
"""
def read(dir='.', specfile=None, acqupar="acqu.par", procpar="proc.par"):
"""
Reads spinsolve files from a directory
When no spectrum filename is given (specfile), the following list is tried, in
that specific order
["nmr_fid.dx", "data.1d", "fid.1d", "spectrum.1d", "spectrum_processed.1d"]
To use the resolution enhanced spectrum use the './Enhanced' folder as input.
Note that spectrum.1d and spectrum_processed.1d contain only data in the
frequency domain, so no Fourier transformation is needed. Also, use
dic["spectrum"]["xaxis"] to plot the x-axis
Parameters
----------
dir : str
Directory to read from
specfile : str, optional
Filename to import spectral data from. None uses standard filename from:
["nmr_fid.dx", "data.1d", "fid.1d", "spectrum.1d", "spectrum_processed.1d"]
acqupar : str, optional
Filename for acquisition parameters. None uses standard name.
procpar : str, optional
Filename for processing parameters. None uses standard name.
Returns
-------
dic : dict
All parameters that can be present in the data folder:
dic["spectrum"] - First bytes of spectrum(_processed).1d
dic["acqu"] - Parameters present in acqu.par
dic["proc"] - Parameters present in proc.par
dic["dx"] - - Parameters present in the header of nmr_fid.dx
data : ndarray
Array of NMR data
"""
if os.path.isdir(dir) is not True:
raise IOError("directory %s does not exist" % (dir))
# Create empty dic
dic = {"spectrum": {}, "acqu": {}, "proc":{}, "dx":{}}
# Read in acqu.par and write to dic
acqupar = os.path.join(dir, acqupar)
if os.path.isfile(acqupar):
with open(acqupar, "r") as f:
info = f.readlines()
for line in info:
line = line.replace("\n", "")
k, v = line.split("=")
dic["acqu"][k.strip()] = v.strip()
# Read in proc.par and write to dic
procpar = os.path.join(dir,procpar)
if os.path.isfile(procpar):
with open(procpar, "r") as f:
info = f.readlines()
for line in info:
line = line.replace("\n", "")
k, v = line.split("=")
dic["proc"][k.strip()] = v.strip()
# Define which spectrumfile to take, using 'specfile' when defined, otherwise
# the files in 'priority_list' are tried, in that particular order
priority_list = ["nmr_fid.dx", "data.1d", "fid.1d", "spectrum.1d", "spectrum_processed.1d", None]
if specfile:
inputfile = os.path.join(dir, specfile)
if not os.path.isfile(inputfile):
raise IOError("File %s does not exist" % (inputfile))
else:
for priority in priority_list:
if priority == None:
raise IOError("directory %s does not contain spectral data" % (dir))
inputfile = os.path.join(dir, priority)
if os.path.isfile(inputfile):
break
# Detect which file we are dealing with from the extension and read in the spectral data
# Reading .dx file using existing nmrglue.fileio.jcampdx module
if inputfile.split('.')[-1] == "dx":
dic["dx"], raw_data = jcampdx.read(inputfile)
data = np.empty((int(dic["dx"]["$TD"][0]), ), dtype='complex128')
data = raw_data[0][:] + 1j * raw_data[1][:]
# Reading .1d files
elif inputfile.split('.')[-1] == "1d":
with open(inputfile, "rb") as f:
raw_data = f.read()
# Write out parameters from the first 32 bytes into dic["spectrum"]
keys = ["owner", "format", "version", "dataType", "xDim", "yDim", "zDim", "qDim"]
for i, k in enumerate(keys):
start = i * 4
end = start + 4
value = int.from_bytes( raw_data[start:end], "little")
dic["spectrum"][k] = value
data = np.frombuffer(raw_data[end:], "<f")
# The first 1/3 of the file is xaxis data (s or ppm)
split = data.shape[-1] // 3
xscale = data[0 : split]
dic["spectrum"]["xaxis"] = xscale
# The rest is real and imaginary data points interleaved
data = data[split : : 2] + 1j * data[split + 1 : : 2]
else:
raise IOError("File %s cannot be interpreted, use .dx or .1d instead" % (inputfile))
return dic,data
def guess_udic(dic,data):
"""
Guess parameters of universal dictionary from dic, data pair.
Parameters
----------
dic : dict
Dictionary of JCAMP-DX, acqu, proc and spectrum parameters.
data : ndarray
Array of NMR data.
Returns
-------
udic : dict
Universal dictionary of spectral parameters.
"""
# Create an empty universal dictionary
udic = fileiobase.create_blank_udic(1)
    # Update default parameters, first acqu.par parameters in dic are tried, then JCAMP-DX header parameters
# size
if data is not None:
udic[0]["size"] = len(data)
else:
warn('No data, cannot set udic size')
    # sw : spectral width in Hz
try:
udic[0]['sw'] = float(dic['acqu']['bandwidth']) * 1000
except KeyError:
try:
udic[0]['sw'] = float(dic['dx']['$SW'][0]) * float(dic['dx']['$BF1'][0])
except KeyError:
try:
if dic["spectrum"]["freqdata"]:
udic[0]['sw'] = dic["spectrum"]["xaxis"][-1] - dic["spectrum"]["xaxis"][0]
elif data is not None:
udic[0]['sw'] = len(data) / dic["spectrum"]["xaxis"][-1]
else:
warn("Cannot set spectral width - set manually using: 'udic[0]['sw'] = x' where x is the spectral width in Hz")
except KeyError:
warn("Cannot set spectral width - set manually using: 'udic[0]['sw'] = x' where x is the spectral width in Hz")
    # obs : observe (spectrometer) frequency in MHz
try:
udic[0]['obs'] = float(dic['acqu']['b1Freq'])
except KeyError:
try:
udic[0]['obs'] = float(dic['dx']['$BF1'][0])
except KeyError:
warn("Cannot set observe frequency - set manually using: 'udic[0]['obs'] = x' where x is magnetic field in MHz")
    # car : carrier frequency in Hz
try:
udic[0]['car'] = float(dic['acqu']['lowestFrequency']) + (float(dic['acqu']['bandwidth']) * 1000 / 2)
except KeyError:
try:
udic[0]['car'] = (float(dic['dx']['$REFERENCEPOINT'][0]) * -1 ) + (float(dic['dx']['$SW'][0]) * udic[0]['obs'] / 2)
except KeyError:
try:
udic[0]['car'] = (float(dic['dx']['$BF1'][0]) - float(dic['dx']['$SF'][0])) * 1000000
except KeyError:
warn("Cannot set carrier - try: 'udic[0]['car'] = x * udic[0]['obs']' where x is the center of the spectrum in ppm")
# label
try:
udic[0]['label'] = dic['acqu']['rxChannel']
except KeyError:
try:
label_value = dic['dx'][".OBSERVENUCLEUS"][0].replace("^", "")
udic[0]["label"] = label_value
except KeyError:
warn("Cannot set observed nucleus label")
#keys left to default
# udic[0]['complex']
# udic[0]['encoding']
# udic[0]['time'] = True
# udic[0]['freq'] = False
return udic
| [
"os.path.join",
"os.path.isfile",
"os.path.isdir",
"warnings.warn",
"numpy.frombuffer"
]
| [((3216, 3242), 'os.path.join', 'os.path.join', (['dir', 'acqupar'], {}), '(dir, acqupar)\n', (3228, 3242), False, 'import os\n'), ((3250, 3273), 'os.path.isfile', 'os.path.isfile', (['acqupar'], {}), '(acqupar)\n', (3264, 3273), False, 'import os\n'), ((3551, 3577), 'os.path.join', 'os.path.join', (['dir', 'procpar'], {}), '(dir, procpar)\n', (3563, 3577), False, 'import os\n'), ((3584, 3607), 'os.path.isfile', 'os.path.isfile', (['procpar'], {}), '(procpar)\n', (3598, 3607), False, 'import os\n'), ((2980, 2998), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (2993, 2998), False, 'import os\n'), ((4125, 4152), 'os.path.join', 'os.path.join', (['dir', 'specfile'], {}), '(dir, specfile)\n', (4137, 4152), False, 'import os\n'), ((6583, 6620), 'warnings.warn', 'warn', (['"""No data, cannot set udic size"""'], {}), "('No data, cannot set udic size')\n", (6587, 6620), False, 'from warnings import warn\n'), ((4168, 4193), 'os.path.isfile', 'os.path.isfile', (['inputfile'], {}), '(inputfile)\n', (4182, 4193), False, 'import os\n'), ((4452, 4479), 'os.path.join', 'os.path.join', (['dir', 'priority'], {}), '(dir, priority)\n', (4464, 4479), False, 'import os\n'), ((4495, 4520), 'os.path.isfile', 'os.path.isfile', (['inputfile'], {}), '(inputfile)\n', (4509, 4520), False, 'import os\n'), ((5466, 5501), 'numpy.frombuffer', 'np.frombuffer', (['raw_data[end:]', '"""<f"""'], {}), "(raw_data[end:], '<f')\n", (5479, 5501), True, 'import numpy as np\n'), ((7644, 7766), 'warnings.warn', 'warn', (['"""Cannot set observe frequency - set manually using: \'udic[0][\'obs\'] = x\' where x is magnetic field in MHz"""'], {}), '(\n "Cannot set observe frequency - set manually using: \'udic[0][\'obs\'] = x\' where x is magnetic field in MHz"\n )\n', (7648, 7766), False, 'from warnings import warn\n'), ((8622, 8663), 'warnings.warn', 'warn', (['"""Cannot set observed nucleus label"""'], {}), "('Cannot set observed nucleus label')\n", (8626, 8663), False, 'from warnings import warn\n'), ((7326, 7447), 'warnings.warn', 'warn', (['"""Cannot set spectral width - set manually using: \'udic[0][\'sw\'] = x\' where x is the spectral width in Hz"""'], {}), '(\n "Cannot set spectral width - set manually using: \'udic[0][\'sw\'] = x\' where x is the spectral width in Hz"\n )\n', (7330, 7447), False, 'from warnings import warn\n'), ((8242, 8368), 'warnings.warn', 'warn', (['"""Cannot set carrier - try: \'udic[0][\'car\'] = x * udic[0][\'obs\']\' where x is the center of the spectrum in ppm"""'], {}), '(\n "Cannot set carrier - try: \'udic[0][\'car\'] = x * udic[0][\'obs\']\' where x is the center of the spectrum in ppm"\n )\n', (8246, 8368), False, 'from warnings import warn\n'), ((7169, 7290), 'warnings.warn', 'warn', (['"""Cannot set spectral width - set manually using: \'udic[0][\'sw\'] = x\' where x is the spectral width in Hz"""'], {}), '(\n "Cannot set spectral width - set manually using: \'udic[0][\'sw\'] = x\' where x is the spectral width in Hz"\n )\n', (7173, 7290), False, 'from warnings import warn\n')] |
# -*- coding: utf-8 -*-
#
# Graph : graph package
#
# Copyright or Copr. 2006 INRIA - CIRAD - INRA
#
# File author(s): <NAME> <<EMAIL>>
#
# Distributed under the Cecill-C License.
# See accompanying file LICENSE.txt or copy at
# http://www.cecill.info/licences/Licence_CeCILL-C_V1-en.html
#
# VPlants WebSite : https://gforge.inria.fr/projects/vplants/
#
"""This module provide a simple pure python implementation
for a graph interface
does not implement copy concept
"""
from id_dict import IdDict
class GraphError(Exception):
"""
base class of all graph exceptions
"""
class InvalidEdge(GraphError, KeyError):
"""
exception raised when a wrong edge id is provided
"""
class InvalidVertex(GraphError, KeyError):
"""
exception raised when a wrong vertex id is provided
"""
class Graph(object):
"""Directed graph with multiple links
in this implementation :
- vertices are tuple of edge_in,edge_out
- edges are tuple of source,target
"""
def __init__(self, graph=None, idgenerator="set"):
"""constructor
        if graph is not None, make a copy of the topological structure of graph
(i.e. don't use the same id)
args:
- graph (Graph): the graph to copy, default=None
- idgenerator (str): type of idgenerator to use, default 'set'
"""
self._vertices = IdDict(idgenerator=idgenerator)
self._edges = IdDict(idgenerator=idgenerator)
if graph is not None:
self.extend(graph)
# ##########################################################
#
# Graph concept
#
# ##########################################################
def source(self, eid):
"""Retrieve the source vertex of an edge
args:
- eid (int): edge id
return:
- (int): vertex id
"""
try:
return self._edges[eid][0]
except KeyError:
raise InvalidEdge(eid)
def target(self, eid):
"""Retrieve the target vertex of an edge
args:
- eid (int): edge id
return:
- (int): vertex id
"""
try:
return self._edges[eid][1]
except KeyError:
raise InvalidEdge(eid)
def edge_vertices(self, eid):
"""Retrieve both source and target vertex of an edge
args:
- eid (int): edge id
return:
- (int, int): source id, target id
"""
try:
return self._edges[eid]
except KeyError:
raise InvalidEdge(eid)
def edge(self, source, target):
"""Find the matching edge with same source and same target
        return None if it doesn't succeed
args:
- source (int): source vertex
- target (int): target vertex
return:
- (int): edge id with same source and target
- (None): if search is unsuccessful
"""
if target not in self:
raise InvalidVertex(target)
for eid in self.out_edges(source):
if self.target(eid) == target:
return eid
return None
def __contains__(self, vid):
"""magic alias for `has_vertex`
"""
return self.has_vertex(vid)
def has_vertex(self, vid):
"""test whether a vertex belong to the graph
args:
- vid (int): id of vertex
return:
- (bool)
"""
return vid in self._vertices
def has_edge(self, eid):
"""test whether an edge belong to the graph
args:
- eid (int): id of edge
return:
- (bool)
"""
return eid in self._edges
def is_valid(self):
"""Test the validity of the graph
return:
- (bool)
"""
return True
# ##########################################################
#
# Vertex List Graph Concept
#
# ##########################################################
def vertices(self):
"""Iterator on all vertices
return:
- (iter of int)
"""
return iter(self._vertices)
def __iter__(self):
"""Magic alias for `vertices`
"""
return iter(self._vertices)
def nb_vertices(self):
"""Total number of vertices in the graph
return:
- (int)
"""
return len(self._vertices)
def __len__(self):
"""Magic alias for `nb_vertices`
"""
return self.nb_vertices()
def in_neighbors(self, vid):
"""Iterator on the neighbors of vid
where edges are directed from neighbor to vid
args:
- vid (int): vertex id
return:
- (iter of int): iter of vertex id
"""
if vid not in self:
raise InvalidVertex(vid)
neighbors_list = [self.source(eid) for eid in self._vertices[vid][0]]
return iter(set(neighbors_list))
def out_neighbors(self, vid):
"""Iterator on the neighbors of vid
where edges are directed from vid to neighbor
args:
- vid (int): vertex id
return:
- (iter of int): iter of vertex id
"""
if vid not in self:
raise InvalidVertex(vid)
neighbors_list = [self.target(eid) for eid in self._vertices[vid][1]]
return iter(set(neighbors_list))
def neighbors(self, vid):
"""Iterator on all neighbors of vid both in and out
args:
- vid (int): vertex id
return:
- (iter of int): iter of vertex id
"""
neighbors_list = list(self.in_neighbors(vid))
neighbors_list.extend(self.out_neighbors(vid))
return iter(set(neighbors_list))
def nb_in_neighbors(self, vid):
"""Number of in neighbors of vid
where edges are directed from neighbor to vid
args:
- vid (int): vertex id
return:
- (int)
"""
neighbors_set = list(self.in_neighbors(vid))
return len(neighbors_set)
def nb_out_neighbors(self, vid):
"""Number of out neighbors of vid
where edges are directed from vid to neighbor
args:
- vid (int): vertex id
return:
- (int)
"""
neighbors_set = list(self.out_neighbors(vid))
return len(neighbors_set)
def nb_neighbors(self, vid):
"""Total number of both in and out neighbors of vid
args:
- vid (int): vertex id
return:
- (int)
"""
neighbors_set = list(self.neighbors(vid))
return len(neighbors_set)
# ##########################################################
#
# Edge List Graph Concept
#
# ##########################################################
def _iter_edges(self, vid):
"""
internal function that perform 'edges' with vid not None
"""
link_in, link_out = self._vertices[vid]
for eid in link_in:
yield eid
for eid in link_out:
yield eid
def edges(self, vid=None):
"""Iterate on all edges connected to a given vertex.
If vid is None (default), iterate on all edges in the graph
args:
         - vid (int): vertex holding the edges, default (None)
return:
- (iter of int): iterator on edge ids
"""
if vid is None:
return iter(self._edges)
if vid not in self:
raise InvalidVertex(vid)
return self._iter_edges(vid)
def nb_edges(self, vid=None):
"""Number of edges connected to a given vertex.
If vid is None (default), total number of edges in the graph
args:
         - vid (int): vertex holding the edges, default (None)
return:
- (int)
"""
if vid is None:
return len(self._edges)
if vid not in self:
raise InvalidVertex(vid)
return len(self._vertices[vid][0]) + len(self._vertices[vid][1])
def in_edges(self, vid):
"""Iterate on all edges pointing to a given vertex.
args:
- vid (int): vertex target of edges
return:
- (iter of int): iterator on edge ids
"""
if vid not in self:
raise InvalidVertex(vid)
for eid in self._vertices[vid][0]:
yield eid
def out_edges(self, vid):
"""Iterate on all edges away from a given vertex.
args:
- vid (int): vertex source of edges
return:
- (iter of int): iterator on edge ids
"""
if vid not in self:
raise InvalidVertex(vid)
for eid in self._vertices[vid][1]:
yield eid
def nb_in_edges(self, vid):
"""Number of edges pointing to a given vertex.
args:
- vid (int): vertex target of edges
return:
- (int)
"""
if vid not in self:
raise InvalidVertex(vid)
return len(self._vertices[vid][0])
def nb_out_edges(self, vid):
"""Number of edges away from a given vertex.
args:
- vid (int): vertex source of edges
return:
- (int)
"""
if vid not in self:
raise InvalidVertex(vid)
return len(self._vertices[vid][1])
# ##########################################################
#
# Mutable Vertex Graph concept
#
# ##########################################################
def add_vertex(self, vid=None):
"""Add a vertex to the graph.
If vid is not provided create a new vid
args:
- vid (int): id to use. If None (default) will generate a new one
return:
- vid (int): id used for the new vertex
"""
try:
return self._vertices.add((set(), set()), vid)
except KeyError:
raise InvalidVertex(vid)
def remove_vertex(self, vid):
"""Remove a specified vertex of the graph.
Also remove all edge attached to it.
args:
- vid (int): id of vertex to remove
"""
if vid not in self:
raise InvalidVertex(vid)
link_in, link_out = self._vertices[vid]
for edge in list(link_in):
self.remove_edge(edge)
for edge in list(link_out):
self.remove_edge(edge)
del self._vertices[vid]
def clear(self):
"""Remove all vertices and edges
don't change references to objects
"""
self._edges.clear()
self._vertices.clear()
# ##########################################################
#
# Mutable Edge Graph concept
#
# ##########################################################
def add_edge(self, sid, tid, eid=None):
"""Add an edge to the graph.
If eid is not provided generate a new one.
args:
- sid (int): id of source vertex
- tid (int): id of target vertex
- eid (int): id to use. If None (default) will generate a new one
return:
- eid (int): id used for new edge
"""
if sid not in self:
raise InvalidVertex(sid)
if tid not in self:
raise InvalidVertex(tid)
try:
eid = self._edges.add((sid, tid), eid)
except KeyError:
raise InvalidEdge(eid)
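        # register the edge in the source's out-edge set and the target's in-edge set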
self._vertices[sid][1].add(eid)
self._vertices[tid][0].add(eid)
return eid
def remove_edge(self, eid):
"""Remove a specified edge from the graph.
args:
- eid (int): id of edge to remove
"""
if not self.has_edge(eid):
raise InvalidEdge(eid)
sid, tid = self._edges[eid]
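        # detach the edge from the source's out-edge set and the target's in-edge set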
self._vertices[sid][1].remove(eid)
self._vertices[tid][0].remove(eid)
del self._edges[eid]
def clear_edges(self):
"""Remove all the edges of the graph
don't change references to objects
"""
self._edges.clear()
        for vid, (in_set, out_set) in self._vertices.items():
in_set.clear()
out_set.clear()
# ##########################################################
#
# Extend Graph concept
#
# ##########################################################
def extend(self, graph):
"""Add the specified graph to self, create new vid and eid
args:
- graph (Graph): the graph to add
return:
- (dict of (int, int)): mapping between vertex id in graph and
vertex id in extended self
- (dict of (int, int)): mapping between edge id in graph and
edge id in extended self
"""
# vertex adding
trans_vid = {}
for vid in list(graph.vertices()):
trans_vid[vid] = self.add_vertex()
# edge adding
trans_eid = {}
for eid in list(graph.edges()):
sid = trans_vid[graph.source(eid)]
tid = trans_vid[graph.target(eid)]
trans_eid[eid] = self.add_edge(sid, tid)
return trans_vid, trans_eid
def sub_graph(self, vids):
"""
"""
        raise NotImplementedError
# from copy import deepcopy
# vids = set(vids)
#
# result = deepcopy(self)
# result._vertices.clear()
# result._edges.clear()
#
# for key, edges in self._vertices.items():
# if key in vids:
# inedges, outedges = edges
# sortedinedges = set(
# [eid for eid in inedges if self.source(eid) in vids])
# sortedoutedges = set(
# [eid for eid in outedges if self.target(eid) in vids])
# result._vertices.add((sortedinedges, sortedoutedges), key)
# for eid in sortedoutedges:
# result._edges.add(self._edges[eid], eid)
#
# return result
| [
"id_dict.IdDict"
]
| [((1438, 1469), 'id_dict.IdDict', 'IdDict', ([], {'idgenerator': 'idgenerator'}), '(idgenerator=idgenerator)\n', (1444, 1469), False, 'from id_dict import IdDict\n'), ((1492, 1523), 'id_dict.IdDict', 'IdDict', ([], {'idgenerator': 'idgenerator'}), '(idgenerator=idgenerator)\n', (1498, 1523), False, 'from id_dict import IdDict\n')] |
import paddle.fluid as fluid
from paddle.fluid.initializer import MSRA
from paddle.fluid.param_attr import ParamAttr
class MobileNetV2SSD:
def __init__(self, img, num_classes, img_shape):
self.img = img
self.num_classes = num_classes
self.img_shape = img_shape
def ssd_net(self, scale=1.0):
# 300x300
bottleneck_params_list = [(1, 16, 1, 1),
(6, 24, 2, 2),
(6, 32, 3, 2),
(6, 64, 4, 2),
(6, 96, 3, 1)]
# conv1
input = self.conv_bn_layer(input=self.img,
num_filters=int(32 * scale),
filter_size=3,
stride=2,
padding=1,
if_act=True)
# bottleneck sequences
in_c = int(32 * scale)
for layer_setting in bottleneck_params_list:
t, c, n, s = layer_setting
input = self.invresi_blocks(input=input, in_c=in_c, t=t, c=int(c * scale), n=n, s=s)
in_c = int(c * scale)
# 19x19
module11 = input
tmp = self.invresi_blocks(input=input, in_c=in_c, t=6, c=int(160 * scale), n=3, s=2)
# 10x10
module13 = self.invresi_blocks(input=tmp, in_c=int(160 * scale), t=6, c=int(320 * scale), n=1, s=1)
module14 = self.extra_block(module13, 256, 512, 1)
# 5x5
module15 = self.extra_block(module14, 128, 256, 1)
# 3x3
module16 = self.extra_block(module15, 128, 256, 1)
# 2x2
module17 = self.extra_block(module16, 64, 128, 1)
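        # generate SSD prior boxes plus per-box location and confidence predictions from six feature maps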
mbox_locs, mbox_confs, box, box_var = fluid.layers.multi_box_head(
inputs=[module11, module13, module14, module15, module16, module17],
image=self.img,
num_classes=self.num_classes,
min_ratio=20,
max_ratio=90,
min_sizes=[60.0, 105.0, 150.0, 195.0, 240.0, 285.0],
max_sizes=[[], 150.0, 195.0, 240.0, 285.0, 300.0],
aspect_ratios=[[2.], [2., 3.], [2., 3.], [2., 3.], [2., 3.], [2., 3.]],
base_size=self.img_shape[2],
offset=0.5,
flip=True)
return mbox_locs, mbox_confs, box, box_var
def conv_bn_layer(self, input, filter_size, num_filters, stride, padding, num_groups=1, if_act=True,
use_cudnn=True):
parameter_attr = ParamAttr(learning_rate=0.1, initializer=MSRA())
conv = fluid.layers.conv2d(input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=padding,
groups=num_groups,
use_cudnn=use_cudnn,
param_attr=parameter_attr,
bias_attr=False)
bn = fluid.layers.batch_norm(input=conv)
if if_act:
return fluid.layers.relu6(bn)
else:
return bn
def shortcut(self, input, data_residual):
return fluid.layers.elementwise_add(input, data_residual)
def inverted_residual_unit(self,
input,
num_in_filter,
num_filters,
ifshortcut,
stride,
filter_size,
padding,
expansion_factor):
num_expfilter = int(round(num_in_filter * expansion_factor))
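        # 1x1 pointwise convolution expanding the channel count by the expansion factor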
channel_expand = self.conv_bn_layer(input=input,
num_filters=num_expfilter,
filter_size=1,
stride=1,
padding=0,
num_groups=1,
if_act=True)
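        # 3x3 depthwise convolution (groups equal to the expanded channel count)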
bottleneck_conv = self.conv_bn_layer(input=channel_expand,
num_filters=num_expfilter,
filter_size=filter_size,
stride=stride,
padding=padding,
num_groups=num_expfilter,
if_act=True,
use_cudnn=False)
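        # 1x1 linear projection back to the output channels (no activation)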
linear_out = self.conv_bn_layer(input=bottleneck_conv,
num_filters=num_filters,
filter_size=1,
stride=1,
padding=0,
num_groups=1,
if_act=False)
if ifshortcut:
out = self.shortcut(input=input, data_residual=linear_out)
return out
else:
return linear_out
def invresi_blocks(self, input, in_c, t, c, n, s):
first_block = self.inverted_residual_unit(input=input,
num_in_filter=in_c,
num_filters=c,
ifshortcut=False,
stride=s,
filter_size=3,
padding=1,
expansion_factor=t)
last_residual_block = first_block
last_c = c
for i in range(1, n):
last_residual_block = self.inverted_residual_unit(input=last_residual_block,
num_in_filter=last_c,
num_filters=c,
ifshortcut=True,
stride=1,
filter_size=3,
padding=1,
expansion_factor=t)
return last_residual_block
def conv_bn(self, input, filter_size, num_filters, stride, padding, num_groups=1, act='relu', use_cudnn=True):
parameter_attr = ParamAttr(learning_rate=0.1, initializer=MSRA())
conv = fluid.layers.conv2d(input=input,
num_filters=num_filters,
filter_size=filter_size,
stride=stride,
padding=padding,
groups=num_groups,
use_cudnn=use_cudnn,
param_attr=parameter_attr,
bias_attr=False)
return fluid.layers.batch_norm(input=conv, act=act)
def extra_block(self, input, num_filters1, num_filters2, num_groups):
# 1x1 conv
pointwise_conv = self.conv_bn(input=input,
filter_size=1,
num_filters=int(num_filters1),
stride=1,
num_groups=int(num_groups),
padding=0)
# 3x3 conv
normal_conv = self.conv_bn(input=pointwise_conv,
filter_size=3,
num_filters=int(num_filters2),
stride=2,
num_groups=int(num_groups),
padding=1)
return normal_conv
def build_ssd(img, num_classes, img_shape):
ssd_model = MobileNetV2SSD(img, num_classes, img_shape)
return ssd_model.ssd_net()
if __name__ == '__main__':
data = fluid.data(name='data', shape=[None, 3, 300, 300])
build_ssd(data, 21, img_shape=[3, 300, 300])
| [
"paddle.fluid.data",
"paddle.fluid.layers.relu6",
"paddle.fluid.initializer.MSRA",
"paddle.fluid.layers.conv2d",
"paddle.fluid.layers.batch_norm",
"paddle.fluid.layers.multi_box_head",
"paddle.fluid.layers.elementwise_add"
]
| [((8339, 8389), 'paddle.fluid.data', 'fluid.data', ([], {'name': '"""data"""', 'shape': '[None, 3, 300, 300]'}), "(name='data', shape=[None, 3, 300, 300])\n", (8349, 8389), True, 'import paddle.fluid as fluid\n'), ((1779, 2209), 'paddle.fluid.layers.multi_box_head', 'fluid.layers.multi_box_head', ([], {'inputs': '[module11, module13, module14, module15, module16, module17]', 'image': 'self.img', 'num_classes': 'self.num_classes', 'min_ratio': '(20)', 'max_ratio': '(90)', 'min_sizes': '[60.0, 105.0, 150.0, 195.0, 240.0, 285.0]', 'max_sizes': '[[], 150.0, 195.0, 240.0, 285.0, 300.0]', 'aspect_ratios': '[[2.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0]]', 'base_size': 'self.img_shape[2]', 'offset': '(0.5)', 'flip': '(True)'}), '(inputs=[module11, module13, module14, module15,\n module16, module17], image=self.img, num_classes=self.num_classes,\n min_ratio=20, max_ratio=90, min_sizes=[60.0, 105.0, 150.0, 195.0, 240.0,\n 285.0], max_sizes=[[], 150.0, 195.0, 240.0, 285.0, 300.0],\n aspect_ratios=[[2.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [2.0, 3.0], [\n 2.0, 3.0]], base_size=self.img_shape[2], offset=0.5, flip=True)\n', (1806, 2209), True, 'import paddle.fluid as fluid\n'), ((2597, 2804), 'paddle.fluid.layers.conv2d', 'fluid.layers.conv2d', ([], {'input': 'input', 'num_filters': 'num_filters', 'filter_size': 'filter_size', 'stride': 'stride', 'padding': 'padding', 'groups': 'num_groups', 'use_cudnn': 'use_cudnn', 'param_attr': 'parameter_attr', 'bias_attr': '(False)'}), '(input=input, num_filters=num_filters, filter_size=\n filter_size, stride=stride, padding=padding, groups=num_groups,\n use_cudnn=use_cudnn, param_attr=parameter_attr, bias_attr=False)\n', (2616, 2804), True, 'import paddle.fluid as fluid\n'), ((3089, 3124), 'paddle.fluid.layers.batch_norm', 'fluid.layers.batch_norm', ([], {'input': 'conv'}), '(input=conv)\n', (3112, 3124), True, 'import paddle.fluid as fluid\n'), ((3284, 3334), 'paddle.fluid.layers.elementwise_add', 'fluid.layers.elementwise_add', (['input', 'data_residual'], {}), '(input, data_residual)\n', (3312, 3334), True, 'import paddle.fluid as fluid\n'), ((6819, 7026), 'paddle.fluid.layers.conv2d', 'fluid.layers.conv2d', ([], {'input': 'input', 'num_filters': 'num_filters', 'filter_size': 'filter_size', 'stride': 'stride', 'padding': 'padding', 'groups': 'num_groups', 'use_cudnn': 'use_cudnn', 'param_attr': 'parameter_attr', 'bias_attr': '(False)'}), '(input=input, num_filters=num_filters, filter_size=\n filter_size, stride=stride, padding=padding, groups=num_groups,\n use_cudnn=use_cudnn, param_attr=parameter_attr, bias_attr=False)\n', (6838, 7026), True, 'import paddle.fluid as fluid\n'), ((7313, 7357), 'paddle.fluid.layers.batch_norm', 'fluid.layers.batch_norm', ([], {'input': 'conv', 'act': 'act'}), '(input=conv, act=act)\n', (7336, 7357), True, 'import paddle.fluid as fluid\n'), ((3163, 3185), 'paddle.fluid.layers.relu6', 'fluid.layers.relu6', (['bn'], {}), '(bn)\n', (3181, 3185), True, 'import paddle.fluid as fluid\n'), ((2574, 2580), 'paddle.fluid.initializer.MSRA', 'MSRA', ([], {}), '()\n', (2578, 2580), False, 'from paddle.fluid.initializer import MSRA\n'), ((6796, 6802), 'paddle.fluid.initializer.MSRA', 'MSRA', ([], {}), '()\n', (6800, 6802), False, 'from paddle.fluid.initializer import MSRA\n')] |
#! /usr/bin/env python3
import json
import os.path
import jinja2
DEFAULT_PARAMS = {
"ansible_user": "vagrant"
}
if __name__ == "__main__":
# Reading configuration
here = os.path.dirname(os.path.realpath(__file__ + "/../"))
with open(here + "/config.json", "r") as rf:
config = json.load(rf)
print(json.dumps(config, sort_keys=True, indent=4))
# Generating an inventory file
with open(here + "/playbook/inventory/hosts", "w") as inventory:
inventory.write("[kafka]\n")
for host in config["hosts"]:
# Setting default values and updating them when more specific.
params = dict()
params.update(DEFAULT_PARAMS)
params.update(config["params"])
params.update(config["hosts"][host])
# Setting some extra ansible paramters.
params["ansible_ssh_host"] = params["ip"]
inventory.write("%s\t%s\n" % (host, " ".join(("%s=%s" % (k,v) for k,v in params.items()))))
# Generating the Vagrantfile
env = jinja2.Environment(loader=jinja2.FileSystemLoader(here + "/templates/"))
template = env.get_template('Vagrantfile.j2')
template.stream(**config).dump(here + '/vagrant/Vagrantfile')
# Generating group vars for kafka
with open(here + "/playbook/group_vars/kafka.yml", "w") as gv:
gv.write("---\n")
gv.write("hosts:\n")
for (host, params) in config["hosts"].items():
gv.write(" %s: '%s.%s'\n" % (params["ip"], params["hostname"], config["params"]["domain" ]))
gv.write("kafka:\n")
gv.write(" hosts:\n")
for (host, params) in config["hosts"].items():
gv.write(" - %s.%s\n" % (params["hostname"], config["params"]["domain" ]))
| [
"json.load",
"jinja2.FileSystemLoader",
"json.dumps"
]
| [((308, 321), 'json.load', 'json.load', (['rf'], {}), '(rf)\n', (317, 321), False, 'import json\n'), ((332, 376), 'json.dumps', 'json.dumps', (['config'], {'sort_keys': '(True)', 'indent': '(4)'}), '(config, sort_keys=True, indent=4)\n', (342, 376), False, 'import json\n'), ((1075, 1120), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (["(here + '/templates/')"], {}), "(here + '/templates/')\n", (1098, 1120), False, 'import jinja2\n')] |
# pylint: skip-file
import os
from assimilator import *
from Boinc import boinc_project_path
class SlimeClustersAssimilator(Assimilator):
def __init__(self):
Assimilator.__init__(self)
def assimilate_handler(self, wu, results, canonical_result):
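        # nothing to assimilate until a canonical (validated) result exists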
        if canonical_result is None:
return
src_file = self.get_file_path(canonical_result)
dst_dir = boinc_project_path.project_path('slime-clusters')
dst_file = os.path.join(dst_dir, 'results.txt')
if not os.path.exists(dst_dir):
os.makedirs(dst_dir)
with open(src_file, 'r') as src, open(dst_file, 'a') as dst:
dst.writelines(src.readlines())
if __name__ == "__main__":
    SlimeClustersAssimilator().run()
| [
"Boinc.boinc_project_path.project_path",
"os.path.exists",
"os.path.join",
"os.makedirs"
]
| [((404, 453), 'Boinc.boinc_project_path.project_path', 'boinc_project_path.project_path', (['"""slime-clusters"""'], {}), "('slime-clusters')\n", (435, 453), False, 'from Boinc import boinc_project_path\n'), ((473, 509), 'os.path.join', 'os.path.join', (['dst_dir', '"""results.txt"""'], {}), "(dst_dir, 'results.txt')\n", (485, 509), False, 'import os\n'), ((526, 549), 'os.path.exists', 'os.path.exists', (['dst_dir'], {}), '(dst_dir)\n', (540, 549), False, 'import os\n'), ((563, 583), 'os.makedirs', 'os.makedirs', (['dst_dir'], {}), '(dst_dir)\n', (574, 583), False, 'import os\n')] |
# Licensed to Modin Development Team under one or more contributor license agreements.
# See the NOTICE file distributed with this work for additional information regarding
# copyright ownership. The Modin Development Team licenses this file to you under the
# Apache License, Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
"""Module houses class that implements ``PandasOnRayDataframe`` class using cuDF."""
import numpy as np
import ray
from ..partitioning.partition import cuDFOnRayDataframePartition
from ..partitioning.partition_manager import cuDFOnRayDataframePartitionManager
from modin.core.execution.ray.implementations.pandas_on_ray.dataframe.dataframe import (
PandasOnRayDataframe,
)
from modin.error_message import ErrorMessage
class cuDFOnRayDataframe(PandasOnRayDataframe):
"""
The class implements the interface in ``PandasOnRayDataframe`` using cuDF.
Parameters
----------
partitions : np.ndarray
A 2D NumPy array of partitions.
index : sequence
The index for the dataframe. Converted to a ``pandas.Index``.
columns : sequence
The columns object for the dataframe. Converted to a ``pandas.Index``.
row_lengths : list, optional
The length of each partition in the rows. The "height" of
each of the block partitions. Is computed if not provided.
column_widths : list, optional
The width of each partition in the columns. The "width" of
each of the block partitions. Is computed if not provided.
dtypes : pandas.Series, optional
The data types for the dataframe columns.
"""
_partition_mgr_cls = cuDFOnRayDataframePartitionManager
def synchronize_labels(self, axis=None):
"""
Synchronize labels by applying the index object (Index or Columns) to the partitions eagerly.
Parameters
----------
axis : {0, 1, None}, default: None
The axis to apply to. If None, it applies to both axes.
"""
ErrorMessage.catch_bugs_and_request_email(
axis is not None and axis not in [0, 1]
)
cum_row_lengths = np.cumsum([0] + self._row_lengths)
cum_col_widths = np.cumsum([0] + self._column_widths)
def apply_idx_objs(df, idx, cols, axis):
# cudf does not support set_axis. It only supports rename with 1-to-1 mapping.
            # Therefore, we need to build dictionaries that map the current
            # labels to the new ones.
idx = {df.index[i]: idx[i] for i in range(len(idx))}
            cols = {df.columns[i]: cols[i] for i in range(len(cols))}
if axis == 0:
return df.rename(index=idx)
elif axis == 1:
return df.rename(columns=cols)
else:
return df.rename(index=idx, columns=cols)
keys = np.array(
[
[
self._partitions[i][j].apply(
apply_idx_objs,
idx=self.index[
slice(cum_row_lengths[i], cum_row_lengths[i + 1])
],
cols=self.columns[
slice(cum_col_widths[j], cum_col_widths[j + 1])
],
axis=axis,
)
for j in range(len(self._partitions[i]))
]
for i in range(len(self._partitions))
]
)
self._partitions = np.array(
[
[
cuDFOnRayDataframePartition(
self._partitions[i][j].get_gpu_manager(),
keys[i][j],
self._partitions[i][j]._length_cache,
self._partitions[i][j]._width_cache,
)
for j in range(len(keys[i]))
]
for i in range(len(keys))
]
)
def mask(
self,
row_indices=None,
row_numeric_idx=None,
col_indices=None,
col_numeric_idx=None,
):
"""
Lazily select columns or rows from given indices.
Parameters
----------
row_indices : list of hashable, optional
The row labels to extract.
row_numeric_idx : list of int, optional
The row indices to extract.
col_indices : list of hashable, optional
The column labels to extract.
col_numeric_idx : list of int, optional
The column indices to extract.
Returns
-------
cuDFOnRayDataframe
A new ``cuDFOnRayDataframe`` from the mask provided.
Notes
-----
If both `row_indices` and `row_numeric_idx` are set, `row_indices` will be used.
The same rule applied to `col_indices` and `col_numeric_idx`.
"""
if isinstance(row_numeric_idx, slice) and (
row_numeric_idx == slice(None) or row_numeric_idx == slice(0, None)
):
row_numeric_idx = None
if isinstance(col_numeric_idx, slice) and (
col_numeric_idx == slice(None) or col_numeric_idx == slice(0, None)
):
col_numeric_idx = None
if (
row_indices is None
and row_numeric_idx is None
and col_indices is None
and col_numeric_idx is None
):
return self.copy()
if row_indices is not None:
row_numeric_idx = self.index.get_indexer_for(row_indices)
if row_numeric_idx is not None:
row_partitions_list = self._get_dict_of_block_index(0, row_numeric_idx)
if isinstance(row_numeric_idx, slice):
# Row lengths for slice are calculated as the length of the slice
# on the partition. Often this will be the same length as the current
# length, but sometimes it is different, thus the extra calculation.
new_row_lengths = [
len(range(*idx.indices(self._row_lengths[p])))
for p, idx in row_partitions_list.items()
]
# Use the slice to calculate the new row index
new_index = self.index[row_numeric_idx]
else:
new_row_lengths = [len(idx) for _, idx in row_partitions_list.items()]
new_index = self.index[sorted(row_numeric_idx)]
else:
row_partitions_list = {
i: slice(None) for i in range(len(self._row_lengths))
}
new_row_lengths = self._row_lengths
new_index = self.index
if col_indices is not None:
col_numeric_idx = self.columns.get_indexer_for(col_indices)
if col_numeric_idx is not None:
col_partitions_list = self._get_dict_of_block_index(1, col_numeric_idx)
if isinstance(col_numeric_idx, slice):
# Column widths for slice are calculated as the length of the slice
# on the partition. Often this will be the same length as the current
# length, but sometimes it is different, thus the extra calculation.
new_col_widths = [
len(range(*idx.indices(self._column_widths[p])))
for p, idx in col_partitions_list.items()
]
# Use the slice to calculate the new columns
new_columns = self.columns[col_numeric_idx]
assert sum(new_col_widths) == len(
new_columns
), "{} != {}.\n{}\n{}\n{}".format(
sum(new_col_widths),
len(new_columns),
col_numeric_idx,
self._column_widths,
col_partitions_list,
)
if self._dtypes is not None:
new_dtypes = self.dtypes[col_numeric_idx]
else:
new_dtypes = None
else:
new_col_widths = [len(idx) for _, idx in col_partitions_list.items()]
new_columns = self.columns[sorted(col_numeric_idx)]
if self._dtypes is not None:
new_dtypes = self.dtypes.iloc[sorted(col_numeric_idx)]
else:
new_dtypes = None
else:
col_partitions_list = {
i: slice(None) for i in range(len(self._column_widths))
}
new_col_widths = self._column_widths
new_columns = self.columns
if self._dtypes is not None:
new_dtypes = self.dtypes
else:
new_dtypes = None
key_and_gpus = np.array(
[
[
[
self._partitions[row_idx][col_idx].mask(
row_internal_indices, col_internal_indices
),
self._partitions[row_idx][col_idx].get_gpu_manager(),
]
for col_idx, col_internal_indices in col_partitions_list.items()
if isinstance(col_internal_indices, slice)
or len(col_internal_indices) > 0
]
for row_idx, row_internal_indices in row_partitions_list.items()
if isinstance(row_internal_indices, slice)
or len(row_internal_indices) > 0
]
)
shape = key_and_gpus.shape[:2]
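        # fetch the masked partition keys from Ray and rebuild partitions on their original GPU managers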
keys = ray.get(key_and_gpus[:, :, 0].flatten().tolist())
gpu_managers = key_and_gpus[:, :, 1].flatten().tolist()
new_partitions = self._partition_mgr_cls._create_partitions(
keys, gpu_managers
).reshape(shape)
intermediate = self.__constructor__(
new_partitions,
new_index,
new_columns,
new_row_lengths,
new_col_widths,
new_dtypes,
)
# Check if monotonically increasing, return if it is. Fast track code path for
# common case to keep it fast.
if (
row_numeric_idx is None
or isinstance(row_numeric_idx, slice)
or len(row_numeric_idx) == 1
or np.all(row_numeric_idx[1:] >= row_numeric_idx[:-1])
) and (
col_numeric_idx is None
or isinstance(col_numeric_idx, slice)
or len(col_numeric_idx) == 1
or np.all(col_numeric_idx[1:] >= col_numeric_idx[:-1])
):
return intermediate
# The new labels are often smaller than the old labels, so we can't reuse the
# original order values because those were mapped to the original data. We have
# to reorder here based on the expected order from within the data.
# We create a dictionary mapping the position of the numeric index with respect
# to all others, then recreate that order by mapping the new order values from
# the old. This information is sent to `_reorder_labels`.
if row_numeric_idx is not None:
row_order_mapping = dict(
zip(sorted(row_numeric_idx), range(len(row_numeric_idx)))
)
new_row_order = [row_order_mapping[idx] for idx in row_numeric_idx]
else:
new_row_order = None
if col_numeric_idx is not None:
col_order_mapping = dict(
zip(sorted(col_numeric_idx), range(len(col_numeric_idx)))
)
new_col_order = [col_order_mapping[idx] for idx in col_numeric_idx]
else:
new_col_order = None
return intermediate._reorder_labels(
row_numeric_idx=new_row_order, col_numeric_idx=new_col_order
)
| [
"numpy.cumsum",
"modin.error_message.ErrorMessage.catch_bugs_and_request_email",
"numpy.all"
]
| [((2458, 2544), 'modin.error_message.ErrorMessage.catch_bugs_and_request_email', 'ErrorMessage.catch_bugs_and_request_email', (['(axis is not None and axis not in [0, 1])'], {}), '(axis is not None and axis not in\n [0, 1])\n', (2499, 2544), False, 'from modin.error_message import ErrorMessage\n'), ((2590, 2624), 'numpy.cumsum', 'np.cumsum', (['([0] + self._row_lengths)'], {}), '([0] + self._row_lengths)\n', (2599, 2624), True, 'import numpy as np\n'), ((2650, 2686), 'numpy.cumsum', 'np.cumsum', (['([0] + self._column_widths)'], {}), '([0] + self._column_widths)\n', (2659, 2686), True, 'import numpy as np\n'), ((10833, 10884), 'numpy.all', 'np.all', (['(row_numeric_idx[1:] >= row_numeric_idx[:-1])'], {}), '(row_numeric_idx[1:] >= row_numeric_idx[:-1])\n', (10839, 10884), True, 'import numpy as np\n'), ((11043, 11094), 'numpy.all', 'np.all', (['(col_numeric_idx[1:] >= col_numeric_idx[:-1])'], {}), '(col_numeric_idx[1:] >= col_numeric_idx[:-1])\n', (11049, 11094), True, 'import numpy as np\n')] |
from astropy.table import Table, Column
import matplotlib.pyplot as plt
#url = "https://exoplanetarchive.ipac.caltech.edu/cgi-bin/nstedAPI/nph-nstedAPI?table=exoplanets&select=pl_hostname,ra,dec&order=dec&format=csv"
url = "https://exoplanetarchive.ipac.caltech.edu/cgi-bin/nstedAPI/nph-nstedAPI?table=exoplanets"
# Without a 'select' clause the query returns the table's default columns (including pl_letter, ra and dec used below)
t = Table.read(url, format="csv")
t_b = t[t["pl_letter"] == "b"]
t_c = t[t["pl_letter"] == "c"]
t_d = t[t["pl_letter"] == "d"]
t_e = t[t["pl_letter"] == "e"]
t_f = t[t["pl_letter"] == "f"]
t_g = t[t["pl_letter"] == "g"]
t_h = t[t["pl_letter"] == "h"]
t_i = t[t["pl_letter"] == "i"]
fig = plt.figure()
ax = fig.add_subplot(1,1,1,aspect="equal")
ax.scatter(t_b["ra"],t_b["dec"],color="Black",label = "2 Planets")
ax.scatter(t_c["ra"],t_c["dec"],color="red", label = "3 Planets")
ax.scatter(t_d["ra"],t_d["dec"],color="blue", label = "4 Planets")
ax.scatter(t_e["ra"],t_e["dec"],color="green", label = "5 Planets")
ax.scatter(t_f["ra"],t_f["dec"],color="yellow", label = "6 Planets")
ax.scatter(t_g["ra"],t_g["dec"],color="purple", label = "7 Planets")
ax.scatter(t_h["ra"],t_h["dec"],color="orange", label = "8 Planets")
ax.scatter(t_i["ra"],t_i["dec"],color="cyan", label = "9 Planets")
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
ax.set_xlim(360,0)
ax.set_ylim(-90,90)
ax.set_ylabel("DEC")
ax.set_xlabel("RA")
ax.set_title("Positions of Explanets by number of planets in system")
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.show()
| [
"matplotlib.pyplot.show",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.legend",
"astropy.table.Table.read"
]
| [((359, 388), 'astropy.table.Table.read', 'Table.read', (['url'], {'format': '"""csv"""'}), "(url, format='csv')\n", (369, 388), False, 'from astropy.table import Table, Column\n'), ((645, 657), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (655, 657), True, 'import matplotlib.pyplot as plt\n'), ((1455, 1517), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'bbox_to_anchor': '(1.05, 1)', 'loc': '(2)', 'borderaxespad': '(0.0)'}), '(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.0)\n', (1465, 1517), True, 'import matplotlib.pyplot as plt\n'), ((1517, 1527), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1525, 1527), True, 'import matplotlib.pyplot as plt\n')] |
# Copyright (C) 2019 Cancer Care Associates
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import uuid
import numpy as np
import pydicom
from pymedphys._dicom.create import dicom_dataset_from_dict
from pymedphys._dicom.header import (
RED_adjustment_map_from_structure_names,
adjust_machine_name,
adjust_RED_by_structure_name,
adjust_rel_elec_density,
)
from pymedphys._dicom.utilities import remove_file
HERE = os.path.dirname(__file__)
ORIGINAL_DICOM_FILENAME = os.path.join(
HERE, "scratch", "original-{}.dcm".format(str(uuid.uuid4()))
)
ADJUSTED_DICOM_FILENAME = os.path.join(
HERE, "scratch", "adjusted-{}.dcm".format(str(uuid.uuid4()))
)
def compare_dicom_cli(command, original, expected):
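    # write the original dataset, run the CLI command, then check the adjusted output it produces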
pydicom.write_file(ORIGINAL_DICOM_FILENAME, original)
try:
subprocess.check_call(command)
cli_adjusted_ds = pydicom.read_file(ADJUSTED_DICOM_FILENAME, force=True)
assert str(cli_adjusted_ds) == str(expected)
finally:
remove_file(ORIGINAL_DICOM_FILENAME)
remove_file(ADJUSTED_DICOM_FILENAME)
def test_adjust_machine_name():
new_name = "new_name"
original_ds = dicom_dataset_from_dict(
{
"BeamSequence": [
{"TreatmentMachineName": "hello"},
{"TreatmentMachineName": "george"},
]
}
)
expected_ds = dicom_dataset_from_dict(
{
"BeamSequence": [
{"TreatmentMachineName": new_name},
{"TreatmentMachineName": new_name},
]
}
)
adjusted_ds = adjust_machine_name(original_ds, new_name)
assert adjusted_ds != original_ds
assert adjusted_ds == expected_ds
command = "pymedphys dicom adjust-machine-name".split() + [
ORIGINAL_DICOM_FILENAME,
ADJUSTED_DICOM_FILENAME,
new_name,
]
compare_dicom_cli(command, original_ds, expected_ds)
def test_electron_density_append():
adjustment_map = {
"to_be_changed 1": 1.0,
"to_be_changed 2": 0.5,
"to_be_changed 3": 1.5,
}
excess_adjustment_map = {**adjustment_map, **{"this_structure_doesnt_exist": 1.0}}
original_ds = dicom_dataset_from_dict(
{
"StructureSetROISequence": [
{"ROINumber": 1, "ROIName": "to_be_changed 1"},
{"ROINumber": 2, "ROIName": "dont_change_me"},
{"ROINumber": 10, "ROIName": "to_be_changed 2"},
{"ROINumber": 99, "ROIName": "to_be_changed 3"},
],
"RTROIObservationsSequence": [
{
"ReferencedROINumber": 1,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "EFFECTIVE_Z",
"ROIPhysicalPropertyValue": 6,
}
],
},
{"ReferencedROINumber": 2},
{"ReferencedROINumber": 10},
{
"ReferencedROINumber": 99,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "REL_ELEC_DENSITY",
"ROIPhysicalPropertyValue": 0,
}
],
},
],
}
)
expected_ds = dicom_dataset_from_dict(
{
"RTROIObservationsSequence": [
{
"ReferencedROINumber": 1,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "EFFECTIVE_Z",
"ROIPhysicalPropertyValue": 6,
},
{
"ROIPhysicalProperty": "REL_ELEC_DENSITY",
"ROIPhysicalPropertyValue": adjustment_map[
"to_be_changed 1"
],
},
],
},
{"ReferencedROINumber": 2},
{
"ReferencedROINumber": 10,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "REL_ELEC_DENSITY",
"ROIPhysicalPropertyValue": adjustment_map[
"to_be_changed 2"
],
}
],
},
{
"ReferencedROINumber": 99,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "REL_ELEC_DENSITY",
"ROIPhysicalPropertyValue": adjustment_map[
"to_be_changed 3"
],
}
],
},
]
},
template_ds=original_ds,
)
adjusted_ds = adjust_rel_elec_density(original_ds, adjustment_map)
assert adjusted_ds != original_ds
assert str(expected_ds) == str(adjusted_ds)
adjusted_with_excess_ds = adjust_rel_elec_density(
original_ds, excess_adjustment_map, ignore_missing_structure=True
)
assert adjusted_with_excess_ds != original_ds
assert str(expected_ds) == str(adjusted_with_excess_ds)
excess_adjustment_map_as_list = [
["{}".format(key), item] for key, item in excess_adjustment_map.items()
]
excess_adjustment_map_flat = np.concatenate(excess_adjustment_map_as_list).tolist()
command = (
"pymedphys dicom adjust-RED -i ".split()
+ [ORIGINAL_DICOM_FILENAME, ADJUSTED_DICOM_FILENAME]
+ excess_adjustment_map_flat
)
compare_dicom_cli(command, original_ds, expected_ds)
def test_structure_name_parse():
structure_names = [
"a RED=1",
"b",
"c",
"d RED=2.2",
"e red = 3",
"f",
"g Red: 4.7",
"h RED=0.5 ",
]
expected_adjustment_map = {
"a RED=1": 1,
"d RED=2.2": 2.2,
"e red = 3": 3,
"g Red: 4.7": 4.7,
"h RED=0.5 ": 0.5,
}
adjustment_map = RED_adjustment_map_from_structure_names(structure_names)
assert expected_adjustment_map == adjustment_map
def test_structure_name_based_RED_append():
electron_density_to_use = 0.5
original_ds = dicom_dataset_from_dict(
{
"StructureSetROISequence": [
{
"ROINumber": 1,
"ROIName": "a_structure RED={}".format(electron_density_to_use),
},
{"ROINumber": 2, "ROIName": "dont_change_me"},
],
"RTROIObservationsSequence": [
{"ReferencedROINumber": 1},
{"ReferencedROINumber": 2},
],
}
)
expected_ds = dicom_dataset_from_dict(
{
"RTROIObservationsSequence": [
{
"ReferencedROINumber": 1,
"ROIPhysicalPropertiesSequence": [
{
"ROIPhysicalProperty": "REL_ELEC_DENSITY",
"ROIPhysicalPropertyValue": electron_density_to_use,
}
],
},
{"ReferencedROINumber": 2},
]
},
template_ds=original_ds,
)
adjusted_ds = adjust_RED_by_structure_name(original_ds)
assert adjusted_ds != original_ds
assert str(expected_ds) == str(adjusted_ds)
command = "pymedphys dicom adjust-RED-by-structure-name".split() + [
ORIGINAL_DICOM_FILENAME,
ADJUSTED_DICOM_FILENAME,
]
compare_dicom_cli(command, original_ds, expected_ds)
| [
"pymedphys._dicom.header.adjust_RED_by_structure_name",
"pymedphys._dicom.header.adjust_machine_name",
"pymedphys._dicom.header.RED_adjustment_map_from_structure_names",
"subprocess.check_call",
"pymedphys._dicom.utilities.remove_file",
"uuid.uuid4",
"os.path.dirname",
"pymedphys._dicom.create.dicom_dataset_from_dict",
"pydicom.read_file",
"numpy.concatenate",
"pydicom.write_file",
"pymedphys._dicom.header.adjust_rel_elec_density"
]
| [((958, 983), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (973, 983), False, 'import os\n'), ((1256, 1309), 'pydicom.write_file', 'pydicom.write_file', (['ORIGINAL_DICOM_FILENAME', 'original'], {}), '(ORIGINAL_DICOM_FILENAME, original)\n', (1274, 1309), False, 'import pydicom\n'), ((1676, 1794), 'pymedphys._dicom.create.dicom_dataset_from_dict', 'dicom_dataset_from_dict', (["{'BeamSequence': [{'TreatmentMachineName': 'hello'}, {\n 'TreatmentMachineName': 'george'}]}"], {}), "({'BeamSequence': [{'TreatmentMachineName': 'hello'},\n {'TreatmentMachineName': 'george'}]})\n", (1699, 1794), False, 'from pymedphys._dicom.create import dicom_dataset_from_dict\n'), ((1893, 2013), 'pymedphys._dicom.create.dicom_dataset_from_dict', 'dicom_dataset_from_dict', (["{'BeamSequence': [{'TreatmentMachineName': new_name}, {\n 'TreatmentMachineName': new_name}]}"], {}), "({'BeamSequence': [{'TreatmentMachineName': new_name\n }, {'TreatmentMachineName': new_name}]})\n", (1916, 2013), False, 'from pymedphys._dicom.create import dicom_dataset_from_dict\n'), ((2111, 2153), 'pymedphys._dicom.header.adjust_machine_name', 'adjust_machine_name', (['original_ds', 'new_name'], {}), '(original_ds, new_name)\n', (2130, 2153), False, 'from pymedphys._dicom.header import RED_adjustment_map_from_structure_names, adjust_machine_name, adjust_RED_by_structure_name, adjust_rel_elec_density\n'), ((2714, 3361), 'pymedphys._dicom.create.dicom_dataset_from_dict', 'dicom_dataset_from_dict', (["{'StructureSetROISequence': [{'ROINumber': 1, 'ROIName': 'to_be_changed 1'},\n {'ROINumber': 2, 'ROIName': 'dont_change_me'}, {'ROINumber': 10,\n 'ROIName': 'to_be_changed 2'}, {'ROINumber': 99, 'ROIName':\n 'to_be_changed 3'}], 'RTROIObservationsSequence': [{\n 'ReferencedROINumber': 1, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'EFFECTIVE_Z', 'ROIPhysicalPropertyValue': 6}]},\n {'ReferencedROINumber': 2}, {'ReferencedROINumber': 10}, {\n 'ReferencedROINumber': 99, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': \n 0}]}]}"], {}), "({'StructureSetROISequence': [{'ROINumber': 1,\n 'ROIName': 'to_be_changed 1'}, {'ROINumber': 2, 'ROIName':\n 'dont_change_me'}, {'ROINumber': 10, 'ROIName': 'to_be_changed 2'}, {\n 'ROINumber': 99, 'ROIName': 'to_be_changed 3'}],\n 'RTROIObservationsSequence': [{'ReferencedROINumber': 1,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty': 'EFFECTIVE_Z',\n 'ROIPhysicalPropertyValue': 6}]}, {'ReferencedROINumber': 2}, {\n 'ReferencedROINumber': 10}, {'ReferencedROINumber': 99,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty':\n 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': 0}]}]})\n", (2737, 3361), False, 'from pymedphys._dicom.create import dicom_dataset_from_dict\n'), ((3926, 4667), 'pymedphys._dicom.create.dicom_dataset_from_dict', 'dicom_dataset_from_dict', (["{'RTROIObservationsSequence': [{'ReferencedROINumber': 1,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty': 'EFFECTIVE_Z',\n 'ROIPhysicalPropertyValue': 6}, {'ROIPhysicalProperty':\n 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': adjustment_map[\n 'to_be_changed 1']}]}, {'ReferencedROINumber': 2}, {\n 'ReferencedROINumber': 10, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue':\n adjustment_map['to_be_changed 2']}]}, {'ReferencedROINumber': 99,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty':\n 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': 
adjustment_map[\n 'to_be_changed 3']}]}]}"], {'template_ds': 'original_ds'}), "({'RTROIObservationsSequence': [{\n 'ReferencedROINumber': 1, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'EFFECTIVE_Z', 'ROIPhysicalPropertyValue': 6}, {\n 'ROIPhysicalProperty': 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue':\n adjustment_map['to_be_changed 1']}]}, {'ReferencedROINumber': 2}, {\n 'ReferencedROINumber': 10, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue':\n adjustment_map['to_be_changed 2']}]}, {'ReferencedROINumber': 99,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty':\n 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': adjustment_map[\n 'to_be_changed 3']}]}]}, template_ds=original_ds)\n", (3949, 4667), False, 'from pymedphys._dicom.create import dicom_dataset_from_dict\n'), ((5623, 5675), 'pymedphys._dicom.header.adjust_rel_elec_density', 'adjust_rel_elec_density', (['original_ds', 'adjustment_map'], {}), '(original_ds, adjustment_map)\n', (5646, 5675), False, 'from pymedphys._dicom.header import RED_adjustment_map_from_structure_names, adjust_machine_name, adjust_RED_by_structure_name, adjust_rel_elec_density\n'), ((5794, 5888), 'pymedphys._dicom.header.adjust_rel_elec_density', 'adjust_rel_elec_density', (['original_ds', 'excess_adjustment_map'], {'ignore_missing_structure': '(True)'}), '(original_ds, excess_adjustment_map,\n ignore_missing_structure=True)\n', (5817, 5888), False, 'from pymedphys._dicom.header import RED_adjustment_map_from_structure_names, adjust_machine_name, adjust_RED_by_structure_name, adjust_rel_elec_density\n'), ((6850, 6906), 'pymedphys._dicom.header.RED_adjustment_map_from_structure_names', 'RED_adjustment_map_from_structure_names', (['structure_names'], {}), '(structure_names)\n', (6889, 6906), False, 'from pymedphys._dicom.header import RED_adjustment_map_from_structure_names, adjust_machine_name, adjust_RED_by_structure_name, adjust_rel_elec_density\n'), ((7553, 7842), 'pymedphys._dicom.create.dicom_dataset_from_dict', 'dicom_dataset_from_dict', (["{'RTROIObservationsSequence': [{'ReferencedROINumber': 1,\n 'ROIPhysicalPropertiesSequence': [{'ROIPhysicalProperty':\n 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue': electron_density_to_use\n }]}, {'ReferencedROINumber': 2}]}"], {'template_ds': 'original_ds'}), "({'RTROIObservationsSequence': [{\n 'ReferencedROINumber': 1, 'ROIPhysicalPropertiesSequence': [{\n 'ROIPhysicalProperty': 'REL_ELEC_DENSITY', 'ROIPhysicalPropertyValue':\n electron_density_to_use}]}, {'ReferencedROINumber': 2}]}, template_ds=\n original_ds)\n", (7576, 7842), False, 'from pymedphys._dicom.create import dicom_dataset_from_dict\n'), ((8123, 8164), 'pymedphys._dicom.header.adjust_RED_by_structure_name', 'adjust_RED_by_structure_name', (['original_ds'], {}), '(original_ds)\n', (8151, 8164), False, 'from pymedphys._dicom.header import RED_adjustment_map_from_structure_names, adjust_machine_name, adjust_RED_by_structure_name, adjust_rel_elec_density\n'), ((1328, 1358), 'subprocess.check_call', 'subprocess.check_call', (['command'], {}), '(command)\n', (1349, 1358), False, 'import subprocess\n'), ((1385, 1439), 'pydicom.read_file', 'pydicom.read_file', (['ADJUSTED_DICOM_FILENAME'], {'force': '(True)'}), '(ADJUSTED_DICOM_FILENAME, force=True)\n', (1402, 1439), False, 'import pydicom\n'), ((1515, 1551), 'pymedphys._dicom.utilities.remove_file', 'remove_file', (['ORIGINAL_DICOM_FILENAME'], {}), '(ORIGINAL_DICOM_FILENAME)\n', (1526, 1551), False, 'from 
pymedphys._dicom.utilities import remove_file\n'), ((1560, 1596), 'pymedphys._dicom.utilities.remove_file', 'remove_file', (['ADJUSTED_DICOM_FILENAME'], {}), '(ADJUSTED_DICOM_FILENAME)\n', (1571, 1596), False, 'from pymedphys._dicom.utilities import remove_file\n'), ((1074, 1086), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1084, 1086), False, 'import uuid\n'), ((1181, 1193), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1191, 1193), False, 'import uuid\n'), ((6168, 6213), 'numpy.concatenate', 'np.concatenate', (['excess_adjustment_map_as_list'], {}), '(excess_adjustment_map_as_list)\n', (6182, 6213), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
import os, sys, re, json, requests, datetime, tarfile, argparse
from pprint import pprint
import numpy as np
from utils.UrlUtils import UrlUtils
server = 'https://qc.sentinel1.eo.esa.int/'
cal_re = re.compile(r'S1\w_AUX_CAL')
def cmdLineParse():
'''
Command line parser.
'''
parser = argparse.ArgumentParser(description='Fetch calibration auxiliary files ingested into HySDS')
parser.add_argument('-o', '--output', dest='outdir', type=str, default='.',
help='Path to output directory')
parser.add_argument('-d', '--dry-run', dest='dry_run', action='store_true',
help="Don't download anything; just output the URLs")
return parser.parse_args()
def download_file(url, outdir='.', session=None):
'''
Download file to specified directory.
'''
if session is None:
session = requests.session()
path = "%s.tgz" % os.path.join(outdir, os.path.basename(url))
print('Downloading URL: ', url)
request = session.get(url, stream=True, verify=False)
request.raise_for_status()
with open(path,'wb') as f:
for chunk in request.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
f.flush()
return path
def untar_file(path, outdir):
'''
Extract aux cal files.
'''
if not tarfile.is_tarfile(path):
raise RuntimeError("%s is not a tarfile." % path)
with tarfile.open(path) as f:
f.extractall(outdir)
def get_active_ids(es_url):
"""Query for the active calibration IDs."""
query = {
"query":{
"bool":{
"must":[
{"term":{"_id": "S1_AUX_CAL_ACTIVE"}},
]
}
},
"sort":[ { "starttime": { "order": "desc" } } ]
}
es_index = "grq_*_s1-aux_cal_active"
if es_url.endswith('/'):
search_url = '%s%s/_search' % (es_url, es_index)
else:
search_url = '%s/%s/_search' % (es_url, es_index)
r = requests.post(search_url, data=json.dumps(query))
if r.status_code == 200:
result = r.json()
#pprint(result)
total = result['hits']['total']
if total == 0:
raise RuntimeError("Failed to find S1_AUX_CAL_ACTIVE at %s." % search_url)
return result['hits']['hits'][0]['_source']['metadata']['active_ids']
else:
print("Failed to query %s:\n%s" % (es_url, r.text), file=sys.stderr)
print("query: %s" % json.dumps(query, indent=2), file=sys.stderr)
print("returned: %s" % r.text, file=sys.stderr)
r.raise_for_status()
def get_cal_url(id, es_url):
"""Query for the active calibration url."""
query = {
"query":{
"bool":{
"must":[
{"term":{"_id": id}},
]
}
},
"fields": ["urls", "metadata.archive_filename"]
}
es_index = "grq_*_s1-aux_cal"
if es_url.endswith('/'):
search_url = '%s%s/_search' % (es_url, es_index)
else:
search_url = '%s/%s/_search' % (es_url, es_index)
r = requests.post(search_url, data=json.dumps(query))
if r.status_code == 200:
result = r.json()
pprint(result)
total = result['hits']['total']
if total == 0:
raise RuntimeError("Failed to find %s at %s." % (id, search_url))
urls = result['hits']['hits'][0]['fields']['urls']
archive_fname = result['hits']['hits'][0]['fields']['metadata.archive_filename'][0]
url = [x for x in urls if x.startswith('http')][0]
#print(urls)
#print(url)
#print(archive_fname)
return os.path.join(url, archive_fname)
else:
print("Failed to query %s:\n%s" % (es_url, r.text), file=sys.stderr)
print("query: %s" % json.dumps(query, indent=2), file=sys.stderr)
print("returned: %s" % r.text, file=sys.stderr)
r.raise_for_status()
def fetch(outdir, dry_run):
# get endpoint configurations
uu = UrlUtils()
es_url = uu.rest_url
# get active calibration ids
active_ids = get_active_ids(es_url)
print(active_ids)
# get urls for active calibration files
cal_urls = [get_cal_url(i, es_url) for i in active_ids]
print(cal_urls)
if len(cal_urls) == 0:
print('Failed to find calibration auxiliary files')
if dry_run: print('\n'.join(cal_urls))
else:
if not os.path.isdir(outdir): os.makedirs(outdir)
for cal_url in cal_urls:
try: cal_file = download_file(cal_url, outdir)
except:
print('Failed to download URL: ', cal_url)
raise
try: cal_dir = untar_file(cal_file, outdir)
except:
print('Failed to untar: ', cal_file)
raise
os.unlink(cal_file)
if __name__ == '__main__':
inps = cmdLineParse()
fetch(inps.outdir, inps.dry_run)
| [
"requests.session",
"tarfile.open",
"argparse.ArgumentParser",
"re.compile",
"os.makedirs",
"json.dumps",
"utils.UrlUtils.UrlUtils",
"tarfile.is_tarfile",
"os.path.join",
"os.path.isdir",
"os.path.basename",
"os.unlink",
"pprint.pprint"
]
| [((225, 252), 're.compile', 're.compile', (['"""S1\\\\w_AUX_CAL"""'], {}), "('S1\\\\w_AUX_CAL')\n", (235, 252), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((329, 426), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Fetch calibration auxiliary files ingested into HySDS"""'}), "(description=\n 'Fetch calibration auxiliary files ingested into HySDS')\n", (352, 426), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((4057, 4067), 'utils.UrlUtils.UrlUtils', 'UrlUtils', ([], {}), '()\n', (4065, 4067), False, 'from utils.UrlUtils import UrlUtils\n'), ((878, 896), 'requests.session', 'requests.session', ([], {}), '()\n', (894, 896), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((1362, 1386), 'tarfile.is_tarfile', 'tarfile.is_tarfile', (['path'], {}), '(path)\n', (1380, 1386), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((1455, 1473), 'tarfile.open', 'tarfile.open', (['path'], {}), '(path)\n', (1467, 1473), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((3252, 3266), 'pprint.pprint', 'pprint', (['result'], {}), '(result)\n', (3258, 3266), False, 'from pprint import pprint\n'), ((3704, 3736), 'os.path.join', 'os.path.join', (['url', 'archive_fname'], {}), '(url, archive_fname)\n', (3716, 3736), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((941, 962), 'os.path.basename', 'os.path.basename', (['url'], {}), '(url)\n', (957, 962), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((2065, 2082), 'json.dumps', 'json.dumps', (['query'], {}), '(query)\n', (2075, 2082), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((3170, 3187), 'json.dumps', 'json.dumps', (['query'], {}), '(query)\n', (3180, 3187), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((4473, 4494), 'os.path.isdir', 'os.path.isdir', (['outdir'], {}), '(outdir)\n', (4486, 4494), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((4496, 4515), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (4507, 4515), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((4872, 4891), 'os.unlink', 'os.unlink', (['cal_file'], {}), '(cal_file)\n', (4881, 4891), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((2506, 2533), 'json.dumps', 'json.dumps', (['query'], {'indent': '(2)'}), '(query, indent=2)\n', (2516, 2533), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n'), ((3852, 3879), 'json.dumps', 'json.dumps', (['query'], {'indent': '(2)'}), '(query, indent=2)\n', (3862, 3879), False, 'import os, sys, re, json, requests, datetime, tarfile, argparse\n')] |
# Copyright 2005-2008, <NAME>
# Copyright 2010, 2012 <NAME>
# This software's license gives you freedom; you can copy, convey,
# propagate, redistribute, modify and/or redistribute modified versions of
# this program under the terms of the GNU Affero General Public License
# (AGPL) as published by the Free Software Foundation (FSF), either
# version 3 of the License, or (at your option) any later version of the
# AGPL published by the FSF.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero
# General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program in a file in the toplevel directory called
# "AGPLv3". If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url, include
from django.contrib import admin, admindocs
from conservancy import feeds, frontpage, sponsors
import conservancy.apps.fundgoal.views as fundgoal_views
import conservancy.static.views as static_views
admin.autodiscover()
urlpatterns = [
url(r'^$', frontpage.view),
url(r'^sponsors$', frontpage.view),
url(r'^sponsors/$', sponsors.view),
url(r'^sponsors/index.html$', sponsors.view),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', admin.site.urls),
url(r'^feeds/blog/?$', feeds.BlogFeed()),
url(r'^feeds/news/?$', feeds.PressReleaseFeed()),
url(r'^feeds/omnibus/?$', feeds.OmnibusFeed()),
url(r'^feeds/?$', feeds.view),
url(r'^news(/|$)', include('conservancy.apps.news.urls')),
url(r'^blog(/|$)', include('conservancy.apps.blog.urls')),
# formerly static templated things... (dirs with templates)
url(r'^error/(40[134]|500)(?:/index\.html|/|)$', static_views.handler),
url(r'^error', static_views.index),
url(r'^about', static_views.index),
url(r'^donate', static_views.index),
url(r'^copyleft-compliance', static_views.index,
{'fundraiser_sought' : 'vmware-match-0'}),
url(r'^projects', static_views.index),
url(r'^npoacct', static_views.index,
{'fundraiser_sought' : 'npoacct'}),
url(r'^contractpatch', include('conservancy.apps.contractpatch.urls')),
url(r'^overview', static_views.index),
url(r'^privacy-policy', static_views.index),
url(r'^supporter', include('conservancy.apps.supporter.urls')),
url(r'^fundraiser_data', fundgoal_views.view),
]
| [
"django.conf.urls.url",
"conservancy.feeds.BlogFeed",
"conservancy.feeds.OmnibusFeed",
"conservancy.feeds.PressReleaseFeed",
"django.conf.urls.include",
"django.contrib.admin.autodiscover"
]
| [((1144, 1164), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (1162, 1164), False, 'from django.contrib import admin, admindocs\n'), ((1186, 1211), 'django.conf.urls.url', 'url', (['"""^$"""', 'frontpage.view'], {}), "('^$', frontpage.view)\n", (1189, 1211), False, 'from django.conf.urls import url, include\n'), ((1218, 1251), 'django.conf.urls.url', 'url', (['"""^sponsors$"""', 'frontpage.view'], {}), "('^sponsors$', frontpage.view)\n", (1221, 1251), False, 'from django.conf.urls import url, include\n'), ((1258, 1291), 'django.conf.urls.url', 'url', (['"""^sponsors/$"""', 'sponsors.view'], {}), "('^sponsors/$', sponsors.view)\n", (1261, 1291), False, 'from django.conf.urls import url, include\n'), ((1298, 1341), 'django.conf.urls.url', 'url', (['"""^sponsors/index.html$"""', 'sponsors.view'], {}), "('^sponsors/index.html$', sponsors.view)\n", (1301, 1341), False, 'from django.conf.urls import url, include\n'), ((1415, 1446), 'django.conf.urls.url', 'url', (['"""^admin/"""', 'admin.site.urls'], {}), "('^admin/', admin.site.urls)\n", (1418, 1446), False, 'from django.conf.urls import url, include\n'), ((1605, 1633), 'django.conf.urls.url', 'url', (['"""^feeds/?$"""', 'feeds.view'], {}), "('^feeds/?$', feeds.view)\n", (1608, 1633), False, 'from django.conf.urls import url, include\n'), ((1830, 1900), 'django.conf.urls.url', 'url', (['"""^error/(40[134]|500)(?:/index\\\\.html|/|)$"""', 'static_views.handler'], {}), "('^error/(40[134]|500)(?:/index\\\\.html|/|)$', static_views.handler)\n", (1833, 1900), False, 'from django.conf.urls import url, include\n'), ((1906, 1939), 'django.conf.urls.url', 'url', (['"""^error"""', 'static_views.index'], {}), "('^error', static_views.index)\n", (1909, 1939), False, 'from django.conf.urls import url, include\n'), ((1946, 1979), 'django.conf.urls.url', 'url', (['"""^about"""', 'static_views.index'], {}), "('^about', static_views.index)\n", (1949, 1979), False, 'from django.conf.urls import url, include\n'), ((1986, 2020), 'django.conf.urls.url', 'url', (['"""^donate"""', 'static_views.index'], {}), "('^donate', static_views.index)\n", (1989, 2020), False, 'from django.conf.urls import url, include\n'), ((2027, 2119), 'django.conf.urls.url', 'url', (['"""^copyleft-compliance"""', 'static_views.index', "{'fundraiser_sought': 'vmware-match-0'}"], {}), "('^copyleft-compliance', static_views.index, {'fundraiser_sought':\n 'vmware-match-0'})\n", (2030, 2119), False, 'from django.conf.urls import url, include\n'), ((2150, 2186), 'django.conf.urls.url', 'url', (['"""^projects"""', 'static_views.index'], {}), "('^projects', static_views.index)\n", (2153, 2186), False, 'from django.conf.urls import url, include\n'), ((2193, 2262), 'django.conf.urls.url', 'url', (['"""^npoacct"""', 'static_views.index', "{'fundraiser_sought': 'npoacct'}"], {}), "('^npoacct', static_views.index, {'fundraiser_sought': 'npoacct'})\n", (2196, 2262), False, 'from django.conf.urls import url, include\n'), ((2364, 2400), 'django.conf.urls.url', 'url', (['"""^overview"""', 'static_views.index'], {}), "('^overview', static_views.index)\n", (2367, 2400), False, 'from django.conf.urls import url, include\n'), ((2407, 2449), 'django.conf.urls.url', 'url', (['"""^privacy-policy"""', 'static_views.index'], {}), "('^privacy-policy', static_views.index)\n", (2410, 2449), False, 'from django.conf.urls import url, include\n'), ((2524, 2568), 'django.conf.urls.url', 'url', (['"""^fundraiser_data"""', 'fundgoal_views.view'], {}), "('^fundraiser_data', 
fundgoal_views.view)\n", (2527, 2568), False, 'from django.conf.urls import url, include\n'), ((1368, 1408), 'django.conf.urls.include', 'include', (['"""django.contrib.admindocs.urls"""'], {}), "('django.contrib.admindocs.urls')\n", (1375, 1408), False, 'from django.conf.urls import url, include\n'), ((1476, 1492), 'conservancy.feeds.BlogFeed', 'feeds.BlogFeed', ([], {}), '()\n', (1490, 1492), False, 'from conservancy import feeds, frontpage, sponsors\n'), ((1522, 1546), 'conservancy.feeds.PressReleaseFeed', 'feeds.PressReleaseFeed', ([], {}), '()\n', (1544, 1546), False, 'from conservancy import feeds, frontpage, sponsors\n'), ((1579, 1598), 'conservancy.feeds.OmnibusFeed', 'feeds.OmnibusFeed', ([], {}), '()\n', (1596, 1598), False, 'from conservancy import feeds, frontpage, sponsors\n'), ((1659, 1696), 'django.conf.urls.include', 'include', (['"""conservancy.apps.news.urls"""'], {}), "('conservancy.apps.news.urls')\n", (1666, 1696), False, 'from django.conf.urls import url, include\n'), ((1722, 1759), 'django.conf.urls.include', 'include', (['"""conservancy.apps.blog.urls"""'], {}), "('conservancy.apps.blog.urls')\n", (1729, 1759), False, 'from django.conf.urls import url, include\n'), ((2311, 2357), 'django.conf.urls.include', 'include', (['"""conservancy.apps.contractpatch.urls"""'], {}), "('conservancy.apps.contractpatch.urls')\n", (2318, 2357), False, 'from django.conf.urls import url, include\n'), ((2475, 2517), 'django.conf.urls.include', 'include', (['"""conservancy.apps.supporter.urls"""'], {}), "('conservancy.apps.supporter.urls')\n", (2482, 2517), False, 'from django.conf.urls import url, include\n')] |
import unittest
from unittest.mock import Mock
from graphene import Schema
from graphene.test import Client
from graphene_spike.query import Query
class MainTest(unittest.TestCase):
def setUp(self):
self.schema = Schema(query=Query)
self.client = client = Client(self.schema)
def test_hello_should_work_without_argument(self):
# Assign
query_string = '{ hello }'
# Acts
executed = self.client.execute(query_string)
# Assert
self.assertEqual(executed['data'], {"hello": "Hello stranger, you have 18 !"})
def test_hello_should_write_the_giving_name(self):
# Assign
query_string = '{ hello(name: "Fabien") }'
# Acts
executed = self.client.execute(query_string)
# Assert
self.assertEqual(executed['data'], {"hello": "Hello Fabien, you have 18 !"})
def test_hello_should_write_the_giving_age(self):
# Assign
query_string = '{ hello(age: 24) }'
# Acts
executed = self.client.execute(query_string)
# Assert
self.assertEqual(executed['data'], {"hello": "Hello stranger, you have 24 !"})
def test_goodbye_should_giving_a_response(self):
# Assign
query_string = '{ goodbye }'
# Acts
executed = self.client.execute(query_string)
# Assert
self.assertEqual(executed['data'], {"goodbye": "See ya!"})
| [
"graphene.Schema",
"graphene.test.Client"
]
| [((229, 248), 'graphene.Schema', 'Schema', ([], {'query': 'Query'}), '(query=Query)\n', (235, 248), False, 'from graphene import Schema\n'), ((280, 299), 'graphene.test.Client', 'Client', (['self.schema'], {}), '(self.schema)\n', (286, 299), False, 'from graphene.test import Client\n')] |
from django.db import models
from django.contrib import admin
class Provider(models.Model):
name = models.CharField(max_length=50)
domain = models.CharField(max_length=50)
class Meta:
ordering = ['name']
app_label = 'api'
def __str__(self):
return self.domain
@admin.register(Provider)
class ProviderAdmin(admin.ModelAdmin):
list_display = ('name', 'domain')
| [
"django.contrib.admin.register",
"django.db.models.CharField"
]
| [((307, 331), 'django.contrib.admin.register', 'admin.register', (['Provider'], {}), '(Provider)\n', (321, 331), False, 'from django.contrib import admin\n'), ((105, 136), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (121, 136), False, 'from django.db import models\n'), ((150, 181), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (166, 181), False, 'from django.db import models\n')] |
#!/usr/bin/env python
# license removed for brevity
import rospy
from std_msgs.msg import String
from gazebo_msgs.msg import LinkState
def talker():
pub = rospy.Publisher('/gazebo/set_link_state', LinkState, queue_size=10)
ppp = LinkState()
rospy.init_node('talker', anonymous=True)
    rate = rospy.Rate(100) # 100hz
i = 1
while not rospy.is_shutdown():
ppp.link_name = "platform"
ppp.pose.position.x = 0.1
ppp.pose.position.y = 0.1
ppp.pose.position.z = 1
ppp.pose.orientation.x = 0
ppp.pose.orientation.y = 0
ppp.pose.orientation.z = 0
ppp.pose.orientation.w = 0
i = i+1
rospy.loginfo(ppp)
pub.publish(ppp)
rate.sleep()
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| [
"rospy.is_shutdown",
"rospy.init_node",
"gazebo_msgs.msg.LinkState",
"rospy.Rate",
"rospy.Publisher",
"rospy.loginfo"
]
| [((161, 228), 'rospy.Publisher', 'rospy.Publisher', (['"""/gazebo/set_link_state"""', 'LinkState'], {'queue_size': '(10)'}), "('/gazebo/set_link_state', LinkState, queue_size=10)\n", (176, 228), False, 'import rospy\n'), ((239, 250), 'gazebo_msgs.msg.LinkState', 'LinkState', ([], {}), '()\n', (248, 250), False, 'from gazebo_msgs.msg import LinkState\n'), ((255, 296), 'rospy.init_node', 'rospy.init_node', (['"""talker"""'], {'anonymous': '(True)'}), "('talker', anonymous=True)\n", (270, 296), False, 'import rospy\n'), ((313, 328), 'rospy.Rate', 'rospy.Rate', (['(100)'], {}), '(100)\n', (323, 328), False, 'import rospy\n'), ((360, 379), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (377, 379), False, 'import rospy\n'), ((680, 698), 'rospy.loginfo', 'rospy.loginfo', (['ppp'], {}), '(ppp)\n', (693, 698), False, 'import rospy\n')] |
import torch
from os import listdir, path
from PIL import Image
import torchvision
class DiscriminatorDataset(torch.utils.data.Dataset):
def __init__(self):
super(DiscriminatorDataset, self).__init__()
currentDir = path.dirname(__file__)
abstractDir = path.join(currentDir, 'image_data/abstract')
realisticDir = path.join(currentDir, 'image_data/realistic')
abstractFiles = [path.join(abstractDir, f) for f in listdir(
abstractDir) if path.isfile(path.join(abstractDir, f))]
realisticFiles = [path.join(realisticDir, f) for f in listdir(
realisticDir) if path.isfile(path.join(realisticDir, f))]
self.abstractFilesLen = len(abstractFiles)
self.allFiles = abstractFiles + realisticFiles
def __len__(self):
return len(self.allFiles)
def __getitem__(self, index):
filename = self.allFiles[index]
pilImage = Image.open(filename).convert("RGB")
return (torchvision.transforms.ToTensor()(pilImage), 1 if index < self.abstractFilesLen else 0)
| [
"os.listdir",
"PIL.Image.open",
"os.path.join",
"os.path.dirname",
"torchvision.transforms.ToTensor"
]
| [((238, 260), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (250, 260), False, 'from os import listdir, path\n'), ((283, 327), 'os.path.join', 'path.join', (['currentDir', '"""image_data/abstract"""'], {}), "(currentDir, 'image_data/abstract')\n", (292, 327), False, 'from os import listdir, path\n'), ((351, 396), 'os.path.join', 'path.join', (['currentDir', '"""image_data/realistic"""'], {}), "(currentDir, 'image_data/realistic')\n", (360, 396), False, 'from os import listdir, path\n'), ((422, 447), 'os.path.join', 'path.join', (['abstractDir', 'f'], {}), '(abstractDir, f)\n', (431, 447), False, 'from os import listdir, path\n'), ((560, 586), 'os.path.join', 'path.join', (['realisticDir', 'f'], {}), '(realisticDir, f)\n', (569, 586), False, 'from os import listdir, path\n'), ((457, 477), 'os.listdir', 'listdir', (['abstractDir'], {}), '(abstractDir)\n', (464, 477), False, 'from os import listdir, path\n'), ((596, 617), 'os.listdir', 'listdir', (['realisticDir'], {}), '(realisticDir)\n', (603, 617), False, 'from os import listdir, path\n'), ((933, 953), 'PIL.Image.open', 'Image.open', (['filename'], {}), '(filename)\n', (943, 953), False, 'from PIL import Image\n'), ((985, 1018), 'torchvision.transforms.ToTensor', 'torchvision.transforms.ToTensor', ([], {}), '()\n', (1016, 1018), False, 'import torchvision\n'), ((506, 531), 'os.path.join', 'path.join', (['abstractDir', 'f'], {}), '(abstractDir, f)\n', (515, 531), False, 'from os import listdir, path\n'), ((646, 672), 'os.path.join', 'path.join', (['realisticDir', 'f'], {}), '(realisticDir, f)\n', (655, 672), False, 'from os import listdir, path\n')] |
"""Tests for the HAPServer."""
from socket import timeout
from unittest.mock import Mock, MagicMock, patch
import pytest
from pyhap import hap_server
@patch('pyhap.hap_server.HAPServer.server_bind', new=MagicMock())
@patch('pyhap.hap_server.HAPServer.server_activate', new=MagicMock())
def test_finish_request_pops_socket():
"""Test that ``finish_request`` always clears the connection after a request."""
amock = Mock()
client_addr = ('192.168.1.1', 55555)
server_addr = ('', 51826)
# Positive case: The request is handled
server = hap_server.HAPServer(server_addr, amock,
handler_type=lambda *args: MagicMock())
server.connections[client_addr] = amock
server.finish_request(amock, client_addr)
assert len(server.connections) == 0
# Negative case: The request fails with a timeout
def raises(*args):
raise timeout()
server = hap_server.HAPServer(server_addr, amock,
handler_type=raises)
server.connections[client_addr] = amock
server.finish_request(amock, client_addr)
assert len(server.connections) == 0
# Negative case: The request raises some other exception
server = hap_server.HAPServer(server_addr, amock,
handler_type=lambda *args: 1 / 0)
server.connections[client_addr] = amock
with pytest.raises(Exception):
server.finish_request(amock, client_addr)
assert len(server.connections) == 0
| [
"pyhap.hap_server.HAPServer",
"unittest.mock.Mock",
"unittest.mock.MagicMock",
"socket.timeout",
"pytest.raises"
]
| [((426, 432), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (430, 432), False, 'from unittest.mock import Mock, MagicMock, patch\n'), ((924, 985), 'pyhap.hap_server.HAPServer', 'hap_server.HAPServer', (['server_addr', 'amock'], {'handler_type': 'raises'}), '(server_addr, amock, handler_type=raises)\n', (944, 985), False, 'from pyhap import hap_server\n'), ((1226, 1300), 'pyhap.hap_server.HAPServer', 'hap_server.HAPServer', (['server_addr', 'amock'], {'handler_type': '(lambda *args: 1 / 0)'}), '(server_addr, amock, handler_type=lambda *args: 1 / 0)\n', (1246, 1300), False, 'from pyhap import hap_server\n'), ((901, 910), 'socket.timeout', 'timeout', ([], {}), '()\n', (908, 910), False, 'from socket import timeout\n'), ((1389, 1413), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (1402, 1413), False, 'import pytest\n'), ((207, 218), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (216, 218), False, 'from unittest.mock import Mock, MagicMock, patch\n'), ((277, 288), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (286, 288), False, 'from unittest.mock import Mock, MagicMock, patch\n'), ((664, 675), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (673, 675), False, 'from unittest.mock import Mock, MagicMock, patch\n')] |
import torch
from torchaudio_unittest.common_utils import PytorchTestCase
from torchaudio_unittest.models.emformer.emformer_test_impl import EmformerTestImpl
class EmformerFloat32CPUTest(EmformerTestImpl, PytorchTestCase):
dtype = torch.float32
device = torch.device("cpu")
class EmformerFloat64CPUTest(EmformerTestImpl, PytorchTestCase):
dtype = torch.float64
device = torch.device("cpu")
| [
"torch.device"
]
| [((264, 283), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (276, 283), False, 'import torch\n'), ((390, 409), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (402, 409), False, 'import torch\n')] |
"""This module contains code for parsing RPC responses."""
from dataclasses import dataclass, field
from typing import Union, Tuple, Any, Dict, List, Optional, Literal
from apischema import alias
from apischema.conversions import as_str
from solana.publickey import PublicKey
from solana.transaction import TransactionSignature
as_str(PublicKey)
TransactionErrorResult = Optional[dict]
@dataclass
class TransactionErr:
"""Container for possible transaction errors."""
err: TransactionErrorResult
@dataclass
class Context:
"""RPC result context."""
slot: int
@dataclass
class WithContext:
"""Base class for RPC result including context."""
context: Context
@dataclass
class AccountInfo:
"""Account information."""
lamports: int
owner: PublicKey
data: Union[Literal[""], Tuple[str, str], Dict[str, Any]]
executable: bool
rent_epoch: int = field(metadata=alias("rentEpoch"))
@dataclass
class AccountInfoAndContext(WithContext):
"""Account info and RPC result context."""
value: AccountInfo
@dataclass
class SubscriptionNotificationBase:
"""Base class for RPC subscription notifications."""
subscription: int
result: Any
@dataclass
class AccountNotification(SubscriptionNotificationBase):
"""Account subscription notification."""
result: AccountInfoAndContext
@dataclass
class LogItem(TransactionErr):
"""Container for logs from logSubscribe."""
signature: TransactionSignature
logs: Optional[List[str]]
@dataclass
class LogItemAndContext(WithContext):
"""Log item with RPC result context."""
value: LogItem
@dataclass
class LogsNotification(SubscriptionNotificationBase):
"""Logs subscription notification."""
result: LogItemAndContext
@dataclass
class ProgramAccount:
"""Program account pubkey and account info."""
pubkey: PublicKey
account: AccountInfo
@dataclass
class ProgramAccountAndContext(WithContext):
"""Program subscription data with RPC result context."""
value: ProgramAccount
@dataclass
class ProgramNotification(SubscriptionNotificationBase):
"""Program subscription notification."""
result: ProgramAccountAndContext
@dataclass
class SignatureErrAndContext(WithContext):
"""Signature subscription error info with RPC result context."""
value: TransactionErr
@dataclass
class SignatureNotification(SubscriptionNotificationBase):
"""Signature subscription notification."""
result: SignatureErrAndContext
@dataclass
class SlotBase:
"""Base class for slot container."""
slot: int
@dataclass
class SlotInfo(SlotBase):
"""Slot info."""
parent: int
root: int
@dataclass
class SlotNotification(SubscriptionNotificationBase):
"""Slot subscription notification."""
result: SlotInfo
@dataclass
class RootNotification(SubscriptionNotificationBase):
"""Root subscription notification."""
result: int
@dataclass
class SlotAndTimestampBase(SlotBase):
"""Base class for a slot with timestamp."""
timestamp: int
@dataclass
class FirstShredReceived(SlotAndTimestampBase):
"""First shread received update."""
type: Literal["firstShredReceived"]
@dataclass
class Completed(SlotAndTimestampBase):
"""Slot completed update."""
type: Literal["completed"]
@dataclass
class CreatedBank(SlotAndTimestampBase):
"""Created bank update."""
parent: int
type: Literal["createdBank"]
@dataclass
class SlotTransactionStats:
"""Slot transaction stats."""
num_transaction_entries: int = field(metadata=alias("numTransactionEntries"))
num_successful_transactions: int = field(metadata=alias("numSuccessfulTransactions"))
num_failed_transactions: int = field(metadata=alias("numFailedTransactions"))
max_transactions_per_entry: int = field(metadata=alias("maxTransactionsPerEntry"))
@dataclass
class Frozen(SlotAndTimestampBase):
"""Slot frozen update."""
stats: SlotTransactionStats
type: Literal["frozen"]
@dataclass
class Dead(SlotAndTimestampBase):
"""Dead slot update."""
err: str
type: Literal["dead"]
@dataclass
class OptimisticConfirmation(SlotAndTimestampBase):
"""Optimistic confirmation update."""
type: Literal["optimisticConfirmation"]
@dataclass
class Root(SlotAndTimestampBase):
"""Root update."""
type: Literal["root"]
SlotsUpdatesItem = Union[FirstShredReceived, Completed, CreatedBank, Frozen, Dead, OptimisticConfirmation, Root]
@dataclass
class SlotsUpdatesNotification(SubscriptionNotificationBase):
"""Slots updates notification."""
result: SlotsUpdatesItem
@dataclass
class VoteItem:
"""Vote data."""
hash: str
slots: List[int]
timestamp: Optional[int]
@dataclass
class VoteNotification(SubscriptionNotificationBase):
"""Vote update notification."""
result: VoteItem
SubscriptionNotification = Union[
AccountNotification,
LogsNotification,
ProgramNotification,
SignatureNotification,
SlotNotification,
RootNotification,
SlotsUpdatesNotification,
VoteNotification,
]
| [
"apischema.conversions.as_str",
"apischema.alias"
]
| [((332, 349), 'apischema.conversions.as_str', 'as_str', (['PublicKey'], {}), '(PublicKey)\n', (338, 349), False, 'from apischema.conversions import as_str\n'), ((917, 935), 'apischema.alias', 'alias', (['"""rentEpoch"""'], {}), "('rentEpoch')\n", (922, 935), False, 'from apischema import alias\n'), ((3584, 3614), 'apischema.alias', 'alias', (['"""numTransactionEntries"""'], {}), "('numTransactionEntries')\n", (3589, 3614), False, 'from apischema import alias\n'), ((3670, 3704), 'apischema.alias', 'alias', (['"""numSuccessfulTransactions"""'], {}), "('numSuccessfulTransactions')\n", (3675, 3704), False, 'from apischema import alias\n'), ((3756, 3786), 'apischema.alias', 'alias', (['"""numFailedTransactions"""'], {}), "('numFailedTransactions')\n", (3761, 3786), False, 'from apischema import alias\n'), ((3841, 3873), 'apischema.alias', 'alias', (['"""maxTransactionsPerEntry"""'], {}), "('maxTransactionsPerEntry')\n", (3846, 3873), False, 'from apischema import alias\n')] |
"""
Vesper archive settings.
The Vesper server serves the Vesper archive that is in the directory
in which the server starts. The archive settings are the composition
of a set of default settings (hard-coded in this module) and settings
(optionally) specified in the file "Archive Settings.yaml" in the
archive directory.
"""
from pathlib import Path
import os
import sys
from vesper.util.settings import Settings
from vesper.util.settings_type import SettingsType
import vesper.archive_paths as archive_paths
_DEFAULT_SETTINGS = Settings.create_from_yaml('''
database:
engine: SQLite
''')
_SETTINGS_TYPE = SettingsType('Archive Settings', _DEFAULT_SETTINGS)
_SETTINGS_FILE_NAME = 'Archive Settings.yaml'
def _create_settings():
archive_dir_path = Path(os.getcwd())
settings = _load_settings_file(archive_dir_path)
archive_paths.initialize(archive_dir_path, settings)
return settings
def _load_settings_file(archive_dir_path):
file_path = archive_dir_path / _SETTINGS_FILE_NAME
if not file_path.exists():
        # settings file does not exist
return _SETTINGS_TYPE.defaults
else:
# settings file exists
try:
return _SETTINGS_TYPE.create_settings_from_yaml_file(file_path)
except Exception as e:
print((
'Load failed for settings file "{}". Error message '
'was: {}').format(file_path, str(e)))
sys.exit(1)
archive_settings = _create_settings()
| [
"vesper.archive_paths.initialize",
"vesper.util.settings.Settings.create_from_yaml",
"os.getcwd",
"sys.exit",
"vesper.util.settings_type.SettingsType"
]
| [((536, 599), 'vesper.util.settings.Settings.create_from_yaml', 'Settings.create_from_yaml', (['"""\ndatabase:\n engine: SQLite\n"""'], {}), '("""\ndatabase:\n engine: SQLite\n""")\n', (561, 599), False, 'from vesper.util.settings import Settings\n'), ((619, 670), 'vesper.util.settings_type.SettingsType', 'SettingsType', (['"""Archive Settings"""', '_DEFAULT_SETTINGS'], {}), "('Archive Settings', _DEFAULT_SETTINGS)\n", (631, 670), False, 'from vesper.util.settings_type import SettingsType\n'), ((843, 895), 'vesper.archive_paths.initialize', 'archive_paths.initialize', (['archive_dir_path', 'settings'], {}), '(archive_dir_path, settings)\n', (867, 895), True, 'import vesper.archive_paths as archive_paths\n'), ((773, 784), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (782, 784), False, 'import os\n'), ((1487, 1498), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (1495, 1498), False, 'import sys\n')] |
import subprocess
from LEGEND import tbot as bot
from LEGEND import tbot as borg
from LEGEND.events import register
from LEGEND import OWNER_ID, SUDO_USERS
import asyncio
import traceback
import io
import os
import sys
import time
from telethon.tl import functions
from telethon.tl import types
from telethon.tl.types import *
from telethon.errors import *
@register(pattern="^/bash (.*)")
async def msg(event):
if event.sender_id == OWNER_ID:
pass
else:
return
PROCESS_RUN_TIME = 100
cmd = event.pattern_match.group(1)
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
time.time() + PROCESS_RUN_TIME
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await process.communicate()
e = stderr.decode()
if not e:
e = "No Error"
o = stdout.decode()
if not o:
o = "**Tip**: \n`If you want to see the results of your code, I suggest printing them to stdout.`"
else:
_o = o.split("\n")
o = "`\n".join(_o)
await event.reply(f"**QUERY:**\n__Command:__\n`{cmd}` \n__PID:__\n`{process.pid}`\n\n**stderr:** \n`{e}`\n**Output:**\n{o}"
)
@register(pattern="^/eval")
async def _(event):
if event.sender_id == OWNER_ID:
pass
elif event.sender_id in SUDO_USERS:
pass
else:
return
cmd = event.text.split(" ", maxsplit=1)[1]
reply_to_id = event.message.id
if event.reply_to_msg_id:
reply_to_id = event.reply_to_msg_id
old_stderr = sys.stderr
old_stdout = sys.stdout
redirected_output = sys.stdout = io.StringIO()
redirected_error = sys.stderr = io.StringIO()
stdout, stderr, exc = None, None, None
try:
await aexec(cmd, event)
except Exception:
exc = traceback.format_exc()
stdout = redirected_output.getvalue()
stderr = redirected_error.getvalue()
sys.stdout = old_stdout
sys.stderr = old_stderr
evaluation = ""
if exc:
evaluation = exc
elif stderr:
evaluation = stderr
elif stdout:
evaluation = stdout
else:
evaluation = "Success"
final_output = "**EVAL**: `{}` \n\n **OUTPUT**: \n`{}` \n".format(cmd, evaluation)
MAX_MESSAGE_SIZE_LIMIT = 4095
if len(final_output) > MAX_MESSAGE_SIZE_LIMIT:
with io.BytesIO(str.encode(final_output)) as out_file:
out_file.name = "eval.text"
await bot.send_file(
event.chat_id,
out_file,
force_document=True,
allow_cache=False,
caption=cmd,
reply_to=reply_to_id,
)
else:
await event.reply(final_output)
async def aexec(code, smessatatus):
message = event = smessatatus
def p(_x):
return print(slitu.yaml_format(_x))
reply = await event.get_reply_message()
exec(
"async def __aexec(message, reply, client, p): "
+ "\n event = smessatatus = message"
+ "".join(f"\n {l}" for l in code.split("\n"))
)
return await locals()["__aexec"](message, reply, bot, p)
| [
"traceback.format_exc",
"LEGEND.events.register",
"LEGEND.tbot.send_file",
"io.StringIO",
"asyncio.create_subprocess_shell",
"time.time"
]
| [((360, 391), 'LEGEND.events.register', 'register', ([], {'pattern': '"""^/bash (.*)"""'}), "(pattern='^/bash (.*)')\n", (368, 391), False, 'from LEGEND.events import register\n'), ((1284, 1310), 'LEGEND.events.register', 'register', ([], {'pattern': '"""^/eval"""'}), "(pattern='^/eval')\n", (1292, 1310), False, 'from LEGEND.events import register\n'), ((1708, 1721), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1719, 1721), False, 'import io\n'), ((1758, 1771), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (1769, 1771), False, 'import io\n'), ((667, 678), 'time.time', 'time.time', ([], {}), '()\n', (676, 678), False, 'import time\n'), ((718, 823), 'asyncio.create_subprocess_shell', 'asyncio.create_subprocess_shell', (['cmd'], {'stdout': 'asyncio.subprocess.PIPE', 'stderr': 'asyncio.subprocess.PIPE'}), '(cmd, stdout=asyncio.subprocess.PIPE, stderr\n =asyncio.subprocess.PIPE)\n', (749, 823), False, 'import asyncio\n'), ((1893, 1915), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1913, 1915), False, 'import traceback\n'), ((2539, 2657), 'LEGEND.tbot.send_file', 'bot.send_file', (['event.chat_id', 'out_file'], {'force_document': '(True)', 'allow_cache': '(False)', 'caption': 'cmd', 'reply_to': 'reply_to_id'}), '(event.chat_id, out_file, force_document=True, allow_cache=\n False, caption=cmd, reply_to=reply_to_id)\n', (2552, 2657), True, 'from LEGEND import tbot as bot\n')] |
# Status: Being ported by Steven Watanabe
# Base revision: 47077
#
# Copyright (c) 2005 <NAME>.
# Copyright 2006 <NAME>
# Copyright (c) 2008 <NAME>
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
##### Using Precompiled Headers (Quick Guide) #####
#
# Make precompiled mypch.hpp:
#
# import pch ;
#
# cpp-pch mypch
# : # sources
# mypch.hpp
#  : # requirements
# <toolset>msvc:<source>mypch.cpp
# ;
#
# Add cpp-pch to sources:
#
# exe hello
# : main.cpp hello.cpp mypch
# ;
from b2.build import type, feature, generators
from b2.tools import builtin
type.register('PCH', ['pch'])
type.register('C_PCH', [], 'PCH')
type.register('CPP_PCH', [], 'PCH')
# Control precompiled header (PCH) generation.
feature.feature('pch',
['on', 'off'],
['propagated'])
feature.feature('pch-header', [], ['free', 'dependency'])
feature.feature('pch-file', [], ['free', 'dependency'])
class PchGenerator(generators.Generator):
"""
Base PCH generator. The 'run' method has the logic to prevent this generator
from being run unless it's being used for a top-level PCH target.
"""
def action_class(self):
return builtin.CompileAction
def run(self, project, name, prop_set, sources):
if not name:
# Unless this generator is invoked as the top-most generator for a
# main target, fail. This allows using 'H' type as input type for
            # this generator, while preventing Boost.Build from trying this generator
# when not explicitly asked for.
#
# One bad example is msvc, where pch generator produces both PCH
# target and OBJ target, so if there's any header generated (like by
# bison, or by msidl), we'd try to use pch generator to get OBJ from
# that H, which is completely wrong. By restricting this generator
            # only to the pch main target, such a problem is solved.
pass
else:
r = self.run_pch(project, name,
prop_set.add_raw(['<define>BOOST_BUILD_PCH_ENABLED']),
sources)
return generators.add_usage_requirements(
r, ['<define>BOOST_BUILD_PCH_ENABLED'])
# This rule must be overridden by the derived classes.
def run_pch(self, project, name, prop_set, sources):
pass
# NOTE: requirements are empty, default pch generator can be applied when
# pch=off.
generators.register(builtin.DummyGenerator(
"pch.default-c-pch-generator", False, [], ['C_PCH'], []))
generators.register(builtin.DummyGenerator(
"pch.default-cpp-pch-generator", False, [], ['CPP_PCH'], []))
| [
"b2.build.generators.add_usage_requirements",
"b2.tools.builtin.DummyGenerator",
"b2.build.feature.feature",
"b2.build.type.register"
]
| [((728, 757), 'b2.build.type.register', 'type.register', (['"""PCH"""', "['pch']"], {}), "('PCH', ['pch'])\n", (741, 757), False, 'from b2.build import type, feature, generators\n'), ((758, 791), 'b2.build.type.register', 'type.register', (['"""C_PCH"""', '[]', '"""PCH"""'], {}), "('C_PCH', [], 'PCH')\n", (771, 791), False, 'from b2.build import type, feature, generators\n'), ((792, 827), 'b2.build.type.register', 'type.register', (['"""CPP_PCH"""', '[]', '"""PCH"""'], {}), "('CPP_PCH', [], 'PCH')\n", (805, 827), False, 'from b2.build import type, feature, generators\n'), ((876, 929), 'b2.build.feature.feature', 'feature.feature', (['"""pch"""', "['on', 'off']", "['propagated']"], {}), "('pch', ['on', 'off'], ['propagated'])\n", (891, 929), False, 'from b2.build import type, feature, generators\n'), ((963, 1020), 'b2.build.feature.feature', 'feature.feature', (['"""pch-header"""', '[]', "['free', 'dependency']"], {}), "('pch-header', [], ['free', 'dependency'])\n", (978, 1020), False, 'from b2.build import type, feature, generators\n'), ((1021, 1076), 'b2.build.feature.feature', 'feature.feature', (['"""pch-file"""', '[]', "['free', 'dependency']"], {}), "('pch-file', [], ['free', 'dependency'])\n", (1036, 1076), False, 'from b2.build import type, feature, generators\n'), ((2632, 2711), 'b2.tools.builtin.DummyGenerator', 'builtin.DummyGenerator', (['"""pch.default-c-pch-generator"""', '(False)', '[]', "['C_PCH']", '[]'], {}), "('pch.default-c-pch-generator', False, [], ['C_PCH'], [])\n", (2654, 2711), False, 'from b2.tools import builtin\n'), ((2738, 2826), 'b2.tools.builtin.DummyGenerator', 'builtin.DummyGenerator', (['"""pch.default-cpp-pch-generator"""', '(False)', '[]', "['CPP_PCH']", '[]'], {}), "('pch.default-cpp-pch-generator', False, [], [\n 'CPP_PCH'], [])\n", (2760, 2826), False, 'from b2.tools import builtin\n'), ((2305, 2378), 'b2.build.generators.add_usage_requirements', 'generators.add_usage_requirements', (['r', "['<define>BOOST_BUILD_PCH_ENABLED']"], {}), "(r, ['<define>BOOST_BUILD_PCH_ENABLED'])\n", (2338, 2378), False, 'from b2.build import type, feature, generators\n')] |