code
stringlengths 20
1.05M
| apis
sequence | extract_api
stringlengths 75
5.24M
|
---|---|---|
# -*- coding: utf-8 -*-
# Copyright 2016 <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
from io import StringIO
from flask import Flask, Request
__version__ = "0.2.0"
def get_nested(o, default, *args):
if o is None:
return default
current = o
for arg in args:
if current is None or arg is None or current.get(arg, None) is None:
return default
current = current.get(arg, default)
return current
def make_environ(event, context):
v = event["version"]
if v == "1.0":
return make_environ_v1(event, context)
elif v == "2.0":
return make_environ_v2(event, context)
else:
ValueError(f"invalid version {v}")
def make_environ_v1(event, context):
environ = {}
# key might be there but set to None
headers = event.get("headers", {}) or {}
for hdr_name, hdr_value in headers.items():
hdr_name = hdr_name.replace("-", "_").upper()
if hdr_name in ["CONTENT_TYPE", "CONTENT_LENGTH"]:
environ[hdr_name] = hdr_value
continue
http_hdr_name = "HTTP_{}".format(hdr_name)
environ[http_hdr_name] = hdr_value
qs = event.get("queryStringParameters", "")
environ["REQUEST_METHOD"] = event.get("httpMethod", "")
environ["PATH_INFO"] = event.get("path", "")
environ["QUERY_STRING"] = urlencode(qs) if qs else ""
environ["REMOTE_ADDR"] = get_nested(
event, "", "requestContext", "identity", "sourceIp"
)
environ["HOST"] = "{}:{}".format(
environ.get("HTTP_HOST", ""),
environ.get("HTTP_X_FORWARDED_PORT", ""),
)
environ["SCRIPT_NAME"] = ""
environ["SERVER_NAME"] = "SERVER_NAME"
environ["SERVER_PORT"] = environ.get("HTTP_X_FORWARDED_PORT", "")
environ["SERVER_PROTOCOL"] = "HTTP/1.1"
environ["CONTENT_LENGTH"] = str(len(event.get("body", "")))
environ["wsgi.url_scheme"] = environ.get("HTTP_X_FORWARDED_PROTO", "")
environ["wsgi.input"] = StringIO(event.get("body", ""))
environ["wsgi.version"] = (1, 0)
environ["wsgi.errors"] = sys.stderr
environ["wsgi.multithread"] = False
environ["wsgi.run_once"] = True
environ["wsgi.multiprocess"] = False
# store AWS input event and context in WSGI environment
environ["aws.event"] = event
environ["aws.context"] = context
return environ
def make_environ_v2(event, context):
environ = {}
# key might be there but set to None
headers = event.get("headers", {}) or {}
for hdr_name, hdr_value in headers.items():
hdr_name = hdr_name.replace("-", "_").upper()
if hdr_name in ["CONTENT_TYPE", "CONTENT_LENGTH"]:
environ[hdr_name] = hdr_value
continue
http_hdr_name = "HTTP_{}".format(hdr_name)
environ[http_hdr_name] = hdr_value
environ["REQUEST_METHOD"] = get_nested(
event, "", "requestContext", "http", "method"
)
environ["PATH_INFO"] = get_nested(event, "", "requestContext", "http", "path")
environ["QUERY_STRING"] = event.get("rawQueryString")
environ["REMOTE_ADDR"] = get_nested(event, "", "requestContext", "http", "sourceIp")
environ["HOST"] = "{}:{}".format(
environ.get("HTTP_HOST", ""),
environ.get("HTTP_X_FORWARDED_PORT", ""),
)
environ["SCRIPT_NAME"] = ""
environ["SERVER_NAME"] = "SERVER_NAME"
environ["SERVER_PORT"] = environ.get("HTTP_X_FORWARDED_PORT", "")
environ["SERVER_PROTOCOL"] = "HTTP/1.1"
environ["CONTENT_LENGTH"] = str(len(event.get("body", "")))
environ["wsgi.url_scheme"] = environ.get("HTTP_X_FORWARDED_PROTO", "")
environ["wsgi.input"] = StringIO(event.get("body", ""))
environ["wsgi.version"] = (1, 0)
environ["wsgi.errors"] = sys.stderr
environ["wsgi.multithread"] = False
environ["wsgi.run_once"] = True
environ["wsgi.multiprocess"] = False
# store AWS input event and context in WSGI environment
environ["aws.event"] = event
environ["aws.context"] = context
return environ
class LambdaRequest(Request):
@property
def aws_event(self):
return self.environ.get("aws.event")
@property
def aws_context(self):
return self.environ.get("aws.context")
class LambdaResponse:
def __init__(self):
self.status = None
self.response_headers = None
def start_response(self, status, response_headers, exc_info=None):
self.status = int(status[:3])
self.response_headers = dict(response_headers)
def is_lambda(event):
v1_method = event.get("httpMethod", "")
v2_method = get_nested(event, "", "requestContext", "http", "method")
return (v1_method != "") or (v2_method != "")
class FlaskLambda(Flask):
request_class = LambdaRequest
def __call__(self, event, context):
print("start request")
try:
if not is_lambda(event):
self.logger.debug("Called as regular Flask app")
# In this "context" `event` is `environ` and
# `context` is `start_response`, meaning the request didn't
# occur via API Gateway and Lambda
return super(FlaskLambda, self).__call__(event, context)
self.logger.debug("Called with AWS Lambda input event")
self.logger.debug("Event: %r", event)
response = LambdaResponse()
body = next(
self.wsgi_app(make_environ(event, context), response.start_response)
)
print("OK")
return {
"statusCode": response.status,
"headers": response.response_headers,
"body": body.decode("utf-8"),
}
except Exception:
self.logger.exception("An unexpected exception occured")
return {"statusCode": 500, "headers": {}, "body": "Internal Server Error"}
| [
"urllib.parse.urlencode"
] | [((2017, 2030), 'urllib.parse.urlencode', 'urlencode', (['qs'], {}), '(qs)\n', (2026, 2030), False, 'from urllib.parse import urlencode\n')] |
# -*- coding: utf-8 -*-
from abc import ABC
import torch.utils.data
from lichee import plugin
from lichee.utils.tfrecord.reader import read_single_record_with_spec_index
from .dataset_base import BaseDataset
@plugin.register_plugin(plugin.PluginType.DATA_LOADER, "dataset_mem")
class DatasetMem(torch.utils.data.Dataset, BaseDataset, ABC):
def __init__(self, cfg, data_file, desc_file, training=True):
super().__init__(cfg, data_file, desc_file, training)
def __getitem__(self, index):
"""
get transformed data with index
:param index: data index
:return: transformed data
"""
data_file_index, (start_offset, end_offset) = self.get_nth_data_file(index)
tfrecord_data_file = self.tfrecord_data_file_list[data_file_index]
row = read_single_record_with_spec_index(tfrecord_data_file, start_offset, end_offset, self.description)
return self.transform(row)
| [
"lichee.plugin.register_plugin",
"lichee.utils.tfrecord.reader.read_single_record_with_spec_index"
] | [((213, 281), 'lichee.plugin.register_plugin', 'plugin.register_plugin', (['plugin.PluginType.DATA_LOADER', '"""dataset_mem"""'], {}), "(plugin.PluginType.DATA_LOADER, 'dataset_mem')\n", (235, 281), False, 'from lichee import plugin\n'), ((812, 914), 'lichee.utils.tfrecord.reader.read_single_record_with_spec_index', 'read_single_record_with_spec_index', (['tfrecord_data_file', 'start_offset', 'end_offset', 'self.description'], {}), '(tfrecord_data_file, start_offset,\n end_offset, self.description)\n', (846, 914), False, 'from lichee.utils.tfrecord.reader import read_single_record_with_spec_index\n')] |
import socket
import logging
from logging.handlers import SysLogHandler
from st2common.runners.base_action import Action
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
class WriteSyslogAction(Action):
def run(self, message, facility, priority):
host = self.config['server']
port = self.config['port']
protocol = self.config['protocol']
syslog_proto = {
"tcp": socket.SOCK_STREAM,
"udp": socket.SOCK_DGRAM
}.get(protocol.lower(), "udp")
handler = SysLogHandler(
address=(host, port),
facility=SysLogHandler.facility_names.get(facility, "user"),
socktype=syslog_proto
)
formatter = logging.Formatter('%(module)s[%(process)d]: %(message)s')
handler.setFormatter(formatter)
handler.encodePriority(facility, priority)
log.addHandler(handler)
log.info(message)
| [
"logging.getLogger",
"logging.Formatter",
"logging.handlers.SysLogHandler.facility_names.get"
] | [((129, 156), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (146, 156), False, 'import logging\n'), ((730, 787), 'logging.Formatter', 'logging.Formatter', (['"""%(module)s[%(process)d]: %(message)s"""'], {}), "('%(module)s[%(process)d]: %(message)s')\n", (747, 787), False, 'import logging\n'), ((614, 664), 'logging.handlers.SysLogHandler.facility_names.get', 'SysLogHandler.facility_names.get', (['facility', '"""user"""'], {}), "(facility, 'user')\n", (646, 664), False, 'from logging.handlers import SysLogHandler\n')] |
from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack
class TestInitialMinStack:
def test_pushes_element(self):
stack = InitialMinStack()
stack.push(1)
assert stack.elements[-1] == 1
def test_pops_element(self):
stack = InitialMinStack()
stack.push(1)
stack.pop()
assert not stack.elements
def test_top_element_returns_last_element(self):
stack = InitialMinStack()
stack.push(1)
stack.push(2)
assert stack.top() == stack.elements[-1]
def test_returns_minimum_element(self):
stack = InitialMinStack()
stack.push(1)
stack.push(-3)
stack.push(3)
stack.push(-5)
stack.push(0)
assert stack.getMin() == -5
def test_handles_combination_of_operations(self):
stack = InitialMinStack();
stack.push(-2);
stack.push(0);
stack.push(-3);
assert stack.getMin() == -3
stack.pop();
assert stack.top() == 0
assert stack.getMin() == -2
class TestTupleMinStack:
def test_pushes_element(self):
stack = TupleMinStack()
stack.push(1)
assert stack.elements[-1][0] == 1
def test_pops_element(self):
stack = TupleMinStack()
stack.push(1)
stack.pop()
assert not stack.elements
def test_top_element_returns_last_element(self):
stack = TupleMinStack()
stack.push(1)
stack.push(2)
assert stack.top() == stack.elements[-1][0]
def test_returns_minimum_element(self):
stack = TupleMinStack()
stack.push(1)
stack.push(-3)
stack.push(3)
stack.push(-5)
stack.push(0)
assert stack.getMin() == -5
def test_handles_combination_of_operations(self):
stack = TupleMinStack();
stack.push(-2);
stack.push(0);
stack.push(-3);
assert stack.getMin() == -3
stack.pop();
assert stack.top() == 0
assert stack.getMin() == -2
| [
"leetcode.easy.ex0155.TupleMinStack",
"leetcode.easy.ex0155.InitialMinStack"
] | [((144, 161), 'leetcode.easy.ex0155.InitialMinStack', 'InitialMinStack', ([], {}), '()\n', (159, 161), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((275, 292), 'leetcode.easy.ex0155.InitialMinStack', 'InitialMinStack', ([], {}), '()\n', (290, 292), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((442, 459), 'leetcode.easy.ex0155.InitialMinStack', 'InitialMinStack', ([], {}), '()\n', (457, 459), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((616, 633), 'leetcode.easy.ex0155.InitialMinStack', 'InitialMinStack', ([], {}), '()\n', (631, 633), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((855, 872), 'leetcode.easy.ex0155.InitialMinStack', 'InitialMinStack', ([], {}), '()\n', (870, 872), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((1149, 1164), 'leetcode.easy.ex0155.TupleMinStack', 'TupleMinStack', ([], {}), '()\n', (1162, 1164), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((1281, 1296), 'leetcode.easy.ex0155.TupleMinStack', 'TupleMinStack', ([], {}), '()\n', (1294, 1296), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((1446, 1461), 'leetcode.easy.ex0155.TupleMinStack', 'TupleMinStack', ([], {}), '()\n', (1459, 1461), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((1621, 1636), 'leetcode.easy.ex0155.TupleMinStack', 'TupleMinStack', ([], {}), '()\n', (1634, 1636), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n'), ((1858, 1873), 'leetcode.easy.ex0155.TupleMinStack', 'TupleMinStack', ([], {}), '()\n', (1871, 1873), False, 'from leetcode.easy.ex0155 import InitialMinStack, TupleMinStack\n')] |
from enum import Enum
import re
from calculator.token_pkg.token_class import Token
from calculator.utils import is_num,is_parentheses,is_operator,sanitize_string,Types
from calculator.token_pkg import OPERATORS_STRING
#enum to avoid errors
class Notaions(Enum):
INFIX = 'INFIX'
POSTFIX = 'POSTFIX'
"""
Tokenizer class takes in an expression and can spit out with a list of token class
"""
class Tokenizer:
def __init__(self,exp,notation = Notaions.INFIX):
self.expression = exp
self.notation = notation
#uses below function as a property for ease
@property
def tokens(self):
return self.tokenize_expression()
"""
paremeter:takes an expression, takes in what no
returns: a list of character with the class token
"""
def tokenize_expression(self):
tokens = []
if self.notation == Notaions.INFIX:
tokens = self.split_infix_expression(self.expression)
elif self.notation == Notaions.POSTFIX:
tokens = self.split_postfix_expression(self.expression)
else:
print("Notation not supported yet")
return Tokenizer.construct_token_array(tokens)
"""
paremeter:takes a list of characters
returns: a list of character with the class token
"""
@staticmethod
def construct_token_array(tokens_expression):
tokens = []
for token in tokens_expression:
if is_operator(token):
tokens.append(Token(Types.OPERATOR,token))
elif is_num(token):
tokens.append(Token(Types.NUMBER,token))
elif token == '(':
tokens.append(Token(Types.LEFT_PAREN,token))
elif token == ')':
tokens.append(Token(Types.RIGHT_PAREN,token))
else:
tokens.append(Token(Types.UNKNOWN,token))
return tokens
#splits am array of postfix expression
@staticmethod
def split_postfix_expression(exp):
#finds operators,deciamals,numbers
res = re.findall(rf'[0-9\.]+|[{OPERATORS_STRING}]', exp)
n = len(res)
#case of first number being negative
if n>1 and res[0]== '-':
res[1] = '-' + res[1]
res.pop(0)
return res
#splits the array based of infix expression
@staticmethod
def split_infix_expression(exp):
#remove whitespace
exp = sanitize_string(exp)
#finds operators,deciamals,numbers
res = re.findall(rf'[0-9\.]+|[^0-9\ .]|[(){OPERATORS_STRING}]', exp)
n = len(res)
#case of first number being negative
if n>1 and res[0]== '-':
res[1] = '-' + res[1]
res.pop(0)
n =len(res)
#mimimum of 3 length to combine negatives
if n>3:
for i in range(n-2):
# handles case of a negative number right after a left parentheses
if res[i]=='(' and res[i+1] == '-' and is_num(res[i+2]):
res[i+2] = '-' + res[i+2]
res.pop(i+1)
# handles the case of operation and negative right after
if(is_operator(res[i]) and res[i+1] == '-' and is_num(res[i+2]) ):
res[i+2] = '-' + res[i+2]
res.pop(i+1)
return res | [
"calculator.utils.is_operator",
"calculator.token_pkg.token_class.Token",
"re.findall",
"calculator.utils.is_num",
"calculator.utils.sanitize_string"
] | [((2074, 2124), 're.findall', 're.findall', (['f"""[0-9\\\\.]+|[{OPERATORS_STRING}]"""', 'exp'], {}), "(f'[0-9\\\\.]+|[{OPERATORS_STRING}]', exp)\n", (2084, 2124), False, 'import re\n'), ((2452, 2472), 'calculator.utils.sanitize_string', 'sanitize_string', (['exp'], {}), '(exp)\n', (2467, 2472), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((2531, 2595), 're.findall', 're.findall', (['f"""[0-9\\\\.]+|[^0-9\\\\ .]|[(){OPERATORS_STRING}]"""', 'exp'], {}), "(f'[0-9\\\\.]+|[^0-9\\\\ .]|[(){OPERATORS_STRING}]', exp)\n", (2541, 2595), False, 'import re\n'), ((1462, 1480), 'calculator.utils.is_operator', 'is_operator', (['token'], {}), '(token)\n', (1473, 1480), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((1558, 1571), 'calculator.utils.is_num', 'is_num', (['token'], {}), '(token)\n', (1564, 1571), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((1512, 1540), 'calculator.token_pkg.token_class.Token', 'Token', (['Types.OPERATOR', 'token'], {}), '(Types.OPERATOR, token)\n', (1517, 1540), False, 'from calculator.token_pkg.token_class import Token\n'), ((3015, 3033), 'calculator.utils.is_num', 'is_num', (['res[i + 2]'], {}), '(res[i + 2])\n', (3021, 3033), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((3204, 3223), 'calculator.utils.is_operator', 'is_operator', (['res[i]'], {}), '(res[i])\n', (3215, 3223), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((3248, 3266), 'calculator.utils.is_num', 'is_num', (['res[i + 2]'], {}), '(res[i + 2])\n', (3254, 3266), False, 'from calculator.utils import is_num, is_parentheses, is_operator, sanitize_string, Types\n'), ((1603, 1629), 'calculator.token_pkg.token_class.Token', 'Token', (['Types.NUMBER', 'token'], {}), '(Types.NUMBER, token)\n', (1608, 1629), False, 'from calculator.token_pkg.token_class import Token\n'), ((1691, 1721), 'calculator.token_pkg.token_class.Token', 'Token', (['Types.LEFT_PAREN', 'token'], {}), '(Types.LEFT_PAREN, token)\n', (1696, 1721), False, 'from calculator.token_pkg.token_class import Token\n'), ((1783, 1814), 'calculator.token_pkg.token_class.Token', 'Token', (['Types.RIGHT_PAREN', 'token'], {}), '(Types.RIGHT_PAREN, token)\n', (1788, 1814), False, 'from calculator.token_pkg.token_class import Token\n'), ((1863, 1890), 'calculator.token_pkg.token_class.Token', 'Token', (['Types.UNKNOWN', 'token'], {}), '(Types.UNKNOWN, token)\n', (1868, 1890), False, 'from calculator.token_pkg.token_class import Token\n')] |
# Source: https://www.reddit.com/r/learnpython/comments/42dqv4/python_module_to_fetch_book_info_from_google_books/
import requests
import re
class gbooks():
def __init__(self):
self.earliest_year = 9999
self.earliest_entry = None
self.maturity_rating = None
self.categories = None
self.googleapikey="<KEY>"
def search(self, title, author):
search_value = title + author
parms = {"q":search_value, 'key':self.googleapikey}
r = requests.get(url="https://www.googleapis.com/books/v1/volumes", params=parms)
response = r.json()
self.retrieve_earliest_publication_date(response, title)
def retrieve_earliest_publication_date(self, search_results, title):
self.earliest_date = 9999
for current_result in search_results["items"]:
print(current_result["volumeInfo"])
year = repr(current_result["volumeInfo"]["publishedDate"])
matched_year = re.match(r'.*([1-3][0-9]{3})', year)
if 'title' in current_result["volumeInfo"].keys() and matched_year is not None:
if title in current_result["volumeInfo"]["title"]:
year = int(matched_year.group(1))
if not isinstance(year,int): year = 9999
if self.earliest_year > year:
self.earliest_year = year
self.earliest_entry = current_result
self.set_entry_metadata()
def set_entry_metadata(self):
if 'maturityRating' in self.earliest_entry["volumeInfo"].keys(): self.maturity_rating = self.earliest_entry["volumeInfo"]["maturityRating"]
if 'categories' in self.earliest_entry["volumeInfo"].keys(): self.categories = self.earliest_entry["volumeInfo"]["categories"]
if 'pageCount' in self.earliest_entry["volumeInfo"].keys(): self.pageCount = self.earliest_entry["volumeInfo"]["pageCount"]
if __name__ == "__main__":
bk = gbooks()
# bk.search("The Poetical Works of Addison; Gay's Fables; and Somerville's Chase / With Memoirs and Critical Dissertations, by the Rev. <NAME>")
bk.search("The First Men in the Moon", "<NAME>") | [
"re.match",
"requests.get"
] | [((510, 587), 'requests.get', 'requests.get', ([], {'url': '"""https://www.googleapis.com/books/v1/volumes"""', 'params': 'parms'}), "(url='https://www.googleapis.com/books/v1/volumes', params=parms)\n", (522, 587), False, 'import requests\n'), ((1008, 1043), 're.match', 're.match', (['""".*([1-3][0-9]{3})"""', 'year'], {}), "('.*([1-3][0-9]{3})', year)\n", (1016, 1043), False, 'import re\n')] |
import torch
import random
import torch.nn as nn
import torch.nn.functional as F
from utils.config import *
from torch.nn.parameter import Parameter
from utils.utils_general import _cuda
from utils.utils_general import sequence_mask
from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder
class DualAttentionDecoder(nn.Module):
def __init__(self, shared_emb, lang, embedding_dim, dropout):
super(DualAttentionDecoder, self).__init__()
self.num_vocab = lang.n_words
self.lang = lang
self.embedding_dim = embedding_dim
self.dropout = dropout
self.dropout_layer = nn.Dropout(dropout)
self.C = shared_emb
self.softmax = nn.Softmax(dim=1)
self.gru = nn.GRU(embedding_dim, embedding_dim)
self.relu = nn.ReLU()
self.projector = nn.Linear(4*embedding_dim, embedding_dim)
self.softmax = nn.Softmax(dim = 1)
self.knowledge_attention = Attention(embedding_dim, embedding_dim*2, embedding_dim, mode='mlp')
self.context_attention = Attention(embedding_dim, embedding_dim*2, embedding_dim, mode='mlp')
self.concat = nn.Linear(5*embedding_dim, embedding_dim)
self.entity_ranking = Attention(embedding_dim, embedding_dim*2, embedding_dim, mode='mlp', return_attn_only=True)
def forward(self, extKnow, extKnow_mask, context, context_mask, story_size, story_lengths, copy_list, encode_hidden, target_batches, max_target_length, schedule_sampling, get_decoded_words):
batch_size = len(copy_list)
# Initialize variables for vocab and pointer
all_decoder_outputs_vocab = _cuda(torch.zeros(max_target_length, batch_size, self.num_vocab))
all_decoder_outputs_ptr = _cuda(torch.zeros(max_target_length, batch_size, story_size[1]))
decoder_input = _cuda(torch.LongTensor([SOS_token] * batch_size))
memory_mask_for_step = _cuda(torch.ones(story_size[0], story_size[1]))
decoded_fine, decoded_coarse = [], []
hidden = self.relu(self.projector(encode_hidden)).unsqueeze(0)
# Start to generate word-by-word
for t in range(max_target_length):
rnn_input_list, concat_input_list = [], []
embed_q = self.dropout_layer(self.C(decoder_input)) # b * e
if len(embed_q.size()) == 1: embed_q = embed_q.unsqueeze(0)
rnn_input_list.append(embed_q)
rnn_input = torch.cat(rnn_input_list, dim=1)
_, hidden = self.gru(rnn_input.unsqueeze(0), hidden)
concat_input_list.append(hidden.squeeze(0))
#get knowledge attention
knowledge_outputs, _ = self.knowledge_attention(hidden.transpose(0,1), extKnow, mask=extKnow_mask, return_weights=True)
concat_input_list.append(knowledge_outputs.squeeze(1))
#get context attention
context_outputs = self.context_attention(hidden.transpose(0,1), context, mask=context_mask)
concat_input_list.append(context_outputs.squeeze(1))
#concat_input = torch.cat((hidden.squeeze(0), context_outputs.squeeze(1), knowledge_outputs.squeeze(1)), dim=1)
concat_input = torch.cat(concat_input_list, dim=1)
concat_output = torch.tanh(self.concat(concat_input))
p_vocab = self.attend_vocab(self.C.weight, concat_output)
p_entity = self.entity_ranking(concat_output.unsqueeze(1), extKnow, mask=extKnow_mask).squeeze(1)
prob_soft = self.softmax(p_entity)
all_decoder_outputs_vocab[t] = p_vocab
all_decoder_outputs_ptr[t] = p_entity
use_teacher_forcing = random.random() < schedule_sampling
if use_teacher_forcing:
decoder_input = target_batches[:,t]
else:
_, topvi = p_vocab.data.topk(1)
decoder_input = topvi.squeeze()
if get_decoded_words:
search_len = min(5, min(story_lengths))
prob_soft = prob_soft * memory_mask_for_step
_, toppi = prob_soft.data.topk(search_len)
temp_f, temp_c = [], []
for bi in range(batch_size):
token = topvi[bi].item() #topvi[:,0][bi].item()
temp_c.append(self.lang.index2word[token])
if '@' in self.lang.index2word[token]:
cw = 'UNK'
for i in range(search_len):
if toppi[:,i][bi] < story_lengths[bi]-1:
cw = copy_list[bi][toppi[:,i][bi].item()]
break
temp_f.append(cw)
if args['record']:
memory_mask_for_step[bi, toppi[:,i][bi].item()] = 0
else:
temp_f.append(self.lang.index2word[token])
decoded_fine.append(temp_f)
decoded_coarse.append(temp_c)
return all_decoder_outputs_vocab, all_decoder_outputs_ptr, decoded_fine, decoded_coarse
def attend_vocab(self, seq, cond):
scores_ = cond.matmul(seq.transpose(1,0))
# scores = F.softmax(scores_, dim=1)
return scores_
class AttrProxy(object):
"""
Translates index lookups into attribute lookups.
To implement some trick which able to use list of nn.Module in a nn.Module
see https://discuss.pytorch.org/t/list-of-nn-module-in-a-nn-module/219/2
"""
def __init__(self, module, prefix):
self.module = module
self.prefix = prefix
def __getitem__(self, i):
return getattr(self.module, self.prefix + str(i))
class ContextEncoder(nn.Module):
def __init__(self, vocab_size, embedding_dim, dropout):
super(ContextEncoder, self).__init__()
self.vocab_size = vocab_size
self.embedding_dim = embedding_dim
self.dropout = dropout
self.dropout_layer = nn.Dropout(dropout)
self.embedding = nn.Embedding(vocab_size, embedding_dim, padding_idx=PAD_token)
self.embedding.weight.data.normal_(0, 0.1)
#define two RNNEncoders and one HRNNEncoder
self.question_rnn1 = RNNEncoder(
input_size=embedding_dim,
hidden_size=embedding_dim * 2,
embedder=None,
num_layers=1,
bidirectional=True,
dropout=dropout)
self.question_rnn2 = RNNEncoder(
input_size=embedding_dim * 2,
hidden_size=embedding_dim * 2,
embedder=None,
num_layers=1,
bidirectional=False,
dropout=dropout)
self.hier_question_rnn = HRNNEncoder(self.question_rnn1, self.question_rnn2)
def forward(self, x2, x2_lengths, x2_mask):
x2_embed = self.embedding(x2.contiguous())
#add dropout
x2_embed = self.dropout_layer(x2_embed)
hiera_outputs, hiera_hidden, sub_outputs, sub_hidden, last_sub_outputs, last_sub_lengths = self.hier_question_rnn((x2_embed, x2_lengths), x2_mask)
# Get the question mask
question_len = x2_lengths.gt(0).long().sum(dim=1)
question_mask = torch.stack(
[x2_mask[b, l - 1] for b, l in enumerate(question_len)])
max_len = last_sub_lengths.max()
question_mask = question_mask[:, :max_len]
return x2_embed, sub_outputs, sub_hidden, hiera_outputs, hiera_hidden, last_sub_outputs, last_sub_lengths, question_mask
class KnowledgeEncoder(nn.Module):
def __init__(self, vocab_size, embedding_dim, relation_size, dropout, B):
super(KnowledgeEncoder, self).__init__()
#Embedding parameters
self.embedding_dim = embedding_dim
self.dropout = dropout
self.dropout_layer = nn.Dropout(dropout)
self.embedding = nn.Embedding(vocab_size, embedding_dim, padding_idx=PAD_token)
self.embedding.weight.data.normal_(0, 0.1)
self.relation_size = relation_size
self.relu = nn.ReLU()
self.B = B
#get C_i_1
self.question_attn1 = Attention(embedding_dim, embedding_dim, embedding_dim, mode='mlp')
self.dialog_flow1 = RNNEncoder(embedding_dim * 2 + 1, embedding_dim, embedder=None, num_layers=1, bidirectional=False)
self.gcn1 = GCNEncoder(embedding_dim, embedding_dim, self.relation_size, dropout, B=self.B)
#get C_i_2
self.question_attn2 = Attention(embedding_dim * 2, embedding_dim * 2, embedding_dim, mode='mlp')
self.dialog_flow2 = RNNEncoder(embedding_dim * 4, embedding_dim, embedder=None, num_layers=1, bidirectional=False)
self.gcn2 = GCNEncoder(embedding_dim, embedding_dim, self.relation_size, dropout, B=self.B)
#self-attention
self.entity_attention = SelfAttention(embedding_dim * 2, embedding_dim)
def graph_norm(self, graph):
graph = graph.to_dense()
batch_size = graph.size(0)
degree = torch.sum(graph, dim=-1, keepdim=True).clamp(min=1)
graph = graph / degree
return graph
def forward(self, x1, x1_f, x1_mask, x1_lengths, x2, x2_mask, x2_lengths, x2_embed, x2_outputs, x2_hidden, graph):
"""
x1 : [batch * len_k * MEM_TOKEN_SIZE]
x1_f : [batch * q_num * len_k * n_feat(1)]
x1_mask : [batch * len_k]
x1_lengths : [batch]
x2 : [batch * q_num * len_c * MEM_TOKEN_SIZE]
x2_mask : [batch * q_num * len_c]
x2_embed : [batch * q_num * len_c * h1]
x2_outputs : [batch * q_num * len_c * h]
x2_lengths : [batch * q_num]
"""
#print("x1 size:", x1.size())
#print("x1_f size:", x1_f.size())
#print("x1_mask size:", x1_mask.size())
#print("x2 size:", x2.size())
#print("x2_mask size:", x2_mask.size())
batch_size, len_k = x1.size(0), x1.size(1)
q_num, len_c = x2.size(1), x2.size(2)
def expansion_for_doc(z):
return z.unsqueeze(1).expand(z.size(0), q_num, z.size(1), z.size(2)).contiguous().view(-1, z.size(1), z.size(2))
#embedding
x1_embed = self.embedding(x1)
#add dropout
x1_embed = self.dropout_layer(x1_embed)
x1_embed_expand = expansion_for_doc(x1_embed) #(b * q_num) * len_k * em1
x1_mask_expand = x1_mask.unsqueeze(1).expand(x1.size(0), x2.size(1), x1.size(1)).contiguous().view(-1, x1_mask.size(-1)) #(b * q_num) * len_k
graph = self.graph_norm(graph)
graph_expand = graph.unsqueeze(1).expand(graph.size(0), q_num, graph.size(1), graph.size(2), graph.size(3))
graph_expand = graph_expand.contiguous().view(-1, graph.size(1), graph.size(2), graph.size(3))
x2_embed = x2_embed.contiguous().view(-1, x2_embed.size(-2), x2_embed.size(-1))
x2_mask = x2_mask.view(-1, x2_mask.size(-1))
#question Encoding
questions_hiddens = x2_outputs.view(batch_size * q_num, len_c, -1)
def flow_operation(cur_h, flow):
flow_in = cur_h.transpose(0, 1).view(len_k, batch_size, q_num, -1)
flow_in = flow_in.transpose(0, 2).contiguous().view(q_num, batch_size * len_k, -1).transpose(0, 1)
# [bsz * context_length, max_qa_pair, hidden_state]
flow_out,_ = flow(flow_in)
# [bsz * context_length, max_qa_pair, flow_hidden_state_dim (hidden_state/2)]
flow_out = flow_out.transpose(0, 1).view(q_num, batch_size, len_k, -1).transpose(0, 2).contiguous()
flow_out = flow_out.view(len_k, batch_size * q_num, -1).transpose(0, 1)
# [bsz * max_qa_pair, context_length, flow_hidden_state_dim]
return flow_out
#get C_i_1
x1_attn = self.question_attn1(x1_embed_expand, x2_embed, mask=x2_mask) #(b * q_num) * len_k * em2
C1_input = torch.cat([x1_embed_expand, x1_attn, x1_f.view(batch_size*q_num, len_k, 1)], dim=2) #(b * q_num) * len_k * (em1 + em2 + n_feat)
C1_1 = flow_operation(C1_input, self.dialog_flow1) #(b * q_num) * len_k * em
C1_2 = self.gcn1(C1_1, graph_expand)
#get C_i_2
x1_attn2 = self.question_attn2(torch.cat((C1_1, C1_2), dim=2), questions_hiddens, mask=x2_mask)
C2_input = torch.cat((C1_1, C1_2, x1_attn2), dim=2)
C2_1 = flow_operation(C2_input, self.dialog_flow2)
C2_2 = self.gcn2(C2_1, graph_expand)
C_final = torch.cat((C2_1, C2_2), dim=2)
C_final = C_final.contiguous().view(batch_size, q_num, len_k, -1)
#get the last question representation
qid = x2_lengths.gt(0).long().sum(dim=1)
outputs = torch.stack(
[C_final[b, l - 1] for b, l in enumerate(qid)]) #batch_size * len_k * h
hidden = self.entity_attention(outputs, x_mask = x1_mask).unsqueeze(1)
return outputs, hidden
| [
"torch.nn.ReLU",
"torch.nn.Dropout",
"models.layers.HRNNEncoder",
"models.layers.Attention",
"torch.nn.Softmax",
"torch.ones",
"models.layers.GCNEncoder",
"torch.LongTensor",
"torch.cat",
"random.random",
"torch.sum",
"torch.nn.Linear",
"models.layers.RNNEncoder",
"models.layers.SelfAttention",
"torch.zeros",
"torch.nn.Embedding",
"torch.nn.GRU"
] | [((647, 666), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (657, 666), True, 'import torch.nn as nn\n'), ((720, 737), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (730, 737), True, 'import torch.nn as nn\n'), ((757, 793), 'torch.nn.GRU', 'nn.GRU', (['embedding_dim', 'embedding_dim'], {}), '(embedding_dim, embedding_dim)\n', (763, 793), True, 'import torch.nn as nn\n'), ((814, 823), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (821, 823), True, 'import torch.nn as nn\n'), ((849, 892), 'torch.nn.Linear', 'nn.Linear', (['(4 * embedding_dim)', 'embedding_dim'], {}), '(4 * embedding_dim, embedding_dim)\n', (858, 892), True, 'import torch.nn as nn\n'), ((914, 931), 'torch.nn.Softmax', 'nn.Softmax', ([], {'dim': '(1)'}), '(dim=1)\n', (924, 931), True, 'import torch.nn as nn\n'), ((969, 1039), 'models.layers.Attention', 'Attention', (['embedding_dim', '(embedding_dim * 2)', 'embedding_dim'], {'mode': '"""mlp"""'}), "(embedding_dim, embedding_dim * 2, embedding_dim, mode='mlp')\n", (978, 1039), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((1071, 1141), 'models.layers.Attention', 'Attention', (['embedding_dim', '(embedding_dim * 2)', 'embedding_dim'], {'mode': '"""mlp"""'}), "(embedding_dim, embedding_dim * 2, embedding_dim, mode='mlp')\n", (1080, 1141), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((1162, 1205), 'torch.nn.Linear', 'nn.Linear', (['(5 * embedding_dim)', 'embedding_dim'], {}), '(5 * embedding_dim, embedding_dim)\n', (1171, 1205), True, 'import torch.nn as nn\n'), ((1234, 1331), 'models.layers.Attention', 'Attention', (['embedding_dim', '(embedding_dim * 2)', 'embedding_dim'], {'mode': '"""mlp"""', 'return_attn_only': '(True)'}), "(embedding_dim, embedding_dim * 2, embedding_dim, mode='mlp',\n return_attn_only=True)\n", (1243, 1331), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((6075, 6094), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (6085, 6094), True, 'import torch.nn as nn\n'), ((6120, 6182), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embedding_dim'], {'padding_idx': 'PAD_token'}), '(vocab_size, embedding_dim, padding_idx=PAD_token)\n', (6132, 6182), True, 'import torch.nn as nn\n'), ((6316, 6453), 'models.layers.RNNEncoder', 'RNNEncoder', ([], {'input_size': 'embedding_dim', 'hidden_size': '(embedding_dim * 2)', 'embedder': 'None', 'num_layers': '(1)', 'bidirectional': '(True)', 'dropout': 'dropout'}), '(input_size=embedding_dim, hidden_size=embedding_dim * 2,\n embedder=None, num_layers=1, bidirectional=True, dropout=dropout)\n', (6326, 6453), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((6576, 6718), 'models.layers.RNNEncoder', 'RNNEncoder', ([], {'input_size': '(embedding_dim * 2)', 'hidden_size': '(embedding_dim * 2)', 'embedder': 'None', 'num_layers': '(1)', 'bidirectional': '(False)', 'dropout': 'dropout'}), '(input_size=embedding_dim * 2, hidden_size=embedding_dim * 2,\n embedder=None, num_layers=1, bidirectional=False, dropout=dropout)\n', (6586, 6718), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((6845, 6896), 'models.layers.HRNNEncoder', 'HRNNEncoder', (['self.question_rnn1', 'self.question_rnn2'], {}), '(self.question_rnn1, self.question_rnn2)\n', (6856, 6896), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((7943, 7962), 'torch.nn.Dropout', 'nn.Dropout', (['dropout'], {}), '(dropout)\n', (7953, 7962), True, 'import torch.nn as nn\n'), ((7988, 8050), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embedding_dim'], {'padding_idx': 'PAD_token'}), '(vocab_size, embedding_dim, padding_idx=PAD_token)\n', (8000, 8050), True, 'import torch.nn as nn\n'), ((8165, 8174), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (8172, 8174), True, 'import torch.nn as nn\n'), ((8252, 8318), 'models.layers.Attention', 'Attention', (['embedding_dim', 'embedding_dim', 'embedding_dim'], {'mode': '"""mlp"""'}), "(embedding_dim, embedding_dim, embedding_dim, mode='mlp')\n", (8261, 8318), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8347, 8450), 'models.layers.RNNEncoder', 'RNNEncoder', (['(embedding_dim * 2 + 1)', 'embedding_dim'], {'embedder': 'None', 'num_layers': '(1)', 'bidirectional': '(False)'}), '(embedding_dim * 2 + 1, embedding_dim, embedder=None, num_layers=\n 1, bidirectional=False)\n', (8357, 8450), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8466, 8545), 'models.layers.GCNEncoder', 'GCNEncoder', (['embedding_dim', 'embedding_dim', 'self.relation_size', 'dropout'], {'B': 'self.B'}), '(embedding_dim, embedding_dim, self.relation_size, dropout, B=self.B)\n', (8476, 8545), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8596, 8670), 'models.layers.Attention', 'Attention', (['(embedding_dim * 2)', '(embedding_dim * 2)', 'embedding_dim'], {'mode': '"""mlp"""'}), "(embedding_dim * 2, embedding_dim * 2, embedding_dim, mode='mlp')\n", (8605, 8670), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8699, 8797), 'models.layers.RNNEncoder', 'RNNEncoder', (['(embedding_dim * 4)', 'embedding_dim'], {'embedder': 'None', 'num_layers': '(1)', 'bidirectional': '(False)'}), '(embedding_dim * 4, embedding_dim, embedder=None, num_layers=1,\n bidirectional=False)\n', (8709, 8797), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8814, 8893), 'models.layers.GCNEncoder', 'GCNEncoder', (['embedding_dim', 'embedding_dim', 'self.relation_size', 'dropout'], {'B': 'self.B'}), '(embedding_dim, embedding_dim, self.relation_size, dropout, B=self.B)\n', (8824, 8893), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((8951, 8998), 'models.layers.SelfAttention', 'SelfAttention', (['(embedding_dim * 2)', 'embedding_dim'], {}), '(embedding_dim * 2, embedding_dim)\n', (8964, 8998), False, 'from models.layers import SelfAttention, Attention, RNNEncoder, HRNNEncoder, GCNEncoder\n'), ((12488, 12528), 'torch.cat', 'torch.cat', (['(C1_1, C1_2, x1_attn2)'], {'dim': '(2)'}), '((C1_1, C1_2, x1_attn2), dim=2)\n', (12497, 12528), False, 'import torch\n'), ((12660, 12690), 'torch.cat', 'torch.cat', (['(C2_1, C2_2)'], {'dim': '(2)'}), '((C2_1, C2_2), dim=2)\n', (12669, 12690), False, 'import torch\n'), ((1654, 1712), 'torch.zeros', 'torch.zeros', (['max_target_length', 'batch_size', 'self.num_vocab'], {}), '(max_target_length, batch_size, self.num_vocab)\n', (1665, 1712), False, 'import torch\n'), ((1754, 1811), 'torch.zeros', 'torch.zeros', (['max_target_length', 'batch_size', 'story_size[1]'], {}), '(max_target_length, batch_size, story_size[1])\n', (1765, 1811), False, 'import torch\n'), ((1843, 1885), 'torch.LongTensor', 'torch.LongTensor', (['([SOS_token] * batch_size)'], {}), '([SOS_token] * batch_size)\n', (1859, 1885), False, 'import torch\n'), ((1924, 1964), 'torch.ones', 'torch.ones', (['story_size[0]', 'story_size[1]'], {}), '(story_size[0], story_size[1])\n', (1934, 1964), False, 'import torch\n'), ((2453, 2485), 'torch.cat', 'torch.cat', (['rnn_input_list'], {'dim': '(1)'}), '(rnn_input_list, dim=1)\n', (2462, 2485), False, 'import torch\n'), ((3201, 3236), 'torch.cat', 'torch.cat', (['concat_input_list'], {'dim': '(1)'}), '(concat_input_list, dim=1)\n', (3210, 3236), False, 'import torch\n'), ((12404, 12434), 'torch.cat', 'torch.cat', (['(C1_1, C1_2)'], {'dim': '(2)'}), '((C1_1, C1_2), dim=2)\n', (12413, 12434), False, 'import torch\n'), ((3669, 3684), 'random.random', 'random.random', ([], {}), '()\n', (3682, 3684), False, 'import random\n'), ((9127, 9165), 'torch.sum', 'torch.sum', (['graph'], {'dim': '(-1)', 'keepdim': '(True)'}), '(graph, dim=-1, keepdim=True)\n', (9136, 9165), False, 'import torch\n')] |
from flask import Blueprint
import __blogsley__
bp = Blueprint('root', __name__, template_folder=__blogsley__.static_folder)
from .routes import *
| [
"flask.Blueprint"
] | [((55, 126), 'flask.Blueprint', 'Blueprint', (['"""root"""', '__name__'], {'template_folder': '__blogsley__.static_folder'}), "('root', __name__, template_folder=__blogsley__.static_folder)\n", (64, 126), False, 'from flask import Blueprint\n')] |
"""
The Fibonacci class has functions for dealing with Fibonacci numbers, and
Fibonacci sequences. For an explanation of Fibonacci numbers and Fibonacci
sequences, see https://en.wikipedia.org/wiki/Fibonacci_number.
"""
from typing import Generator
# Custom imports
from services.expected_fibonacci_numbers import ExpectedFibonacciNumbers
class Fibonacci:
expected_fibonacci_sequence = ExpectedFibonacciNumbers(
).get_expected_fibonacci_sequence(301)
# Cache of known Fibonacci numbers
known_cache = {
0: expected_fibonacci_sequence[0],
1: expected_fibonacci_sequence[1]
}
def get_fibonacci_number(self, n: int) -> int:
"""Recursively generate the nth Fibonacci number"""
if not isinstance(n, int) or n < 0:
raise ValueError("n must be a non-negative integer")
if n in self.known_cache:
return self.known_cache[n]
# Without caching known Fibonacci numbers like this, this function
# will generate a "maximum recursion depth exceeded" error
# (when for sufficiently large Fibonacci numbers).
# That's because Python doesn't do tail recursion elimination.
self.known_cache[n] = self.get_fibonacci_number(
n - 1) + self.get_fibonacci_number(n - 2)
return self.known_cache[n]
def generate_fibonacci_sequence(
self, sequence_length: int) -> Generator[int, None, None]:
if not isinstance(sequence_length, int) or sequence_length < 1:
raise ValueError("sequence_length must be a positive integer")
return (self.get_fibonacci_number(n) for n in range(sequence_length))
| [
"services.expected_fibonacci_numbers.ExpectedFibonacciNumbers"
] | [((394, 420), 'services.expected_fibonacci_numbers.ExpectedFibonacciNumbers', 'ExpectedFibonacciNumbers', ([], {}), '()\n', (418, 420), False, 'from services.expected_fibonacci_numbers import ExpectedFibonacciNumbers\n')] |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import os
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
from tqdm import tqdm
import numpy as np
from utils import *
os.environ["CUDA_VISIBLE_DEVICES"]="0"
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model_type = 'mobilenet_v2_torchhub' # 'mobilenet_v1' 'mobilenet_v2' 'mobilenet_v2_torchhub'
pretrained = True # load imagenet weight (only for 'mobilenet_v2_torchhub')
checkpoint_dir = './experiments/pretrained_mobilenet_v2_best/'
checkpoint = checkpoint_dir + '/checkpoint_best.pt'
input_size = 224
n_classes = 120
batch_size = 8
def run_test():
model = create_model(model_type=model_type, pretrained=pretrained, n_classes=n_classes,
input_size=input_size, checkpoint=checkpoint)
model = model.to(device)
print(model)
for name, weight in model.named_parameters():
print(name, weight.max().item(), weight.min().item())
if __name__ == '__main__':
run_test()
| [
"torch.cuda.is_available"
] | [((294, 319), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (317, 319), False, 'import torch\n')] |
#! /usr/bin/env python
import littlebbl
import six
import pytest
from utils.exceptions import *
def test_unicode_returned():
k = littlebbl.resolve('20', 'east 68th street', '3B', 'MH')
assert isinstance(k[0], six.text_type)
assert isinstance(k[1], six.text_type)
assert isinstance(k[2], six.text_type)
def test_borough_codes():
assert littlebbl.get_borough_number('MH') == '1'
assert littlebbl.get_borough_number('BX') == '2'
assert littlebbl.get_borough_number('BK') == '3'
assert littlebbl.get_borough_number('QN') == '4'
assert littlebbl.get_borough_number('SI') == '5'
def test_borough_exception():
with pytest.raises(InvalidBoroughException):
littlebbl.resolve('20', 'east 68th street', '3B', 'NO')
| [
"littlebbl.get_borough_number",
"littlebbl.resolve",
"pytest.raises"
] | [((137, 192), 'littlebbl.resolve', 'littlebbl.resolve', (['"""20"""', '"""east 68th street"""', '"""3B"""', '"""MH"""'], {}), "('20', 'east 68th street', '3B', 'MH')\n", (154, 192), False, 'import littlebbl\n'), ((361, 395), 'littlebbl.get_borough_number', 'littlebbl.get_borough_number', (['"""MH"""'], {}), "('MH')\n", (389, 395), False, 'import littlebbl\n'), ((414, 448), 'littlebbl.get_borough_number', 'littlebbl.get_borough_number', (['"""BX"""'], {}), "('BX')\n", (442, 448), False, 'import littlebbl\n'), ((467, 501), 'littlebbl.get_borough_number', 'littlebbl.get_borough_number', (['"""BK"""'], {}), "('BK')\n", (495, 501), False, 'import littlebbl\n'), ((520, 554), 'littlebbl.get_borough_number', 'littlebbl.get_borough_number', (['"""QN"""'], {}), "('QN')\n", (548, 554), False, 'import littlebbl\n'), ((573, 607), 'littlebbl.get_borough_number', 'littlebbl.get_borough_number', (['"""SI"""'], {}), "('SI')\n", (601, 607), False, 'import littlebbl\n'), ((656, 694), 'pytest.raises', 'pytest.raises', (['InvalidBoroughException'], {}), '(InvalidBoroughException)\n', (669, 694), False, 'import pytest\n'), ((704, 759), 'littlebbl.resolve', 'littlebbl.resolve', (['"""20"""', '"""east 68th street"""', '"""3B"""', '"""NO"""'], {}), "('20', 'east 68th street', '3B', 'NO')\n", (721, 759), False, 'import littlebbl\n')] |
import kshingle as ks
def test1():
shingles = ks.shingleseqs_k("", 1)
assert shingles == [[]]
shingles = ks.shingleseqs_k(" ", 1)
assert shingles == [[" "]]
shingles = ks.shingleseqs_k(" ", 2)
assert shingles == [[" "], []]
def test2():
shingles = ks.shingleseqs_k(" ", 0)
assert shingles == []
shingles = ks.shingleseqs_k(" ", -1)
assert shingles == []
def test3():
shingles = ks.shingleseqs_k("12345", 0)
assert shingles == []
shingles = ks.shingleseqs_k("12345", 1)
assert shingles == [['1', '2', '3', '4', '5']]
shingles = ks.shingleseqs_k("12345", 2)
assert shingles == [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45']]
shingles = ks.shingleseqs_k("12345", 3)
assert shingles == [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45'],
['123', '234', '345']]
shingles = ks.shingleseqs_k("12345", 4)
assert shingles == [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45'],
['123', '234', '345'],
['1234', '2345']]
shingles = ks.shingleseqs_k("12345", 5)
assert shingles == [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45'],
['123', '234', '345'],
['1234', '2345'],
['12345']]
shingles = ks.shingleseqs_k("12345", 6)
assert shingles == [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45'],
['123', '234', '345'],
['1234', '2345'],
['12345'], []]
def test11():
shingles = ks.shingleseqs_range("", 0, 1)
assert shingles == [[]]
shingles = ks.shingleseqs_range(" ", 0, 1)
assert shingles == [[" "]]
shingles = ks.shingleseqs_range(" ", 0, 2)
assert shingles == [[" "], []]
shingles = ks.shingleseqs_range("", 1, 1)
assert shingles == [[]]
shingles = ks.shingleseqs_range(" ", 1, 1)
assert shingles == [[" "]]
shingles = ks.shingleseqs_range(" ", 1, 2)
assert shingles == [[" "], []]
def test12():
shingles = ks.shingleseqs_range("", -10, -10)
assert shingles == []
shingles = ks.shingleseqs_range("", 0, 0)
assert shingles == []
shingles = ks.shingleseqs_range("", 4, 1)
assert shingles == []
def test31():
shingles = ks.shingleseqs_list("", [1])
assert shingles == [[]]
shingles = ks.shingleseqs_list(" ", [1])
assert shingles == [[" "]]
shingles = ks.shingleseqs_list(" ", [1, 2])
assert shingles == [[" "], []]
def test32():
shingles = ks.shingleseqs_list(" ", [0])
assert shingles == []
shingles = ks.shingleseqs_list(" ", [-1])
assert shingles == []
def test33():
shingles = ks.shingleseqs_list("12345", [5, 6])
assert shingles == [['12345'], []]
shingles = ks.shingleseqs_list("12345", [0, 3, 6])
assert shingles == [['123', '234', '345'], []] # this is a problem!
def test41():
seqs = ks.shingleseqs_k(
"12345", k=6, padding='center', placeholder='x', evenpad='pre')
target = [
['1', '2', '3', '4', '5'],
['x', '12', '23', '34', '45'],
['x', '123', '234', '345', 'x'],
['x', 'x', '1234', '2345', 'x'],
['x', 'x', '12345', 'x', 'x'],
['x', 'x', 'x', 'x', 'x']]
assert seqs == target
def test42():
seqs = ks.shingleseqs_k(
"12345", k=6, padding='center', placeholder='x', evenpad='post')
target = [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45', 'x'],
['x', '123', '234', '345', 'x'],
['x', '1234', '2345', 'x', 'x'],
['x', 'x', '12345', 'x', 'x'],
['x', 'x', 'x', 'x', 'x']]
assert seqs == target
def test43():
seqs = ks.shingleseqs_k(
"12345", k=6, padding='pre', placeholder='x')
target = [
['1', '2', '3', '4', '5'],
['x', '12', '23', '34', '45'],
['x', 'x', '123', '234', '345'],
['x', 'x', 'x', '1234', '2345'],
['x', 'x', 'x', 'x', '12345'],
['x', 'x', 'x', 'x', 'x']]
assert seqs == target
def test44():
seqs = ks.shingleseqs_k(
"12345", k=6, padding='post', placeholder='x')
target = [
['1', '2', '3', '4', '5'],
['12', '23', '34', '45', 'x'],
['123', '234', '345', 'x', 'x'],
['1234', '2345', 'x', 'x', 'x'],
['12345', 'x', 'x', 'x', 'x'],
['x', 'x', 'x', 'x', 'x']]
assert seqs == target
def test45():
seqs = ks.shingleseqs_range(
"12345", n_min=2, n_max=4, padding='center',
placeholder='x', evenpad='pre')
target = [
['x', '12', '23', '34', '45'],
['x', '123', '234', '345', 'x'],
['x', 'x', '1234', '2345', 'x']]
assert seqs == target
def test46():
seqs = ks.shingleseqs_range(
"12345", n_min=2, n_max=4, padding='center',
placeholder='x', evenpad='post')
target = [
['12', '23', '34', '45', 'x'],
['x', '123', '234', '345', 'x'],
['x', '1234', '2345', 'x', 'x']]
assert seqs == target
def test47():
seqs = ks.shingleseqs_range(
"12345", n_min=2, n_max=4, padding='pre', placeholder='x')
target = [
['x', '12', '23', '34', '45'],
['x', 'x', '123', '234', '345'],
['x', 'x', 'x', '1234', '2345']]
assert seqs == target
def test48():
seqs = ks.shingleseqs_range(
"12345", n_min=2, n_max=4, padding='post', placeholder='x')
target = [
['12', '23', '34', '45', 'x'],
['123', '234', '345', 'x', 'x'],
['1234', '2345', 'x', 'x', 'x']]
assert seqs == target
def test49():
seqs = ks.shingleseqs_list(
"12345", klist=[2, 5], padding='center',
placeholder='x', evenpad='pre')
target = [
['x', '12', '23', '34', '45'],
['x', 'x', '12345', 'x', 'x']]
assert seqs == target
def test50():
seqs = ks.shingleseqs_list(
"12345", klist=[2, 5], padding='center',
placeholder='x', evenpad='post')
target = [
['12', '23', '34', '45', 'x'],
['x', 'x', '12345', 'x', 'x']]
assert seqs == target
def test51():
seqs = ks.shingleseqs_list(
"12345", klist=[2, 5], padding='pre', placeholder='x')
target = [
['x', '12', '23', '34', '45'],
['x', 'x', 'x', 'x', '12345']]
assert seqs == target
def test52():
seqs = ks.shingleseqs_list(
"12345", klist=[2, 5], padding='post', placeholder='x')
target = [
['12', '23', '34', '45', 'x'],
['12345', 'x', 'x', 'x', 'x']]
assert seqs == target
| [
"kshingle.shingleseqs_k",
"kshingle.shingleseqs_range",
"kshingle.shingleseqs_list"
] | [((52, 75), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['""""""', '(1)'], {}), "('', 1)\n", (68, 75), True, 'import kshingle as ks\n'), ((120, 144), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['""" """', '(1)'], {}), "(' ', 1)\n", (136, 144), True, 'import kshingle as ks\n'), ((192, 216), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['""" """', '(2)'], {}), "(' ', 2)\n", (208, 216), True, 'import kshingle as ks\n'), ((282, 306), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['""" """', '(0)'], {}), "(' ', 0)\n", (298, 306), True, 'import kshingle as ks\n'), ((349, 374), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['""" """', '(-1)'], {}), "(' ', -1)\n", (365, 374), True, 'import kshingle as ks\n'), ((431, 459), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(0)'], {}), "('12345', 0)\n", (447, 459), True, 'import kshingle as ks\n'), ((502, 530), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(1)'], {}), "('12345', 1)\n", (518, 530), True, 'import kshingle as ks\n'), ((598, 626), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(2)'], {}), "('12345', 2)\n", (614, 626), True, 'import kshingle as ks\n'), ((737, 765), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(3)'], {}), "('12345', 3)\n", (753, 765), True, 'import kshingle as ks\n'), ((907, 935), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(4)'], {}), "('12345', 4)\n", (923, 935), True, 'import kshingle as ks\n'), ((1103, 1131), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(5)'], {}), "('12345', 5)\n", (1119, 1131), True, 'import kshingle as ks\n'), ((1318, 1346), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""', '(6)'], {}), "('12345', 6)\n", (1334, 1346), True, 'import kshingle as ks\n'), ((1552, 1582), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""""""', '(0)', '(1)'], {}), "('', 0, 1)\n", (1572, 1582), True, 'import kshingle as ks\n'), ((1627, 1658), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""" """', '(0)', '(1)'], {}), "(' ', 0, 1)\n", (1647, 1658), True, 'import kshingle as ks\n'), ((1706, 1737), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""" """', '(0)', '(2)'], {}), "(' ', 0, 2)\n", (1726, 1737), True, 'import kshingle as ks\n'), ((1789, 1819), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""""""', '(1)', '(1)'], {}), "('', 1, 1)\n", (1809, 1819), True, 'import kshingle as ks\n'), ((1864, 1895), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""" """', '(1)', '(1)'], {}), "(' ', 1, 1)\n", (1884, 1895), True, 'import kshingle as ks\n'), ((1943, 1974), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""" """', '(1)', '(2)'], {}), "(' ', 1, 2)\n", (1963, 1974), True, 'import kshingle as ks\n'), ((2041, 2075), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""""""', '(-10)', '(-10)'], {}), "('', -10, -10)\n", (2061, 2075), True, 'import kshingle as ks\n'), ((2118, 2148), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""""""', '(0)', '(0)'], {}), "('', 0, 0)\n", (2138, 2148), True, 'import kshingle as ks\n'), ((2191, 2221), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['""""""', '(4)', '(1)'], {}), "('', 4, 1)\n", (2211, 2221), True, 'import kshingle as ks\n'), ((2279, 2307), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['""""""', '[1]'], {}), "('', [1])\n", (2298, 2307), True, 'import kshingle as ks\n'), ((2352, 2381), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['""" """', '[1]'], {}), "(' ', [1])\n", (2371, 2381), True, 'import kshingle as ks\n'), ((2429, 2461), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['""" """', '[1, 2]'], {}), "(' ', [1, 2])\n", (2448, 2461), True, 'import kshingle as ks\n'), ((2528, 2557), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['""" """', '[0]'], {}), "(' ', [0])\n", (2547, 2557), True, 'import kshingle as ks\n'), ((2600, 2630), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['""" """', '[-1]'], {}), "(' ', [-1])\n", (2619, 2630), True, 'import kshingle as ks\n'), ((2688, 2724), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""', '[5, 6]'], {}), "('12345', [5, 6])\n", (2707, 2724), True, 'import kshingle as ks\n'), ((2780, 2819), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""', '[0, 3, 6]'], {}), "('12345', [0, 3, 6])\n", (2799, 2819), True, 'import kshingle as ks\n'), ((2920, 3005), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""'], {'k': '(6)', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""pre"""'}), "('12345', k=6, padding='center', placeholder='x', evenpad='pre'\n )\n", (2936, 3005), True, 'import kshingle as ks\n'), ((3308, 3394), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""'], {'k': '(6)', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""post"""'}), "('12345', k=6, padding='center', placeholder='x', evenpad=\n 'post')\n", (3324, 3394), True, 'import kshingle as ks\n'), ((3697, 3759), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""'], {'k': '(6)', 'padding': '"""pre"""', 'placeholder': '"""x"""'}), "('12345', k=6, padding='pre', placeholder='x')\n", (3713, 3759), True, 'import kshingle as ks\n'), ((4067, 4130), 'kshingle.shingleseqs_k', 'ks.shingleseqs_k', (['"""12345"""'], {'k': '(6)', 'padding': '"""post"""', 'placeholder': '"""x"""'}), "('12345', k=6, padding='post', placeholder='x')\n", (4083, 4130), True, 'import kshingle as ks\n'), ((4438, 4539), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['"""12345"""'], {'n_min': '(2)', 'n_max': '(4)', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""pre"""'}), "('12345', n_min=2, n_max=4, padding='center',\n placeholder='x', evenpad='pre')\n", (4458, 4539), True, 'import kshingle as ks\n'), ((4742, 4844), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['"""12345"""'], {'n_min': '(2)', 'n_max': '(4)', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""post"""'}), "('12345', n_min=2, n_max=4, padding='center',\n placeholder='x', evenpad='post')\n", (4762, 4844), True, 'import kshingle as ks\n'), ((5047, 5126), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['"""12345"""'], {'n_min': '(2)', 'n_max': '(4)', 'padding': '"""pre"""', 'placeholder': '"""x"""'}), "('12345', n_min=2, n_max=4, padding='pre', placeholder='x')\n", (5067, 5126), True, 'import kshingle as ks\n'), ((5325, 5410), 'kshingle.shingleseqs_range', 'ks.shingleseqs_range', (['"""12345"""'], {'n_min': '(2)', 'n_max': '(4)', 'padding': '"""post"""', 'placeholder': '"""x"""'}), "('12345', n_min=2, n_max=4, padding='post', placeholder='x'\n )\n", (5345, 5410), True, 'import kshingle as ks\n'), ((5604, 5701), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""'], {'klist': '[2, 5]', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""pre"""'}), "('12345', klist=[2, 5], padding='center', placeholder=\n 'x', evenpad='pre')\n", (5623, 5701), True, 'import kshingle as ks\n'), ((5860, 5958), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""'], {'klist': '[2, 5]', 'padding': '"""center"""', 'placeholder': '"""x"""', 'evenpad': '"""post"""'}), "('12345', klist=[2, 5], padding='center', placeholder=\n 'x', evenpad='post')\n", (5879, 5958), True, 'import kshingle as ks\n'), ((6117, 6191), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""'], {'klist': '[2, 5]', 'padding': '"""pre"""', 'placeholder': '"""x"""'}), "('12345', klist=[2, 5], padding='pre', placeholder='x')\n", (6136, 6191), True, 'import kshingle as ks\n'), ((6347, 6422), 'kshingle.shingleseqs_list', 'ks.shingleseqs_list', (['"""12345"""'], {'klist': '[2, 5]', 'padding': '"""post"""', 'placeholder': '"""x"""'}), "('12345', klist=[2, 5], padding='post', placeholder='x')\n", (6366, 6422), True, 'import kshingle as ks\n')] |
#! /usr/bin/env python3
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The LSTM baseline."""
import numpy as np
import torch
import torch.nn as nn
import jacinle.random as random
from difflogic.nn.neural_logic.modules._utils import meshgrid
from jactorch.functional.shape import broadcast
__all__ = ['LSTMBaseline']
class LSTMBaseline(nn.Module):
"""LSTM baseline model."""
def __init__(self,
input_dim,
feature_dim,
num_layers=2,
hidden_size=512,
code_length=8):
super().__init__()
current_dim = input_dim + code_length * 2
self.feature_dim = feature_dim
assert feature_dim == 1 or feature_dim == 2, ('only support attributes or '
'relations')
self.num_layers = num_layers
self.hidden_size = hidden_size
self.code_length = code_length
self.lstm = nn.LSTM(
current_dim,
hidden_size,
num_layers,
batch_first=True,
bidirectional=True)
def forward(self, relations, attributes=None):
batch_size, nr = relations.size()[:2]
assert nr == relations.size(2)
id_shape = list(relations.size()[:-1])
ids = [
random.permutation(2**self.code_length - 1)[:nr] + 1
for i in range(batch_size)
]
ids = np.vstack(ids)
binary_ids = self.binarize_code(ids)
zeros = torch.tensor(
np.zeros(binary_ids.shape),
dtype=relations.dtype,
device=relations.device)
binary_ids = torch.tensor(
binary_ids, dtype=relations.dtype, device=relations.device)
binary_ids2 = torch.cat(meshgrid(binary_ids, dim=1), dim=-1)
if attributes is None:
rels = [binary_ids2, relations]
else:
padding = torch.zeros(
*binary_ids2.size()[:-1],
attributes.size(-1),
dtype=relations.dtype,
device=relations.device)
rels = [binary_ids2, padding, relations]
rels = torch.cat(rels, dim=-1)
input_seq = rels.view(batch_size, -1, rels.size(-1))
if attributes is not None:
assert nr == attributes.size(1)
padding = torch.zeros(
*binary_ids.size()[:-1],
relations.size(-1),
dtype=relations.dtype,
device=relations.device)
attributes = torch.cat([binary_ids, zeros, attributes, padding], dim=-1)
input_seq = torch.cat([input_seq, attributes], dim=1)
h0 = torch.zeros(
self.num_layers * 2,
batch_size,
self.hidden_size,
dtype=relations.dtype,
device=relations.device)
c0 = torch.zeros(
self.num_layers * 2,
batch_size,
self.hidden_size,
dtype=relations.dtype,
device=relations.device)
out, _ = self.lstm(input_seq, (h0, c0))
out = out[:, -1]
if self.feature_dim == 1:
expanded_feature = broadcast(out.unsqueeze(dim=1), 1, nr)
return torch.cat([binary_ids, expanded_feature], dim=-1)
else:
expanded_feature = broadcast(out.unsqueeze(dim=1), 1, nr)
expanded_feature = broadcast(expanded_feature.unsqueeze(dim=1), 1, nr)
return torch.cat([binary_ids2, expanded_feature], dim=-1)
def binarize_code(self, x):
m = self.code_length
code = np.zeros((x.shape + (m,)))
for i in range(m)[::-1]:
code[:, :, i] = (x >= 2**i).astype('float')
x = x - code[:, :, i] * 2**i
return code
def get_output_dim(self):
return self.hidden_size * 2 + self.code_length * self.feature_dim
| [
"torch.nn.LSTM",
"torch.tensor",
"numpy.zeros",
"difflogic.nn.neural_logic.modules._utils.meshgrid",
"numpy.vstack",
"jacinle.random.permutation",
"torch.zeros",
"torch.cat"
] | [((1447, 1534), 'torch.nn.LSTM', 'nn.LSTM', (['current_dim', 'hidden_size', 'num_layers'], {'batch_first': '(True)', 'bidirectional': '(True)'}), '(current_dim, hidden_size, num_layers, batch_first=True,\n bidirectional=True)\n', (1454, 1534), True, 'import torch.nn as nn\n'), ((1867, 1881), 'numpy.vstack', 'np.vstack', (['ids'], {}), '(ids)\n', (1876, 1881), True, 'import numpy as np\n'), ((2066, 2138), 'torch.tensor', 'torch.tensor', (['binary_ids'], {'dtype': 'relations.dtype', 'device': 'relations.device'}), '(binary_ids, dtype=relations.dtype, device=relations.device)\n', (2078, 2138), False, 'import torch\n'), ((2511, 2534), 'torch.cat', 'torch.cat', (['rels'], {'dim': '(-1)'}), '(rels, dim=-1)\n', (2520, 2534), False, 'import torch\n'), ((2972, 3087), 'torch.zeros', 'torch.zeros', (['(self.num_layers * 2)', 'batch_size', 'self.hidden_size'], {'dtype': 'relations.dtype', 'device': 'relations.device'}), '(self.num_layers * 2, batch_size, self.hidden_size, dtype=\n relations.dtype, device=relations.device)\n', (2983, 3087), False, 'import torch\n'), ((3133, 3248), 'torch.zeros', 'torch.zeros', (['(self.num_layers * 2)', 'batch_size', 'self.hidden_size'], {'dtype': 'relations.dtype', 'device': 'relations.device'}), '(self.num_layers * 2, batch_size, self.hidden_size, dtype=\n relations.dtype, device=relations.device)\n', (3144, 3248), False, 'import torch\n'), ((3790, 3814), 'numpy.zeros', 'np.zeros', (['(x.shape + (m,))'], {}), '(x.shape + (m,))\n', (3798, 3814), True, 'import numpy as np\n'), ((1957, 1983), 'numpy.zeros', 'np.zeros', (['binary_ids.shape'], {}), '(binary_ids.shape)\n', (1965, 1983), True, 'import numpy as np\n'), ((2176, 2203), 'difflogic.nn.neural_logic.modules._utils.meshgrid', 'meshgrid', (['binary_ids'], {'dim': '(1)'}), '(binary_ids, dim=1)\n', (2184, 2203), False, 'from difflogic.nn.neural_logic.modules._utils import meshgrid\n'), ((2842, 2901), 'torch.cat', 'torch.cat', (['[binary_ids, zeros, attributes, padding]'], {'dim': '(-1)'}), '([binary_ids, zeros, attributes, padding], dim=-1)\n', (2851, 2901), False, 'import torch\n'), ((2920, 2961), 'torch.cat', 'torch.cat', (['[input_seq, attributes]'], {'dim': '(1)'}), '([input_seq, attributes], dim=1)\n', (2929, 2961), False, 'import torch\n'), ((3458, 3507), 'torch.cat', 'torch.cat', (['[binary_ids, expanded_feature]'], {'dim': '(-1)'}), '([binary_ids, expanded_feature], dim=-1)\n', (3467, 3507), False, 'import torch\n'), ((3672, 3722), 'torch.cat', 'torch.cat', (['[binary_ids2, expanded_feature]'], {'dim': '(-1)'}), '([binary_ids2, expanded_feature], dim=-1)\n', (3681, 3722), False, 'import torch\n'), ((1763, 1808), 'jacinle.random.permutation', 'random.permutation', (['(2 ** self.code_length - 1)'], {}), '(2 ** self.code_length - 1)\n', (1781, 1808), True, 'import jacinle.random as random\n')] |
import datetime
import json
import logging
import re
import time
from stix_shifter_utils.stix_translation.src.json_to_stix import observable
from stix_shifter_utils.stix_translation.src.patterns.pattern_objects import (
ObservationExpression, ComparisonExpression,
ComparisonExpressionOperators, ComparisonComparators, Pattern,
CombinedComparisonExpression, CombinedObservationExpression, ObservationOperators,
StartStopQualifier
)
from .transformers import InfobloxToDomainName, TimestampToSeconds
REFERENCE_DATA_TYPES = {
"qip": ["ipv4", "ipv4_cidr"],
"value": ["ipv4", "ipv4_cidr", "domain_name"],
"qname": ["domain_name"],
"ip": ["ipv4", "ipv4_cidr", "ipv6", "ipv6_cidr"]
}
REFERENCE_FIELDS = ('src_ref.value', 'hostname_ref.value',
'ip_ref.value', 'extensions.dns-ext.question.domain_ref.value'
)
START_STOP_STIX_QUALIFIER = r"START((t'\d{4}(-\d{2}){2}T\d{2}(:\d{2}){2}(\.\d+)?Z')|(\s\d{13}\s))STOP"
TIMESTAMP_MILLISECONDS = r"\.\d+Z$"
THREAT_LEVEL_MAPPING = {
"HIGH": 3,
"MEDIUM": 2,
"LOW": 1
}
logger = logging.getLogger(__name__)
class DuplicateFieldException(Exception):
pass
class QueryStringPatternTranslator:
comparator_lookup = {
'tideDbData': {
ComparisonExpressionOperators.And: "&",
ObservationOperators.And: '&',
ComparisonExpressionOperators.Or: "&",
ObservationOperators.Or: '&',
ComparisonComparators.Equal: "=",
ComparisonComparators.GreaterThan: "=",
ComparisonComparators.GreaterThanOrEqual: "=",
ComparisonComparators.LessThan: "=",
ComparisonComparators.LessThanOrEqual: "=",
ComparisonComparators.Like: "="
},
'dnsEventData': {
ComparisonExpressionOperators.And: "&",
ObservationOperators.And: '&',
ComparisonExpressionOperators.Or: "&",
ObservationOperators.Or: '&',
ComparisonComparators.Equal: "="
},
'dossierData': {
ComparisonExpressionOperators.And: "&",
ObservationOperators.And: '&',
ComparisonExpressionOperators.Or: "&",
ObservationOperators.Or: '&',
ComparisonComparators.Equal: "="
}
}
def __init__(self, pattern: Pattern, data_model_mapper, time_range):
self.dmm = data_model_mapper
self.pattern = pattern
self.using_operators = set()
self.assigned_fields = set()
self.qualified_queries = []
self.dialect = data_model_mapper.dialect
self.translated = self.parse_expression(pattern)
self.qualified_queries = self.translated
self.qualified_queries = _format_translated_queries(self.dialect,
self.qualified_queries,
time_range)
@staticmethod
def _format_equality(value) -> str:
return '{}'.format(value)
@staticmethod
def _format_like(value) -> str:
return "{}".format(value)
@staticmethod
def _check_value_type(value):
value = str(value)
for key, pattern in observable.REGEX.items():
if key != 'date' and bool(re.search(pattern, value)):
return key
return None
@staticmethod
def _parse_reference(value_type, mapped_field, value, comparator):
if value_type not in REFERENCE_DATA_TYPES["{}".format(mapped_field)]:
return None
else:
return "{mapped_field}{comparator}{value}".format(
mapped_field=mapped_field, comparator=comparator, value=value)
def _sanatize_field(self, mapped_field, comparator):
# NOTE: performs the necessary un-transformation/conversion to Infoblox compatible query.
comparator_suffix_map = {
ComparisonComparators.GreaterThan: '_from_date',
ComparisonComparators.GreaterThanOrEqual: '_from_date',
ComparisonComparators.LessThan: '_to_date',
ComparisonComparators.LessThanOrEqual: '_to_date',
}
updated_field = mapped_field
if self.dialect == 'tideDbData':
if mapped_field == 'imported':
updated_field = 'imported' + comparator_suffix_map[comparator]
elif comparator == ComparisonComparators.Like:
if mapped_field not in ['profile', 'origin', 'host', 'ip', 'url', 'domain', 'property', 'class', 'target']:
raise NotImplementedError("Comparison operator {} unsupported for Infoblox connector {} field {}".format(comparator.name, self.dialect, mapped_field))
updated_field = 'text_search'
return updated_field
def _sanatize_value(self, mapped_field, value):
# NOTE: performs the necessary un-transformation/conversion to Infoblox compatible query.
updated_value = value
if self.dialect == 'dnsEventData':
if mapped_field == 'qname':
updated_value = InfobloxToDomainName.untransform(value)
elif mapped_field == 'threat_level':
updated_value = THREAT_LEVEL_MAPPING[value]
elif self.dialect == 'tideDbData':
if mapped_field == 'type':
updated_value = value.lower()
return updated_value
def _parse_mapped_fields(self, expression, value, comparator, stix_field, mapped_fields_array):
comparison_string = ""
is_reference_value = self._is_reference_value(stix_field)
value_type = self._check_value_type(expression.value) if is_reference_value else None
for mapped_field in mapped_fields_array:
mapped_field = self._sanatize_field(mapped_field, expression.comparator)
value = self._sanatize_value(mapped_field, value)
if is_reference_value:
parsed_reference = self._parse_reference(value_type, mapped_field, value, comparator)
if not parsed_reference:
continue
comparison_string += parsed_reference
else:
comparison_string += "{mapped_field}{comparator}{value}".format(mapped_field=mapped_field, comparator=comparator, value=value)
return comparison_string
@staticmethod
def _is_reference_value(stix_field):
return stix_field in REFERENCE_FIELDS
def _lookup_comparison_operator(self, expression_operator):
if expression_operator not in self.comparator_lookup[self.dialect]:
raise NotImplementedError("Comparison operator {} unsupported for Infoblox connector {}".format(expression_operator.name, self.dialect))
return self.comparator_lookup[self.dialect][expression_operator]
def _calculate_intersection(self, mapped_fields_array, stix_field, assigned_fields):
mapped_fields_set = set(mapped_fields_array)
assigned_fields_set = set(assigned_fields.keys())
intersection = assigned_fields_set.intersection(mapped_fields_set)
if intersection:
raise DuplicateFieldException("Multiple criteria for one field is not support in Infoblox connector, field={}, duplicates={}".format(', '.join(intersection), stix_field))
if self.dialect == 'tideDbData' and stix_field == 'imported':
# for TIDE imported date field, allow multiple criteria
return
for field in mapped_fields_array:
assigned_fields[field] = 1
def _set_threat_type(self, stix_object, stix_field, final_expression, value):
# NOTE: for the Dossier and TIDE apis, threat_type must be provided. Using the provided query, determine the appropriate type.
stix_map = {
'dossierData': [
{
'stix_object': ['domain-name', 'x-infoblox-dossier-event-result-pdns'],
'stix_field': ['value', 'hostname_ref.value'],
'threat_type': 'host'
},
{
'stix_object': ['ipv4-addr', 'ipv6-addr', 'x-infoblox-dossier-event-result-pdns'],
'stix_field': ['value', 'ip_ref.value'],
'threat_type': 'ip'
}
],
'tideDbData': [
{
'stix_object': ['domain-name', 'x-infoblox-threat'],
'stix_field': ['value', 'host_name', 'domain_ref.value'],
'threat_type': 'host'
},
{
'stix_object': ['ipv4-addr', 'ipv6-addr', 'x-infoblox-threat'],
'stix_field': ['value', 'ip_ref.value'],
'threat_type': 'ip'
},
{
'stix_object': ['x-infoblox-threat'],
'stix_field': ['url'],
'threat_type': 'url'
},
{
'stix_object': ['email-addr', 'x-infoblox-threat'],
'stix_field': ['value', 'email_ref.value'],
'threat_type': 'email'
}
]
}
if self.dialect not in stix_map:
return
for mapping in stix_map[self.dialect]:
threat_type = None
if stix_object in mapping['stix_object'] and stix_field in mapping['stix_field']:
threat_type = mapping['threat_type']
if stix_object == 'x-infoblox-threat' and stix_field == 'threat_type':
threat_type = value.lower()
if threat_type:
return threat_type
return
def _merge_queries_in_expression(self, expression_01, expression_02, operator):
assert not (len(expression_01) > 1 and len(expression_02) > 1), "Failed to merge queries, expressions too complex"
expression_small = expression_01 if len(expression_01) == 1 else expression_02
expression_large = expression_02 if expression_small == expression_01 else expression_01
# determine threat_type from individual queries
threat_type_array = [i['threatType'] for i in (expression_01 + expression_02) if i['threatType']]
threat_type_set = set(threat_type_array)
if len(threat_type_set) > 1:
raise RuntimeError("Conflicting threat_type found, {}".format(sorted(threat_type_set)))
for query in expression_large:
merging_expression = expression_small[0]
query['query'] = operator.join([merging_expression['query'], query['query']])
query['threatType'] = merging_expression['threatType'] if merging_expression['threatType'] else query['threatType']
return expression_large
def _parse_expression(self, expression, qualifier=None, intersection_fields=None) -> str:
if isinstance(expression, ComparisonExpression):
# Resolve STIX Object Path to a field in the target Data Model
stix_object, stix_field = expression.object_path.split(':')
# Multiple data source fields may map to the same STIX Object
mapped_fields_array = self.dmm.map_field(stix_object, stix_field)
if intersection_fields is not None:
self._calculate_intersection(mapped_fields_array, stix_field, intersection_fields)
else:
assigned_fields = dict()
self._calculate_intersection(mapped_fields_array, stix_field, assigned_fields)
# Resolve the comparison symbol to use in the query string (usually just ':')
comparator = self._lookup_comparison_operator(expression.comparator)
# Some values are formatted differently based on how they're being compared
if expression.comparator == ComparisonComparators.Like:
value = self._format_like(expression.value)
else:
value = self._format_equality(expression.value)
final_expression = self._parse_mapped_fields(expression, value, comparator, stix_field, mapped_fields_array)
threatType = self._set_threat_type(stix_object, stix_field, final_expression, value)
return [{'query': final_expression, 'threatType': threatType, 'startStopTime': qualifier}]
elif isinstance(expression, CombinedComparisonExpression):
operator = self._lookup_comparison_operator(expression.operator)
# NOTE: APIs do not support duplicate criteria (example domain-name=d1.com AND domain-name=d2.com). As a workaround, the expression
# will be split into multiple independent queries.
exp1_fields = dict()
use_two_queries = True
try:
# Process LHS of expression, intersections here is an invalid query, stop processing.
expression_01 = self._parse_expression(expression.expr1, qualifier, exp1_fields)
except DuplicateFieldException as error:
logger.error("%s", error)
raise NotImplementedError("{}".format(error))
try:
# Process RHS of expression, if intersections are found re-attempt parsing but as two separate queries.
expression_02 = self._parse_expression(expression.expr2, qualifier, exp1_fields)
except DuplicateFieldException as error:
try:
exp2_fields = dict()
expression_02 = self._parse_expression(expression.expr2, qualifier, exp2_fields)
use_two_queries = False
except DuplicateFieldException as error:
logger.error("%s", error)
raise NotImplementedError("{}".format(error))
assert expression_01 and expression_02, "Failed to parse one side of the expression"
# NOTE: Merging the two list of queries this would be for expressions with `OR` or `AND` (with duplicate criteria). For
# expressions with `AND` (but with different criteria), then the list of queries on one side of the expression will be concatenated together.
result = expression_01 + expression_02
if expression.operator == ComparisonExpressionOperators.And and use_two_queries:
result = self._merge_queries_in_expression(expression_01, expression_02, operator)
return result
elif isinstance(expression, ObservationExpression):
result = self._parse_expression(expression.comparison_expression, qualifier, intersection_fields)
return result
elif isinstance(expression, StartStopQualifier) and hasattr(expression, 'observation_expression'):
return self._parse_expression(getattr(expression, 'observation_expression'), expression.qualifier, intersection_fields)
elif isinstance(expression, CombinedObservationExpression):
exp1_fields = dict()
exp2_fields = dict()
expression_01 = self._parse_expression(expression.expr1, qualifier, exp1_fields)
expression_02 = self._parse_expression(expression.expr2, qualifier, exp2_fields)
result = expression_01 + expression_02
return result
elif isinstance(expression, Pattern):
result = self._parse_expression(expression.expression)
return result
else:
raise RuntimeError("Unknown Recursion Case for expression={}, type(expression)={}".format(
expression, type(expression)))
def parse_expression(self, pattern: Pattern):
return self._parse_expression(pattern)
def _test_or_add_milliseconds(timestamp) -> str:
# remove single quotes around timestamp
timestamp = re.sub("'", "", timestamp)
# check for 3-decimal milliseconds
if not bool(re.search(TIMESTAMP_MILLISECONDS, timestamp)):
timestamp = re.sub('Z$', '.000Z', timestamp)
return timestamp
def _test_start_stop_format(query_string) -> bool:
# Matches STARTt'1234-56-78T00:00:00.123Z'STOPt'1234-56-78T00:00:00.123Z'
# or START 1234567890123 STOP 1234567890123
return bool(re.search(START_STOP_STIX_QUALIFIER, query_string))
def _get_parts_start_stop(query):
# Remove leading 't' before timestamps
query = re.sub("(?<=START)t|(?<=STOP)t", "", query)
# Split individual query to isolate timestamps
query_parts = re.split("(START)|(STOP)", query)
# Remove None array entries
query_parts = list(map(lambda x: x.strip(), list(filter(None, query_parts))))
return query_parts
def _format_query_with_timestamp(dialect:str, query: str, time_range, start_stop_time) -> str:
if dialect == 'dnsEventData':
if start_stop_time and _test_start_stop_format(start_stop_time):
query_parts = _get_parts_start_stop(start_stop_time)
# grab time stamps from array
start_time = _test_or_add_milliseconds(query_parts[1])
stop_time = _test_or_add_milliseconds(query_parts[3])
transformer = TimestampToSeconds()
second_start_time = transformer.transform(start_time)
second_stop_time = transformer.transform(stop_time)
return 't0={}&t1={}&{}'.format(str(second_start_time), str(second_stop_time), query)
# default to last X minutes
totime = int(time.time())
fromtime = int(totime - datetime.timedelta(minutes=time_range).total_seconds())
return 't0={}&t1={}&{}'.format(str(fromtime), str(totime), query)
if dialect == 'tideDbData':
if start_stop_time and _test_start_stop_format(start_stop_time):
query_parts = _get_parts_start_stop(start_stop_time)
# grab time stamps from array
start_time = _test_or_add_milliseconds(query_parts[1])
stop_time = _test_or_add_milliseconds(query_parts[3])
transformer = TimestampToSeconds()
second_start_time = transformer.transform(start_time)
second_stop_time = transformer.transform(stop_time)
return 'from_date={}&to_date={}&{}'.format(start_time, stop_time, query)
if any(substring in query for substring in ['imported', 'expiration']):
return query
return 'period={} minutes&{}'.format(time_range, query)
return query
def _format_translated_queries(dialect, entry_array, time_range):
# Transform from human-readable timestamp to 10-digit second time
# Ex. START t'2014-04-25T15:51:20.000Z' to START 1398441080
formatted_queries = []
for entry in entry_array:
query = entry['query']
if not query or not query.strip():
# ignore empty queries
continue
query = _format_query_with_timestamp(dialect, query, time_range, entry['startStopTime'])
payload = dict()
payload['offset'] = 0
payload['query'] = query
if 'threatType' in entry:
payload['threat_type'] = entry['threatType']
formatted_queries.append(payload)
return formatted_queries
def translate_pattern(pattern: Pattern, data_model_mapping, options):
trans_queries = QueryStringPatternTranslator(pattern, data_model_mapping, options['time_range']).qualified_queries
queries = []
for trans_query in trans_queries:
trans_query['source'] = data_model_mapping.dialect
queries.append(json.dumps(trans_query))
return queries
| [
"logging.getLogger",
"re.split",
"json.dumps",
"stix_shifter_utils.stix_translation.src.json_to_stix.observable.REGEX.items",
"re.sub",
"datetime.timedelta",
"time.time",
"re.search"
] | [((1063, 1090), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1080, 1090), False, 'import logging\n'), ((15759, 15785), 're.sub', 're.sub', (['"""\'"""', '""""""', 'timestamp'], {}), '("\'", \'\', timestamp)\n', (15765, 15785), False, 'import re\n'), ((16300, 16343), 're.sub', 're.sub', (['"""(?<=START)t|(?<=STOP)t"""', '""""""', 'query'], {}), "('(?<=START)t|(?<=STOP)t', '', query)\n", (16306, 16343), False, 'import re\n'), ((16413, 16446), 're.split', 're.split', (['"""(START)|(STOP)"""', 'query'], {}), "('(START)|(STOP)', query)\n", (16421, 16446), False, 'import re\n'), ((3207, 3231), 'stix_shifter_utils.stix_translation.src.json_to_stix.observable.REGEX.items', 'observable.REGEX.items', ([], {}), '()\n', (3229, 3231), False, 'from stix_shifter_utils.stix_translation.src.json_to_stix import observable\n'), ((15908, 15940), 're.sub', 're.sub', (['"""Z$"""', '""".000Z"""', 'timestamp'], {}), "('Z$', '.000Z', timestamp)\n", (15914, 15940), False, 'import re\n'), ((16157, 16207), 're.search', 're.search', (['START_STOP_STIX_QUALIFIER', 'query_string'], {}), '(START_STOP_STIX_QUALIFIER, query_string)\n', (16166, 16207), False, 'import re\n'), ((15841, 15885), 're.search', 're.search', (['TIMESTAMP_MILLISECONDS', 'timestamp'], {}), '(TIMESTAMP_MILLISECONDS, timestamp)\n', (15850, 15885), False, 'import re\n'), ((17363, 17374), 'time.time', 'time.time', ([], {}), '()\n', (17372, 17374), False, 'import time\n'), ((19405, 19428), 'json.dumps', 'json.dumps', (['trans_query'], {}), '(trans_query)\n', (19415, 19428), False, 'import json\n'), ((3271, 3296), 're.search', 're.search', (['pattern', 'value'], {}), '(pattern, value)\n', (3280, 3296), False, 'import re\n'), ((17408, 17446), 'datetime.timedelta', 'datetime.timedelta', ([], {'minutes': 'time_range'}), '(minutes=time_range)\n', (17426, 17446), False, 'import datetime\n')] |
import torch
import torch.nn as nn
import torchvision.transforms as transforms
import torchvision.datasets as datasets
from torch.utils.data import DataLoader
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size()[0], -1)
class Classifier(nn.Module):
def __init__(self):
super(Classifier, self).__init__()
self.main = nn.Sequential(
nn.Conv2d(1, 64, 5, stride=1, padding=2),
nn.ReLU(),
nn.Conv2d(64, 64, 5, stride=2, padding=0),
nn.ReLU(),
Flatten(),
nn.Linear(64 * 12 * 12, 128),
nn.ReLU(),
nn.Linear(128, 10)
)
def forward(self, x):
return self.main(x)
| [
"torch.nn.ReLU",
"torch.nn.Linear",
"torch.nn.Conv2d"
] | [((397, 437), 'torch.nn.Conv2d', 'nn.Conv2d', (['(1)', '(64)', '(5)'], {'stride': '(1)', 'padding': '(2)'}), '(1, 64, 5, stride=1, padding=2)\n', (406, 437), True, 'import torch.nn as nn\n'), ((451, 460), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (458, 460), True, 'import torch.nn as nn\n'), ((474, 515), 'torch.nn.Conv2d', 'nn.Conv2d', (['(64)', '(64)', '(5)'], {'stride': '(2)', 'padding': '(0)'}), '(64, 64, 5, stride=2, padding=0)\n', (483, 515), True, 'import torch.nn as nn\n'), ((529, 538), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (536, 538), True, 'import torch.nn as nn\n'), ((575, 603), 'torch.nn.Linear', 'nn.Linear', (['(64 * 12 * 12)', '(128)'], {}), '(64 * 12 * 12, 128)\n', (584, 603), True, 'import torch.nn as nn\n'), ((617, 626), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (624, 626), True, 'import torch.nn as nn\n'), ((640, 658), 'torch.nn.Linear', 'nn.Linear', (['(128)', '(10)'], {}), '(128, 10)\n', (649, 658), True, 'import torch.nn as nn\n')] |
from .errors import *
from .constants import *
from .objects import *
import requests
class Client:
'''This is the main class to interact with API'''
def __init__(self):
self._session = requests.Session()
def fetch_repository(self, repository:str):
"""Fetches a public repository
Parameters:
repository (str) : The full name of repository.
Returns:
simplegithub.Repository
Raises:
simplegithub.NotFound
"""
resp = self._session.get(f'{BASE_URL}/repos/{repository}')
if resp.status_code == 200:
return Repository(resp.json())
if resp.status_code == 404:
raise NotFound(resp.json()['message'])
if resp.status_code == 403:
print(resp.json()['message'])
def fetch_user(self, user:str):
"""Fetches a user.
Parameters:
user (str) : The login (or username) of the user.
Returns:
simplegithub.User
Raises:
simplegithub.NotFound
"""
resp = self._session.get(f'{BASE_URL}/users/{user}')
if resp.status_code == 200:
return User(resp.json())
if resp.status_code == 404:
raise NotFound(resp.json()['message'])
if resp.status_code == 403:
print(resp.json()['message'])
def fetch_gist(self, id:str=None):
"""Fetches a gist.
Parameters:
id (str) : The ID of the gist.
Returns:
simplegithub.Gist
Raises:
simplegithub.NotFound
"""
resp = self._session.get(f'{BASE_URL}/gists/{id}')
if resp.status_code == 200:
return Gist(resp.json())
if resp.status_code == 404:
raise NotFound(resp.json()['message'])
if resp.status_code == 403:
print(resp.json()['message'])
def fetch_license(self, key:str=None):
"""Fetches a license.
Parameters:
key (str) : The license key. e.g `mit`
Returns:
simplegithub.License
Raises:
simplegithub.NotFound
"""
resp = self._session.get(f'{BASE_URL}/licenses/{key}')
if resp.status_code == 200:
return License(resp.json())
if resp.status_code == 404:
raise NotFound(resp.json()['message'])
if resp.status_code == 403:
print(resp.json()['message']) | [
"requests.Session"
] | [((191, 209), 'requests.Session', 'requests.Session', ([], {}), '()\n', (207, 209), False, 'import requests\n')] |
# Copyright 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from iotronicclient.common.i18n import _
class Resource(object):
"""Resource class
This class is used to manage the various fields that a resource (e.g.
Chassis, Board, Port) contains. An individual field consists of a
'field_id' (key) and a 'label' (value). The caller only provides the
'field_ids' when instantiating the object.
Ordering of the 'field_ids' will be preserved as specified by the caller.
It also provides the ability to exclude some of these fields when they are
being used for sorting.
"""
FIELDS = {
'name': 'Name',
'project': 'Project',
'uuid': 'UUID',
'extra': 'Extra',
'updated_at': 'Updated At',
'id': 'ID',
'created_at': 'Created At',
'status': 'Status',
'code': 'Code',
'mobile': 'Mobile',
'session': 'Session',
'location': 'Location',
'owner': 'Owner',
'type': 'Type',
'callable': 'Callable',
'public': 'Public',
'onboot': 'On Boot',
'board_uuid': 'Board uuid',
'plugin_uuid': 'Plugin uuid',
'plugin': 'Plugin',
'parameters': 'Parameters',
#
# 'address': 'Address',
# 'async': 'Async',
# 'attach': 'Response is attachment',
# 'chassis_uuid': 'Chassis UUID',
# 'clean_step': 'Clean Step',
# 'console_enabled': 'Console Enabled',
# 'description': 'Description',
# 'http_methods': 'Supported HTTP methods',
# 'inspection_finished_at': 'Inspection Finished At',
# 'inspection_started_at': 'Inspection Started At',
# 'instance_info': 'Instance Info',
# 'instance_uuid': 'Instance UUID',
# 'internal_info': 'Internal Info',
# 'last_error': 'Last Error',
# 'maintenance': 'Maintenance',
# 'maintenance_reason': 'Maintenance Reason',
# 'mode': 'Mode',
# 'power_state': 'Power State',
# 'properties': 'Properties',
# 'provision_state': 'Provisioning State',
# 'provision_updated_at': 'Provision Updated At',
# 'raid_config': 'Current RAID configuration',
# 'reservation': 'Reservation',
# 'resource_class': 'Resource Class',
# 'target_power_state': 'Target Power State',
# 'target_provision_state': 'Target Provision State',
# 'target_raid_config': 'Target RAID configuration',
# 'local_link_connection': 'Local Link Connection',
# 'pxe_enabled': 'PXE boot enabled',
# 'portgroup_uuid': 'Portgroup UUID',
# 'boot_interface': 'Boot Interface',
# 'console_interface': 'Console Interface',
# 'deploy_interface': 'Deploy Interface',
# 'inspect_interface': 'Inspect Interface',
# 'management_interface': 'Management Interface',
# 'network_interface': 'Network Interface',
# 'power_interface': 'Power Interface',
# 'raid_interface': 'RAID Interface',
# 'vendor_interface': 'Vendor Interface',
# 'standalone_ports_supported': 'Standalone Ports Supported',
}
def __init__(self, field_ids, sort_excluded=None):
"""Create a Resource object
:param field_ids: A list of strings that the Resource object will
contain. Each string must match an existing key in
FIELDS.
:param sort_excluded: Optional. A list of strings that will not be used
for sorting. Must be a subset of 'field_ids'.
:raises: ValueError if sort_excluded contains value not in field_ids
"""
self._fields = tuple(field_ids)
self._labels = tuple([self.FIELDS[x] for x in field_ids])
if sort_excluded is None:
sort_excluded = []
not_existing = set(sort_excluded) - set(field_ids)
if not_existing:
raise ValueError(
_("sort_excluded specified with value not contained in "
"field_ids. Unknown value(s): %s") % ','.join(not_existing))
self._sort_fields = tuple(
[x for x in field_ids if x not in sort_excluded])
self._sort_labels = tuple([self.FIELDS[x] for x in self._sort_fields])
@property
def fields(self):
return self._fields
@property
def labels(self):
return self._labels
@property
def sort_fields(self):
return self._sort_fields
@property
def sort_labels(self):
return self._sort_labels
# Boards
BOARD_DETAILED_RESOURCE = Resource(
[
'uuid',
'name',
'type',
'status',
'code',
'session',
'mobile',
'extra',
'created_at',
'updated_at',
'location',
'project',
'owner',
],
sort_excluded=[
'extra', 'location', 'session',
])
BOARD_RESOURCE = Resource(
['uuid',
'name',
'type',
'status',
'session',
])
# Plugins
PLUGIN_DETAILED_RESOURCE = Resource(
['uuid',
'name',
'owner',
'code',
'public',
'callable',
'extra'
],
sort_excluded=[
'extra', 'code',
])
PLUGIN_RESOURCE = Resource(
['uuid',
'name',
'owner',
'public',
'callable',
])
PLUGIN_INJECT_RESOURCE_ON_BOARD = Resource(
[
'plugin',
'status',
'onboot',
'created_at',
'updated_at',
])
PLUGIN_INJECT_RESOURCE = Resource(
['board_uuid',
'plugin_uuid',
'status',
'onboot',
'created_at',
'updated_at',
])
| [
"iotronicclient.common.i18n._"
] | [((4577, 4671), 'iotronicclient.common.i18n._', '_', (['"""sort_excluded specified with value not contained in field_ids. Unknown value(s): %s"""'], {}), "('sort_excluded specified with value not contained in field_ids. Unknown value(s): %s'\n )\n", (4578, 4671), False, 'from iotronicclient.common.i18n import _\n')] |
from setuptools import setup, find_packages
setup(name='DeWave',
version='0.12',
description='Single-channel blind source separation',
long_description='Decomposing two overlapping speech signals that are \
recoded in one channel and restoring signals for each speaker',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Build Tools',
],
keywords=[
'Blind source separation',
'Single channel',
],
url='https://github.com/chaodengusc/DeWave',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=find_packages(),
install_requires=[
'tensorflow',
'numpy',
'scikit-learn',
'librosa',
],
entry_points={'console_scripts':[
'dewave-clip=DeWave.cmddataprep:audioclips',
'dewave-pack=DeWave.cmddatapack:packclips',
'dewave-train=DeWave.cmdtrain:trainmodel',
'dewave-infer=DeWave.cmdinfer:infer',
]},
include_package_data=True,
zip_safe=False)
| [
"setuptools.find_packages"
] | [((750, 765), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (763, 765), False, 'from setuptools import setup, find_packages\n')] |
# python3.7
"""Contains the implementation of generator described in StyleGAN.
Different from the official tensorflow model in folder `stylegan_tf_official`,
this is a simple pytorch version which only contains the generator part. This
class is specially used for inference.
For more details, please check the original paper:
https://arxiv.org/pdf/1812.04948.pdf
"""
from collections import OrderedDict
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
__all__ = ['StyleGANGeneratorModel']
# Defines a dictionary, which maps the target resolution of the final generated
# image to numbers of filters used in each convolutional layer in sequence.
_RESOLUTIONS_TO_CHANNELS = {
8: [512, 512, 512],
16: [512, 512, 512, 512],
32: [512, 512, 512, 512, 512],
64: [512, 512, 512, 512, 512, 256],
128: [512, 512, 512, 512, 512, 256, 128],
256: [512, 512, 512, 512, 512, 256, 128, 64],
512: [512, 512, 512, 512, 512, 256, 128, 64, 32],
1024: [512, 512, 512, 512, 512, 256, 128, 64, 32, 16],
}
# Variable mapping from pytorch model to official tensorflow model.
_STYLEGAN_PTH_VARS_TO_TF_VARS = {
# Statistic information of disentangled latent feature, w.
'truncation.w_avg':
'dlatent_avg', # [512]
# Noises.
'synthesis.layer0.epilogue.apply_noise.noise':
'noise0', # [1, 1, 4, 4]
'synthesis.layer1.epilogue.apply_noise.noise':
'noise1', # [1, 1, 4, 4]
'synthesis.layer2.epilogue.apply_noise.noise':
'noise2', # [1, 1, 8, 8]
'synthesis.layer3.epilogue.apply_noise.noise':
'noise3', # [1, 1, 8, 8]
'synthesis.layer4.epilogue.apply_noise.noise':
'noise4', # [1, 1, 16, 16]
'synthesis.layer5.epilogue.apply_noise.noise':
'noise5', # [1, 1, 16, 16]
'synthesis.layer6.epilogue.apply_noise.noise':
'noise6', # [1, 1, 32, 32]
'synthesis.layer7.epilogue.apply_noise.noise':
'noise7', # [1, 1, 32, 32]
'synthesis.layer8.epilogue.apply_noise.noise':
'noise8', # [1, 1, 64, 64]
'synthesis.layer9.epilogue.apply_noise.noise':
'noise9', # [1, 1, 64, 64]
'synthesis.layer10.epilogue.apply_noise.noise':
'noise10', # [1, 1, 128, 128]
'synthesis.layer11.epilogue.apply_noise.noise':
'noise11', # [1, 1, 128, 128]
'synthesis.layer12.epilogue.apply_noise.noise':
'noise12', # [1, 1, 256, 256]
'synthesis.layer13.epilogue.apply_noise.noise':
'noise13', # [1, 1, 256, 256]
'synthesis.layer14.epilogue.apply_noise.noise':
'noise14', # [1, 1, 512, 512]
'synthesis.layer15.epilogue.apply_noise.noise':
'noise15', # [1, 1, 512, 512]
'synthesis.layer16.epilogue.apply_noise.noise':
'noise16', # [1, 1, 1024, 1024]
'synthesis.layer17.epilogue.apply_noise.noise':
'noise17', # [1, 1, 1024, 1024]
# Mapping blocks.
'mapping.dense0.linear.weight':
'Dense0/weight', # [512, 512]
'mapping.dense0.wscale.bias':
'Dense0/bias', # [512]
'mapping.dense1.linear.weight':
'Dense1/weight', # [512, 512]
'mapping.dense1.wscale.bias':
'Dense1/bias', # [512]
'mapping.dense2.linear.weight':
'Dense2/weight', # [512, 512]
'mapping.dense2.wscale.bias':
'Dense2/bias', # [512]
'mapping.dense3.linear.weight':
'Dense3/weight', # [512, 512]
'mapping.dense3.wscale.bias':
'Dense3/bias', # [512]
'mapping.dense4.linear.weight':
'Dense4/weight', # [512, 512]
'mapping.dense4.wscale.bias':
'Dense4/bias', # [512]
'mapping.dense5.linear.weight':
'Dense5/weight', # [512, 512]
'mapping.dense5.wscale.bias':
'Dense5/bias', # [512]
'mapping.dense6.linear.weight':
'Dense6/weight', # [512, 512]
'mapping.dense6.wscale.bias':
'Dense6/bias', # [512]
'mapping.dense7.linear.weight':
'Dense7/weight', # [512, 512]
'mapping.dense7.wscale.bias':
'Dense7/bias', # [512]
# Synthesis blocks.
'synthesis.layer0.first_layer':
'4x4/Const/const', # [1, 512, 4, 4]
'synthesis.layer0.epilogue.apply_noise.weight':
'4x4/Const/Noise/weight', # [512]
'synthesis.layer0.epilogue.bias':
'4x4/Const/bias', # [512]
'synthesis.layer0.epilogue.style_mod.dense.linear.weight':
'4x4/Const/StyleMod/weight', # [1024, 512]
'synthesis.layer0.epilogue.style_mod.dense.wscale.bias':
'4x4/Const/StyleMod/bias', # [1024]
'synthesis.layer1.conv.weight':
'4x4/Conv/weight', # [512, 512, 3, 3]
'synthesis.layer1.epilogue.apply_noise.weight':
'4x4/Conv/Noise/weight', # [512]
'synthesis.layer1.epilogue.bias':
'4x4/Conv/bias', # [512]
'synthesis.layer1.epilogue.style_mod.dense.linear.weight':
'4x4/Conv/StyleMod/weight', # [1024, 512]
'synthesis.layer1.epilogue.style_mod.dense.wscale.bias':
'4x4/Conv/StyleMod/bias', # [1024]
'synthesis.layer2.conv.weight':
'8x8/Conv0_up/weight', # [512, 512, 3, 3]
'synthesis.layer2.epilogue.apply_noise.weight':
'8x8/Conv0_up/Noise/weight', # [512]
'synthesis.layer2.epilogue.bias':
'8x8/Conv0_up/bias', # [512]
'synthesis.layer2.epilogue.style_mod.dense.linear.weight':
'8x8/Conv0_up/StyleMod/weight', # [1024, 512]
'synthesis.layer2.epilogue.style_mod.dense.wscale.bias':
'8x8/Conv0_up/StyleMod/bias', # [1024]
'synthesis.layer3.conv.weight':
'8x8/Conv1/weight', # [512, 512, 3, 3]
'synthesis.layer3.epilogue.apply_noise.weight':
'8x8/Conv1/Noise/weight', # [512]
'synthesis.layer3.epilogue.bias':
'8x8/Conv1/bias', # [512]
'synthesis.layer3.epilogue.style_mod.dense.linear.weight':
'8x8/Conv1/StyleMod/weight', # [1024, 512]
'synthesis.layer3.epilogue.style_mod.dense.wscale.bias':
'8x8/Conv1/StyleMod/bias', # [1024]
'synthesis.layer4.conv.weight':
'16x16/Conv0_up/weight', # [512, 512, 3, 3]
'synthesis.layer4.epilogue.apply_noise.weight':
'16x16/Conv0_up/Noise/weight', # [512]
'synthesis.layer4.epilogue.bias':
'16x16/Conv0_up/bias', # [512]
'synthesis.layer4.epilogue.style_mod.dense.linear.weight':
'16x16/Conv0_up/StyleMod/weight', # [1024, 512]
'synthesis.layer4.epilogue.style_mod.dense.wscale.bias':
'16x16/Conv0_up/StyleMod/bias', # [1024]
'synthesis.layer5.conv.weight':
'16x16/Conv1/weight', # [512, 512, 3, 3]
'synthesis.layer5.epilogue.apply_noise.weight':
'16x16/Conv1/Noise/weight', # [512]
'synthesis.layer5.epilogue.bias':
'16x16/Conv1/bias', # [512]
'synthesis.layer5.epilogue.style_mod.dense.linear.weight':
'16x16/Conv1/StyleMod/weight', # [1024, 512]
'synthesis.layer5.epilogue.style_mod.dense.wscale.bias':
'16x16/Conv1/StyleMod/bias', # [1024]
'synthesis.layer6.conv.weight':
'32x32/Conv0_up/weight', # [512, 512, 3, 3]
'synthesis.layer6.epilogue.apply_noise.weight':
'32x32/Conv0_up/Noise/weight', # [512]
'synthesis.layer6.epilogue.bias':
'32x32/Conv0_up/bias', # [512]
'synthesis.layer6.epilogue.style_mod.dense.linear.weight':
'32x32/Conv0_up/StyleMod/weight', # [1024, 512]
'synthesis.layer6.epilogue.style_mod.dense.wscale.bias':
'32x32/Conv0_up/StyleMod/bias', # [1024]
'synthesis.layer7.conv.weight':
'32x32/Conv1/weight', # [512, 512, 3, 3]
'synthesis.layer7.epilogue.apply_noise.weight':
'32x32/Conv1/Noise/weight', # [512]
'synthesis.layer7.epilogue.bias':
'32x32/Conv1/bias', # [512]
'synthesis.layer7.epilogue.style_mod.dense.linear.weight':
'32x32/Conv1/StyleMod/weight', # [1024, 512]
'synthesis.layer7.epilogue.style_mod.dense.wscale.bias':
'32x32/Conv1/StyleMod/bias', # [1024]
'synthesis.layer8.conv.weight':
'64x64/Conv0_up/weight', # [256, 512, 3, 3]
'synthesis.layer8.epilogue.apply_noise.weight':
'64x64/Conv0_up/Noise/weight', # [256]
'synthesis.layer8.epilogue.bias':
'64x64/Conv0_up/bias', # [256]
'synthesis.layer8.epilogue.style_mod.dense.linear.weight':
'64x64/Conv0_up/StyleMod/weight', # [512, 512]
'synthesis.layer8.epilogue.style_mod.dense.wscale.bias':
'64x64/Conv0_up/StyleMod/bias', # [512]
'synthesis.layer9.conv.weight':
'64x64/Conv1/weight', # [256, 256, 3, 3]
'synthesis.layer9.epilogue.apply_noise.weight':
'64x64/Conv1/Noise/weight', # [256]
'synthesis.layer9.epilogue.bias':
'64x64/Conv1/bias', # [256]
'synthesis.layer9.epilogue.style_mod.dense.linear.weight':
'64x64/Conv1/StyleMod/weight', # [512, 512]
'synthesis.layer9.epilogue.style_mod.dense.wscale.bias':
'64x64/Conv1/StyleMod/bias', # [512]
'synthesis.layer10.weight':
'128x128/Conv0_up/weight', # [3, 3, 256, 128]
'synthesis.layer10.epilogue.apply_noise.weight':
'128x128/Conv0_up/Noise/weight', # [128]
'synthesis.layer10.epilogue.bias':
'128x128/Conv0_up/bias', # [128]
'synthesis.layer10.epilogue.style_mod.dense.linear.weight':
'128x128/Conv0_up/StyleMod/weight', # [256, 512]
'synthesis.layer10.epilogue.style_mod.dense.wscale.bias':
'128x128/Conv0_up/StyleMod/bias', # [256]
'synthesis.layer11.conv.weight':
'128x128/Conv1/weight', # [128, 128, 3, 3]
'synthesis.layer11.epilogue.apply_noise.weight':
'128x128/Conv1/Noise/weight', # [128]
'synthesis.layer11.epilogue.bias':
'128x128/Conv1/bias', # [128]
'synthesis.layer11.epilogue.style_mod.dense.linear.weight':
'128x128/Conv1/StyleMod/weight', # [256, 512]
'synthesis.layer11.epilogue.style_mod.dense.wscale.bias':
'128x128/Conv1/StyleMod/bias', # [256]
'synthesis.layer12.weight':
'256x256/Conv0_up/weight', # [3, 3, 128, 64]
'synthesis.layer12.epilogue.apply_noise.weight':
'256x256/Conv0_up/Noise/weight', # [64]
'synthesis.layer12.epilogue.bias':
'256x256/Conv0_up/bias', # [64]
'synthesis.layer12.epilogue.style_mod.dense.linear.weight':
'256x256/Conv0_up/StyleMod/weight', # [128, 512]
'synthesis.layer12.epilogue.style_mod.dense.wscale.bias':
'256x256/Conv0_up/StyleMod/bias', # [128]
'synthesis.layer13.conv.weight':
'256x256/Conv1/weight', # [64, 64, 3, 3]
'synthesis.layer13.epilogue.apply_noise.weight':
'256x256/Conv1/Noise/weight', # [64]
'synthesis.layer13.epilogue.bias':
'256x256/Conv1/bias', # [64]
'synthesis.layer13.epilogue.style_mod.dense.linear.weight':
'256x256/Conv1/StyleMod/weight', # [128, 512]
'synthesis.layer13.epilogue.style_mod.dense.wscale.bias':
'256x256/Conv1/StyleMod/bias', # [128]
'synthesis.layer14.weight':
'512x512/Conv0_up/weight', # [3, 3, 64, 32]
'synthesis.layer14.epilogue.apply_noise.weight':
'512x512/Conv0_up/Noise/weight', # [32]
'synthesis.layer14.epilogue.bias':
'512x512/Conv0_up/bias', # [32]
'synthesis.layer14.epilogue.style_mod.dense.linear.weight':
'512x512/Conv0_up/StyleMod/weight', # [64, 512]
'synthesis.layer14.epilogue.style_mod.dense.wscale.bias':
'512x512/Conv0_up/StyleMod/bias', # [64]
'synthesis.layer15.conv.weight':
'512x512/Conv1/weight', # [32, 32, 3, 3]
'synthesis.layer15.epilogue.apply_noise.weight':
'512x512/Conv1/Noise/weight', # [32]
'synthesis.layer15.epilogue.bias':
'512x512/Conv1/bias', # [32]
'synthesis.layer15.epilogue.style_mod.dense.linear.weight':
'512x512/Conv1/StyleMod/weight', # [64, 512]
'synthesis.layer15.epilogue.style_mod.dense.wscale.bias':
'512x512/Conv1/StyleMod/bias', # [64]
'synthesis.layer16.weight':
'1024x1024/Conv0_up/weight', # [3, 3, 32, 16]
'synthesis.layer16.epilogue.apply_noise.weight':
'1024x1024/Conv0_up/Noise/weight', # [16]
'synthesis.layer16.epilogue.bias':
'1024x1024/Conv0_up/bias', # [16]
'synthesis.layer16.epilogue.style_mod.dense.linear.weight':
'1024x1024/Conv0_up/StyleMod/weight', # [32, 512]
'synthesis.layer16.epilogue.style_mod.dense.wscale.bias':
'1024x1024/Conv0_up/StyleMod/bias', # [32]
'synthesis.layer17.conv.weight':
'1024x1024/Conv1/weight', # [16, 16, 3, 3]
'synthesis.layer17.epilogue.apply_noise.weight':
'1024x1024/Conv1/Noise/weight', # [16]
'synthesis.layer17.epilogue.bias':
'1024x1024/Conv1/bias', # [16]
'synthesis.layer17.epilogue.style_mod.dense.linear.weight':
'1024x1024/Conv1/StyleMod/weight', # [32, 512]
'synthesis.layer17.epilogue.style_mod.dense.wscale.bias':
'1024x1024/Conv1/StyleMod/bias', # [32]
'synthesis.output.conv.weight':
'ToRGB_lod0/weight', # [3, 16, 1, 1]
'synthesis.output.bias':
'ToRGB_lod0/bias', # [3]
}
class StyleGANGeneratorModel(nn.Module):
"""Defines the generator module in StyleGAN.
Note that the generated images are with RGB color channels.
"""
def __init__(self,
resolution=1024,
w_space_dim=512,
truncation_psi=0.7,
truncation_layers=8,
randomize_noise=False):
"""Initializes the generator with basic settings.
Args:
resolution: The resolution of the final output image.
w_space_dim: The dimension of the disentangled latent vectors, w.
truncation_psi: Style strength multiplier for the truncation trick.
`None` or `1.0` indicates no truncation.
truncation_layers: Number of layers for which to apply the truncation
trick. `None` indicates no truncation.
Raises:
ValueError: If the input `resolution` is not supported.
"""
super().__init__()
self.mapping = MappingModule(final_space_dim=w_space_dim)
self.truncation = TruncationModule(resolution=resolution,
w_space_dim=w_space_dim,
truncation_psi=truncation_psi,
truncation_layers=truncation_layers)
self.synthesis = SynthesisModule(resolution=resolution,
randomize_noise=randomize_noise)
self.pth_to_tf_var_mapping = _STYLEGAN_PTH_VARS_TO_TF_VARS
def forward(self, z):
w = self.mapping(z)
w = self.truncation(w)
x = self.synthesis(w)
return x
class MappingModule(nn.Sequential):
"""Implements the latent space mapping module used in StyleGAN.
Basically, this module executes several dense layers in sequence.
"""
def __init__(self,
normalize_input=True,
input_space_dim=512,
hidden_space_dim=512,
final_space_dim=512,
num_layers=8):
sequence = OrderedDict()
def _add_layer(layer, name=None):
name = name or f'dense{len(sequence) + (not normalize_input) - 1}'
sequence[name] = layer
if normalize_input:
_add_layer(PixelNormLayer(), name='normalize')
for i in range(num_layers):
in_dim = input_space_dim if i == 0 else hidden_space_dim
out_dim = final_space_dim if i == (num_layers - 1) else hidden_space_dim
_add_layer(DenseBlock(in_dim, out_dim))
super().__init__(sequence)
def forward(self, x):
if len(x.shape) != 2:
raise ValueError(f'The input tensor should be with shape [batch_size, '
f'noise_dim], but {x.shape} received!')
return super().forward(x)
class TruncationModule(nn.Module):
"""Implements the truncation module used in StyleGAN."""
def __init__(self,
resolution=1024,
w_space_dim=512,
truncation_psi=0.7,
truncation_layers=8):
super().__init__()
self.num_layers = int(np.log2(resolution)) * 2 - 2
self.w_space_dim = w_space_dim
if truncation_psi is not None and truncation_layers is not None:
self.use_truncation = True
else:
self.use_truncation = False
truncation_psi = 1.0
truncation_layers = 0
self.register_buffer('w_avg', torch.zeros(w_space_dim))
layer_idx = np.arange(self.num_layers).reshape(1, self.num_layers, 1)
coefs = np.ones_like(layer_idx, dtype=np.float32)
coefs[layer_idx < truncation_layers] *= truncation_psi
self.register_buffer('truncation', torch.from_numpy(coefs))
def forward(self, w):
if len(w.shape) == 2:
w = w.view(-1, 1, self.w_space_dim).repeat(1, self.num_layers, 1)
if self.use_truncation:
w_avg = self.w_avg.view(1, 1, self.w_space_dim)
w = w_avg + (w - w_avg) * self.truncation
return w
class SynthesisModule(nn.Module):
"""Implements the image synthesis module used in StyleGAN.
Basically, this module executes several convolutional layers in sequence.
"""
def __init__(self,
resolution=1024,
randomize_noise=False):
super().__init__()
try:
channels = _RESOLUTIONS_TO_CHANNELS[resolution]
except KeyError:
raise ValueError(f'Invalid resolution: {resolution}!\n'
f'Resolutions allowed: '
f'{list(_RESOLUTIONS_TO_CHANNELS)}.')
self.num_layers = int(np.log2(resolution)) * 2 - 2
for i in range(1, len(channels)):
if i == 1:
self.add_module('layer0', FirstConvBlock(channels[0], randomize_noise))
else:
self.add_module(
f'layer{i * 2 - 2}',
UpConvBlock(layer_idx=i * 2 - 2,
in_channels=channels[i - 1],
out_channels=channels[i],
randomize_noise=randomize_noise))
self.add_module(
f'layer{i * 2 - 1}',
ConvBlock(layer_idx=i * 2 - 1,
in_channels=channels[i],
out_channels=channels[i],
randomize_noise=randomize_noise))
self.add_module('output', LastConvBlock(channels[-1]))
def forward(self, w):
x = self.layer0(w[:, 0])
for i in range(1, self.num_layers):
x = self.__getattr__(f'layer{i}')(x, w[:, i])
x = self.output(x)
return x
class PixelNormLayer(nn.Module):
"""Implements pixel-wise feature vector normalization layer."""
def __init__(self, epsilon=1e-8):
super().__init__()
self.epsilon = epsilon
def forward(self, x):
return x / torch.sqrt(torch.mean(x**2, dim=1, keepdim=True) + self.epsilon)
class InstanceNormLayer(nn.Module):
"""Implements instance normalization layer."""
def __init__(self, epsilon=1e-8):
super().__init__()
self.epsilon = epsilon
def forward(self, x):
if len(x.shape) != 4:
raise ValueError(f'The input tensor should be with shape [batch_size, '
f'num_channels, height, width], but {x.shape} received!')
x = x - torch.mean(x, dim=[2, 3], keepdim=True)
x = x / torch.sqrt(torch.mean(x**2, dim=[2, 3], keepdim=True) +
self.epsilon)
return x
class ResolutionScalingLayer(nn.Module):
"""Implements the resolution scaling layer.
Basically, this layer can be used to upsample or downsample feature maps from
spatial domain with nearest neighbor interpolation.
"""
def __init__(self, scale_factor=2):
super().__init__()
self.scale_factor = scale_factor
def forward(self, x):
return F.interpolate(x, scale_factor=self.scale_factor, mode='nearest')
class BlurLayer(nn.Module):
"""Implements the blur layer used in StyleGAN."""
def __init__(self,
channels,
kernel=(1, 2, 1),
normalize=True,
flip=False):
super().__init__()
kernel = np.array(kernel, dtype=np.float32).reshape(1, 3)
kernel = kernel.T.dot(kernel)
if normalize:
kernel /= np.sum(kernel)
if flip:
kernel = kernel[::-1, ::-1]
kernel = kernel.reshape(3, 3, 1, 1)
kernel = np.tile(kernel, [1, 1, channels, 1])
kernel = np.transpose(kernel, [2, 3, 0, 1])
self.register_buffer('kernel', torch.from_numpy(kernel))
self.channels = channels
def forward(self, x):
return F.conv2d(x, self.kernel, stride=1, padding=1, groups=self.channels)
class NoiseApplyingLayer(nn.Module):
"""Implements the noise applying layer used in StyleGAN."""
def __init__(self, layer_idx, channels, randomize_noise=False):
super().__init__()
self.randomize_noise = randomize_noise
self.res = 2**(layer_idx // 2 + 2)
self.register_buffer('noise', torch.randn(1, 1, self.res, self.res))
self.weight = nn.Parameter(torch.zeros(channels))
def forward(self, x):
if len(x.shape) != 4:
raise ValueError(f'The input tensor should be with shape [batch_size, '
f'num_channels, height, width], but {x.shape} received!')
if self.randomize_noise:
noise = torch.randn(x.shape[0], 1, self.res, self.res)
if x.is_cuda:
noise = noise.cuda()
else:
noise = self.noise
return x + noise * self.weight.view(1, -1, 1, 1)
class StyleModulationLayer(nn.Module):
"""Implements the style modulation layer used in StyleGAN."""
def __init__(self, channels, w_space_dim=512):
super().__init__()
self.channels = channels
self.dense = DenseBlock(in_features=w_space_dim,
out_features=channels*2,
wscale_gain=1.0,
wscale_lr_multiplier=1.0,
activation_type='linear')
def forward(self, x, w):
if len(w.shape) != 2:
raise ValueError(f'The input tensor should be with shape [batch_size, '
f'num_channels], but {x.shape} received!')
style = self.dense(w)
style = style.view(-1, 2, self.channels, 1, 1)
return x * (style[:, 0] + 1) + style[:, 1]
class WScaleLayer(nn.Module):
"""Implements the layer to scale weight variable and add bias.
Note that, the weight variable is trained in `nn.Conv2d` layer (or `nn.Linear`
layer), and only scaled with a constant number , which is not trainable, in
this layer. However, the bias variable is trainable in this layer.
"""
def __init__(self,
in_channels,
out_channels,
kernel_size,
gain=np.sqrt(2.0),
lr_multiplier=1.0):
super().__init__()
fan_in = in_channels * kernel_size * kernel_size
self.scale = gain / np.sqrt(fan_in) * lr_multiplier
self.bias = nn.Parameter(torch.zeros(out_channels))
self.lr_multiplier = lr_multiplier
def forward(self, x):
if len(x.shape) == 4:
return x * self.scale + self.bias.view(1, -1, 1, 1) * self.lr_multiplier
if len(x.shape) == 2:
return x * self.scale + self.bias.view(1, -1) * self.lr_multiplier
raise ValueError(f'The input tensor should be with shape [batch_size, '
f'num_channels, height, width], or [batch_size, '
f'num_channels], but {x.shape} received!')
class EpilogueBlock(nn.Module):
"""Implements the epilogue block of each conv block."""
def __init__(self,
layer_idx,
channels,
randomize_noise=False,
normalization_fn='instance'):
super().__init__()
self.apply_noise = NoiseApplyingLayer(layer_idx, channels, randomize_noise)
self.bias = nn.Parameter(torch.zeros(channels))
self.activate = nn.LeakyReLU(negative_slope=0.2, inplace=True)
if normalization_fn == 'pixel':
self.norm = PixelNormLayer()
elif normalization_fn == 'instance':
self.norm = InstanceNormLayer()
else:
raise NotImplementedError(f'Not implemented normalization function: '
f'{normalization_fn}!')
self.style_mod = StyleModulationLayer(channels)
def forward(self, x, w):
x = self.apply_noise(x)
x = x + self.bias.view(1, -1, 1, 1)
x = self.activate(x)
x = self.norm(x)
x = self.style_mod(x, w)
return x
class FirstConvBlock(nn.Module):
"""Implements the first convolutional block used in StyleGAN.
Basically, this block starts from a const input, which is `ones(512, 4, 4)`.
"""
def __init__(self, channels, randomize_noise=False):
super().__init__()
self.first_layer = nn.Parameter(torch.ones(1, channels, 4, 4))
self.epilogue = EpilogueBlock(layer_idx=0,
channels=channels,
randomize_noise=randomize_noise)
def forward(self, w):
x = self.first_layer.repeat(w.shape[0], 1, 1, 1)
x = self.epilogue(x, w)
return x
class UpConvBlock(nn.Module):
"""Implements the convolutional block used in StyleGAN.
Basically, this block is used as the first convolutional block for each
resolution, which will execute upsampling.
"""
def __init__(self,
layer_idx,
in_channels,
out_channels,
kernel_size=3,
stride=1,
padding=1,
dilation=1,
add_bias=False,
wscale_gain=np.sqrt(2.0),
wscale_lr_multiplier=1.0,
randomize_noise=False):
"""Initializes the class with block settings.
Args:
in_channels: Number of channels of the input tensor fed into this block.
out_channels: Number of channels (kernels) of the output tensor.
kernel_size: Size of the convolutional kernel.
stride: Stride parameter for convolution operation.
padding: Padding parameter for convolution operation.
dilation: Dilation rate for convolution operation.
add_bias: Whether to add bias onto the convolutional result.
wscale_gain: The gain factor for `wscale` layer.
wscale_lr_multiplier: The learning rate multiplier factor for `wscale`
layer.
Raises:
ValueError: If the block is not applied to the first block for a
particular resolution.
"""
super().__init__()
if layer_idx % 2 == 1:
raise ValueError(f'This block is implemented as the first block of each '
f'resolution, but is applied to layer {layer_idx}!')
self.layer_idx = layer_idx
if self.layer_idx > 9:
self.weight = nn.Parameter(
torch.randn(kernel_size, kernel_size, in_channels, out_channels))
else:
self.upsample = ResolutionScalingLayer()
self.conv = nn.Conv2d(in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
groups=1,
bias=add_bias)
fan_in = in_channels * kernel_size * kernel_size
self.scale = wscale_gain / np.sqrt(fan_in) * wscale_lr_multiplier
self.blur = BlurLayer(channels=out_channels)
self.epilogue = EpilogueBlock(layer_idx=layer_idx,
channels=out_channels,
randomize_noise=randomize_noise)
def forward(self, x, w):
if self.layer_idx > 9:
kernel = self.weight * self.scale
kernel = F.pad(kernel, (0, 0, 0, 0, 1, 1, 1, 1), 'constant', 0.0)
kernel = (kernel[1:, 1:] + kernel[:-1, 1:] +
kernel[1:, :-1] + kernel[:-1, :-1])
kernel = kernel.permute(2, 3, 0, 1)
x = F.conv_transpose2d(x, kernel, stride=2, padding=1)
else:
x = self.upsample(x)
x = self.conv(x) * self.scale
x = self.blur(x)
x = self.epilogue(x, w)
return x
class ConvBlock(nn.Module):
"""Implements the convolutional block used in StyleGAN.
Basically, this block is used as the second convolutional block for each
resolution.
"""
def __init__(self,
layer_idx,
in_channels,
out_channels,
kernel_size=3,
stride=1,
padding=1,
dilation=1,
add_bias=False,
wscale_gain=np.sqrt(2.0),
wscale_lr_multiplier=1.0,
randomize_noise=False):
"""Initializes the class with block settings.
Args:
in_channels: Number of channels of the input tensor fed into this block.
out_channels: Number of channels (kernels) of the output tensor.
kernel_size: Size of the convolutional kernel.
stride: Stride parameter for convolution operation.
padding: Padding parameter for convolution operation.
dilation: Dilation rate for convolution operation.
add_bias: Whether to add bias onto the convolutional result.
wscale_gain: The gain factor for `wscale` layer.
wscale_lr_multiplier: The learning rate multiplier factor for `wscale`
layer.
Raises:
ValueError: If the block is not applied to the second block for a
particular resolution.
"""
super().__init__()
if layer_idx % 2 == 0:
raise ValueError(f'This block is implemented as the second block of each '
f'resolution, but is applied to layer {layer_idx}!')
self.conv = nn.Conv2d(in_channels=in_channels,
out_channels=out_channels,
kernel_size=kernel_size,
stride=stride,
padding=padding,
dilation=dilation,
groups=1,
bias=add_bias)
fan_in = in_channels * kernel_size * kernel_size
self.scale = wscale_gain / np.sqrt(fan_in) * wscale_lr_multiplier
self.epilogue = EpilogueBlock(layer_idx=layer_idx,
channels=out_channels,
randomize_noise=randomize_noise)
def forward(self, x, w):
x = self.conv(x) * self.scale
x = self.epilogue(x, w)
return x
class LastConvBlock(nn.Module):
"""Implements the last convolutional block used in StyleGAN.
Basically, this block converts the final feature map to RGB image.
"""
def __init__(self, channels):
super().__init__()
self.conv = nn.Conv2d(in_channels=channels,
out_channels=3,
kernel_size=1,
bias=False)
self.scale = 1 / np.sqrt(channels)
self.bias = nn.Parameter(torch.zeros(3))
def forward(self, x):
x = self.conv(x) * self.scale
x = x + self.bias.view(1, -1, 1, 1)
return x
class DenseBlock(nn.Module):
"""Implements the dense block used in StyleGAN.
Basically, this block executes fully-connected layer, weight-scale layer,
and activation layer in sequence.
"""
def __init__(self,
in_features,
out_features,
add_bias=False,
wscale_gain=np.sqrt(2.0),
wscale_lr_multiplier=0.01,
activation_type='lrelu'):
"""Initializes the class with block settings.
Args:
in_features: Number of channels of the input tensor fed into this block.
out_features: Number of channels of the output tensor.
add_bias: Whether to add bias onto the fully-connected result.
wscale_gain: The gain factor for `wscale` layer.
wscale_lr_multiplier: The learning rate multiplier factor for `wscale`
layer.
activation_type: Type of activation function. Support `linear` and
`lrelu`.
Raises:
NotImplementedError: If the input `activation_type` is not supported.
"""
super().__init__()
self.linear = nn.Linear(in_features=in_features,
out_features=out_features,
bias=add_bias)
self.wscale = WScaleLayer(in_channels=in_features,
out_channels=out_features,
kernel_size=1,
gain=wscale_gain,
lr_multiplier=wscale_lr_multiplier)
if activation_type == 'linear':
self.activate = (lambda x: x)
elif activation_type == 'lrelu':
self.activate = nn.LeakyReLU(negative_slope=0.2, inplace=True)
else:
raise NotImplementedError(f'Not implemented activation function: '
f'{activation_type}!')
def forward(self, x):
x = self.linear(x)
x = self.wscale(x)
x = self.activate(x)
return x
| [
"torch.nn.functional.conv2d",
"numpy.sqrt",
"torch.from_numpy",
"numpy.array",
"torch.nn.functional.conv_transpose2d",
"torch.nn.functional.interpolate",
"torch.nn.functional.pad",
"numpy.arange",
"torch.mean",
"torch.randn",
"numpy.tile",
"collections.OrderedDict",
"torch.nn.LeakyReLU",
"numpy.log2",
"numpy.transpose",
"numpy.ones_like",
"torch.nn.Conv2d",
"numpy.sum",
"torch.nn.Linear",
"torch.zeros",
"torch.ones"
] | [((14984, 14997), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (14995, 14997), False, 'from collections import OrderedDict\n'), ((16405, 16446), 'numpy.ones_like', 'np.ones_like', (['layer_idx'], {'dtype': 'np.float32'}), '(layer_idx, dtype=np.float32)\n', (16417, 16446), True, 'import numpy as np\n'), ((19548, 19612), 'torch.nn.functional.interpolate', 'F.interpolate', (['x'], {'scale_factor': 'self.scale_factor', 'mode': '"""nearest"""'}), "(x, scale_factor=self.scale_factor, mode='nearest')\n", (19561, 19612), True, 'import torch.nn.functional as F\n'), ((20102, 20138), 'numpy.tile', 'np.tile', (['kernel', '[1, 1, channels, 1]'], {}), '(kernel, [1, 1, channels, 1])\n', (20109, 20138), True, 'import numpy as np\n'), ((20152, 20186), 'numpy.transpose', 'np.transpose', (['kernel', '[2, 3, 0, 1]'], {}), '(kernel, [2, 3, 0, 1])\n', (20164, 20186), True, 'import numpy as np\n'), ((20313, 20380), 'torch.nn.functional.conv2d', 'F.conv2d', (['x', 'self.kernel'], {'stride': '(1)', 'padding': '(1)', 'groups': 'self.channels'}), '(x, self.kernel, stride=1, padding=1, groups=self.channels)\n', (20321, 20380), True, 'import torch.nn.functional as F\n'), ((22465, 22477), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (22472, 22477), True, 'import numpy as np\n'), ((23604, 23650), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (23616, 23650), True, 'import torch.nn as nn\n'), ((25283, 25295), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (25290, 25295), True, 'import numpy as np\n'), ((28279, 28291), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (28286, 28291), True, 'import numpy as np\n'), ((29374, 29546), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channels', 'out_channels': 'out_channels', 'kernel_size': 'kernel_size', 'stride': 'stride', 'padding': 'padding', 'dilation': 'dilation', 'groups': '(1)', 'bias': 'add_bias'}), '(in_channels=in_channels, out_channels=out_channels, kernel_size=\n kernel_size, stride=stride, padding=padding, dilation=dilation, groups=\n 1, bias=add_bias)\n', (29383, 29546), True, 'import torch.nn as nn\n'), ((30369, 30443), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'channels', 'out_channels': '(3)', 'kernel_size': '(1)', 'bias': '(False)'}), '(in_channels=channels, out_channels=3, kernel_size=1, bias=False)\n', (30378, 30443), True, 'import torch.nn as nn\n'), ((31055, 31067), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (31062, 31067), True, 'import numpy as np\n'), ((31797, 31873), 'torch.nn.Linear', 'nn.Linear', ([], {'in_features': 'in_features', 'out_features': 'out_features', 'bias': 'add_bias'}), '(in_features=in_features, out_features=out_features, bias=add_bias)\n', (31806, 31873), True, 'import torch.nn as nn\n'), ((16293, 16317), 'torch.zeros', 'torch.zeros', (['w_space_dim'], {}), '(w_space_dim)\n', (16304, 16317), False, 'import torch\n'), ((16545, 16568), 'torch.from_numpy', 'torch.from_numpy', (['coefs'], {}), '(coefs)\n', (16561, 16568), False, 'import torch\n'), ((19025, 19064), 'torch.mean', 'torch.mean', (['x'], {'dim': '[2, 3]', 'keepdim': '(True)'}), '(x, dim=[2, 3], keepdim=True)\n', (19035, 19064), False, 'import torch\n'), ((19987, 20001), 'numpy.sum', 'np.sum', (['kernel'], {}), '(kernel)\n', (19993, 20001), True, 'import numpy as np\n'), ((20222, 20246), 'torch.from_numpy', 'torch.from_numpy', (['kernel'], {}), '(kernel)\n', (20238, 20246), False, 'import torch\n'), ((20688, 20725), 'torch.randn', 'torch.randn', (['(1)', '(1)', 'self.res', 'self.res'], {}), '(1, 1, self.res, self.res)\n', (20699, 20725), False, 'import torch\n'), ((20758, 20779), 'torch.zeros', 'torch.zeros', (['channels'], {}), '(channels)\n', (20769, 20779), False, 'import torch\n'), ((21034, 21080), 'torch.randn', 'torch.randn', (['x.shape[0]', '(1)', 'self.res', 'self.res'], {}), '(x.shape[0], 1, self.res, self.res)\n', (21045, 21080), False, 'import torch\n'), ((22675, 22700), 'torch.zeros', 'torch.zeros', (['out_channels'], {}), '(out_channels)\n', (22686, 22700), False, 'import torch\n'), ((23561, 23582), 'torch.zeros', 'torch.zeros', (['channels'], {}), '(channels)\n', (23572, 23582), False, 'import torch\n'), ((24479, 24508), 'torch.ones', 'torch.ones', (['(1)', 'channels', '(4)', '(4)'], {}), '(1, channels, 4, 4)\n', (24489, 24508), False, 'import torch\n'), ((26605, 26777), 'torch.nn.Conv2d', 'nn.Conv2d', ([], {'in_channels': 'in_channels', 'out_channels': 'out_channels', 'kernel_size': 'kernel_size', 'stride': 'stride', 'padding': 'padding', 'dilation': 'dilation', 'groups': '(1)', 'bias': 'add_bias'}), '(in_channels=in_channels, out_channels=out_channels, kernel_size=\n kernel_size, stride=stride, padding=padding, dilation=dilation, groups=\n 1, bias=add_bias)\n', (26614, 26777), True, 'import torch.nn as nn\n'), ((27426, 27482), 'torch.nn.functional.pad', 'F.pad', (['kernel', '(0, 0, 0, 0, 1, 1, 1, 1)', '"""constant"""', '(0.0)'], {}), "(kernel, (0, 0, 0, 0, 1, 1, 1, 1), 'constant', 0.0)\n", (27431, 27482), True, 'import torch.nn.functional as F\n'), ((27638, 27688), 'torch.nn.functional.conv_transpose2d', 'F.conv_transpose2d', (['x', 'kernel'], {'stride': '(2)', 'padding': '(1)'}), '(x, kernel, stride=2, padding=1)\n', (27656, 27688), True, 'import torch.nn.functional as F\n'), ((30543, 30560), 'numpy.sqrt', 'np.sqrt', (['channels'], {}), '(channels)\n', (30550, 30560), True, 'import numpy as np\n'), ((30590, 30604), 'torch.zeros', 'torch.zeros', (['(3)'], {}), '(3)\n', (30601, 30604), False, 'import torch\n'), ((16335, 16361), 'numpy.arange', 'np.arange', (['self.num_layers'], {}), '(self.num_layers)\n', (16344, 16361), True, 'import numpy as np\n'), ((19870, 19904), 'numpy.array', 'np.array', (['kernel'], {'dtype': 'np.float32'}), '(kernel, dtype=np.float32)\n', (19878, 19904), True, 'import numpy as np\n'), ((22614, 22629), 'numpy.sqrt', 'np.sqrt', (['fan_in'], {}), '(fan_in)\n', (22621, 22629), True, 'import numpy as np\n'), ((26463, 26527), 'torch.randn', 'torch.randn', (['kernel_size', 'kernel_size', 'in_channels', 'out_channels'], {}), '(kernel_size, kernel_size, in_channels, out_channels)\n', (26474, 26527), False, 'import torch\n'), ((27049, 27064), 'numpy.sqrt', 'np.sqrt', (['fan_in'], {}), '(fan_in)\n', (27056, 27064), True, 'import numpy as np\n'), ((29803, 29818), 'numpy.sqrt', 'np.sqrt', (['fan_in'], {}), '(fan_in)\n', (29810, 29818), True, 'import numpy as np\n'), ((32332, 32378), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (32344, 32378), True, 'import torch.nn as nn\n'), ((15994, 16013), 'numpy.log2', 'np.log2', (['resolution'], {}), '(resolution)\n', (16001, 16013), True, 'import numpy as np\n'), ((17415, 17434), 'numpy.log2', 'np.log2', (['resolution'], {}), '(resolution)\n', (17422, 17434), True, 'import numpy as np\n'), ((18575, 18614), 'torch.mean', 'torch.mean', (['(x ** 2)'], {'dim': '(1)', 'keepdim': '(True)'}), '(x ** 2, dim=1, keepdim=True)\n', (18585, 18614), False, 'import torch\n'), ((19088, 19132), 'torch.mean', 'torch.mean', (['(x ** 2)'], {'dim': '[2, 3]', 'keepdim': '(True)'}), '(x ** 2, dim=[2, 3], keepdim=True)\n', (19098, 19132), False, 'import torch\n')] |
from math import inf, isinf
from typing import Iterable
from enum import Enum
class Scales(Enum):
THOUSANDS = 1.E-3
MILLIONS = 1.E-6
BILLIONS = 1.E-9
def parse_d(d):
"""
Author: <NAME>
Purpose: We expect an Annuity to occur over a finite duration.
(for an infinite duration, refer to Perpetuity implementation)
This duration does not necessarily have to start at the first period,
so n is specified to be a list of two whole numbers, inclusively
indicating the start and end period for the annuity. However, it may
be desirable to assume n starts at one and supply a single integer value
to specify the end date for syntactic clarity.
This method is used to convert a nonspecific parameter n into a form
compliant with the design of the library.
Parameter: d [any] - An integer, whole-number float or 1 or 2-element list.
Integers, floats and one-element lists are assumed to specify
the end period of an annuity starting at period one.
Returns: A two-element list of the start and end periods of the annuity.
"""
if isinstance(d, Iterable):
if len(d) > 2:
return TypeError("Length of Iterable d must not exceed 2")
if len(d) == 1:
d = [0, d[0]]
# Validate d0
if int(d[0]) == d[0]:
pass
else:
return TypeError("Type of d0 must be an integer!")
# Validate d1
if isinf(d[1]):
pass
elif int(d[1]) == d[1]:
pass
else:
return TypeError("Type of d1 must be an integer or infinite!")
if d[1] >= d[0]:
pass
else:
return ValueError("Value of d0 must not exceed d1!")
return d
elif isinf(d):
return parse_d([0, d])
elif int(d) == d:
return parse_d([0, d])
else:
raise TypeError("Type of d must be an integer or infinite, or list thereof")
def parse_ns(val):
if type(val) == int:
ns = (val,) # Get the cashflows in a period as an array
elif type(val) == tuple:
ns = val # Get the cashflows of multiple periods as a 2D array
elif type(val) == slice:
start = val.start or 0
stop = val.stop + 1
step = val.step or 1
ns = range(start, stop, step)
return ns
def get_final_period(cashflows, finite=True):
from .cashflow import Present, Future, Annuity, Perpetuity, Dynamic
from .taxation import Depreciation
if not isinstance(cashflows, Iterable):
cashflows = [cashflows]
def final_period(cf):
if isinstance(cf, Future): # also accounts for present
return cf.n
elif isinstance(cf, Annuity):
if finite and cf.d[1] is inf:
return cf.d[0]
else:
return cf.d[1]
elif isinstance(cf, Dynamic):
return cf.d[1]
elif isinstance(cf, Depreciation):
return cf.d[1]
else:
return 0
final_n = 0
for cashflow in cashflows:
n = final_period(cashflow)
final_n = n if n > final_n else final_n
return final_n
| [
"math.isinf"
] | [((1567, 1578), 'math.isinf', 'isinf', (['d[1]'], {}), '(d[1])\n', (1572, 1578), False, 'from math import inf, isinf\n'), ((1883, 1891), 'math.isinf', 'isinf', (['d'], {}), '(d)\n', (1888, 1891), False, 'from math import inf, isinf\n')] |
import cv2
from copy import deepcopy
Icolor = cv2.imread('images/mustafa.jpeg')
# Red Channel
red = deepcopy(Icolor)
red[:,:,0] = 0
red[:,:,1] = 0
cv2.imwrite("red.png", red)
# Green Channel
green = deepcopy(Icolor)
green[:,:,0] = 0
green[:,:,2] = 0
cv2.imwrite("green.png", green)
# Blue Channel
blue = deepcopy(Icolor)
blue[:,:,1] = 0
blue[:,:,2] = 0
cv2.imwrite("blue.png", blue)
# RedGreen Channel
redgreen = deepcopy(Icolor)
redgreen[:,:,0] = 0
cv2.imwrite("redgreen.png", redgreen)
# RedBlue Channel
redblue = deepcopy(Icolor)
redblue[:,:,1] = 0
cv2.imwrite("redblue.png", redblue)
# GreenBlue Channel
greenblue = deepcopy(Icolor)
greenblue[:,:,2] = 0
cv2.imwrite("greenblue.png", greenblue) | [
"cv2.imwrite",
"cv2.imread",
"copy.deepcopy"
] | [((47, 80), 'cv2.imread', 'cv2.imread', (['"""images/mustafa.jpeg"""'], {}), "('images/mustafa.jpeg')\n", (57, 80), False, 'import cv2\n'), ((102, 118), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (110, 118), False, 'from copy import deepcopy\n'), ((150, 177), 'cv2.imwrite', 'cv2.imwrite', (['"""red.png"""', 'red'], {}), "('red.png', red)\n", (161, 177), False, 'import cv2\n'), ((203, 219), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (211, 219), False, 'from copy import deepcopy\n'), ((255, 286), 'cv2.imwrite', 'cv2.imwrite', (['"""green.png"""', 'green'], {}), "('green.png', green)\n", (266, 286), False, 'import cv2\n'), ((310, 326), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (318, 326), False, 'from copy import deepcopy\n'), ((360, 389), 'cv2.imwrite', 'cv2.imwrite', (['"""blue.png"""', 'blue'], {}), "('blue.png', blue)\n", (371, 389), False, 'import cv2\n'), ((421, 437), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (429, 437), False, 'from copy import deepcopy\n'), ((459, 496), 'cv2.imwrite', 'cv2.imwrite', (['"""redgreen.png"""', 'redgreen'], {}), "('redgreen.png', redgreen)\n", (470, 496), False, 'import cv2\n'), ((526, 542), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (534, 542), False, 'from copy import deepcopy\n'), ((563, 598), 'cv2.imwrite', 'cv2.imwrite', (['"""redblue.png"""', 'redblue'], {}), "('redblue.png', redblue)\n", (574, 598), False, 'import cv2\n'), ((632, 648), 'copy.deepcopy', 'deepcopy', (['Icolor'], {}), '(Icolor)\n', (640, 648), False, 'from copy import deepcopy\n'), ((671, 710), 'cv2.imwrite', 'cv2.imwrite', (['"""greenblue.png"""', 'greenblue'], {}), "('greenblue.png', greenblue)\n", (682, 710), False, 'import cv2\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Oct 3 18:13:01 2019
@author: Funato
"""
# -*- coding: utf-8 -*-
"""
Created on Thu Aug 15 13:48:14 2019
@author: <NAME>
"""
#このプログラムは自由に使用、改変していただけますが、それにより生じた、またそれを利用したことにより生じたいかなる損害についても責任を負いません。
import numpy as np
import mnist
import matplotlib.pyplot as plt
train_img, train_label, test_img, test_label = mnist.load_mnist()
# 学習データの準備
train_img = np.asarray(train_img)
train_label = np.asarray(train_label)
test_img = np.asarray(test_img)
test_label = np.asarray(test_label)
learning_rate = 0.1 #学習係数の設定
number = 10000 #学習回数の設定
batch_size = 100 #
data_size = train_img.shape[0]
t_data_size = test_img.shape[0]
test_count = 1000 #テスト回数
test_frequency = 100 #テスト頻度
# ネットワークの構造の設定 全体は3層構造
input = train_img.shape[1] #784入力
hidden = 300 #中間層は100
output = train_label.shape[1] #5出力
W1 = 0.01 * np.random.randn(hidden, input) #荷重を??から??の範囲で乱数により初期化
W2 = 0.01 * np.random.randn(output, hidden)
b1 = np.zeros(hidden) #しきい値をすべて0で初期化
b2 = np.zeros(output)
test_acc_list = []
class Relu:
def forward(self, x):
return x * (x > 0)
def dash(self, x):
return 1 * (x > 0)
class sigmoid:
def forward(self, x): #シグモイド関数の定義
return 1 / (1 + np.exp(-x))
def dash(self, x): #シグモイド関数の微分の定義
return self.forward(x) * (1 - self.forward(x))
activation_func_1 = Relu()
def softmax(x): #ソフトマックス関数の定義
x = x.T
x_max = np.max(x, axis = 0)
exp_a = np.exp(x - x_max)
sum_exp_a = np.sum(exp_a, axis = 0)
return (exp_a / sum_exp_a).T
def network_operate(x, t):
delta_W1 = np.zeros((hidden, input))
delta_W2 = np.zeros((output, hidden))
delta_b1 = np.zeros(hidden)
delta_b2 = np.zeros(output)
X1 = (W1 @ x.T).T + b1
Z1 = activation_func_1.forward(X1) #2層目の出力の計算
#print(Z1.shape)
X2 = (W2 @ Z1.T).T + b2 #出力の計算
Z2 = softmax(X2)
# 逆方向の計算
delta_out = (Z2- t) / x.shape[0] #誤差の計算
delta_W2 = delta_out.T @ Z1 #2層目の荷重の修正量の計算
delta_b2 = np.sum(delta_out.T, axis = 1) #2層目のしきい値の修正量
delta_hidden = (delta_out @ W2) * activation_func_1.dash(X1) #誤差の逆伝搬
delta_W1 = delta_hidden.T @ x #1層目の荷重の修正量の計算
delta_b1 = np.sum(delta_hidden.T, axis = 1) #1層目のしきい値の修正量
return delta_W2, delta_b2, delta_W1, delta_b1
"""
# 荷重としきい値の修正量を格納する変数 すべて0で初期化
delta_W1 = np.zeros((hidden, input))
delta_W2 = np.zeros((output, hidden))
delta_b1 = np.zeros(hidden)
delta_b2 = np.zeros(output)
"""
for i in range(number + 1):
batch_mask = np.random.choice(data_size, batch_size)
data = train_img[batch_mask]
teach = train_label[batch_mask]
delta_W2, delta_b2, delta_W1, delta_b1 = network_operate(data, teach)
# 荷重としきい値の更新
W1 -= learning_rate * delta_W1
W2 -= learning_rate * delta_W2
b1 -= learning_rate * delta_b1
b2 -= learning_rate * delta_b2
#print(i)
if i % test_frequency == 0:
batch_mask = np.random.choice(t_data_size, test_count)
t_data = test_img[batch_mask]
t_teach = test_label[batch_mask]
X1 = (np.dot(W1, t_data.T)).T + b1
Z1 = activation_func_1.forward(X1) #2層目の出力の計算
#print(Z1.shape)
X2 = (np.dot(W2, Z1.T)).T + b2 #出力の計算
Z2 = softmax(X2)
y = np.argmax(Z2, axis = 1)
acc = np.sum(y == t_teach) / test_count * 100
test_acc_list.append(acc)
print(str(acc) + "%")
if number == i:
miss_list = [y != t_teach]
miss_img = t_data[miss_list]
for img in miss_img:
img = img.reshape((28, 28))
plt.imshow(img)
plt.gray()
plt.show()
# 結果の表示
x_len = np.arange(len(test_acc_list))
plt.plot(x_len, test_acc_list)
plt.show()
"""
print(Z2)
accuracy = -1* np.dot(teach[i], np.log(Z2)) #??精度??
#print(accuracy)
"""
| [
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.gray",
"numpy.random.choice",
"matplotlib.pyplot.plot",
"numpy.asarray",
"numpy.argmax",
"numpy.max",
"numpy.exp",
"numpy.sum",
"numpy.zeros",
"numpy.dot",
"mnist.load_mnist",
"numpy.random.randn",
"matplotlib.pyplot.show"
] | [((358, 376), 'mnist.load_mnist', 'mnist.load_mnist', ([], {}), '()\n', (374, 376), False, 'import mnist\n'), ((401, 422), 'numpy.asarray', 'np.asarray', (['train_img'], {}), '(train_img)\n', (411, 422), True, 'import numpy as np\n'), ((437, 460), 'numpy.asarray', 'np.asarray', (['train_label'], {}), '(train_label)\n', (447, 460), True, 'import numpy as np\n'), ((472, 492), 'numpy.asarray', 'np.asarray', (['test_img'], {}), '(test_img)\n', (482, 492), True, 'import numpy as np\n'), ((506, 528), 'numpy.asarray', 'np.asarray', (['test_label'], {}), '(test_label)\n', (516, 528), True, 'import numpy as np\n'), ((971, 987), 'numpy.zeros', 'np.zeros', (['hidden'], {}), '(hidden)\n', (979, 987), True, 'import numpy as np\n'), ((1011, 1027), 'numpy.zeros', 'np.zeros', (['output'], {}), '(output)\n', (1019, 1027), True, 'import numpy as np\n'), ((3870, 3900), 'matplotlib.pyplot.plot', 'plt.plot', (['x_len', 'test_acc_list'], {}), '(x_len, test_acc_list)\n', (3878, 3900), True, 'import matplotlib.pyplot as plt\n'), ((3901, 3911), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3909, 3911), True, 'import matplotlib.pyplot as plt\n'), ((865, 895), 'numpy.random.randn', 'np.random.randn', (['hidden', 'input'], {}), '(hidden, input)\n', (880, 895), True, 'import numpy as np\n'), ((934, 965), 'numpy.random.randn', 'np.random.randn', (['output', 'hidden'], {}), '(output, hidden)\n', (949, 965), True, 'import numpy as np\n'), ((1441, 1458), 'numpy.max', 'np.max', (['x'], {'axis': '(0)'}), '(x, axis=0)\n', (1447, 1458), True, 'import numpy as np\n'), ((1473, 1490), 'numpy.exp', 'np.exp', (['(x - x_max)'], {}), '(x - x_max)\n', (1479, 1490), True, 'import numpy as np\n'), ((1507, 1528), 'numpy.sum', 'np.sum', (['exp_a'], {'axis': '(0)'}), '(exp_a, axis=0)\n', (1513, 1528), True, 'import numpy as np\n'), ((1607, 1632), 'numpy.zeros', 'np.zeros', (['(hidden, input)'], {}), '((hidden, input))\n', (1615, 1632), True, 'import numpy as np\n'), ((1652, 1678), 'numpy.zeros', 'np.zeros', (['(output, hidden)'], {}), '((output, hidden))\n', (1660, 1678), True, 'import numpy as np\n'), ((1694, 1710), 'numpy.zeros', 'np.zeros', (['hidden'], {}), '(hidden)\n', (1702, 1710), True, 'import numpy as np\n'), ((1726, 1742), 'numpy.zeros', 'np.zeros', (['output'], {}), '(output)\n', (1734, 1742), True, 'import numpy as np\n'), ((2054, 2081), 'numpy.sum', 'np.sum', (['delta_out.T'], {'axis': '(1)'}), '(delta_out.T, axis=1)\n', (2060, 2081), True, 'import numpy as np\n'), ((2258, 2288), 'numpy.sum', 'np.sum', (['delta_hidden.T'], {'axis': '(1)'}), '(delta_hidden.T, axis=1)\n', (2264, 2288), True, 'import numpy as np\n'), ((2592, 2631), 'numpy.random.choice', 'np.random.choice', (['data_size', 'batch_size'], {}), '(data_size, batch_size)\n', (2608, 2631), True, 'import numpy as np\n'), ((3034, 3075), 'numpy.random.choice', 'np.random.choice', (['t_data_size', 'test_count'], {}), '(t_data_size, test_count)\n', (3050, 3075), True, 'import numpy as np\n'), ((3381, 3402), 'numpy.argmax', 'np.argmax', (['Z2'], {'axis': '(1)'}), '(Z2, axis=1)\n', (3390, 3402), True, 'import numpy as np\n'), ((1251, 1261), 'numpy.exp', 'np.exp', (['(-x)'], {}), '(-x)\n', (1257, 1261), True, 'import numpy as np\n'), ((3170, 3190), 'numpy.dot', 'np.dot', (['W1', 't_data.T'], {}), '(W1, t_data.T)\n', (3176, 3190), True, 'import numpy as np\n'), ((3307, 3323), 'numpy.dot', 'np.dot', (['W2', 'Z1.T'], {}), '(W2, Z1.T)\n', (3313, 3323), True, 'import numpy as np\n'), ((3428, 3448), 'numpy.sum', 'np.sum', (['(y == t_teach)'], {}), '(y == t_teach)\n', (3434, 3448), True, 'import numpy as np\n'), ((3744, 3759), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {}), '(img)\n', (3754, 3759), True, 'import matplotlib.pyplot as plt\n'), ((3776, 3786), 'matplotlib.pyplot.gray', 'plt.gray', ([], {}), '()\n', (3784, 3786), True, 'import matplotlib.pyplot as plt\n'), ((3803, 3813), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3811, 3813), True, 'import matplotlib.pyplot as plt\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import lostpet.models
import django_google_maps.fields
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Pet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=300)),
('species', models.CharField(max_length=3, choices=[(b'CAT', b'gato'), (b'DOG', b'cachorro')])),
('photo', models.ImageField(upload_to=lostpet.models.get_image_path)),
('address', django_google_maps.fields.AddressField(max_length=200)),
('geolocation', django_google_maps.fields.GeoLocationField(max_length=100)),
],
options={
},
bases=(models.Model,),
),
]
| [
"django.db.models.ImageField",
"django.db.models.AutoField",
"django.db.models.CharField"
] | [((350, 443), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (366, 443), False, 'from django.db import models, migrations\n'), ((467, 499), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (483, 499), False, 'from django.db import models, migrations\n'), ((530, 616), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(3)', 'choices': "[(b'CAT', b'gato'), (b'DOG', b'cachorro')]"}), "(max_length=3, choices=[(b'CAT', b'gato'), (b'DOG',\n b'cachorro')])\n", (546, 616), False, 'from django.db import models, migrations\n'), ((641, 699), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': 'lostpet.models.get_image_path'}), '(upload_to=lostpet.models.get_image_path)\n', (658, 699), False, 'from django.db import models, migrations\n')] |
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput
from .models import Review, Ticket, UserFollow
class TicketForm(ModelForm):
class Meta:
model = Ticket
exclude = ["user", "review_id"]
widgets = {
"title": TextInput(attrs={"class": "input"}),
"description": Textarea(attrs={"class": "textarea"}),
}
class ReviewForm(ModelForm):
class Meta:
model = Review
exclude = ["user", "ticket_id"]
widgets = {
"headline": TextInput(attrs={"class": "input"}),
"body": Textarea(attrs={"class": "textarea"}),
"rating": RadioSelect(
choices=[(0, "0"), (1, "1"), (2, "2"), (3, "3"), (4, "4"), (5, "5")]
),
}
class UserFollowForm(ModelForm):
class Meta:
model = UserFollow
exclude = ["user", "followed_user"]
widgets = {
"user_to_add": TextInput(
attrs={
"class": "input",
"type": "text",
"placeholder": "Enter user name",
}
),
}
class RegisterForm(UserCreationForm):
class Meta:
model = User
fields = ["username", "password1", "<PASSWORD>"]
widgets = {
"username": TextInput(
attrs={"class": "input", "type": "text", "placeholder": "Username"}
),
}
def __init__(self, *args, **kwargs):
super(RegisterForm, self).__init__(*args, **kwargs)
self.fields["password1"].widget = PasswordInput(
attrs={"class": "input", "type": "password", "placeholder": "Password"}
)
self.fields["password2"].widget = PasswordInput(
attrs={
"class": "input",
"type": "password",
"placeholder": "Password confirmation",
}
)
| [
"django.forms.RadioSelect",
"django.forms.Textarea",
"django.forms.PasswordInput",
"django.forms.TextInput"
] | [((1707, 1797), 'django.forms.PasswordInput', 'PasswordInput', ([], {'attrs': "{'class': 'input', 'type': 'password', 'placeholder': 'Password'}"}), "(attrs={'class': 'input', 'type': 'password', 'placeholder':\n 'Password'})\n", (1720, 1797), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((1858, 1961), 'django.forms.PasswordInput', 'PasswordInput', ([], {'attrs': "{'class': 'input', 'type': 'password', 'placeholder': 'Password confirmation'}"}), "(attrs={'class': 'input', 'type': 'password', 'placeholder':\n 'Password confirmation'})\n", (1871, 1961), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((382, 417), 'django.forms.TextInput', 'TextInput', ([], {'attrs': "{'class': 'input'}"}), "(attrs={'class': 'input'})\n", (391, 417), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((446, 483), 'django.forms.Textarea', 'Textarea', ([], {'attrs': "{'class': 'textarea'}"}), "(attrs={'class': 'textarea'})\n", (454, 483), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((649, 684), 'django.forms.TextInput', 'TextInput', ([], {'attrs': "{'class': 'input'}"}), "(attrs={'class': 'input'})\n", (658, 684), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((706, 743), 'django.forms.Textarea', 'Textarea', ([], {'attrs': "{'class': 'textarea'}"}), "(attrs={'class': 'textarea'})\n", (714, 743), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((767, 852), 'django.forms.RadioSelect', 'RadioSelect', ([], {'choices': "[(0, '0'), (1, '1'), (2, '2'), (3, '3'), (4, '4'), (5, '5')]"}), "(choices=[(0, '0'), (1, '1'), (2, '2'), (3, '3'), (4, '4'), (5,\n '5')])\n", (778, 852), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((1059, 1148), 'django.forms.TextInput', 'TextInput', ([], {'attrs': "{'class': 'input', 'type': 'text', 'placeholder': 'Enter user name'}"}), "(attrs={'class': 'input', 'type': 'text', 'placeholder':\n 'Enter user name'})\n", (1068, 1148), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n'), ((1443, 1521), 'django.forms.TextInput', 'TextInput', ([], {'attrs': "{'class': 'input', 'type': 'text', 'placeholder': 'Username'}"}), "(attrs={'class': 'input', 'type': 'text', 'placeholder': 'Username'})\n", (1452, 1521), False, 'from django.forms import ModelForm, PasswordInput, RadioSelect, Textarea, TextInput\n')] |
# -*- coding: utf-8 -*-
import numpy as np
from tensorflow.examples.tutorials.mnist import input_data
import pdb
def softmax(y):
'''
Input:
y = np.dot(x,theta): (m,n)*(n,10) = (m,10)
Output:
softmax(y): (m,10)
'''
exp_y = np.exp(y)
return exp_y / np.sum(exp_y,axis=1).reshape(-1,1)
def main():
# set params
lr = 0.2
nEpoch = 20
# load data
mnist = input_data.read_data_sets("MNIST_data/",one_hot=True)
x_all,y_all = mnist.train.images,mnist.train.labels
print("MNIST training images:",mnist.train.images.shape)
print("MNIST testing images:",mnist.test.images.shape)
# initialize weights
theta = np.random.randn(784,10) * 0.005
y_true = np.argmax(y_all,1)
for i in range(nEpoch):
x_train,y_train = mnist.train.next_batch(1000)
h = softmax(np.dot(x_train,theta)) # n_samples,10
grad = (-1/y_train.shape[0]) * np.dot(x_train.T,y_train-h)# 784,10
theta = theta - lr * grad # 784,10
pred = np.argmax(softmax(np.dot(x_all,theta)),1) # n_samples,1
acc = np.float32(pred==y_true).sum()/len(y_true)
print("{}: {}".format(i,acc))
pdb.set_trace()
if __name__ == "__main__":
main()
| [
"numpy.argmax",
"numpy.exp",
"tensorflow.examples.tutorials.mnist.input_data.read_data_sets",
"numpy.dot",
"numpy.sum",
"pdb.set_trace",
"numpy.random.randn",
"numpy.float32"
] | [((260, 269), 'numpy.exp', 'np.exp', (['y'], {}), '(y)\n', (266, 269), True, 'import numpy as np\n'), ((416, 470), 'tensorflow.examples.tutorials.mnist.input_data.read_data_sets', 'input_data.read_data_sets', (['"""MNIST_data/"""'], {'one_hot': '(True)'}), "('MNIST_data/', one_hot=True)\n", (441, 470), False, 'from tensorflow.examples.tutorials.mnist import input_data\n'), ((736, 755), 'numpy.argmax', 'np.argmax', (['y_all', '(1)'], {}), '(y_all, 1)\n', (745, 755), True, 'import numpy as np\n'), ((1209, 1224), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (1222, 1224), False, 'import pdb\n'), ((686, 710), 'numpy.random.randn', 'np.random.randn', (['(784)', '(10)'], {}), '(784, 10)\n', (701, 710), True, 'import numpy as np\n'), ((858, 880), 'numpy.dot', 'np.dot', (['x_train', 'theta'], {}), '(x_train, theta)\n', (864, 880), True, 'import numpy as np\n'), ((944, 974), 'numpy.dot', 'np.dot', (['x_train.T', '(y_train - h)'], {}), '(x_train.T, y_train - h)\n', (950, 974), True, 'import numpy as np\n'), ((289, 310), 'numpy.sum', 'np.sum', (['exp_y'], {'axis': '(1)'}), '(exp_y, axis=1)\n', (295, 310), True, 'import numpy as np\n'), ((1066, 1086), 'numpy.dot', 'np.dot', (['x_all', 'theta'], {}), '(x_all, theta)\n', (1072, 1086), True, 'import numpy as np\n'), ((1118, 1144), 'numpy.float32', 'np.float32', (['(pred == y_true)'], {}), '(pred == y_true)\n', (1128, 1144), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 2 23:47:34 2021
Based on:
bot template by magnitopic at:
https://github.com/magnitopic/YouTubeCode/blob/master/Python/TelegramBots/TelegramBot.py
@author: Pablo
"""
# -*- coding: UTF8 -*-
import requests
import datetime
# For debugging: knowing when bot was on and making a fancy tree out of requests
import datetime
import json
# Back end
from funcion_resumen_texto import resumelo
from pdf_a_texto_modulo import pdf_a_texto
from txt_a_pdf import escribe_pdf
import os
#import funcion_resumen_texto as fr
import re
#%%
# Bot handling
class BotHandler:
def __init__(self, token):
self.token = token
self.api_url = "https://api.telegram.org/bot{}/".format(token)
self.file_api_url = "https://api.telegram.org/file/bot{}/".format(token)
#url = "https://api.telegram.org/bot<token>/"
def get_updates(self, offset=0, timeout=30):
method = 'getUpdates'
params = {'timeout': timeout, 'offset': offset}
resp = requests.get(self.api_url + method, params)
#print('\n\nPREVIO: \n\n',resp)
result_json = resp.json()['result']
return result_json
def send_message(self, chat_id, text):
params = {'chat_id': chat_id, 'text': text, 'parse_mode': 'HTML'}
method = 'sendMessage'
resp = requests.post(self.api_url + method, params)
return resp
def get_first_update(self):
get_result = self.get_updates()
if len(get_result) > 0:
last_update = get_result[0]
else:
last_update = None
return last_update
# New method
def get_pdf_path(self, file_id):
method = 'getFile'
params = {'file_id': file_id}
resp = requests.get(self.api_url + method, params)
#print('\n\nPREVIO: \n\n',resp)
result_json = resp.json()['result']
if 'file_path' not in result_json:
return None
else:
return result_json['file_path']
def get_pdf_file(self,file_path):
#print(self.file_api_url + file_path)
#resp = requests.get(self.file_api_url + file_path)
return self.file_api_url + file_path
"""
TODO
AQUIIIIIIIIIIIIIIIIIIII
"""
def upload_pdf(self,chat_id,pdf):
pass
#method = 'sendPhoto'
#params = {'chat_id':chat_id,'file_id': file_id}
#resp = requests.post(self.api_url + method, params)
def get_pdf_file_old(self,file_path):
#print(self.file_api_url + file_path)
resp = requests.get(self.file_api_url + file_path)
return resp.text
#%%
# Get information from the update
def get_chat_text(update):
if 'text' not in update['message']:
return 'No habia texto!'
else:
return update['message']['text']
def get_chat_name(update):
if 'first_name' in update['message']:
return update['message']['chat']['first_name']
elif 'new_chat_member' in update['message']:
return update['message']['new_chat_member']['username']
elif 'from' in update['message']:
return update['message']['from']['first_name']
else:
return "usuario desconocido"
def get_pdf_id(update):
if 'document' in update['message']:
#print('hay document')
if update['message']['document']['mime_type'] == "application/pdf":
return update['message']['document']['file_id']
else:
return None
def get_caption(update):
if 'caption' not in update['message']:
return None
else:
return update['message']['caption']
def get_pdf_name(update):
if 'file_name' not in update['message']['document']:
return None
else:
return update['message']['document']['file_name']
#%%
# Bot service
token = 'putYourOwnBotTokenHere' #Given by botfather
resumelo_bot = BotHandler(token) #Your bot's name
def main():
hora_actual = str(datetime.datetime.now())
print("Sesion abierta a las: "+hora_actual)
log = open("log.txt", "a")
log.write("\n\nSesion nueva: "+hora_actual)
new_offset = 0
while True:
"""
Keeps looking for all updates left unanswered at its url.
As long as there are updates, it answers them
"""
all_updates=resumelo_bot.get_updates(new_offset)
if len(all_updates) > 0:
for update in all_updates:
"""
Code for processing each update
"""
# Logs current update
info_consulta = json.dumps(update, indent = 6)
log.write(str(info_consulta)+"\n\n")
print('Consulta:\n',info_consulta,'\n\n')
# Gets update information
update_id = update['update_id']
chat_id = update['message']['chat']['id']
chat_text = get_chat_text(update)
req_chat_name = get_chat_name(update)
pdf_id = get_pdf_id(update)
# If user sent a pdf file
if pdf_id is not None:
# We get it
pdf_path = resumelo_bot.get_pdf_path(pdf_id)
if pdf_path is not None:
pdf_full_path = resumelo_bot.get_pdf_file(pdf_path)
idioma = get_caption(update)
nombre_pdf = str(get_pdf_name(update))
if idioma != 'eng' and idioma != 'spa':
idioma = 'spa' # Default language
#Guardo una copia del pdf en el ordenador
print('Guardando una copia del pdf en el pc')
log.write('\nGuardando copia de'+pdf_full_path+' en pdfs/'+nombre_pdf+'\n')
copia_del_pdf = requests.get(pdf_full_path)
with open("pdfs/"+nombre_pdf+".pdf", 'wb') as f:
f.write(copia_del_pdf.content)
#f.close()
# Log that pdf file was received
idioma_log = 'español' if idioma == 'spa' else 'inglés'
print('Pasando a texto ',pdf_path,' en ',idioma_log)
log.write('\nPasando a texto '+pdf_path+' en '+idioma_log+'\n')
# Extract text with OCR and summarize it
resumelo_bot.send_message(chat_id, 'Extrayendo texto en '+idioma_log+' de '+nombre_pdf+'.')
texto = pdf_a_texto(pdf_full_path,idioma)
#Guardo una copia del texto en el ordenador
print('Guardando una copia del texto en el pc')
log.write('\nGuardando copia del texto en un txt')
with open("pdfs/"+nombre_pdf+" (texto).txt", 'w') as f:
f.writelines(texto)
f.close()
resumelo_bot.send_message(chat_id, 'Resumiendo texto.')
resumen = str(resumelo(texto)).replace("\n\n","\n")
#Guardo una copia del resumen en el ordenador
print('Guardando una copia del resumen en el pc')
log.write('\nGuardando copia del resumen en un txt')
with open("pdfs/"+nombre_pdf+" (resumen).txt", 'w') as f:
f.writelines(resumen)
f.close()
"""
#Mando resumen en trozos por debajo del limite de tamaño de la API de telegram
resumelo_bot.send_message(chat_id,'Resumen: \n')
def chunkstring(string, length):
return (string[0+i:length+i] for i in range(0, len(string), length))
for chunk in chunkstring(resumen, 3000):
resumelo_bot.send_message(chat_id, str(chunk))
#print(chunk)
resumelo_bot.send_message(chat_id, 'Resumen: \n\n'+resumen+'\n\n Un placer ayudarte, '+req_chat_name+'\n Algo más?')
#resumelo_bot.send_message(chat_id, '\nUn placer ayudarte, '+req_chat_name+'\n Algo más?')
"""
#Devuelvo el PDF:
#resumelo_bot.send_message(chat_id, '\nAhora te lo paso resumido en pdf:\n\n')
#Genero el PDF
path_pdf = "./out/"+nombre_pdf
#Elimino caracteres chungos del resumen y titulo
#resumen.decode('utf-8','ignore').encode("utf-8")
#path_pdf.decode('utf-8','ignore').encode("utf-8")
escribe_pdf(resumen,path_pdf)
#Abro el mismo pdf para obtener sus bytes:
f = open(path_pdf.replace(".pdf","")+" (resumen).pdf",'rb')
bytes_pdf = f.read()
f.close()
#response = {'document':(f.name,bytes_pdf)}
#method_name = 'sendDocument'
url2 = "https://api.telegram.org/bot"+str(token)+"/sendDocument?chat_id=" + str(chat_id)
#resp = requests.post(url=url2,files={'document':bytes_pdf})
nombre_pdf = str(nombre_pdf.replace(".pdf","")+" (resumen).pdf")
print(nombre_pdf)
resp = requests.post(url=url2,files={'document':(nombre_pdf,bytes_pdf)})
print('responde: ',resp)
resumelo_bot.send_message(chat_id, '\nUn placer.\n\n')
#params = {'chat_id': chat_id, 'text': text, 'parse_mode': 'HTML'}
#method = 'sendMessage'
#resp = requests.post(url2 + method_name, params)
#Lo guardo
#Lo subo a telegram con direccion local del pdf
#status = requests.post("https://api.telegram.org/bot"+str(token)+"/sendDocument?chat_id=" + str(chat_id), files=pdf_resumen)
#status = requests.post("https://api.telegram.org/bot"+str(token)+"/sendDocument?chat_id=" + str(chat_id), files=pdf_resumen)
#print('Al subirlo me dice: ',status,chat_id)
#bot.sendDocument(chat_id=chat_id, document=open(file, 'rb'))
#Lo descargo
#TODO: Mando algo cualquiera
print('FIN')
new_offset = update_id + 1
else:
print('Error leyendo ruta pdf')
resumelo_bot.send_message(chat_id, 'Lo siento, no pude encontrar tu archivo!')
new_offset = update_id + 1
# Paso a siguiente consulta
# Otherwise we answer text updates
else:
# Mensaje inicial
if chat_text == '/start':
resumelo_bot.send_message(chat_id, 'Encantado de verte, '+req_chat_name+'.\n¿Qué quieres que te resuma?')
new_offset = update_id + 1
# Easter egg
elif chat_text == 'lasaña':
resumelo_bot.send_message(chat_id, 'Has descubierto mi easter egg, ' + req_chat_name+'.\nLa lasaña me gusta mucho')
new_offset = update_id + 1
# Warning de texto breve
elif len(chat_text) < 20:
resumelo_bot.send_message(chat_id, 'Por favor, mándame algo más largo.\nNo acostumbro resumir textos de menos de 20 caracteres.')
new_offset = update_id + 1
# Resumen de texto
else:
resumen = str(resumelo(chat_text))
resumen = re.sub(r'[^\x00-\x7f]',r'', resumen)
#titulo = re.sub(r'[^\x00-\x7f]',r'', titulo)
print('\nResumen: ',resumen)
log.write('\nResumen: '+resumen)
resumelo_bot.send_message(chat_id, 'Resumen: \n\n'+resumen+'\n\n Un placer ayudarte, '+req_chat_name+'\n Algo más?')
new_offset = update_id + 1
if __name__ == '__main__':
try:
print('Iniciando resumelobot.')
main()
except KeyboardInterrupt:
print('Saliendo de resumelobot')
exit()
| [
"requests.post",
"json.dumps",
"requests.get",
"txt_a_pdf.escribe_pdf",
"funcion_resumen_texto.resumelo",
"datetime.datetime.now",
"re.sub",
"pdf_a_texto_modulo.pdf_a_texto"
] | [((1048, 1091), 'requests.get', 'requests.get', (['(self.api_url + method)', 'params'], {}), '(self.api_url + method, params)\n', (1060, 1091), False, 'import requests\n'), ((1367, 1411), 'requests.post', 'requests.post', (['(self.api_url + method)', 'params'], {}), '(self.api_url + method, params)\n', (1380, 1411), False, 'import requests\n'), ((1786, 1829), 'requests.get', 'requests.get', (['(self.api_url + method)', 'params'], {}), '(self.api_url + method, params)\n', (1798, 1829), False, 'import requests\n'), ((2623, 2666), 'requests.get', 'requests.get', (['(self.file_api_url + file_path)'], {}), '(self.file_api_url + file_path)\n', (2635, 2666), False, 'import requests\n'), ((4071, 4094), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (4092, 4094), False, 'import datetime\n'), ((4765, 4793), 'json.dumps', 'json.dumps', (['update'], {'indent': '(6)'}), '(update, indent=6)\n', (4775, 4793), False, 'import json\n'), ((6189, 6216), 'requests.get', 'requests.get', (['pdf_full_path'], {}), '(pdf_full_path)\n', (6201, 6216), False, 'import requests\n'), ((6978, 7012), 'pdf_a_texto_modulo.pdf_a_texto', 'pdf_a_texto', (['pdf_full_path', 'idioma'], {}), '(pdf_full_path, idioma)\n', (6989, 7012), False, 'from pdf_a_texto_modulo import pdf_a_texto\n'), ((9682, 9712), 'txt_a_pdf.escribe_pdf', 'escribe_pdf', (['resumen', 'path_pdf'], {}), '(resumen, path_pdf)\n', (9693, 9712), False, 'from txt_a_pdf import escribe_pdf\n'), ((10545, 10613), 'requests.post', 'requests.post', ([], {'url': 'url2', 'files': "{'document': (nombre_pdf, bytes_pdf)}"}), "(url=url2, files={'document': (nombre_pdf, bytes_pdf)})\n", (10558, 10613), False, 'import requests\n'), ((13668, 13705), 're.sub', 're.sub', (['"""[^\\\\x00-\\\\x7f]"""', '""""""', 'resumen'], {}), "('[^\\\\x00-\\\\x7f]', '', resumen)\n", (13674, 13705), False, 'import re\n'), ((7541, 7556), 'funcion_resumen_texto.resumelo', 'resumelo', (['texto'], {}), '(texto)\n', (7549, 7556), False, 'from funcion_resumen_texto import resumelo\n'), ((13588, 13607), 'funcion_resumen_texto.resumelo', 'resumelo', (['chat_text'], {}), '(chat_text)\n', (13596, 13607), False, 'from funcion_resumen_texto import resumelo\n')] |
import numpy as np
from numpy.core.arrayprint import printoptions
import pandas as pd
import matplotlib.pyplot as plt
import time
from cyberbrain import trace
ALPHA = 0.1
GAMMA = 0.95
EPSILION = 0.9
N_STATE = 6
ACTIONS = ['left', 'right']
MAX_EPISODES = 20
FRESH_TIME = 0.1
def build_q_table(n_states, actions):
q_table = pd.DataFrame(
np.zeros((n_states, len(actions))),
np.arange(n_states),
actions
)
print(q_table)
return q_table
@trace
def choose_action(state, q_table):
state_actions = q_table.loc[state, :]
if (np.random.uniform() > EPSILION) or (state_actions==0).all():
action_name = np.random.choice(ACTIONS)
else:
action_name = state_actions.idxmax()
return action_name
def get_env_feedback(state, action):
if action == "right":
if state == N_STATE - 2:
next_state = "terminal"
reward = 1
else:
next_state = state + 1
reward = -0.5
else:
if state == 0:
next_state = 0
else:
next_state = state - 1
reward = -0.5
return next_state, reward
def update_env(state,episode, step_counter):
env = ['-'] *(N_STATE-1)+['T']
if state =='terminal':
print("Episode {}, the total step is {}".format(episode+1, step_counter))
final_env = ['-'] *(N_STATE-1)+['T']
return True, step_counter
else:
env[state]='*'
env = ''.join(env)
print(env)
time.sleep(FRESH_TIME)
return False, step_counter
def sarsa_learning():
q_table = build_q_table(N_STATE, ACTIONS)
step_counter_times = []
for episode in range(MAX_EPISODES):
state = 0
is_terminal = False
step_counter = 0
update_env(state, episode, step_counter)
while not is_terminal:
action = choose_action(state, q_table) # epsilon greedy
q_predict = q_table.loc[state, action]
next_state, reward = get_env_feedback(state, action)
if next_state != 'terminal':
next_action = choose_action(next_state, q_table) #sarsa update method
q_target = reward + GAMMA * q_table.loc[next_state, next_action]
q_table.loc[state, action] += ALPHA * (q_target - q_predict)
else:
next_action = action
is_terminal = True
state = next_state
is_terminal,steps = update_env(state, episode, step_counter+1)
step_counter+=1
if is_terminal:
step_counter_times.append(steps)
return q_table, step_counter_times
if __name__ == '__main__':
q_table, step_counter_times = sarsa_learning()
print(f"Q table \n {q_table}\n")
print("end")
plt.plot(step_counter_times, 'g-')
plt.ylabel("steps")
plt.show()
print(f"The step counter_times is {step_counter_times}") | [
"matplotlib.pyplot.ylabel",
"numpy.random.choice",
"matplotlib.pyplot.plot",
"time.sleep",
"numpy.random.uniform",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((2840, 2874), 'matplotlib.pyplot.plot', 'plt.plot', (['step_counter_times', '"""g-"""'], {}), "(step_counter_times, 'g-')\n", (2848, 2874), True, 'import matplotlib.pyplot as plt\n'), ((2879, 2898), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""steps"""'], {}), "('steps')\n", (2889, 2898), True, 'import matplotlib.pyplot as plt\n'), ((2903, 2913), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2911, 2913), True, 'import matplotlib.pyplot as plt\n'), ((395, 414), 'numpy.arange', 'np.arange', (['n_states'], {}), '(n_states)\n', (404, 414), True, 'import numpy as np\n'), ((651, 676), 'numpy.random.choice', 'np.random.choice', (['ACTIONS'], {}), '(ACTIONS)\n', (667, 676), True, 'import numpy as np\n'), ((1509, 1531), 'time.sleep', 'time.sleep', (['FRESH_TIME'], {}), '(FRESH_TIME)\n', (1519, 1531), False, 'import time\n'), ((568, 587), 'numpy.random.uniform', 'np.random.uniform', ([], {}), '()\n', (585, 587), True, 'import numpy as np\n')] |
from collections import defaultdict
import argparse
# Pro tip: To copy the results of this script from the terminal in Mac OS,
# use command-alt-shift-c. That'll copy the tabs as tabs, not spaces.
def get_strings(path, row_filter=None):
''' Extract tab-delimited results in a fixed order for each run in a results.tsv file.
Arguments:
path: Path to a results.tsv file.
row_filter: Only return strings matching the specified run name. Also removes the name prefix.
'''
strings = []
with open(path) as f:
for line in f:
if "mnli-diagnostic" not in line or "micro_avg" not in line:
continue
if row_filter and row_filter not in line:
continue
#name, results = line.strip().split(None, 1)
results = line.strip()#.split(None, 1)
coarse = {}
fine = defaultdict(dict)
if row_filter:
outstr = ""
else:
#outstr = name + "\t"
outstr = ""
for result in results.split(', '):
dataset, value = result.split(': ')
value = float(value) * 100
if 'accuracy' in dataset or 'mnli-diagnostic' not in dataset or dataset == "":
continue
subset = dataset.split('mnli-diagnostic_', 1)[1]
sp = subset.split('__')
if len(sp) == 1 or sp[1] == 'missing':
coarse[sp[0]] = value
else:
fine[sp[0]][sp[1]] = value
for key in sorted(coarse.keys()):
outstr += "%.02f " % coarse[key]
for key in sorted(fine.keys()):
for inner_key in sorted(fine[key].keys()):
outstr += "%.02f " % fine[key][inner_key]
strings.append(outstr)
return strings
def get_args():
parser = argparse.ArgumentParser(description='Extract GLUE results from log files.')
parser.add_argument('log_files', type=str, nargs='+',
help='One or more tsv files to parse. Files are seperated by white space')
args = parser.parse_args()
return args
if __name__ == '__main__':
args = get_args()
for path in args.log_files:
#try:
#print(path)
strings = get_strings(path)
for outstr in strings:
print(outstr)
#except BaseException as e:
# print("Error:", e, path)
| [
"collections.defaultdict",
"argparse.ArgumentParser"
] | [((1934, 2009), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Extract GLUE results from log files."""'}), "(description='Extract GLUE results from log files.')\n", (1957, 2009), False, 'import argparse\n'), ((892, 909), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (903, 909), False, 'from collections import defaultdict\n')] |
# -*- coding: utf-8 -*-
'''
name: downloaderMiddleWares.py
usage: --
author: [[
date: 2018-02-19 20:44:49
version: 1.0
Env.: Python 3.6.4, WIN 10
'''
import random
class HttpProxyMiddleware(object):
proxy_pool = [
'http://192.168.3.11:30944',
'http://172.16.58.3:26979',
'http://192.168.127.12:42877',
'http://172.16.17.32:39897',
'http://192.168.3.11:44047',
'http://172.16.58.3:30277',
'http://192.168.3.11:42031',
'http://172.16.31.10:47797',
'http://172.16.31.10:27212',
'http://172.16.17.32:40259'
]
def process_request(self, request, spider):
# request.meta['proxy'] = "http://127.0.0.1:8118"
request.meta['proxy'] = random.choice(self.proxy_pool)
| [
"random.choice"
] | [((800, 830), 'random.choice', 'random.choice', (['self.proxy_pool'], {}), '(self.proxy_pool)\n', (813, 830), False, 'import random\n')] |
import curses
import logging
import traceback
from curses import ascii
from assertEquals.interactive.utils import ScrollArea, format_tb
from assertEquals.interactive.screens.base import BaseScreen
logger = logging.getLogger('assertEquals.screens.error')
class ErrorScreen(BaseScreen):
"""Display a traceback within curses.
"""
def __init__(self, screen, traceback_):
"""Takes the screen where the error occured and the traceback.
"""
self.screen = screen
self.colors = screen.colors
self.blocks = screen.blocks
self.traceback_ = traceback_
self.win = self.screen.win
self.win.clear()
self.win.refresh()
# BaseScreen contracts
# ====================
ui_chars = ( ord('q')
, curses.KEY_UP
, curses.KEY_DOWN
, curses.KEY_LEFT
, curses.KEY_PPAGE
, curses.KEY_NPAGE
, curses.KEY_BACKSPACE
)
def resize(self):
try:
self.lines = format_tb(self.W-1, self.traceback_)
self.area = ScrollArea(self.H, len(self.lines), 0)
self.draw()
except:
logger.critical(traceback.format_exc())
def react(self, c):
try:
if c in ( ord('q')
, curses.KEY_BACKSPACE
, ascii.BS
, ascii.ESC
, curses.KEY_LEFT
):
return self.screen
elif c == curses.KEY_UP: # up
self.area.move_cursor(0)
self.area.scroll(-1)
elif c == curses.KEY_DOWN: # down
self.area.move_cursor(self.area.numrows-1)
self.area.scroll(1)
elif c == curses.KEY_PPAGE: # page up
self.area.page_up()
elif c == curses.KEY_NPAGE: # page down
self.area.page_down()
self.draw()
except:
logger.critical(traceback.format_exc())
# Writes
# ======
def draw(self):
# Clear the screen and then draw our rows.
# ========================================
self.win.clear()
self.win.refresh()
for index, rownum in self.area:
self.win.addstr(rownum,0,self.lines[index])
# Continuation indicators
# =======================
color = self.colors.BLUE
if self.area.start > 0:
c = curses.ACS_UARROW
else:
c = ord(' ')
self.win.addch(0,self.W,c,color)
if self.area.end_ < self.area.numitems:
c = curses.ACS_LANTERN
else:
c = ord(' ')
self.win.addch(self.H-1,self.W,c,color)
# Commit
# ======
self.win.refresh()
| [
"logging.getLogger",
"traceback.format_exc",
"assertEquals.interactive.utils.format_tb"
] | [((209, 256), 'logging.getLogger', 'logging.getLogger', (['"""assertEquals.screens.error"""'], {}), "('assertEquals.screens.error')\n", (226, 256), False, 'import logging\n'), ((1058, 1096), 'assertEquals.interactive.utils.format_tb', 'format_tb', (['(self.W - 1)', 'self.traceback_'], {}), '(self.W - 1, self.traceback_)\n', (1067, 1096), False, 'from assertEquals.interactive.utils import ScrollArea, format_tb\n'), ((1226, 1248), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (1246, 1248), False, 'import traceback\n'), ((2031, 2053), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (2051, 2053), False, 'import traceback\n')] |
#!/usr/bin/env python
import sys, traceback
import cv2
import numpy as np
import argparse
import string
import plantcv as pcv
### Parse command-line arguments
def options():
parser = argparse.ArgumentParser(description="Imaging processing with opencv")
parser.add_argument("-i", "--image", help="Input image file.", required=True)
parser.add_argument("-m", "--roi", help="Input region of interest file.", required=False)
parser.add_argument("-o", "--outdir", help="Output directory for image files.", required=True)
parser.add_argument("-D", "--debug", help="Turn on debug, prints intermediate images.", action="store_true")
args = parser.parse_args()
return args
### Main pipeline
def main():
# Get options
args = options()
# Read image
img, path, filename = pcv.readimage(args.image)
#roi = cv2.imread(args.roi)
# Pipeline step
device = 0
# Convert RGB to HSV and extract the Saturation channel
device, s = pcv.rgb2gray_hsv(img, 's', device, args.debug)
# Threshold the Saturation image
device, s_thresh = pcv.binary_threshold(s, 35, 255, 'light', device, args.debug)
# Median Filter
device, s_mblur = pcv.median_blur(s_thresh, 5, device, args.debug)
#
# Apply Mask (for vis images, mask_color=white)
device, masked = pcv.apply_mask(img, s_mblur, 'white', device, args.debug)
#
# Convert RGB to LAB and extract the Green-Magenta and Blue-Yellow channels
device, masked_b = pcv.rgb2gray_lab(masked, 'b', device, args.debug)
#
# Threshold the green-magenta and blue images
device, maskedb_thresh = pcv.binary_threshold(masked_b, 128, 255, 'light', device, args.debug)
device, maskedb_cnt = pcv.binary_threshold(masked_b, 128, 255, 'light', device, args.debug)
#
# # Fill small objects
device, ab_fill = pcv.fill(maskedb_thresh, maskedb_cnt, 200, device, args.debug)
# Apply mask (for vis images, mask_color=white)
device, masked2 = pcv.apply_mask(masked, ab_fill, 'white', device, args.debug)
# Select area with black bars and find overlapping plant material
device, roi1, roi_hierarchy1= pcv.define_roi(masked2,'rectangle', device, None, 'default', args.debug,True, 0, 0,-1700,0)
device, id_objects1,obj_hierarchy1 = pcv.find_objects(masked2, ab_fill, device, args.debug)
device,roi_objects1, hierarchy1, kept_mask1, obj_area1 = pcv.roi_objects(masked2,'cutto',roi1,roi_hierarchy1,id_objects1,obj_hierarchy1,device, args.debug)
device, masked3 = pcv.apply_mask(masked2, kept_mask1, 'white', device, args.debug)
device, masked_a1 = pcv.rgb2gray_lab(masked3, 'a', device, args.debug)
device, masked_b1 = pcv.rgb2gray_lab(masked3, 'b', device, args.debug)
device, maskeda_thresh1 = pcv.binary_threshold(masked_a1, 122, 255, 'dark', device, args.debug)
device, maskedb_thresh1 = pcv.binary_threshold(masked_b1, 170, 255, 'light', device, args.debug)
device, ab1 = pcv.logical_or(maskeda_thresh1, maskedb_thresh1, device, args.debug)
device, ab_cnt1 = pcv.logical_or(maskeda_thresh1, maskedb_thresh1, device, args.debug)
device, ab_fill1 = pcv.fill(ab1, ab_cnt1, 300, device, args.debug)
device, roi2, roi_hierarchy2= pcv.define_roi(masked2,'rectangle', device, None, 'default', args.debug,True, 1700, 0,0,0)
device, id_objects2,obj_hierarchy2 = pcv.find_objects(masked2, ab_fill, device, args.debug)
device,roi_objects2, hierarchy2, kept_mask2, obj_area2 = pcv.roi_objects(masked2,'cutto',roi2,roi_hierarchy2,id_objects2,obj_hierarchy2,device, args.debug)
device, masked4 = pcv.apply_mask(masked2, kept_mask2, 'white', device, args.debug)
device, masked_a2 = pcv.rgb2gray_lab(masked4, 'a', device, args.debug)
device, masked_b2 = pcv.rgb2gray_lab(masked4, 'b', device, args.debug)
device, maskeda_thresh2 = pcv.binary_threshold(masked_a2, 122, 255, 'dark', device, args.debug)
device, maskedb_thresh2 = pcv.binary_threshold(masked_b2, 170, 255, 'light', device, args.debug)
device, ab2 = pcv.logical_or(maskeda_thresh2, maskedb_thresh2, device, args.debug)
device, ab_cnt2 = pcv.logical_or(maskeda_thresh2, maskedb_thresh2, device, args.debug)
device, ab_fill2 = pcv.fill(ab2, ab_cnt2, 200, device, args.debug)
device, ab_cnt3 = pcv.logical_or(ab_fill1, ab_fill2, device, args.debug)
device, masked3 = pcv.apply_mask(masked2, ab_cnt3, 'white', device, args.debug)
# Identify objects
device, id_objects3,obj_hierarchy3 = pcv.find_objects(masked2, ab_fill, device, args.debug)
# Define ROI
device, roi3, roi_hierarchy3= pcv.define_roi(masked2,'rectangle', device, None, 'default', args.debug,True, 650, 0,-650,-120)
# Decide which objects to keep and combine with objects overlapping with black bars
device,roi_objects3, hierarchy3, kept_mask3, obj_area1 = pcv.roi_objects(img,'cutto',roi3,roi_hierarchy3,id_objects3,obj_hierarchy3,device, args.debug)
device, kept_mask4 = pcv.logical_or(ab_cnt3, kept_mask3, device, args.debug)
device, masked5 = pcv.apply_mask(masked2, kept_mask4, 'white', device, args.debug)
device, masked5_a = pcv.rgb2gray_lab(masked5, 'a', device, args.debug)
device, masked5_a_thresh = pcv.binary_threshold(masked5_a, 130, 255, 'light', device, args.debug)
device, masked5_a_cnt = pcv.binary_threshold(masked5_a, 130, 255, 'light', device, args.debug)
device, masked5_a_fill = pcv.fill(masked5_a_thresh, masked5_a_cnt, 200, device, args.debug)
device, masked5_mblur = pcv.median_blur(masked5_a_fill, 7, device, args.debug)
device, id_objects4,obj_hierarchy4 = pcv.find_objects(masked5, masked5_mblur, device, args.debug)
device, roi4, roi_hierarchy4= pcv.define_roi(masked2,'rectangle', device, None, 'default', args.debug,False, 0, 0,0,0)
device,roi_objects4, hierarchy4, kept_mask4, obj_area = pcv.roi_objects(img,'partial',roi4,roi_hierarchy4,id_objects4,obj_hierarchy4,device, args.debug)
# Object combine kept objects
device, obj, mask = pcv.object_composition(img, roi_objects4, hierarchy4, device, args.debug)
############## Analysis ################
# Find shape properties, output shape image (optional)
device, shape_header,shape_data,shape_img = pcv.analyze_object(img, args.image, obj, mask, device,args.debug,args.outdir+'/'+filename)
# Shape properties relative to user boundary line (optional)
device, boundary_header,boundary_data, boundary_img1= pcv.analyze_bound(img, args.image,obj, mask, 270, device,args.debug,args.outdir+'/'+filename)
# Determine color properties: Histograms, Color Slices and Pseudocolored Images, output color analyzed images (optional)
device, color_header,color_data,norm_slice= pcv.analyze_color(img, args.image, mask, 256, device, args.debug,'all','rgb','v','img',300,args.outdir+'/'+filename)
# Output shape and color data
pcv.print_results(args.image, shape_header, shape_data)
pcv.print_results(args.image, color_header, color_data)
pcv.print_results(args.image, boundary_header, boundary_data)
if __name__ == '__main__':
main()#!/usr/bin/env python
| [
"plantcv.analyze_color",
"plantcv.find_objects",
"plantcv.binary_threshold",
"plantcv.rgb2gray_hsv",
"plantcv.readimage",
"argparse.ArgumentParser",
"plantcv.fill",
"plantcv.logical_or",
"plantcv.analyze_object",
"plantcv.rgb2gray_lab",
"plantcv.median_blur",
"plantcv.apply_mask",
"plantcv.roi_objects",
"plantcv.analyze_bound",
"plantcv.object_composition",
"plantcv.print_results",
"plantcv.define_roi"
] | [((186, 255), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Imaging processing with opencv"""'}), "(description='Imaging processing with opencv')\n", (209, 255), False, 'import argparse\n'), ((787, 812), 'plantcv.readimage', 'pcv.readimage', (['args.image'], {}), '(args.image)\n', (800, 812), True, 'import plantcv as pcv\n'), ((950, 996), 'plantcv.rgb2gray_hsv', 'pcv.rgb2gray_hsv', (['img', '"""s"""', 'device', 'args.debug'], {}), "(img, 's', device, args.debug)\n", (966, 996), True, 'import plantcv as pcv\n'), ((1056, 1117), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['s', '(35)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(s, 35, 255, 'light', device, args.debug)\n", (1076, 1117), True, 'import plantcv as pcv\n'), ((1159, 1207), 'plantcv.median_blur', 'pcv.median_blur', (['s_thresh', '(5)', 'device', 'args.debug'], {}), '(s_thresh, 5, device, args.debug)\n', (1174, 1207), True, 'import plantcv as pcv\n'), ((1281, 1338), 'plantcv.apply_mask', 'pcv.apply_mask', (['img', 's_mblur', '"""white"""', 'device', 'args.debug'], {}), "(img, s_mblur, 'white', device, args.debug)\n", (1295, 1338), True, 'import plantcv as pcv\n'), ((1442, 1491), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked', '"""b"""', 'device', 'args.debug'], {}), "(masked, 'b', device, args.debug)\n", (1458, 1491), True, 'import plantcv as pcv\n'), ((1571, 1640), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_b', '(128)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked_b, 128, 255, 'light', device, args.debug)\n", (1591, 1640), True, 'import plantcv as pcv\n'), ((1665, 1734), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_b', '(128)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked_b, 128, 255, 'light', device, args.debug)\n", (1685, 1734), True, 'import plantcv as pcv\n'), ((1783, 1845), 'plantcv.fill', 'pcv.fill', (['maskedb_thresh', 'maskedb_cnt', '(200)', 'device', 'args.debug'], {}), '(maskedb_thresh, maskedb_cnt, 200, device, args.debug)\n', (1791, 1845), True, 'import plantcv as pcv\n'), ((1919, 1979), 'plantcv.apply_mask', 'pcv.apply_mask', (['masked', 'ab_fill', '"""white"""', 'device', 'args.debug'], {}), "(masked, ab_fill, 'white', device, args.debug)\n", (1933, 1979), True, 'import plantcv as pcv\n'), ((2083, 2183), 'plantcv.define_roi', 'pcv.define_roi', (['masked2', '"""rectangle"""', 'device', 'None', '"""default"""', 'args.debug', '(True)', '(0)', '(0)', '(-1700)', '(0)'], {}), "(masked2, 'rectangle', device, None, 'default', args.debug, \n True, 0, 0, -1700, 0)\n", (2097, 2183), True, 'import plantcv as pcv\n'), ((2214, 2268), 'plantcv.find_objects', 'pcv.find_objects', (['masked2', 'ab_fill', 'device', 'args.debug'], {}), '(masked2, ab_fill, device, args.debug)\n', (2230, 2268), True, 'import plantcv as pcv\n'), ((2328, 2436), 'plantcv.roi_objects', 'pcv.roi_objects', (['masked2', '"""cutto"""', 'roi1', 'roi_hierarchy1', 'id_objects1', 'obj_hierarchy1', 'device', 'args.debug'], {}), "(masked2, 'cutto', roi1, roi_hierarchy1, id_objects1,\n obj_hierarchy1, device, args.debug)\n", (2343, 2436), True, 'import plantcv as pcv\n'), ((2447, 2511), 'plantcv.apply_mask', 'pcv.apply_mask', (['masked2', 'kept_mask1', '"""white"""', 'device', 'args.debug'], {}), "(masked2, kept_mask1, 'white', device, args.debug)\n", (2461, 2511), True, 'import plantcv as pcv\n'), ((2534, 2584), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked3', '"""a"""', 'device', 'args.debug'], {}), "(masked3, 'a', device, args.debug)\n", (2550, 2584), True, 'import plantcv as pcv\n'), ((2607, 2657), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked3', '"""b"""', 'device', 'args.debug'], {}), "(masked3, 'b', device, args.debug)\n", (2623, 2657), True, 'import plantcv as pcv\n'), ((2686, 2755), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_a1', '(122)', '(255)', '"""dark"""', 'device', 'args.debug'], {}), "(masked_a1, 122, 255, 'dark', device, args.debug)\n", (2706, 2755), True, 'import plantcv as pcv\n'), ((2784, 2854), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_b1', '(170)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked_b1, 170, 255, 'light', device, args.debug)\n", (2804, 2854), True, 'import plantcv as pcv\n'), ((2871, 2939), 'plantcv.logical_or', 'pcv.logical_or', (['maskeda_thresh1', 'maskedb_thresh1', 'device', 'args.debug'], {}), '(maskeda_thresh1, maskedb_thresh1, device, args.debug)\n', (2885, 2939), True, 'import plantcv as pcv\n'), ((2960, 3028), 'plantcv.logical_or', 'pcv.logical_or', (['maskeda_thresh1', 'maskedb_thresh1', 'device', 'args.debug'], {}), '(maskeda_thresh1, maskedb_thresh1, device, args.debug)\n', (2974, 3028), True, 'import plantcv as pcv\n'), ((3050, 3097), 'plantcv.fill', 'pcv.fill', (['ab1', 'ab_cnt1', '(300)', 'device', 'args.debug'], {}), '(ab1, ab_cnt1, 300, device, args.debug)\n', (3058, 3097), True, 'import plantcv as pcv\n'), ((3134, 3233), 'plantcv.define_roi', 'pcv.define_roi', (['masked2', '"""rectangle"""', 'device', 'None', '"""default"""', 'args.debug', '(True)', '(1700)', '(0)', '(0)', '(0)'], {}), "(masked2, 'rectangle', device, None, 'default', args.debug, \n True, 1700, 0, 0, 0)\n", (3148, 3233), True, 'import plantcv as pcv\n'), ((3264, 3318), 'plantcv.find_objects', 'pcv.find_objects', (['masked2', 'ab_fill', 'device', 'args.debug'], {}), '(masked2, ab_fill, device, args.debug)\n', (3280, 3318), True, 'import plantcv as pcv\n'), ((3378, 3486), 'plantcv.roi_objects', 'pcv.roi_objects', (['masked2', '"""cutto"""', 'roi2', 'roi_hierarchy2', 'id_objects2', 'obj_hierarchy2', 'device', 'args.debug'], {}), "(masked2, 'cutto', roi2, roi_hierarchy2, id_objects2,\n obj_hierarchy2, device, args.debug)\n", (3393, 3486), True, 'import plantcv as pcv\n'), ((3497, 3561), 'plantcv.apply_mask', 'pcv.apply_mask', (['masked2', 'kept_mask2', '"""white"""', 'device', 'args.debug'], {}), "(masked2, kept_mask2, 'white', device, args.debug)\n", (3511, 3561), True, 'import plantcv as pcv\n'), ((3584, 3634), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked4', '"""a"""', 'device', 'args.debug'], {}), "(masked4, 'a', device, args.debug)\n", (3600, 3634), True, 'import plantcv as pcv\n'), ((3657, 3707), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked4', '"""b"""', 'device', 'args.debug'], {}), "(masked4, 'b', device, args.debug)\n", (3673, 3707), True, 'import plantcv as pcv\n'), ((3736, 3805), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_a2', '(122)', '(255)', '"""dark"""', 'device', 'args.debug'], {}), "(masked_a2, 122, 255, 'dark', device, args.debug)\n", (3756, 3805), True, 'import plantcv as pcv\n'), ((3834, 3904), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked_b2', '(170)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked_b2, 170, 255, 'light', device, args.debug)\n", (3854, 3904), True, 'import plantcv as pcv\n'), ((3921, 3989), 'plantcv.logical_or', 'pcv.logical_or', (['maskeda_thresh2', 'maskedb_thresh2', 'device', 'args.debug'], {}), '(maskeda_thresh2, maskedb_thresh2, device, args.debug)\n', (3935, 3989), True, 'import plantcv as pcv\n'), ((4010, 4078), 'plantcv.logical_or', 'pcv.logical_or', (['maskeda_thresh2', 'maskedb_thresh2', 'device', 'args.debug'], {}), '(maskeda_thresh2, maskedb_thresh2, device, args.debug)\n', (4024, 4078), True, 'import plantcv as pcv\n'), ((4100, 4147), 'plantcv.fill', 'pcv.fill', (['ab2', 'ab_cnt2', '(200)', 'device', 'args.debug'], {}), '(ab2, ab_cnt2, 200, device, args.debug)\n', (4108, 4147), True, 'import plantcv as pcv\n'), ((4171, 4225), 'plantcv.logical_or', 'pcv.logical_or', (['ab_fill1', 'ab_fill2', 'device', 'args.debug'], {}), '(ab_fill1, ab_fill2, device, args.debug)\n', (4185, 4225), True, 'import plantcv as pcv\n'), ((4246, 4307), 'plantcv.apply_mask', 'pcv.apply_mask', (['masked2', 'ab_cnt3', '"""white"""', 'device', 'args.debug'], {}), "(masked2, ab_cnt3, 'white', device, args.debug)\n", (4260, 4307), True, 'import plantcv as pcv\n'), ((4371, 4425), 'plantcv.find_objects', 'pcv.find_objects', (['masked2', 'ab_fill', 'device', 'args.debug'], {}), '(masked2, ab_fill, device, args.debug)\n', (4387, 4425), True, 'import plantcv as pcv\n'), ((4474, 4578), 'plantcv.define_roi', 'pcv.define_roi', (['masked2', '"""rectangle"""', 'device', 'None', '"""default"""', 'args.debug', '(True)', '(650)', '(0)', '(-650)', '(-120)'], {}), "(masked2, 'rectangle', device, None, 'default', args.debug, \n True, 650, 0, -650, -120)\n", (4488, 4578), True, 'import plantcv as pcv\n'), ((4717, 4821), 'plantcv.roi_objects', 'pcv.roi_objects', (['img', '"""cutto"""', 'roi3', 'roi_hierarchy3', 'id_objects3', 'obj_hierarchy3', 'device', 'args.debug'], {}), "(img, 'cutto', roi3, roi_hierarchy3, id_objects3,\n obj_hierarchy3, device, args.debug)\n", (4732, 4821), True, 'import plantcv as pcv\n'), ((4835, 4890), 'plantcv.logical_or', 'pcv.logical_or', (['ab_cnt3', 'kept_mask3', 'device', 'args.debug'], {}), '(ab_cnt3, kept_mask3, device, args.debug)\n', (4849, 4890), True, 'import plantcv as pcv\n'), ((4911, 4975), 'plantcv.apply_mask', 'pcv.apply_mask', (['masked2', 'kept_mask4', '"""white"""', 'device', 'args.debug'], {}), "(masked2, kept_mask4, 'white', device, args.debug)\n", (4925, 4975), True, 'import plantcv as pcv\n'), ((4998, 5048), 'plantcv.rgb2gray_lab', 'pcv.rgb2gray_lab', (['masked5', '"""a"""', 'device', 'args.debug'], {}), "(masked5, 'a', device, args.debug)\n", (5014, 5048), True, 'import plantcv as pcv\n'), ((5078, 5148), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked5_a', '(130)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked5_a, 130, 255, 'light', device, args.debug)\n", (5098, 5148), True, 'import plantcv as pcv\n'), ((5175, 5245), 'plantcv.binary_threshold', 'pcv.binary_threshold', (['masked5_a', '(130)', '(255)', '"""light"""', 'device', 'args.debug'], {}), "(masked5_a, 130, 255, 'light', device, args.debug)\n", (5195, 5245), True, 'import plantcv as pcv\n'), ((5273, 5339), 'plantcv.fill', 'pcv.fill', (['masked5_a_thresh', 'masked5_a_cnt', '(200)', 'device', 'args.debug'], {}), '(masked5_a_thresh, masked5_a_cnt, 200, device, args.debug)\n', (5281, 5339), True, 'import plantcv as pcv\n'), ((5366, 5420), 'plantcv.median_blur', 'pcv.median_blur', (['masked5_a_fill', '(7)', 'device', 'args.debug'], {}), '(masked5_a_fill, 7, device, args.debug)\n', (5381, 5420), True, 'import plantcv as pcv\n'), ((5461, 5521), 'plantcv.find_objects', 'pcv.find_objects', (['masked5', 'masked5_mblur', 'device', 'args.debug'], {}), '(masked5, masked5_mblur, device, args.debug)\n', (5477, 5521), True, 'import plantcv as pcv\n'), ((5554, 5651), 'plantcv.define_roi', 'pcv.define_roi', (['masked2', '"""rectangle"""', 'device', 'None', '"""default"""', 'args.debug', '(False)', '(0)', '(0)', '(0)', '(0)'], {}), "(masked2, 'rectangle', device, None, 'default', args.debug, \n False, 0, 0, 0, 0)\n", (5568, 5651), True, 'import plantcv as pcv\n'), ((5701, 5807), 'plantcv.roi_objects', 'pcv.roi_objects', (['img', '"""partial"""', 'roi4', 'roi_hierarchy4', 'id_objects4', 'obj_hierarchy4', 'device', 'args.debug'], {}), "(img, 'partial', roi4, roi_hierarchy4, id_objects4,\n obj_hierarchy4, device, args.debug)\n", (5716, 5807), True, 'import plantcv as pcv\n'), ((5852, 5925), 'plantcv.object_composition', 'pcv.object_composition', (['img', 'roi_objects4', 'hierarchy4', 'device', 'args.debug'], {}), '(img, roi_objects4, hierarchy4, device, args.debug)\n', (5874, 5925), True, 'import plantcv as pcv\n'), ((6078, 6179), 'plantcv.analyze_object', 'pcv.analyze_object', (['img', 'args.image', 'obj', 'mask', 'device', 'args.debug', "(args.outdir + '/' + filename)"], {}), "(img, args.image, obj, mask, device, args.debug, args.\n outdir + '/' + filename)\n", (6096, 6179), True, 'import plantcv as pcv\n'), ((6292, 6397), 'plantcv.analyze_bound', 'pcv.analyze_bound', (['img', 'args.image', 'obj', 'mask', '(270)', 'device', 'args.debug', "(args.outdir + '/' + filename)"], {}), "(img, args.image, obj, mask, 270, device, args.debug, args\n .outdir + '/' + filename)\n", (6309, 6397), True, 'import plantcv as pcv\n'), ((6558, 6688), 'plantcv.analyze_color', 'pcv.analyze_color', (['img', 'args.image', 'mask', '(256)', 'device', 'args.debug', '"""all"""', '"""rgb"""', '"""v"""', '"""img"""', '(300)', "(args.outdir + '/' + filename)"], {}), "(img, args.image, mask, 256, device, args.debug, 'all',\n 'rgb', 'v', 'img', 300, args.outdir + '/' + filename)\n", (6575, 6688), True, 'import plantcv as pcv\n'), ((6712, 6767), 'plantcv.print_results', 'pcv.print_results', (['args.image', 'shape_header', 'shape_data'], {}), '(args.image, shape_header, shape_data)\n', (6729, 6767), True, 'import plantcv as pcv\n'), ((6770, 6825), 'plantcv.print_results', 'pcv.print_results', (['args.image', 'color_header', 'color_data'], {}), '(args.image, color_header, color_data)\n', (6787, 6825), True, 'import plantcv as pcv\n'), ((6828, 6889), 'plantcv.print_results', 'pcv.print_results', (['args.image', 'boundary_header', 'boundary_data'], {}), '(args.image, boundary_header, boundary_data)\n', (6845, 6889), True, 'import plantcv as pcv\n')] |
#
# Vortex OpenSplice
#
# This software and documentation are Copyright 2006 to TO_YEAR ADLINK
# Technology Limited, its affiliated companies and licensors. All rights
# reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
'''
Created on Dec 22, 2017
@author: prismtech
'''
import unittest
import ddsutil
import enum
class MyEnum(enum.Enum):
ONE = 0
TWO = 1
class TestTypeCheckers(unittest.TestCase):
_bool_checker = ddsutil._bool_checker
def testBool(self):
self.assertRaises(TypeError, lambda: self._bool_checker(None))
self.assertRaises(TypeError, lambda: self._bool_checker(1))
self._bool_checker(True)
self._bool_checker(False)
_octet_checker = ddsutil._octet_checker
_ushort_checker = ddsutil._ushort_checker
_ulong_checker = ddsutil._ulong_checker
_ulonglong_checker = ddsutil._ulonglong_checker
_short_checker = ddsutil._short_checker
_long_checker = ddsutil._long_checker
_longlong_checker = ddsutil._longlong_checker
def testOctet(self):
self.assertRaises(TypeError, lambda: self._octet_checker(None))
self.assertRaises(TypeError, lambda: self._octet_checker(3.5))
self.assertRaises(TypeError, lambda: self._octet_checker('a'))
self.assertRaises(TypeError, lambda: self._octet_checker(b'a'))
self.assertRaises(TypeError, lambda: self._octet_checker(-1))
self.assertRaises(TypeError, lambda: self._octet_checker(256))
self.assertRaises(TypeError, lambda: self._octet_checker(-1000))
self.assertRaises(TypeError, lambda: self._octet_checker(2560))
self._octet_checker(0)
self._octet_checker(128)
self._octet_checker(255)
def testUShort(self):
self.assertRaises(TypeError, lambda: self._ushort_checker(None))
self.assertRaises(TypeError, lambda: self._ushort_checker(3.5))
self.assertRaises(TypeError, lambda: self._ushort_checker('a'))
self.assertRaises(TypeError, lambda: self._ushort_checker(b'a'))
self.assertRaises(TypeError, lambda: self._ushort_checker(-1))
self.assertRaises(TypeError, lambda: self._ushort_checker((1<<16)))
self.assertRaises(TypeError, lambda: self._ushort_checker(-1000))
self.assertRaises(TypeError, lambda: self._ushort_checker(350000))
self._ushort_checker(0)
self._ushort_checker((1<<15))
self._ushort_checker((1<<16)-1)
def testULong(self):
self.assertRaises(TypeError, lambda: self._ulong_checker(None))
self.assertRaises(TypeError, lambda: self._ulong_checker(3.5))
self.assertRaises(TypeError, lambda: self._ulong_checker('a'))
self.assertRaises(TypeError, lambda: self._ulong_checker(b'a'))
self.assertRaises(TypeError, lambda: self._ulong_checker(-1))
self.assertRaises(TypeError, lambda: self._ulong_checker((1<<32)))
self.assertRaises(TypeError, lambda: self._ulong_checker(-1000))
self.assertRaises(TypeError, lambda: self._ulong_checker((1<<40)))
self._ulong_checker(0)
self._ulong_checker((1<<31))
self._ulong_checker((1<<32)-1)
def testULongLong(self):
self.assertRaises(TypeError, lambda: self._ulonglong_checker(None))
self.assertRaises(TypeError, lambda: self._ulonglong_checker(3.5))
self.assertRaises(TypeError, lambda: self._ulonglong_checker('a'))
self.assertRaises(TypeError, lambda: self._ulonglong_checker(b'a'))
self.assertRaises(TypeError, lambda: self._ulonglong_checker(-1))
self.assertRaises(TypeError, lambda: self._ulonglong_checker((1<<64)))
self.assertRaises(TypeError, lambda: self._ulonglong_checker(-1000))
self.assertRaises(TypeError, lambda: self._ulonglong_checker((1<<66)))
self._ulonglong_checker(0)
self._ulonglong_checker((1<<60))
self._ulonglong_checker((1<<64)-1)
def testShort(self):
self.assertRaises(TypeError, lambda: self._short_checker(None))
self.assertRaises(TypeError, lambda: self._short_checker(3.5))
self.assertRaises(TypeError, lambda: self._short_checker('a'))
self.assertRaises(TypeError, lambda: self._short_checker(b'a'))
self.assertRaises(TypeError, lambda: self._short_checker(-(1<<15) - 1)) # right outside the bounds
self.assertRaises(TypeError, lambda: self._short_checker((1<<15))) # right outside the bounds
self.assertRaises(TypeError, lambda: self._short_checker(-(1<<16) - 1)) # well outside the bounds
self.assertRaises(TypeError, lambda: self._short_checker((1<<16))) # well outside the bounds
self._short_checker(-(1<<15))
self._short_checker(-5)
self._short_checker(0)
self._short_checker(5)
self._short_checker((1<<15)-1)
def testLong(self):
self.assertRaises(TypeError, lambda: self._long_checker(None))
self.assertRaises(TypeError, lambda: self._long_checker(3.5))
self.assertRaises(TypeError, lambda: self._long_checker('a'))
self.assertRaises(TypeError, lambda: self._long_checker(b'a'))
self.assertRaises(TypeError, lambda: self._long_checker(-(1<<31) - 1)) # right outside the bounds
self.assertRaises(TypeError, lambda: self._long_checker((1<<31))) # right outside the bounds
self.assertRaises(TypeError, lambda: self._long_checker(-(1<<32) - 1)) # well outside the bounds
self.assertRaises(TypeError, lambda: self._long_checker((1<<32))) # well outside the bounds
self._long_checker(-(1<<31))
self._long_checker(-5)
self._long_checker(0)
self._long_checker(5)
self._long_checker((1<<31)-1)
def testLongLong(self):
self.assertRaises(TypeError, lambda: self._longlong_checker(None))
self.assertRaises(TypeError, lambda: self._longlong_checker(3.5))
self.assertRaises(TypeError, lambda: self._longlong_checker('a'))
self.assertRaises(TypeError, lambda: self._longlong_checker(b'a'))
self.assertRaises(TypeError, lambda: self._longlong_checker(-(1<<63) - 1)) # right outside the bounds
self.assertRaises(TypeError, lambda: self._longlong_checker((1<<63))) # right outside the bounds
self.assertRaises(TypeError, lambda: self._longlong_checker(-(1<<64) - 1)) # well outside the bounds
self.assertRaises(TypeError, lambda: self._longlong_checker((1<<64))) # well outside the bounds
self._longlong_checker(-(1<<63))
self._longlong_checker(-5)
self._longlong_checker(0)
self._longlong_checker(5)
self._longlong_checker((1<<63)-1)
_char_checker = ddsutil._char_checker
def testChar(self):
self.assertRaises(TypeError, lambda: self._char_checker(None))
self.assertRaises(TypeError, lambda: self._char_checker(3))
self.assertRaises(TypeError, lambda: self._char_checker(b'a'))
self.assertRaises(TypeError, lambda: self._char_checker(''))
self.assertRaises(TypeError, lambda: self._char_checker('aa'))
self.assertRaises(TypeError, lambda: self._char_checker(chr(256)))
self._char_checker(chr(0))
self._char_checker('1')
self._char_checker(chr(255))
_str_checker = ddsutil._str_checker
_str2_checker = ddsutil._bounded_str_checker(2)
def testStr(self):
self.assertRaises(TypeError, lambda: self._str2_checker(None))
self.assertRaises(TypeError, lambda: self._str2_checker(3))
self.assertRaises(TypeError, lambda: self._str2_checker(b'a'))
self.assertRaises(TypeError, lambda: self._str2_checker('aab'))
self.assertRaises(TypeError, lambda: self._str2_checker(chr(256)))
self._str2_checker('')
self._str2_checker(chr(0))
self._str2_checker('1')
self._str2_checker('11')
self._str2_checker(chr(255))
_enum_checker = ddsutil._class_checker(MyEnum)
def testClass(self):
self.assertRaises(TypeError, lambda: self._enum_checker(None))
self.assertRaises(TypeError, lambda: self._enum_checker(1))
self.assertRaises(TypeError, lambda: self._enum_checker('A'))
self._enum_checker(MyEnum.ONE)
self._enum_checker(MyEnum.TWO)
_float_checker = ddsutil._float_checker
def testFloat(self):
self.assertRaises(TypeError, lambda: self._float_checker(None))
self.assertRaises(TypeError, lambda: self._float_checker(1))
self.assertRaises(TypeError, lambda: self._float_checker('A'))
self._float_checker(3.15)
self._float_checker(1.0)
self._float_checker(-9.183)
_long_array_checker = ddsutil._array_checker(2, ddsutil._long_checker)
def testLongArray(self):
self.assertRaises(TypeError, lambda: self._long_array_checker(None))
self.assertRaises(TypeError, lambda: self._long_array_checker(1))
self.assertRaises(TypeError, lambda: self._long_array_checker(1.0))
self.assertRaises(TypeError, lambda: self._long_array_checker('A'))
self.assertRaises(TypeError, lambda: self._long_array_checker([]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1,2,3]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1.0,2]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1,2.0]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1<<32,2]))
self.assertRaises(TypeError, lambda: self._long_array_checker([1,1<<32]))
self._long_array_checker([1,2])
self._long_array_checker([-(1<<31),(1<<31)-1])
_long_seq_checker = ddsutil._seq_checker(2, ddsutil._long_checker)
def testLongSeq(self):
self.assertRaises(TypeError, lambda: self._long_seq_checker(None))
self.assertRaises(TypeError, lambda: self._long_seq_checker(1))
self.assertRaises(TypeError, lambda: self._long_seq_checker(1.0))
self.assertRaises(TypeError, lambda: self._long_seq_checker('A'))
self.assertRaises(TypeError, lambda: self._long_seq_checker([1,2,3]))
self.assertRaises(TypeError, lambda: self._long_seq_checker([1.0,2]))
self.assertRaises(TypeError, lambda: self._long_seq_checker([1,2.0]))
self.assertRaises(TypeError, lambda: self._long_seq_checker([1<<32,2]))
self.assertRaises(TypeError, lambda: self._long_seq_checker([1,1<<32]))
self._long_seq_checker([])
self._long_seq_checker([1])
self._long_seq_checker([1,2])
self._long_seq_checker([-(1<<31),(1<<31)-1])
_long_ubseq_checker = ddsutil._seq_checker(0, ddsutil._long_checker)
def testLongUBSeq(self):
self.assertRaises(TypeError, lambda: self._long_ubseq_checker(None))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker(1))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker(1.0))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker('A'))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker([1.0,2]))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker([1,2.0]))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker([1<<32,2]))
self.assertRaises(TypeError, lambda: self._long_ubseq_checker([1,1<<32]))
self._long_ubseq_checker([])
self._long_ubseq_checker([1])
self._long_ubseq_checker([1,2])
self._long_ubseq_checker([1,2,3,4,5])
self._long_ubseq_checker([-(1<<31),(1<<31)-1])
_long_matrix_checker = ddsutil._array_checker(1,ddsutil._array_checker(2,ddsutil._long_checker))
def testLongMatrix(self):
self.assertRaises(TypeError, lambda: self._long_matrix_checker([]))
self.assertRaises(TypeError, lambda: self._long_matrix_checker([[]]))
self.assertRaises(TypeError, lambda: self._long_matrix_checker([[1]]))
self.assertRaises(TypeError, lambda: self._long_matrix_checker([[1,2,3]]))
self.assertRaises(TypeError, lambda: self._long_matrix_checker([[1,2],[3,4]]))
self._long_matrix_checker([[1,2]])
_long_seqseq_checker = ddsutil._seq_checker(1,ddsutil._seq_checker(2,ddsutil._long_checker))
def testLongSeqSeq(self):
self._long_seqseq_checker([])
self._long_seqseq_checker([[]])
self._long_seqseq_checker([[1]])
self._long_seqseq_checker([[1,2]])
self.assertRaises(TypeError, lambda: self._long_seqseq_checker([[1,2,3]]))
self.assertRaises(TypeError, lambda: self._long_seqseq_checker([[1,2],[3,4]]))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main() | [
"ddsutil._bounded_str_checker",
"unittest.main",
"ddsutil._array_checker",
"ddsutil._seq_checker",
"ddsutil._class_checker"
] | [((7846, 7877), 'ddsutil._bounded_str_checker', 'ddsutil._bounded_str_checker', (['(2)'], {}), '(2)\n', (7874, 7877), False, 'import ddsutil\n'), ((8452, 8482), 'ddsutil._class_checker', 'ddsutil._class_checker', (['MyEnum'], {}), '(MyEnum)\n', (8474, 8482), False, 'import ddsutil\n'), ((9216, 9264), 'ddsutil._array_checker', 'ddsutil._array_checker', (['(2)', 'ddsutil._long_checker'], {}), '(2, ddsutil._long_checker)\n', (9238, 9264), False, 'import ddsutil\n'), ((10280, 10326), 'ddsutil._seq_checker', 'ddsutil._seq_checker', (['(2)', 'ddsutil._long_checker'], {}), '(2, ddsutil._long_checker)\n', (10300, 10326), False, 'import ddsutil\n'), ((11232, 11278), 'ddsutil._seq_checker', 'ddsutil._seq_checker', (['(0)', 'ddsutil._long_checker'], {}), '(0, ddsutil._long_checker)\n', (11252, 11278), False, 'import ddsutil\n'), ((13278, 13293), 'unittest.main', 'unittest.main', ([], {}), '()\n', (13291, 13293), False, 'import unittest\n'), ((12212, 12260), 'ddsutil._array_checker', 'ddsutil._array_checker', (['(2)', 'ddsutil._long_checker'], {}), '(2, ddsutil._long_checker)\n', (12234, 12260), False, 'import ddsutil\n'), ((12788, 12834), 'ddsutil._seq_checker', 'ddsutil._seq_checker', (['(2)', 'ddsutil._long_checker'], {}), '(2, ddsutil._long_checker)\n', (12808, 12834), False, 'import ddsutil\n')] |
#!/usr/bin/env python
u"""
format_bibtex.py (12/2020)
Reformats journal bibtex files into a standard form with Universal citekeys
COMMAND LINE OPTIONS:
-O, --output: Output to new bibtex file
-C, --cleanup: Remove the input file after formatting
-V, --verbose: Verbose output of input files and entries
PROGRAM DEPENDENCIES:
gen_citekeys.py: Generates Papers2-like cite keys for BibTeX
read_referencerc.py: Sets default file path and file format for output files
language_conversion.py: Outputs map for converting symbols between languages
NOTES:
May get capitalization incorrect for authors with lowercase first letters
possible for cases when author fields are initially fully capitalized
Check unicode characters with http://www.fileformat.info/
can add more entries to the language_conversion matrix
Papers2 Universal Citekey generation javascript
https://github.com/cparnot/universal-citekey-js
UPDATE HISTORY:
Updated 12/2020: using argparse to set command line options
Updated 07/2019: modifications for python3 string compatibility
Updated 07/2018: format editor fields to be "family name, given name"
Updated 04/2018: use regular expression for splitting between authors
Updated 02/2018: changed variable name of bibentry to bibtype
Updated 11/2017: remove line skips and series of whitespace from title
Updated 10/2017: if --output place file in reference directory
use data path and data file format from referencerc file
Updated 06/2017: Separate initials of authors if listed as singular variable
format author names even if in family name, given name format
added option --cleanup to remove the input RIS file after formatting
Written 05/2017
"""
from __future__ import print_function
import sys
import os
import re
import inspect
import argparse
from gen_citekeys import gen_citekey
from read_referencerc import read_referencerc
from language_conversion import language_conversion
#-- current file path for the program
filename = inspect.getframeinfo(inspect.currentframe()).filename
filepath = os.path.dirname(os.path.abspath(filename))
#-- PURPOSE: formats an input bibtex file
def format_bibtex(file_contents, OUTPUT=False, VERBOSE=False):
#-- get reference filepath and reference format from referencerc file
datapath,dataformat=read_referencerc(os.path.join(filepath,'.referencerc'))
#-- valid bibtex entry types
bibtex_entry_types = ['article','book','booklet','conference','inbook',
'incollection','inproceedings','manual','mastersthesis','phdthesis',
'proceedings','techreport','unpublished','webpage']
entry_regex = '[?<=\@](' + '|'.join([i for i in bibtex_entry_types]) + \
')[\s]?\{(.*?)[\s]?,[\s]?'
R1 = re.compile(entry_regex, flags=re.IGNORECASE)
#-- bibtex fields to be printed in the output file
bibtex_field_types = ['address','affiliation','annote','author','booktitle',
'chapter','crossref','doi','edition','editor','howpublished','institution',
'isbn','issn','journal','key','keywords','month','note','number','organization',
'pages','publisher','school','series','title','type','url','volume','year']
field_regex = '[\s]?(' + '|'.join([i for i in bibtex_field_types]) + \
')[\s]?\=[\s]?[\"|\']?[\{]?[\{]?[\s]?(.*?)[\s+]?[\}]?[\}]?[\"|\']?[\s]?[\,]?[\s]?\n'
R2 = re.compile(field_regex, flags=re.IGNORECASE)
#-- sort bibtex fields in output files
bibtex_field_sort = {'address':15,'affiliation':16,'annote':25,'author':0,
'booktitle':12,'chapter':13,'crossref':27,'doi':10,'edition':19,'editor':21,
'howpublished':22,'institution':17,'isbn':8,'issn':7,'journal':2,'key':24,
'keywords':28,'month':4,'note':23,'number':6,'organization':17,'pages':11,
'publisher':14,'school':18,'series':20,'title':1,'type':26,'url':9,
'volume':5,'year':3}
#-- regular expression pattern to extract doi from webpages or "doi:"
doi_regex = '(doi\:[\s]?|http[s]?\:\/\/(dx\.)?doi\.org\/)?(10\.(.*?))$'
R3 = re.compile(doi_regex, flags=re.IGNORECASE)
#-- list of known compound surnames to search for
compound_surname_regex = []
compound_surname_regex.append('(?<=\s)van\s[de|den]?[\s]?(.*?)')
compound_surname_regex.append('(?<=\s)von\s[de|den]?[\s]?(.*?)')
compound_surname_regex.append('(?<![van|von])(?<=\s)de\s(.*?)')
compound_surname_regex.append('(?<!de)(?<=\s)(la|los)\s?(.*?)')
#-- create python dictionary with entry
bibtex_entry = {}
bibtex_key = {}
#-- extract bibtex entry type and bibtex cite key
bibtype,bibkey = R1.findall(file_contents).pop()
bibtex_key['entrytype'] = bibtype.lower()
bibtex_key['citekey'] = bibkey
bibtex_field_entries = R2.findall(file_contents)
bibtex_keywords = []
for key,val in bibtex_field_entries:
if (key.lower() == 'title'):
#-- format titles in double curly brackets
bibtex_entry[key.lower()] = '{{{0}}}'.format(val)
elif (key.lower() in ('author','editor')) and (',' not in val):
#-- format authors in surname, given name(s)
current_authors = []
for A in re.split(' and ', val, flags=re.IGNORECASE):
#-- flip given name(s) and lastname
i = None; j = 0
#-- check if lastname is in list of known compound surnames
while (i is None) and (j < len(compound_surname_regex)):
R = re.compile(compound_surname_regex[j],flags=re.IGNORECASE)
i = R.search(A).start() if R.search(A) else None
j += 1
#-- if the lastname was compound
if i is not None:
ALN,AGN = A[i:],A[:i].rstrip()
else:
#-- flip given name(s) and lastname
author_fields = A.split(' ')
ALN = author_fields[-1]
AGN = ' '.join(author_fields[:-1])
#-- split initials if as a single variable
if re.match('([A-Z])\.([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Z])\.([A-Z])\.',AGN).pop())
elif re.match('([A-Za-z]+)\s([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Za-z]+)\s([A-Z])\.',AGN).pop())
elif re.match('([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Z])\.',AGN))
#-- add to current authors list
current_authors.append('{0}, {1}'.format(ALN,AGN))
#-- merge authors list
bibtex_entry[key.lower()] = ' and '.join(current_authors)
elif (key.lower() in ('author','editor')):
current_authors = []
for A in re.split(' and ', val, flags=re.IGNORECASE):
ALN,AGN = A.split(', ')
#-- split initials if as a single variable
if re.match('([A-Z])\.([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Z])\.([A-Z])\.',AGN).pop())
elif re.match('([A-Za-z]+)\s([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Za-z]+)\s([A-Z])\.',AGN).pop())
elif re.match('([A-Z])\.', AGN):
AGN=' '.join(re.findall('([A-Z])\.',AGN))
#-- add to current authors list
current_authors.append('{0}, {1}'.format(ALN,AGN))
#-- merge authors list
bibtex_entry[key.lower()] = ' and '.join(current_authors)
elif (key.lower() == 'doi') and bool(R3.match(val)):
bibtex_entry[key.lower()] = R3.match(val).group(3)
elif (key.lower() == 'pages') and re.match('(.*?)\s\-\s(.*?)$',val):
pages, = re.findall('(.*?)\s\-\s(.*?)$',val)
bibtex_entry[key.lower()] = '{0}--{1}'.format(pages[0],pages[1])
elif (key.lower() == 'keywords'):
bibtex_keywords.append(val)
else:
bibtex_entry[key.lower()] = val
#-- if author fields are initially completely uppercase: change to title()
if bibtex_entry['author'].isupper():
bibtex_entry['author'] = bibtex_entry['author'].title()
#-- extract surname of first author
firstauthor = bibtex_entry['author'].split(',')[0]
author_directory = bibtex_entry['author'].split(',')[0]
#-- decode from utf-8
if sys.version_info[0] == 2:
firstauthor = firstauthor.decode('utf-8')
author_directory = author_directory.decode('utf-8')
bibtex_entry['author'] = bibtex_entry['author'].decode('utf-8')
bibtex_entry['title'] = bibtex_entry['title'].decode('utf-8')
#-- firstauthor: replace unicode characters with plain text
#-- author_directory: replace unicode characters with combined unicode
#-- bibtex entry for authors: replace unicode characters with latex symbols
#-- bibtex entry for titles: replace unicode characters with latex symbols
#-- 1st column: latex, 2nd: combining unicode, 3rd: unicode, 4th: plain text
for LV, CV, UV, PV in language_conversion():
firstauthor = firstauthor.replace(LV, PV).replace(UV, PV)
author_directory = author_directory.replace(LV, CV).replace(UV, CV)
bibtex_entry['author'] = bibtex_entry['author'].replace(UV, LV)
bibtex_entry['title'] = bibtex_entry['title'].replace(UV, LV)
#-- encode as utf-8
firstauthor = firstauthor.encode('utf-8')
#-- remove line skips and series of whitespace from title
bibtex_entry['title'] = re.sub('\s+',' ',bibtex_entry['title'])
#-- remove spaces, dashes and apostrophes from author_directory
author_directory = re.sub('\s','_',author_directory)
author_directory = re.sub('\-|\'','',author_directory)
year_directory, = re.findall('\d+',bibtex_entry['year'])
#-- create list of article keywords if present in bibliography file
if bibtex_keywords:
bibtex_entry['keywords'] = ', '.join(bibtex_keywords)
#-- extract DOI and title for generating universal citekeys
doi = bibtex_entry['doi'] if 'doi' in bibtex_entry.keys() else None
title = bibtex_entry['title'] if 'title' in bibtex_entry.keys() else None
#-- calculate the universal citekey
univ_key = gen_citekey(firstauthor.decode('utf-8'),
bibtex_entry['year'],doi,title)
#-- if printing to file: output bibtex file for author and year
if OUTPUT:
#-- parse universal citekey to generate output filename
authkey,citekey,=re.findall('(\D+)\:(\d+\D+)',univ_key).pop()
bibtex_file = '{0}-{1}.bib'.format(authkey,citekey)
#-- output directory
bibtex_dir = os.path.join(datapath,year_directory,author_directory)
os.makedirs(bibtex_dir) if not os.path.exists(bibtex_dir) else None
#-- create file object for output file
fid = open(os.path.join(bibtex_dir,bibtex_file),'w')
print(' --> {0}'.format(bibtex_file)) if VERBOSE else None
else:
fid = sys.stdout
#-- print the bibtex citation
print('@{0}{{{1},'.format(bibtex_key['entrytype'],univ_key),file=fid)
#-- sort output bibtex files as listed above
field_indices = [bibtex_field_sort[b] for b in bibtex_entry.keys()]
field_tuple = zip(field_indices,bibtex_entry.keys(),bibtex_entry.values())
#-- for each field within the entry
for s,k,v in sorted(field_tuple):
#-- make sure ampersands are in latex format (marked with symbol)
v = re.sub('(?<=\s)\&','\\\&',v) if re.search('(?<=\s)\&',v) else v
#-- do not put the month field in brackets
if (k == 'month'):
print('{0} = {1},'.format(k,v.lower()),file=fid)
else:
print('{0} = {{{1}}},'.format(k,v),file=fid)
print('}', file=fid)
#-- close the output file
if OUTPUT:
fid.close()
#-- main program that calls format_bibtex()
def main():
#-- Read the system arguments listed after the program
parser = argparse.ArgumentParser(
description="""Reformats journal BibTeX files into a standard form with
Universal citekeys
"""
)
#-- command line parameters
parser.add_argument('infile',
type=lambda p: os.path.abspath(os.path.expanduser(p)), nargs='+',
help='BibTeX file to be copied into the reference path')
parser.add_argument('--output','-O',
default=False, action='store_true',
help='Output to bibtex files')
parser.add_argument('--cleanup','-C',
default=False, action='store_true',
help='Remove input BibTeX file after conversion')
parser.add_argument('--verbose','-V',
default=False, action='store_true',
help='Verbose output of input and output files')
args = parser.parse_args()
#-- for each file entered
for FILE in args.infile:
#-- run for the input file
print(os.path.basename(FILE)) if args.verbose else None
with open(FILE,'r') as f:
file_contents = f.read()
try:
format_bibtex(re.sub('(\s+)\n','\n',file_contents),
OUTPUT=args.output, VERBOSE=args.verbose)
except:
pass
else:
#-- remove the input file
os.remove(FILE) if args.cleanup else None
#-- run main program
if __name__ == '__main__':
main()
| [
"language_conversion.language_conversion",
"re.split",
"os.path.exists",
"argparse.ArgumentParser",
"re.compile",
"os.makedirs",
"inspect.currentframe",
"os.path.join",
"re.match",
"os.remove",
"os.path.basename",
"os.path.abspath",
"re.sub",
"re.findall",
"os.path.expanduser",
"re.search"
] | [((2152, 2177), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (2167, 2177), False, 'import os\n'), ((2806, 2850), 're.compile', 're.compile', (['entry_regex'], {'flags': 're.IGNORECASE'}), '(entry_regex, flags=re.IGNORECASE)\n', (2816, 2850), False, 'import re\n'), ((3421, 3465), 're.compile', 're.compile', (['field_regex'], {'flags': 're.IGNORECASE'}), '(field_regex, flags=re.IGNORECASE)\n', (3431, 3465), False, 'import re\n'), ((4103, 4145), 're.compile', 're.compile', (['doi_regex'], {'flags': 're.IGNORECASE'}), '(doi_regex, flags=re.IGNORECASE)\n', (4113, 4145), False, 'import re\n'), ((9109, 9130), 'language_conversion.language_conversion', 'language_conversion', ([], {}), '()\n', (9128, 9130), False, 'from language_conversion import language_conversion\n'), ((9576, 9618), 're.sub', 're.sub', (['"""\\\\s+"""', '""" """', "bibtex_entry['title']"], {}), "('\\\\s+', ' ', bibtex_entry['title'])\n", (9582, 9618), False, 'import re\n'), ((9707, 9743), 're.sub', 're.sub', (['"""\\\\s"""', '"""_"""', 'author_directory'], {}), "('\\\\s', '_', author_directory)\n", (9713, 9743), False, 'import re\n'), ((9764, 9801), 're.sub', 're.sub', (['"""\\\\-|\'"""', '""""""', 'author_directory'], {}), '("\\\\-|\'", \'\', author_directory)\n', (9770, 9801), False, 'import re\n'), ((9822, 9862), 're.findall', 're.findall', (['"""\\\\d+"""', "bibtex_entry['year']"], {}), "('\\\\d+', bibtex_entry['year'])\n", (9832, 9862), False, 'import re\n'), ((12008, 12161), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Reformats journal BibTeX files into a standard form with\n Universal citekeys\n """'}), '(description=\n """Reformats journal BibTeX files into a standard form with\n Universal citekeys\n """\n )\n', (12031, 12161), False, 'import argparse\n'), ((2092, 2114), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (2112, 2114), False, 'import inspect\n'), ((2400, 2438), 'os.path.join', 'os.path.join', (['filepath', '""".referencerc"""'], {}), "(filepath, '.referencerc')\n", (2412, 2438), False, 'import os\n'), ((10699, 10755), 'os.path.join', 'os.path.join', (['datapath', 'year_directory', 'author_directory'], {}), '(datapath, year_directory, author_directory)\n', (10711, 10755), False, 'import os\n'), ((10762, 10785), 'os.makedirs', 'os.makedirs', (['bibtex_dir'], {}), '(bibtex_dir)\n', (10773, 10785), False, 'import os\n'), ((10896, 10933), 'os.path.join', 'os.path.join', (['bibtex_dir', 'bibtex_file'], {}), '(bibtex_dir, bibtex_file)\n', (10908, 10933), False, 'import os\n'), ((11546, 11573), 're.search', 're.search', (['"""(?<=\\\\s)\\\\&"""', 'v'], {}), "('(?<=\\\\s)\\\\&', v)\n", (11555, 11573), False, 'import re\n'), ((11514, 11547), 're.sub', 're.sub', (['"""(?<=\\\\s)\\\\&"""', '"""\\\\\\\\&"""', 'v'], {}), "('(?<=\\\\s)\\\\&', '\\\\\\\\&', v)\n", (11520, 11547), False, 'import re\n'), ((5238, 5281), 're.split', 're.split', (['""" and """', 'val'], {'flags': 're.IGNORECASE'}), "(' and ', val, flags=re.IGNORECASE)\n", (5246, 5281), False, 'import re\n'), ((10544, 10587), 're.findall', 're.findall', (['"""(\\\\D+)\\\\:(\\\\d+\\\\D+)"""', 'univ_key'], {}), "('(\\\\D+)\\\\:(\\\\d+\\\\D+)', univ_key)\n", (10554, 10587), False, 'import re\n'), ((10793, 10819), 'os.path.exists', 'os.path.exists', (['bibtex_dir'], {}), '(bibtex_dir)\n', (10807, 10819), False, 'import os\n'), ((12922, 12944), 'os.path.basename', 'os.path.basename', (['FILE'], {}), '(FILE)\n', (12938, 12944), False, 'import os\n'), ((13082, 13121), 're.sub', 're.sub', (['"""(\\\\s+)\n"""', '"""\n"""', 'file_contents'], {}), "('(\\\\s+)\\n', '\\n', file_contents)\n", (13088, 13121), False, 'import re\n'), ((13275, 13290), 'os.remove', 'os.remove', (['FILE'], {}), '(FILE)\n', (13284, 13290), False, 'import os\n'), ((6132, 6169), 're.match', 're.match', (['"""([A-Z])\\\\.([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.([A-Z])\\\\.', AGN)\n", (6140, 6169), False, 'import re\n'), ((6825, 6868), 're.split', 're.split', (['""" and """', 'val'], {'flags': 're.IGNORECASE'}), "(' and ', val, flags=re.IGNORECASE)\n", (6833, 6868), False, 'import re\n'), ((12271, 12292), 'os.path.expanduser', 'os.path.expanduser', (['p'], {}), '(p)\n', (12289, 12292), False, 'import os\n'), ((5540, 5598), 're.compile', 're.compile', (['compound_surname_regex[j]'], {'flags': 're.IGNORECASE'}), '(compound_surname_regex[j], flags=re.IGNORECASE)\n', (5550, 5598), False, 'import re\n'), ((6267, 6308), 're.match', 're.match', (['"""([A-Za-z]+)\\\\s([A-Z])\\\\."""', 'AGN'], {}), "('([A-Za-z]+)\\\\s([A-Z])\\\\.', AGN)\n", (6275, 6308), False, 'import re\n'), ((6988, 7025), 're.match', 're.match', (['"""([A-Z])\\\\.([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.([A-Z])\\\\.', AGN)\n", (6996, 7025), False, 'import re\n'), ((6410, 6437), 're.match', 're.match', (['"""([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.', AGN)\n", (6418, 6437), False, 'import re\n'), ((7123, 7164), 're.match', 're.match', (['"""([A-Za-z]+)\\\\s([A-Z])\\\\."""', 'AGN'], {}), "('([A-Za-z]+)\\\\s([A-Z])\\\\.', AGN)\n", (7131, 7164), False, 'import re\n'), ((7742, 7779), 're.match', 're.match', (['"""(.*?)\\\\s\\\\-\\\\s(.*?)$"""', 'val'], {}), "('(.*?)\\\\s\\\\-\\\\s(.*?)$', val)\n", (7750, 7779), False, 'import re\n'), ((7798, 7837), 're.findall', 're.findall', (['"""(.*?)\\\\s\\\\-\\\\s(.*?)$"""', 'val'], {}), "('(.*?)\\\\s\\\\-\\\\s(.*?)$', val)\n", (7808, 7837), False, 'import re\n'), ((7266, 7293), 're.match', 're.match', (['"""([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.', AGN)\n", (7274, 7293), False, 'import re\n'), ((6202, 6241), 're.findall', 're.findall', (['"""([A-Z])\\\\.([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.([A-Z])\\\\.', AGN)\n", (6212, 6241), False, 'import re\n'), ((6471, 6500), 're.findall', 're.findall', (['"""([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.', AGN)\n", (6481, 6500), False, 'import re\n'), ((6341, 6384), 're.findall', 're.findall', (['"""([A-Za-z]+)\\\\s([A-Z])\\\\."""', 'AGN'], {}), "('([A-Za-z]+)\\\\s([A-Z])\\\\.', AGN)\n", (6351, 6384), False, 'import re\n'), ((7058, 7097), 're.findall', 're.findall', (['"""([A-Z])\\\\.([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.([A-Z])\\\\.', AGN)\n", (7068, 7097), False, 'import re\n'), ((7327, 7356), 're.findall', 're.findall', (['"""([A-Z])\\\\."""', 'AGN'], {}), "('([A-Z])\\\\.', AGN)\n", (7337, 7356), False, 'import re\n'), ((7197, 7240), 're.findall', 're.findall', (['"""([A-Za-z]+)\\\\s([A-Z])\\\\."""', 'AGN'], {}), "('([A-Za-z]+)\\\\s([A-Z])\\\\.', AGN)\n", (7207, 7240), False, 'import re\n')] |
from os import path
import subprocess
import muon
import numpy as np
## VIASH START
meta = {
'functionality_name': 'foo',
'resources_dir': 'resources_test/'
}
## VIASH END
print("> Reading input file")
input_path = f"{meta['resources_dir']}/pbmc_1k_protein_v3/pbmc_1k_protein_v3_filtered_feature_bc_matrix.h5mu"
mu_in = muon.read_h5mu(input_path)
orig_obs = mu_in.mod['rna'].n_obs
orig_vars = mu_in.mod['rna'].n_vars
orig_prot_obs = mu_in.mod['prot'].n_obs
orig_prot_vars = mu_in.mod['prot'].n_vars
ad_rna = mu_in.mod['rna']
print(f" input: {ad_rna}")
ad_rna.obs["filter_none"] = np.repeat(True, ad_rna.n_obs)
ad_rna.obs["filter_with_random"] = np.random.choice(a=[False, True], size=ad_rna.n_obs)
ad_rna.var["filter_with_random"] = np.random.choice(a=[False, True], size=ad_rna.n_vars)
mu_in.write_h5mu("input_with_extra_columns.h5mu")
# TEST 1: filtering a little bit
print("> Check filtering a little bit")
out = subprocess.check_output([
f"./{meta['functionality_name']}",
"--input", "input_with_extra_columns.h5mu",
"--output", "output-1.h5mu",
"--obs_filter", "filter_none:filter_with_random",
"--var_filter", "filter_with_random"
]).decode("utf-8")
assert path.exists("output-1.h5mu"), "Output file not found"
mu_out = muon.read_h5mu("output-1.h5mu")
print(f" output1: {mu_out.mod['rna']}")
new_obs = mu_out.mod['rna'].n_obs
new_vars = mu_out.mod['rna'].n_vars
assert new_obs < orig_obs, "Some RNA obs should have been filtered"
assert new_vars < orig_vars, "Some RNA vars should have been filtered"
# TEST 2: filtering nothing
print("> Check filtering a little bit")
out = subprocess.check_output([
f"./{meta['functionality_name']}",
"--input", "input_with_extra_columns.h5mu",
"--output", "output-2.h5mu",
"--obs_filter", "filter_none"
]).decode("utf-8")
assert path.exists("output-2.h5mu"), "Output file not found"
mu_out = muon.read_h5mu("output-2.h5mu")
print(f" output2: {mu_out.mod['rna']}")
new_obs = mu_out.mod['rna'].n_obs
new_vars = mu_out.mod['rna'].n_vars
assert new_obs == orig_obs, "No RNA obs should have been filtered"
assert new_vars == orig_vars, "No RNA vars should have been filtered"
# test succeeded
print("> All tests succeeded!") | [
"subprocess.check_output",
"os.path.exists",
"numpy.repeat",
"numpy.random.choice",
"muon.read_h5mu"
] | [((331, 357), 'muon.read_h5mu', 'muon.read_h5mu', (['input_path'], {}), '(input_path)\n', (345, 357), False, 'import muon\n'), ((593, 622), 'numpy.repeat', 'np.repeat', (['(True)', 'ad_rna.n_obs'], {}), '(True, ad_rna.n_obs)\n', (602, 622), True, 'import numpy as np\n'), ((658, 710), 'numpy.random.choice', 'np.random.choice', ([], {'a': '[False, True]', 'size': 'ad_rna.n_obs'}), '(a=[False, True], size=ad_rna.n_obs)\n', (674, 710), True, 'import numpy as np\n'), ((746, 799), 'numpy.random.choice', 'np.random.choice', ([], {'a': '[False, True]', 'size': 'ad_rna.n_vars'}), '(a=[False, True], size=ad_rna.n_vars)\n', (762, 799), True, 'import numpy as np\n'), ((1221, 1249), 'os.path.exists', 'path.exists', (['"""output-1.h5mu"""'], {}), "('output-1.h5mu')\n", (1232, 1249), False, 'from os import path\n'), ((1284, 1315), 'muon.read_h5mu', 'muon.read_h5mu', (['"""output-1.h5mu"""'], {}), "('output-1.h5mu')\n", (1298, 1315), False, 'import muon\n'), ((1868, 1896), 'os.path.exists', 'path.exists', (['"""output-2.h5mu"""'], {}), "('output-2.h5mu')\n", (1879, 1896), False, 'from os import path\n'), ((1931, 1962), 'muon.read_h5mu', 'muon.read_h5mu', (['"""output-2.h5mu"""'], {}), "('output-2.h5mu')\n", (1945, 1962), False, 'import muon\n'), ((931, 1164), 'subprocess.check_output', 'subprocess.check_output', (['[f"./{meta[\'functionality_name\']}", \'--input\',\n \'input_with_extra_columns.h5mu\', \'--output\', \'output-1.h5mu\',\n \'--obs_filter\', \'filter_none:filter_with_random\', \'--var_filter\',\n \'filter_with_random\']'], {}), '([f"./{meta[\'functionality_name\']}", \'--input\',\n \'input_with_extra_columns.h5mu\', \'--output\', \'output-1.h5mu\',\n \'--obs_filter\', \'filter_none:filter_with_random\', \'--var_filter\',\n \'filter_with_random\'])\n', (954, 1164), False, 'import subprocess\n'), ((1643, 1815), 'subprocess.check_output', 'subprocess.check_output', (['[f"./{meta[\'functionality_name\']}", \'--input\',\n \'input_with_extra_columns.h5mu\', \'--output\', \'output-2.h5mu\',\n \'--obs_filter\', \'filter_none\']'], {}), '([f"./{meta[\'functionality_name\']}", \'--input\',\n \'input_with_extra_columns.h5mu\', \'--output\', \'output-2.h5mu\',\n \'--obs_filter\', \'filter_none\'])\n', (1666, 1815), False, 'import subprocess\n')] |
"""
UglifyJS is a JavaScript compressor/minifier written in JavaScript. It also
contains tools that allow one to automate working with Javascript code.
Options:
* source_map : str, None
: Specify an output file where to generate source map
* source_map_root : str, None
: The path to the original source to be included in the
: source map
* source_map_url : str, None
: The path to the source map to be added in
: //# sourceMappingURL.
: Defaults to the value passed with --source-map
* source_map_include_sources : bool, None
: Pass this flag if you want to include the
: content of source files in the source map as
: sourcesContent property
* in_source_map : str, None
: Input source map, useful if you're compressing JS
: that was generated from some other original code
* screw_ie8 : bool, None
: Pass this flag if you don't care about full compliance with
: Internet Explorer 6-8 quirks (by default UglifyJS will try to
: be IE-proof)
* expr : bool, None
: Parse a single expression, rather than a program (for parsing JSON)
* prefix : str, None
: Skip prefix for original filenames that appear in source maps.
: For example --prefix 3 will drop 3 directories from file names
: and ensure they are relative paths. You can also specify --prefix
: relative, which will make UglifyJS figure out itself the relative
: paths between original sources, the source map and the output
: file
* beautify : str, None
: Beautify output/specify output options
* mangle : list, None
: Mangle names/pass mangler options
* reserved : list, None
: Reserved names to exclude from mangling
* compress : str, None
: Enable compressor/pass compressor options. Pass options like
: --compress hoist_vars=false,if_return=false.
: Use --compress with no argument to use the default compression
: options
* define : str, None
: Global definitions
* enclose : str, None
: Embed everything in a big function, with a configurable
: parameter/argument list
* comments : str, None
: Preserve copyright comments in the output.
: By default this works like Google Closure, keeping JSDoc-style
: comments that contain "@license" or "@preserve". You can
: optionally pass one of the following arguments to this flag:
: - "all" to keep all comments
: - a valid JS regexp (needs to start with a slash) to keep
: only comments that match.
: Note that currently not *all* comments can be kept when
: compression is on, because of dead code removal or cascading
: statements into sequences
* preamble : str, None
: Preamble to prepend to the output. You can use this to insert
: a comment, for example for licensing information. This will
: not be parsed, but the source map will adjust for its presence
* stats : bool, None
: Display operations run time on STDERR
* acorn : bool, None
: Use Acorn for parsing
* spidermonkey : bool, None
: Assume input files are SpiderMonkey AST format (as JSON)
* self : bool, None
: Build itself (UglifyJS2) as a library (implies --wrap=UglifyJS
: --export-all)
* wrap : str, None
: Embed everything in a big function, making the "exports" and
: "global" variables available. You need to pass an argument to this
: option to specify the name that your model will take when included
: in, say, a browser
* export_all : bool, None
: Only used when --wrap, this tells UglifyJS to add code to
: automatically export all globals
* lint : bool, None
: Display some scope warnings
* verbose : bool, None
: Verbose
* noerr : bool, None
: Don't throw an error for unknown options in --compress,
: --beautify, --mangle
Requirements:
* node.js
* uglify-js
to install, run `npm install --save-dev uglify-js`
"""
import os
from shutil import copyfile, Error
from pybuildtool import BaseTask
tool_name = __name__
class Task(BaseTask):
name = tool_name
conf = {
'_replace_patterns_': ((r'\.js$', '.min.js'),),
}
def prepare(self):
cfg = self.conf
args = self.args
self.add_bool_args('source_map_include_sources', 'screw_ie8', 'expr',
'stats', 'acorn', 'spidermonkey', 'self', 'export_all', 'lint',
'verbose', 'noerr')
self.add_path_args('source_map_root', 'in_source_map')
self.add_str_args('source_map', 'source_map_url', 'prefix', 'beautify',
'reserved', 'define', 'enclose', 'preamble', 'wrap')
if cfg.get('in_source_map', None):
args.append("--in-source-map='%s'" % cfg['in_source_map'])
for config in ('mangle', 'compress', 'comments'):
if not config in cfg:
continue
c = cfg[config]
if c:
args.append("--%s='%s'" % (config, c))
else:
args.append('--' + config)
def perform(self):
if len(self.file_in) != 1:
self.bld.fatal('%s only need one input' % tool_name.capitalize())
if len(self.file_out) != 1:
self.bld.fatal('%s only have one output' % tool_name.capitalize())
if not self.is_production():
try:
copyfile(self.file_in[0], self.file_out[0])
return 0
except (IOError, Error):
self.bld.fatal('cannot copy file to ' + self.file_out[0])
return -1
executable = self.env['%s_BIN' % tool_name.upper()]
return self.exec_command(
'{exe} {arg} {in_} -o {out}'.format(
exe=executable,
arg=' '.join(self.args),
in_=self.file_in[0],
out=self.file_out[0],
))
def configure(conf):
bin_path = 'node_modules/uglify-js/bin/uglifyjs'
conf.start_msg("Checking for program '%s'" % tool_name)
if os.path.exists(bin_path):
bin_path = os.path.realpath(bin_path)
conf.end_msg(bin_path)
else:
conf.end_msg('not found', color='YELLOW')
bin_path = conf.find_program('uglifyjs')[0]
conf.env['%s_BIN' % tool_name.upper()] = bin_path
| [
"os.path.realpath",
"os.path.exists",
"shutil.copyfile"
] | [((6736, 6760), 'os.path.exists', 'os.path.exists', (['bin_path'], {}), '(bin_path)\n', (6750, 6760), False, 'import os\n'), ((6781, 6807), 'os.path.realpath', 'os.path.realpath', (['bin_path'], {}), '(bin_path)\n', (6797, 6807), False, 'import os\n'), ((6104, 6147), 'shutil.copyfile', 'copyfile', (['self.file_in[0]', 'self.file_out[0]'], {}), '(self.file_in[0], self.file_out[0])\n', (6112, 6147), False, 'from shutil import copyfile, Error\n')] |
#file -- var.py --
import os
from html.parser import HTMLParser
from helpers import getTrophyCount
LOCKED, UNLOCKED = 0x0, 0x1 # Values for locked and unlocked status of trophy
SYNCED, NOTSYNCED = 0x20, 0x00 # Values for if a trophy was synced online or not
PL, GO, SI, BR = 0x1, 0x2, 0x3, 0x4 # Values for trophy types (Platinum, Gold, Silver, Bronze)
NUMOFTROPHY = 0xFF # Number of trophies, found in header of trptitle.dat
GROUPSIZE = 0x70 # Size of a trophy group block
TRPBLOCK1 = 0x70 # Size of TRPTITLE trophy data block1
TRPBLOCK2 = 0x60 # Size of TRPTITLE trophy data block2
TRANBLOCK = 0xB0 # Size of TRPTRANS trophy block
TROPHYID = 0x13 # 0 for platinum
TROPHYSTATE = 0x17 # 0 for lock, 1 for unlock
TROPHYSSYNC = 0x1A # 0x00 for unsynced and 0x20 for synced
TROPHYDATE1 = slice(0x20, 0x28) # Date trophy was unlocked
TROPHYDATE2 = slice(0x28, 0x30) # Date trophy was synced to PSN
EMPTYDATE = bytes(8) # Empty date value
PATH = 'files' # Default folder for support files
TROP = os.path.join(PATH, 'TROP.SFM')
TITLE = os.path.join(PATH, 'TRPTITLE.DAT')
TRANS = os.path.join(PATH, 'TRPTRANS.DAT')
def init_globals():
global processedCount, date_str, timestamp, trophy_type, maxTrophies
processedCount = 0 # Global variable to store number of trophies altered
date_str = '' # Global variable to store date string
timestamp = bytearray(8) # Global variable to store timestamp
trophy_type = 0 # Global variable to store trophy type
maxTrophies = getTrophyCount() # Global variable to store number of trophies
# Creates variable to store trophy data from TROP.SFM
def init_trophyData():
global trophyData
trophyData = {'id': [], 'type': [], 'name': [], 'desc': []}
# Treats TROP.SFM like HTML and parses the tags, attributes and data
class MyHTMLParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.nameTag = False
self.detailTag = False
#HTML Parser Methods
def handle_starttag(self, tag, attrs):
if tag not in ('trophy', 'name', 'detail'):
return
# If trophy tag is found, gather ID and Type
if tag == 'trophy':
for name, value in attrs:
if name == 'id':
trophyData['id'].append(int(value))
if name == 'ttype':
trophyData['type'].append(value)
# If name tag is found, set variable true so handle_data will gather name
if tag == 'name':
self.nameTag = True
# If detail tag is found, set variable true so handle_data will gather detail
if tag == 'detail':
self.detailTag = True
def handle_data(self, data):
# If name variable is true, gather name for this trophy
if self.nameTag == True:
trophyData['name'].append(data)
self.nameTag = False
# If detail variable is true, gather detail for this trophy
if self.detailTag == True:
trophyData['desc'].append(data)
self.detailTag = False
| [
"html.parser.HTMLParser.__init__",
"os.path.join",
"helpers.getTrophyCount"
] | [((1213, 1243), 'os.path.join', 'os.path.join', (['PATH', '"""TROP.SFM"""'], {}), "(PATH, 'TROP.SFM')\n", (1225, 1243), False, 'import os\n'), ((1252, 1286), 'os.path.join', 'os.path.join', (['PATH', '"""TRPTITLE.DAT"""'], {}), "(PATH, 'TRPTITLE.DAT')\n", (1264, 1286), False, 'import os\n'), ((1295, 1329), 'os.path.join', 'os.path.join', (['PATH', '"""TRPTRANS.DAT"""'], {}), "(PATH, 'TRPTRANS.DAT')\n", (1307, 1329), False, 'import os\n'), ((1775, 1791), 'helpers.getTrophyCount', 'getTrophyCount', ([], {}), '()\n', (1789, 1791), False, 'from helpers import getTrophyCount\n'), ((2142, 2167), 'html.parser.HTMLParser.__init__', 'HTMLParser.__init__', (['self'], {}), '(self)\n', (2161, 2167), False, 'from html.parser import HTMLParser\n')] |
from discord.ext import commands
class FunCog(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name='party', help="party hard")
async def sun_command(self, ctx):
await ctx.send("https://tenor.com/view/party-hard-harrypotter-alan-rickman-dumbledore-gif-4934551")
@commands.command(name='blau', help="Blau wie das Meer")
async def blau(self, ctx):
await ctx.send("https://www.youtube.com/watch?v=TgDXGQYib8k")
@commands.command(name='trap', help="Ad<NAME> sagt:")
async def trap(self, ctx):
await ctx.send("https://giphy.com/gifs/Z1LYiyIPhnG9O")
@commands.command(name='darkside', help="Dark site of the force")
async def darkside(self, ctx):
await ctx.send("https://gph.is/VxbsSv")
@commands.command(name='hootsforce', help="a music video with submarines fighting an evil wizard... in space")
async def space_metal(self, ctx):
await ctx.send("https://gph.is/VxbsSv")
@commands.command(name='headache', help="...")
async def star_trek_headache(self, ctx):
await ctx.send("https://giphy.com/gifs/6OWIl75ibpuFO")
@commands.command(name='facepalm', help=":person_facepalming: ")
async def star_trek_facepalm(self, ctx):
await ctx.send("https://gph.is/2nBvKOZ")
@commands.command(name='pika', help="pikaCHUUUUUUUUUU")
async def star_trek_facepalm(self, ctx):
await ctx.send("https://www.youtube.com/watch?v=AjSuZgNgMZc\nMusic are like knights. they must be coverd in metal")
def setup(bot):
bot.add_cog(FunCog(bot))
| [
"discord.ext.commands.command"
] | [((121, 170), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""party"""', 'help': '"""party hard"""'}), "(name='party', help='party hard')\n", (137, 170), False, 'from discord.ext import commands\n'), ((323, 378), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""blau"""', 'help': '"""Blau wie das Meer"""'}), "(name='blau', help='Blau wie das Meer')\n", (339, 378), False, 'from discord.ext import commands\n'), ((486, 538), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""trap"""', 'help': '"""Ad<NAME> sagt:"""'}), "(name='trap', help='Ad<NAME> sagt:')\n", (502, 538), False, 'from discord.ext import commands\n'), ((639, 703), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""darkside"""', 'help': '"""Dark site of the force"""'}), "(name='darkside', help='Dark site of the force')\n", (655, 703), False, 'from discord.ext import commands\n'), ((793, 907), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""hootsforce"""', 'help': '"""a music video with submarines fighting an evil wizard... in space"""'}), "(name='hootsforce', help=\n 'a music video with submarines fighting an evil wizard... in space')\n", (809, 907), False, 'from discord.ext import commands\n'), ((995, 1040), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""headache"""', 'help': '"""..."""'}), "(name='headache', help='...')\n", (1011, 1040), False, 'from discord.ext import commands\n'), ((1155, 1218), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""facepalm"""', 'help': '""":person_facepalming: """'}), "(name='facepalm', help=':person_facepalming: ')\n", (1171, 1218), False, 'from discord.ext import commands\n'), ((1319, 1373), 'discord.ext.commands.command', 'commands.command', ([], {'name': '"""pika"""', 'help': '"""pikaCHUUUUUUUUUU"""'}), "(name='pika', help='pikaCHUUUUUUUUUU')\n", (1335, 1373), False, 'from discord.ext import commands\n')] |
from django.urls import path, re_path
from . import views
app_name = 'blog'
urlpatterns = [
path('', views.index, name='index'),
path('category/<int:id>/', views.categories, name='categories'),
# path('details/<int:id>/<slug:slug>/', views.details, name='details'),
# https://stackoverflow.com/questions/55175353/django-slug-url-in-perisan-404
re_path('details/(?P<id>[0-9]+)/(?P<slug>[\\w-]+)/', views.details, name='details'),
path('add_article/', views.add_article, name='add_article'),
path('edit_article/<int:article_id>', views.edit_article, name='edit_article'),
path('delete_article/<int:article_id>', views.delete_article, name='delete_article'),
path('like/<int:article_id>/', views.like_article, name='like_article'),
path('category_list/', views.CategoryList.as_view(), name='CategoryList'),
] | [
"django.urls.re_path",
"django.urls.path"
] | [((97, 132), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""index"""'}), "('', views.index, name='index')\n", (101, 132), False, 'from django.urls import path, re_path\n'), ((138, 201), 'django.urls.path', 'path', (['"""category/<int:id>/"""', 'views.categories'], {'name': '"""categories"""'}), "('category/<int:id>/', views.categories, name='categories')\n", (142, 201), False, 'from django.urls import path, re_path\n'), ((365, 453), 'django.urls.re_path', 're_path', (['"""details/(?P<id>[0-9]+)/(?P<slug>[\\\\w-]+)/"""', 'views.details'], {'name': '"""details"""'}), "('details/(?P<id>[0-9]+)/(?P<slug>[\\\\w-]+)/', views.details, name=\n 'details')\n", (372, 453), False, 'from django.urls import path, re_path\n'), ((454, 513), 'django.urls.path', 'path', (['"""add_article/"""', 'views.add_article'], {'name': '"""add_article"""'}), "('add_article/', views.add_article, name='add_article')\n", (458, 513), False, 'from django.urls import path, re_path\n'), ((519, 597), 'django.urls.path', 'path', (['"""edit_article/<int:article_id>"""', 'views.edit_article'], {'name': '"""edit_article"""'}), "('edit_article/<int:article_id>', views.edit_article, name='edit_article')\n", (523, 597), False, 'from django.urls import path, re_path\n'), ((603, 692), 'django.urls.path', 'path', (['"""delete_article/<int:article_id>"""', 'views.delete_article'], {'name': '"""delete_article"""'}), "('delete_article/<int:article_id>', views.delete_article, name=\n 'delete_article')\n", (607, 692), False, 'from django.urls import path, re_path\n'), ((693, 764), 'django.urls.path', 'path', (['"""like/<int:article_id>/"""', 'views.like_article'], {'name': '"""like_article"""'}), "('like/<int:article_id>/', views.like_article, name='like_article')\n", (697, 764), False, 'from django.urls import path, re_path\n')] |
from django import test
from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException
from django.test import TestCase
from .utils import TestCaseWithUtils
# Create your tests here.
class IngredientTests(TestCase):
def test_same_name_equals(self):
ing = Ingredient("a")
ing2 = Ingredient("a")
self.assertEqual(ing,ing2, "Ingredients with the same name are not equal")
def test_differnet_name_not_equals(self):
ing = Ingredient("a")
ing2 = Ingredient("b")
self.assertNotEqual(ing,ing2, "Ingredients with different name are equal")
def test_str(self):
self.assertEqual(
"a",
str(Ingredient("a")),
"Ingredient did not convert to string properly"
)
class MeasurementUnitTests(TestCaseWithUtils):
def test_invalid_conversion_invalid_type(self):
self.assertConversionRaisesException(MeasurementUnit("testA","t",1,UnitType.MASS,True),1,MeasurementUnit("testB","t",1,UnitType.QUANTITY,True),InvalidConversionException)
def test_invalid_conversion_different_base(self):
self.assertConversionRaisesException(MeasurementUnit("testA","t",1,UnitType.MASS,True),1,MeasurementUnit("testB","t",1,UnitType.MASS,True),InvalidConversionException)
def test_invalid_conversion_invalid_target_unit(self):
self.assertConversionRaisesException(MeasurementUnit("testA","t",1,UnitType.MASS),1,MeasurementUnit("testB","t",1," "),InvalidConversionException)
def test_equal_quantity_unit(self):
self.assertEqual(MeasurementUnit("testA","t",1,UnitType.QUANTITY),MeasurementUnit("testA","t",1,UnitType.QUANTITY))
def test_equal_mass_unit(self):
self.assertEqual(MeasurementUnit("testA","t",1,UnitType.MASS),MeasurementUnit("testA","t",1,UnitType.MASS))
def test_invalid_equal_volume_unit(self):
self.assertEqual(MeasurementUnit("testA","t",1,UnitType.VOLUME),MeasurementUnit("testA","t",1,UnitType.VOLUME))
def test_unequal_volume_quantity_unit(self):
self.assertNotEqual(MeasurementUnit("testA","t",1,UnitType.VOLUME),MeasurementUnit("testA","t",1,UnitType.QUANTITY))
def test_convert_1_unit_to_1_unit(self):
self.assertConvertsTo(MeasurementUnit("test","t", None, UnitType.QUANTITY), 1, MeasurementUnit("test2","t2", None, UnitType.QUANTITY), 1, places=3)
def test_convert_kg_to_m3_water(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1, UnitType.MASS), 1, MeasurementUnit("test2","t2", 1, UnitType.VOLUME), 1e-3, density=997, places=3)
def test_convert_kg_to_l_water(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1, UnitType.MASS), 1, MeasurementUnit("test2","t2", 1e-3, UnitType.VOLUME), 1, density=997, places=2)
def test_convert_m3_to_kg_water(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1, UnitType.VOLUME), 1, MeasurementUnit("test2","t2", 1, UnitType.MASS), 997, density=997, places=3)
def test_create_base_unit_with_no_type_raises(self):
self.assertRaises(ValueError, lambda: MeasurementUnit('a','', 1, None))
def test_to_base_unit(self):
unit = MeasurementUnit("test","t", 1e-1, UnitType.MASS)
self.assertAlmostEqual(1e-1,unit.get_base_unit_amount(1))
def test_convert_g_to_kg(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.MASS),1,MeasurementUnit("test2","t2", 1, UnitType.MASS),1e-3)
def test_convert_g_to_g(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.MASS),1,MeasurementUnit("test2","t2", 1e-3, UnitType.MASS),1)
def test_convert_mg_to_g(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-6, UnitType.MASS),1,MeasurementUnit("test2","t2", 1e-3, UnitType.MASS),1e-3)
def test_convert_g_to_mg(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.MASS),1,MeasurementUnit("test2","t2", 1e-6, UnitType.MASS),1e3)
def test_to_base_unit(self):
unit = MeasurementUnit("test","t", 1e-1, UnitType.VOLUME)
self.assertAlmostEqual(1e-1,unit.get_base_unit_amount(1))
def test_convert_milm3_to_m3(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.VOLUME),1,MeasurementUnit("test2","t2", 1, UnitType.VOLUME),1e-3)
def test_convert_milm3_to_milm3(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.VOLUME),1,MeasurementUnit("test2","t2", 1e-3, UnitType.VOLUME),1)
def test_convert_microm3_to_milm3(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-6, UnitType.VOLUME),1,MeasurementUnit("test2","t2", 1e-3, UnitType.VOLUME),1e-3)
def test_convert_milm3_to_microm3(self):
self.assertConvertsTo(MeasurementUnit("test","t", 1e-3, UnitType.VOLUME),1,MeasurementUnit("test2","t2", 1e-6, UnitType.VOLUME),1e3)
| [
"dj_ingredient_field.MeasurementUnit",
"dj_ingredient_field.Ingredient"
] | [((304, 319), 'dj_ingredient_field.Ingredient', 'Ingredient', (['"""a"""'], {}), "('a')\n", (314, 319), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((335, 350), 'dj_ingredient_field.Ingredient', 'Ingredient', (['"""a"""'], {}), "('a')\n", (345, 350), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((496, 511), 'dj_ingredient_field.Ingredient', 'Ingredient', (['"""a"""'], {}), "('a')\n", (506, 511), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((527, 542), 'dj_ingredient_field.Ingredient', 'Ingredient', (['"""b"""'], {}), "('b')\n", (537, 542), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3232, 3280), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.1)', 'UnitType.MASS'], {}), "('test', 't', 0.1, UnitType.MASS)\n", (3247, 3280), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4088, 4138), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.1)', 'UnitType.VOLUME'], {}), "('test', 't', 0.1, UnitType.VOLUME)\n", (4103, 4138), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((953, 1006), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.MASS', '(True)'], {}), "('testA', 't', 1, UnitType.MASS, True)\n", (968, 1006), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1005, 1062), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testB"""', '"""t"""', '(1)', 'UnitType.QUANTITY', '(True)'], {}), "('testB', 't', 1, UnitType.QUANTITY, True)\n", (1020, 1062), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1191, 1244), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.MASS', '(True)'], {}), "('testA', 't', 1, UnitType.MASS, True)\n", (1206, 1244), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1243, 1296), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testB"""', '"""t"""', '(1)', 'UnitType.MASS', '(True)'], {}), "('testB', 't', 1, UnitType.MASS, True)\n", (1258, 1296), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1430, 1477), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.MASS'], {}), "('testA', 't', 1, UnitType.MASS)\n", (1445, 1477), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1477, 1514), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testB"""', '"""t"""', '(1)', '""" """'], {}), "('testB', 't', 1, ' ')\n", (1492, 1514), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1610, 1661), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.QUANTITY'], {}), "('testA', 't', 1, UnitType.QUANTITY)\n", (1625, 1661), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1659, 1710), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.QUANTITY'], {}), "('testA', 't', 1, UnitType.QUANTITY)\n", (1674, 1710), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1775, 1822), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.MASS'], {}), "('testA', 't', 1, UnitType.MASS)\n", (1790, 1822), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1820, 1867), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.MASS'], {}), "('testA', 't', 1, UnitType.MASS)\n", (1835, 1867), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1942, 1991), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.VOLUME'], {}), "('testA', 't', 1, UnitType.VOLUME)\n", (1957, 1991), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((1989, 2038), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.VOLUME'], {}), "('testA', 't', 1, UnitType.VOLUME)\n", (2004, 2038), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2119, 2168), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.VOLUME'], {}), "('testA', 't', 1, UnitType.VOLUME)\n", (2134, 2168), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2166, 2217), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""testA"""', '"""t"""', '(1)', 'UnitType.QUANTITY'], {}), "('testA', 't', 1, UnitType.QUANTITY)\n", (2181, 2217), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2297, 2350), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', 'None', 'UnitType.QUANTITY'], {}), "('test', 't', None, UnitType.QUANTITY)\n", (2312, 2350), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2354, 2409), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', 'None', 'UnitType.QUANTITY'], {}), "('test2', 't2', None, UnitType.QUANTITY)\n", (2369, 2409), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2501, 2547), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(1)', 'UnitType.MASS'], {}), "('test', 't', 1, UnitType.MASS)\n", (2516, 2547), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2551, 2601), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1)', 'UnitType.VOLUME'], {}), "('test2', 't2', 1, UnitType.VOLUME)\n", (2566, 2601), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2708, 2754), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(1)', 'UnitType.MASS'], {}), "('test', 't', 1, UnitType.MASS)\n", (2723, 2754), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2758, 2812), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test2', 't2', 0.001, UnitType.VOLUME)\n", (2773, 2812), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2912, 2960), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(1)', 'UnitType.VOLUME'], {}), "('test', 't', 1, UnitType.VOLUME)\n", (2927, 2960), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((2964, 3012), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1)', 'UnitType.MASS'], {}), "('test2', 't2', 1, UnitType.MASS)\n", (2979, 3012), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3414, 3464), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.MASS'], {}), "('test', 't', 0.001, UnitType.MASS)\n", (3429, 3464), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3465, 3513), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1)', 'UnitType.MASS'], {}), "('test2', 't2', 1, UnitType.MASS)\n", (3480, 3513), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3585, 3635), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.MASS'], {}), "('test', 't', 0.001, UnitType.MASS)\n", (3600, 3635), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3636, 3688), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(0.001)', 'UnitType.MASS'], {}), "('test2', 't2', 0.001, UnitType.MASS)\n", (3651, 3688), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3757, 3807), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(1e-06)', 'UnitType.MASS'], {}), "('test', 't', 1e-06, UnitType.MASS)\n", (3772, 3807), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3808, 3860), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(0.001)', 'UnitType.MASS'], {}), "('test2', 't2', 0.001, UnitType.MASS)\n", (3823, 3860), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3932, 3982), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.MASS'], {}), "('test', 't', 0.001, UnitType.MASS)\n", (3947, 3982), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3983, 4035), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1e-06)', 'UnitType.MASS'], {}), "('test2', 't2', 1e-06, UnitType.MASS)\n", (3998, 4035), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4276, 4328), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test', 't', 0.001, UnitType.VOLUME)\n", (4291, 4328), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4329, 4379), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1)', 'UnitType.VOLUME'], {}), "('test2', 't2', 1, UnitType.VOLUME)\n", (4344, 4379), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4459, 4511), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test', 't', 0.001, UnitType.VOLUME)\n", (4474, 4511), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4512, 4566), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test2', 't2', 0.001, UnitType.VOLUME)\n", (4527, 4566), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4644, 4696), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(1e-06)', 'UnitType.VOLUME'], {}), "('test', 't', 1e-06, UnitType.VOLUME)\n", (4659, 4696), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4697, 4751), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test2', 't2', 0.001, UnitType.VOLUME)\n", (4712, 4751), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4832, 4884), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test"""', '"""t"""', '(0.001)', 'UnitType.VOLUME'], {}), "('test', 't', 0.001, UnitType.VOLUME)\n", (4847, 4884), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((4885, 4939), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""test2"""', '"""t2"""', '(1e-06)', 'UnitType.VOLUME'], {}), "('test2', 't2', 1e-06, UnitType.VOLUME)\n", (4900, 4939), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((715, 730), 'dj_ingredient_field.Ingredient', 'Ingredient', (['"""a"""'], {}), "('a')\n", (725, 730), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n'), ((3145, 3178), 'dj_ingredient_field.MeasurementUnit', 'MeasurementUnit', (['"""a"""', '""""""', '(1)', 'None'], {}), "('a', '', 1, None)\n", (3160, 3178), False, 'from dj_ingredient_field import Ingredient, MeasurementUnit, UnitType, InvalidConversionException\n')] |
from rest_framework.metadata import SimpleMetadata
from rest_framework.schemas.openapi import AutoSchema
class APIMetadata(SimpleMetadata):
"""Extended metadata generator."""
def get_field_info(self, field):
field_info = super().get_field_info(field)
# Add extra validators using the OpenAPI schema generator
validators = {}
AutoSchema()._map_field_validators(field, validators)
extra_validators = ['format', 'pattern']
for validator in extra_validators:
if validators.get(validator, None):
field_info[validator] = validators[validator]
# Add additional data from serializer
field_info['initial'] = field.initial
field_info['field_name'] = field.field_name
field_info['write_only'] = field.write_only
return field_info
# >> > import json
# >> > from your_app.serializers import UserSerializer
# >> > metadata_generator = APIMetadata()
# >> > metadata = metadata_generator.get_serializer_info(UserSerializer())
# >> > with open('User.json', 'w') as json_file:
# ...
# json.dump(metadata, json_file, indent=2, sort_keys=True) | [
"rest_framework.schemas.openapi.AutoSchema"
] | [((368, 380), 'rest_framework.schemas.openapi.AutoSchema', 'AutoSchema', ([], {}), '()\n', (378, 380), False, 'from rest_framework.schemas.openapi import AutoSchema\n')] |
# -*- coding: UTF-8 -*-
from aliyunIoT import Device # iot组件是连接阿里云物联网平台的组件
import network # Wi-Fi功能所在库
import ujson # json字串解析库
import utime # 延时API所在组件
from driver import GPIO, ADC # driver类,用于控制微处理器的输入输出功能
import gp2y10 # dsm501a 空气质量传感器类
gp2y10Obj = 0
# 物联网平台连接标志位
iot_connected = False
wlan = None
# 三元组信息
productKey = "产品密钥"
deviceName = "设备名称"
deviceSecret = "设备密钥"
# 物联网设备实例
device = None
# Wi-Fi SSID和Password设置
wifiSsid = "路由器名称"
wifiPassword = "<PASSWORD>"
# 等待Wi-Fi成功连接到路由器
def get_wifi_status():
global wlan
wifi_connected = False
wlan.active(True) # 激活界面
wlan.scan() # 扫描接入点
wlan.disconnect() # 断开Wi-Fi
#print("start to connect ", wifiSsid)
# 连接到指定的路由器(路由器名称为wifiSsid, 密码为:<PASSWORD>Password)
wlan.connect(wifiSsid, wifiPassword)
while True:
wifi_connected = wlan.isconnected() # 获取Wi-Fi连接路由器的状态信息
if wifi_connected: # Wi-Fi连接成功则退出while循环
break
else:
utime.sleep(0.5)
print("wifi_connected:", wifi_connected)
ifconfig = wlan.ifconfig() # 获取接口的IP/netmask/gw/DNS地址
print(ifconfig)
utime.sleep(0.5)
# 物联网平台连接成功的回调函数
def on_connect(data):
global iot_connected
iot_connected = True
# 设置props 事件接收函数(当云平台向设备下发属性时)
def on_props(request):
pass
def connect_lk(productKey, deviceName, deviceSecret):
global device, iot_connected
key_info = {
'region': 'cn-shanghai',
'productKey': productKey,
'deviceName': deviceName,
'deviceSecret': deviceSecret,
'keepaliveSec': 60
}
# 将三元组信息设置到iot组件中
device = Device()
# 设定连接到物联网平台的回调函数,如果连接物联网平台成功,则调用on_connect函数
device.on(Device.ON_CONNECT, on_connect)
# 配置收到云端属性控制指令的回调函数,如果收到物联网平台发送的属性控制消息,则调用on_props函数
device.on(Device.ON_PROPS, on_props)
# 启动连接阿里云物联网平台过程
device.connect(key_info)
# 等待设备成功连接到物联网平台
while(True):
if iot_connected:
print('物联网平台连接成功')
break
else:
print('sleep for 1 s')
utime.sleep(1)
print('sleep for 2s')
utime.sleep(2)
def device_init():
global gp2y10Obj
gpioDev = GPIO()
gpioDev.open("gp2y10led")
adcDev = ADC()
adcDev.open("gp2y10out")
gp2y10Obj = gp2y10.GP2Y10(adcDev, gpioDev)
print("gp2y10Obj inited!")
def air_report():
global gp2y10Obj
while True: # 无限循环
# 这里的数据仅表示从ADC上读到的数据,未实现到PM2.5的转换,仅作案例参考
dustValue = gp2y10Obj.getDustVal()
print('dustValue = ', dustValue)
# 生成上报到物联网平台的属性值字串,此处的属性标识符"pm25_value"必须和物联网平台的属性一致
upload_data = {'params': ujson.dumps({
'pollen_value': 0,
'pm25_value': dustValue
})
}
# 上传状态到物联网平台
device.postProps(upload_data)
utime.sleep(60) # 打印完之后休眠60秒
if __name__ == '__main__':
wlan = network.WLAN(network.STA_IF) # 创建WLAN对象
get_wifi_status()
connect_lk(productKey, deviceName, deviceSecret)
device_init()
air_report()
| [
"utime.sleep",
"ujson.dumps",
"gp2y10.GP2Y10",
"driver.ADC",
"driver.GPIO",
"network.WLAN",
"aliyunIoT.Device"
] | [((1224, 1240), 'utime.sleep', 'utime.sleep', (['(0.5)'], {}), '(0.5)\n', (1235, 1240), False, 'import utime\n'), ((1712, 1720), 'aliyunIoT.Device', 'Device', ([], {}), '()\n', (1718, 1720), False, 'from aliyunIoT import Device\n'), ((2187, 2201), 'utime.sleep', 'utime.sleep', (['(2)'], {}), '(2)\n', (2198, 2201), False, 'import utime\n'), ((2259, 2265), 'driver.GPIO', 'GPIO', ([], {}), '()\n', (2263, 2265), False, 'from driver import GPIO, ADC\n'), ((2310, 2315), 'driver.ADC', 'ADC', ([], {}), '()\n', (2313, 2315), False, 'from driver import GPIO, ADC\n'), ((2362, 2392), 'gp2y10.GP2Y10', 'gp2y10.GP2Y10', (['adcDev', 'gpioDev'], {}), '(adcDev, gpioDev)\n', (2375, 2392), False, 'import gp2y10\n'), ((2962, 2990), 'network.WLAN', 'network.WLAN', (['network.STA_IF'], {}), '(network.STA_IF)\n', (2974, 2990), False, 'import network\n'), ((2887, 2902), 'utime.sleep', 'utime.sleep', (['(60)'], {}), '(60)\n', (2898, 2902), False, 'import utime\n'), ((1070, 1086), 'utime.sleep', 'utime.sleep', (['(0.5)'], {}), '(0.5)\n', (1081, 1086), False, 'import utime\n'), ((2142, 2156), 'utime.sleep', 'utime.sleep', (['(1)'], {}), '(1)\n', (2153, 2156), False, 'import utime\n'), ((2717, 2774), 'ujson.dumps', 'ujson.dumps', (["{'pollen_value': 0, 'pm25_value': dustValue}"], {}), "({'pollen_value': 0, 'pm25_value': dustValue})\n", (2728, 2774), False, 'import ujson\n')] |
import unittest
import requests as req
from app.config.config import HOST
from app.api.base.base_name import *
from asi.utils import gentext
class TestAuth(unittest.TestCase):
def test_lesson_create(self):
s = req.Session()
genned_text = gentext(6)
data = {
URL: genned_text,
TYPE: 1,
ID_PROJECT: 1,
TITLE: genned_text
}
r = s.post(HOST + '/lesson', data=data)
print(r.text)
self.assertEqual(r.status_code, 200)
self.assertIsNotNone(r.text)
def test_lessons_get(self):
s = req.Session()
r = s.get(HOST + '/lessons')
print(r.text)
self.assertEqual(r.status_code, 200)
self.assertIsNotNone(r.text)
def test_lesson_update(self):
s = req.Session()
genned_text = gentext(6)
data = {
URL: genned_text,
TYPE: 1,
TITLE: genned_text
}
r = s.put(HOST + '/lesson/1', data=data)
print(r.text)
self.assertEqual(r.status_code, 200)
self.assertIsNotNone(r.text)
def test_lesson_delete(self):
s = req.Session()
r = s.delete(HOST + '/lesson/1')
print(r.text)
self.assertEqual(r.status_code, 200)
self.assertIsNotNone(r.text)
if __name__ == '__main__':
unittest.main()
| [
"unittest.main",
"asi.utils.gentext",
"requests.Session"
] | [((1354, 1369), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1367, 1369), False, 'import unittest\n'), ((223, 236), 'requests.Session', 'req.Session', ([], {}), '()\n', (234, 236), True, 'import requests as req\n'), ((259, 269), 'asi.utils.gentext', 'gentext', (['(6)'], {}), '(6)\n', (266, 269), False, 'from asi.utils import gentext\n'), ((603, 616), 'requests.Session', 'req.Session', ([], {}), '()\n', (614, 616), True, 'import requests as req\n'), ((805, 818), 'requests.Session', 'req.Session', ([], {}), '()\n', (816, 818), True, 'import requests as req\n'), ((841, 851), 'asi.utils.gentext', 'gentext', (['(6)'], {}), '(6)\n', (848, 851), False, 'from asi.utils import gentext\n'), ((1161, 1174), 'requests.Session', 'req.Session', ([], {}), '()\n', (1172, 1174), True, 'import requests as req\n')] |
import uuid
import marshmallow as ma
from app.objects.interfaces.i_object import FirstClassObjectInterface
from app.objects.c_operation import OperationSchema
from app.utility.base_object import BaseObject
class ScheduleSchema(ma.Schema):
id = ma.fields.String()
schedule = ma.fields.Time(required=True)
task = ma.fields.Nested(OperationSchema())
@ma.post_load
def build_schedule(self, data, **kwargs):
return None if kwargs.get('partial') is True else Schedule(**data)
class Schedule(FirstClassObjectInterface, BaseObject):
schema = ScheduleSchema()
@property
def unique(self):
return self.hash('%s' % self.id)
def __init__(self, schedule, task, id=''):
super().__init__()
self.id = str(id) if id else str(uuid.uuid4())
self.schedule = schedule
self.task = task
def store(self, ram):
existing = self.retrieve(ram['schedules'], self.unique)
if not existing:
ram['schedules'].append(self)
return self.retrieve(ram['schedules'], self.unique)
existing.update('schedule', self.schedule)
existing.task.update('state', self.task.state)
existing.task.update('autonomous', self.task.autonomous)
existing.task.update('obfuscator', self.task.obfuscator)
return existing
| [
"app.objects.c_operation.OperationSchema",
"marshmallow.fields.Time",
"uuid.uuid4",
"marshmallow.fields.String"
] | [((253, 271), 'marshmallow.fields.String', 'ma.fields.String', ([], {}), '()\n', (269, 271), True, 'import marshmallow as ma\n'), ((287, 316), 'marshmallow.fields.Time', 'ma.fields.Time', ([], {'required': '(True)'}), '(required=True)\n', (301, 316), True, 'import marshmallow as ma\n'), ((345, 362), 'app.objects.c_operation.OperationSchema', 'OperationSchema', ([], {}), '()\n', (360, 362), False, 'from app.objects.c_operation import OperationSchema\n'), ((785, 797), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (795, 797), False, 'import uuid\n')] |
from collections import Iterable
import inspect
import copy
from ruamel.yaml import YAML, yaml_object
yaml = YAML()
class ListOf(object):
def __init__(self, type_):
self.type_ = type_
class DotDict(dict):
"""dot.notation access to dictionary attributes"""
def __getattr__(self, attr):
return self.get(attr)
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
def __dir__(self):
return self.keys()
# Cargo-cultishly copied from: https://github.com/spindlelabs/pyes/commit/d2076b385c38d6d00cebfe0df7b0d1ba8df934bc
def __deepcopy__(self, memo):
return DotDict([(copy.deepcopy(k, memo), copy.deepcopy(v, memo)) for k, v in self.items()])
# Inspiration : https://codereview.stackexchange.com/questions/81794/dictionary-with-restricted-keys
class LooselyTypedDotDict(DotDict):
"""dot.notation access to dictionary attributes, with limited keys
Note: this class is pretty useless on its own.
You need to subclass it like so:
class MyLooselyTypedDotDict(LooselyTypedDotDict):
_allowed_keys = set([
"x", "y", "z"
])
"""
_allowed_keys = set()
_required_keys = set()
_key_types = {}
def __init__(self, coerce_types=False, **kwargs):
if not self._required_keys.issubset(self._allowed_keys):
raise ValueError("_required_keys : {!r} must be a subset of _allowed_keys {!r}".format(
self._required_keys,
self._allowed_keys,
))
for key, value in kwargs.items():
if key not in self._allowed_keys:
raise KeyError("key: {!r} not in allowed keys: {!r}".format(
key,
self._allowed_keys
))
# if key in self._key_types and not isinstance(key, self._key_types[key]):
if key in self._key_types:
# print(value)
#Update values if coerce_types==True
if coerce_types:
#TODO: Catch errors and raise more informative error messages here
#If the given type is an instance of LooselyTypedDotDict, apply coerce_types recursively
if isinstance(self._key_types[key], ListOf):
if inspect.isclass(self._key_types[key].type_) and issubclass(self._key_types[key].type_, LooselyTypedDotDict):
value = [self._key_types[key].type_(coerce_types=True, **v) for v in value]
else:
value = [self._key_types[key].type_(v) for v in value]
else:
if inspect.isclass(self._key_types[key]) and issubclass(self._key_types[key], LooselyTypedDotDict):
value = self._key_types[key](coerce_types=True, **value)
else:
value = self._key_types[key](value)
# print(value)
#Validate types
self._validate_value_type(key, value, self._key_types[key])
self[key] = value
for key in self._required_keys:
if key not in kwargs:
raise KeyError("key: {!r} is missing even though it's in the required keys: {!r}".format(
key,
self._required_keys
))
def __getattr__(self, attr):
if attr in self._allowed_keys:
return self.get(attr)
else:
# We raise AttributeError in the event that someone tries to access a nonexistent property
# to be more consistent with usual type semantics without losing dictionary access patterns.
# Note that a dictionary would usually raise KeyError
raise AttributeError
# If we did not raise AttributeError from __getattr__, the following would be required to support yaml serialization
# # This is required since our dotdict allows *any* access via dotNotation, blocking the normal
# # behavior of raising an AttributeError when trying to access a nonexistent function
# _yaml_merge = []
#
# @classmethod
# def yaml_anchor(cls):
# # This is required since our dotdict allows *any* access via dotNotation, blocking the normal
# # behavior of raising an AttributeError when trying to access a nonexistent function
# return None
def __setitem__(self, key, val):
if key not in self._allowed_keys:
raise KeyError("key: {!r} not in allowed keys: {!r}".format(
key,
self._allowed_keys
))
if key in self._key_types:
self._validate_value_type(key, val, self._key_types[key])
dict.__setitem__(self, key, val)
def __setattr__(self, key, val):
if key not in self._allowed_keys:
raise KeyError("key: {!r} not in allowed keys: {!r}".format(
key,
self._allowed_keys
))
if key in self._key_types:
self._validate_value_type(key, val, self._key_types[key])
dict.__setitem__(self, key, val)
def __delitem__(self, key):
if key in self._required_keys:
raise KeyError("key: {!r} is required and cannot be deleted".format(
key,
))
dict.__delitem__(self, key)
def __delattr__(self, key):
if key in self._required_keys:
raise KeyError("key: {!r} is required and cannot be deleted".format(
key,
))
dict.__delitem__(self, key)
def _validate_value_type(self, key, value, type_):
if type(value) != type_:
#TODO: Catch errors and raise more informative error messages here
if isinstance(type_, ListOf):
if not isinstance(value, Iterable):
raise TypeError("key: {!r} must be an Iterable type, not {!r}".format(
key,
type(value),
))
for v in value:
if not isinstance(v, type_.type_):
raise TypeError("values in key: {!r} must be of type: {!r}, not {!r} {!r}".format(
key,
type_.type_,
v,
type(v),
))
else:
raise TypeError("key: {!r} must be of type {!r}, not {!r}".format(
key,
type_,
type(value),
))
@classmethod
def to_yaml(cls, representer, node):
"""Use dict representation for DotDict (and subtypes by default)"""
return representer.represent_dict(node) | [
"inspect.isclass",
"ruamel.yaml.YAML",
"copy.deepcopy"
] | [((109, 115), 'ruamel.yaml.YAML', 'YAML', ([], {}), '()\n', (113, 115), False, 'from ruamel.yaml import YAML, yaml_object\n'), ((641, 663), 'copy.deepcopy', 'copy.deepcopy', (['k', 'memo'], {}), '(k, memo)\n', (654, 663), False, 'import copy\n'), ((665, 687), 'copy.deepcopy', 'copy.deepcopy', (['v', 'memo'], {}), '(v, memo)\n', (678, 687), False, 'import copy\n'), ((2317, 2360), 'inspect.isclass', 'inspect.isclass', (['self._key_types[key].type_'], {}), '(self._key_types[key].type_)\n', (2332, 2360), False, 'import inspect\n'), ((2697, 2734), 'inspect.isclass', 'inspect.isclass', (['self._key_types[key]'], {}), '(self._key_types[key])\n', (2712, 2734), False, 'import inspect\n')] |
from django.core.management.base import BaseCommand
from key_server.key_management_system.tasks import delete_expired_keys
class Command(BaseCommand):
help = "Deletes the keys in the database that have reached the expiration date."
def handle(self, *args, **options):
delete_expired_keys()
self.stdout.write("Key Deletion finished", ending="")
| [
"key_server.key_management_system.tasks.delete_expired_keys"
] | [((288, 309), 'key_server.key_management_system.tasks.delete_expired_keys', 'delete_expired_keys', ([], {}), '()\n', (307, 309), False, 'from key_server.key_management_system.tasks import delete_expired_keys\n')] |
from flask import Flask, request, jsonify
from flask_cors import cross_origin
app = Flask(__name__)
@app.route('/', methods=['POST'])
@cross_origin()
def main():
if request.method == 'POST':
record = request.data
print(record)
return jsonify({'status': "acknowledged"})
if __name__ == '__main__':
app.run(host="localhost", port=3005, debug=True)
| [
"flask.jsonify",
"flask_cors.cross_origin",
"flask.Flask"
] | [((85, 100), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (90, 100), False, 'from flask import Flask, request, jsonify\n'), ((138, 152), 'flask_cors.cross_origin', 'cross_origin', ([], {}), '()\n', (150, 152), False, 'from flask_cors import cross_origin\n'), ((265, 300), 'flask.jsonify', 'jsonify', (["{'status': 'acknowledged'}"], {}), "({'status': 'acknowledged'})\n", (272, 300), False, 'from flask import Flask, request, jsonify\n')] |
from functools import reduce
from ws.RLUtils.monitoring.graphing.data_compaction.compaction_mgt import compaction_mgt
def plugin_for_averaging_mgt():
_median_xindex = None
def fn_compute_yval(number_of_entries, strand_num, yvals_for_strands):
total_of_yval_strand = reduce((lambda x, y: x + y), yvals_for_strands[strand_num])
average_of_yval_strand = total_of_yval_strand / number_of_entries
return average_of_yval_strand
def fn_compute_xindex(x_index_list_for_pipe):
x_index = x_index_list_for_pipe[_median_xindex]
return x_index
fn_compress_stream_data = compaction_mgt(fn_compute_xindex, fn_compute_yval)
def fn_compress(x_index_list_for_pipe, y_vals_list_for_pipe):
nonlocal _median_xindex
_median_xindex = int(len(x_index_list_for_pipe) / 2)
return fn_compress_stream_data(x_index_list_for_pipe, y_vals_list_for_pipe)
return fn_compress
| [
"functools.reduce",
"ws.RLUtils.monitoring.graphing.data_compaction.compaction_mgt.compaction_mgt"
] | [((619, 669), 'ws.RLUtils.monitoring.graphing.data_compaction.compaction_mgt.compaction_mgt', 'compaction_mgt', (['fn_compute_xindex', 'fn_compute_yval'], {}), '(fn_compute_xindex, fn_compute_yval)\n', (633, 669), False, 'from ws.RLUtils.monitoring.graphing.data_compaction.compaction_mgt import compaction_mgt\n'), ((285, 342), 'functools.reduce', 'reduce', (['(lambda x, y: x + y)', 'yvals_for_strands[strand_num]'], {}), '(lambda x, y: x + y, yvals_for_strands[strand_num])\n', (291, 342), False, 'from functools import reduce\n')] |
import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from collections import Counter
iris = datasets.load_iris()
X_train, X_test, y_train, y_test = train_test_split(iris['data'], iris['target'], test_size = 0.33)
K = [1, 5, 10]
for k in K:
y_pred = []
for x_test in X_test:
distances = np.array([np.sum((x_test - x_train)**2) for x_train in X_train])
nearest_indexes = distances.argsort()[:k]
nearest_labels = y_train[nearest_indexes]
c = Counter(nearest_labels)
y_pred.append(c.most_common()[0][0])
print("k = {}, accuracy = {}".format(k,np.mean(y_pred == y_test)))
| [
"sklearn.datasets.load_iris",
"numpy.mean",
"sklearn.model_selection.train_test_split",
"collections.Counter",
"numpy.sum"
] | [((141, 161), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {}), '()\n', (159, 161), False, 'from sklearn import datasets\n'), ((197, 259), 'sklearn.model_selection.train_test_split', 'train_test_split', (["iris['data']", "iris['target']"], {'test_size': '(0.33)'}), "(iris['data'], iris['target'], test_size=0.33)\n", (213, 259), False, 'from sklearn.model_selection import train_test_split\n'), ((529, 552), 'collections.Counter', 'Counter', (['nearest_labels'], {}), '(nearest_labels)\n', (536, 552), False, 'from collections import Counter\n'), ((641, 666), 'numpy.mean', 'np.mean', (['(y_pred == y_test)'], {}), '(y_pred == y_test)\n', (648, 666), True, 'import numpy as np\n'), ((362, 393), 'numpy.sum', 'np.sum', (['((x_test - x_train) ** 2)'], {}), '((x_test - x_train) ** 2)\n', (368, 393), True, 'import numpy as np\n')] |
"""Main entry point module for the application"""
import sys
import logging
from qtpy.QtWidgets import QApplication, QPlainTextEdit
from qtpy.QtCore import Qt
from friendlypics2.dialogs.main_window import MainWindow
from friendlypics2.misc.gui_helpers import GuiLogger
from friendlypics2.version import __version__
def configure_logging():
"""Configure the Python logging system for the app
Only configures the console based logger"""
logging.basicConfig(level=logging.DEBUG)
def run(args):
"""Main entrypoint function
Args:
args (list): command line arguments to be passed to the application
Returns:
int: return code to report back to the shell with
"""
configure_logging()
# HACK: this next line was needed to silence an odd warning message generated by Qt
QApplication.setAttribute(Qt.AA_ShareOpenGLContexts)
# Configure our application
app = QApplication(args)
app.setOrganizationName("The Friendly Coder")
app.setOrganizationDomain("https://github.com/TheFriendlyCoder")
app.setApplicationName("FriendlyPics2")
app.setApplicationVersion(__version__)
# Configure our main window
window = MainWindow()
window.show()
# Attach the Python logging system to the GUI
log_handler = GuiLogger(window.findChild(QPlainTextEdit, "debug_log"))
logging.getLogger().addHandler(log_handler)
# Run our app
return app.exec_()
if __name__ == "__main__":
sys.exit(run(sys.argv))
| [
"logging.basicConfig",
"logging.getLogger",
"qtpy.QtWidgets.QApplication",
"qtpy.QtWidgets.QApplication.setAttribute",
"friendlypics2.dialogs.main_window.MainWindow"
] | [((450, 490), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (469, 490), False, 'import logging\n'), ((824, 876), 'qtpy.QtWidgets.QApplication.setAttribute', 'QApplication.setAttribute', (['Qt.AA_ShareOpenGLContexts'], {}), '(Qt.AA_ShareOpenGLContexts)\n', (849, 876), False, 'from qtpy.QtWidgets import QApplication, QPlainTextEdit\n'), ((920, 938), 'qtpy.QtWidgets.QApplication', 'QApplication', (['args'], {}), '(args)\n', (932, 938), False, 'from qtpy.QtWidgets import QApplication, QPlainTextEdit\n'), ((1191, 1203), 'friendlypics2.dialogs.main_window.MainWindow', 'MainWindow', ([], {}), '()\n', (1201, 1203), False, 'from friendlypics2.dialogs.main_window import MainWindow\n'), ((1352, 1371), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1369, 1371), False, 'import logging\n')] |
import json
from ..useracl import get_user_acl, serialize_user_acl
def test_user_acl_is_serializeable(cache_versions, user):
acl = get_user_acl(user, cache_versions)
assert serialize_user_acl(acl)
def test_user_acl_is_json_serializeable(cache_versions, user):
acl = get_user_acl(user, cache_versions)
serialized_acl = serialize_user_acl(acl)
assert json.dumps(serialized_acl)
| [
"json.dumps"
] | [((374, 400), 'json.dumps', 'json.dumps', (['serialized_acl'], {}), '(serialized_acl)\n', (384, 400), False, 'import json\n')] |
from discord.ext import commands
from discord.ext.commands import Context
from bot import Yasen
from core.nsfw_core import get_lewd
from scripts.checks import is_nsfw
class Nsfw:
"""
Class of Nsfw commands.
"""
__slots__ = ('bot',)
def __init__(self, bot: Yasen):
"""
Init the instance of this class.
:param bot: the Yasen bot instance.
"""
self.bot = bot
def __local_check(self, ctx: Context):
return is_nsfw(ctx)
async def __process_lewd(self, ctx: Context, site: str, query: tuple):
"""
Process a search request.
:param ctx: the discord context.
:param site: the site name.
:param query: the search queries in a tuple.
"""
if site == 'danbooru' and len(query) > 2:
return ('Sorry, you can only enter up to two '
'Danbooru tags at the same time')
msg, url, tags = await get_lewd(
self.bot.session_manager, site, query,
self.bot.data_manager, *self.bot.config.danbooru
)
if msg:
await ctx.send(msg)
if url:
await ctx.send(url)
if tags:
self.bot.data_manager.set_nsfw_tags(site, tags)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def danbooru(self, ctx: Context, *query):
"""
Description: Search danbooru for lewds.
Usage: "`{prefix}danbooru up_to two_tags` if no tags given this will
search for a random image."
Restrictions: |
Can only be used in DM or a channel with a name that is equal to or
starts with `nsfw` (case insensitive)
Only accepts up to 2 tags.
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'danbooru', query)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def gelbooru(self, ctx: Context, *query):
"""
Description: Search gelbooru for lewds.
Usage: "`{prefix}gelbooru your tags` if no tags given this will
search for a random image."
Restrictions: "Can only be used in DM or a channel with a name that is
equal to or starts with `nsfw` (case insensitive)"
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'gelbooru', query)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def konachan(self, ctx: Context, *query):
"""
Description: Search konachan for lewds.
Usage: "`{prefix}konachan your tags` if no tags given this will
search for a random image."
Restrictions: "Can only be used in DM or a channel with a name that is
equal to or starts with `nsfw` (case insensitive)"
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'konachan', query)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def yandere(self, ctx: Context, *query):
"""
Description: Search yandere for lewds.
Usage: "`{prefix}yandere your tags` if no tags given this will
search for a random image."
Restrictions: "Can only be used in DM or a channel with a name that is
equal to or starts with `nsfw` (case insensitive)"
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'yandere', query)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def e621(self, ctx: Context, *query):
"""
Description: Search e621 for lewds.
Usage: "`{prefix}e621 your tags` if no tags given this will
search for a random image."
Restrictions: "Can only be used in DM or a channel with a name that is
equal to or starts with `nsfw` (case insensitive)"
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'e621', query)
@commands.command()
@commands.cooldown(rate=1, per=5, type=commands.BucketType.user)
async def rule34(self, ctx: Context, *query):
"""
Description: Search rule34 for lewds.
Usage: "`{prefix}rule34 your tags` if no tags given this will
search for a random image."
Restrictions: "Can only be used in DM or a channel with a name that is
equal to or starts with `nsfw` (case insensitive)"
Cooldown: Once every 5 seconds per user.
"""
await self.__process_lewd(ctx, 'rule34', query)
| [
"discord.ext.commands.cooldown",
"discord.ext.commands.command",
"scripts.checks.is_nsfw",
"core.nsfw_core.get_lewd"
] | [((1261, 1279), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1277, 1279), False, 'from discord.ext import commands\n'), ((1285, 1348), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (1302, 1348), False, 'from discord.ext import commands\n'), ((1893, 1911), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (1909, 1911), False, 'from discord.ext import commands\n'), ((1917, 1980), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (1934, 1980), False, 'from discord.ext import commands\n'), ((2464, 2482), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (2480, 2482), False, 'from discord.ext import commands\n'), ((2488, 2551), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (2505, 2551), False, 'from discord.ext import commands\n'), ((3035, 3053), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (3051, 3053), False, 'from discord.ext import commands\n'), ((3059, 3122), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (3076, 3122), False, 'from discord.ext import commands\n'), ((3602, 3620), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (3618, 3620), False, 'from discord.ext import commands\n'), ((3626, 3689), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (3643, 3689), False, 'from discord.ext import commands\n'), ((4157, 4175), 'discord.ext.commands.command', 'commands.command', ([], {}), '()\n', (4173, 4175), False, 'from discord.ext import commands\n'), ((4181, 4244), 'discord.ext.commands.cooldown', 'commands.cooldown', ([], {'rate': '(1)', 'per': '(5)', 'type': 'commands.BucketType.user'}), '(rate=1, per=5, type=commands.BucketType.user)\n', (4198, 4244), False, 'from discord.ext import commands\n'), ((479, 491), 'scripts.checks.is_nsfw', 'is_nsfw', (['ctx'], {}), '(ctx)\n', (486, 491), False, 'from scripts.checks import is_nsfw\n'), ((950, 1052), 'core.nsfw_core.get_lewd', 'get_lewd', (['self.bot.session_manager', 'site', 'query', 'self.bot.data_manager', '*self.bot.config.danbooru'], {}), '(self.bot.session_manager, site, query, self.bot.data_manager, *\n self.bot.config.danbooru)\n', (958, 1052), False, 'from core.nsfw_core import get_lewd\n')] |
#Copyright 2018 Google LLC
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
"""Utils functions for GNN models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
WEIGHT_INIT = tf.contrib.layers.xavier_initializer()
BIAS_INIT = tf.zeros_initializer()
############################## LAYERS #############################
def sparse_dropout(tensor, p_drop, is_training):
"""Dropout with sparse tensor."""
return tf.SparseTensor(
indices=tensor.indices,
values=tf.layers.dropout(
inputs=tensor.values,
rate=p_drop,
training=is_training),
dense_shape=tensor.dense_shape)
def dense(node_features,
in_dim,
out_dim,
p_drop,
is_training,
sparse,
use_bias=False):
"""Dense layer with sparse or dense tensor and dropout."""
w_dense = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='linear',
shape=(in_dim, out_dim))
if sparse:
node_features = sparse_dropout(node_features, p_drop, is_training)
node_features = tf.sparse_tensor_dense_matmul(node_features, w_dense)
else:
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
node_features = tf.matmul(node_features, w_dense)
if use_bias:
node_features = tf.contrib.layers.bias_add(node_features)
return node_features
def sp_gcn_layer(node_features, adj_matrix, in_dim, out_dim, p_drop,
is_training, sparse):
"""Single graph convolution layer with sparse tensors AXW.
Args:
node_features: Tensor of shape (nb_nodes, in_dim) or SparseTensor.
adj_matrix: Sparse Tensor, normalized adjacency matrix.
in_dim: integer specifying the input feature dimension.
out_dim: integer specifying the output feature dimension.
p_drop: dropout probability.
is_training: boolean, True if the model is being trained, False otherwise.
sparse: True if node_features are sparse.
Returns:
node_features: tensor of shape (nb_nodes, out_dim). New node
features obtained from applying one GCN layer.
Raises:
"""
node_features = dense(node_features, in_dim, out_dim, p_drop, is_training,
sparse)
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
node_features = tf.sparse_tensor_dense_matmul(adj_matrix, node_features)
return node_features
def gcn_layer(node_features, adj_matrix, in_dim, out_dim, p_drop, is_training,
sparse):
"""Single graph convolution layer with dense A.
Args:
node_features: Tensor of shape (nb_nodes, in_dim) or SparseTensor.
adj_matrix: Tensor, normalized adjacency matrix.
in_dim: integer specifying the input feature dimension.
out_dim: integer specifying the output feature dimension.
p_drop: dropout probability.
is_training: boolean, True if the model is being trained, False otherwise.
sparse: True if node_features are sparse.
Returns:
node_features: tensor of shape (nb_nodes, out_dim). New node
features obtained from applying one GCN layer.
Raises:
"""
node_features = dense(node_features, in_dim, out_dim, p_drop, is_training,
sparse)
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
node_features = tf.matmul(adj_matrix, node_features)
return node_features
def gcn_pool_layer(node_features, adj_matrix, in_dim, out_dim, sparse,
is_training, p_drop):
"""GCN with maxpooling over neighbours instead of avreaging."""
node_features = dense(node_features, in_dim, out_dim, p_drop, is_training,
sparse)
node_features = tf.expand_dims(node_features, 0) # 1 x N x d
# broadcasting (adj in N x N x 1 and features are 1 x N x d)
node_features = tf.multiply(node_features, adj_matrix)
node_features = tf.transpose(node_features, perm=[0, 2, 1])
node_features = tf.reduce_max(node_features, axis=-1) # N x d
return node_features
def sp_gat_layer(node_features, adj_matrix, in_dim, out_dim, p_drop,
is_training, sparse):
"""Single graph attention layer using sparse tensors.
Args:
node_features: Sparse Tensor of shape (nb_nodes, in_dim) or SparseTensor.
adj_matrix: Sparse Tensor.
in_dim: integer specifying the input feature dimension.
out_dim: integer specifying the output feature dimension.
p_drop: dropout probability.
is_training: boolean, True if the model is being trained, False otherwise
sparse: True if node features are sparse.
Returns:
node_features: tensor of shape (nb_nodes, out_dim). New node
features obtained from applying one head of attention to input.
Raises:
"""
# Linear transform
node_features = dense(node_features, in_dim, out_dim, p_drop, is_training,
sparse)
# Attention scores
alpha = sp_compute_adj_att(node_features, adj_matrix)
alpha = tf.SparseTensor(
indices=alpha.indices,
values=tf.nn.leaky_relu(alpha.values),
dense_shape=alpha.dense_shape)
alpha = tf.sparse_softmax(alpha)
alpha = sparse_dropout(alpha, p_drop, is_training)
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
# Compute self-attention features
node_features = tf.sparse_tensor_dense_matmul(alpha, node_features)
node_features = tf.contrib.layers.bias_add(node_features)
return node_features
def gat_layer(node_features, adj_matrix, out_dim, p_drop, is_training, i, j):
"""Single graph attention layer.
Args:
node_features: Tensor of shape (nb_nodes, feature_dim)
adj_matrix: adjacency matrix. Tensor of shape (nb_nodes, nb_nodes) and type
float. There should be 1 if there is a connection between two nodes and 0
otherwise.
out_dim: integer specifying the output feature dimension.
p_drop: dropout probability.
is_training: boolean, True if the model is being trained, False otherwise
i: layer index, used for naming variables
j: attention mechanism index, used for naming variables
Returns:
node_features: tensor of shape (nb_nodes, out_dim). New node
features obtained from applying one head of attention to input.
Raises:
"""
with tf.variable_scope('gat-{}-{}'.format(i, j)):
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
# Linear transform of the features
w_dense = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='linear',
shape=(node_features.shape[1], out_dim))
node_features = tf.matmul(node_features, w_dense)
alpha = compute_adj_att(node_features)
alpha = tf.nn.leaky_relu(alpha)
# Mask values before activation to inject the graph structure
# Add -infinity to corresponding pairs before normalization
bias_mat = -1e9 * (1. - adj_matrix)
# multiply here if adjacency is weighted
alpha = tf.nn.softmax(alpha + bias_mat, axis=-1)
# alpha = tf.nn.softmax(alpha, axis=-1)
alpha = tf.layers.dropout(inputs=alpha, rate=p_drop, training=is_training)
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
# Compute self-attention features
node_features = tf.matmul(alpha, node_features)
node_features = tf.contrib.layers.bias_add(node_features)
return node_features
def sp_egat_layer(node_features, adj_matrix, in_dim, out_dim, p_drop,
is_training, sparse):
"""Single graph attention layer using sparse tensors.
Args:
node_features: Tensor of shape (nb_nodes, in_dim) or SparseTensor.
adj_matrix: Sparse Tensor.
in_dim: integer specifying the input feature dimension.
out_dim: integer specifying the output feature dimension.
p_drop: dropout probability.
is_training: boolean, True if the model is being trained, False otherwise
sparse: True if node features are sparse.
Returns:
node_features: tensor of shape (nb_nodes, out_dim). New node
features obtained from applying one head of attention to input.
Raises:
"""
# Linear transform
node_features = dense(node_features, in_dim, out_dim, p_drop, is_training,
sparse)
# Attention scores
alpha = sp_compute_adj_att(node_features, adj_matrix)
alpha = tf.SparseTensor(
indices=alpha.indices,
values=tf.nn.leaky_relu(alpha.values),
dense_shape=alpha.dense_shape)
alpha = tf.sparse_softmax(alpha)
alpha = sparse_dropout(alpha, p_drop, is_training)
node_features = tf.layers.dropout(
inputs=node_features, rate=p_drop, training=is_training)
# Compute self-attention features
node_features = tf.sparse_tensor_dense_matmul(alpha, node_features)
node_features = tf.contrib.layers.bias_add(node_features)
return node_features
############################## MULTI LAYERS #############################
def mlp_module(node_features, n_hidden, p_drop, is_training, in_dim,
sparse_features, use_bias, return_hidden=False):
"""MLP."""
nb_layers = len(n_hidden)
hidden_layers = [node_features]
for i, out_dim in enumerate(n_hidden):
with tf.variable_scope('mlp-{}'.format(i)):
if i > 0:
sparse_features = False
if i == nb_layers - 1:
use_bias = False
h_i = dense(hidden_layers[-1], in_dim, out_dim, p_drop, is_training,
sparse_features, use_bias)
if i < nb_layers - 1:
h_i = tf.nn.relu(h_i)
in_dim = out_dim
hidden_layers.append(h_i)
if return_hidden:
return hidden_layers
else:
return hidden_layers[-1]
def gcn_module(node_features, adj_matrix, n_hidden, p_drop, is_training, in_dim,
sparse_features):
"""GCN module with multiple layers."""
nb_layers = len(n_hidden)
for i, out_dim in enumerate(n_hidden):
if i > 0:
sparse_features = False
with tf.variable_scope('gcn-{}'.format(i)):
node_features = sp_gcn_layer(node_features, adj_matrix, in_dim, out_dim,
p_drop, is_training, sparse_features)
if i < nb_layers - 1:
node_features = tf.nn.relu(node_features)
in_dim = out_dim
return node_features
def cheby_module(node_features, cheby_poly, n_hidden, p_drop, is_training,
in_dim, sparse_features):
"""GCN module with multiple layers."""
nb_layers = len(n_hidden)
for i, out_dim in enumerate(n_hidden):
if i > 0:
sparse_features = False
feats = []
for j, poly in enumerate(cheby_poly):
with tf.variable_scope('cheb-{}-{}'.format(i, j)):
sparse_poly = tf.contrib.layers.dense_to_sparse(poly)
feats.append(sp_gcn_layer(node_features, sparse_poly, in_dim, out_dim,
p_drop, is_training, sparse_features))
node_features = tf.add_n(feats)
if i < nb_layers - 1:
node_features = tf.nn.relu(node_features)
in_dim = out_dim
return node_features
def gat_module(node_features, adj_matrix, n_hidden, n_att, p_drop, is_training,
in_dim, sparse_features, average_last):
"""GAT module with muli-headed attention and multiple layers."""
nb_layers = len(n_att)
for i, k in enumerate(n_att):
out_dim = n_hidden[i]
att = []
if i > 0:
sparse_features = False
for j in range(k):
with tf.variable_scope('gat-layer{}-att{}'.format(i, j)):
att.append(
sp_gat_layer(node_features, adj_matrix, in_dim, out_dim, p_drop,
is_training, sparse_features))
# intermediate layers, concatenate features
if i < nb_layers - 1:
in_dim = out_dim * k
node_features = tf.nn.elu(tf.concat(att, axis=-1))
if average_last:
# last layer, average features instead of concatenating
logits = tf.add_n(att) / n_att[-1]
else:
logits = tf.concat(att, axis=-1)
return logits
def egat_module(node_features, adj_matrix, n_hidden, n_att, p_drop, is_training,
in_dim, sparse_features, average_last):
"""Edge-GAT module with muli-headed attention and multiple layers."""
nb_layers = len(n_att)
for i, k in enumerate(n_att):
out_dim = n_hidden[i]
att = []
if i > 0:
sparse_features = False
for j in range(k):
with tf.variable_scope('egat-layer{}-att{}'.format(i, j)):
att.append(
sp_gat_layer(node_features, adj_matrix, in_dim, out_dim, p_drop,
is_training, sparse_features))
# intermediate layers, concatenate features
if i < nb_layers - 1:
in_dim = out_dim * k
node_features = tf.nn.elu(tf.concat(att, axis=-1))
if average_last:
# last layer, average features instead of concatenating
logits = tf.add_n(att) / n_att[-1]
else:
logits = tf.concat(att, axis=-1)
return logits
###################### EDGE SCORES FUNCTIONS #############################
def sp_compute_adj_att(node_features, adj_matrix_sp):
"""Self-attention for edges as in GAT with sparse adjacency."""
out_dim = node_features.shape[-1]
# Self-attention mechanism
a_row = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='selfatt-row',
shape=(out_dim, 1))
a_col = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='selfatt-col',
shape=(out_dim, 1))
alpha_row = tf.matmul(node_features, a_row)
alpha_col = tf.matmul(node_features, a_col)
# Compute matrix with self-attention scores using broadcasting
alpha = tf.sparse_add(adj_matrix_sp * alpha_row,
adj_matrix_sp * tf.transpose(alpha_col, perm=[1, 0]))
return alpha
def compute_adj_att(node_features):
"""Self-attention for edges as in GAT."""
out_dim = node_features.shape[-1]
# Self-attention mechanism
a_row = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='selfatt-row',
shape=(out_dim, 1))
a_col = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='selfatt-col',
shape=(out_dim, 1))
alpha_row = tf.matmul(node_features, a_row)
alpha_col = tf.matmul(node_features, a_col)
# Compute matrix with self-attention scores using broadcasting
alpha = alpha_row + tf.transpose(alpha_col, perm=[1, 0])
# alpha += alpha_col + tf.transpose(alpha_row, perm=[1, 0])
return alpha
def compute_weighted_mat_dot(node_features, nb_dots=1):
"""Compute weighted dot with matrix multiplication."""
adj_scores = []
in_dim = node_features.shape[-1]
for i in range(nb_dots):
weight_mat = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='w-dot-{}'.format(i),
shape=(in_dim, in_dim))
adj_scores.append(tf.matmul(node_features, tf.matmul(
weight_mat, tf.transpose(node_features, perm=[1, 0]))))
return tf.add_n(adj_scores)
def compute_weighted_dot(node_features, nb_dots=4):
"""Compute weighted dot product."""
adj_scores = []
in_dim = node_features.shape[-1]
for i in range(nb_dots):
weight_vec = tf.get_variable(
initializer=WEIGHT_INIT,
dtype=tf.float32,
name='w-dot-{}'.format(i),
shape=(1, in_dim))
weight_vec = tf.nn.softmax(weight_vec, axis=-1)
adj_scores.append(tf.matmul(tf.multiply(weight_vec, node_features),
tf.transpose(node_features, perm=[1, 0])))
return tf.add_n(adj_scores)
def compute_l2_sim_matrix(node_features):
"""Compute squared-L2 distance between each pair of nodes."""
# N x N
# d_scores = tf.matmul(node_features, tf.transpose(node_features,perm=[1, 0]))
# diag = tf.diag_part(d_scores)
# d_scores *= -2.
# d_scores += tf.reshape(diag, (-1, 1)) + tf.reshape(diag, (1, -1))
l2_norm = tf.reduce_sum(tf.square(node_features), 1)
na = tf.reshape(l2_norm, [-1, 1])
nb = tf.reshape(l2_norm, [1, -1])
# return pairwise euclidead difference matrix
l2_scores = tf.maximum(
na - 2*tf.matmul(node_features, node_features, False, True) + nb, 0.0)
return l2_scores
def compute_dot_sim_matrix(node_features):
"""Compute edge scores with dot product."""
sim = tf.matmul(node_features, tf.transpose(node_features, perm=[1, 0]))
return sim
def compute_dot_norm(features):
"""Compute edge scores with normalized dot product."""
features = tf.nn.l2_normalize(features, axis=-1)
sim = tf.matmul(features, tf.transpose(features, perm=[1, 0]))
return sim
def compute_asym_dot(node_features):
"""Compute edge scores with asymmetric dot product."""
feat_left, feat_right = tf.split(node_features, 2, axis=-1)
feat_left = tf.nn.l2_normalize(feat_left, axis=-1)
feat_right = tf.nn.l2_normalize(feat_right, axis=-1)
sim = tf.matmul(feat_left, tf.transpose(feat_right, perm=[1, 0]))
return sim
def compute_adj(features, att_mechanism, p_drop, is_training):
"""Compute adj matrix given node features."""
features = tf.layers.dropout(
inputs=features, rate=p_drop, training=is_training)
if att_mechanism == 'dot':
return compute_dot_sim_matrix(features)
elif att_mechanism == 'weighted-mat-dot':
return compute_weighted_mat_dot(features)
elif att_mechanism == 'weighted-dot':
return compute_weighted_dot(features)
elif att_mechanism == 'att':
return compute_adj_att(features)
elif att_mechanism == 'dot-norm':
return compute_dot_norm(features)
elif att_mechanism == 'asym-dot':
return compute_asym_dot(features)
else:
return compute_l2_sim_matrix(features)
def get_sp_topk(adj_pred, sp_adj_train, nb_nodes, k):
"""Returns binary matrix with topK."""
_, indices = tf.nn.top_k(tf.reshape(adj_pred, (-1,)), k)
indices = tf.reshape(tf.cast(indices, tf.int64), (-1, 1))
sp_adj_pred = tf.SparseTensor(
indices=indices,
values=tf.ones(k),
dense_shape=(nb_nodes * nb_nodes,))
sp_adj_pred = tf.sparse_reshape(sp_adj_pred,
shape=(nb_nodes, nb_nodes, 1))
sp_adj_train = tf.SparseTensor(
indices=sp_adj_train.indices,
values=tf.ones_like(sp_adj_train.values),
dense_shape=sp_adj_train.dense_shape)
sp_adj_train = tf.sparse_reshape(sp_adj_train,
shape=(nb_nodes, nb_nodes, 1))
sp_adj_pred = tf.sparse_concat(
sp_inputs=[sp_adj_pred, sp_adj_train], axis=-1)
return tf.sparse_reduce_max(sp_adj_pred, axis=-1)
@tf.custom_gradient
def mask_edges(scores, mask):
masked_scores = tf.multiply(scores, mask)
def grad(dy):
return dy, None # tf.multiply(scores, dy)
return masked_scores, grad
| [
"tensorflow.get_variable",
"tensorflow.transpose",
"tensorflow.split",
"tensorflow.multiply",
"tensorflow.nn.softmax",
"tensorflow.zeros_initializer",
"tensorflow.ones_like",
"tensorflow.cast",
"tensorflow.sparse_reduce_max",
"tensorflow.concat",
"tensorflow.layers.dropout",
"tensorflow.matmul",
"tensorflow.square",
"tensorflow.nn.l2_normalize",
"tensorflow.nn.leaky_relu",
"tensorflow.reduce_max",
"tensorflow.sparse_softmax",
"tensorflow.reshape",
"tensorflow.contrib.layers.dense_to_sparse",
"tensorflow.expand_dims",
"tensorflow.sparse_reshape",
"tensorflow.sparse_concat",
"tensorflow.nn.relu",
"tensorflow.ones",
"tensorflow.contrib.layers.xavier_initializer",
"tensorflow.add_n",
"tensorflow.sparse_tensor_dense_matmul",
"tensorflow.contrib.layers.bias_add"
] | [((755, 793), 'tensorflow.contrib.layers.xavier_initializer', 'tf.contrib.layers.xavier_initializer', ([], {}), '()\n', (791, 793), True, 'import tensorflow as tf\n'), ((806, 828), 'tensorflow.zeros_initializer', 'tf.zeros_initializer', ([], {}), '()\n', (826, 828), True, 'import tensorflow as tf\n'), ((1423, 1525), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""linear"""', 'shape': '(in_dim, out_dim)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name='linear',\n shape=(in_dim, out_dim))\n", (1438, 1525), True, 'import tensorflow as tf\n'), ((2838, 2912), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (2855, 2912), True, 'import tensorflow as tf\n'), ((2938, 2994), 'tensorflow.sparse_tensor_dense_matmul', 'tf.sparse_tensor_dense_matmul', (['adj_matrix', 'node_features'], {}), '(adj_matrix, node_features)\n', (2967, 2994), True, 'import tensorflow as tf\n'), ((3861, 3935), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (3878, 3935), True, 'import tensorflow as tf\n'), ((3961, 3997), 'tensorflow.matmul', 'tf.matmul', (['adj_matrix', 'node_features'], {}), '(adj_matrix, node_features)\n', (3970, 3997), True, 'import tensorflow as tf\n'), ((4328, 4360), 'tensorflow.expand_dims', 'tf.expand_dims', (['node_features', '(0)'], {}), '(node_features, 0)\n', (4342, 4360), True, 'import tensorflow as tf\n'), ((4455, 4493), 'tensorflow.multiply', 'tf.multiply', (['node_features', 'adj_matrix'], {}), '(node_features, adj_matrix)\n', (4466, 4493), True, 'import tensorflow as tf\n'), ((4512, 4555), 'tensorflow.transpose', 'tf.transpose', (['node_features'], {'perm': '[0, 2, 1]'}), '(node_features, perm=[0, 2, 1])\n', (4524, 4555), True, 'import tensorflow as tf\n'), ((4574, 4611), 'tensorflow.reduce_max', 'tf.reduce_max', (['node_features'], {'axis': '(-1)'}), '(node_features, axis=-1)\n', (4587, 4611), True, 'import tensorflow as tf\n'), ((5728, 5752), 'tensorflow.sparse_softmax', 'tf.sparse_softmax', (['alpha'], {}), '(alpha)\n', (5745, 5752), True, 'import tensorflow as tf\n'), ((5824, 5898), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (5841, 5898), True, 'import tensorflow as tf\n'), ((5960, 6011), 'tensorflow.sparse_tensor_dense_matmul', 'tf.sparse_tensor_dense_matmul', (['alpha', 'node_features'], {}), '(alpha, node_features)\n', (5989, 6011), True, 'import tensorflow as tf\n'), ((6030, 6071), 'tensorflow.contrib.layers.bias_add', 'tf.contrib.layers.bias_add', (['node_features'], {}), '(node_features)\n', (6056, 6071), True, 'import tensorflow as tf\n'), ((9141, 9165), 'tensorflow.sparse_softmax', 'tf.sparse_softmax', (['alpha'], {}), '(alpha)\n', (9158, 9165), True, 'import tensorflow as tf\n'), ((9237, 9311), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (9254, 9311), True, 'import tensorflow as tf\n'), ((9373, 9424), 'tensorflow.sparse_tensor_dense_matmul', 'tf.sparse_tensor_dense_matmul', (['alpha', 'node_features'], {}), '(alpha, node_features)\n', (9402, 9424), True, 'import tensorflow as tf\n'), ((9443, 9484), 'tensorflow.contrib.layers.bias_add', 'tf.contrib.layers.bias_add', (['node_features'], {}), '(node_features)\n', (9469, 9484), True, 'import tensorflow as tf\n'), ((13774, 13877), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""selfatt-row"""', 'shape': '(out_dim, 1)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name=\n 'selfatt-row', shape=(out_dim, 1))\n", (13789, 13877), True, 'import tensorflow as tf\n'), ((13908, 14011), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""selfatt-col"""', 'shape': '(out_dim, 1)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name=\n 'selfatt-col', shape=(out_dim, 1))\n", (13923, 14011), True, 'import tensorflow as tf\n'), ((14046, 14077), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'a_row'], {}), '(node_features, a_row)\n', (14055, 14077), True, 'import tensorflow as tf\n'), ((14092, 14123), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'a_col'], {}), '(node_features, a_col)\n', (14101, 14123), True, 'import tensorflow as tf\n'), ((14490, 14593), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""selfatt-row"""', 'shape': '(out_dim, 1)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name=\n 'selfatt-row', shape=(out_dim, 1))\n", (14505, 14593), True, 'import tensorflow as tf\n'), ((14624, 14727), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""selfatt-col"""', 'shape': '(out_dim, 1)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name=\n 'selfatt-col', shape=(out_dim, 1))\n", (14639, 14727), True, 'import tensorflow as tf\n'), ((14762, 14793), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'a_row'], {}), '(node_features, a_row)\n', (14771, 14793), True, 'import tensorflow as tf\n'), ((14808, 14839), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'a_col'], {}), '(node_features, a_col)\n', (14817, 14839), True, 'import tensorflow as tf\n'), ((15527, 15547), 'tensorflow.add_n', 'tf.add_n', (['adj_scores'], {}), '(adj_scores)\n', (15535, 15547), True, 'import tensorflow as tf\n'), ((16083, 16103), 'tensorflow.add_n', 'tf.add_n', (['adj_scores'], {}), '(adj_scores)\n', (16091, 16103), True, 'import tensorflow as tf\n'), ((16489, 16517), 'tensorflow.reshape', 'tf.reshape', (['l2_norm', '[-1, 1]'], {}), '(l2_norm, [-1, 1])\n', (16499, 16517), True, 'import tensorflow as tf\n'), ((16525, 16553), 'tensorflow.reshape', 'tf.reshape', (['l2_norm', '[1, -1]'], {}), '(l2_norm, [1, -1])\n', (16535, 16553), True, 'import tensorflow as tf\n'), ((17007, 17044), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['features'], {'axis': '(-1)'}), '(features, axis=-1)\n', (17025, 17044), True, 'import tensorflow as tf\n'), ((17245, 17280), 'tensorflow.split', 'tf.split', (['node_features', '(2)'], {'axis': '(-1)'}), '(node_features, 2, axis=-1)\n', (17253, 17280), True, 'import tensorflow as tf\n'), ((17295, 17333), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['feat_left'], {'axis': '(-1)'}), '(feat_left, axis=-1)\n', (17313, 17333), True, 'import tensorflow as tf\n'), ((17349, 17388), 'tensorflow.nn.l2_normalize', 'tf.nn.l2_normalize', (['feat_right'], {'axis': '(-1)'}), '(feat_right, axis=-1)\n', (17367, 17388), True, 'import tensorflow as tf\n'), ((17596, 17665), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=features, rate=p_drop, training=is_training)\n', (17613, 17665), True, 'import tensorflow as tf\n'), ((18540, 18601), 'tensorflow.sparse_reshape', 'tf.sparse_reshape', (['sp_adj_pred'], {'shape': '(nb_nodes, nb_nodes, 1)'}), '(sp_adj_pred, shape=(nb_nodes, nb_nodes, 1))\n', (18557, 18601), True, 'import tensorflow as tf\n'), ((18815, 18877), 'tensorflow.sparse_reshape', 'tf.sparse_reshape', (['sp_adj_train'], {'shape': '(nb_nodes, nb_nodes, 1)'}), '(sp_adj_train, shape=(nb_nodes, nb_nodes, 1))\n', (18832, 18877), True, 'import tensorflow as tf\n'), ((18929, 18993), 'tensorflow.sparse_concat', 'tf.sparse_concat', ([], {'sp_inputs': '[sp_adj_pred, sp_adj_train]', 'axis': '(-1)'}), '(sp_inputs=[sp_adj_pred, sp_adj_train], axis=-1)\n', (18945, 18993), True, 'import tensorflow as tf\n'), ((19010, 19052), 'tensorflow.sparse_reduce_max', 'tf.sparse_reduce_max', (['sp_adj_pred'], {'axis': '(-1)'}), '(sp_adj_pred, axis=-1)\n', (19030, 19052), True, 'import tensorflow as tf\n'), ((19123, 19148), 'tensorflow.multiply', 'tf.multiply', (['scores', 'mask'], {}), '(scores, mask)\n', (19134, 19148), True, 'import tensorflow as tf\n'), ((1651, 1704), 'tensorflow.sparse_tensor_dense_matmul', 'tf.sparse_tensor_dense_matmul', (['node_features', 'w_dense'], {}), '(node_features, w_dense)\n', (1680, 1704), True, 'import tensorflow as tf\n'), ((1733, 1807), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (1750, 1807), True, 'import tensorflow as tf\n'), ((1837, 1870), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'w_dense'], {}), '(node_features, w_dense)\n', (1846, 1870), True, 'import tensorflow as tf\n'), ((1906, 1947), 'tensorflow.contrib.layers.bias_add', 'tf.contrib.layers.bias_add', (['node_features'], {}), '(node_features)\n', (1932, 1947), True, 'import tensorflow as tf\n'), ((6972, 7046), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (6989, 7046), True, 'import tensorflow as tf\n'), ((7109, 7227), 'tensorflow.get_variable', 'tf.get_variable', ([], {'initializer': 'WEIGHT_INIT', 'dtype': 'tf.float32', 'name': '"""linear"""', 'shape': '(node_features.shape[1], out_dim)'}), "(initializer=WEIGHT_INIT, dtype=tf.float32, name='linear',\n shape=(node_features.shape[1], out_dim))\n", (7124, 7227), True, 'import tensorflow as tf\n'), ((7277, 7310), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'w_dense'], {}), '(node_features, w_dense)\n', (7286, 7310), True, 'import tensorflow as tf\n'), ((7366, 7389), 'tensorflow.nn.leaky_relu', 'tf.nn.leaky_relu', (['alpha'], {}), '(alpha)\n', (7382, 7389), True, 'import tensorflow as tf\n'), ((7617, 7657), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['(alpha + bias_mat)'], {'axis': '(-1)'}), '(alpha + bias_mat, axis=-1)\n', (7630, 7657), True, 'import tensorflow as tf\n'), ((7714, 7780), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'alpha', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=alpha, rate=p_drop, training=is_training)\n', (7731, 7780), True, 'import tensorflow as tf\n'), ((7801, 7875), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'node_features', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=node_features, rate=p_drop, training=is_training)\n', (7818, 7875), True, 'import tensorflow as tf\n'), ((7943, 7974), 'tensorflow.matmul', 'tf.matmul', (['alpha', 'node_features'], {}), '(alpha, node_features)\n', (7952, 7974), True, 'import tensorflow as tf\n'), ((7995, 8036), 'tensorflow.contrib.layers.bias_add', 'tf.contrib.layers.bias_add', (['node_features'], {}), '(node_features)\n', (8021, 8036), True, 'import tensorflow as tf\n'), ((11514, 11529), 'tensorflow.add_n', 'tf.add_n', (['feats'], {}), '(feats)\n', (11522, 11529), True, 'import tensorflow as tf\n'), ((12531, 12554), 'tensorflow.concat', 'tf.concat', (['att'], {'axis': '(-1)'}), '(att, axis=-1)\n', (12540, 12554), True, 'import tensorflow as tf\n'), ((13460, 13483), 'tensorflow.concat', 'tf.concat', (['att'], {'axis': '(-1)'}), '(att, axis=-1)\n', (13469, 13483), True, 'import tensorflow as tf\n'), ((14927, 14963), 'tensorflow.transpose', 'tf.transpose', (['alpha_col'], {'perm': '[1, 0]'}), '(alpha_col, perm=[1, 0])\n', (14939, 14963), True, 'import tensorflow as tf\n'), ((15892, 15926), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['weight_vec'], {'axis': '(-1)'}), '(weight_vec, axis=-1)\n', (15905, 15926), True, 'import tensorflow as tf\n'), ((16453, 16477), 'tensorflow.square', 'tf.square', (['node_features'], {}), '(node_features)\n', (16462, 16477), True, 'import tensorflow as tf\n'), ((16848, 16888), 'tensorflow.transpose', 'tf.transpose', (['node_features'], {'perm': '[1, 0]'}), '(node_features, perm=[1, 0])\n', (16860, 16888), True, 'import tensorflow as tf\n'), ((17073, 17108), 'tensorflow.transpose', 'tf.transpose', (['features'], {'perm': '[1, 0]'}), '(features, perm=[1, 0])\n', (17085, 17108), True, 'import tensorflow as tf\n'), ((17418, 17455), 'tensorflow.transpose', 'tf.transpose', (['feat_right'], {'perm': '[1, 0]'}), '(feat_right, perm=[1, 0])\n', (17430, 17455), True, 'import tensorflow as tf\n'), ((18309, 18336), 'tensorflow.reshape', 'tf.reshape', (['adj_pred', '(-1,)'], {}), '(adj_pred, (-1,))\n', (18319, 18336), True, 'import tensorflow as tf\n'), ((18364, 18390), 'tensorflow.cast', 'tf.cast', (['indices', 'tf.int64'], {}), '(indices, tf.int64)\n', (18371, 18390), True, 'import tensorflow as tf\n'), ((1055, 1129), 'tensorflow.layers.dropout', 'tf.layers.dropout', ([], {'inputs': 'tensor.values', 'rate': 'p_drop', 'training': 'is_training'}), '(inputs=tensor.values, rate=p_drop, training=is_training)\n', (1072, 1129), True, 'import tensorflow as tf\n'), ((5649, 5679), 'tensorflow.nn.leaky_relu', 'tf.nn.leaky_relu', (['alpha.values'], {}), '(alpha.values)\n', (5665, 5679), True, 'import tensorflow as tf\n'), ((9062, 9092), 'tensorflow.nn.leaky_relu', 'tf.nn.leaky_relu', (['alpha.values'], {}), '(alpha.values)\n', (9078, 9092), True, 'import tensorflow as tf\n'), ((10820, 10845), 'tensorflow.nn.relu', 'tf.nn.relu', (['node_features'], {}), '(node_features)\n', (10830, 10845), True, 'import tensorflow as tf\n'), ((11578, 11603), 'tensorflow.nn.relu', 'tf.nn.relu', (['node_features'], {}), '(node_features)\n', (11588, 11603), True, 'import tensorflow as tf\n'), ((12484, 12497), 'tensorflow.add_n', 'tf.add_n', (['att'], {}), '(att)\n', (12492, 12497), True, 'import tensorflow as tf\n'), ((13413, 13426), 'tensorflow.add_n', 'tf.add_n', (['att'], {}), '(att)\n', (13421, 13426), True, 'import tensorflow as tf\n'), ((14280, 14316), 'tensorflow.transpose', 'tf.transpose', (['alpha_col'], {'perm': '[1, 0]'}), '(alpha_col, perm=[1, 0])\n', (14292, 14316), True, 'import tensorflow as tf\n'), ((18470, 18480), 'tensorflow.ones', 'tf.ones', (['k'], {}), '(k)\n', (18477, 18480), True, 'import tensorflow as tf\n'), ((18719, 18752), 'tensorflow.ones_like', 'tf.ones_like', (['sp_adj_train.values'], {}), '(sp_adj_train.values)\n', (18731, 18752), True, 'import tensorflow as tf\n'), ((10147, 10162), 'tensorflow.nn.relu', 'tf.nn.relu', (['h_i'], {}), '(h_i)\n', (10157, 10162), True, 'import tensorflow as tf\n'), ((11302, 11341), 'tensorflow.contrib.layers.dense_to_sparse', 'tf.contrib.layers.dense_to_sparse', (['poly'], {}), '(poly)\n', (11335, 11341), True, 'import tensorflow as tf\n'), ((12367, 12390), 'tensorflow.concat', 'tf.concat', (['att'], {'axis': '(-1)'}), '(att, axis=-1)\n', (12376, 12390), True, 'import tensorflow as tf\n'), ((13296, 13319), 'tensorflow.concat', 'tf.concat', (['att'], {'axis': '(-1)'}), '(att, axis=-1)\n', (13305, 13319), True, 'import tensorflow as tf\n'), ((15959, 15997), 'tensorflow.multiply', 'tf.multiply', (['weight_vec', 'node_features'], {}), '(weight_vec, node_features)\n', (15970, 15997), True, 'import tensorflow as tf\n'), ((16031, 16071), 'tensorflow.transpose', 'tf.transpose', (['node_features'], {'perm': '[1, 0]'}), '(node_features, perm=[1, 0])\n', (16043, 16071), True, 'import tensorflow as tf\n'), ((15474, 15514), 'tensorflow.transpose', 'tf.transpose', (['node_features'], {'perm': '[1, 0]'}), '(node_features, perm=[1, 0])\n', (15486, 15514), True, 'import tensorflow as tf\n'), ((16641, 16693), 'tensorflow.matmul', 'tf.matmul', (['node_features', 'node_features', '(False)', '(True)'], {}), '(node_features, node_features, False, True)\n', (16650, 16693), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python
import sys,os
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
import numpy as np
from logparser.IPLoM import CandiateClustering as IPLoM
from logparser.LogCluster import LogCluster_iterative
#from logparser.LogCluster import LogCluster_iterative
input_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))+'/logs/Sample_logs/' # The input directory of log file
output_dir = 'CandidateClusteringResult' # The output directory of parsing results
log_file = 'fin-transaction_log_anonimized.log'
log_format = '<Date> <Time> <Level> <Module> \[<StatusAndPayThread>\] - <Content>'
maxEventLen = 200 # The maximal token number of log messages (default: 200)
step2Support = 6 # The minimal support for creating a new partition (default: 0)
CT = 0.35 # The cluster goodness threshold (default: 0.35)
lowerBound = 0.25 # The lower bound distance (default: 0.25)
upperBound = 1.0 # The upper bound distance (default: 0.9)
regex = [] # Regular expression list for optional preprocessing (default: [])
parser = IPLoM.LogParser(log_format=log_format, indir=input_dir, outdir=output_dir,
maxEventLen=maxEventLen, step2Support=step2Support, CT=CT,
lowerBound=lowerBound, upperBound=upperBound, rex=regex)
parser.parse(log_file)
# Applying iterative logCluster on IPLoM results
# Hyperparameter tuning for cisco_router.log :
# numIterations = 27 # Hyperparameter that needs to be tuned.
# rsupports = [23.9, 23, 9, 9, 9, 8, 8, 8, 7, 26.7, 7, 7, 8, 7, 10.1, 7, 5, 9, 5,7,5, 8.6, 5, 8, 5, 5]
# initial_support = 20
# file_name = "cisco_router.log"
# Hyperparameter tuning for 'fin-transaction_log_anonimized.log'
numIterations = 5
rsupports = [1.5, 1.2, 1, 0.0001, 2.95, 2, 2, 2, 1.47, 3.18, 2.3]
file_name = 'output.log'
initial_support = 2
for iteration in range(1,numIterations+1):
if iteration==1:
input_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))+'/CandidateClusteringResult' # The input directory of log file
output_dir = 'CandidateClusteringResult' # The output directory of parsing results
log_file = file_name#'cisco_router.log'
log_format = '<Content>' # cisco_router log format
# log_format = '<Date> <Time> <Level> <Module> \[<StatusAndPayThread>\] - <Content>'
rsupport = initial_support # The minimum threshold of relative support, 10 denotes 10%
regex = []
else :
input_dir = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # The input directory of log file
output_dir = 'CandidateClusteringResult' # The output directory of parsing results
log_file = 'logcluster_input.log'
log_format = '<Content>'
rsupport = rsupports[iteration-2] # The minimum threshold of relative support, 10 denotes 10%
regex = []
print("Iteration : ",iteration)
parser = LogCluster_iterative.LogParser(input_dir, log_format, output_dir, rsupport=rsupport, iteration=iteration, file_name = file_name, initial_support= initial_support)
parser.parse(log_file)
| [
"os.path.abspath",
"logparser.IPLoM.CandiateClustering.LogParser",
"logparser.LogCluster.LogCluster_iterative.LogParser"
] | [((1134, 1333), 'logparser.IPLoM.CandiateClustering.LogParser', 'IPLoM.LogParser', ([], {'log_format': 'log_format', 'indir': 'input_dir', 'outdir': 'output_dir', 'maxEventLen': 'maxEventLen', 'step2Support': 'step2Support', 'CT': 'CT', 'lowerBound': 'lowerBound', 'upperBound': 'upperBound', 'rex': 'regex'}), '(log_format=log_format, indir=input_dir, outdir=output_dir,\n maxEventLen=maxEventLen, step2Support=step2Support, CT=CT, lowerBound=\n lowerBound, upperBound=upperBound, rex=regex)\n', (1149, 1333), True, 'from logparser.IPLoM import CandiateClustering as IPLoM\n'), ((3043, 3212), 'logparser.LogCluster.LogCluster_iterative.LogParser', 'LogCluster_iterative.LogParser', (['input_dir', 'log_format', 'output_dir'], {'rsupport': 'rsupport', 'iteration': 'iteration', 'file_name': 'file_name', 'initial_support': 'initial_support'}), '(input_dir, log_format, output_dir, rsupport=\n rsupport, iteration=iteration, file_name=file_name, initial_support=\n initial_support)\n', (3073, 3212), False, 'from logparser.LogCluster import LogCluster_iterative\n'), ((101, 126), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (116, 126), False, 'import sys, os\n'), ((378, 403), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (393, 403), False, 'import sys, os\n'), ((2632, 2657), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2647, 2657), False, 'import sys, os\n'), ((2045, 2070), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (2060, 2070), False, 'import sys, os\n')] |
from linear_algebra import Vector
from rotation import Rotation
import math
_DEFAULT_FOCUS = (0,0,-100)
_DEFAULT_LOCATION = (0,0,-10)
_DEFAULT_FOV = 70
class Camera:
'''
Will convert a point in 3D space to a point on a 2D plane.
Will basically:
Subtract target vector by location. Location also represents where the focus point screen is.
Rotate target vector by rotation (inverse)
Find where on x,y plane the line between the target vector and focus (not the same as location) land.
Screen size & FOV will only be considered if screen size is set. If so, will convert the 2D coordinate
to being on a traditional XY plane and also do FOV things. Very important for looking good.
'''
IN_FRONT = 0 #In front of screen
BETWEEN = 1 #Between screen and focus
BEHIND = 2 #Behind focus
def __init__(self, location: Vector = Vector(*_DEFAULT_LOCATION), focus: Vector = Vector(*_DEFAULT_FOCUS),
rotation: Rotation = Rotation(0,0,0),
screen_size: (int, int) = None, fov = _DEFAULT_FOV):
assert location.dimension() == 3
assert focus.dimension() == 3
self._loc = location
self._focus = focus
self._rot = rotation
self._screen = screen_size
self._fov = fov
def __call__(self, v: Vector) -> '2D Vector':
'''
The brains of the operation B)
'''
assert v.dimension() == 3
trans_v = v - self._loc #Move it relative to the screen's location
trans_v = self._rot/trans_v #Rotate it (inverse)
trans_v = trans_v + self._focus
if trans_v[2] <= self._focus[2]: return Vector(0,0), self.BEHIND, trans_v[2]
intersection_finder = trans_v - self._focus #Vector from focus to translated vector
#Now, intersection is when z=0, and z=0 at -self._focus[z]/intersection_finder[z]
fraction = (-self._focus[2])/intersection_finder[2]
returning = Vector(fraction*intersection_finder[0], fraction*intersection_finder[1])
#FOV Stuffs
if self._screen != None:
'''
This will flip the image upside down and move the coordinate so its centered around the vertex,
default being the middle of the window. (Like an x,y plane).
The fov will basically place a fovxfov size box and only include whats in that in the final result
as it will scale everything else out (IT FINALLY WORKS FOV WAS THE TRICK YES!!!!)
'''
fov_mult = min(self._screen[0], self._screen[1])/self._fov
#Will make origin (0,0) middle of screen
returning = Vector((returning[0]*fov_mult)+(self._screen[0]/2), (self._screen[1]/2)-(returning[1]*fov_mult))
if trans_v[2] <= 0: return returning, self.BETWEEN, trans_v[2]
return returning, self.IN_FRONT, trans_v[2]
#Where the v vector is relative to focus and screen
def move_focus(self, v: Vector):
'''
This will just make the camera look funny - mainly for experimental purposes.
'''
self._focus += v
def move(self, v: Vector):
'''
This will move where the focus point is in 3D space. (relative to the rotation).
'''
v = self._rot*v
self._loc += v
def rotate(self, r: Rotation):
'''
Rotates the camera, relative to where its currently looking. Will not let x rotation go over 90deg or under 90deg
'''
self._rot += r
self._rot[0] = max(-math.pi/2, min(math.pi/2, self._rot[0]))
def focus_to(self, v: Vector) -> Vector:
'''
Returns the vector that represents the location -> v
'''
return v-self._loc
def resize(self, screen_size: (int, int)):
'''
For when you change the screen size.
'''
self._screen = screen_size
if __name__ == '__main__':
c = Camera()
print(c(Vector(1,1,0))) #Expect (1,1) - assert not working well...
print(c(Vector(1,1,100))) #(.5,.5)
print(c(Vector(2,1,100))) #(1,.5)
| [
"rotation.Rotation",
"linear_algebra.Vector"
] | [((884, 910), 'linear_algebra.Vector', 'Vector', (['*_DEFAULT_LOCATION'], {}), '(*_DEFAULT_LOCATION)\n', (890, 910), False, 'from linear_algebra import Vector\n'), ((928, 951), 'linear_algebra.Vector', 'Vector', (['*_DEFAULT_FOCUS'], {}), '(*_DEFAULT_FOCUS)\n', (934, 951), False, 'from linear_algebra import Vector\n'), ((998, 1015), 'rotation.Rotation', 'Rotation', (['(0)', '(0)', '(0)'], {}), '(0, 0, 0)\n', (1006, 1015), False, 'from rotation import Rotation\n'), ((1986, 2062), 'linear_algebra.Vector', 'Vector', (['(fraction * intersection_finder[0])', '(fraction * intersection_finder[1])'], {}), '(fraction * intersection_finder[0], fraction * intersection_finder[1])\n', (1992, 2062), False, 'from linear_algebra import Vector\n'), ((2680, 2784), 'linear_algebra.Vector', 'Vector', (['(returning[0] * fov_mult + self._screen[0] / 2)', '(self._screen[1] / 2 - returning[1] * fov_mult)'], {}), '(returning[0] * fov_mult + self._screen[0] / 2, self._screen[1] / 2 -\n returning[1] * fov_mult)\n', (2686, 2784), False, 'from linear_algebra import Vector\n'), ((3970, 3985), 'linear_algebra.Vector', 'Vector', (['(1)', '(1)', '(0)'], {}), '(1, 1, 0)\n', (3976, 3985), False, 'from linear_algebra import Vector\n'), ((4042, 4059), 'linear_algebra.Vector', 'Vector', (['(1)', '(1)', '(100)'], {}), '(1, 1, 100)\n', (4048, 4059), False, 'from linear_algebra import Vector\n'), ((4081, 4098), 'linear_algebra.Vector', 'Vector', (['(2)', '(1)', '(100)'], {}), '(2, 1, 100)\n', (4087, 4098), False, 'from linear_algebra import Vector\n'), ((1686, 1698), 'linear_algebra.Vector', 'Vector', (['(0)', '(0)'], {}), '(0, 0)\n', (1692, 1698), False, 'from linear_algebra import Vector\n')] |
# Let's see how to download data from Yahoo Finance with Python as .csv
import pandas_datareader.data as web #to collect data
import datetime as dt #to specify start and end dates
# Start date data yyy, mm, dd
start = dt.datetime(2020, 1, 1)
# End date data yyy, mm, dd
end = dt.datetime(2020, 9, 7)
tickers = ['GOOG', 'AMZN', 'AAPL']
cc = 'BTC-USD' # We can replace 'tickers' in the following
# lines of code for 'cc' and the data that
# the code will download will be the cc
# (cryptocurrency) data
for ticker in tickers:
data = web.DataReader(ticker, 'yahoo', start, end)
data.to_csv('{}.csv'.format(ticker)) # the {} will be replaced
# with whatever the ticker
# is | [
"datetime.datetime",
"pandas_datareader.data.DataReader"
] | [((220, 243), 'datetime.datetime', 'dt.datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (231, 243), True, 'import datetime as dt\n'), ((279, 302), 'datetime.datetime', 'dt.datetime', (['(2020)', '(9)', '(7)'], {}), '(2020, 9, 7)\n', (290, 302), True, 'import datetime as dt\n'), ((587, 630), 'pandas_datareader.data.DataReader', 'web.DataReader', (['ticker', '"""yahoo"""', 'start', 'end'], {}), "(ticker, 'yahoo', start, end)\n", (601, 630), True, 'import pandas_datareader.data as web\n')] |
import os
from typing import List
import matplotlib.pyplot as plt
import numpy as np
from tensorflow.python.keras import models
from tensorflow.python.keras.preprocessing import image
from networks.utility_functions.user_check import check_existing_folder
class OutputsVisualizer:
def __init__(self, img_path: str, model: models.Sequential, show_input: bool = False):
self.__img = self.__load_image(img_path, show_input)
self.__model = model
@staticmethod
def __load_image(img_path, show) -> np.array:
# Load the image
img = image.load_img(img_path, target_size=(150, 150), color_mode='grayscale')
# Convert the image to an array and preprocess it
img_tensor = image.img_to_array(img)
img_tensor = np.expand_dims(img_tensor, axis=0)
img_tensor /= 255.
if show:
plt.imshow(np.squeeze(img_tensor[0]))
plt.show()
return img_tensor
@staticmethod
def __display_grid(layer_names: List, activations: List, images_per_row: int, save: bool):
# Display the feature maps
for layer_name, layer_activation in zip(layer_names, activations):
# Number of features in the feature map
n_features = layer_activation.shape[-1]
# The feature map has shape (1, size, size, n_features)
size = layer_activation.shape[1]
# Set the number of columns in the grid
n_cols = n_features // images_per_row
# Tile the activation channels in a matrix
display_grid = np.zeros((size * n_cols, images_per_row * size))
# Tile each filter into an horizontal grid
for col in range(n_cols):
for row in range(images_per_row):
channel_image = layer_activation[0, :, :, col * images_per_row + row]
channel_image -= channel_image.mean() # Post-processes the feature to make it visually palatable
channel_image /= channel_image.std()
channel_image *= 64
channel_image += 128
channel_image = np.clip(channel_image, 0, 255).astype('uint8')
display_grid[col * size: (col + 1) * size, row * size: (row + 1) * size] = channel_image
scale = 1. / size
plt.figure(figsize=(scale * display_grid.shape[1], scale * display_grid.shape[0]))
plt.title(layer_name)
plt.grid(False)
plt.imshow(display_grid, aspect='auto', cmap='viridis')
if save:
plt.savefig(os.path.join('networks', 'specialization_tests', 'outputs_visualization',
layer_name + '.png'))
else:
plt.show()
def plot_intermediate_outputs(self, num_layers: int = 12, images_per_row: int = 12, save: bool = False):
print('\nVisualizing the intermediate outputs of the first {} layers...'.format(num_layers))
if (save and check_existing_folder('outputs_visualization')) or not save:
# Collect the name of the layers for the plot
layer_names = [layer.name for layer in self.__model.layers[10:num_layers]]
# Extract the outputs of the layers
layer_outputs = [layer.output for layer in self.__model.layers[:num_layers]]
# Create a model that will return the given outputs on the base of the model input
activation_model = models.Model(inputs=self.__model.input, outputs=layer_outputs)
# Perform a prediction on the test image using the new model
activations = activation_model.predict(self.__img)
# Display the activations in a grid
self.__display_grid(layer_names, activations, images_per_row, save)
| [
"matplotlib.pyplot.imshow",
"numpy.clip",
"tensorflow.python.keras.preprocessing.image.img_to_array",
"matplotlib.pyplot.grid",
"tensorflow.python.keras.models.Model",
"tensorflow.python.keras.preprocessing.image.load_img",
"networks.utility_functions.user_check.check_existing_folder",
"os.path.join",
"numpy.squeeze",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.expand_dims",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show"
] | [((575, 647), 'tensorflow.python.keras.preprocessing.image.load_img', 'image.load_img', (['img_path'], {'target_size': '(150, 150)', 'color_mode': '"""grayscale"""'}), "(img_path, target_size=(150, 150), color_mode='grayscale')\n", (589, 647), False, 'from tensorflow.python.keras.preprocessing import image\n'), ((728, 751), 'tensorflow.python.keras.preprocessing.image.img_to_array', 'image.img_to_array', (['img'], {}), '(img)\n', (746, 751), False, 'from tensorflow.python.keras.preprocessing import image\n'), ((773, 807), 'numpy.expand_dims', 'np.expand_dims', (['img_tensor'], {'axis': '(0)'}), '(img_tensor, axis=0)\n', (787, 807), True, 'import numpy as np\n'), ((915, 925), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (923, 925), True, 'import matplotlib.pyplot as plt\n'), ((1581, 1629), 'numpy.zeros', 'np.zeros', (['(size * n_cols, images_per_row * size)'], {}), '((size * n_cols, images_per_row * size))\n', (1589, 1629), True, 'import numpy as np\n'), ((2356, 2443), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(scale * display_grid.shape[1], scale * display_grid.shape[0])'}), '(figsize=(scale * display_grid.shape[1], scale * display_grid.\n shape[0]))\n', (2366, 2443), True, 'import matplotlib.pyplot as plt\n'), ((2451, 2472), 'matplotlib.pyplot.title', 'plt.title', (['layer_name'], {}), '(layer_name)\n', (2460, 2472), True, 'import matplotlib.pyplot as plt\n'), ((2485, 2500), 'matplotlib.pyplot.grid', 'plt.grid', (['(False)'], {}), '(False)\n', (2493, 2500), True, 'import matplotlib.pyplot as plt\n'), ((2513, 2568), 'matplotlib.pyplot.imshow', 'plt.imshow', (['display_grid'], {'aspect': '"""auto"""', 'cmap': '"""viridis"""'}), "(display_grid, aspect='auto', cmap='viridis')\n", (2523, 2568), True, 'import matplotlib.pyplot as plt\n'), ((3506, 3568), 'tensorflow.python.keras.models.Model', 'models.Model', ([], {'inputs': 'self.__model.input', 'outputs': 'layer_outputs'}), '(inputs=self.__model.input, outputs=layer_outputs)\n', (3518, 3568), False, 'from tensorflow.python.keras import models\n'), ((876, 901), 'numpy.squeeze', 'np.squeeze', (['img_tensor[0]'], {}), '(img_tensor[0])\n', (886, 901), True, 'import numpy as np\n'), ((2790, 2800), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2798, 2800), True, 'import matplotlib.pyplot as plt\n'), ((3035, 3081), 'networks.utility_functions.user_check.check_existing_folder', 'check_existing_folder', (['"""outputs_visualization"""'], {}), "('outputs_visualization')\n", (3056, 3081), False, 'from networks.utility_functions.user_check import check_existing_folder\n'), ((2619, 2718), 'os.path.join', 'os.path.join', (['"""networks"""', '"""specialization_tests"""', '"""outputs_visualization"""', "(layer_name + '.png')"], {}), "('networks', 'specialization_tests', 'outputs_visualization', \n layer_name + '.png')\n", (2631, 2718), False, 'import os\n'), ((2156, 2186), 'numpy.clip', 'np.clip', (['channel_image', '(0)', '(255)'], {}), '(channel_image, 0, 255)\n', (2163, 2186), True, 'import numpy as np\n')] |
from django.contrib import admin
from .forms import IconForm
from .models import Icon
class IconAdmin(admin.ModelAdmin):
'''example'''
form = IconForm
list_display = ['name']
admin.site.register(Icon, IconAdmin)
| [
"django.contrib.admin.site.register"
] | [((192, 228), 'django.contrib.admin.site.register', 'admin.site.register', (['Icon', 'IconAdmin'], {}), '(Icon, IconAdmin)\n', (211, 228), False, 'from django.contrib import admin\n')] |
from Thermal import Thermals
from BoxPlotter import BoxPlotter
from LatexPrinter import LatexPrinter
from CsvParser import CsvParser
from EDPPlotter import EDPPrinter
from typing import *
import argparse
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-f', '--file', required=True, help='The results file to process')
subparsers = parser.add_subparsers()
latex = subparsers.add_parser(name='latex')
latex.set_defaults(which='latex')
latex.add_argument('-o', '--output', type=str, help='Selects where the latex output should be saved')
edp = subparsers.add_parser(name='edp')
edp.set_defaults(which='edp')
edp.add_argument('-w', '--weight', default=2, type=int, help='Sets the weight used to calculate the edp (1, 2 or 3)')
edp.add_argument('-o', '--output', type=str, help='Selects where the edp output should be saved')
plot = subparsers.add_parser(name='plot')
plot.set_defaults(which='plot')
plot.add_argument('-b', '--benchmark', nargs='*', help='Used to select which benchmark(s) to include in the plot comparison, leaving it empty means all benchmarks will be compared')
plot.add_argument('-l', '--language', nargs='*', help='Used to select which language(s) to use when creating comparisons, leaving it empty means showing all languages')
plot.add_argument('-m', '--metric', nargs='*', help='Used to select which metrics to include, leaving it empty means all metrics will be compared')
plot.add_argument('-p', '--paradigm', nargs='*', help='Used to select which paradigms to exclude, leaving it empty means no paradigms will be excluded')
thermal = subparsers.add_parser(name='thermal')
thermal.set_defaults(which='thermal')
thermal.add_argument('-t', '--type', choices=['plot', 'correlate'])
thermal.add_argument('-b', '--benchmark', nargs='*', help='Used to select which benchmark(s) to include in the plot comparison, leaving it empty means all benchmarks will be compared')
thermal.add_argument('-l', '--language', nargs='*', help='Used to select which language(s) to use when creating comparisons, leaving it empty means showing all languages')
thermal.add_argument('-m', '--metric', nargs='*', help='Used to select which metrics to include, leaving it empty means all metrics will be compared')
thermal.add_argument('-p', '--paradigm', nargs='*', help='Used to select which paradigms to exclude, leaving it empty means no paradigms will be excluded')
args = parser.parse_args()
# Read and parse CSV file
results = CsvParser(sep=';').parse(args.file)
# Print result file as latex table
if args.which == 'latex':
LatexPrinter(args.output).output(results)
elif args.which == 'plot':
BoxPlotter(results).plot(args)
elif args.which == 'thermal':
if args.type == 'plot':
Thermals(results).plot(args)
elif args.type == 'correlate':
Thermals(results).correlate(args)
elif args.which == 'edp':
EDPPrinter(args.weight,args.output).output(results)
| [
"CsvParser.CsvParser",
"argparse.ArgumentParser",
"Thermal.Thermals",
"BoxPlotter.BoxPlotter",
"LatexPrinter.LatexPrinter",
"EDPPlotter.EDPPrinter"
] | [((245, 270), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (268, 270), False, 'import argparse\n'), ((2574, 2592), 'CsvParser.CsvParser', 'CsvParser', ([], {'sep': '""";"""'}), "(sep=';')\n", (2583, 2592), False, 'from CsvParser import CsvParser\n'), ((2688, 2713), 'LatexPrinter.LatexPrinter', 'LatexPrinter', (['args.output'], {}), '(args.output)\n', (2700, 2713), False, 'from LatexPrinter import LatexPrinter\n'), ((2769, 2788), 'BoxPlotter.BoxPlotter', 'BoxPlotter', (['results'], {}), '(results)\n', (2779, 2788), False, 'from BoxPlotter import BoxPlotter\n'), ((2878, 2895), 'Thermal.Thermals', 'Thermals', (['results'], {}), '(results)\n', (2886, 2895), False, 'from Thermal import Thermals\n'), ((3030, 3066), 'EDPPlotter.EDPPrinter', 'EDPPrinter', (['args.weight', 'args.output'], {}), '(args.weight, args.output)\n', (3040, 3066), False, 'from EDPPlotter import EDPPrinter\n'), ((2958, 2975), 'Thermal.Thermals', 'Thermals', (['results'], {}), '(results)\n', (2966, 2975), False, 'from Thermal import Thermals\n')] |
from django.db import models
from django.contrib.auth.models import User
import cloudinary
from cloudinary.models import CloudinaryField
# Create your models here.
class Hood(models.Model):
name = models.CharField(max_length =30,null=True)
location = models.CharField(max_length =30,null=True)
image = CloudinaryField("media")
occupants = models.IntegerField(null=True)
user = models.ForeignKey(User,on_delete=models.CASCADE)
objects = models.Manager()
# Admin Foreign key
def __str__(self):
return self.name
def save_hood(self):
self.save()
def delete_hood(self):
self.delete()
@classmethod
def delete_hood_by_id(cls, id):
hood = cls.objects.filter(pk=id)
hood.delete()
@classmethod
def get_hood_by_id(cls, id):
hood = cls.objects.get(pk=id)
return hood
@classmethod
def filter_by_location(cls, location):
hood = cls.objects.filter(location=location)
return hood
@classmethod
def search_hood(cls, search_term):
hood= cls.objects.filter(neighbourhood_name__icontains=search_term)
return hood
@classmethod
def update_hood(cls, id):
hood= cls.objects.filter(id=id).update(id=id)
return hood
# User class
class Profile(models.Model):
pro_photo = CloudinaryField("media")
name = models.CharField(max_length =30,null=True)
location = models.CharField(max_length =30,null=True)
email = models.EmailField(max_length =50,null=True)
neighbourhood = models.ForeignKey(Hood, on_delete=models.CASCADE)
bio = models.CharField(max_length =150,default='WELCOME TO HOODAPP')
user = models.OneToOneField(User,on_delete=models.CASCADE,related_name='profile',null=True)
def __str__(self):
return self.name
def save_profile(self):
self.save()
def delete_profile(self):
self.delete()
class Business(models.Model):
name = models.CharField(max_length =30,null=True)
description = models.CharField(max_length =130,null=True)
email = models.EmailField(max_length =50,null=True)
user = models.ForeignKey(User, on_delete=models.CASCADE)
neighbourhood = models.ForeignKey(Hood, on_delete=models.CASCADE)
objects = models.Manager()
def __str__(self):
return self.name
def save_biz(self):
self.save()
def delete_biz(self):
self.delete()
@classmethod
def delete_business_by_id(cls, id):
businesse = cls.objects.filter(pk=id)
businesse.delete()
@classmethod
def get_business_by_id(cls, id):
business = cls.objects.get(pk=id)
return business
@classmethod
def filter_by_location(cls, location):
business = cls.objects.filter(location=location)
return business
@classmethod
def update_business(cls, id):
business = cls.objects.filter(id=id).update(id=id)
return business
@classmethod
def update_business(cls, id):
business = cls.objects.filter(id=id).update(id=id)
return business
class Post(models.Model):
post = models.CharField(max_length =130,null=True)
user = models.ForeignKey(User, on_delete=models.CASCADE,null=True)
neighbourhood = models.ForeignKey(Hood,on_delete=models.CASCADE,related_name='post',null=True)
class Meta:
ordering = ['id']
objects = models.Manager()
def __str__(self):
return self.post
def save_post(self):
self.save()
def delete_post(self):
self.delete()
| [
"django.db.models.EmailField",
"django.db.models.OneToOneField",
"django.db.models.Manager",
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"cloudinary.models.CloudinaryField",
"django.db.models.CharField"
] | [((203, 245), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (219, 245), False, 'from django.db import models\n'), ((261, 303), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (277, 303), False, 'from django.db import models\n'), ((316, 340), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""media"""'], {}), "('media')\n", (331, 340), False, 'from cloudinary.models import CloudinaryField\n'), ((357, 387), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'null': '(True)'}), '(null=True)\n', (376, 387), False, 'from django.db import models\n'), ((399, 448), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (416, 448), False, 'from django.db import models\n'), ((462, 478), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (476, 478), False, 'from django.db import models\n'), ((1349, 1373), 'cloudinary.models.CloudinaryField', 'CloudinaryField', (['"""media"""'], {}), "('media')\n", (1364, 1373), False, 'from cloudinary.models import CloudinaryField\n'), ((1385, 1427), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (1401, 1427), False, 'from django.db import models\n'), ((1443, 1485), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (1459, 1485), False, 'from django.db import models\n'), ((1498, 1541), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (1515, 1541), False, 'from django.db import models\n'), ((1562, 1611), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Hood'], {'on_delete': 'models.CASCADE'}), '(Hood, on_delete=models.CASCADE)\n', (1579, 1611), False, 'from django.db import models\n'), ((1622, 1685), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'default': '"""WELCOME TO HOODAPP"""'}), "(max_length=150, default='WELCOME TO HOODAPP')\n", (1638, 1685), False, 'from django.db import models\n'), ((1697, 1788), 'django.db.models.OneToOneField', 'models.OneToOneField', (['User'], {'on_delete': 'models.CASCADE', 'related_name': '"""profile"""', 'null': '(True)'}), "(User, on_delete=models.CASCADE, related_name='profile',\n null=True)\n", (1717, 1788), False, 'from django.db import models\n'), ((1989, 2031), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(30)', 'null': '(True)'}), '(max_length=30, null=True)\n', (2005, 2031), False, 'from django.db import models\n'), ((2050, 2093), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(130)', 'null': '(True)'}), '(max_length=130, null=True)\n', (2066, 2093), False, 'from django.db import models\n'), ((2106, 2149), 'django.db.models.EmailField', 'models.EmailField', ([], {'max_length': '(50)', 'null': '(True)'}), '(max_length=50, null=True)\n', (2123, 2149), False, 'from django.db import models\n'), ((2161, 2210), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE'}), '(User, on_delete=models.CASCADE)\n', (2178, 2210), False, 'from django.db import models\n'), ((2231, 2280), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Hood'], {'on_delete': 'models.CASCADE'}), '(Hood, on_delete=models.CASCADE)\n', (2248, 2280), False, 'from django.db import models\n'), ((2297, 2313), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (2311, 2313), False, 'from django.db import models\n'), ((3163, 3206), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(130)', 'null': '(True)'}), '(max_length=130, null=True)\n', (3179, 3206), False, 'from django.db import models\n'), ((3218, 3278), 'django.db.models.ForeignKey', 'models.ForeignKey', (['User'], {'on_delete': 'models.CASCADE', 'null': '(True)'}), '(User, on_delete=models.CASCADE, null=True)\n', (3235, 3278), False, 'from django.db import models\n'), ((3298, 3384), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Hood'], {'on_delete': 'models.CASCADE', 'related_name': '"""post"""', 'null': '(True)'}), "(Hood, on_delete=models.CASCADE, related_name='post', null\n =True)\n", (3315, 3384), False, 'from django.db import models\n'), ((3434, 3450), 'django.db.models.Manager', 'models.Manager', ([], {}), '()\n', (3448, 3450), False, 'from django.db import models\n')] |
"""Underwater Vehicle Implementation."""
import numpy as np
from rllib.environment.systems.ode_system import ODESystem
from rllib.util.utilities import get_backend
class UnderwaterVehicle(ODESystem):
"""Underwater Vehicle.
Parameters
----------
step_size : float, optional
References
----------
<NAME>., & <NAME>. (2011).
Reinforcement learning in feedback control. Machine learning.
"""
def __init__(self, step_size=0.01):
super().__init__(
func=self._ode, step_size=step_size, dim_action=(1,), dim_state=(1,)
)
def thrust(self, velocity, thrust):
"""Get the thrust coefficient."""
bk = get_backend(velocity)
return (
-0.5 * bk.tanh(0.1 * (bk.abs(self.drag_force(velocity) - thrust) - 30.0))
+ 0.5
)
def drag_force(self, velocity):
"""Get drag force."""
bk = get_backend(velocity)
c = 1.2 + 0.2 * bk.sin(bk.abs(velocity))
return c * velocity * bk.abs(velocity)
def _ode(self, _, state, action):
"""Compute the state time-derivative.
Parameters
----------
state: ndarray or Tensor
States.
action: ndarray or Tensor
Actions.
Returns
-------
state_derivative: Tensor
The state derivative according to the dynamics.
"""
# Physical dynamics
velocity = state
u = action
bk = get_backend(velocity)
m = 3.0 + 1.5 * bk.sin(bk.abs(velocity)) # mass
k = self.thrust(velocity, u) # thrust coefficient.
v_dot = (k * u - self.drag_force(velocity)) / m
return v_dot
if __name__ == "__main__":
sys = UnderwaterVehicle()
f = sys.func(None, np.ones(sys.dim_state), np.ones(sys.dim_action))
print(f)
sys.linearize()
sys.linearize(np.ones(sys.dim_state), np.ones(sys.dim_action))
| [
"rllib.util.utilities.get_backend",
"numpy.ones"
] | [((685, 706), 'rllib.util.utilities.get_backend', 'get_backend', (['velocity'], {}), '(velocity)\n', (696, 706), False, 'from rllib.util.utilities import get_backend\n'), ((918, 939), 'rllib.util.utilities.get_backend', 'get_backend', (['velocity'], {}), '(velocity)\n', (929, 939), False, 'from rllib.util.utilities import get_backend\n'), ((1493, 1514), 'rllib.util.utilities.get_backend', 'get_backend', (['velocity'], {}), '(velocity)\n', (1504, 1514), False, 'from rllib.util.utilities import get_backend\n'), ((1792, 1814), 'numpy.ones', 'np.ones', (['sys.dim_state'], {}), '(sys.dim_state)\n', (1799, 1814), True, 'import numpy as np\n'), ((1816, 1839), 'numpy.ones', 'np.ones', (['sys.dim_action'], {}), '(sys.dim_action)\n', (1823, 1839), True, 'import numpy as np\n'), ((1892, 1914), 'numpy.ones', 'np.ones', (['sys.dim_state'], {}), '(sys.dim_state)\n', (1899, 1914), True, 'import numpy as np\n'), ((1916, 1939), 'numpy.ones', 'np.ones', (['sys.dim_action'], {}), '(sys.dim_action)\n', (1923, 1939), True, 'import numpy as np\n')] |
#! /usr/bin/env python3
# **************************************************************************** #
# #
# ::: :::::::: #
# join_csvs.py :+: :+: :+: #
# +:+ +:+ +:+ #
# By: jackson <<EMAIL>> +#+ +:+ +#+ #
# +#+#+#+#+#+ +#+ #
# Created: 2019/06/17 20:30:17 by tholzheu #+# #+# #
# Updated: 2019/10/11 15:04:25 by jackson ### ########.fr #
# #
# **************************************************************************** #
import pandas as pd
import numpy as np
import sys
import time
from datetime import datetime
def crop_dfs(leap_df, emg_df):
"""Crops both DataFrames to start at the same time"""
i = 0
while (emg_df['ts'][i] < leap_df['Unix Time'][0]):
i += 1
i -= 1
j = len(emg_df) - 1
while (emg_df['ts'][j] > leap_df['Unix Time'][len(leap_df) - 1]):
j -= 1
j += 1
return (leap_df, emg_df[i:j].reset_index(drop=True))
if __name__ == "__main__":
if len(sys.argv) != 4:
print("usage: join_csvs.py leap_data emg_data out_file")
exit(1);
leap_df = pd.read_csv(sys.argv[1])
del(leap_df['Timestamp'])
emg_df = pd.read_csv(sys.argv[2])
emg_df.columns = ['ts', 'ch1', 'ch2', 'ch3', 'ch4', 'ch5', 'ch6', 'ch7', 'ch8'] # this line should not be necessary
print ("Cropping Dataframes")
leap_df, emg_df = crop_dfs(leap_df, emg_df)
rows = len(emg_df)
columns = 74
print('col len:', len(leap_df.columns) + len(emg_df.columns))
length_leap = len(leap_df)
combined = np.zeros((rows, columns))
print ("Joining Dataframes...")
j = 0
k = 0
i = 0
while (i < rows):
while (leap_df["Unix Time"][j] - emg_df["ts"][i] > 0.1):
i += 1
leap = leap_df.iloc[j].values
count = 1
j += 1
while (j < length_leap and leap_df["Unix Time"][j] <= emg_df["ts"][i]):
leap += leap_df.iloc[j].values
j += 1
count += 1
leap /= count
time_leap = leap[0]
leap = leap[4:]
emg = emg_df.iloc[i].values
time_diff = time_leap - emg[0]
combined[i] = np.concatenate([np.array([time_leap]), np.array([time_diff]), emg, leap])
k += 1
i += 1
combined = combined[:k]
print ("Converting to Dataframe...")
column_names = ['Leap timestamp', 'timestamp diff', 'emg timestamp',
'ch1', 'ch2', 'ch3', 'ch4', 'ch5', 'ch6', 'ch7', 'ch8',
'Wrist x', 'Wrist y', 'Wrist z', 'Thumb Proximal x', 'Thumb Proximal y',
'Thumb Proximal z', 'Thumb Intermediate x', 'Thumb Intermediate y',
'Thumb Intermediate z', 'Thumb Distal x', 'Thumb Distal y',
'Thumb Distal z', 'Thumb Tip x', 'Thumb Tip y', 'Thumb Tip z',
'Index Proximal x', 'Index Proximal y', 'Index Proximal z',
'Index Intermediate x', 'Index Intermediate y', 'Index Intermediate z',
'Index Distal x', 'Index Distal y', 'Index Distal z', 'Index Tip x',
'Index Tip y', 'Index Tip z', 'Middle Proximal x', 'Middle Proximal y',
'Middle Proximal z', 'Middle Intermediate x', 'Middle Intermediate y',
'Middle Intermediate z', 'Middle Distal x', 'Middle Distal y',
'Middle Distal z', 'Middle Tip x', 'Middle Tip y', 'Middle Tip z',
'Ring Proximal x', 'Ring Proximal y', 'Ring Proximal z',
'Ring Intermediate x', 'Ring Intermediate y', 'Ring Intermediate z',
'Ring Distal x', 'Ring Distal y', 'Ring Distal z', 'Ring Tip x',
'Ring Tip y', 'Ring Tip z', 'Pinky Proximal x', 'Pinky Proximal y',
'Pinky Proximal z', 'Pinky Intermediate x', 'Pinky Intermediate y',
'Pinky Intermediate z', 'Pinky Distal x', 'Pinky Distal y',
'Pinky Distal z', 'Pinky Tip x', 'Pinky Tip y', 'Pinky Tip z']
combined_df = pd.DataFrame(data=combined, columns=column_names)
print ("Double check:")
print (combined_df.describe()['timestamp diff'])
name = "joined_data_{}_{}_{}.csv".format(len(combined_df), datetime.now().strftime("%d-%b-%y_%H:%M"), sys.argv[3])
combined_df.to_csv(name, index=False)
print ("Finished joining --> {} size = {}".format(name, len(combined_df)))
| [
"pandas.read_csv",
"numpy.array",
"numpy.zeros",
"datetime.datetime.now",
"pandas.DataFrame"
] | [((1446, 1470), 'pandas.read_csv', 'pd.read_csv', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1457, 1470), True, 'import pandas as pd\n'), ((1508, 1532), 'pandas.read_csv', 'pd.read_csv', (['sys.argv[2]'], {}), '(sys.argv[2])\n', (1519, 1532), True, 'import pandas as pd\n'), ((1864, 1889), 'numpy.zeros', 'np.zeros', (['(rows, columns)'], {}), '((rows, columns))\n', (1872, 1889), True, 'import numpy as np\n'), ((3914, 3963), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'combined', 'columns': 'column_names'}), '(data=combined, columns=column_names)\n', (3926, 3963), True, 'import pandas as pd\n'), ((2369, 2390), 'numpy.array', 'np.array', (['[time_leap]'], {}), '([time_leap])\n', (2377, 2390), True, 'import numpy as np\n'), ((2392, 2413), 'numpy.array', 'np.array', (['[time_diff]'], {}), '([time_diff])\n', (2400, 2413), True, 'import numpy as np\n'), ((4099, 4113), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4111, 4113), False, 'from datetime import datetime\n')] |
from PIL import Image
import os
import os.path
import sys
from torch.utils.data.sampler import *
def pil_loader(path):
# open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
with open(path, "rb") as img_file:
with Image.open(img_file) as cur_img:
img = cur_img.convert("RGB")
cur_img.close()
img_file.close()
return img
def accimage_loader(path):
import accimage
try:
return accimage.Image(path)
except IOError:
# Potentially a decoding problem, fall back to PIL.Image
return pil_loader(path)
def default_loader(path):
from torchvision import get_image_backend
if get_image_backend() == "accimage":
print("Use accimage")
return accimage_loader(path)
else:
return pil_loader(path)
class ImagenetRandomSampler(Sampler):
r"""Samples elements randomly from a given list of indices, without replacement.
Arguments:
indices (sequence): a sequence of indices
"""
def __init__(self, data_source, repeat_chunk=1):
self.data_source = data_source
self.indices = data_source.current_indices
self.repeat = repeat_chunk
def __iter__(self):
rpt = 0
while True:
shuffled_indices = [
self.indices[i] for i in torch.randperm(len(self.indices))
]
cur_len = len(self.indices)
for i in shuffled_indices:
yield i
rpt += 1
if rpt == self.repeat:
rpt = 0
self.data_source.load_next_chunk()
self.indices = self.data_source.current_indices
def __len__(self):
return len(self.data_source)
| [
"accimage.Image",
"PIL.Image.open",
"torchvision.get_image_backend"
] | [((493, 513), 'accimage.Image', 'accimage.Image', (['path'], {}), '(path)\n', (507, 513), False, 'import accimage\n'), ((713, 732), 'torchvision.get_image_backend', 'get_image_backend', ([], {}), '()\n', (730, 732), False, 'from torchvision import get_image_backend\n'), ((276, 296), 'PIL.Image.open', 'Image.open', (['img_file'], {}), '(img_file)\n', (286, 296), False, 'from PIL import Image\n')] |
from molsysmt._private_tools.lists_and_tuples import is_list_or_tuple
from molsysmt._private_tools._digestion import *
from molsysmt._private_tools.exceptions import *
from molsysmt.tools.molecular_systems import is_a_single_molecular_system
from molsysmt.multitool.convert import convert
from molsysmt.multitool.extract import extract
from molsysmt.multitool.append_frames import append_frames
def concatenate_frames(molecular_systems, selections='all', frame_indices='all', syntaxis='MolSysMT', to_form=None):
if is_a_single_molecular_system(molecular_systems):
raise NeedsMultipleMolecularSystemsError()
tmp_molecular_systems = []
for aux in molecular_systems:
tmp_molecular_systems.append([digest_molecular_system(aux)])
molecular_systems = tmp_molecular_systems
n_molecular_systems = len(molecular_systems)
if not is_list_or_tuple(selections):
selections = [selections for ii in range(n_molecular_systems)]
elif len(selections)!=n_molecular_systems:
raise ValueError("The length of the lists items and selections need to be equal.")
if not is_list_or_tuple(frame_indices):
frame_indices = [digest_frame_indices(frame_indices) for ii in range(n_molecular_systems)]
elif len(frame_indices)!=n_molecular_systems:
raise ValueError("The length of the lists items and frame_indices need to be equal.")
if to_form is None:
tmp_molecular_system = extract(molecular_systems[0], selection=selections[0], frame_indices=frame_indices[0])
else:
tmp_molecular_system = convert(molecular_systems[0], selection=selections[0], frame_indices=frame_indices[0], to_form=to_form)
append_frames(tmp_molecular_system, molecular_systems[1:], selections=selections[1:], frame_indices=frame_indices[1:])
return tmp_molecular_system
| [
"molsysmt.multitool.append_frames.append_frames",
"molsysmt._private_tools.lists_and_tuples.is_list_or_tuple",
"molsysmt.multitool.convert.convert",
"molsysmt.tools.molecular_systems.is_a_single_molecular_system",
"molsysmt.multitool.extract.extract"
] | [((521, 568), 'molsysmt.tools.molecular_systems.is_a_single_molecular_system', 'is_a_single_molecular_system', (['molecular_systems'], {}), '(molecular_systems)\n', (549, 568), False, 'from molsysmt.tools.molecular_systems import is_a_single_molecular_system\n'), ((1684, 1807), 'molsysmt.multitool.append_frames.append_frames', 'append_frames', (['tmp_molecular_system', 'molecular_systems[1:]'], {'selections': 'selections[1:]', 'frame_indices': 'frame_indices[1:]'}), '(tmp_molecular_system, molecular_systems[1:], selections=\n selections[1:], frame_indices=frame_indices[1:])\n', (1697, 1807), False, 'from molsysmt.multitool.append_frames import append_frames\n'), ((864, 892), 'molsysmt._private_tools.lists_and_tuples.is_list_or_tuple', 'is_list_or_tuple', (['selections'], {}), '(selections)\n', (880, 892), False, 'from molsysmt._private_tools.lists_and_tuples import is_list_or_tuple\n'), ((1115, 1146), 'molsysmt._private_tools.lists_and_tuples.is_list_or_tuple', 'is_list_or_tuple', (['frame_indices'], {}), '(frame_indices)\n', (1131, 1146), False, 'from molsysmt._private_tools.lists_and_tuples import is_list_or_tuple\n'), ((1447, 1538), 'molsysmt.multitool.extract.extract', 'extract', (['molecular_systems[0]'], {'selection': 'selections[0]', 'frame_indices': 'frame_indices[0]'}), '(molecular_systems[0], selection=selections[0], frame_indices=\n frame_indices[0])\n', (1454, 1538), False, 'from molsysmt.multitool.extract import extract\n'), ((1575, 1683), 'molsysmt.multitool.convert.convert', 'convert', (['molecular_systems[0]'], {'selection': 'selections[0]', 'frame_indices': 'frame_indices[0]', 'to_form': 'to_form'}), '(molecular_systems[0], selection=selections[0], frame_indices=\n frame_indices[0], to_form=to_form)\n', (1582, 1683), False, 'from molsysmt.multitool.convert import convert\n')] |
# Copyright (c) 2017 Cable Television Laboratories, Inc. ("CableLabs")
# and others. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import shutil
import time
import unittest
import uuid
import os
from neutronclient.common.exceptions import InvalidIpForSubnetClient
from snaps import file_utils
from snaps.openstack import create_network, create_router
from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings
from snaps.openstack.create_image import OpenStackImage, ImageSettings
from snaps.openstack.create_instance import (
VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings,
VmInstanceSettingsError, FloatingIpSettingsError)
from snaps.openstack.create_keypairs import OpenStackKeypair, KeypairSettings
from snaps.openstack.create_network import (
OpenStackNetwork, PortSettings, NetworkSettings)
from snaps.openstack.create_router import OpenStackRouter, RouterSettings
from snaps.openstack.create_security_group import (
SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings,
Direction, Protocol)
from snaps.openstack.create_volume import OpenStackVolume, VolumeSettings
from snaps.openstack.tests import openstack_tests, validation_utils
from snaps.openstack.tests.os_source_file_test import (
OSIntegrationTestCase, OSComponentTestCase)
from snaps.openstack.utils import nova_utils
__author__ = 'spisarski'
VM_BOOT_TIMEOUT = 600
logger = logging.getLogger('create_instance_tests')
class VmInstanceSettingsUnitTests(unittest.TestCase):
"""
Tests the construction of the VmInstanceSettings class
"""
def test_no_params(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings()
def test_empty_config(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings(config=dict())
def test_name_only(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings(name='foo')
def test_config_with_name_only(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings(config={'name': 'foo'})
def test_name_flavor_only(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings(name='foo', flavor='bar')
def test_config_with_name_flavor_only(self):
with self.assertRaises(VmInstanceSettingsError):
VmInstanceSettings(config={'name': 'foo', 'flavor': 'bar'})
def test_name_flavor_port_only(self):
port_settings = PortSettings(name='foo-port', network_name='bar-net')
settings = VmInstanceSettings(name='foo', flavor='bar',
port_settings=[port_settings])
self.assertEqual('foo', settings.name)
self.assertEqual('bar', settings.flavor)
self.assertEqual(1, len(settings.port_settings))
self.assertEqual('foo-port', settings.port_settings[0].name)
self.assertEqual('bar-net', settings.port_settings[0].network_name)
self.assertEqual(0, len(settings.security_group_names))
self.assertEqual(0, len(settings.floating_ip_settings))
self.assertIsNone(settings.sudo_user)
self.assertEqual(900, settings.vm_boot_timeout)
self.assertEqual(300, settings.vm_delete_timeout)
self.assertEqual(180, settings.ssh_connect_timeout)
self.assertIsNone(settings.availability_zone)
self.assertIsNone(settings.volume_names)
def test_config_with_name_flavor_port_only(self):
port_settings = PortSettings(name='foo-port', network_name='bar-net')
settings = VmInstanceSettings(
**{'name': 'foo', 'flavor': 'bar', 'ports': [port_settings]})
self.assertEqual('foo', settings.name)
self.assertEqual('bar', settings.flavor)
self.assertEqual(1, len(settings.port_settings))
self.assertEqual('foo-port', settings.port_settings[0].name)
self.assertEqual('bar-net', settings.port_settings[0].network_name)
self.assertEqual(0, len(settings.security_group_names))
self.assertEqual(0, len(settings.floating_ip_settings))
self.assertIsNone(settings.sudo_user)
self.assertEqual(900, settings.vm_boot_timeout)
self.assertEqual(300, settings.vm_delete_timeout)
self.assertEqual(180, settings.ssh_connect_timeout)
self.assertIsNone(settings.availability_zone)
self.assertIsNone(settings.volume_names)
def test_all(self):
port_settings = PortSettings(name='foo-port', network_name='bar-net')
fip_settings = FloatingIpSettings(name='foo-fip', port_name='bar-port',
router_name='foo-bar-router')
settings = VmInstanceSettings(
name='foo', flavor='bar', port_settings=[port_settings],
security_group_names=['sec_grp_1'],
floating_ip_settings=[fip_settings], sudo_user='joe',
vm_boot_timeout=999, vm_delete_timeout=333,
ssh_connect_timeout=111, availability_zone='server name',
volume_names=['vol1'])
self.assertEqual('foo', settings.name)
self.assertEqual('bar', settings.flavor)
self.assertEqual(1, len(settings.port_settings))
self.assertEqual('foo-port', settings.port_settings[0].name)
self.assertEqual('bar-net', settings.port_settings[0].network_name)
self.assertEqual(1, len(settings.security_group_names))
self.assertEqual('sec_grp_1', settings.security_group_names[0])
self.assertEqual(1, len(settings.floating_ip_settings))
self.assertEqual('foo-fip', settings.floating_ip_settings[0].name)
self.assertEqual('bar-port',
settings.floating_ip_settings[0].port_name)
self.assertEqual('foo-bar-router',
settings.floating_ip_settings[0].router_name)
self.assertEqual('joe', settings.sudo_user)
self.assertEqual(999, settings.vm_boot_timeout)
self.assertEqual(333, settings.vm_delete_timeout)
self.assertEqual(111, settings.ssh_connect_timeout)
self.assertEqual('server name', settings.availability_zone)
self.assertEqual('vol1', settings.volume_names[0])
def test_config_all(self):
port_settings = PortSettings(name='foo-port', network_name='bar-net')
fip_settings = FloatingIpSettings(name='foo-fip', port_name='bar-port',
router_name='foo-bar-router')
settings = VmInstanceSettings(
**{'name': 'foo', 'flavor': 'bar', 'ports': [port_settings],
'security_group_names': ['sec_grp_1'],
'floating_ips': [fip_settings], 'sudo_user': 'joe',
'vm_boot_timeout': 999, 'vm_delete_timeout': 333,
'ssh_connect_timeout': 111, 'availability_zone': 'server name',
'volume_names': ['vol2']})
self.assertEqual('foo', settings.name)
self.assertEqual('bar', settings.flavor)
self.assertEqual(1, len(settings.port_settings))
self.assertEqual('foo-port', settings.port_settings[0].name)
self.assertEqual('bar-net', settings.port_settings[0].network_name)
self.assertEqual(1, len(settings.security_group_names))
self.assertEqual(1, len(settings.floating_ip_settings))
self.assertEqual('foo-fip', settings.floating_ip_settings[0].name)
self.assertEqual('bar-port',
settings.floating_ip_settings[0].port_name)
self.assertEqual('foo-bar-router',
settings.floating_ip_settings[0].router_name)
self.assertEqual('joe', settings.sudo_user)
self.assertEqual(999, settings.vm_boot_timeout)
self.assertEqual(333, settings.vm_delete_timeout)
self.assertEqual(111, settings.ssh_connect_timeout)
self.assertEqual('server name', settings.availability_zone)
self.assertEqual('vol2', settings.volume_names[0])
class FloatingIpSettingsUnitTests(unittest.TestCase):
"""
Tests the construction of the FloatingIpSettings class
"""
def test_no_params(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings()
def test_empty_config(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(**dict())
def test_name_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(name='foo')
def test_config_with_name_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(**{'name': 'foo'})
def test_name_port_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(name='foo', port_name='bar')
def test_config_with_name_port_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(**{'name': 'foo', 'port_name': 'bar'})
def test_name_router_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(name='foo', router_name='bar')
def test_config_with_name_router_only(self):
with self.assertRaises(FloatingIpSettingsError):
FloatingIpSettings(**{'name': 'foo', 'router_name': 'bar'})
def test_name_port_router_name_only(self):
settings = FloatingIpSettings(name='foo', port_name='foo-port',
router_name='bar-router')
self.assertEqual('foo', settings.name)
self.assertEqual('foo-port', settings.port_name)
self.assertIsNone(settings.port_id)
self.assertEqual('bar-router', settings.router_name)
self.assertIsNone(settings.subnet_name)
self.assertTrue(settings.provisioning)
def test_name_port_router_id_only(self):
settings = FloatingIpSettings(name='foo', port_id='foo-port',
router_name='bar-router')
self.assertEqual('foo', settings.name)
self.assertEqual('foo-port', settings.port_id)
self.assertIsNone(settings.port_name)
self.assertEqual('bar-router', settings.router_name)
self.assertIsNone(settings.subnet_name)
self.assertTrue(settings.provisioning)
def test_config_with_name_port_router_only(self):
settings = FloatingIpSettings(
**{'name': 'foo', 'port_name': 'foo-port',
'router_name': 'bar-router'})
self.assertEqual('foo', settings.name)
self.assertEqual('foo-port', settings.port_name)
self.assertIsNone(settings.port_id)
self.assertEqual('bar-router', settings.router_name)
self.assertIsNone(settings.subnet_name)
self.assertTrue(settings.provisioning)
def test_all(self):
settings = FloatingIpSettings(name='foo', port_name='foo-port',
router_name='bar-router',
subnet_name='bar-subnet',
provisioning=False)
self.assertEqual('foo', settings.name)
self.assertEqual('foo-port', settings.port_name)
self.assertIsNone(settings.port_id)
self.assertEqual('bar-router', settings.router_name)
self.assertEqual('bar-subnet', settings.subnet_name)
self.assertFalse(settings.provisioning)
def test_config_all(self):
settings = FloatingIpSettings(
**{'name': 'foo', 'port_name': 'foo-port',
'router_name': 'bar-router', 'subnet_name': 'bar-subnet',
'provisioning': False})
self.assertEqual('foo', settings.name)
self.assertEqual('foo-port', settings.port_name)
self.assertIsNone(settings.port_id)
self.assertEqual('bar-router', settings.router_name)
self.assertEqual('bar-subnet', settings.subnet_name)
self.assertFalse(settings.provisioning)
class SimpleHealthCheck(OSIntegrationTestCase):
"""
Test for the CreateInstance class with a single NIC/Port with Floating IPs
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file
within OpenStack
"""
super(self.__class__, self).__start__()
self.nova = nova_utils.nova_client(self.os_creds)
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = guid + '-inst'
self.port_1_name = guid + 'port-1'
# Initialize for tearDown()
self.image_creator = None
self.network_creator = None
self.flavor_creator = None
self.inst_creator = None
self.priv_net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-priv-net', subnet_name=guid + '-priv-subnet')
self.port_settings = PortSettings(
name=self.port_1_name,
network_name=self.priv_net_config.network_settings.name)
# Create Image
# Set the default image settings, then set any custom parameters sent
# from the app
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
try:
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, self.priv_net_config.network_settings)
self.network_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=10,
vcpus=1, metadata=self.flavor_metadata))
self.flavor_creator.create()
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message'
' - %s', e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s',
e)
super(self.__class__, self).__clean__()
def test_check_vm_ip_dhcp(self):
"""
Tests the creation of an OpenStack instance with a single port and
ensures that it's assigned IP address is the actual.
"""
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
ip = self.inst_creator.get_port_ip(self.port_settings.name)
self.assertIsNotNone(ip)
self.assertTrue(self.inst_creator.vm_active(block=True))
self.assertTrue(check_dhcp_lease(self.inst_creator, ip))
class CreateInstanceSimpleTests(OSIntegrationTestCase):
"""
Simple instance creation tests without any other objects
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file
within OpenStack
"""
super(self.__class__, self).__start__()
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = guid + '-inst'
self.nova = nova_utils.nova_client(self.os_creds)
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-pub-net', subnet_name=guid + '-pub-subnet',
router_name=guid + '-pub-router', external_net=self.ext_net_name)
# Initialize for tearDown()
self.image_creator = None
self.flavor_creator = None
self.network_creator = None
self.inst_creator = None
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=10,
vcpus=2, metadata=self.flavor_metadata))
self.flavor_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, net_config.network_settings)
self.network_creator.create()
self.port_settings = PortSettings(
name=guid + '-port',
network_name=net_config.network_settings.name)
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_create_delete_instance(self):
"""
Tests the creation of an OpenStack instance with a single port with a
static IP without a Floating IP.
"""
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create()
self.assertIsNotNone(nova_utils.get_server(
self.nova, vm_inst_settings=instance_settings))
# Delete instance
nova_utils.delete_vm_instance(self.nova, vm_inst)
self.assertTrue(self.inst_creator.vm_deleted(block=True))
self.assertIsNone(nova_utils.get_server(
self.nova, vm_inst_settings=instance_settings))
# Exception should not be thrown
self.inst_creator.clean()
class CreateInstanceSingleNetworkTests(OSIntegrationTestCase):
"""
Test for the CreateInstance class with a single NIC/Port with Floating IPs
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
self.nova = nova_utils.nova_client(self.os_creds)
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.keypair_priv_filepath = 'tmp/' + guid
self.keypair_pub_filepath = self.keypair_priv_filepath + '.pub'
self.keypair_name = guid + '-kp'
self.vm_inst_name = guid + '-inst'
self.port_1_name = guid + 'port-1'
self.port_2_name = guid + 'port-2'
self.floating_ip_name = guid + 'fip1'
# Initialize for tearDown()
self.image_creator = None
self.network_creator = None
self.router_creator = None
self.flavor_creator = None
self.keypair_creator = None
self.sec_grp_creator = None
self.inst_creators = list()
self.pub_net_config = openstack_tests.get_pub_net_config(
net_name=guid + '-pub-net', subnet_name=guid + '-pub-subnet',
router_name=guid + '-pub-router', external_net=self.ext_net_name)
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, self.pub_net_config.network_settings)
self.network_creator.create()
# Create Router
self.router_creator = OpenStackRouter(
self.os_creds, self.pub_net_config.router_settings)
self.router_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=10,
vcpus=2, metadata=self.flavor_metadata))
self.flavor_creator.create()
self.keypair_creator = OpenStackKeypair(
self.os_creds, KeypairSettings(
name=self.keypair_name,
public_filepath=self.keypair_pub_filepath,
private_filepath=self.keypair_priv_filepath))
self.keypair_creator.create()
sec_grp_name = guid + '-sec-grp'
rule1 = SecurityGroupRuleSettings(sec_grp_name=sec_grp_name,
direction=Direction.ingress,
protocol=Protocol.icmp)
rule2 = SecurityGroupRuleSettings(sec_grp_name=sec_grp_name,
direction=Direction.ingress,
protocol=Protocol.tcp,
port_range_min=22,
port_range_max=22)
self.sec_grp_creator = OpenStackSecurityGroup(
self.os_creds,
SecurityGroupSettings(name=sec_grp_name,
rule_settings=[rule1, rule2]))
self.sec_grp_creator.create()
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
for inst_creator in self.inst_creators:
try:
inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.keypair_creator:
try:
self.keypair_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning keypair with message - %s',
e)
if os.path.isfile(self.keypair_pub_filepath):
os.remove(self.keypair_pub_filepath)
if os.path.isfile(self.keypair_priv_filepath):
os.remove(self.keypair_priv_filepath)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.sec_grp_creator:
try:
self.sec_grp_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning security group with message'
' - %s', e)
if self.router_creator:
try:
self.router_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning router with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_single_port_static(self):
"""
Tests the creation of an OpenStack instance with a single port with a
static IP without a Floating IP.
"""
ip_1 = '10.55.1.100'
sub_settings = self.pub_net_config.network_settings.subnet_settings
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.pub_net_config.network_settings.name,
ip_addrs=[
{'subnet_name': sub_settings[0].name, 'ip': ip_1}])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings],
floating_ip_settings=[FloatingIpSettings(
name=self.floating_ip_name, port_name=self.port_1_name,
router_name=self.pub_net_config.router_settings.name)])
inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
self.inst_creators.append(inst_creator)
vm_inst = inst_creator.create()
self.assertEqual(ip_1, inst_creator.get_port_ip(self.port_1_name))
self.assertTrue(inst_creator.vm_active(block=True))
self.assertEqual(vm_inst.id, inst_creator.get_vm_inst().id)
def test_ssh_client_fip_before_active(self):
"""
Tests the ability to access a VM via SSH and a floating IP when it has
been assigned prior to being active.
"""
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.pub_net_config.network_settings.name)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings],
floating_ip_settings=[FloatingIpSettings(
name=self.floating_ip_name, port_name=self.port_1_name,
router_name=self.pub_net_config.router_settings.name)])
inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
self.inst_creators.append(inst_creator)
vm_inst = inst_creator.create()
self.assertIsNotNone(vm_inst)
self.assertTrue(inst_creator.vm_active(block=True))
ip = inst_creator.get_port_ip(port_settings.name)
self.assertTrue(check_dhcp_lease(inst_creator, ip))
inst_creator.add_security_group(
self.sec_grp_creator.get_security_group())
self.assertEqual(vm_inst.id, inst_creator.get_vm_inst().id)
self.assertTrue(validate_ssh_client(inst_creator))
def test_ssh_client_fip_after_active(self):
"""
Tests the ability to access a VM via SSH and a floating IP when it has
been assigned prior to being active.
"""
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.pub_net_config.network_settings.name)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings],
floating_ip_settings=[FloatingIpSettings(
name=self.floating_ip_name, port_name=self.port_1_name,
router_name=self.pub_net_config.router_settings.name)])
inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
self.inst_creators.append(inst_creator)
# block=True will force the create() method to block until the
vm_inst = inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
self.assertTrue(inst_creator.vm_active(block=True))
ip = inst_creator.get_port_ip(port_settings.name)
self.assertTrue(check_dhcp_lease(inst_creator, ip))
inst_creator.add_security_group(
self.sec_grp_creator.get_security_group())
self.assertEqual(vm_inst.id, inst_creator.get_vm_inst().id)
self.assertTrue(validate_ssh_client(inst_creator))
def test_ssh_client_fip_second_creator(self):
"""
Tests the ability to access a VM via SSH and a floating IP via a
creator that is identical to the original creator.
"""
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.pub_net_config.network_settings.name)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings],
floating_ip_settings=[FloatingIpSettings(
name=self.floating_ip_name, port_name=self.port_1_name,
router_name=self.pub_net_config.router_settings.name)])
inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
self.inst_creators.append(inst_creator)
# block=True will force the create() method to block until the
vm_inst = inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
self.assertTrue(inst_creator.vm_active(block=True))
ip = inst_creator.get_port_ip(port_settings.name)
self.assertTrue(check_dhcp_lease(inst_creator, ip))
inst_creator.add_security_group(
self.sec_grp_creator.get_security_group())
self.assertEqual(vm_inst.id, inst_creator.get_vm_inst().id)
self.assertTrue(validate_ssh_client(inst_creator))
inst_creator2 = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
inst_creator2.create()
self.assertTrue(validate_ssh_client(inst_creator2))
class CreateInstancePortManipulationTests(OSIntegrationTestCase):
"""
Test for the CreateInstance class with a single NIC/Port where mac and IP
values are manually set
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = guid + '-inst'
self.port_1_name = guid + 'port-1'
self.port_2_name = guid + 'port-2'
self.floating_ip_name = guid + 'fip1'
# Initialize for tearDown()
self.image_creator = None
self.network_creator = None
self.flavor_creator = None
self.inst_creator = None
self.net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-pub-net', subnet_name=guid + '-pub-subnet',
router_name=guid + '-pub-router', external_net=self.ext_net_name)
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, self.net_config.network_settings)
self.network_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=10,
vcpus=2, metadata=self.flavor_metadata))
self.flavor_creator.create()
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_set_custom_valid_ip_one_subnet(self):
"""
Tests the creation of an OpenStack instance with a single port with a
static IP on a network with one subnet.
"""
ip = '10.55.0.101'
sub_settings = self.net_config.network_settings.subnet_settings
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
ip_addrs=[{'subnet_name': sub_settings[0].name, 'ip': ip}])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create(block=True)
self.assertEqual(ip, self.inst_creator.get_port_ip(
self.port_1_name,
subnet_name=self.net_config.network_settings.subnet_settings[
0].name))
def test_set_custom_invalid_ip_one_subnet(self):
"""
Tests the creation of an OpenStack instance with a single port with a
static IP on a network with one subnet.
"""
ip = '10.66.0.101'
sub_settings = self.net_config.network_settings.subnet_settings
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
ip_addrs=[{'subnet_name': sub_settings[0].name, 'ip': ip}])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
with self.assertRaises(InvalidIpForSubnetClient):
self.inst_creator.create()
def test_set_custom_valid_mac(self):
"""
Tests the creation of an OpenStack instance with a single port where
the MAC address is assigned.
"""
mac_addr = '0a:1b:2c:3d:4e:5f'
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
mac_address=mac_addr)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create(block=True)
self.assertEqual(mac_addr,
self.inst_creator.get_port_mac(self.port_1_name))
def test_set_custom_invalid_mac(self):
"""
Tests the creation of an OpenStack instance with a single port where an
invalid MAC address value is being
assigned. This should raise an Exception
"""
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
mac_address='foo')
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
with self.assertRaises(Exception):
self.inst_creator.create()
def test_set_custom_mac_and_ip(self):
"""
Tests the creation of an OpenStack instance with a single port where
the IP and MAC address is assigned.
"""
ip = '10.55.0.101'
mac_addr = '0a:1b:2c:3d:4e:5f'
sub_settings = self.net_config.network_settings.subnet_settings
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
mac_address=mac_addr,
ip_addrs=[{'subnet_name': sub_settings[0].name, 'ip': ip}])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create(block=True)
self.assertEqual(ip, self.inst_creator.get_port_ip(
self.port_1_name,
subnet_name=self.net_config.network_settings.subnet_settings[
0].name))
self.assertEqual(mac_addr,
self.inst_creator.get_port_mac(self.port_1_name))
def test_set_allowed_address_pairs(self):
"""
Tests the creation of an OpenStack instance with a single port where
max_allowed_address_pair is set.
"""
ip = '10.55.0.101'
mac_addr = '0a:1b:2c:3d:4e:5f'
pair = {'ip_address': ip, 'mac_address': mac_addr}
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
allowed_address_pairs=[pair])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create(block=True)
port = self.inst_creator.get_port_by_name(port_settings.name)
self.assertIsNotNone(port)
self.assertIsNotNone(port.allowed_address_pairs)
self.assertEqual(1, len(port.allowed_address_pairs))
validation_utils.objects_equivalent(pair,
port.allowed_address_pairs[0])
def test_set_allowed_address_pairs_bad_mac(self):
"""
Tests the creation of an OpenStack instance with a single port where
max_allowed_address_pair is set with an invalid MAC address.
"""
ip = '10.55.0.101'
mac_addr = 'foo'
pair = {'ip_address': ip, 'mac_address': mac_addr}
pairs = set()
pairs.add((ip, mac_addr))
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
allowed_address_pairs=[pair])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
with self.assertRaises(Exception):
self.inst_creator.create()
def test_set_allowed_address_pairs_bad_ip(self):
"""
Tests the creation of an OpenStack instance with a single port where
max_allowed_address_pair is set with an invalid MAC address.
"""
ip = 'foo'
mac_addr = '0a:1b:2c:3d:4e:5f'
pair = {'ip_address': ip, 'mac_address': mac_addr}
pairs = set()
pairs.add((ip, mac_addr))
port_settings = PortSettings(
name=self.port_1_name,
network_name=self.net_config.network_settings.name,
allowed_address_pairs=[pair])
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
with self.assertRaises(Exception):
self.inst_creator.create()
class CreateInstanceOnComputeHost(OSIntegrationTestCase):
"""
Test for the CreateInstance where one VM is deployed to each compute node
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = guid + '-inst'
self.port_base_name = guid + 'port'
# Initialize for tearDown()
self.image_creator = None
self.flavor_creator = None
self.network_creator = None
self.inst_creators = list()
self.priv_net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-priv-net', subnet_name=guid + '-priv-subnet')
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
try:
# Create Network
self.network_creator = OpenStackNetwork(
self.admin_os_creds, self.priv_net_config.network_settings)
self.network_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=512, disk=1,
vcpus=1, metadata=self.flavor_metadata))
self.flavor_creator.create()
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
for inst_creator in self.inst_creators:
try:
inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_deploy_vm_to_each_compute_node(self):
"""
Tests the creation of OpenStack VM instances to each compute node.
"""
from snaps.openstack.utils import nova_utils
nova = nova_utils.nova_client(self.admin_os_creds)
zone_hosts = nova_utils.get_availability_zone_hosts(nova)
# Create Instance on each server/zone
ctr = 0
for zone in zone_hosts:
inst_name = self.vm_inst_name + '-' + zone
ctr += 1
port_settings = PortSettings(
name=self.port_base_name + '-' + str(ctr),
network_name=self.priv_net_config.network_settings.name)
instance_settings = VmInstanceSettings(
name=inst_name,
flavor=self.flavor_creator.flavor_settings.name,
availability_zone=zone,
port_settings=[port_settings])
inst_creator = OpenStackVmInstance(
self.admin_os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creators.append(inst_creator)
inst_creator.create()
# Validate instances to ensure they've been deployed to the correct
# server
index = 0
for zone in zone_hosts:
creator = self.inst_creators[index]
self.assertTrue(creator.vm_active(block=True))
info = creator.get_vm_info()
deployed_zone = info['OS-EXT-AZ:availability_zone']
deployed_host = info['OS-EXT-SRV-ATTR:host']
self.assertEqual(zone, deployed_zone + ':' + deployed_host)
index += 1
class CreateInstancePubPrivNetTests(OSIntegrationTestCase):
"""
Test for the CreateInstance class with two NIC/Ports, eth0 with floating IP
and eth1 w/o.
These tests require a Centos image
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
self.nova = nova_utils.nova_client(self.os_creds)
# Initialize for tearDown()
self.image_creator = None
self.network_creators = list()
self.router_creators = list()
self.flavor_creator = None
self.keypair_creator = None
self.sec_grp_creator = None
self.inst_creator = None
self.guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.keypair_priv_filepath = 'tmp/' + self.guid
self.keypair_pub_filepath = self.keypair_priv_filepath + '.pub'
self.keypair_name = self.guid + '-kp'
self.vm_inst_name = self.guid + '-inst'
self.port_1_name = self.guid + '-port-1'
self.port_2_name = self.guid + '-port-2'
self.floating_ip_name = self.guid + 'fip1'
self.priv_net_config = openstack_tests.get_priv_net_config(
net_name=self.guid + '-priv-net',
subnet_name=self.guid + '-priv-subnet',
router_name=self.guid + '-priv-router',
external_net=self.ext_net_name)
self.pub_net_config = openstack_tests.get_pub_net_config(
net_name=self.guid + '-pub-net',
subnet_name=self.guid + '-pub-subnet',
router_name=self.guid + '-pub-router',
external_net=self.ext_net_name)
image_name = self.__class__.__name__ + '-' + str(uuid.uuid4())
os_image_settings = openstack_tests.centos_image_settings(
name=image_name, image_metadata=self.image_metadata)
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# First network is public
self.network_creators.append(OpenStackNetwork(
self.os_creds, self.pub_net_config.network_settings))
# Second network is private
self.network_creators.append(OpenStackNetwork(
self.os_creds, self.priv_net_config.network_settings))
for network_creator in self.network_creators:
network_creator.create()
self.router_creators.append(OpenStackRouter(
self.os_creds, self.pub_net_config.router_settings))
self.router_creators.append(OpenStackRouter(
self.os_creds, self.priv_net_config.router_settings))
# Create Routers
for router_creator in self.router_creators:
router_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=self.guid + '-flavor-name', ram=512,
disk=10, vcpus=2,
metadata=self.flavor_metadata))
self.flavor_creator.create()
# Create Keypair
self.keypair_creator = OpenStackKeypair(
self.os_creds, KeypairSettings(
name=self.keypair_name,
public_filepath=self.keypair_pub_filepath,
private_filepath=self.keypair_priv_filepath))
self.keypair_creator.create()
sec_grp_name = self.guid + '-sec-grp'
rule1 = SecurityGroupRuleSettings(sec_grp_name=sec_grp_name,
direction=Direction.ingress,
protocol=Protocol.icmp)
rule2 = SecurityGroupRuleSettings(sec_grp_name=sec_grp_name,
direction=Direction.ingress,
protocol=Protocol.tcp,
port_range_min=22,
port_range_max=22)
self.sec_grp_creator = OpenStackSecurityGroup(
self.os_creds,
SecurityGroupSettings(name=sec_grp_name,
rule_settings=[rule1, rule2]))
self.sec_grp_creator.create()
except:
self.tearDown()
raise
def tearDown(self):
"""
Cleans the created objects
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.keypair_creator:
try:
self.keypair_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning keypair with message - %s',
e)
if os.path.isfile(self.keypair_pub_filepath):
os.remove(self.keypair_pub_filepath)
if os.path.isfile(self.keypair_priv_filepath):
os.remove(self.keypair_priv_filepath)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
for router_creator in self.router_creators:
try:
router_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning router with message - %s',
e)
for network_creator in self.network_creators:
try:
network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.sec_grp_creator:
try:
self.sec_grp_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning security group with message'
' - %s', e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_dual_ports_dhcp(self):
"""
Tests the creation of an OpenStack instance with a dual ports/NICs with
a DHCP assigned IP.
NOTE: This test and any others that call ansible will most likely fail
unless you do one of two things:
1. Have a ~/.ansible.cfg (or alternate means) to
set host_key_checking = False
2. Set the following environment variable in your executing shell:
ANSIBLE_HOST_KEY_CHECKING=False
Should this not be performed, the creation of the host ssh key will
cause your ansible calls to fail.
"""
# Create ports/NICs for instance
ports_settings = []
ctr = 1
for network_creator in self.network_creators:
ports_settings.append(PortSettings(
name=self.guid + '-port-' + str(ctr),
network_name=network_creator.network_settings.name))
ctr += 1
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=ports_settings,
floating_ip_settings=[FloatingIpSettings(
name=self.floating_ip_name, port_name=self.port_1_name,
router_name=self.pub_net_config.router_settings.name)])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings,
keypair_settings=self.keypair_creator.keypair_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertEqual(vm_inst.id, self.inst_creator.get_vm_inst().id)
# Effectively blocks until VM has been properly activated
self.assertTrue(self.inst_creator.vm_active(block=True))
ip = self.inst_creator.get_port_ip(ports_settings[0].name)
self.assertTrue(check_dhcp_lease(self.inst_creator, ip))
# Add security group to VM
self.inst_creator.add_security_group(
self.sec_grp_creator.get_security_group())
# Effectively blocks until VM's ssh port has been opened
self.assertTrue(self.inst_creator.vm_ssh_active(block=True))
self.assertEqual(0, self.inst_creator.config_nics())
class InstanceSecurityGroupTests(OSIntegrationTestCase):
"""
Tests that include, add, and remove security groups from VM instances
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
self.guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = self.guid + '-inst'
self.nova = nova_utils.nova_client(self.os_creds)
os_image_settings = openstack_tests.cirros_image_settings(
name=self.guid + '-image', image_metadata=self.image_metadata)
self.vm_inst_name = self.guid + '-inst'
self.port_1_name = self.guid + 'port-1'
self.port_2_name = self.guid + 'port-2'
self.floating_ip_name = self.guid + 'fip1'
net_config = openstack_tests.get_priv_net_config(
net_name=self.guid + '-pub-net',
subnet_name=self.guid + '-pub-subnet',
router_name=self.guid + '-pub-router',
external_net=self.ext_net_name)
# Initialize for tearDown()
self.image_creator = None
self.flavor_creator = None
self.network_creator = None
self.router_creator = None
self.inst_creator = None
self.sec_grp_creators = list()
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, net_config.network_settings)
self.network_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=self.guid + '-flavor-name', ram=256,
disk=10, vcpus=2,
metadata=self.flavor_metadata))
self.flavor_creator.create()
self.port_settings = PortSettings(
name=self.guid + '-port',
network_name=net_config.network_settings.name)
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message -'
' %s', e)
for sec_grp_creator in self.sec_grp_creators:
try:
sec_grp_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning security group with message'
' - %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_add_security_group(self):
"""
Tests the addition of a security group created after the instance.
"""
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
# Create security group object to add to instance
sec_grp_settings = SecurityGroupSettings(name=self.guid + '-name',
description='hello group')
sec_grp_creator = OpenStackSecurityGroup(self.os_creds,
sec_grp_settings)
sec_grp = sec_grp_creator.create()
self.sec_grp_creators.append(sec_grp_creator)
# Check that group has not been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
# Add security group to instance after activated
self.inst_creator.add_security_group(sec_grp)
# Validate that security group has been added
self.assertTrue(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
def test_add_invalid_security_group(self):
"""
Tests the addition of a security group that no longer exists.
"""
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
# Create security group object to add to instance
sec_grp_settings = SecurityGroupSettings(name=self.guid + '-name',
description='hello group')
sec_grp_creator = OpenStackSecurityGroup(self.os_creds,
sec_grp_settings)
sec_grp = sec_grp_creator.create()
sec_grp_creator.clean()
self.sec_grp_creators.append(sec_grp_creator)
# Check that group has not been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
# Add security group to instance after activated
self.assertFalse(self.inst_creator.add_security_group(sec_grp))
# Validate that security group has been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
def test_remove_security_group(self):
"""
Tests the removal of a security group created before and added to the
instance.
"""
# Create security group object to add to instance
sec_grp_settings = SecurityGroupSettings(name=self.guid + '-name',
description='hello group')
sec_grp_creator = OpenStackSecurityGroup(self.os_creds,
sec_grp_settings)
sec_grp = sec_grp_creator.create()
self.sec_grp_creators.append(sec_grp_creator)
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
security_group_names=[sec_grp_settings.name],
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
# Check that group has been added
self.assertTrue(inst_has_sec_grp(
self.nova, vm_inst, sec_grp_settings.name))
# Add security group to instance after activated
self.assertTrue(self.inst_creator.remove_security_group(sec_grp))
# Validate that security group has been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
def test_remove_security_group_never_added(self):
"""
Tests the removal of a security group that was never added in the first
place.
"""
# Create security group object to add to instance
sec_grp_settings = SecurityGroupSettings(name=self.guid + '-name',
description='hello group')
sec_grp_creator = OpenStackSecurityGroup(self.os_creds,
sec_grp_settings)
sec_grp = sec_grp_creator.create()
self.sec_grp_creators.append(sec_grp_creator)
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
# Check that group has been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
# Add security group to instance after activated
self.assertFalse(self.inst_creator.remove_security_group(sec_grp))
# Validate that security group has been added
self.assertFalse(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
def test_add_same_security_group(self):
"""
Tests the addition of a security group created before add added to the
instance.
"""
# Create security group object to add to instance
sec_grp_settings = SecurityGroupSettings(name=self.guid + '-name',
description='hello group')
sec_grp_creator = OpenStackSecurityGroup(self.os_creds,
sec_grp_settings)
sec_grp = sec_grp_creator.create()
self.sec_grp_creators.append(sec_grp_creator)
# Create instance
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
security_group_names=[sec_grp_settings.name],
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(vm_inst)
# Check that group has been added
self.assertTrue(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
# Add security group to instance after activated
self.assertTrue(self.inst_creator.add_security_group(sec_grp))
# Validate that security group has been added
self.assertTrue(inst_has_sec_grp(
self.nova, self.inst_creator.get_vm_inst(), sec_grp_settings.name))
def inst_has_sec_grp(nova, vm_inst, sec_grp_name):
"""
Returns true if instance has a security group of a given name
:param nova: the nova client
:param vm_inst: the VmInst domain object
:param sec_grp_name: the name of the security group to validate
:return: T/F
"""
sec_grp_names = nova_utils.get_server_security_group_names(nova, vm_inst)
for name in sec_grp_names:
if sec_grp_name == name:
return True
return False
def validate_ssh_client(instance_creator):
"""
Returns True if instance_creator returns an SSH client that is valid
:param instance_creator: the object responsible for creating the VM
instance
:return: T/F
"""
ssh_active = instance_creator.vm_ssh_active(block=True)
if ssh_active:
ssh_client = instance_creator.ssh_client()
if ssh_client:
try:
out = ssh_client.exec_command('pwd')[1]
channel = out.channel
in_buffer = channel.in_buffer
pwd_out = in_buffer.read(1024)
if not pwd_out or len(pwd_out) < 10:
return False
return True
finally:
ssh_client.close()
else:
return False
return False
class CreateInstanceFromThreePartImage(OSIntegrationTestCase):
"""
Test for the CreateInstance class for creating an image from a 3-part image
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.image_name = guid
self.vm_inst_name = guid + '-inst'
self.nova = nova_utils.nova_client(self.os_creds)
net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-pub-net', subnet_name=guid + '-pub-subnet',
router_name=guid + '-pub-router', external_net=self.ext_net_name)
# Initialize for tearDown()
self.image_creator = None
self.network_creator = None
self.flavor_creator = None
self.inst_creator = None
try:
if self.image_metadata and 'disk_file' in self.image_metadata:
metadata = self.image_metadata
elif self.image_metadata and 'cirros' in self.image_metadata \
and 'disk_file' in self.image_metadata['cirros']:
metadata = self.image_metadata['cirros']
else:
metadata = {
'disk_url': openstack_tests.CIRROS_DEFAULT_IMAGE_URL,
'kernel_url':
openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL,
'ramdisk_url':
openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL}
image_settings = openstack_tests.cirros_image_settings(
name=self.image_name,
image_metadata=metadata)
if not image_settings.ramdisk_image_settings or not \
image_settings.kernel_image_settings:
logger.warn(
'3 Part image will not be tested. Image metadata has '
'overridden this functionality')
self.image_creator = OpenStackImage(self.os_creds, image_settings)
self.image_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=10,
vcpus=2, metadata=self.flavor_metadata))
self.flavor_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, net_config.network_settings)
self.network_creator.create()
self.port_settings = PortSettings(
name=guid + '-port',
network_name=net_config.network_settings.name)
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message -'
' %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_create_instance_from_three_part_image(self):
"""
Tests the creation of an OpenStack instance from a 3-part image.
"""
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
# The last created image is the main image from which we create the
# instance
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create()
self.assertIsNotNone(vm_inst)
self.assertTrue(self.inst_creator.vm_active(block=True))
class CreateInstanceMockOfflineTests(OSComponentTestCase):
"""
Tests the custom image_metadata that can be set by clients for handling
images differently than the default behavior of the existing tests
primarily for offline testing
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
self.guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.tmpDir = 'tmp/' + str(self.guid)
if not os.path.exists(self.tmpDir):
os.makedirs(self.tmpDir)
self.image_name = self.guid + '-image'
self.vm_inst_name = self.guid + '-inst'
self.port_1_name = self.guid + 'port-1'
# Initialize for tearDown()
self.image_creator = None
self.network_creator = None
self.flavor_creator = None
self.inst_creator = None
self.priv_net_config = openstack_tests.get_priv_net_config(
net_name=self.guid + '-priv-net',
subnet_name=self.guid + '-priv-subnet')
self.port_settings = PortSettings(
name=self.port_1_name,
network_name=self.priv_net_config.network_settings.name)
try:
# Download image file
self.image_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_IMAGE_URL, self.tmpDir)
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, self.priv_net_config.network_settings)
self.network_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.os_creds,
FlavorSettings(
name=self.guid + '-flavor-name', ram=256, disk=10,
vcpus=1))
self.flavor_creator.create()
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message - '
'%s', e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.image_creator:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
if os.path.exists(self.tmpDir) and os.path.isdir(self.tmpDir):
shutil.rmtree(self.tmpDir)
def test_inst_from_file_image_simple_flat(self):
"""
Creates a VM instance from a locally sourced file image using simply
the 'disk_file' attribute vs. using the 'config' option which
completely overrides all image settings
:return:
"""
metadata = {'disk_file': self.image_file.name}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.assertEqual(self.image_file.name, os_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.image_user)
self.assertIsNone(os_image_settings.url)
self.assertFalse(os_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.format)
self.assertIsNone(os_image_settings.kernel_image_settings)
self.assertIsNone(os_image_settings.ramdisk_image_settings)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_image_simple_nested(self):
"""
Creates a VM instance from a locally sourced file image using simply
the 'disk_file' attribute under 'cirros' vs. using the 'config' option
which completely overrides all image settings
:return:
"""
metadata = {'cirros': {'disk_file': self.image_file.name}}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.assertEqual(self.image_file.name, os_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.image_user)
self.assertIsNone(os_image_settings.url)
self.assertFalse(os_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.format)
self.assertIsNone(os_image_settings.kernel_image_settings)
self.assertIsNone(os_image_settings.ramdisk_image_settings)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_existing(self):
"""
Creates a VM instance from a image creator that has been configured to
use an existing image
:return:
"""
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
image_settings = self.image_creator.image_settings
test_image_creator = OpenStackImage(
self.os_creds,
ImageSettings(name=image_settings.name,
image_user=image_settings.image_user,
exists=True))
test_image_creator.create()
self.assertEqual(self.image_creator.get_image().id,
test_image_creator.get_image().id)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
test_image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_image_complex(self):
"""
Creates a VM instance from a locally sourced file image by overriding
the default settings by using a dict() that can be read in by
ImageSettings
:return:
"""
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
metadata = {
'cirros': {
'config': {
'name': os_image_settings.name,
'image_user': os_image_settings.image_user,
'exists': True}}}
test_image_settings = openstack_tests.cirros_image_settings(
image_metadata=metadata)
test_image = OpenStackImage(self.os_creds, test_image_settings)
test_image.create()
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(self.os_creds,
instance_settings,
test_image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_3part_image_complex(self):
"""
Creates a VM instance from a locally sourced file image by overriding
the default settings by using a dict() that can be read in by
ImageSettings
:return:
"""
kernel_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.tmpDir)
ramdisk_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.tmpDir)
metadata = {
'cirros': {
'config': {
'name': self.image_name,
'image_user': openstack_tests.CIRROS_USER,
'image_file': self.image_file.name,
'format': openstack_tests.DEFAULT_IMAGE_FORMAT,
'kernel_image_settings': {
'name': self.image_name + '-kernel',
'image_user': openstack_tests.CIRROS_USER,
'image_file': kernel_file.name,
'format': openstack_tests.DEFAULT_IMAGE_FORMAT},
'ramdisk_image_settings': {
'name': self.image_name + '-ramdisk',
'image_user': openstack_tests.CIRROS_USER,
'image_file': ramdisk_file.name,
'format': openstack_tests.DEFAULT_IMAGE_FORMAT}}}}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.assertEqual(self.image_name, os_image_settings.name)
self.assertEqual(self.image_file.name, os_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.image_user)
self.assertIsNone(os_image_settings.url)
self.assertFalse(os_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.format)
self.assertIsNotNone(os_image_settings.kernel_image_settings)
self.assertEqual(self.image_name + '-kernel',
os_image_settings.kernel_image_settings.name)
self.assertEqual(kernel_file.name,
os_image_settings.kernel_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.kernel_image_settings.image_user)
self.assertIsNone(os_image_settings.kernel_image_settings.url)
self.assertFalse(os_image_settings.kernel_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.kernel_image_settings.format)
self.assertIsNotNone(os_image_settings.ramdisk_image_settings)
self.assertEqual(self.image_name + '-ramdisk',
os_image_settings.ramdisk_image_settings.name)
self.assertEqual(ramdisk_file.name,
os_image_settings.ramdisk_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.ramdisk_image_settings.image_user)
self.assertIsNone(os_image_settings.ramdisk_image_settings.url)
self.assertFalse(os_image_settings.ramdisk_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.ramdisk_image_settings.format)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_3part_image_simple_flat(self):
"""
Creates a VM instance from a 3-part image locally sourced from file
images using simply the 'disk_file', 'kernel_file', and 'ramdisk_file'
attributes vs. using the 'config' option which completely overrides all
image settings
:return:
"""
kernel_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.tmpDir)
ramdisk_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.tmpDir)
metadata = {'disk_file': self.image_file.name,
'kernel_file': kernel_file.name,
'ramdisk_file': ramdisk_file.name}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.assertEqual(self.image_name, os_image_settings.name)
self.assertEqual(self.image_file.name, os_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.image_user)
self.assertIsNone(os_image_settings.url)
self.assertFalse(os_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.format)
self.assertIsNotNone(os_image_settings.kernel_image_settings)
self.assertEqual(self.image_name + '-kernel',
os_image_settings.kernel_image_settings.name)
self.assertEqual(kernel_file.name,
os_image_settings.kernel_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.kernel_image_settings.image_user)
self.assertIsNone(os_image_settings.kernel_image_settings.url)
self.assertFalse(os_image_settings.kernel_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.kernel_image_settings.format)
self.assertIsNotNone(os_image_settings.ramdisk_image_settings)
self.assertEqual(self.image_name + '-ramdisk',
os_image_settings.ramdisk_image_settings.name)
self.assertEqual(ramdisk_file.name,
os_image_settings.ramdisk_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.ramdisk_image_settings.image_user)
self.assertIsNone(os_image_settings.ramdisk_image_settings.url)
self.assertFalse(os_image_settings.ramdisk_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.ramdisk_image_settings.format)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
self.assertIsNotNone(self.image_creator.get_kernel_image())
self.assertIsNotNone(self.image_creator.get_ramdisk_image())
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_3part_image_simple_nested(self):
"""
Creates a VM instance from a 3-part image locally sourced from file
images using simply the 'disk_file', 'kernel_file', and 'ramdisk_file'
attributes under 'cirros' vs. using the 'config' option which
completely overrides all image settings
:return:
"""
kernel_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.tmpDir)
ramdisk_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.tmpDir)
metadata = {'cirros': {'disk_file': self.image_file.name,
'kernel_file': kernel_file.name,
'ramdisk_file': ramdisk_file.name}}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.assertEqual(self.image_name, os_image_settings.name)
self.assertEqual(self.image_file.name, os_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.image_user)
self.assertIsNone(os_image_settings.url)
self.assertFalse(os_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.format)
self.assertIsNotNone(os_image_settings.kernel_image_settings)
self.assertEqual(self.image_name + '-kernel',
os_image_settings.kernel_image_settings.name)
self.assertEqual(kernel_file.name,
os_image_settings.kernel_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.kernel_image_settings.image_user)
self.assertIsNone(os_image_settings.kernel_image_settings.url)
self.assertFalse(os_image_settings.kernel_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.kernel_image_settings.format)
self.assertIsNotNone(os_image_settings.ramdisk_image_settings)
self.assertEqual(self.image_name + '-ramdisk',
os_image_settings.ramdisk_image_settings.name)
self.assertEqual(ramdisk_file.name,
os_image_settings.ramdisk_image_settings.image_file)
self.assertEqual(openstack_tests.CIRROS_USER,
os_image_settings.ramdisk_image_settings.image_user)
self.assertIsNone(os_image_settings.ramdisk_image_settings.url)
self.assertFalse(os_image_settings.ramdisk_image_settings.exists)
self.assertEqual(openstack_tests.DEFAULT_IMAGE_FORMAT,
os_image_settings.ramdisk_image_settings.format)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
self.assertIsNotNone(self.image_creator.get_kernel_image())
self.assertIsNotNone(self.image_creator.get_ramdisk_image())
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
def test_inst_from_file_3part_image_existing(self):
"""
Creates a VM instance from a 3-part image that is existing
:return:
"""
kernel_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.tmpDir)
ramdisk_file = file_utils.download(
openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.tmpDir)
metadata = {'cirros': {'disk_file': self.image_file.name,
'kernel_file': kernel_file.name,
'ramdisk_file': ramdisk_file.name}}
os_image_settings = openstack_tests.cirros_image_settings(
name=self.image_name, image_metadata=metadata)
self.image_creator = OpenStackImage(self.os_creds, os_image_settings)
self.image_creator.create()
image_settings = self.image_creator.image_settings
test_image_creator = OpenStackImage(
self.os_creds,
ImageSettings(name=image_settings.name,
image_user=image_settings.image_user,
exists=True))
test_image_creator.create()
self.assertEqual(self.image_creator.get_image().id,
test_image_creator.get_image().id)
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
test_image_creator.image_settings)
self.inst_creator.create()
self.assertTrue(self.inst_creator.vm_active(block=True))
class CreateInstanceTwoNetTests(OSIntegrationTestCase):
"""
Tests the ability of two VMs to communicate when attached to separate
private networks that are tied together with a router.
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file within OpenStack
"""
super(self.__class__, self).__start__()
cidr1 = '10.200.201.0/24'
cidr2 = '10.200.202.0/24'
static_gateway_ip1 = '10.200.201.1'
static_gateway_ip2 = '10.200.202.1'
self.ip1 = '10.200.201.5'
self.ip2 = '10.200.202.5'
self.nova = nova_utils.nova_client(self.os_creds)
# Initialize for tearDown()
self.image_creator = None
self.network_creators = list()
self.router_creator = None
self.flavor_creator = None
self.sec_grp_creator = None
self.inst_creators = list()
self.guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst1_name = self.guid + '-inst1'
self.vm_inst2_name = self.guid + '-inst2'
self.port_1_name = self.guid + '-vm1-port'
self.port_2_name = self.guid + '-vm2-port'
self.net_config_1 = NetworkSettings(
name=self.guid + '-net1',
subnet_settings=[
create_network.SubnetSettings(
cidr=cidr1, name=self.guid + '-subnet1',
gateway_ip=static_gateway_ip1)])
self.net_config_2 = NetworkSettings(
name=self.guid + '-net2',
subnet_settings=[
create_network.SubnetSettings(
cidr=cidr2, name=self.guid + '-subnet2',
gateway_ip=static_gateway_ip2)])
image_name = self.__class__.__name__ + '-' + str(uuid.uuid4())
os_image_settings = openstack_tests.cirros_image_settings(
name=image_name, image_metadata=self.image_metadata)
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# First network is public
self.network_creators.append(OpenStackNetwork(
self.os_creds, self.net_config_1))
# Second network is private
self.network_creators.append(OpenStackNetwork(
self.os_creds, self.net_config_2))
for network_creator in self.network_creators:
network_creator.create()
port_settings = [
create_network.PortSettings(
name=self.guid + '-router-port1',
ip_addrs=[{
'subnet_name':
self.net_config_1.subnet_settings[0].name,
'ip': static_gateway_ip1
}],
network_name=self.net_config_1.name,
project_name=self.os_creds.project_name),
create_network.PortSettings(
name=self.guid + '-router-port2',
ip_addrs=[{
'subnet_name':
self.net_config_2.subnet_settings[0].name,
'ip': static_gateway_ip2
}],
network_name=self.net_config_2.name,
project_name=self.os_creds.project_name)]
router_settings = RouterSettings(name=self.guid + '-pub-router',
port_settings=port_settings)
self.router_creator = create_router.OpenStackRouter(
self.os_creds, router_settings)
self.router_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=self.guid + '-flavor-name', ram=512,
disk=10, vcpus=2,
metadata=self.flavor_metadata))
self.flavor_creator.create()
sec_grp_name = self.guid + '-sec-grp'
rule1 = SecurityGroupRuleSettings(sec_grp_name=sec_grp_name,
direction=Direction.ingress,
protocol=Protocol.icmp)
self.sec_grp_creator = OpenStackSecurityGroup(
self.os_creds,
SecurityGroupSettings(name=sec_grp_name,
rule_settings=[rule1]))
self.sec_grp_creator.create()
except:
self.tearDown()
raise
def tearDown(self):
"""
Cleans the created objects
"""
for inst_creator in self.inst_creators:
try:
inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.router_creator:
try:
self.router_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning router with message - %s',
e)
for network_creator in self.network_creators:
try:
network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.sec_grp_creator:
try:
self.sec_grp_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning security group with message'
' - %s', e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_ping_via_router(self):
"""
Tests the creation of two OpenStack instances with one port on
different private networks wit a router in between to ensure that they
can ping
through
"""
# Create ports/NICs for instance
ports_settings = []
ctr = 1
for network_creator in self.network_creators:
ports_settings.append(PortSettings(
name=self.guid + '-port-' + str(ctr),
network_name=network_creator.network_settings.name))
ctr += 1
# Configure instances
instance1_settings = VmInstanceSettings(
name=self.vm_inst1_name,
flavor=self.flavor_creator.flavor_settings.name,
userdata=_get_ping_userdata(self.ip2),
port_settings=[PortSettings(
name=self.port_1_name,
ip_addrs=[{
'subnet_name':
self.net_config_1.subnet_settings[0].name,
'ip': self.ip1
}],
network_name=self.network_creators[0].network_settings.name)])
instance2_settings = VmInstanceSettings(
name=self.vm_inst2_name,
flavor=self.flavor_creator.flavor_settings.name,
userdata=_get_ping_userdata(self.ip1),
port_settings=[PortSettings(
name=self.port_2_name,
ip_addrs=[{
'subnet_name':
self.net_config_2.subnet_settings[0].name,
'ip': self.ip2
}],
network_name=self.network_creators[1].network_settings.name)])
# Create instances
self.inst_creators.append(OpenStackVmInstance(
self.os_creds, instance1_settings,
self.image_creator.image_settings))
self.inst_creators.append(OpenStackVmInstance(
self.os_creds, instance2_settings,
self.image_creator.image_settings))
for inst_creator in self.inst_creators:
inst_creator.create(block=True)
# Check for DHCP lease
self.assertTrue(check_dhcp_lease(self.inst_creators[0], self.ip1))
self.assertTrue(check_dhcp_lease(self.inst_creators[1], self.ip2))
# Effectively blocks until VM has been properly activated
self.assertTrue(check_ping(self.inst_creators[0]))
self.assertTrue(check_ping(self.inst_creators[1]))
class CreateInstanceVolumeTests(OSIntegrationTestCase):
"""
Simple instance creation with an attached volume
"""
def setUp(self):
"""
Instantiates the CreateImage object that is responsible for downloading
and creating an OS image file
within OpenStack
"""
super(self.__class__, self).__start__()
guid = self.__class__.__name__ + '-' + str(uuid.uuid4())
self.vm_inst_name = guid + '-inst'
self.nova = nova_utils.nova_client(self.os_creds)
os_image_settings = openstack_tests.cirros_image_settings(
name=guid + '-image', image_metadata=self.image_metadata)
net_config = openstack_tests.get_priv_net_config(
net_name=guid + '-pub-net', subnet_name=guid + '-pub-subnet',
router_name=guid + '-pub-router', external_net=self.ext_net_name)
self.volume_settings1 = VolumeSettings(
name=self.__class__.__name__ + '-' + str(guid) + '-1')
self.volume_settings2 = VolumeSettings(
name=self.__class__.__name__ + '-' + str(guid) + '-2')
# Initialize for tearDown()
self.image_creator = None
self.flavor_creator = None
self.network_creator = None
self.inst_creator = None
self.volume_creator1 = None
self.volume_creator2 = None
try:
# Create Image
self.image_creator = OpenStackImage(self.os_creds,
os_image_settings)
self.image_creator.create()
# Create Flavor
self.flavor_creator = OpenStackFlavor(
self.admin_os_creds,
FlavorSettings(name=guid + '-flavor-name', ram=256, disk=1,
vcpus=2, metadata=self.flavor_metadata))
self.flavor_creator.create()
# Create Network
self.network_creator = OpenStackNetwork(
self.os_creds, net_config.network_settings)
self.network_creator.create()
self.port_settings = PortSettings(
name=guid + '-port',
network_name=net_config.network_settings.name)
self.volume_creator1 = OpenStackVolume(
self.os_creds, self.volume_settings1)
self.volume_creator1.create(block=True)
self.volume_creator2 = OpenStackVolume(
self.os_creds, self.volume_settings2)
self.volume_creator2.create(block=True)
except Exception as e:
self.tearDown()
raise e
def tearDown(self):
"""
Cleans the created object
"""
if self.inst_creator:
try:
self.inst_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning VM instance with message '
'- %s', e)
if self.flavor_creator:
try:
self.flavor_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning flavor with message - %s',
e)
if self.network_creator:
try:
self.network_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning network with message - %s',
e)
if self.volume_creator2:
try:
self.volume_creator2.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning volume with message - %s',
e)
if self.volume_creator1:
try:
self.volume_creator1.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning volume with message - %s',
e)
if self.image_creator and not self.image_creator.image_settings.exists:
try:
self.image_creator.clean()
except Exception as e:
logger.error(
'Unexpected exception cleaning image with message - %s', e)
super(self.__class__, self).__clean__()
def test_create_instance_with_one_volume(self):
"""
Tests the creation of an OpenStack instance with a single volume.
"""
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings],
volume_names=[self.volume_settings1.name])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(nova_utils.get_server(
self.nova, vm_inst_settings=instance_settings))
self.assertIsNotNone(vm_inst)
self.assertEqual(1, len(vm_inst.volume_ids))
self.assertEqual(self.volume_creator1.get_volume().id,
vm_inst.volume_ids[0]['id'])
def test_create_instance_with_two_volumes(self):
"""
Tests the creation of an OpenStack instance with a single volume.
"""
instance_settings = VmInstanceSettings(
name=self.vm_inst_name,
flavor=self.flavor_creator.flavor_settings.name,
port_settings=[self.port_settings],
volume_names=[self.volume_settings1.name,
self.volume_settings2.name])
self.inst_creator = OpenStackVmInstance(
self.os_creds, instance_settings,
self.image_creator.image_settings)
vm_inst = self.inst_creator.create(block=True)
self.assertIsNotNone(nova_utils.get_server(
self.nova, vm_inst_settings=instance_settings))
self.assertIsNotNone(vm_inst)
self.assertEqual(2, len(vm_inst.volume_ids))
self.assertEqual(self.volume_creator1.get_volume().id,
vm_inst.volume_ids[0]['id'])
self.assertEqual(self.volume_creator2.get_volume().id,
vm_inst.volume_ids[1]['id'])
def check_dhcp_lease(inst_creator, ip, timeout=160):
"""
Returns true if the expected DHCP lease has been acquired
:param inst_creator: the SNAPS OpenStackVmInstance object
:param ip: the IP address to look for
:param timeout: how long to query for IP address
:return:
"""
found = False
start_time = time.time()
logger.info("Looking for IP %s in the console log" % ip)
full_log = ''
while timeout > time.time() - start_time:
output = inst_creator.get_console_output()
full_log = full_log + output
if re.search(ip, output):
logger.info('DHCP lease obtained logged in console')
found = True
break
if not found:
logger.error('Full console output -\n' + full_log)
else:
logger.debug('Full console output -\n' + full_log)
return found
def _get_ping_userdata(test_ip):
"""
Returns the post VM creation script to be added into the VM's userdata
:param test_ip: the IP value to substitute into the script
:return: the bash script contents
"""
if test_ip:
return ("#!/bin/sh\n\n"
"while true; do\n"
" ping -c 1 %s 2>&1 >/dev/null\n"
" RES=$?\n"
" if [ \"Z$RES\" = \"Z0\" ] ; then\n"
" echo 'vPing OK'\n"
" break\n"
" else\n"
" echo 'vPing KO'\n"
" fi\n"
" sleep 1\n"
"done\n" % test_ip)
return None
def check_ping(vm_creator, timeout=160):
"""
Check for VM for ping result
"""
tries = 0
while tries < timeout:
time.sleep(1)
p_console = vm_creator.get_console_output()
if "vPing OK" in p_console:
return True
elif "failed to read iid from metadata" in p_console or tries > 5:
return False
tries += 1
return False
| [
"logging.getLogger",
"snaps.openstack.create_keypairs.KeypairSettings",
"snaps.openstack.create_router.OpenStackRouter",
"snaps.openstack.tests.openstack_tests.get_pub_net_config",
"snaps.openstack.create_image.OpenStackImage",
"snaps.openstack.create_instance.OpenStackVmInstance",
"time.sleep",
"snaps.openstack.create_instance.VmInstanceSettings",
"snaps.openstack.utils.nova_utils.delete_vm_instance",
"snaps.openstack.create_network.PortSettings",
"re.search",
"os.remove",
"os.path.exists",
"snaps.openstack.create_security_group.SecurityGroupSettings",
"snaps.file_utils.download",
"os.path.isdir",
"snaps.openstack.tests.openstack_tests.centos_image_settings",
"snaps.openstack.create_router.RouterSettings",
"snaps.openstack.utils.nova_utils.nova_client",
"snaps.openstack.create_volume.OpenStackVolume",
"snaps.openstack.tests.openstack_tests.get_priv_net_config",
"snaps.openstack.create_instance.FloatingIpSettings",
"snaps.openstack.create_security_group.SecurityGroupRuleSettings",
"snaps.openstack.utils.nova_utils.get_server",
"uuid.uuid4",
"os.path.isfile",
"snaps.openstack.create_flavor.FlavorSettings",
"snaps.openstack.create_security_group.OpenStackSecurityGroup",
"time.time",
"snaps.openstack.create_network.OpenStackNetwork",
"snaps.openstack.create_network.SubnetSettings",
"snaps.openstack.tests.openstack_tests.cirros_image_settings",
"snaps.openstack.utils.nova_utils.get_server_security_group_names",
"os.makedirs",
"shutil.rmtree",
"snaps.openstack.tests.validation_utils.objects_equivalent",
"snaps.openstack.utils.nova_utils.get_availability_zone_hosts",
"snaps.openstack.create_image.ImageSettings"
] | [((1985, 2027), 'logging.getLogger', 'logging.getLogger', (['"""create_instance_tests"""'], {}), "('create_instance_tests')\n", (2002, 2027), False, 'import logging\n'), ((70075, 70132), 'snaps.openstack.utils.nova_utils.get_server_security_group_names', 'nova_utils.get_server_security_group_names', (['nova', 'vm_inst'], {}), '(nova, vm_inst)\n', (70117, 70132), False, 'from snaps.openstack.utils import nova_utils\n'), ((113482, 113493), 'time.time', 'time.time', ([], {}), '()\n', (113491, 113493), False, 'import time\n'), ((3101, 3154), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': '"""foo-port"""', 'network_name': '"""bar-net"""'}), "(name='foo-port', network_name='bar-net')\n", (3113, 3154), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((3174, 3249), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': '"""foo"""', 'flavor': '"""bar"""', 'port_settings': '[port_settings]'}), "(name='foo', flavor='bar', port_settings=[port_settings])\n", (3192, 3249), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((4116, 4169), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': '"""foo-port"""', 'network_name': '"""bar-net"""'}), "(name='foo-port', network_name='bar-net')\n", (4128, 4169), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((4189, 4274), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {}), "(**{'name': 'foo', 'flavor': 'bar', 'ports': [port_settings]}\n )\n", (4207, 4274), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((5081, 5134), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': '"""foo-port"""', 'network_name': '"""bar-net"""'}), "(name='foo-port', network_name='bar-net')\n", (5093, 5134), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((5158, 5249), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo-fip"""', 'port_name': '"""bar-port"""', 'router_name': '"""foo-bar-router"""'}), "(name='foo-fip', port_name='bar-port', router_name=\n 'foo-bar-router')\n", (5176, 5249), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((5307, 5614), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': '"""foo"""', 'flavor': '"""bar"""', 'port_settings': '[port_settings]', 'security_group_names': "['sec_grp_1']", 'floating_ip_settings': '[fip_settings]', 'sudo_user': '"""joe"""', 'vm_boot_timeout': '(999)', 'vm_delete_timeout': '(333)', 'ssh_connect_timeout': '(111)', 'availability_zone': '"""server name"""', 'volume_names': "['vol1']"}), "(name='foo', flavor='bar', port_settings=[port_settings],\n security_group_names=['sec_grp_1'], floating_ip_settings=[fip_settings],\n sudo_user='joe', vm_boot_timeout=999, vm_delete_timeout=333,\n ssh_connect_timeout=111, availability_zone='server name', volume_names=\n ['vol1'])\n", (5325, 5614), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((6873, 6926), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': '"""foo-port"""', 'network_name': '"""bar-net"""'}), "(name='foo-port', network_name='bar-net')\n", (6885, 6926), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((6950, 7041), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo-fip"""', 'port_name': '"""bar-port"""', 'router_name': '"""foo-bar-router"""'}), "(name='foo-fip', port_name='bar-port', router_name=\n 'foo-bar-router')\n", (6968, 7041), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((7099, 7427), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {}), "(**{'name': 'foo', 'flavor': 'bar', 'ports': [\n port_settings], 'security_group_names': ['sec_grp_1'], 'floating_ips':\n [fip_settings], 'sudo_user': 'joe', 'vm_boot_timeout': 999,\n 'vm_delete_timeout': 333, 'ssh_connect_timeout': 111,\n 'availability_zone': 'server name', 'volume_names': ['vol2']})\n", (7117, 7427), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9969, 10047), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""', 'port_name': '"""foo-port"""', 'router_name': '"""bar-router"""'}), "(name='foo', port_name='foo-port', router_name='bar-router')\n", (9987, 10047), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((10455, 10531), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""', 'port_id': '"""foo-port"""', 'router_name': '"""bar-router"""'}), "(name='foo', port_id='foo-port', router_name='bar-router')\n", (10473, 10531), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((10948, 11043), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), "(**{'name': 'foo', 'port_name': 'foo-port', 'router_name':\n 'bar-router'})\n", (10966, 11043), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((11416, 11545), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""', 'port_name': '"""foo-port"""', 'router_name': '"""bar-router"""', 'subnet_name': '"""bar-subnet"""', 'provisioning': '(False)'}), "(name='foo', port_name='foo-port', router_name=\n 'bar-router', subnet_name='bar-subnet', provisioning=False)\n", (11434, 11545), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((12024, 12171), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), "(**{'name': 'foo', 'port_name': 'foo-port', 'router_name':\n 'bar-router', 'subnet_name': 'bar-subnet', 'provisioning': False})\n", (12042, 12171), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((12932, 12969), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (12954, 12969), False, 'from snaps.openstack.utils import nova_utils\n'), ((13328, 13431), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-priv-net')", 'subnet_name': "(guid + '-priv-subnet')"}), "(net_name=guid + '-priv-net',\n subnet_name=guid + '-priv-subnet')\n", (13363, 13431), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((13470, 13567), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.priv_net_config.network_settings.name'}), '(name=self.port_1_name, network_name=self.priv_net_config.\n network_settings.name)\n', (13482, 13567), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((13741, 13841), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (13778, 13841), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((16065, 16197), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (16083, 16197), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((16259, 16352), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (16278, 16352), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((17143, 17180), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (17165, 17180), False, 'from snaps.openstack.utils import nova_utils\n'), ((17209, 17309), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (17246, 17309), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((17340, 17513), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-pub-net')", 'subnet_name': "(guid + '-pub-subnet')", 'router_name': "(guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=guid + '-pub-net', subnet_name\n =guid + '-pub-subnet', router_name=guid + '-pub-router', external_net=\n self.ext_net_name)\n", (17375, 17513), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((20055, 20187), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (20073, 20187), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((20249, 20342), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (20268, 20342), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((20556, 20605), 'snaps.openstack.utils.nova_utils.delete_vm_instance', 'nova_utils.delete_vm_instance', (['self.nova', 'vm_inst'], {}), '(self.nova, vm_inst)\n', (20585, 20605), False, 'from snaps.openstack.utils import nova_utils\n'), ((21268, 21305), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (21290, 21305), False, 'from snaps.openstack.utils import nova_utils\n'), ((22026, 22198), 'snaps.openstack.tests.openstack_tests.get_pub_net_config', 'openstack_tests.get_pub_net_config', ([], {'net_name': "(guid + '-pub-net')", 'subnet_name': "(guid + '-pub-subnet')", 'router_name': "(guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=guid + '-pub-net', subnet_name=\n guid + '-pub-subnet', router_name=guid + '-pub-router', external_net=\n self.ext_net_name)\n", (22060, 22198), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((22242, 22342), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (22279, 22342), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((25150, 25191), 'os.path.isfile', 'os.path.isfile', (['self.keypair_pub_filepath'], {}), '(self.keypair_pub_filepath)\n', (25164, 25191), False, 'import os\n'), ((25254, 25296), 'os.path.isfile', 'os.path.isfile', (['self.keypair_priv_filepath'], {}), '(self.keypair_priv_filepath)\n', (25268, 25296), False, 'import os\n'), ((27051, 27213), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.pub_net_config.network_settings.name', 'ip_addrs': "[{'subnet_name': sub_settings[0].name, 'ip': ip_1}]"}), "(name=self.port_1_name, network_name=self.pub_net_config.\n network_settings.name, ip_addrs=[{'subnet_name': sub_settings[0].name,\n 'ip': ip_1}])\n", (27063, 27213), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((27670, 27819), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (27689, 27819), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((28366, 28462), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.pub_net_config.network_settings.name'}), '(name=self.port_1_name, network_name=self.pub_net_config.\n network_settings.name)\n', (28378, 28462), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((28894, 29043), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (28913, 29043), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((29828, 29924), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.pub_net_config.network_settings.name'}), '(name=self.port_1_name, network_name=self.pub_net_config.\n network_settings.name)\n', (29840, 29924), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((30356, 30505), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (30375, 30505), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((31382, 31478), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.pub_net_config.network_settings.name'}), '(name=self.port_1_name, network_name=self.pub_net_config.\n network_settings.name)\n', (31394, 31478), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((31910, 32059), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (31929, 32059), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((32730, 32879), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (32749, 32879), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((33865, 34038), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-pub-net')", 'subnet_name': "(guid + '-pub-subnet')", 'router_name': "(guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=guid + '-pub-net', subnet_name\n =guid + '-pub-subnet', router_name=guid + '-pub-router', external_net=\n self.ext_net_name)\n", (33900, 34038), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((34082, 34182), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (34119, 34182), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((36507, 36663), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'ip_addrs': "[{'subnet_name': sub_settings[0].name, 'ip': ip}]"}), "(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, ip_addrs=[{'subnet_name': sub_settings[0].name,\n 'ip': ip}])\n", (36519, 36663), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((36721, 36848), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (36739, 36848), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((36910, 37003), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (36929, 37003), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((37587, 37743), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'ip_addrs': "[{'subnet_name': sub_settings[0].name, 'ip': ip}]"}), "(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, ip_addrs=[{'subnet_name': sub_settings[0].name,\n 'ip': ip}])\n", (37599, 37743), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((37801, 37928), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (37819, 37928), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((37990, 38083), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (38009, 38083), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((38445, 38559), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'mac_address': 'mac_addr'}), '(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, mac_address=mac_addr)\n', (38457, 38559), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((38621, 38748), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (38639, 38748), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((38810, 38903), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (38829, 38903), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((39344, 39455), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'mac_address': '"""foo"""'}), "(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, mac_address='foo')\n", (39356, 39455), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((39517, 39644), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (39535, 39644), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((39706, 39799), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (39725, 39799), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((40253, 40431), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'mac_address': 'mac_addr', 'ip_addrs': "[{'subnet_name': sub_settings[0].name, 'ip': ip}]"}), "(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, mac_address=mac_addr, ip_addrs=[{'subnet_name':\n sub_settings[0].name, 'ip': ip}])\n", (40265, 40431), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((40501, 40628), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (40519, 40628), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((40690, 40783), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (40709, 40783), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((41488, 41610), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'allowed_address_pairs': '[pair]'}), '(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, allowed_address_pairs=[pair])\n', (41500, 41610), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((41672, 41799), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (41690, 41799), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((41861, 41954), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (41880, 41954), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((42252, 42324), 'snaps.openstack.tests.validation_utils.objects_equivalent', 'validation_utils.objects_equivalent', (['pair', 'port.allowed_address_pairs[0]'], {}), '(pair, port.allowed_address_pairs[0])\n', (42287, 42324), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((42785, 42907), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'allowed_address_pairs': '[pair]'}), '(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, allowed_address_pairs=[pair])\n', (42797, 42907), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((42969, 43096), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (42987, 43096), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((43158, 43251), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (43177, 43251), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((43775, 43897), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.net_config.network_settings.name', 'allowed_address_pairs': '[pair]'}), '(name=self.port_1_name, network_name=self.net_config.\n network_settings.name, allowed_address_pairs=[pair])\n', (43787, 43897), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((43959, 44086), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[port_settings])\n', (43977, 44086), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((44148, 44241), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (44167, 44241), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((45090, 45193), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-priv-net')", 'subnet_name': "(guid + '-priv-subnet')"}), "(net_name=guid + '-priv-net',\n subnet_name=guid + '-priv-subnet')\n", (45125, 45193), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((45232, 45332), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (45269, 45332), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((47575, 47618), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.admin_os_creds'], {}), '(self.admin_os_creds)\n', (47597, 47618), False, 'from snaps.openstack.utils import nova_utils\n'), ((47640, 47684), 'snaps.openstack.utils.nova_utils.get_availability_zone_hosts', 'nova_utils.get_availability_zone_hosts', (['nova'], {}), '(nova)\n', (47678, 47684), False, 'from snaps.openstack.utils import nova_utils\n'), ((49481, 49518), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (49503, 49518), False, 'from snaps.openstack.utils import nova_utils\n'), ((50280, 50469), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(self.guid + '-priv-net')", 'subnet_name': "(self.guid + '-priv-subnet')", 'router_name': "(self.guid + '-priv-router')", 'external_net': 'self.ext_net_name'}), "(net_name=self.guid + '-priv-net',\n subnet_name=self.guid + '-priv-subnet', router_name=self.guid +\n '-priv-router', external_net=self.ext_net_name)\n", (50315, 50469), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((50541, 50726), 'snaps.openstack.tests.openstack_tests.get_pub_net_config', 'openstack_tests.get_pub_net_config', ([], {'net_name': "(self.guid + '-pub-net')", 'subnet_name': "(self.guid + '-pub-subnet')", 'router_name': "(self.guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=self.guid + '-pub-net',\n subnet_name=self.guid + '-pub-subnet', router_name=self.guid +\n '-pub-router', external_net=self.ext_net_name)\n", (50575, 50726), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((50868, 50963), 'snaps.openstack.tests.openstack_tests.centos_image_settings', 'openstack_tests.centos_image_settings', ([], {'name': 'image_name', 'image_metadata': 'self.image_metadata'}), '(name=image_name, image_metadata=self.\n image_metadata)\n', (50905, 50963), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((54247, 54288), 'os.path.isfile', 'os.path.isfile', (['self.keypair_pub_filepath'], {}), '(self.keypair_pub_filepath)\n', (54261, 54288), False, 'import os\n'), ((54351, 54393), 'os.path.isfile', 'os.path.isfile', (['self.keypair_priv_filepath'], {}), '(self.keypair_priv_filepath)\n', (54365, 54393), False, 'import os\n'), ((57262, 57411), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {'keypair_settings': 'self.keypair_creator.keypair_settings'}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings, keypair_settings=self.keypair_creator.keypair_settings)\n', (57281, 57411), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((58690, 58727), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (58712, 58727), False, 'from snaps.openstack.utils import nova_utils\n'), ((58756, 58860), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(self.guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=self.guid + '-image',\n image_metadata=self.image_metadata)\n", (58793, 58860), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((59088, 59274), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(self.guid + '-pub-net')", 'subnet_name': "(self.guid + '-pub-subnet')", 'router_name': "(self.guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=self.guid + '-pub-net',\n subnet_name=self.guid + '-pub-subnet', router_name=self.guid +\n '-pub-router', external_net=self.ext_net_name)\n", (59123, 59274), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((62222, 62354), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (62240, 62354), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((62415, 62508), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (62434, 62508), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((62708, 62782), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': "(self.guid + '-name')", 'description': '"""hello group"""'}), "(name=self.guid + '-name', description='hello group')\n", (62729, 62782), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((62858, 62913), 'snaps.openstack.create_security_group.OpenStackSecurityGroup', 'OpenStackSecurityGroup', (['self.os_creds', 'sec_grp_settings'], {}), '(self.os_creds, sec_grp_settings)\n', (62880, 62913), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((63715, 63847), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (63733, 63847), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((63908, 64001), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (63927, 64001), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((64201, 64275), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': "(self.guid + '-name')", 'description': '"""hello group"""'}), "(name=self.guid + '-name', description='hello group')\n", (64222, 64275), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((64351, 64406), 'snaps.openstack.create_security_group.OpenStackSecurityGroup', 'OpenStackSecurityGroup', (['self.os_creds', 'sec_grp_settings'], {}), '(self.os_creds, sec_grp_settings)\n', (64373, 64406), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((65311, 65385), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': "(self.guid + '-name')", 'description': '"""hello group"""'}), "(name=self.guid + '-name', description='hello group')\n", (65332, 65385), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((65461, 65516), 'snaps.openstack.create_security_group.OpenStackSecurityGroup', 'OpenStackSecurityGroup', (['self.os_creds', 'sec_grp_settings'], {}), '(self.os_creds, sec_grp_settings)\n', (65483, 65516), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((65718, 65900), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'security_group_names': '[sec_grp_settings.name]', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, security_group_names=[sec_grp_settings.name],\n port_settings=[self.port_settings])\n', (65736, 65900), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((65969, 66062), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (65988, 66062), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((66886, 66960), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': "(self.guid + '-name')", 'description': '"""hello group"""'}), "(name=self.guid + '-name', description='hello group')\n", (66907, 66960), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((67036, 67091), 'snaps.openstack.create_security_group.OpenStackSecurityGroup', 'OpenStackSecurityGroup', (['self.os_creds', 'sec_grp_settings'], {}), '(self.os_creds, sec_grp_settings)\n', (67058, 67091), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((67293, 67425), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (67311, 67425), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((67486, 67579), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (67505, 67579), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((68421, 68495), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': "(self.guid + '-name')", 'description': '"""hello group"""'}), "(name=self.guid + '-name', description='hello group')\n", (68442, 68495), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((68571, 68626), 'snaps.openstack.create_security_group.OpenStackSecurityGroup', 'OpenStackSecurityGroup', (['self.os_creds', 'sec_grp_settings'], {}), '(self.os_creds, sec_grp_settings)\n', (68593, 68626), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((68828, 69010), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'security_group_names': '[sec_grp_settings.name]', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, security_group_names=[sec_grp_settings.name],\n port_settings=[self.port_settings])\n', (68846, 69010), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((69079, 69172), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (69098, 69172), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((71634, 71671), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (71656, 71671), False, 'from snaps.openstack.utils import nova_utils\n'), ((71694, 71867), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-pub-net')", 'subnet_name': "(guid + '-pub-subnet')", 'router_name': "(guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=guid + '-pub-net', subnet_name\n =guid + '-pub-subnet', router_name=guid + '-pub-router', external_net=\n self.ext_net_name)\n", (71729, 71867), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((75390, 75522), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (75408, 75522), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((75679, 75772), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (75698, 75772), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((76930, 77043), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(self.guid + '-priv-net')", 'subnet_name': "(self.guid + '-priv-subnet')"}), "(net_name=self.guid + '-priv-net',\n subnet_name=self.guid + '-priv-subnet')\n", (76965, 77043), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((77094, 77191), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'network_name': 'self.priv_net_config.network_settings.name'}), '(name=self.port_1_name, network_name=self.priv_net_config.\n network_settings.name)\n', (77106, 77191), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((79533, 79622), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (79570, 79622), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((80197, 80245), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (80211, 80245), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((80311, 80443), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (80329, 80443), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((80504, 80597), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (80523, 80597), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((81123, 81212), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (81160, 81212), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((81787, 81835), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (81801, 81835), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((81901, 82033), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (81919, 82033), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((82094, 82187), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (82113, 82187), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((82528, 82587), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name'}), '(name=self.image_name)\n', (82565, 82587), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((82630, 82678), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (82644, 82678), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((83188, 83320), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (83206, 83320), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((83381, 83474), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'test_image_creator.image_settings'], {}), '(self.os_creds, instance_settings, test_image_creator.\n image_settings)\n', (83400, 83474), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((83887, 83946), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name'}), '(name=self.image_name)\n', (83924, 83946), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((83989, 84037), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (84003, 84037), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((84332, 84394), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'image_metadata': 'metadata'}), '(image_metadata=metadata)\n', (84369, 84394), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((84429, 84479), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'test_image_settings'], {}), '(self.os_creds, test_image_settings)\n', (84443, 84479), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((84537, 84669), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (84555, 84669), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((84730, 84804), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'test_image_settings'], {}), '(self.os_creds, instance_settings, test_image_settings)\n', (84749, 84804), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((85293, 85379), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.\n tmpDir)\n', (85312, 85379), False, 'from snaps import file_utils\n'), ((85411, 85498), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.\n tmpDir)\n', (85430, 85498), False, 'from snaps import file_utils\n'), ((86455, 86544), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (86492, 86544), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((88512, 88560), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (88526, 88560), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((88626, 88758), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (88644, 88758), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((88819, 88912), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (88838, 88912), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((89416, 89502), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.\n tmpDir)\n', (89435, 89502), False, 'from snaps import file_utils\n'), ((89534, 89621), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.\n tmpDir)\n', (89553, 89621), False, 'from snaps import file_utils\n'), ((89823, 89912), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (89860, 89912), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((91881, 91929), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (91895, 91929), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((92133, 92265), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (92151, 92265), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((92326, 92419), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (92345, 92419), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((92940, 93026), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.\n tmpDir)\n', (92959, 93026), False, 'from snaps import file_utils\n'), ((93058, 93145), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.\n tmpDir)\n', (93077, 93145), False, 'from snaps import file_utils\n'), ((93381, 93470), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (93418, 93470), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((95439, 95487), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (95453, 95487), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((95691, 95823), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (95709, 95823), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((95884, 95977), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (95903, 95977), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((96287, 96373), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_KERNEL_IMAGE_URL, self.\n tmpDir)\n', (96306, 96373), False, 'from snaps import file_utils\n'), ((96405, 96492), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_RAMDISK_IMAGE_URL, self.\n tmpDir)\n', (96424, 96492), False, 'from snaps import file_utils\n'), ((96728, 96817), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (96765, 96817), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((96855, 96903), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (96869, 96903), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((97413, 97545), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings])\n', (97431, 97545), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((97606, 97699), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'test_image_creator.image_settings'], {}), '(self.os_creds, instance_settings, test_image_creator.\n image_settings)\n', (97625, 97699), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((98503, 98540), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (98525, 98540), False, 'from snaps.openstack.utils import nova_utils\n'), ((99714, 99809), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'image_name', 'image_metadata': 'self.image_metadata'}), '(name=image_name, image_metadata=self.\n image_metadata)\n', (99751, 99809), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((107293, 107330), 'snaps.openstack.utils.nova_utils.nova_client', 'nova_utils.nova_client', (['self.os_creds'], {}), '(self.os_creds)\n', (107315, 107330), False, 'from snaps.openstack.utils import nova_utils\n'), ((107359, 107459), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': "(guid + '-image')", 'image_metadata': 'self.image_metadata'}), "(name=guid + '-image', image_metadata=\n self.image_metadata)\n", (107396, 107459), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((107490, 107663), 'snaps.openstack.tests.openstack_tests.get_priv_net_config', 'openstack_tests.get_priv_net_config', ([], {'net_name': "(guid + '-pub-net')", 'subnet_name': "(guid + '-pub-subnet')", 'router_name': "(guid + '-pub-router')", 'external_net': 'self.ext_net_name'}), "(net_name=guid + '-pub-net', subnet_name\n =guid + '-pub-subnet', router_name=guid + '-pub-router', external_net=\n self.ext_net_name)\n", (107525, 107663), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((111313, 111493), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]', 'volume_names': '[self.volume_settings1.name]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings], volume_names=\n [self.volume_settings1.name])\n', (111331, 111493), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((111562, 111655), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (111581, 111655), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((112233, 112441), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'self.vm_inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'port_settings': '[self.port_settings]', 'volume_names': '[self.volume_settings1.name, self.volume_settings2.name]'}), '(name=self.vm_inst_name, flavor=self.flavor_creator.\n flavor_settings.name, port_settings=[self.port_settings], volume_names=\n [self.volume_settings1.name, self.volume_settings2.name])\n', (112251, 112441), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((112536, 112629), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance_settings, self.image_creator.\n image_settings)\n', (112555, 112629), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((113719, 113740), 're.search', 're.search', (['ip', 'output'], {}), '(ip, output)\n', (113728, 113740), False, 'import re\n'), ((114834, 114847), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (114844, 114847), False, 'import time\n'), ((2259, 2279), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {}), '()\n', (2277, 2279), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((2517, 2547), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': '"""foo"""'}), "(name='foo')\n", (2535, 2547), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((2660, 2702), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'config': "{'name': 'foo'}"}), "(config={'name': 'foo'})\n", (2678, 2702), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((2810, 2854), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': '"""foo"""', 'flavor': '"""bar"""'}), "(name='foo', flavor='bar')\n", (2828, 2854), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((2974, 3033), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'config': "{'name': 'foo', 'flavor': 'bar'}"}), "(config={'name': 'foo', 'flavor': 'bar'})\n", (2992, 3033), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((8804, 8824), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), '()\n', (8822, 8824), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9057, 9087), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""'}), "(name='foo')\n", (9075, 9087), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9200, 9237), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), "(**{'name': 'foo'})\n", (9218, 9237), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9343, 9390), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""', 'port_name': '"""bar"""'}), "(name='foo', port_name='bar')\n", (9361, 9390), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9508, 9565), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), "(**{'name': 'foo', 'port_name': 'bar'})\n", (9526, 9565), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9673, 9722), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': '"""foo"""', 'router_name': '"""bar"""'}), "(name='foo', router_name='bar')\n", (9691, 9722), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((9842, 9901), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {}), "(**{'name': 'foo', 'router_name': 'bar'})\n", (9860, 9901), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((13897, 13945), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (13911, 13945), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((14099, 14169), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.priv_net_config.network_settings'], {}), '(self.os_creds, self.priv_net_config.network_settings)\n', (14115, 14169), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((17779, 17827), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (17793, 17827), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((18288, 18348), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'net_config.network_settings'], {}), '(self.os_creds, net_config.network_settings)\n', (18304, 18348), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((18442, 18527), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': "(guid + '-port')", 'network_name': 'net_config.network_settings.name'}), "(name=guid + '-port', network_name=net_config.network_settings.name\n )\n", (18454, 18527), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((20438, 20506), 'snaps.openstack.utils.nova_utils.get_server', 'nova_utils.get_server', (['self.nova'], {'vm_inst_settings': 'instance_settings'}), '(self.nova, vm_inst_settings=instance_settings)\n', (20459, 20506), False, 'from snaps.openstack.utils import nova_utils\n'), ((20699, 20767), 'snaps.openstack.utils.nova_utils.get_server', 'nova_utils.get_server', (['self.nova'], {'vm_inst_settings': 'instance_settings'}), '(self.nova, vm_inst_settings=instance_settings)\n', (20720, 20767), False, 'from snaps.openstack.utils import nova_utils\n'), ((22424, 22472), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (22438, 22472), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((22626, 22695), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.pub_net_config.network_settings'], {}), '(self.os_creds, self.pub_net_config.network_settings)\n', (22642, 22695), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((22818, 22885), 'snaps.openstack.create_router.OpenStackRouter', 'OpenStackRouter', (['self.os_creds', 'self.pub_net_config.router_settings'], {}), '(self.os_creds, self.pub_net_config.router_settings)\n', (22833, 22885), False, 'from snaps.openstack.create_router import OpenStackRouter, RouterSettings\n'), ((23634, 23744), 'snaps.openstack.create_security_group.SecurityGroupRuleSettings', 'SecurityGroupRuleSettings', ([], {'sec_grp_name': 'sec_grp_name', 'direction': 'Direction.ingress', 'protocol': 'Protocol.icmp'}), '(sec_grp_name=sec_grp_name, direction=Direction.\n ingress, protocol=Protocol.icmp)\n', (23659, 23744), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((23852, 23999), 'snaps.openstack.create_security_group.SecurityGroupRuleSettings', 'SecurityGroupRuleSettings', ([], {'sec_grp_name': 'sec_grp_name', 'direction': 'Direction.ingress', 'protocol': 'Protocol.tcp', 'port_range_min': '(22)', 'port_range_max': '(22)'}), '(sec_grp_name=sec_grp_name, direction=Direction.\n ingress, protocol=Protocol.tcp, port_range_min=22, port_range_max=22)\n', (23877, 23999), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((25205, 25241), 'os.remove', 'os.remove', (['self.keypair_pub_filepath'], {}), '(self.keypair_pub_filepath)\n', (25214, 25241), False, 'import os\n'), ((25310, 25347), 'os.remove', 'os.remove', (['self.keypair_priv_filepath'], {}), '(self.keypair_priv_filepath)\n', (25319, 25347), False, 'import os\n'), ((34265, 34313), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (34279, 34313), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((34467, 34532), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.net_config.network_settings'], {}), '(self.os_creds, self.net_config.network_settings)\n', (34483, 34532), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((45419, 45495), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.admin_os_creds', 'self.priv_net_config.network_settings'], {}), '(self.admin_os_creds, self.priv_net_config.network_settings)\n', (45435, 45495), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((45922, 45970), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (45936, 45970), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((48063, 48211), 'snaps.openstack.create_instance.VmInstanceSettings', 'VmInstanceSettings', ([], {'name': 'inst_name', 'flavor': 'self.flavor_creator.flavor_settings.name', 'availability_zone': 'zone', 'port_settings': '[port_settings]'}), '(name=inst_name, flavor=self.flavor_creator.\n flavor_settings.name, availability_zone=zone, port_settings=[port_settings]\n )\n', (48081, 48211), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((48294, 48393), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.admin_os_creds', 'instance_settings', 'self.image_creator.image_settings'], {}), '(self.admin_os_creds, instance_settings, self.\n image_creator.image_settings)\n', (48313, 48393), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((51046, 51094), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (51060, 51094), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((52760, 52870), 'snaps.openstack.create_security_group.SecurityGroupRuleSettings', 'SecurityGroupRuleSettings', ([], {'sec_grp_name': 'sec_grp_name', 'direction': 'Direction.ingress', 'protocol': 'Protocol.icmp'}), '(sec_grp_name=sec_grp_name, direction=Direction.\n ingress, protocol=Protocol.icmp)\n', (52785, 52870), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((52978, 53125), 'snaps.openstack.create_security_group.SecurityGroupRuleSettings', 'SecurityGroupRuleSettings', ([], {'sec_grp_name': 'sec_grp_name', 'direction': 'Direction.ingress', 'protocol': 'Protocol.tcp', 'port_range_min': '(22)', 'port_range_max': '(22)'}), '(sec_grp_name=sec_grp_name, direction=Direction.\n ingress, protocol=Protocol.tcp, port_range_min=22, port_range_max=22)\n', (53003, 53125), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((54302, 54338), 'os.remove', 'os.remove', (['self.keypair_pub_filepath'], {}), '(self.keypair_pub_filepath)\n', (54311, 54338), False, 'import os\n'), ((54407, 54444), 'os.remove', 'os.remove', (['self.keypair_priv_filepath'], {}), '(self.keypair_priv_filepath)\n', (54416, 54444), False, 'import os\n'), ((59639, 59687), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (59653, 59687), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((59841, 59901), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'net_config.network_settings'], {}), '(self.os_creds, net_config.network_settings)\n', (59857, 59901), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((60338, 60428), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': "(self.guid + '-port')", 'network_name': 'net_config.network_settings.name'}), "(name=self.guid + '-port', network_name=net_config.\n network_settings.name)\n", (60350, 60428), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((72763, 72852), 'snaps.openstack.tests.openstack_tests.cirros_image_settings', 'openstack_tests.cirros_image_settings', ([], {'name': 'self.image_name', 'image_metadata': 'metadata'}), '(name=self.image_name, image_metadata=\n metadata)\n', (72800, 72852), False, 'from snaps.openstack.tests import openstack_tests, validation_utils\n'), ((73197, 73242), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'image_settings'], {}), '(self.os_creds, image_settings)\n', (73211, 73242), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((73655, 73715), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'net_config.network_settings'], {}), '(self.os_creds, net_config.network_settings)\n', (73671, 73715), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((73809, 73894), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': "(guid + '-port')", 'network_name': 'net_config.network_settings.name'}), "(name=guid + '-port', network_name=net_config.network_settings.name\n )\n", (73821, 73894), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((76513, 76540), 'os.path.exists', 'os.path.exists', (['self.tmpDir'], {}), '(self.tmpDir)\n', (76527, 76540), False, 'import os\n'), ((76554, 76578), 'os.makedirs', 'os.makedirs', (['self.tmpDir'], {}), '(self.tmpDir)\n', (76565, 76578), False, 'import os\n'), ((77290, 77364), 'snaps.file_utils.download', 'file_utils.download', (['openstack_tests.CIRROS_DEFAULT_IMAGE_URL', 'self.tmpDir'], {}), '(openstack_tests.CIRROS_DEFAULT_IMAGE_URL, self.tmpDir)\n', (77309, 77364), False, 'from snaps import file_utils\n'), ((77447, 77517), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.priv_net_config.network_settings'], {}), '(self.os_creds, self.priv_net_config.network_settings)\n', (77463, 77517), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((79059, 79086), 'os.path.exists', 'os.path.exists', (['self.tmpDir'], {}), '(self.tmpDir)\n', (79073, 79086), False, 'import os\n'), ((79091, 79117), 'os.path.isdir', 'os.path.isdir', (['self.tmpDir'], {}), '(self.tmpDir)\n', (79104, 79117), False, 'import os\n'), ((79131, 79157), 'shutil.rmtree', 'shutil.rmtree', (['self.tmpDir'], {}), '(self.tmpDir)\n', (79144, 79157), False, 'import shutil\n'), ((82859, 82954), 'snaps.openstack.create_image.ImageSettings', 'ImageSettings', ([], {'name': 'image_settings.name', 'image_user': 'image_settings.image_user', 'exists': '(True)'}), '(name=image_settings.name, image_user=image_settings.\n image_user, exists=True)\n', (82872, 82954), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((97084, 97179), 'snaps.openstack.create_image.ImageSettings', 'ImageSettings', ([], {'name': 'image_settings.name', 'image_user': 'image_settings.image_user', 'exists': '(True)'}), '(name=image_settings.name, image_user=image_settings.\n image_user, exists=True)\n', (97097, 97179), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((99892, 99940), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (99906, 99940), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((101355, 101430), 'snaps.openstack.create_router.RouterSettings', 'RouterSettings', ([], {'name': "(self.guid + '-pub-router')", 'port_settings': 'port_settings'}), "(name=self.guid + '-pub-router', port_settings=port_settings)\n", (101369, 101430), False, 'from snaps.openstack.create_router import OpenStackRouter, RouterSettings\n'), ((101510, 101571), 'snaps.openstack.create_router.OpenStackRouter', 'create_router.OpenStackRouter', (['self.os_creds', 'router_settings'], {}), '(self.os_creds, router_settings)\n', (101539, 101571), False, 'from snaps.openstack import create_network, create_router\n'), ((102044, 102154), 'snaps.openstack.create_security_group.SecurityGroupRuleSettings', 'SecurityGroupRuleSettings', ([], {'sec_grp_name': 'sec_grp_name', 'direction': 'Direction.ingress', 'protocol': 'Protocol.icmp'}), '(sec_grp_name=sec_grp_name, direction=Direction.\n ingress, protocol=Protocol.icmp)\n', (102069, 102154), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((106074, 106168), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance1_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance1_settings, self.image_creator.\n image_settings)\n', (106093, 106168), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((106224, 106318), 'snaps.openstack.create_instance.OpenStackVmInstance', 'OpenStackVmInstance', (['self.os_creds', 'instance2_settings', 'self.image_creator.image_settings'], {}), '(self.os_creds, instance2_settings, self.image_creator.\n image_settings)\n', (106243, 106318), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((108232, 108280), 'snaps.openstack.create_image.OpenStackImage', 'OpenStackImage', (['self.os_creds', 'os_image_settings'], {}), '(self.os_creds, os_image_settings)\n', (108246, 108280), False, 'from snaps.openstack.create_image import OpenStackImage, ImageSettings\n'), ((108740, 108800), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'net_config.network_settings'], {}), '(self.os_creds, net_config.network_settings)\n', (108756, 108800), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((108894, 108979), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': "(guid + '-port')", 'network_name': 'net_config.network_settings.name'}), "(name=guid + '-port', network_name=net_config.network_settings.name\n )\n", (108906, 108979), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((109044, 109097), 'snaps.openstack.create_volume.OpenStackVolume', 'OpenStackVolume', (['self.os_creds', 'self.volume_settings1'], {}), '(self.os_creds, self.volume_settings1)\n', (109059, 109097), False, 'from snaps.openstack.create_volume import OpenStackVolume, VolumeSettings\n'), ((109203, 109256), 'snaps.openstack.create_volume.OpenStackVolume', 'OpenStackVolume', (['self.os_creds', 'self.volume_settings2'], {}), '(self.os_creds, self.volume_settings2)\n', (109218, 109256), False, 'from snaps.openstack.create_volume import OpenStackVolume, VolumeSettings\n'), ((111761, 111829), 'snaps.openstack.utils.nova_utils.get_server', 'nova_utils.get_server', (['self.nova'], {'vm_inst_settings': 'instance_settings'}), '(self.nova, vm_inst_settings=instance_settings)\n', (111782, 111829), False, 'from snaps.openstack.utils import nova_utils\n'), ((112735, 112803), 'snaps.openstack.utils.nova_utils.get_server', 'nova_utils.get_server', (['self.nova'], {'vm_inst_settings': 'instance_settings'}), '(self.nova, vm_inst_settings=instance_settings)\n', (112756, 112803), False, 'from snaps.openstack.utils import nova_utils\n'), ((113594, 113605), 'time.time', 'time.time', ([], {}), '()\n', (113603, 113605), False, 'import time\n'), ((13021, 13033), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (13031, 13033), False, 'import uuid\n'), ((14362, 14466), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(1)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=10, vcpus=1,\n metadata=self.flavor_metadata)\n", (14376, 14466), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((17066, 17078), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (17076, 17078), False, 'import uuid\n'), ((18049, 18153), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (18063, 18153), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((21357, 21369), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (21367, 21369), False, 'import uuid\n'), ((23077, 23181), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (23091, 23181), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((23336, 23468), 'snaps.openstack.create_keypairs.KeypairSettings', 'KeypairSettings', ([], {'name': 'self.keypair_name', 'public_filepath': 'self.keypair_pub_filepath', 'private_filepath': 'self.keypair_priv_filepath'}), '(name=self.keypair_name, public_filepath=self.\n keypair_pub_filepath, private_filepath=self.keypair_priv_filepath)\n', (23351, 23468), False, 'from snaps.openstack.create_keypairs import OpenStackKeypair, KeypairSettings\n'), ((24285, 24355), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': 'sec_grp_name', 'rule_settings': '[rule1, rule2]'}), '(name=sec_grp_name, rule_settings=[rule1, rule2])\n', (24306, 24355), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((33474, 33486), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (33484, 33486), False, 'import uuid\n'), ((34725, 34829), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (34739, 34829), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((44779, 44791), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (44789, 44791), False, 'import uuid\n'), ((45688, 45791), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(512)', 'disk': '(1)', 'vcpus': '(1)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=512, disk=1, vcpus=1,\n metadata=self.flavor_metadata)\n", (45702, 45791), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((49864, 49876), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (49874, 49876), False, 'import uuid\n'), ((50826, 50838), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (50836, 50838), False, 'import uuid\n'), ((51263, 51332), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.pub_net_config.network_settings'], {}), '(self.os_creds, self.pub_net_config.network_settings)\n', (51279, 51332), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((51432, 51502), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.priv_net_config.network_settings'], {}), '(self.os_creds, self.priv_net_config.network_settings)\n', (51448, 51502), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((51661, 51728), 'snaps.openstack.create_router.OpenStackRouter', 'OpenStackRouter', (['self.os_creds', 'self.pub_net_config.router_settings'], {}), '(self.os_creds, self.pub_net_config.router_settings)\n', (51676, 51728), False, 'from snaps.openstack.create_router import OpenStackRouter, RouterSettings\n'), ((51787, 51855), 'snaps.openstack.create_router.OpenStackRouter', 'OpenStackRouter', (['self.os_creds', 'self.priv_net_config.router_settings'], {}), '(self.os_creds, self.priv_net_config.router_settings)\n', (51802, 51855), False, 'from snaps.openstack.create_router import OpenStackRouter, RouterSettings\n'), ((52133, 52242), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(self.guid + '-flavor-name')", 'ram': '(512)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=self.guid + '-flavor-name', ram=512, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (52147, 52242), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((52457, 52589), 'snaps.openstack.create_keypairs.KeypairSettings', 'KeypairSettings', ([], {'name': 'self.keypair_name', 'public_filepath': 'self.keypair_pub_filepath', 'private_filepath': 'self.keypair_priv_filepath'}), '(name=self.keypair_name, public_filepath=self.\n keypair_pub_filepath, private_filepath=self.keypair_priv_filepath)\n', (52472, 52589), False, 'from snaps.openstack.create_keypairs import OpenStackKeypair, KeypairSettings\n'), ((53411, 53481), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': 'sec_grp_name', 'rule_settings': '[rule1, rule2]'}), '(name=sec_grp_name, rule_settings=[rule1, rule2])\n', (53432, 53481), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((58608, 58620), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (58618, 58620), False, 'import uuid\n'), ((60094, 60203), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(self.guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=self.guid + '-flavor-name', ram=256, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (60108, 60203), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((71526, 71538), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (71536, 71538), False, 'import uuid\n'), ((73416, 73520), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (73430, 73520), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((76437, 76449), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (76447, 76449), False, 'import uuid\n'), ((77704, 77778), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(self.guid + '-flavor-name')", 'ram': '(256)', 'disk': '(10)', 'vcpus': '(1)'}), "(name=self.guid + '-flavor-name', ram=256, disk=10, vcpus=1)\n", (77718, 77778), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((98850, 98862), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (98860, 98862), False, 'import uuid\n'), ((99672, 99684), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (99682, 99684), False, 'import uuid\n'), ((100109, 100159), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.net_config_1'], {}), '(self.os_creds, self.net_config_1)\n', (100125, 100159), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((100259, 100309), 'snaps.openstack.create_network.OpenStackNetwork', 'OpenStackNetwork', (['self.os_creds', 'self.net_config_2'], {}), '(self.os_creds, self.net_config_2)\n', (100275, 100309), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((100474, 100724), 'snaps.openstack.create_network.PortSettings', 'create_network.PortSettings', ([], {'name': "(self.guid + '-router-port1')", 'ip_addrs': "[{'subnet_name': self.net_config_1.subnet_settings[0].name, 'ip':\n static_gateway_ip1}]", 'network_name': 'self.net_config_1.name', 'project_name': 'self.os_creds.project_name'}), "(name=self.guid + '-router-port1', ip_addrs=[{\n 'subnet_name': self.net_config_1.subnet_settings[0].name, 'ip':\n static_gateway_ip1}], network_name=self.net_config_1.name, project_name\n =self.os_creds.project_name)\n", (100501, 100724), False, 'from snaps.openstack import create_network, create_router\n'), ((100907, 101157), 'snaps.openstack.create_network.PortSettings', 'create_network.PortSettings', ([], {'name': "(self.guid + '-router-port2')", 'ip_addrs': "[{'subnet_name': self.net_config_2.subnet_settings[0].name, 'ip':\n static_gateway_ip2}]", 'network_name': 'self.net_config_2.name', 'project_name': 'self.os_creds.project_name'}), "(name=self.guid + '-router-port2', ip_addrs=[{\n 'subnet_name': self.net_config_2.subnet_settings[0].name, 'ip':\n static_gateway_ip2}], network_name=self.net_config_2.name, project_name\n =self.os_creds.project_name)\n", (100934, 101157), False, 'from snaps.openstack import create_network, create_router\n'), ((101763, 101872), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(self.guid + '-flavor-name')", 'ram': '(512)', 'disk': '(10)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=self.guid + '-flavor-name', ram=512, disk=10, vcpus=2,\n metadata=self.flavor_metadata)\n", (101777, 101872), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((102348, 102411), 'snaps.openstack.create_security_group.SecurityGroupSettings', 'SecurityGroupSettings', ([], {'name': 'sec_grp_name', 'rule_settings': '[rule1]'}), '(name=sec_grp_name, rule_settings=[rule1])\n', (102369, 102411), False, 'from snaps.openstack.create_security_group import SecurityGroupSettings, OpenStackSecurityGroup, SecurityGroupRuleSettings, Direction, Protocol\n'), ((107216, 107228), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (107226, 107228), False, 'import uuid\n'), ((108502, 108605), 'snaps.openstack.create_flavor.FlavorSettings', 'FlavorSettings', ([], {'name': "(guid + '-flavor-name')", 'ram': '(256)', 'disk': '(1)', 'vcpus': '(2)', 'metadata': 'self.flavor_metadata'}), "(name=guid + '-flavor-name', ram=256, disk=1, vcpus=2,\n metadata=self.flavor_metadata)\n", (108516, 108605), False, 'from snaps.openstack.create_flavor import OpenStackFlavor, FlavorSettings\n'), ((27482, 27614), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': 'self.floating_ip_name', 'port_name': 'self.port_1_name', 'router_name': 'self.pub_net_config.router_settings.name'}), '(name=self.floating_ip_name, port_name=self.port_1_name,\n router_name=self.pub_net_config.router_settings.name)\n', (27500, 27614), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((28706, 28838), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': 'self.floating_ip_name', 'port_name': 'self.port_1_name', 'router_name': 'self.pub_net_config.router_settings.name'}), '(name=self.floating_ip_name, port_name=self.port_1_name,\n router_name=self.pub_net_config.router_settings.name)\n', (28724, 28838), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((30168, 30300), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': 'self.floating_ip_name', 'port_name': 'self.port_1_name', 'router_name': 'self.pub_net_config.router_settings.name'}), '(name=self.floating_ip_name, port_name=self.port_1_name,\n router_name=self.pub_net_config.router_settings.name)\n', (30186, 30300), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((31722, 31854), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': 'self.floating_ip_name', 'port_name': 'self.port_1_name', 'router_name': 'self.pub_net_config.router_settings.name'}), '(name=self.floating_ip_name, port_name=self.port_1_name,\n router_name=self.pub_net_config.router_settings.name)\n', (31740, 31854), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((57069, 57201), 'snaps.openstack.create_instance.FloatingIpSettings', 'FloatingIpSettings', ([], {'name': 'self.floating_ip_name', 'port_name': 'self.port_1_name', 'router_name': 'self.pub_net_config.router_settings.name'}), '(name=self.floating_ip_name, port_name=self.port_1_name,\n router_name=self.pub_net_config.router_settings.name)\n', (57087, 57201), False, 'from snaps.openstack.create_instance import VmInstanceSettings, OpenStackVmInstance, FloatingIpSettings, VmInstanceSettingsError, FloatingIpSettingsError\n'), ((99195, 99300), 'snaps.openstack.create_network.SubnetSettings', 'create_network.SubnetSettings', ([], {'cidr': 'cidr1', 'name': "(self.guid + '-subnet1')", 'gateway_ip': 'static_gateway_ip1'}), "(cidr=cidr1, name=self.guid + '-subnet1',\n gateway_ip=static_gateway_ip1)\n", (99224, 99300), False, 'from snaps.openstack import create_network, create_router\n'), ((99469, 99574), 'snaps.openstack.create_network.SubnetSettings', 'create_network.SubnetSettings', ([], {'cidr': 'cidr2', 'name': "(self.guid + '-subnet2')", 'gateway_ip': 'static_gateway_ip2'}), "(cidr=cidr2, name=self.guid + '-subnet2',\n gateway_ip=static_gateway_ip2)\n", (99498, 99574), False, 'from snaps.openstack import create_network, create_router\n'), ((105153, 105346), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_1_name', 'ip_addrs': "[{'subnet_name': self.net_config_1.subnet_settings[0].name, 'ip': self.ip1}]", 'network_name': 'self.network_creators[0].network_settings.name'}), "(name=self.port_1_name, ip_addrs=[{'subnet_name': self.\n net_config_1.subnet_settings[0].name, 'ip': self.ip1}], network_name=\n self.network_creators[0].network_settings.name)\n", (105165, 105346), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n'), ((105695, 105888), 'snaps.openstack.create_network.PortSettings', 'PortSettings', ([], {'name': 'self.port_2_name', 'ip_addrs': "[{'subnet_name': self.net_config_2.subnet_settings[0].name, 'ip': self.ip2}]", 'network_name': 'self.network_creators[1].network_settings.name'}), "(name=self.port_2_name, ip_addrs=[{'subnet_name': self.\n net_config_2.subnet_settings[0].name, 'ip': self.ip2}], network_name=\n self.network_creators[1].network_settings.name)\n", (105707, 105888), False, 'from snaps.openstack.create_network import OpenStackNetwork, PortSettings, NetworkSettings\n')] |
#!/usr/bin/env python3.4
#
import sys
import time
import RPi.GPIO as GPIO
sys.path.append('../../lib')
from GPIOACNode import GPIOACNode
class GPIOOut(GPIOACNode):
name="Unknown"
def setup(self):
super().setup()
if self.cnf.offline:
self.logger.info("TEST: configuring hardware.")
return
self.logger.debug("Initializing hardware.")
GPIO.setmode(GPIO.BCM)
GPIO.setup(self.pin, GPIO.OUT)
GPIO.output(self.pin, False)
def gpioout(self,onOff):
self.logger.info("GPIO[{}]::{} {}".format(self.pin, self.name, onOff))
if self.cnf.offline:
return
GPIO.output(self.pin, onOff)
def on_exit(self):
GPIO.output(self.pin,False)
GPIO.setup(self.pin, 0)
super().on_exit()
| [
"RPi.GPIO.setup",
"sys.path.append",
"RPi.GPIO.output",
"RPi.GPIO.setmode"
] | [((76, 104), 'sys.path.append', 'sys.path.append', (['"""../../lib"""'], {}), "('../../lib')\n", (91, 104), False, 'import sys\n'), ((373, 395), 'RPi.GPIO.setmode', 'GPIO.setmode', (['GPIO.BCM'], {}), '(GPIO.BCM)\n', (385, 395), True, 'import RPi.GPIO as GPIO\n'), ((400, 430), 'RPi.GPIO.setup', 'GPIO.setup', (['self.pin', 'GPIO.OUT'], {}), '(self.pin, GPIO.OUT)\n', (410, 430), True, 'import RPi.GPIO as GPIO\n'), ((435, 463), 'RPi.GPIO.output', 'GPIO.output', (['self.pin', '(False)'], {}), '(self.pin, False)\n', (446, 463), True, 'import RPi.GPIO as GPIO\n'), ((614, 642), 'RPi.GPIO.output', 'GPIO.output', (['self.pin', 'onOff'], {}), '(self.pin, onOff)\n', (625, 642), True, 'import RPi.GPIO as GPIO\n'), ((669, 697), 'RPi.GPIO.output', 'GPIO.output', (['self.pin', '(False)'], {}), '(self.pin, False)\n', (680, 697), True, 'import RPi.GPIO as GPIO\n'), ((701, 724), 'RPi.GPIO.setup', 'GPIO.setup', (['self.pin', '(0)'], {}), '(self.pin, 0)\n', (711, 724), True, 'import RPi.GPIO as GPIO\n')] |
from multiworld.core.wrapper_env import ProxyEnv
from gym.spaces import Box, Dict
import numpy as np
def transform_obs(obs):
obs = np.transpose(obs, (2, 0, 1))
obs = np.float32(obs)
flat_obs = obs.flatten() / 255.0
flat_obs = dict(image=flat_obs)
return flat_obs
class FlatEnv(ProxyEnv):
def __init__(
self,
wrapped_env,
):
self.quick_init(locals())
super(FlatEnv, self).__init__(wrapped_env)
self.wrapped_env.image_shape = (64, 64)
total_dim = 64*64*3
img_space = Box(low=0.0, high=1.0, shape=(total_dim,))
spaces = {'image': img_space}
self.observation_space = Dict(spaces)
def step(self, action):
obs, reward, done, info = self.wrapped_env.step(action)
obs = transform_obs(obs)
return obs, reward, done, info
def reset(self):
obs = self.wrapped_env.reset()
obs = transform_obs(obs)
return obs
| [
"gym.spaces.Dict",
"numpy.transpose",
"numpy.float32",
"gym.spaces.Box"
] | [((137, 165), 'numpy.transpose', 'np.transpose', (['obs', '(2, 0, 1)'], {}), '(obs, (2, 0, 1))\n', (149, 165), True, 'import numpy as np\n'), ((176, 191), 'numpy.float32', 'np.float32', (['obs'], {}), '(obs)\n', (186, 191), True, 'import numpy as np\n'), ((562, 604), 'gym.spaces.Box', 'Box', ([], {'low': '(0.0)', 'high': '(1.0)', 'shape': '(total_dim,)'}), '(low=0.0, high=1.0, shape=(total_dim,))\n', (565, 604), False, 'from gym.spaces import Box, Dict\n'), ((676, 688), 'gym.spaces.Dict', 'Dict', (['spaces'], {}), '(spaces)\n', (680, 688), False, 'from gym.spaces import Box, Dict\n')] |
# Generated by Django 3.0.1 on 2020-02-10 02:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('recipes', '0001_initial'),
('main', '0016_auto_20191222_2333'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='saved_recipes',
field=models.ManyToManyField(related_name='saved_by', to='recipes.Recipe'),
),
]
| [
"django.db.models.ManyToManyField"
] | [((378, 446), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""saved_by"""', 'to': '"""recipes.Recipe"""'}), "(related_name='saved_by', to='recipes.Recipe')\n", (400, 446), False, 'from django.db import migrations, models\n')] |
from typing import Any, Dict
import logging
logger = logging.getLogger('presignup')
def presignup_handler(event: Dict[str, Any], context: Dict[str, Any]) -> Dict[str, Any]:
"""
Executed after the user submits the signup payload
:param event:
:param context:
:return:
"""
# TODO: handle pre-signup
return event
| [
"logging.getLogger"
] | [((54, 84), 'logging.getLogger', 'logging.getLogger', (['"""presignup"""'], {}), "('presignup')\n", (71, 84), False, 'import logging\n')] |
from nanpy.arduinoboard import ArduinoObject
from nanpy.arduinoboard import (arduinoobjectmethod, returns)
class ColorSensor(ArduinoObject):
cfg_h_name = 'USE_ColorSensor'
def __init__(self, s0, s1, s2, s3, sensorInput, connection=None):
ArduinoObject.__init__(self, connection=connection)
self.id = self.call('new', s0, s1, s2, s3, sensorInput)
@returns(int)
@arduinoobjectmethod
def get_color(self, color):
pass
| [
"nanpy.arduinoboard.returns",
"nanpy.arduinoboard.ArduinoObject.__init__"
] | [((378, 390), 'nanpy.arduinoboard.returns', 'returns', (['int'], {}), '(int)\n', (385, 390), False, 'from nanpy.arduinoboard import arduinoobjectmethod, returns\n'), ((256, 307), 'nanpy.arduinoboard.ArduinoObject.__init__', 'ArduinoObject.__init__', (['self'], {'connection': 'connection'}), '(self, connection=connection)\n', (278, 307), False, 'from nanpy.arduinoboard import ArduinoObject\n')] |
import sys
import os
import subprocess
import numpy as np
import time, subprocess
from time import sleep
RUN_SCRIPT = 'run_video.py'
RANDOM_SEED = 42
RUN_TIME = 480 #280 # sec
ABR_ALGO = [ 'RL','robustMPC']#[ 'RL','BB','FIXED','robustMPC', 'fastMPC', 'BOLA', 'FESTIVE','RB']
# [ 'RL','BB','FIXED','robustMPC', 'fastMPC', 'BOLA', 'FESTIVE','RB']
REPEAT_TIME = 1
def main():
np.random.seed(RANDOM_SEED)
with open('./chrome_retry_log', 'wb') as log:
log.write('chrome retry log\n')
log.flush()
for rt in xrange(REPEAT_TIME):
np.random.shuffle(ABR_ALGO)
for abr_algo in ABR_ALGO:
while True:
###################################
current_path=os.getcwd()
new_path="../../pantheon/"
os.chdir(new_path)
os.system("mkdir ./data/")
os.system("mkdir ./logx/")
os.system("echo "+ abr_algo + str(rt) +"> logx/now.txt")
os.system("touch ./data/DONE.txt")
time.sleep(6)
ttt='"python third_party/indigo/env/proxy.py 9999 INDIGO;exit;exec bash"'
os.system("gnome-terminal -e 'sh -c "+ttt+"'")
os.chdir(current_path)
##################################
script = 'python ' + RUN_SCRIPT + ' ' + \
abr_algo + ' ' + str(RUN_TIME) + ' ' + str(rt)
print(script)
proc = subprocess.Popen(script,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True)
(out, err) = proc.communicate()
print(out)
print(err)
if out == 'done\n' or out == 'timeout\n' or out == 'HTTP Error 400: Bad Request\n' or out == 'Timeout\n':
break
else:
log.write(abr_algo + '_' + str(rt) + '\n')
log.write(out + '\n')
log.flush()
###################################
current_path=os.getcwd()
new_path="../../pantheon/"
os.chdir(new_path)
os.system("touch ./data/DONE.txt")
time.sleep(6)
os.chdir(current_path)
##################################
if __name__ == '__main__':
main()
| [
"subprocess.Popen",
"time.sleep",
"os.getcwd",
"os.chdir",
"numpy.random.seed",
"os.system",
"numpy.random.shuffle"
] | [((383, 410), 'numpy.random.seed', 'np.random.seed', (['RANDOM_SEED'], {}), '(RANDOM_SEED)\n', (397, 410), True, 'import numpy as np\n'), ((544, 571), 'numpy.random.shuffle', 'np.random.shuffle', (['ABR_ALGO'], {}), '(ABR_ALGO)\n', (561, 571), True, 'import numpy as np\n'), ((1734, 1745), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1743, 1745), False, 'import os\n'), ((1781, 1799), 'os.chdir', 'os.chdir', (['new_path'], {}), '(new_path)\n', (1789, 1799), False, 'import os\n'), ((1804, 1838), 'os.system', 'os.system', (['"""touch ./data/DONE.txt"""'], {}), "('touch ./data/DONE.txt')\n", (1813, 1838), False, 'import os\n'), ((1843, 1856), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (1853, 1856), False, 'import time, subprocess\n'), ((1861, 1883), 'os.chdir', 'os.chdir', (['current_path'], {}), '(current_path)\n', (1869, 1883), False, 'import os\n'), ((677, 688), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (686, 688), False, 'import os\n'), ((726, 744), 'os.chdir', 'os.chdir', (['new_path'], {}), '(new_path)\n', (734, 744), False, 'import os\n'), ((750, 776), 'os.system', 'os.system', (['"""mkdir ./data/"""'], {}), "('mkdir ./data/')\n", (759, 776), False, 'import os\n'), ((782, 808), 'os.system', 'os.system', (['"""mkdir ./logx/"""'], {}), "('mkdir ./logx/')\n", (791, 808), False, 'import os\n'), ((877, 911), 'os.system', 'os.system', (['"""touch ./data/DONE.txt"""'], {}), "('touch ./data/DONE.txt')\n", (886, 911), False, 'import os\n'), ((917, 930), 'time.sleep', 'time.sleep', (['(6)'], {}), '(6)\n', (927, 930), False, 'import time, subprocess\n'), ((1015, 1065), 'os.system', 'os.system', (['("gnome-terminal -e \'sh -c " + ttt + "\'")'], {}), '("gnome-terminal -e \'sh -c " + ttt + "\'")\n', (1024, 1065), False, 'import os\n'), ((1067, 1089), 'os.chdir', 'os.chdir', (['current_path'], {}), '(current_path)\n', (1075, 1089), False, 'import os\n'), ((1264, 1352), 'subprocess.Popen', 'subprocess.Popen', (['script'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'shell': '(True)'}), '(script, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n shell=True)\n', (1280, 1352), False, 'import time, subprocess\n')] |
#!/usr/bin/env python
from collections import defaultdict
import locale
# pylint: disable=W0311
class Stream(object):
"""
A one-way stream of sets of HTTP headers.
For our purposes, a stream is the unit that gets compressed; i.e., the
headers in it have a shared context.
"""
def __init__(self, name, messages, msg_type, procs):
self.name = name # identifier for the stream; e.g., "example.com reqs"
self.messages = messages
self.msg_type = msg_type # "req" or "res"
self.procs = procs # order of processors
self.lname = max([len(p) for p in procs]) # longest processor name
self.sizes = defaultdict(list)
self.ratios = defaultdict(list)
self.times = defaultdict(list)
def record_result(self, proc_name, size, ratio, time):
"Record the results of processing, by proc_name."
self.sizes[proc_name].append(size)
self.ratios[proc_name].append(ratio)
self.times[proc_name].append(time)
def print_header(self, output):
"Print a header for the summary to output."
output("* %s: %i %s messages\n" %
(self.name, len(self.messages), self.msg_type))
def print_summary(self, output, baseline):
"Print a summary of the stream to output, compared to baseline."
lines = []
baseline_size = sum(self.sizes[baseline])
for proc in self.procs:
ttl_size = sum(self.sizes[proc])
ttl_time = sum(self.times[proc])
pretty_size = locale.format("%13d", ttl_size, grouping=True)
ratio = 1.0 * ttl_size / baseline_size
try:
std = meanstdv(self.ratios[proc])[1]
except ZeroDivisionError:
std = 0
min_ratio = min(self.ratios[proc])
max_ratio = max(self.ratios[proc])
lines.append((proc, pretty_size, ttl_time, ratio, min_ratio, max_ratio, std))
output(' %%%ds size time | ratio min max std\n' % (self.lname + 9) % '')
fmt = ' %%%ds %%s %%5.2f | %%2.2f %%2.2f %%2.2f %%2.2f\n' % self.lname
for line in lines:
output(fmt % line)
output("\n")
def print_tsv_header(self, output):
"Print a TSV header to output."
header = "\t".join(["num", "name"] + self.procs)
output("%s\n" % header)
def print_tsv(self, output, count=0):
"Print the stream as TSV to output, using count as a counter."
lines = list(zip(*[self.sizes[proc] for proc in self.procs]))
for line in lines:
count += 1
output("\t".join([str(count), self.name] + [str(j) for j in line]))
output("\n")
return count
def __add__(self, other):
assert self.msg_type == other.msg_type
new = Stream('', self.messages, self.msg_type, self.procs)
new.messages.extend(other.messages) # NB: not great for memory
new.sizes = merge_dols(self.sizes, other.sizes)
new.ratios = merge_dols(self.ratios, other.ratios)
new.times = merge_dols(self.times, other.times)
new.procs = self.procs
new.lname = self.lname
return new
def __radd__(self, other):
new = Stream('', self.messages, self.msg_type, self.procs)
new.sizes = self.sizes
new.ratios = self.ratios
new.times = self.times
new.procs = self.procs
new.lname = self.lname
return new
def merge_dols(dol1, dol2):
"""
Merge two dictionaries of lists.
"""
result = dict(dol1, **dol2)
result.update((k, dol1[k] + dol2[k])
for k in set(dol1).intersection(dol2))
return result
def meanstdv(members):
"""
Calculate mean and standard deviation of data x[]:
mean = {\sum_i x_i \over n}
std = sqrt(\sum_i (x_i - mean)^2 \over n-1)
"""
from math import sqrt
num, mean, std = len(members), 0, 0
for item in members:
mean = mean + item
mean = mean / float(num)
for item in members:
std = std + (item - mean)**2
std = sqrt(std / float(num - 1))
return mean, std | [
"locale.format",
"collections.defaultdict"
] | [((631, 648), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (642, 648), False, 'from collections import defaultdict\n'), ((667, 684), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (678, 684), False, 'from collections import defaultdict\n'), ((702, 719), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (713, 719), False, 'from collections import defaultdict\n'), ((1428, 1474), 'locale.format', 'locale.format', (['"""%13d"""', 'ttl_size'], {'grouping': '(True)'}), "('%13d', ttl_size, grouping=True)\n", (1441, 1474), False, 'import locale\n')] |
import sys
from converter.converter_3ds import Converter3DS
if __name__ == '__main__':
converter = Converter3DS(sys.argv[1])
converter.convert(sys.argv[2])
| [
"converter.converter_3ds.Converter3DS"
] | [((104, 129), 'converter.converter_3ds.Converter3DS', 'Converter3DS', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (116, 129), False, 'from converter.converter_3ds import Converter3DS\n')] |
from network import *
from submission import *
from data.mapping import *
import numpy as np
import re
# Construct bayesian network
def construct_sample_network(data):
network = bayes_network(data)
# You need to list the nodes so that parents are introduced before children
# You can inspect data.mapping to see all the features
network.append_node(MEDICALSERV, "MEDICALSERV", [])
network.append_node(SCHOOLHYMN, "SCHOOLHYMN", [MEDICALSERV])
network.append_node(MILSERVICE, "MILSERVICE", [MEDICALSERV])
network.append_node(METROPOLIS, "METROPOLIS", [SCHOOLHYMN])
network.append_node(NATO, "NATO", [MILSERVICE])
network.append_node(SAIMAASEAL, "SAIMAASEAL", [SCHOOLHYMN, MILSERVICE])
return network
def is_answer_close(a, b, EPSILON = 1e-2):
return abs(a - b) <= EPSILON
# See that constructed CPT matches the example
def task_conditional_probability(data):
tests = [({SAIMAASEAL : 1}, {MILSERVICE : 1, SCHOOLHYMN: 1}, ["SAIMAASEAL", "MILSERVICE", "SCHOOLHYMN"], 0.857),
({NATO : 1}, {MILSERVICE : 0}, ["NATO", "-MILSERVICE"], 0.82),
({MEDICALSERV : 1}, {}, ["MEDICALSERV"], 0.128),
({SAIMAASEAL : 1}, {MILSERVICE : 0, SCHOOLHYMN: 1}, ["SAIMAASEAL", "-MILSERVICE", "SCHOOLHYMN"], 0.790)]
for query, conditions, fields, answer in tests:
prob = get_conditional_probability(data, query, conditions)
if is_answer_close(prob, answer):
print("correct probability P({}|{}) = {}".format(fields[0], " & ".join(fields[1:]), round(prob,3)))
else:
print("Conditional probability failed: got {}, true answer {}".format(prob, answer))
# See that constructed CPT matches the example
def task_cpt(data):
tests = [(SAIMAASEAL, [MILSERVICE, SCHOOLHYMN], ["SAIMAASEAL", "MILSERVICE", "SCHOOLHYMN"], {"0 0":0.587, "0 1":0.790, "1 0":0.834, "1 1":0.857}),]
for query, conditions, fields, answer in tests:
table = construct_probability_table(data, query, conditions)
print("Calculating CPT for P({}|{})".format(fields[0], " & ".join(fields[1:])))
print("{} : {}".format(" ".join(fields[1:]), fields[0]))
for key, probability in table.items():
assignments = re.findall(".([0-9]+):([0-1]).", key)
str_assignment = " ".join([val for _, val in assignments])
passed = "Correct" if is_answer_close(answer[str_assignment], probability) else "Not right probability, correct: {}".format(answer[str_assignment])
print("{} : {} <- {}".format(str_assignment, round(probability, 3), passed))
def test_brute_force(data, network):
tests = [([MILSERVICE], ([MEDICALSERV, SAIMAASEAL, METROPOLIS], [0, 0, 0]), ["MILSERVICE", "MEDICALSERV", "SAIMAASEAL", "METROPOLIS"], 0.183)]
for query, (E,e), fields, answer in tests:
prob = brute_force(network, query, E, e)
print("Calculating P({}|{})".format(fields[0], " & ".join(fields[1:])))
if is_answer_close(answer, prob):
print("Correct probability {}".format(round(prob, 3)))
else:
print("Wrong, true answer was {} while yours was {}".format(answer, round(prob, 3)))
def test_sampling(data, network):
tests = [([MILSERVICE], ([MEDICALSERV, SAIMAASEAL, METROPOLIS], [0, 0, 0]), ["MILSERVICE", "MEDICALSERV", "SAIMAASEAL", "METROPOLIS"], 0.183)]
for query, (E,e), fields, answer in tests:
prob = [approximate_distribution(network, query, E, e) for _ in range(3)]
print("Calculating P({}|{})".format(fields[0], " & ".join(fields[1:])))
if any([is_answer_close(answer, p, EPSILON = 3e-2) for p in prob]):
print("Correct probability {}".format(round(np.average(prob), 3)))
else:
print("Wrong, true answer was {} while yours was {}".format(answer, round(np.average(prob), 3)))
def main():
# Load data
filename = "data/hs.txt"
data = np.loadtxt(filename, delimiter=" ")
# Construct same bayesian network as in the lecture
network = construct_sample_network(data)
print("\n===\nTesting conditional probability\n===")
task_conditional_probability(data)
print("\n===\n Making CPT\n===")
task_cpt(data)
print("\n===\nTesting brute force inference\n===")
test_brute_force(data, network)
print("\n===\nTesting sampling\n===")
test_sampling(data, network)
if __name__ == "__main__":
main() | [
"re.findall",
"numpy.loadtxt",
"numpy.average"
] | [((3902, 3937), 'numpy.loadtxt', 'np.loadtxt', (['filename'], {'delimiter': '""" """'}), "(filename, delimiter=' ')\n", (3912, 3937), True, 'import numpy as np\n'), ((2220, 2257), 're.findall', 're.findall', (['""".([0-9]+):([0-1])."""', 'key'], {}), "('.([0-9]+):([0-1]).', key)\n", (2230, 2257), False, 'import re\n'), ((3687, 3703), 'numpy.average', 'np.average', (['prob'], {}), '(prob)\n', (3697, 3703), True, 'import numpy as np\n'), ((3810, 3826), 'numpy.average', 'np.average', (['prob'], {}), '(prob)\n', (3820, 3826), True, 'import numpy as np\n')] |
# (c) Copyright 2014,2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from freezer_dr.monitors.common.driver import MonitorBaseDriver
CONF = cfg.CONF
class DummyDriver(MonitorBaseDriver):
"""A monitoring driver that returns a configured list of nodes as failed.
This can be useful for testing without actually shutting down the nodes.
The nodes that should be reported as failing, can be configured in the
monitoring section of the freezer_dr configuration file as follows:
kwargs = nodes_down:hostname1;hostname2
"""
_OPTS = [
cfg.ListOpt('nodes_down',
default=[],
required=True,
help="fake list of failed compute nodes.")
]
def __init__(self, backend_name, notifier):
super(DummyDriver, self).__init__(backend_name=backend_name,
notifier=notifier)
hostnames = self.conf.get('nodes_down', [])
self.nodes_down = [{'host': n} for n in hostnames]
def get_data(self):
return self.nodes_down
def get_metrics(self):
raise NotImplementedError()
def process_failed(self, nodes=None, wait=0):
return nodes
def analyze_nodes(self, nodes):
return nodes
def is_alive(self):
return True
def get_info(self):
return {
'name': 'Freezer DR Dummy Driver',
'version': 1.0,
'author': 'Hewlett-Packard Enterprise Development, L.P'
}
| [
"oslo_config.cfg.ListOpt"
] | [((1142, 1242), 'oslo_config.cfg.ListOpt', 'cfg.ListOpt', (['"""nodes_down"""'], {'default': '[]', 'required': '(True)', 'help': '"""fake list of failed compute nodes."""'}), "('nodes_down', default=[], required=True, help=\n 'fake list of failed compute nodes.')\n", (1153, 1242), False, 'from oslo_config import cfg\n')] |
"""
Handler responsible for returning aucote's scanners
"""
from api.storage_handler import StorageHandler
from scans.tools_scanner import ToolsScanner
from utils.time import parse_timestamp_to_time
class ScannersHandler(StorageHandler):
def get(self, scan=None):
"""
Handle get method and returns scanners information
Returns:
None - writes aucote status in JSON
"""
if not scan:
self.write(self.scanners())
return
self.write(self.scanner_status(scan))
def scanner_status(self, scan):
"""
Get scanner status
"""
scanner = self.get_scanner(name=scan)
if not scanner:
return self.not_found('Scanner not found')
if isinstance(scanner, ToolsScanner):
return self.internal_error('Security scanners are not implemented right now')
stats = {
'scan': scan,
'current_scan': scanner.scan.start,
'current_scan_human': parse_timestamp_to_time(scanner.scan.start),
'previous_scan': scanner.previous_scan,
'previous_scan_human': parse_timestamp_to_time(scanner.previous_scan),
'next_scan': scanner.next_scan,
'next_scan_human': parse_timestamp_to_time(scanner.next_scan),
'scanners': {protocol: [subscanner.command.NAME for subscanner in subscanners]
for protocol, subscanners in scanner.scanners.items()},
'status': scanner.status.value if scanner.status is not None else None,
'nodes': [str(node) for node in scanner.nodes]
}
return stats
def get_scanner(self, name):
for scanner in self.aucote.scanners:
if scanner.NAME == name:
return scanner
return None
def scanners(self):
"""
Get current status of aucote tasks
Returns:
dict
"""
return {
'scanners': [self.pretty_scanner(scanner) for scanner in self.aucote.scanners],
}
def pretty_scanner(self, scanner):
return {
'name': scanner.NAME,
'url': self._url_scanner(scanner.NAME)
}
| [
"utils.time.parse_timestamp_to_time"
] | [((1025, 1068), 'utils.time.parse_timestamp_to_time', 'parse_timestamp_to_time', (['scanner.scan.start'], {}), '(scanner.scan.start)\n', (1048, 1068), False, 'from utils.time import parse_timestamp_to_time\n'), ((1157, 1203), 'utils.time.parse_timestamp_to_time', 'parse_timestamp_to_time', (['scanner.previous_scan'], {}), '(scanner.previous_scan)\n', (1180, 1203), False, 'from utils.time import parse_timestamp_to_time\n'), ((1280, 1322), 'utils.time.parse_timestamp_to_time', 'parse_timestamp_to_time', (['scanner.next_scan'], {}), '(scanner.next_scan)\n', (1303, 1322), False, 'from utils.time import parse_timestamp_to_time\n')] |
from datetime import datetime, time
def time_converter(time_to_read):
hour, minute = time_to_read.split(':')
hour, minute = int(hour), int(minute)
AM_or_PM = 'p.m.' if hour >= 12 else 'a.m.'
return time(hour=hour, minute=minute).strftime(f'%-I:%M {AM_or_PM}')
if __name__ == '__main__':
print("Example:")
print(time_converter('12:30'))
#These "asserts" using only for self-checking and not necessary for auto-testing
assert time_converter('12:30') == '12:30 p.m.'
assert time_converter('09:00') == '9:00 a.m.'
assert time_converter('23:15') == '11:15 p.m.'
print("Coding complete? Click 'Check' to earn cool rewards!")
| [
"datetime.time"
] | [((215, 245), 'datetime.time', 'time', ([], {'hour': 'hour', 'minute': 'minute'}), '(hour=hour, minute=minute)\n', (219, 245), False, 'from datetime import datetime, time\n')] |
from Crypto.PublicKey import RSA
PRIVATE_KEY = "-----BEGIN RSA PRIVATE KEY-----\n" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"<KEY>cPuvyU14Z+wQKB<KEY>" \
"zdddhAZpW/ctVFi1gIou+0YEPg4HLBmAtbBqNjwd85+2OBCajOghpe4oPTM4ULua\n" \
"kAt8/gI2xLh1vD/EG2JmBfNMLoEQ1Pkn5dt0LuAGqDdEtLpdGRJyM1aeVw5xJRmx\n" \
"<KEY>" \
"<KEY>" \
"<KEY>" \
"-----END RSA PRIVATE KEY-----"
# Not related to above private key
PUBKEY = "-----<KEY>" \
"<KEY>" \
"8iIxE/2wQEgMUL\nAeVbJtAriXM4zydL7c91agFMJu1aHp0lxzoH8I13xzUetGMutR1tbcfWvoQvPAoU\n89uAz5j/DFMhWrkVEKGeWt1" \
"YtHMmJqpYqR6961GDlwRuUsOBsLgLLVohzlBsTBSn\n3580o2E6G3DEaX0Az9WB9ylhNeV/L/PP3c5htpEyoPZSy1pgtut6TRYQwC8wns" \
"qO\nbVIbFBkrKoaRDyVCnpMuKdDNLZqOOfhzas+SWRAby6D8VsXpPi/DpeS9XkX0o/uH\nJ9N49GuYMSUGC8gKtaddD13pUqS/9rpSvLD" \
"rrDQe5Lhuyusgd28wgEAPCTmM3pEt\nQnlxEeEmFMIn3OBLbEDw5TFE7iED0z7a4dAkqqz8KCGEt12e1Kz7ujuOVMxJxzk6\nNtwt40Sq" \
"EOPcdsGHAA+hqzJnXUihXfmtmFkropaCxM2f+Ha0bOQdDDui5crcV3sX\njShmcqN6YqFzmoPK0XM9P1qC+lfL2Mz6bHC5p9M8/FtcM46" \
"hCj1TF/tl8zaZxtHP\nOrMuFJy4j4yAsyVy3ddO69ECAwEAAQ==\n-----END PUBLIC KEY-----\n"
SIGNATURE = "A/vVRxM3V1ceEH1JrnPOaIZGM3gMjw/fnT9TgUh3poI4q9eH95AIoig+3eTA8XFuGvuo0tivxci4e0NJ1VLVkl/aqp8rvBNrRI1RQk" \
"n2WVF6zk15Gq6KSia/wyzyiJHGxNGM8oFY4qPfNp6K+8ydUti22J11tVBEvQn+7FPAoloF2Xz1waK48ZZCFs8Rxzj+4jlz1PmuXCnT" \
"j7v7GYS1Rb6sdFz4nBSuVk5X8tGOSXIRYxPgmtsDRMRrvDeEK+v3OY6VnT8dLTckS0qCwTRUULub1CGwkz/2mReZk/M1W4EbUnugF5" \
"ptslmFqYDYJZM8PA/g89EKVpkx2gaFbsC4KXocWnxHNiue18rrFQ5hMnDuDRiRybLnQkxXbE/HDuLdnognt2S5wRshPoZmhe95v3qq" \
"/5nH/GX1D7VmxEEIG9fX+XX+Vh9kzO9bLbwoJZwm50zXxCvrLlye/2JU5Vd2Hbm4aMuAyRAZiLS/EQcBlsts4DaFu4txe60HbXSh6n" \
"qNofGkusuzZnCd0VObOpXizrI8xNQzZpjJEB5QqE2gbCC2YZNdOS0eBGXw42dAXa/QV3jZXGES7DdQlqPqqT3YjcMFLiRrWQR8cl4h" \
"JIBRpV5piGyLmMMKYrWu7hQSrdRAEL3K6mNZZU6/yoG879LjtQbVwaFGPeT29B4zBE97FIo="
SIGNATURE2 = "Xla/AlirMihx72hehGMgpKILRUA2ZkEhFgVc65sl80iN+F62yQdSikGyUQVL+LaGNUgmzgK0zEahamfaMFep/9HE2FWuXlTCM+ZXx" \
"OhGWUnjkGW9vi41/Turm7ALzaJoFm1f3Iv4nh1sRD1jySzlZvYwrq4LwmgZ8r0M+Q6xUSIIJfgS8Zjmp43strKo28vKT+DmUKu9Fg" \
"jZWjW3S8WPPJFO0UqA0b1UQspmNLZOVxsNpa0OCM1pofJvT09n6xG+byV30Bed27Kw+D3fzfYq5xvohyeCyliTq8LHnOykecki3Y2" \
"Pvl1qsxxBehlwc/WH8yIUiwC2Du6zY61tN3LGgMAoIFl40Roo1z/I7YfOy4ZCukOGqqyiLdjoXxIVQqqsPtKsrVXS+A9OQ+sVESgw" \
"f8jeEIw/KXLVB/aEyrZJXQR1pBfqkOTCSnAfZVBSjJyxhanS/8iGmnRV5zz3auYMLR9aA8QHjV/VZOj0Bxhuba9VIzJlY9XoUt5Vs" \
"h3uILJM3uVJzSjlZV+Jw3O+NdQFnZyh7m1+eJUMQJ8i0Sr3sMLsdb9me/I0HueXCa5eBHAoTtAyQgS4uN4NMhvpqrB/lQCx7pqnkt" \
"xiCO/bUEZONQjWrvJT+EfD+I0UMFtPFiGDzJ0yi0Ah7LxSTGEGPFZHH5RgsJA8lJwGMCUtc9Cpy8A="
SIGNATURE3 = "hVdLwsWXe6yVy88m9H1903+Bj/DjSGsYL+ZIpEz+G6u/aVx6QfsvnWHzasjqN8SU+brHfL0c8KrapWcACO+jyCuXlHMZb9zKmJkHR" \
"FSOiprCJ3tqNpv/4MIa9CXu0YDqnLHBSyxS01luKw3EqgpWPQdYcqDpOkjjTOq45dQC0PGHA/DXjP7LBptV9AwW200LIcL5Li8tDU" \
"a8VSQybspDDfDpXU3+Xl5tJIBVS4ercPczp5B39Cwne4q2gyj/Y5RdIoX5RMqmFhfucw1he38T1oRC9AHTJqj4CBcDt7gc6jPHuzk" \
"N7u1eUf0IK3+KTDKsCkkoHcGaoxT+NeWcS8Ki1A=="
XML = "<comment><guid>0dd40d800db1013514416c626dd55703</guid><parent_guid>69ab2b83-aa69-4456-ad0a-dd669" \
"7f54714</parent_guid><text>Woop Woop</text><diaspora_handle><EMAIL></diaspora_handle></comment>"
XML2 = "<comment><guid>d728fe501584013514526c626dd55703</guid><parent_guid>d641bd35-8142-414e-a12d-f956cc2c1bb9" \
"</parent_guid><text>What about the mystical problem with 👍 (pt2 with more logging)</text>" \
"<diaspora_handle><EMAIL></diaspora_handle></comment>"
def get_dummy_private_key():
return RSA.importKey(PRIVATE_KEY)
| [
"Crypto.PublicKey.RSA.importKey"
] | [((4205, 4231), 'Crypto.PublicKey.RSA.importKey', 'RSA.importKey', (['PRIVATE_KEY'], {}), '(PRIVATE_KEY)\n', (4218, 4231), False, 'from Crypto.PublicKey import RSA\n')] |
import os
import os.path
import mss
cnt = 1
while cnt < 5:
with mss.mss() as sct:
filename = sct.shot(mon = -1, output = 'screenshot_{}.png'.format(str(cnt)))
print(filename)
cnt += 1 | [
"mss.mss"
] | [((69, 78), 'mss.mss', 'mss.mss', ([], {}), '()\n', (76, 78), False, 'import mss\n')] |
# -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2018 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from GraphWaveResponse import GraphWaveResponse
from burp import IHttpListener
from threading import Lock
class GraphWaveHttpListener(IHttpListener):
"""The GraphWaveHttpListener listens to all spider packages flowing through Burp Suite.
Attributes:
enabled (bool): If the extension should be listening.
"""
enabled = False
def __init__(self, config, graph, refreshInterface, helpers):
"""Construct the HTTP listener.
Args:
config (:class:`GraphWaveConfig`): The GraphWave config.
grpah (:class:`GraphWave`): The GraphWave graph.
refreshInterface (func): Function to refresh the GUI.
helpers (obj): The Burp Suite helpers (this is a Java class).
"""
self._config = config
self._graph = graph
self._refreshInterface = refreshInterface
self._helpers = helpers
self._lock = Lock()
def setEnabled(self, enabled):
"""Enable or disable the HTTP listener.
Args:
enabled (bool): True if it should be listening, False otherwise
"""
self.enabled = enabled
def processHttpMessage(self, toolFlag, messageIsRequest, requestResponse):
"""The function that is called if Burp Suite processes an HTTP message.
Args:
toolFlag (int): The Burp Suite callback constant (https://portswigger.net/burp/extender/api/constant-values.html).
messageIsRequest (bool): True if the message is a request, False if it is a response.
requestResponse (obj): The request or response.
"""
# If disabled, stop.
if not self.enabled:
return None
# If not a spider response, stop.
if toolFlag != 8 or messageIsRequest:
return None
request = self._helpers.analyzeRequest(requestResponse)
response = self._helpers.analyzeResponse(requestResponse.getResponse())
html = self._helpers.bytesToString(requestResponse.getResponse())
self._lock.acquire()
if self.shouldContinueWithMessage(request, response, html):
response = GraphWaveResponse(request.getUrl().toString(), html)
if self._graph.addResponse(response) == False:
self._config.exclude(request.getUrl().toString())
else:
self._config.include(request.getUrl().toString())
else:
self._config.include(request.getUrl().toString())
self._refreshInterface()
self._lock.release()
def shouldContinueWithMessage(self, request, response, html):
"""Check if a message could be ignored. A message can't be ignored if
the graph can't check if it has similar code flows, or if the response
contains certain characteristics that should always be scanned.
Args:
request (obj): The request that was processed.
response (obj): The response that was processed.
html (str): The HTML body of the response.
Returns:
bool: True if this response could possibly be ignored.
"""
if "html" not in response.getStatedMimeType().decode("UTF-8").lower():
# Only scan HTML
return False
if int(response.getStatusCode()) != 200:
# Only scan HTTP 200 OK
return False
if "Index of" in html and "Parent Directory" in html and "Last modified" in html:
# Do not continue with directory listing
return False
return True
| [
"threading.Lock"
] | [((2047, 2053), 'threading.Lock', 'Lock', ([], {}), '()\n', (2051, 2053), False, 'from threading import Lock\n')] |
import pandas as pd
import numpy as np
from sklearn.metrics import mean_squared_error
import sys
import json
def load_data(learner_file, learner_workspace):
y_learner = pd.read_csv(learner_file)
y_actual = pd.read_csv(learner_workspace + 'actual.csv')
return y_learner,y_actual
def validate_submission(y_learner,y_actual):
error = 0
msg = "No error"
if(list(y_learner.columns) != list(y_actual.columns)):
msg = "The column names of the submission file do not match the submission format."
error = 1
if(y_learner.shape[0] != y_actual.shape[0]):
msg = "The submission file should contain {} records".format(y_actual.shape[0])
error = 1
if(y_learner.shape[1] != y_actual.shape[1]):
msg = "The submission file should contain {} columns".format(y_actual.shape[1])
error = 1
return error,msg
def score_submission(y_learner,y_actual):
mse = mean_squared_error(y_learner.num_orders, y_actual.num_orders)
if mse<6500:
projected_points = 1
elif (mse>6500) and (mse<10000):
projected_points = 0.75
elif mse>10000:
projected_points = 0
return mse,projected_points
if __name__ == "__main__":
learner_file = sys.argv[1]
learner_workspace = sys.argv[2]
y_learner,y_actual = load_data(learner_file, learner_workspace)
err,msg = validate_submission(y_learner,y_actual)
if(err == 1):
result = json.dumps({'error_msg':msg})
print(result, end='')
else:
raw_score,projected_points = score_submission(y_learner,y_actual)
result = json.dumps({'raw_score': raw_score,'multiplier':projected_points})
print(result, end='') | [
"json.dumps",
"pandas.read_csv",
"sklearn.metrics.mean_squared_error"
] | [((175, 200), 'pandas.read_csv', 'pd.read_csv', (['learner_file'], {}), '(learner_file)\n', (186, 200), True, 'import pandas as pd\n'), ((216, 261), 'pandas.read_csv', 'pd.read_csv', (["(learner_workspace + 'actual.csv')"], {}), "(learner_workspace + 'actual.csv')\n", (227, 261), True, 'import pandas as pd\n'), ((926, 987), 'sklearn.metrics.mean_squared_error', 'mean_squared_error', (['y_learner.num_orders', 'y_actual.num_orders'], {}), '(y_learner.num_orders, y_actual.num_orders)\n', (944, 987), False, 'from sklearn.metrics import mean_squared_error\n'), ((1408, 1438), 'json.dumps', 'json.dumps', (["{'error_msg': msg}"], {}), "({'error_msg': msg})\n", (1418, 1438), False, 'import json\n'), ((1563, 1631), 'json.dumps', 'json.dumps', (["{'raw_score': raw_score, 'multiplier': projected_points}"], {}), "({'raw_score': raw_score, 'multiplier': projected_points})\n", (1573, 1631), False, 'import json\n')] |
"""
Peer into the outputs and resources of a stack
"""
import collections
import click
import halo
from ..utils import (accounts_regions_and_names, class_filter, plural,
set_stacks)
@click.command()
@accounts_regions_and_names
def peer(ctx, accounts, regions, names):
"""
Peer into the outputs and resources of a stack
"""
set_stacks(ctx)
count, found_stacks = class_filter(ctx.obj.stacks,
account=accounts,
region=regions,
name=names)
click.echo(f'Found {plural(count, "local stack")}\n')
describe_stacks = collections.defaultdict(dict)
to_change = []
for stack in sorted(found_stacks, key=lambda x: x.name):
ctx.obj.debug(
f'Found {stack.name} in region {stack.region} with account number {stack.account_id}'
)
click.secho(click.style('Outputs', bold=True))
stack_dict = stack.template.to_dict
print(stack_dict['Outputs'] if stack.template.
to_dict['Outputs'] else None)
for resource in stack.resources:
print(resource['LogicalResourceId'],
resource['PhysicalResourceId'], resource['ResourceStatus'],
resource.get('ResourceStatusReason', ''))
| [
"click.command",
"collections.defaultdict",
"click.style"
] | [((208, 223), 'click.command', 'click.command', ([], {}), '()\n', (221, 223), False, 'import click\n'), ((680, 709), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (703, 709), False, 'import collections\n'), ((942, 975), 'click.style', 'click.style', (['"""Outputs"""'], {'bold': '(True)'}), "('Outputs', bold=True)\n", (953, 975), False, 'import click\n')] |
import sys
import os
import numpy as np
import torch
import argparse
import pickle
proj_root = '.'
sys.path.insert(0, proj_root)
data_root = 'data'
model_root = 'models'
from gan.networks import Generator
from gan.proj_utils.local_utils import save_images
from gan.proj_utils.torch_utils import to_numpy, to_torch
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Gans')
parser.add_argument('--epoch', type=int, default=0,
help='load from epoch')
parser.add_argument('--model', type=str, default='',
help='model name')
parser.add_argument('--nb_interp', type=int, default=5,
help='nb_interp')
parser.add_argument('--mv1_ida', type=int, default=None,
help='Moving noise low id')
parser.add_argument('--mv1_idb', type=int, default=None,
help='Moving noise high id')
parser.add_argument('--mv2_ida', type=int, default=None,
help='Moving noise low id')
parser.add_argument('--mv2_idb', type=int, default=None,
help='Moving noise high id')
parser.add_argument('--mv3_ida', type=int, default=None,
help='Moving noise low id')
parser.add_argument('--mv3_idb', type=int, default=None,
help='Moving noise high id')
args = parser.parse_args()
epoch = args.epoch
model_name = args.model
nb_interp = args.nb_interp
mv1_ida = args.mv1_ida
mv1_idb = args.mv1_idb
mv2_ida = args.mv2_ida
mv2_idb = args.mv2_idb
mv3_ida = args.mv3_ida
mv3_idb = args.mv3_idb
if not mv1_ida:
mv1_ida = np.random.randint(0,100)
if not mv1_idb:
mv1_idb = np.random.randint(0,100)
if not mv2_ida:
mv2_ida = np.random.randint(0,100)
if not mv2_idb:
mv2_idb = np.random.randint(0,100)
if not mv3_ida:
mv3_ida = np.random.randint(0,100)
if not mv3_idb:
mv3_idb = np.random.randint(0,100)
# set file name
file = 'epoch_%d' % epoch
sample_name = file
z_file = file + '.pickle'
# cfg
data_name = 'birds'
# folders
model_name = '{}_{}'.format(model_name, data_name)
model_folder = os.path.join(model_root, model_name)
sample_folder = os.path.join(model_folder, sample_name)
# open noise tensor
with open(os.path.join(sample_folder, z_file), 'br') as f:
z_list = pickle.load(f)
# Load model
netG = Generator(tcode_dim=512, scode_dim=1024, emb_dim=128, hid_dim=128)
G_weightspath = os.path.join(model_folder, 'G_epoch{}.pth'.format(epoch))
netG.load_state_dict(torch.load(G_weightspath))
netG = netG.cuda()
netG.eval()
# get noise
n1a = z_list[0][mv1_ida].unsqueeze(0)
n1b = z_list[0][mv1_idb].unsqueeze(0)
n2a = z_list[1][mv2_ida].unsqueeze(0)
n2b = z_list[1][mv2_idb].unsqueeze(0)
n3a = z_list[2][mv3_ida].unsqueeze(0)
n3b = z_list[2][mv3_idb].unsqueeze(0)
# generation
for z in range(nb_interp):
vis_samples = [None] * nb_interp
for x in range(nb_interp):
column = np.empty(shape=(nb_interp, 3, 64, 64), dtype=np.float32)
for y in range(nb_interp):
# 3D interpolation
cx = x/(nb_interp)
cy = y/(nb_interp)
cz = z/(nb_interp)
z_mv1 = n1a * (1-cx) + n1b * (cx)
z_mv2 = n2a * (1-cy) + n2b * (cy)
z_mv3 = n3a * (1-cz) + n3b * (cz)
# get sample
f_image, _ = netG(z_list=[z_mv1, z_mv2, z_mv3])
np_fake = to_numpy(f_image)
column[y] = np_fake
vis_samples[x] = column
# save images
png_file = '3Dinterp__text=%dto%d__shape=%dto%d__background=%dto%d-%d.png' % \
(mv1_ida, mv1_idb, mv2_ida, mv2_idb, mv3_ida, mv3_idb, z)
save_images(vis_samples, save=not sample_folder == '',
save_path=os.path.join(sample_folder, png_file), dim_ordering='th')
print('Images saved at %s' % sample_folder) | [
"sys.path.insert",
"argparse.ArgumentParser",
"gan.networks.Generator",
"torch.load",
"gan.proj_utils.torch_utils.to_numpy",
"os.path.join",
"pickle.load",
"numpy.random.randint",
"numpy.empty"
] | [((100, 129), 'sys.path.insert', 'sys.path.insert', (['(0)', 'proj_root'], {}), '(0, proj_root)\n', (115, 129), False, 'import sys\n'), ((359, 402), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Gans"""'}), "(description='Gans')\n", (382, 402), False, 'import argparse\n'), ((2297, 2333), 'os.path.join', 'os.path.join', (['model_root', 'model_name'], {}), '(model_root, model_name)\n', (2309, 2333), False, 'import os\n'), ((2354, 2393), 'os.path.join', 'os.path.join', (['model_folder', 'sample_name'], {}), '(model_folder, sample_name)\n', (2366, 2393), False, 'import os\n'), ((2544, 2610), 'gan.networks.Generator', 'Generator', ([], {'tcode_dim': '(512)', 'scode_dim': '(1024)', 'emb_dim': '(128)', 'hid_dim': '(128)'}), '(tcode_dim=512, scode_dim=1024, emb_dim=128, hid_dim=128)\n', (2553, 2610), False, 'from gan.networks import Generator\n'), ((1729, 1754), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1746, 1754), True, 'import numpy as np\n'), ((1792, 1817), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1809, 1817), True, 'import numpy as np\n'), ((1855, 1880), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1872, 1880), True, 'import numpy as np\n'), ((1918, 1943), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1935, 1943), True, 'import numpy as np\n'), ((1981, 2006), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (1998, 2006), True, 'import numpy as np\n'), ((2044, 2069), 'numpy.random.randint', 'np.random.randint', (['(0)', '(100)'], {}), '(0, 100)\n', (2061, 2069), True, 'import numpy as np\n'), ((2499, 2513), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2510, 2513), False, 'import pickle\n'), ((2715, 2740), 'torch.load', 'torch.load', (['G_weightspath'], {}), '(G_weightspath)\n', (2725, 2740), False, 'import torch\n'), ((2433, 2468), 'os.path.join', 'os.path.join', (['sample_folder', 'z_file'], {}), '(sample_folder, z_file)\n', (2445, 2468), False, 'import os\n'), ((3196, 3252), 'numpy.empty', 'np.empty', ([], {'shape': '(nb_interp, 3, 64, 64)', 'dtype': 'np.float32'}), '(shape=(nb_interp, 3, 64, 64), dtype=np.float32)\n', (3204, 3252), True, 'import numpy as np\n'), ((3703, 3720), 'gan.proj_utils.torch_utils.to_numpy', 'to_numpy', (['f_image'], {}), '(f_image)\n', (3711, 3720), False, 'from gan.proj_utils.torch_utils import to_numpy, to_torch\n'), ((4062, 4099), 'os.path.join', 'os.path.join', (['sample_folder', 'png_file'], {}), '(sample_folder, png_file)\n', (4074, 4099), False, 'import os\n')] |
import itertools
import logging
import os
import re
from collections import ChainMap
from datetime import datetime
from typing import Text, Dict, List
from mongoengine import Document
from mongoengine.errors import DoesNotExist
from mongoengine.errors import NotUniqueError
from rasa.constants import DEFAULT_CONFIG_PATH, DEFAULT_DATA_PATH, DEFAULT_DOMAIN_PATH
from rasa.core.agent import Agent
from rasa.core.constants import INTENT_MESSAGE_PREFIX
from rasa.core.domain import InvalidDomain
from rasa.core.domain import SessionConfig
from rasa.core.events import Form, ActionExecuted, UserUttered
from rasa.core.slots import CategoricalSlot, FloatSlot
from rasa.core.training.structures import Checkpoint
from rasa.core.training.structures import STORY_START
from rasa.core.training.structures import StoryGraph, StoryStep, SlotSet
from rasa.data import get_core_nlu_files
from rasa.importers import utils
from rasa.importers.rasa import Domain, StoryFileReader
from rasa.nlu.training_data import Message, TrainingData
from rasa.nlu.training_data.formats.markdown import entity_regex
from rasa.train import DEFAULT_MODELS_PATH
from rasa.utils.endpoints import EndpointConfig
from rasa.utils.io import read_config_file
from bot_trainer.exceptions import AppException
from bot_trainer.utils import Utility
from .cache import InMemoryAgentCache
from .constant import (
DOMAIN,
SESSION_CONFIG,
STORY_EVENT,
REGEX_FEATURES,
LOOKUP_TABLE,
TRAINING_EXAMPLE,
RESPONSE,
ENTITY,
SLOTS,
MODEL_TRAINING_STATUS,
)
from .data_objects import (
Responses,
SessionConfigs,
Configs,
Endpoints,
Entities,
EntitySynonyms,
TrainingExamples,
Stories,
Actions,
Intents,
Forms,
LookupTables,
RegexFeatures,
Entity,
ResponseText,
ResponseCustom,
ResponseButton,
EndPointBot,
EndPointAction,
EndPointTracker,
Slots,
StoryEvents,
ModelTraining,
ModelDeployment
)
class MongoProcessor:
async def upload_and_save(
self,
nlu: bytes,
domain: bytes,
stories: bytes,
config: bytes,
bot: Text,
user: Text,
overwrite: bool = True,
):
"""Upload the training data to temporary path and then save into mongo."""
data_path = Utility.save_files(nlu, domain, stories, config)
await self.save_from_path(data_path, bot, overwrite, user)
Utility.delete_directory(data_path)
def download_files(self, bot: Text):
nlu = self.load_nlu(bot)
domain = self.load_domain(bot)
stories = self.load_stories(bot)
config = self.load_config(bot)
return Utility.create_zip_file(nlu, domain, stories, config, bot)
async def save_from_path(
self, path: Text, bot: Text, overwrite: bool = True, user="default"
):
""" This function reads the bot files, using the file path (input)
for a particular bot (input) and saves data into objects.
Eg. MongoProcessor.save_from_path(main_path,bot_name) """
try:
story_files, nlu_files = get_core_nlu_files(
os.path.join(path, DEFAULT_DATA_PATH)
)
nlu = utils.training_data_from_paths(nlu_files, "en")
domain = Domain.from_file(os.path.join(path, DEFAULT_DOMAIN_PATH))
domain.check_missing_templates()
story_steps = await StoryFileReader.read_from_files(story_files, domain)
config = read_config_file(os.path.join(path, DEFAULT_CONFIG_PATH))
if overwrite:
self.delete_bot_data(bot, user)
self.save_domain(domain, bot, user)
self.save_stories(story_steps, bot, user)
self.save_nlu(nlu, bot, user)
self.save_config(config, bot, user)
except InvalidDomain as e:
logging.info(e)
raise AppException(
"""Failed to validate yaml file.
Please make sure the file is initial and all mandatory parameters are specified"""
)
except Exception as e:
logging.info(e)
raise AppException(e)
def delete_bot_data(self, bot: Text, user: Text):
self.delete_domain(bot, user)
self.delete_stories(bot, user)
self.delete_nlu(bot, user)
self.delete_config(bot, user)
def save_nlu(self, nlu: TrainingData, bot: Text, user: Text):
""" saves the nlu data (input) of the bot (input) into respective objects.
Eg. story_files, nlu_files = get_core_nlu_files(os.path.join(main_bot_path, DEFAULT_DATA_PATH))
nlu = utils.training_data_from_paths(nlu_files, "en")
MongoProcessor.save_nlu(nlu,bot_name,user_name) """
self.__save_training_examples(nlu.training_examples, bot, user)
self.__save_entity_synonyms(nlu.entity_synonyms, bot, user)
self.__save_lookup_tables(nlu.lookup_tables, bot, user)
self.__save_regex_features(nlu.regex_features, bot, user)
def delete_nlu(self, bot: Text, user: Text):
'''perform soft delete of nlu data for particular bot'''
Utility.delete_document([TrainingExamples,
EntitySynonyms,
LookupTables,
RegexFeatures], user, bot)
def load_nlu(self, bot: Text) -> TrainingData:
""" loads nlu data of the bot (input) from respective objects.
Eg. MongoProcessor.load_nlu(bot_name) """
training_examples = self.__prepare_training_examples(bot)
entity_synonyms = self.__prepare_training_synonyms(bot)
lookup_tables = self.__prepare_training_lookup_tables(bot)
regex_features = self.__prepare_training_regex_features(bot)
return TrainingData(
training_examples=training_examples,
entity_synonyms=entity_synonyms,
lookup_tables=lookup_tables,
regex_features=regex_features,
)
def save_domain(self, domain: Domain, bot: Text, user: Text):
""" saves the domain data (input) of the bot (input) into respective objects.
Eg. domain = Domain.from_file(os.path.join(main_path, DEFAULT_DOMAIN_PATH))
MongoProcessor.save_domain(domain,bot_name,user_name) """
self.__save_intents(domain.intents, bot, user)
self.__save_domain_entities(domain.entities, bot, user)
self.__save_forms(domain.form_names, bot, user)
self.__save_actions(domain.user_actions, bot, user)
self.__save_responses(domain.templates, bot, user)
self.__save_slots(domain.slots, bot, user)
self.__save_session_config(domain.session_config, bot, user)
def delete_domain(self, bot: Text, user: Text):
'''perform soft delete on domain data for particular bot'''
Utility.delete_document([Intents,
Entities,
Forms,
Actions,
Responses,
Slots], bot, user)
def load_domain(self, bot: Text) -> Domain:
""" loads domain data of the bot (input) from respective objects.
Eg. MongoProcessor.load_domain(bot_name) """
domain_dict = {
DOMAIN.INTENTS.value: self.__prepare_training_intents(bot),
DOMAIN.ACTIONS.value: self.__prepare_training_actions(bot),
DOMAIN.SLOTS.value: self.__prepare_training_slots(bot),
DOMAIN.SESSION_CONFIG.value: self.__prepare_training_session_config(bot),
DOMAIN.RESPONSES.value: self.__prepare_training_responses(bot),
DOMAIN.FORMS.value: self.__prepare_training_forms(bot),
DOMAIN.ENTITIES.value: self.__prepare_training_domain_entities(bot),
}
return Domain.from_dict(domain_dict)
def save_stories(self, story_steps: List[StoryStep], bot: Text, user: Text):
""" saves the stories data (input) of the bot (input) into respective objects.
Eg. story_files, nlu_files = get_core_nlu_files(os.path.join(main_path, DEFAULT_DATA_PATH))
domain = Domain.from_file(os.path.join(path, DEFAULT_DOMAIN_PATH))
loop = asyncio.new_event_loop()
story_steps = loop.run_until_complete(StoryFileReader.read_from_files(story_files, domain))
MongoProcessor.save_stories(story_steps,bot_name,user_name) """
self.__save_stories(story_steps, bot, user)
def delete_stories(self, bot: Text, user: Text):
"""perform soft delete on stories data for particular bot"""
Utility.delete_document([Stories], bot, user)
def load_stories(self, bot: Text) -> StoryGraph:
""" loads the stories data of the bot (input) from the respective objects.
Eg. MongoProcessor.load_stories(bot_name) """
return self.__prepare_training_story(bot)
def __save_training_examples(self, training_examples, bot: Text, user: Text):
if training_examples:
new_examples = list(self.__extract_training_examples(training_examples, bot, user))
if new_examples:
TrainingExamples.objects.insert(
new_examples
)
def __extract_entities(self, entities):
for entity in entities:
entity_data = Entity(
start=entity[ENTITY.START.value],
end=entity[ENTITY.END.value],
value=entity[ENTITY.VALUE.value],
entity=entity[ENTITY.ENTITY.value],
)
yield entity_data
def __extract_training_examples(self, training_examples, bot: Text, user: Text):
saved_training_examples, _ = self.get_all_training_examples(bot)
for training_example in training_examples:
if training_example not in saved_training_examples:
training_data = TrainingExamples()
training_data.intent = training_example.data[TRAINING_EXAMPLE.INTENT.value]
training_data.text = training_example.text
training_data.bot = bot
training_data.user = user
if "entities" in training_example.data:
training_data.entities = list(
self.__extract_entities(
training_example.data[TRAINING_EXAMPLE.ENTITIES.value]
)
)
yield training_data
def __fetch_all_synonyms_value(self, bot: Text):
synonyms = list(EntitySynonyms.objects(bot=bot, status=True).aggregate([{
"$group": {
"_id": "$bot",
"values": {"$push": "$value"},
}
}]))
if synonyms:
return synonyms[0]['values']
else:
return []
def __extract_synonyms(self, synonyms, bot: Text, user: Text):
saved_synonyms = self.__fetch_all_synonyms_value(bot)
for key, value in synonyms.items():
if key not in saved_synonyms:
yield EntitySynonyms(bot=bot, synonym=value, value=key, user=user)
def __save_entity_synonyms(self, entity_synonyms, bot: Text, user: Text):
if entity_synonyms:
new_synonyms = list(self.__extract_synonyms(entity_synonyms, bot, user))
if new_synonyms:
EntitySynonyms.objects.insert(
new_synonyms
)
def fetch_synonyms(self, bot: Text, status=True):
""" Loads the entity synonyms of the bot (input).
Eg. MongoProcessor.fetch_synonyms(bot_name) """
entitySynonyms = EntitySynonyms.objects(bot=bot, status=status)
for entitySynonym in entitySynonyms:
yield {entitySynonym.value: entitySynonym.synonym}
def __prepare_training_synonyms(self, bot: Text):
synonyms = list(self.fetch_synonyms(bot))
return dict(ChainMap(*synonyms))
def __prepare_entities(self, entities):
for entity in entities:
yield entity.to_mongo().to_dict()
def fetch_training_examples(self, bot: Text, status=True):
""" Returns the training examples (questions/sentences) of the bot (input).
Eg. MongoProcessor.fetch_training_examples(bot_name) """
trainingExamples = TrainingExamples.objects(bot=bot, status=status)
for trainingExample in trainingExamples:
message = Message(trainingExample.text)
message.data = {TRAINING_EXAMPLE.INTENT.value: trainingExample.intent}
if trainingExample.entities:
message.data[TRAINING_EXAMPLE.ENTITIES.value] = list(
self.__prepare_entities(trainingExample.entities)
)
yield message
def __prepare_training_examples(self, bot: Text):
return list(self.fetch_training_examples(bot))
def __fetch_all_lookup_values(self, bot: Text):
lookup_tables = list(LookupTables.objects(bot=bot, status=True).aggregate([{
"$group": {
"_id": "$bot",
"values": {"$push": "$value"},
}
}]))
if lookup_tables:
return lookup_tables[0]['values']
else:
return []
def __extract_lookup_tables(self, lookup_tables, bot: Text, user: Text):
saved_lookup = self.__fetch_all_lookup_values(bot)
for lookup_table in lookup_tables:
name = lookup_table[LOOKUP_TABLE.NAME.value]
for element in lookup_table[LOOKUP_TABLE.ELEMENTS.value]:
if element not in saved_lookup:
yield LookupTables(name=name, value=element, bot=bot, user=user)
def __save_lookup_tables(self, lookup_tables, bot: Text, user: Text):
if lookup_tables:
new_lookup = list(self.__extract_lookup_tables(lookup_tables, bot, user))
if new_lookup:
LookupTables.objects.insert(
new_lookup
)
def fetch_lookup_tables(self, bot: Text, status=True):
""" Returns the lookup tables of the bot (input).
Eg. MongoProcessor.fetch_lookup_tables(bot_name) """
lookup_tables = LookupTables.objects(bot=bot, status=status).aggregate(
[{"$group": {"_id": "$name", "elements": {"$push": "$value"}}}]
)
for lookup_table in lookup_tables:
yield {
LOOKUP_TABLE.NAME.value: lookup_table["_id"],
LOOKUP_TABLE.ELEMENTS.value: lookup_table["elements"],
}
def __prepare_training_lookup_tables(self, bot: Text):
return list(self.fetch_lookup_tables(bot))
def __fetch_all_regex_patterns(self, bot: Text):
regex_patterns = list(RegexFeatures.objects(bot=bot, status=True).aggregate([{
"$group": {
"_id": "$bot",
"patterns": {"$push": "$pattern"},
}
}]))
if regex_patterns:
return regex_patterns[0]['patterns']
else:
return []
def __extract_regex_features(self, regex_features, bot: Text, user: Text):
saved_regex_patterns = self.__fetch_all_regex_patterns(bot)
for regex_feature in regex_features:
if regex_feature['pattern'] not in saved_regex_patterns:
regex_data = RegexFeatures(**regex_feature)
regex_data.bot = bot
regex_data.user = user
yield regex_data
def __save_regex_features(self, regex_features, bot: Text, user: Text):
if regex_features:
new_regex_patterns = list(self.__extract_regex_features(regex_features, bot, user))
if new_regex_patterns:
RegexFeatures.objects.insert(
new_regex_patterns
)
def fetch_regex_features(self, bot: Text, status=True):
""" Returns the regex features of the bot (input).
Eg. MongoProcessor.fetch_regex_features(bot_name) """
regex_features = RegexFeatures.objects(bot=bot, status=status)
for regex_feature in regex_features:
yield {
REGEX_FEATURES.NAME.value: regex_feature["name"],
REGEX_FEATURES.PATTERN.value: regex_feature["pattern"],
}
def __prepare_training_regex_features(self, bot: Text):
return list(self.fetch_regex_features(bot))
def __extract_intents(self, intents, bot: Text, user: Text):
saved_intents = self.__prepare_training_intents(bot)
for intent in intents:
if intent not in saved_intents:
yield Intents(name=intent, bot=bot, user=user)
def __save_intents(self, intents, bot: Text, user: Text):
if intents:
new_intents = list(self.__extract_intents(intents, bot, user))
if new_intents:
Intents.objects.insert(new_intents)
def fetch_intents(self, bot: Text, status=True):
""" Returns the intent list of the bot (input).
Eg. MongoProcessor.fetch_intents(bot_name) """
intents = Intents.objects(bot=bot, status=status).aggregate(
[{"$group": {"_id": "$bot", "intents": {"$push": "$name"}}}]
)
return list(intents)
def __prepare_training_intents(self, bot: Text):
intents = self.fetch_intents(bot)
if intents:
return intents[0]["intents"]
else:
return []
def __extract_domain_entities(self, entities: List[str], bot: Text, user: Text):
saved_entities = self.__prepare_training_domain_entities(bot=bot)
for entity in entities:
if entity not in saved_entities:
yield Entities(name=entity, bot=bot, user=user)
def __save_domain_entities(self, entities: List[str], bot: Text, user: Text):
if entities:
new_entities = list(self.__extract_domain_entities(entities, bot, user))
if new_entities:
Entities.objects.insert(new_entities)
def fetch_domain_entities(self, bot: Text, status=True):
""" Returns the list of entities of the bot (input).
Eg. MongoProcessor.fetch_domain_entities(bot_name) """
entities = Entities.objects(bot=bot, status=status).aggregate(
[{"$group": {"_id": "$bot", "entities": {"$push": "$name"}}}]
)
return list(entities)
def __prepare_training_domain_entities(self, bot: Text):
entities = self.fetch_domain_entities(bot)
if entities:
return entities[0]["entities"]
else:
return []
def __extract_forms(self, forms, bot: Text, user: Text):
saved_forms = self.__prepare_training_forms(bot)
for form in forms:
if form not in saved_forms:
yield Forms(name=form, bot=bot, user=user)
def __save_forms(self, forms, bot: Text, user: Text):
if forms:
new_forms = list(self.__extract_forms(forms, bot, user))
if new_forms:
Forms.objects.insert(new_forms)
def fetch_forms(self, bot: Text, status=True):
""" Returns the list of forms of the bot (input).
Eg. MongoProcessor.fetch_forms(bot_name) """
forms = Forms.objects(bot=bot, status=status).aggregate(
[{"$group": {"_id": "$bot", "forms": {"$push": "$name"}}}]
)
return list(forms)
def __prepare_training_forms(self, bot: Text):
forms = self.fetch_forms(bot)
if forms:
return forms[0]["forms"]
else:
return []
def __extract_actions(self, actions, bot: Text, user: Text):
saved_actions = self.__prepare_training_actions(bot)
for action in actions:
if action not in saved_actions:
yield Actions(name=action, bot=bot, user=user)
def __save_actions(self, actions, bot: Text, user: Text):
if actions:
new_actions = list(self.__extract_actions(actions, bot, user))
if new_actions:
Actions.objects.insert(new_actions)
def fetch_actions(self, bot: Text, status=True):
""" Returns the list of actions of the bot (input).
Eg. MongoProcessor.fetch_actions(bot_name) """
actions = Actions.objects(bot=bot, status=status).aggregate(
[{"$group": {"_id": "$bot", "actions": {"$push": "$name"}}}]
)
return list(actions)
def __prepare_training_actions(self, bot: Text):
actions = self.fetch_actions(bot)
if actions:
return actions[0]["actions"]
else:
return []
def __extract_session_config(
self, session_config: SessionConfig, bot: Text, user: Text
):
return SessionConfigs(
sesssionExpirationTime=session_config.session_expiration_time,
carryOverSlots=session_config.carry_over_slots,
bot=bot,
user=user,
)
def __save_session_config(
self, session_config: SessionConfig, bot: Text, user: Text
):
try:
if session_config:
try:
session = SessionConfigs.objects().get(bot=bot)
session.session_expiration_time = (
session_config.session_expiration_time
)
session.carryOverSlots = True
session.user = user
except DoesNotExist:
session = self.__extract_session_config(session_config, bot, user)
session.save()
except NotUniqueError as e:
logging.info(e)
raise AppException("Session Config already exists for the bot")
except Exception as e:
logging.info(e)
raise AppException("Internal Server Error")
def fetch_session_config(self, bot: Text):
""" Returns the session configurations of the bot (input).
Eg. MongoProcessor.fetch_session_config(bot_name) """
try:
session_config = SessionConfigs.objects().get(bot=bot)
except DoesNotExist as e:
logging.info(e)
session_config = None
return session_config
def __prepare_training_session_config(self, bot: Text):
session_config = self.fetch_session_config(bot)
if session_config:
return {
SESSION_CONFIG.SESSION_EXPIRATION_TIME.value: session_config.sesssionExpirationTime,
SESSION_CONFIG.CARRY_OVER_SLOTS.value: session_config.carryOverSlots,
}
else:
default_session = SessionConfig.default()
return {
SESSION_CONFIG.SESSION_EXPIRATION_TIME.value: default_session.session_expiration_time,
SESSION_CONFIG.CARRY_OVER_SLOTS.value: default_session.carry_over_slots,
}
def __extract_response_button(self, buttons):
for button in buttons:
yield ResponseButton._from_son(button)
def __extract_response_value(self, values: List[Dict], key, bot: Text, user: Text):
saved_responses = self.__fetch_list_of_response(bot)
for value in values:
if value not in saved_responses:
response = Responses()
response.name = key.strip()
response.bot = bot
response.user = user
if RESPONSE.Text.value in value:
response_text = ResponseText()
response_text.text = str(value[RESPONSE.Text.value]).strip()
if RESPONSE.IMAGE.value in value:
response_text.image = value[RESPONSE.IMAGE.value]
if RESPONSE.CHANNEL.value in value:
response_text.channel = value["channel"]
if RESPONSE.BUTTONS.value in value:
response_text.buttons = list(
self.__extract_response_button(value[RESPONSE.BUTTONS.value])
)
response.text = response_text
elif RESPONSE.CUSTOM.value in value:
response.custom = ResponseCustom._from_son(
{RESPONSE.CUSTOM.value: value[RESPONSE.CUSTOM.value]}
)
yield response
def __extract_response(self, responses, bot: Text, user: Text):
responses_result = []
for key, values in responses.items():
responses_to_saved = list(self.__extract_response_value(values, key, bot, user))
responses_result.extend(responses_to_saved)
return responses_result
def __save_responses(self, responses, bot: Text, user: Text):
if responses:
new_responses = self.__extract_response(responses, bot, user)
if new_responses:
Responses.objects.insert(new_responses)
def __prepare_response_Text(self, texts: List[Dict]):
for text in texts:
yield text
def fetch_responses(self, bot: Text, status=True):
""" Yields the response dictionary of the bot (input).
Eg. MongoProcessor.fetch_responses(bot_name) """
responses = Responses.objects(bot=bot, status=status).aggregate(
[
{
"$group": {
"_id": "$name",
"texts": {"$push": "$text"},
"customs": {"$push": "$custom"},
}
}
]
)
for response in responses:
key = response["_id"]
value = list(self.__prepare_response_Text(response["texts"]))
if response["customs"]:
value.extend(response["customs"])
yield {key: value}
def __prepare_training_responses(self, bot: Text):
responses = dict(ChainMap(*list(self.fetch_responses(bot))))
return responses
def __fetch_slot_names(self, bot: Text):
saved_slots = list(
Slots.objects(bot=bot, status=True).aggregate(
[{"$group": {"_id": "$bot", "slots": {"$push": "$name"}}}]
)
)
slots_list = []
if saved_slots:
slots_list = saved_slots[0]["slots"]
return slots_list
def __extract_slots(self, slots, bot: Text, user: Text):
slots_name_list = self.__fetch_slot_names(bot)
for slot in slots:
items = vars(slot)
if items["name"] not in slots_name_list:
items["type"] = slot.type_name
items["value_reset_delay"] = items["_value_reset_delay"]
items.pop("_value_reset_delay")
items["bot"] = bot
items["user"] = user
items.pop("value")
yield Slots._from_son(items)
def __save_slots(self, slots, bot: Text, user: Text):
if slots:
new_slots = list(self.__extract_slots(slots, bot, user))
if new_slots:
Slots.objects.insert(new_slots)
def fetch_slots(self, bot: Text, status=True):
""" Returns the list of slots of the bot (input).
Eg. MongoProcessor.fetch_slots(bot_name) """
slots = Slots.objects(bot=bot, status=status)
return list(slots)
def __prepare_training_slots(self, bot: Text):
slots = self.fetch_slots(bot)
results = []
for slot in slots:
key = slot.name
if slot.type == FloatSlot.type_name:
value = {
SLOTS.INITIAL_VALUE.value: slot.initial_value,
SLOTS.VALUE_RESET_DELAY.value: slot.value_reset_delay,
SLOTS.AUTO_FILL.value: slot.auto_fill,
SLOTS.MIN_VALUE.value: slot.min_value,
SLOTS.MAX_VALUE.value: slot.max_value,
}
elif slot.type == CategoricalSlot.type_name:
value = {
SLOTS.INITIAL_VALUE.value: slot.initial_value,
SLOTS.VALUE_RESET_DELAY.value: slot.value_reset_delay,
SLOTS.AUTO_FILL.value: slot.auto_fill,
SLOTS.VALUES.value: slot.values,
}
else:
value = {
SLOTS.INITIAL_VALUE.value: slot.initial_value,
SLOTS.VALUE_RESET_DELAY.value: slot.value_reset_delay,
SLOTS.AUTO_FILL.value: slot.auto_fill,
}
value[SLOTS.TYPE.value] = slot.type
results.append({key: value})
return dict(ChainMap(*results))
def __extract_story_events(self, events):
for event in events:
if isinstance(event, UserUttered):
yield StoryEvents(type=event.type_name, name=event.text)
elif isinstance(event, ActionExecuted):
yield StoryEvents(type=event.type_name, name=event.action_name)
elif isinstance(event, Form):
yield StoryEvents(type=event.type_name, name=event.name)
elif isinstance(event, SlotSet):
yield StoryEvents(
type=event.type_name, name=event.key, value=event.value
)
def __fetch_story_block_names(self, bot: Text):
saved_stories = list(Stories.objects(bot=bot, status=True).aggregate([
{
"$group": {
"_id": "$bot",
"block": {"$push": "$block_name"},
}
}
]))
result = []
if saved_stories:
result = saved_stories[0]["block"]
return result
def __extract_story_step(self, story_steps, bot: Text, user: Text):
saved_stories = self.__fetch_story_block_names(bot)
for story_step in story_steps:
if story_step.block_name not in saved_stories:
story_events = list(self.__extract_story_events(story_step.events))
story = Stories(
block_name=story_step.block_name,
start_checkpoints=[
start_checkpoint.name
for start_checkpoint in story_step.start_checkpoints
],
end_checkpoints=[
end_checkpoint.name for end_checkpoint in story_step.end_checkpoints
],
events=story_events,
)
story.bot = bot
story.user = user
yield story
def __save_stories(self, story_steps, bot: Text, user: Text):
if story_steps:
new_stories = list(self.__extract_story_step(story_steps, bot, user))
if new_stories:
Stories.objects.insert(
new_stories
)
def __prepare_training_story_events(self, events, timestamp):
for event in events:
if event.type == UserUttered.type_name:
intent = {
STORY_EVENT.NAME.value: event.name,
STORY_EVENT.CONFIDENCE.value: 1.0,
}
parse_data = {"text": INTENT_MESSAGE_PREFIX + event.name,
"intent": intent,
"intent_ranking": [intent],
"entities": []}
yield UserUttered(text=event.name, intent=intent, parse_data=parse_data, timestamp=timestamp)
elif event.type == ActionExecuted.type_name:
yield ActionExecuted(action_name=event.name, timestamp=timestamp)
elif event.type == Form.type_name:
yield Form(name=event.name, timestamp=timestamp)
elif event.type == SlotSet.type_name:
yield SlotSet(key=event.name, value=event.value, timestamp=timestamp)
def fetch_stories(self, bot: Text, status=True):
""" Returns the list of stories of the bot (input).
Eg. MongoProcessor.fetch_stories(bot_name) """
return list(Stories.objects(bot=bot, status=status))
def __prepare_training_story_step(self, bot: Text):
for story in Stories.objects(bot=bot, status=True):
story_events = list(
self.__prepare_training_story_events(
story.events, datetime.now().timestamp()
)
)
yield StoryStep(
block_name=story.block_name,
events=story_events,
start_checkpoints=[
Checkpoint(start_checkpoint)
for start_checkpoint in story.start_checkpoints
],
end_checkpoints=[
Checkpoint(end_checkpoints)
for end_checkpoints in story.end_checkpoints
],
)
def __prepare_training_story(self, bot: Text):
return StoryGraph(list(self.__prepare_training_story_step(bot)))
def save_config(self, config: dict, bot: Text, user: Text):
'''save bot pipeline and policies'''
try:
config_obj = Configs.objects().get(bot=bot)
config_obj.pipeline = config["pipeline"]
config_obj.language = config["language"]
config_obj.policies = config["policies"]
except DoesNotExist:
config["bot"] = bot
config["user"] = user
config_obj = Configs._from_son(config)
config_obj.save()
def delete_config(self, bot: Text, user: Text):
"""perform soft delete on bot pipeline and policies configuration"""
Utility.delete_document([Configs], bot, user)
def fetch_configs(self, bot: Text):
""" Returns the configuration details of the bot (input).
Eg. MongoProcessor.fetch_configs(bot_name) """
try:
configs = Configs.objects().get(bot=bot)
except DoesNotExist as e:
logging.info(e)
configs = Configs._from_son(read_config_file("./template/config.yml"))
return configs
def load_config(self, bot: Text):
""" Returns the configuration dictionary created from the config object of the bot (input).
Eg. MongoProcessor.load_config(bot_name) """
configs = self.fetch_configs(bot)
config_dict = configs.to_mongo().to_dict()
return {
key: config_dict[key]
for key in config_dict
if key in ["language", "pipeline", "policies"]
}
def add_intent(self, text: Text, bot: Text, user: Text):
""" Adds a new intent (input) to the bot (input).
Eg. MongoProcessor.add_intent(intent_name,bot_name,user_name) """
assert not Utility.check_empty_string(text), "Intent Name cannot be empty or blank spaces"
Utility.is_exist(
Intents,
exp_message="Intent already exists!",
name__iexact=text.strip(),
bot=bot,
status=True
)
saved = Intents(name=text, bot=bot, user=user).save().to_mongo().to_dict()
return saved["_id"].__str__()
def get_intents(self, bot: Text):
""" Returns the list of intents of the bot (input) """
intents = Intents.objects(bot=bot, status=True).order_by("-timestamp")
return list(self.__prepare_document_list(intents, "name"))
def add_training_example(
self, examples: List[Text], intent: Text, bot: Text, user: Text
):
""" Adds a sentence/question (training example) for an intent of the bot.
Eg. MongoProcessor.add_training_example([training_example],intent_name,bot_name,user_name) """
assert not Utility.check_empty_string(intent), "Training Example name and text cannot be empty or blank spaces"
if not Utility.is_exist(
Intents, raise_error=False, name__iexact=intent, bot=bot, status=True
):
self.add_intent(intent, bot, user)
for example in examples:
try:
assert not Utility.check_empty_string(example), "Training Example name and text cannot be empty or blank spaces"
example = example.strip()
if Utility.is_exist(
TrainingExamples, raise_error=False, text__iexact=example, bot=bot, status=True
):
yield {
"text": example,
"message": "Training Example already exists!",
"_id": None,
}
else:
entities = Utility.markdown_reader._find_entities_in_training_example(
example
)
if entities:
ext_entity = [ent["entity"] for ent in entities]
self.__save_domain_entities(ext_entity, bot=bot, user=user)
self.__add_slots_from_entities(ext_entity, bot, user)
text = re.sub(
entity_regex, lambda m: m.groupdict()["entity_text"], example
)
new_entities = list(
self.__extract_entities(entities)
)
else:
new_entities = None
text = example
training_example = TrainingExamples(
intent=intent.strip(), text=text, entities=new_entities, bot=bot, user=user
)
saved = training_example.save().to_mongo().to_dict()
yield {
"text": example,
"_id": saved["_id"].__str__(),
"message": "Training Example added successfully!",
}
except Exception as e:
yield {"text": example, "_id": None, "message": str(e)}
def get_training_examples(self, intent: Text, bot: Text):
""" Yields training examples for an intent of the bot.
Eg. MongoProcessor.get_training_examples(intent_name,bot_name) """
training_examples = list(
TrainingExamples
.objects(bot=bot, intent__iexact=intent, status=True)
.order_by("-timestamp")
)
for training_example in training_examples:
example = training_example.to_mongo().to_dict()
entities = example["entities"] if "entities" in example else None
yield {
"_id": example["_id"].__str__(),
"text": Utility.prepare_nlu_text(example["text"], entities),
}
def get_all_training_examples(self, bot: Text):
""" Returns list of all training examples of a bot """
training_examples = list(
TrainingExamples.objects(bot=bot, status=True).aggregate(
[
{
"$group": {
"_id": "$bot",
"text": {"$push": "$text"},
"id": {"$push": {"$toString": "$_id"}},
}
}
]
)
)
if training_examples:
return training_examples[0]["text"], training_examples[0]["id"]
else:
return [], []
def remove_document(self, document: Document, id: Text, bot: Text, user: Text):
""" Removes a document of the bot.
Eg. MongoProcessor.remove_document(document_name,doc_ID,bot_name,user_name) """
try:
doc = document.objects(bot=bot).get(id=id)
doc.status=False
doc.user=user
doc.save()
except DoesNotExist as e:
logging.info(e)
raise AppException("Unable to remove document")
except Exception as e:
logging.info(e)
raise AppException("Unable to remove document")
def __prepare_document_list(self, documents: List[Document], field: Text):
for document in documents:
doc_dict = document.to_mongo().to_dict()
yield {"_id": doc_dict["_id"].__str__(), field: doc_dict[field]}
def add_entity(self, name: Text, bot: Text, user: Text):
""" Adds an entity for a bot of a user.
Eg. MongoProcessor.add_entity(entity_name,bot_name,user_name) """
assert not Utility.check_empty_string(name), "Entity Name cannot be empty or blank spaces"
Utility.is_exist(
Entities,
exp_message="Entity already exists!",
name__iexact=name.strip(),
bot=bot,
status=True
)
Entities(name=name.strip(), bot=bot, user=user).save()
if not Utility.is_exist(
Slots, raise_error=False, name__iexact=name, bot=bot, status=True
):
Slots(name=name.strip(), type="text", bot=bot, user=user).save()
def get_entities(self, bot: Text):
""" Returns the list of entities of a bot (input) """
entities = Entities.objects(bot=bot, status=True)
return list(self.__prepare_document_list(entities, "name"))
def add_action(self, name: Text, bot: Text, user: Text):
""" Adds an action to the bot.
Eg. MongoProcessor.add_action(action_name,bot_name,user_name) """
assert not Utility.check_empty_string(name), "Action name cannot be empty or blank spaces"
Utility.is_exist(
Actions,
exp_message="Entity already exists!",
name__iexact=name.strip(),
bot=bot,
status=True
)
Actions(name=name.strip(), bot=bot, user=user).save()
def get_actions(self, bot: Text):
""" Returns the list of actions of a bot (input) """
actions = Actions.objects(bot=bot, status=True)
return list(self.__prepare_document_list(actions, "name"))
def __add_slots_from_entities(self, entities: List[Text], bot: Text, user: Text):
slot_name_list = self.__fetch_slot_names(bot)
slots = [
Slots(name=entity, type="text", bot=bot, user=user)
for entity in entities
if entity not in slot_name_list
]
if slots:
Slots.objects.insert(slots)
def add_text_response(self, utterance: Text, name: Text, bot: Text, user: Text):
""" Adds a text response to an utterance of the bot.
Eg. MongoProcessor.add_text_response(response,utterance_name,bot_name,user_name) """
assert not Utility.check_empty_string(utterance), "Response text cannot be empty or blank spaces"
assert not Utility.check_empty_string(name), "Response name cannot be empty or blank spaces"
return self.add_response(
utterances={"text": utterance.strip()}, name=name, bot=bot, user=user
)
def add_response(self, utterances: Dict, name: Text, bot: Text, user: Text):
""" Adds an utterance to the bot.
Eg. MongoProcessor.add_response({utterance_dict},utterance_name,bot_name,user_name) """
self.__check_response_existence(
response=utterances, bot=bot, exp_message="Response already exists!"
)
response = list(
self.__extract_response_value(
values=[utterances], key=name, bot=bot, user=user
)
)[0]
value = response.save().to_mongo().to_dict()
if not Utility.is_exist(
Actions, raise_error=False, name__iexact=name, bot=bot, status=True
):
Actions(name=name.strip(), bot=bot, user=user).save()
return value["_id"].__str__()
def get_response(self, name: Text, bot: Text):
""" Yields bot response based on utterance name.
Eg. MongoProcessor.get_response(utterance_name,bot_name) """
values = Responses.objects(bot=bot, status=True, name__iexact=name).order_by("-timestamp")
for value in values:
val = None
if value.text:
val = list(
self.__prepare_response_Text([value.text.to_mongo().to_dict()])
)[0]
elif value.custom:
val = value.custom.to_mongo().to_dict()
yield {"_id": value.id.__str__(), "value": val}
def __fetch_list_of_response(self, bot: Text):
saved_responses = list(
Responses.objects(bot=bot, status=True).aggregate(
[
{
"$group": {
"_id": "$name",
"texts": {"$push": "$text"},
"customs": {"$push": "$custom"},
}
}
]
)
)
saved_items = list(
itertools.chain.from_iterable(
[items["texts"] + items["customs"] for items in saved_responses]
)
)
return saved_items;
def __check_response_existence(
self, response: Dict, bot: Text, exp_message: Text = None, raise_error=True
):
saved_items = self.__fetch_list_of_response(bot)
if response in saved_items:
if raise_error:
if Utility.check_empty_string(exp_message):
raise AppException("Exception message cannot be empty")
raise AppException(exp_message)
else:
return True
else:
if not raise_error:
return False
## need to add the logic to add action, slots and forms if it does not exist
def add_story(self, name: Text, events: List[Dict], bot: Text, user: Text):
""" Adds a new story to the bot.
Eg. MongoProcessor.add_story(story_name,[Dictionaries of conversation flow],bot_name,user_name) """
assert not Utility.check_empty_string(name), "Story path name cannot be empty or blank spaces"
self.__check_event_existence(
events, bot=bot, exp_message="Story already exists!"
)
return (
Stories(
block_name=name.strip(),
events=events,
bot=bot,
user=user,
start_checkpoints=[STORY_START],
)
.save()
.to_mongo()
.to_dict()["_id"]
.__str__()
)
def __fetch_list_of_events(self, bot: Text):
saved_events = list(
Stories.objects(bot=bot, status=True).aggregate(
[{"$group": {"_id": "$name", "events": {"$push": "$events"}}}]
)
)
saved_items = list(
itertools.chain.from_iterable([items["events"] for items in saved_events])
)
return saved_items
def __check_event_existence(
self, events: List[Dict], bot: Text, exp_message: Text = None, raise_error=True
):
saved_items = self.__fetch_list_of_events(bot)
if events in saved_items:
if raise_error:
if Utility.check_empty_string(exp_message):
raise AppException("Exception message cannot be empty")
raise AppException(exp_message)
else:
return True
else:
if not raise_error:
return False
def get_stories(self, bot: Text):
""" Yields all the stories of the bot """
for value in Stories.objects(bot=bot, status=True):
item = value.to_mongo().to_dict()
item.pop("bot")
item.pop("user")
item.pop("timestamp")
item.pop("status")
item["_id"] = item["_id"].__str__()
yield item
def get_utterance_from_intent(self, intent: Text, bot: Text):
""" Returns the bot response for a particular intent.
Eg. MongoProcessor.get_utterance_from_intent(intent_name,bot_name) """
assert not Utility.check_empty_string(intent), "Intent cannot be empty or blank spaces"
responses = Responses.objects(bot=bot, status=True).distinct(field="name")
story = Stories.objects(bot=bot, status=True, events__name=intent)
if story:
events = story[0].events
search = False
for i in range(len(events)):
event = events[i]
if event.type == "user":
if str(event.name).lower() == intent.lower():
search = True
else:
search = False
if search and event.type == "action" and event.name in responses:
return event.name
def add_session_config(
self,
bot: Text,
user: Text,
id: Text = None,
sesssionExpirationTime: int = 60,
carryOverSlots: bool = True,
):
""" Adds a session configuration to the bot.
Eg. MongoProcessor.add_session_config(bot_name,user_name) """
if not Utility.check_empty_string(id):
session_config = SessionConfigs.objects().get(id=id)
session_config.sesssionExpirationTime = sesssionExpirationTime
session_config.carryOverSlots = carryOverSlots
else:
if SessionConfigs.objects(bot=bot):
raise AppException("Session config already exists!")
session_config = SessionConfigs(
sesssionExpirationTime=sesssionExpirationTime,
carryOverSlots=carryOverSlots,
bot=bot,
user=user,
)
return session_config.save().to_mongo().to_dict()["_id"].__str__()
def get_session_config(self, bot: Text):
""" Returns the session configuration of the bot (input) """
session_config = SessionConfigs.objects().get(bot=bot).to_mongo().to_dict()
return {
"_id": session_config["_id"].__str__(),
"sesssionExpirationTime": session_config["sesssionExpirationTime"],
"carryOverSlots": session_config["carryOverSlots"],
}
def add_endpoints(self, endpoint_config: Dict, bot: Text, user: Text):
""" Adds endpoints to the bot and user.
Eg. MongoProcessor.add_endpoints({endpoint config},bot_name,user_name) """
try:
endpoint = Endpoints.objects().get(bot=bot)
except DoesNotExist:
if Endpoints.objects(bot=bot):
raise AppException("Endpoint Configuration already exists!")
endpoint = Endpoints()
if endpoint_config.get("bot_endpoint"):
endpoint.bot_endpoint = EndPointBot(**endpoint_config.get("bot_endpoint"))
if endpoint_config.get("action_endpoint"):
endpoint.action_endpoint = EndPointAction(**endpoint_config.get("action_endpoint"))
if endpoint_config.get("tracker_endpoint"):
endpoint.tracker_endpoint = EndPointTracker(**endpoint_config.get("tracker_endpoint"))
endpoint.bot = bot
endpoint.user = user
return endpoint.save().to_mongo().to_dict()["_id"].__str__()
def get_endpoints(self, bot: Text, raise_exception=True):
""" Returns the endpoints of the bot (input) """
try:
endpoint = Endpoints.objects().get(bot=bot).to_mongo().to_dict()
endpoint.pop("bot")
endpoint.pop("user")
endpoint.pop("timestamp")
endpoint["_id"] = endpoint["_id"].__str__()
return endpoint
except DoesNotExist as e:
logging.info(e)
if raise_exception:
raise AppException("Endpoint Configuration does not exists!")
else:
return {}
def add_model_deployment_history(self, bot: Text, user: Text, model: Text, url: Text, status: Text):
return (ModelDeployment(bot=bot,
user=user,
model=model,
url=url,
status=status)
.save()
.to_mongo()
.to_dict().get("_id").__str__())
def get_model_deployment_history(self, bot: Text):
model_deployments = (ModelDeployment
.objects(bot=bot)
.order_by("-timestamp"))
for deployment in model_deployments:
value = deployment.to_mongo().to_dict()
value.pop("bot")
value.pop("_id")
yield value
def deploy_model(self, bot: Text, user: Text):
endpoint = {}
model = None
try:
endpoint = self.get_endpoints(bot, raise_exception=False)
response, model = Utility.deploy_model(endpoint, bot)
except Exception as e:
response = str(e)
self.add_model_deployment_history(bot=bot,
user=user,
model=model,
url=(endpoint.get("bot_endpoint").get("url")
if endpoint.get("bot_endpoint")
else None),
status=response)
return response
class AgentProcessor:
mongo_processor = MongoProcessor()
@staticmethod
def get_agent(bot: Text) -> Agent:
""" Loads the agent of the bot (input) """
if not InMemoryAgentCache.is_exists(bot):
AgentProcessor.reload(bot)
return InMemoryAgentCache.get(bot)
@staticmethod
def get_latest_model(bot: Text):
return Utility.get_latest_file(os.path.join(DEFAULT_MODELS_PATH, bot))
@staticmethod
def reload(bot: Text):
""" Reloads the bot (input) """
try:
endpoint = AgentProcessor.mongo_processor.get_endpoints(
bot, raise_exception=False
)
action_endpoint = (
EndpointConfig(url=endpoint["action_endpoint"]["url"])
if endpoint and endpoint.get("action_endpoint")
else None
)
model_path = AgentProcessor.get_latest_model(bot)
domain = AgentProcessor.mongo_processor.load_domain(bot)
mongo_store = Utility.get_local_mongo_store(bot, domain)
agent = Agent.load(model_path, action_endpoint=action_endpoint, tracker_store=mongo_store)
InMemoryAgentCache.set(bot, agent)
except Exception as e:
logging.info(e)
raise AppException("Bot has not been trained yet !")
class ModelProcessor:
@staticmethod
def set_training_status(
bot: Text,
user: Text,
status: Text,
model_path: Text = None,
exception: Text = None,
):
try:
doc = ModelTraining.objects(bot=bot).get(status=MODEL_TRAINING_STATUS.INPROGRESS.value)
doc.status = status
doc.end_timestamp = datetime.utcnow()
except DoesNotExist:
doc = ModelTraining()
doc.status = MODEL_TRAINING_STATUS.INPROGRESS.value
doc.start_timestamp = datetime.utcnow()
doc.bot = bot
doc.user = user
doc.model_path = model_path
doc.exception = exception
doc.save()
@staticmethod
def is_training_inprogress(bot: Text, raise_exception=True):
if ModelTraining.objects(
bot=bot, status=MODEL_TRAINING_STATUS.INPROGRESS.value
).count():
if raise_exception:
raise AppException("Previous model training in progress.")
else:
return True
else:
return False
@staticmethod
def is_daily_training_limit_exceeded(bot: Text, raise_exception=True):
today = datetime.today()
today_start = today.replace(hour=0, minute=0, second=0)
doc_count = ModelTraining.objects(
bot=bot,
start_timestamp__gte=today_start
).count()
print(doc_count)
if doc_count >= Utility.environment["MODEL_TRAINING_LIMIT_PER_DAY"]:
if raise_exception:
raise AppException("Daily model training limit exceeded.")
else:
return True
else:
return False
@staticmethod
def get_training_history(bot: Text):
for value in ModelTraining.objects(bot=bot).order_by("-start_timestamp"):
item = value.to_mongo().to_dict()
item.pop("bot")
item["_id"] = item["_id"].__str__()
yield item | [
"rasa.importers.rasa.Domain.from_dict",
"bot_trainer.utils.Utility.check_empty_string",
"rasa.core.training.structures.Checkpoint",
"collections.ChainMap",
"bot_trainer.utils.Utility.create_zip_file",
"bot_trainer.utils.Utility.is_exist",
"datetime.datetime.today",
"rasa.core.events.Form",
"bot_trainer.utils.Utility.get_local_mongo_store",
"logging.info",
"bot_trainer.utils.Utility.markdown_reader._find_entities_in_training_example",
"bot_trainer.utils.Utility.delete_document",
"itertools.chain.from_iterable",
"rasa.utils.io.read_config_file",
"rasa.core.events.UserUttered",
"rasa.core.training.structures.SlotSet",
"rasa.importers.utils.training_data_from_paths",
"rasa.nlu.training_data.Message",
"rasa.core.domain.SessionConfig.default",
"bot_trainer.utils.Utility.prepare_nlu_text",
"rasa.core.agent.Agent.load",
"rasa.core.events.ActionExecuted",
"rasa.utils.endpoints.EndpointConfig",
"rasa.nlu.training_data.TrainingData",
"bot_trainer.utils.Utility.delete_directory",
"datetime.datetime.utcnow",
"bot_trainer.utils.Utility.deploy_model",
"os.path.join",
"datetime.datetime.now",
"rasa.importers.rasa.StoryFileReader.read_from_files",
"bot_trainer.exceptions.AppException",
"bot_trainer.utils.Utility.save_files"
] | [((2347, 2395), 'bot_trainer.utils.Utility.save_files', 'Utility.save_files', (['nlu', 'domain', 'stories', 'config'], {}), '(nlu, domain, stories, config)\n', (2365, 2395), False, 'from bot_trainer.utils import Utility\n'), ((2471, 2506), 'bot_trainer.utils.Utility.delete_directory', 'Utility.delete_directory', (['data_path'], {}), '(data_path)\n', (2495, 2506), False, 'from bot_trainer.utils import Utility\n'), ((2716, 2774), 'bot_trainer.utils.Utility.create_zip_file', 'Utility.create_zip_file', (['nlu', 'domain', 'stories', 'config', 'bot'], {}), '(nlu, domain, stories, config, bot)\n', (2739, 2774), False, 'from bot_trainer.utils import Utility\n'), ((5224, 5327), 'bot_trainer.utils.Utility.delete_document', 'Utility.delete_document', (['[TrainingExamples, EntitySynonyms, LookupTables, RegexFeatures]', 'user', 'bot'], {}), '([TrainingExamples, EntitySynonyms, LookupTables,\n RegexFeatures], user, bot)\n', (5247, 5327), False, 'from bot_trainer.utils import Utility\n'), ((5881, 6033), 'rasa.nlu.training_data.TrainingData', 'TrainingData', ([], {'training_examples': 'training_examples', 'entity_synonyms': 'entity_synonyms', 'lookup_tables': 'lookup_tables', 'regex_features': 'regex_features'}), '(training_examples=training_examples, entity_synonyms=\n entity_synonyms, lookup_tables=lookup_tables, regex_features=regex_features\n )\n', (5893, 6033), False, 'from rasa.nlu.training_data import Message, TrainingData\n'), ((6941, 7034), 'bot_trainer.utils.Utility.delete_document', 'Utility.delete_document', (['[Intents, Entities, Forms, Actions, Responses, Slots]', 'bot', 'user'], {}), '([Intents, Entities, Forms, Actions, Responses,\n Slots], bot, user)\n', (6964, 7034), False, 'from bot_trainer.utils import Utility\n'), ((7948, 7977), 'rasa.importers.rasa.Domain.from_dict', 'Domain.from_dict', (['domain_dict'], {}), '(domain_dict)\n', (7964, 7977), False, 'from rasa.importers.rasa import Domain, StoryFileReader\n'), ((8753, 8798), 'bot_trainer.utils.Utility.delete_document', 'Utility.delete_document', (['[Stories]', 'bot', 'user'], {}), '([Stories], bot, user)\n', (8776, 8798), False, 'from bot_trainer.utils import Utility\n'), ((33884, 33929), 'bot_trainer.utils.Utility.delete_document', 'Utility.delete_document', (['[Configs]', 'bot', 'user'], {}), '([Configs], bot, user)\n', (33907, 33929), False, 'from bot_trainer.utils import Utility\n'), ((56234, 56250), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (56248, 56250), False, 'from datetime import datetime\n'), ((3264, 3311), 'rasa.importers.utils.training_data_from_paths', 'utils.training_data_from_paths', (['nlu_files', '"""en"""'], {}), "(nlu_files, 'en')\n", (3294, 3311), False, 'from rasa.importers import utils\n'), ((12075, 12094), 'collections.ChainMap', 'ChainMap', (['*synonyms'], {}), '(*synonyms)\n', (12083, 12094), False, 'from collections import ChainMap\n'), ((12583, 12612), 'rasa.nlu.training_data.Message', 'Message', (['trainingExample.text'], {}), '(trainingExample.text)\n', (12590, 12612), False, 'from rasa.nlu.training_data import Message, TrainingData\n'), ((22826, 22849), 'rasa.core.domain.SessionConfig.default', 'SessionConfig.default', ([], {}), '()\n', (22847, 22849), False, 'from rasa.core.domain import SessionConfig\n'), ((28844, 28862), 'collections.ChainMap', 'ChainMap', (['*results'], {}), '(*results)\n', (28852, 28862), False, 'from collections import ChainMap\n'), ((34991, 35023), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['text'], {}), '(text)\n', (35017, 35023), False, 'from bot_trainer.utils import Utility\n'), ((35953, 35987), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['intent'], {}), '(intent)\n', (35979, 35987), False, 'from bot_trainer.utils import Utility\n'), ((36069, 36160), 'bot_trainer.utils.Utility.is_exist', 'Utility.is_exist', (['Intents'], {'raise_error': '(False)', 'name__iexact': 'intent', 'bot': 'bot', 'status': '(True)'}), '(Intents, raise_error=False, name__iexact=intent, bot=bot,\n status=True)\n', (36085, 36160), False, 'from bot_trainer.utils import Utility\n'), ((40718, 40750), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['name'], {}), '(name)\n', (40744, 40750), False, 'from bot_trainer.utils import Utility\n'), ((41068, 41155), 'bot_trainer.utils.Utility.is_exist', 'Utility.is_exist', (['Slots'], {'raise_error': '(False)', 'name__iexact': 'name', 'bot': 'bot', 'status': '(True)'}), '(Slots, raise_error=False, name__iexact=name, bot=bot,\n status=True)\n', (41084, 41155), False, 'from bot_trainer.utils import Utility\n'), ((41682, 41714), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['name'], {}), '(name)\n', (41708, 41714), False, 'from bot_trainer.utils import Utility\n'), ((42871, 42908), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['utterance'], {}), '(utterance)\n', (42897, 42908), False, 'from bot_trainer.utils import Utility\n'), ((42977, 43009), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['name'], {}), '(name)\n', (43003, 43009), False, 'from bot_trainer.utils import Utility\n'), ((43770, 43859), 'bot_trainer.utils.Utility.is_exist', 'Utility.is_exist', (['Actions'], {'raise_error': '(False)', 'name__iexact': 'name', 'bot': 'bot', 'status': '(True)'}), '(Actions, raise_error=False, name__iexact=name, bot=bot,\n status=True)\n', (43786, 43859), False, 'from bot_trainer.utils import Utility\n'), ((45143, 45244), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (["[(items['texts'] + items['customs']) for items in saved_responses]"], {}), "([(items['texts'] + items['customs']) for\n items in saved_responses])\n", (45172, 45244), False, 'import itertools\n'), ((46201, 46233), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['name'], {}), '(name)\n', (46227, 46233), False, 'from bot_trainer.utils import Utility\n'), ((47030, 47104), 'itertools.chain.from_iterable', 'itertools.chain.from_iterable', (["[items['events'] for items in saved_events]"], {}), "([items['events'] for items in saved_events])\n", (47059, 47104), False, 'import itertools\n'), ((48318, 48352), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['intent'], {}), '(intent)\n', (48344, 48352), False, 'from bot_trainer.utils import Utility\n'), ((49399, 49429), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['id'], {}), '(id)\n', (49425, 49429), False, 'from bot_trainer.utils import Utility\n'), ((53077, 53112), 'bot_trainer.utils.Utility.deploy_model', 'Utility.deploy_model', (['endpoint', 'bot'], {}), '(endpoint, bot)\n', (53097, 53112), False, 'from bot_trainer.utils import Utility\n'), ((54043, 54081), 'os.path.join', 'os.path.join', (['DEFAULT_MODELS_PATH', 'bot'], {}), '(DEFAULT_MODELS_PATH, bot)\n', (54055, 54081), False, 'import os\n'), ((54672, 54714), 'bot_trainer.utils.Utility.get_local_mongo_store', 'Utility.get_local_mongo_store', (['bot', 'domain'], {}), '(bot, domain)\n', (54701, 54714), False, 'from bot_trainer.utils import Utility\n'), ((54735, 54822), 'rasa.core.agent.Agent.load', 'Agent.load', (['model_path'], {'action_endpoint': 'action_endpoint', 'tracker_store': 'mongo_store'}), '(model_path, action_endpoint=action_endpoint, tracker_store=\n mongo_store)\n', (54745, 54822), False, 'from rasa.core.agent import Agent\n'), ((55391, 55408), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55406, 55408), False, 'from datetime import datetime\n'), ((3194, 3231), 'os.path.join', 'os.path.join', (['path', 'DEFAULT_DATA_PATH'], {}), '(path, DEFAULT_DATA_PATH)\n', (3206, 3231), False, 'import os\n'), ((3350, 3389), 'os.path.join', 'os.path.join', (['path', 'DEFAULT_DOMAIN_PATH'], {}), '(path, DEFAULT_DOMAIN_PATH)\n', (3362, 3389), False, 'import os\n'), ((3468, 3520), 'rasa.importers.rasa.StoryFileReader.read_from_files', 'StoryFileReader.read_from_files', (['story_files', 'domain'], {}), '(story_files, domain)\n', (3499, 3520), False, 'from rasa.importers.rasa import Domain, StoryFileReader\n'), ((3559, 3598), 'os.path.join', 'os.path.join', (['path', 'DEFAULT_CONFIG_PATH'], {}), '(path, DEFAULT_CONFIG_PATH)\n', (3571, 3598), False, 'import os\n'), ((3915, 3930), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (3927, 3930), False, 'import logging\n'), ((3949, 4116), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Failed to validate yaml file.\n Please make sure the file is initial and all mandatory parameters are specified"""'], {}), '(\n """Failed to validate yaml file.\n Please make sure the file is initial and all mandatory parameters are specified"""\n )\n', (3961, 4116), False, 'from bot_trainer.exceptions import AppException\n'), ((4180, 4195), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (4192, 4195), False, 'import logging\n'), ((4214, 4229), 'bot_trainer.exceptions.AppException', 'AppException', (['e'], {}), '(e)\n', (4226, 4229), False, 'from bot_trainer.exceptions import AppException\n'), ((21822, 21837), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (21834, 21837), False, 'import logging\n'), ((21856, 21913), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Session Config already exists for the bot"""'], {}), "('Session Config already exists for the bot')\n", (21868, 21913), False, 'from bot_trainer.exceptions import AppException\n'), ((21957, 21972), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (21969, 21972), False, 'import logging\n'), ((21991, 22028), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Internal Server Error"""'], {}), "('Internal Server Error')\n", (22003, 22028), False, 'from bot_trainer.exceptions import AppException\n'), ((22336, 22351), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (22348, 22351), False, 'import logging\n'), ((34208, 34223), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (34220, 34223), False, 'import logging\n'), ((36472, 36573), 'bot_trainer.utils.Utility.is_exist', 'Utility.is_exist', (['TrainingExamples'], {'raise_error': '(False)', 'text__iexact': 'example', 'bot': 'bot', 'status': '(True)'}), '(TrainingExamples, raise_error=False, text__iexact=example,\n bot=bot, status=True)\n', (36488, 36573), False, 'from bot_trainer.utils import Utility\n'), ((40071, 40086), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (40083, 40086), False, 'import logging\n'), ((40105, 40146), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Unable to remove document"""'], {}), "('Unable to remove document')\n", (40117, 40146), False, 'from bot_trainer.exceptions import AppException\n'), ((40190, 40205), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (40202, 40205), False, 'import logging\n'), ((40224, 40265), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Unable to remove document"""'], {}), "('Unable to remove document')\n", (40236, 40265), False, 'from bot_trainer.exceptions import AppException\n'), ((45581, 45620), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['exp_message'], {}), '(exp_message)\n', (45607, 45620), False, 'from bot_trainer.utils import Utility\n'), ((45720, 45745), 'bot_trainer.exceptions.AppException', 'AppException', (['exp_message'], {}), '(exp_message)\n', (45732, 45745), False, 'from bot_trainer.exceptions import AppException\n'), ((47413, 47452), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['exp_message'], {}), '(exp_message)\n', (47439, 47452), False, 'from bot_trainer.utils import Utility\n'), ((47552, 47577), 'bot_trainer.exceptions.AppException', 'AppException', (['exp_message'], {}), '(exp_message)\n', (47564, 47577), False, 'from bot_trainer.exceptions import AppException\n'), ((49715, 49761), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Session config already exists!"""'], {}), "('Session config already exists!')\n", (49727, 49761), False, 'from bot_trainer.exceptions import AppException\n'), ((51950, 51965), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (51962, 51965), False, 'import logging\n'), ((54356, 54410), 'rasa.utils.endpoints.EndpointConfig', 'EndpointConfig', ([], {'url': "endpoint['action_endpoint']['url']"}), "(url=endpoint['action_endpoint']['url'])\n", (54370, 54410), False, 'from rasa.utils.endpoints import EndpointConfig\n'), ((54908, 54923), 'logging.info', 'logging.info', (['e'], {}), '(e)\n', (54920, 54923), False, 'import logging\n'), ((54942, 54988), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Bot has not been trained yet !"""'], {}), "('Bot has not been trained yet !')\n", (54954, 54988), False, 'from bot_trainer.exceptions import AppException\n'), ((55570, 55587), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (55585, 55587), False, 'from datetime import datetime\n'), ((55986, 56038), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Previous model training in progress."""'], {}), "('Previous model training in progress.')\n", (55998, 56038), False, 'from bot_trainer.exceptions import AppException\n'), ((56599, 56651), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Daily model training limit exceeded."""'], {}), "('Daily model training limit exceeded.')\n", (56611, 56651), False, 'from bot_trainer.exceptions import AppException\n'), ((31642, 31733), 'rasa.core.events.UserUttered', 'UserUttered', ([], {'text': 'event.name', 'intent': 'intent', 'parse_data': 'parse_data', 'timestamp': 'timestamp'}), '(text=event.name, intent=intent, parse_data=parse_data,\n timestamp=timestamp)\n', (31653, 31733), False, 'from rasa.core.events import Form, ActionExecuted, UserUttered\n'), ((34264, 34305), 'rasa.utils.io.read_config_file', 'read_config_file', (['"""./template/config.yml"""'], {}), "('./template/config.yml')\n", (34280, 34305), False, 'from rasa.utils.io import read_config_file\n'), ((36309, 36344), 'bot_trainer.utils.Utility.check_empty_string', 'Utility.check_empty_string', (['example'], {}), '(example)\n', (36335, 36344), False, 'from bot_trainer.utils import Utility\n'), ((36865, 36932), 'bot_trainer.utils.Utility.markdown_reader._find_entities_in_training_example', 'Utility.markdown_reader._find_entities_in_training_example', (['example'], {}), '(example)\n', (36923, 36932), False, 'from bot_trainer.utils import Utility\n'), ((38892, 38943), 'bot_trainer.utils.Utility.prepare_nlu_text', 'Utility.prepare_nlu_text', (["example['text']", 'entities'], {}), "(example['text'], entities)\n", (38916, 38943), False, 'from bot_trainer.utils import Utility\n'), ((45648, 45697), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Exception message cannot be empty"""'], {}), "('Exception message cannot be empty')\n", (45660, 45697), False, 'from bot_trainer.exceptions import AppException\n'), ((47480, 47529), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Exception message cannot be empty"""'], {}), "('Exception message cannot be empty')\n", (47492, 47529), False, 'from bot_trainer.exceptions import AppException\n'), ((50855, 50909), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Endpoint Configuration already exists!"""'], {}), "('Endpoint Configuration already exists!')\n", (50867, 50909), False, 'from bot_trainer.exceptions import AppException\n'), ((52020, 52075), 'bot_trainer.exceptions.AppException', 'AppException', (['"""Endpoint Configuration does not exists!"""'], {}), "('Endpoint Configuration does not exists!')\n", (52032, 52075), False, 'from bot_trainer.exceptions import AppException\n'), ((31809, 31868), 'rasa.core.events.ActionExecuted', 'ActionExecuted', ([], {'action_name': 'event.name', 'timestamp': 'timestamp'}), '(action_name=event.name, timestamp=timestamp)\n', (31823, 31868), False, 'from rasa.core.events import Form, ActionExecuted, UserUttered\n'), ((31938, 31980), 'rasa.core.events.Form', 'Form', ([], {'name': 'event.name', 'timestamp': 'timestamp'}), '(name=event.name, timestamp=timestamp)\n', (31942, 31980), False, 'from rasa.core.events import Form, ActionExecuted, UserUttered\n'), ((32589, 32603), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (32601, 32603), False, 'from datetime import datetime\n'), ((32815, 32843), 'rasa.core.training.structures.Checkpoint', 'Checkpoint', (['start_checkpoint'], {}), '(start_checkpoint)\n', (32825, 32843), False, 'from rasa.core.training.structures import Checkpoint\n'), ((32985, 33012), 'rasa.core.training.structures.Checkpoint', 'Checkpoint', (['end_checkpoints'], {}), '(end_checkpoints)\n', (32995, 33012), False, 'from rasa.core.training.structures import Checkpoint\n'), ((32053, 32116), 'rasa.core.training.structures.SlotSet', 'SlotSet', ([], {'key': 'event.name', 'value': 'event.value', 'timestamp': 'timestamp'}), '(key=event.name, value=event.value, timestamp=timestamp)\n', (32060, 32116), False, 'from rasa.core.training.structures import StoryGraph, StoryStep, SlotSet\n')] |
import numpy as np
import pandas as pd
import itertools
import networkx as nx
from collections import OrderedDict, deque
from tqdm import tqdm
from typing import List, Union, Tuple, Generator, Dict
from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, \
pre_part_graph
from clustviz.chameleon.chameleon import (
internal_closeness,
get_cluster,
len_edges,
rebuild_labels,
)
NxGraph = nx.Graph
def w_int_closeness(graph: NxGraph, cluster: List[int]) -> float:
"""
Compute the internal closeness of the input cluster weighted by the number of its internal edges.
:param graph: kNN graph.
:param cluster: cluster represented by a list of nodes belonging to it.
:return: weighted internal closeness.
"""
return internal_closeness(graph, cluster) / len_edges(graph, cluster)
def relative_closeness2(graph: NxGraph, cluster_i: List[int], cluster_j: List[int], m_fact: float) -> float:
"""
Compute the relative closeness of the two input clusters.
:param graph: kNN graph.
:param cluster_i: first cluster.
:param cluster_j: second cluster.
:param m_fact: multiplicative factor for clusters composed of a single node.
:return: relative closeness of the two input clusters.
"""
edges = connecting_edges((cluster_i, cluster_j), graph)
if not edges:
return 0.0
else:
S_bar = np.mean(get_weights(graph, edges))
sCi, sCj = (
internal_closeness(graph, cluster_i),
internal_closeness(graph, cluster_j),
)
ratio = S_bar / (sCi + sCj)
if (len_edges(graph, cluster_i) == 0) or (
len_edges(graph, cluster_j) == 0
):
return m_fact * ratio
else:
return (
len_edges(graph, cluster_i) + len_edges(graph, cluster_j)
) * ratio
def relative_interconnectivity2(graph: NxGraph, cluster_i: List[int], cluster_j: List[int],
beta: float) -> float:
"""
Compute the relative interconnectivity of the two input clusters.
:param graph: kNN graph.
:param cluster_i: first cluster.
:param cluster_j: second cluster.
:param beta: exponent of the rho factor; the larger, the less encouraged the merging of clusters connected
by a large number of edges relative to the number of edges inside the cluster.
:return: relative interconnectivity of the two input clusters.
"""
if (len_edges(graph, cluster_i) == 0) or (len_edges(graph, cluster_j) == 0):
return 1.0
else:
edges = connecting_edges((cluster_i, cluster_j), graph)
denom = min(len_edges(graph, cluster_i), len_edges(graph, cluster_j))
return (len(edges) / denom) * np.power(rho(graph, cluster_i, cluster_j), beta)
def rho(graph: NxGraph, cluster_i: List[int], cluster_j: List[int]) -> float:
"""
Compute the rho factor, which discourages the algorithm from merging clusters with different densities.
:param graph: kNN graph.
:param cluster_i: first cluster.
:param cluster_j: second cluster.
:return: rho factor.
"""
s_Ci, s_Cj = (
w_int_closeness(graph, cluster_i),
w_int_closeness(graph, cluster_j),
)
return min(s_Ci, s_Cj) / max(s_Ci, s_Cj)
def merge_score2(graph: NxGraph, ci: List[int], cj: List[int], alpha: float, beta: float, m_fact: float) -> float:
"""
Compute the score associated with the merging of the two clusters.
:param graph: kNN graph.
:param ci: first cluster.
:param cj: second cluster.
:param alpha: exponent of relative closeness; the larger, the more important relative closeness is than
relative interconnectivity.
:param beta: exponent of the rho factor; the larger, the less encouraged the merging of clusters connected
by a large number of edges relative to the number of edges inside the cluster.
:param m_fact: multiplicative factor for clusters composed of a single node.
:return: merging score
"""
ri = relative_interconnectivity2(graph, ci, cj, beta)
rc_pot = np.power(relative_closeness2(graph, ci, cj, m_fact), alpha)
if (ri != 0) and (rc_pot != 0):
return ri * rc_pot
else:
return ri + rc_pot
def merge_best2(graph: NxGraph, df: pd.DataFrame, alpha: float, beta: float, m_fact: float,
k: int, verbose: bool = False, verbose2: bool = True) -> Union[Tuple[pd.DataFrame, float, int], bool]:
"""
Find the two clusters with the highest score and merge them.
:param graph: kNN graph.
:param df: input dataframe.
:param alpha: exponent of relative closeness; the larger, the more important relative closeness is than
relative interconnectivity.
:param beta: exponent of the rho factor; the larger, the less encouraged the merging of clusters connected
by a large number of edges relative to the number of edges inside the cluster.
:param m_fact: multiplicative factor for clusters composed of a single node.
:param k: desired number of clusters.
:param verbose: if True, print additional infos.
:param verbose2: if True, print labels of merging clusters and their score.
:return: input dataframe with clustering label column, maximum merging score and newly merged cluster label.
"""
clusters = np.unique(df["cluster"])
max_score = 0
ci, cj = -1, -1
if len(clusters) <= k:
return False
for combination in itertools.combinations(clusters, 2):
i, j = combination
if i != j:
if verbose:
print(f"Checking c{i} c{j}.")
gi = get_cluster(graph, i)
gj = get_cluster(graph, j)
edges = connecting_edges((gi, gj), graph)
if not edges:
continue
ms = merge_score2(graph, gi, gj, alpha, beta, m_fact)
if verbose:
print(f"Merge score: {ms}.")
if ms > max_score:
if verbose:
print(f"Better than: {max_score}.")
max_score = ms
ci, cj = i, j
if max_score > 0:
if verbose2:
print(f"Merging c{ci} and c{cj}.")
print(f"score: {max_score}.")
df.loc[df["cluster"] == cj, "cluster"] = ci
for i, p in enumerate(graph.nodes()):
if graph.nodes[p]["cluster"] == cj:
graph.nodes[p]["cluster"] = ci
else:
print("No Merging.")
print(f"score: {max_score}.")
print("Early stopping.")
print("Increase k of kNN to perform each merging step.")
return df, max_score, ci
def cluster2(df: pd.DataFrame, k: int = None, knn: int = None, m: int = 30, alpha: float = 2.0, beta: float = 1,
m_fact: float = 1e3, verbose: bool = False, verbose1: bool = True, verbose2: bool = True,
plot: bool = True, auto_extract: bool = False) -> Tuple[pd.DataFrame, Dict[int, float]]:
"""
:param df: input dataframe.
:param k: desired number of clusters.
:param knn: parameter k of K-nearest_neighbors.
:param m: number of clusters to reach in the initial clustering phase.
:param alpha: exponent of relative closeness; the larger, the more important relative closeness is than
relative interconnectivity.
:param beta: exponent of the rho factor; the larger, the less encouraged the merging of clusters connected
by a large number of edges relative to the number of edges inside the cluster.
:param m_fact: multiplicative factor for clusters composed of a single node.
:param verbose: if True, print general infos.
:param verbose1: if True, print infos about the prepartitioning phase.
:param verbose2: if True, print labels of merging clusters and their scores in the merging phase.
:param plot: if True, show plots.
:param auto_extract: if True, try to extract the optimal number of clusters and print it.
:return: dataframe with cluster labels and dictionary of merging scores (similarities).
"""
if knn is None:
knn = int(round(2 * np.log(len(df))))
if k is None:
k = 1
if verbose:
print(f"Building symmetrical kNN graph (k = {knn})...")
graph_knn = knn_graph(df=df, k=knn, symmetrical=True, verbose=verbose1)
if plot:
plot2d_graph(graph_knn, print_clust=False)
graph_pp = pre_part_graph(graph_knn, m, df, verbose1, plotting=plot)
if verbose:
print("flood fill...")
graph_ff, increased_m = flood_fill(graph_pp, graph_knn, df)
m = increased_m
if verbose:
print(f"new m: {m}")
if plot:
plot2d_graph(graph_ff, print_clust=False)
merging_similarities = {}
iterm = (
tqdm(enumerate(range(m - k)), total=m - k)
if verbose1
else enumerate(range(m - k))
)
for i, _ in iterm:
df, ms, ci = merge_best2(
graph_ff, df, alpha, beta, m_fact, k, False, verbose2
)
if ms == 0:
break
merging_similarities[m - (i + 1)] = ms
if plot:
plot2d_data(df, ci)
if verbose:
print(f"merging_similarities: {merging_similarities}")
res = rebuild_labels(df)
if auto_extract is True:
extract_optimal_n_clust(merging_similarities, m)
return res, merging_similarities
def connected_components(connected_points: dict) -> Generator:
"""
Find connected components from a dictionary of connected nodes.
:param connected_points: (symmetrically) connected points.
:return: connected components.
"""
seen = set()
for root in list(connected_points.keys()):
if root not in seen:
seen.add(root)
component = []
queue = deque([root])
while queue:
node = queue.popleft()
component.append(node)
for neighbor in connected_points[node]:
if neighbor not in seen:
seen.add(neighbor)
queue.append(neighbor)
yield component
def prepro_edge(graph: nx.Graph) -> OrderedDict:
"""
Build a dictionary having points as keys and all the points that are symmetrically
connected through edges to the key point as values, i.e. 0: [5, 7] means that there are edges 0->5 and 0->7, but
also 5->0 and 7->0.
:param graph: kNN graph.
:return: dictionary of symmetrically connected points.
"""
z = np.array((graph.edges()))
g = pd.DataFrame(z, columns=["a", "b"])
g_bis = pd.concat([g["b"], g["a"]], axis=1, keys=["a", "b"])
g = g.append(g_bis, ignore_index=True)
g["b"] = g["b"].astype("str")
g1 = g.groupby("a")["b"].apply(lambda x: ",".join(x))
g1 = g1.apply(lambda x: x.split(","))
for k in list(g1.index):
g1[k] = [int(i) for i in g1[k]]
g1 = dict(g1)
for i in range(len(graph)):
if i not in list(g1.keys()):
g1[i] = []
g1 = OrderedDict(sorted(g1.items(), key=lambda t: t[0]))
return g1
# def conn_comp(graph: nx.Graph) -> List[list]:
# """
# Find the connected components of the input graph, e.g. [[0,2], [1,3]], with numbers corresponding to nodes.
#
# :param graph: kNN graph.
# :return: list of connected componenents, each one identified by its nodes.
# """
# sym_connected_points = prepro_edge(graph)
#
# return list(connected_components(sym_connected_points))
def flood_fill(preprocessed_graph: NxGraph, knn_graph: NxGraph, df: pd.DataFrame) -> Tuple[NxGraph, int]:
"""
Find clusters composed by more than one connected component and divide them accordingly. Adjust
the parameter m, which indicates the number of clusters to reach in the initial phase.
:param preprocessed_graph: clustered kNN graph.
:param knn_graph: kNN graph.
:param df: input dataframe.
:return: preprocessed graph with updated cluster labels, new m parameter.
"""
len_0_clusters = 0
cl_dict = {
list(preprocessed_graph.nodes)[i]: preprocessed_graph.nodes[i]["cluster"]
for i in range(len(preprocessed_graph))
}
new_cl_ind = max(cl_dict.values()) + 1
dic_edge = prepro_edge(knn_graph)
# print(cl_dict)
# print("******"*10)
# print(dic_edge)
for num in range(max(cl_dict.values()) + 1):
points = [k for k, v in cl_dict.items() if v == num]
restr_dict = {p: dic_edge[p] for p in points}
r_dict = {}
for k in restr_dict.keys():
r_dict[k] = [i for i in restr_dict[k] if i in points]
cc_list = list(connected_components(r_dict))
print("cluster_label: {0}, #_connected_components: {1}".format(num, len(cc_list)))
if len(cc_list) == 1:
continue
elif len(cc_list) == 0:
len_0_clusters += 1
else:
# skip the first
for component in cc_list[1:]:
print(f"new index for the component: {new_cl_ind}")
for el in component:
cl_dict[el] = new_cl_ind
new_cl_ind += 1
df["cluster"] = list(cl_dict.values())
for i in range(len(preprocessed_graph)):
preprocessed_graph.nodes[i]["cluster"] = cl_dict[i]
increased_m = max(cl_dict.values()) + 1 - len_0_clusters
return preprocessed_graph, increased_m
def dendrogram_height(merging_similarities: Dict[int, float], m: int) -> Dict[int, float]:
"""
Find dendrogram height, defined with a recursive sum of the reciprocal of the merging scores.
:param merging_similarities: merging scores of the algorithm.
:param m: initial number of clusters.
:return: dendrogram height.
"""
dh = {m - 1: (1 / merging_similarities[m - 1])}
for i in list(merging_similarities.keys())[:-1]:
dh[i - 1] = dh[i] + 1 / merging_similarities[i - 1]
return dh
def find_bigger_jump(dh: Dict[int, float], jump: float) -> float:
"""
Find a bigger jump in the dendrogram levels.
:param dh: dendrogram height.
:param jump: threshold to exceed.
:return: best level where to cut off th dendrogram if found, else 0.
"""
lower = list(dh.values())[int(len(dh) / 2) + 1]
for i in list(range(int(len(dh) / 2), len(dh))):
upper = list(dh.values())[i]
if upper - lower > jump:
return lower + (upper - lower) / 2
lower = upper
return 0
def first_jump_cutoff(dh: Dict[int, float], mult: float, eta: float, m: int) -> float:
"""
Find the first large gap between tree level, which heuristically is the best level where clusters should be
divided.
:param dh: dendrogram height.
:param mult: additional factor.
:param eta: decrease coefficient.
:param m: initial number of clusters.
:return: best level where to cut off dendrogram.
"""
half = int(round(len(dh) / 2))
l = reversed(range(m - 1 - half, m - 1))
half_dict = {j: dh[j] for j in l}
avg = np.mean(list(half_dict.values()))
res = 0
while mult > 0:
res = find_bigger_jump(dh, mult * avg)
if res != 0:
return res
else:
mult /= eta
def find_nearest_height(dh: Dict[int, float], value: float) -> int:
"""
Find nearest height to cutoff value.
:param dh: dendrogram height.
:param value: first_jump cutoff value.
:return: nearest dendrogram height to cutoff value.
"""
idx = np.searchsorted(list(dh.values()), value, side="left")
el = list(dh.values())[idx]
key_list = [k for (k, v) in dh.items() if v == el]
return key_list[0]
def extract_optimal_n_clust(merging_similarities: Dict[int, float], m: int, f: float = 1000, eta: float = 2) -> None:
"""
Extract the optimal number of clusters using the dendrogram.
:param merging_similarities: merging scores of the algorithm.
:param m: initial number of clusters.
:param f: threshold parameter to determine if jump is large enough.
:param eta: decrease coefficient.
"""
dh = dendrogram_height(merging_similarities, m)
if len(dh) <= 3:
print("Insufficient merging steps to perform auto_extract; decrease k and/or increase m.")
return
fjc = first_jump_cutoff(dh, f, eta, m)
opt_n_clust = find_nearest_height(dh, fjc)
print(f"Optimal number of clusters: {opt_n_clust}")
| [
"collections.deque",
"numpy.unique",
"clustviz.chameleon.graphtools.get_weights",
"clustviz.chameleon.graphtools.plot2d_data",
"clustviz.chameleon.chameleon.get_cluster",
"clustviz.chameleon.chameleon.len_edges",
"itertools.combinations",
"clustviz.chameleon.graphtools.connecting_edges",
"clustviz.chameleon.chameleon.rebuild_labels",
"clustviz.chameleon.chameleon.internal_closeness",
"clustviz.chameleon.graphtools.plot2d_graph",
"pandas.DataFrame",
"clustviz.chameleon.graphtools.pre_part_graph",
"pandas.concat",
"clustviz.chameleon.graphtools.knn_graph"
] | [((1326, 1373), 'clustviz.chameleon.graphtools.connecting_edges', 'connecting_edges', (['(cluster_i, cluster_j)', 'graph'], {}), '((cluster_i, cluster_j), graph)\n', (1342, 1373), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((5405, 5429), 'numpy.unique', 'np.unique', (["df['cluster']"], {}), "(df['cluster'])\n", (5414, 5429), True, 'import numpy as np\n'), ((5540, 5575), 'itertools.combinations', 'itertools.combinations', (['clusters', '(2)'], {}), '(clusters, 2)\n', (5562, 5575), False, 'import itertools\n'), ((8345, 8404), 'clustviz.chameleon.graphtools.knn_graph', 'knn_graph', ([], {'df': 'df', 'k': 'knn', 'symmetrical': '(True)', 'verbose': 'verbose1'}), '(df=df, k=knn, symmetrical=True, verbose=verbose1)\n', (8354, 8404), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((8486, 8543), 'clustviz.chameleon.graphtools.pre_part_graph', 'pre_part_graph', (['graph_knn', 'm', 'df', 'verbose1'], {'plotting': 'plot'}), '(graph_knn, m, df, verbose1, plotting=plot)\n', (8500, 8543), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((9306, 9324), 'clustviz.chameleon.chameleon.rebuild_labels', 'rebuild_labels', (['df'], {}), '(df)\n', (9320, 9324), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((10629, 10664), 'pandas.DataFrame', 'pd.DataFrame', (['z'], {'columns': "['a', 'b']"}), "(z, columns=['a', 'b'])\n", (10641, 10664), True, 'import pandas as pd\n'), ((10677, 10729), 'pandas.concat', 'pd.concat', (["[g['b'], g['a']]"], {'axis': '(1)', 'keys': "['a', 'b']"}), "([g['b'], g['a']], axis=1, keys=['a', 'b'])\n", (10686, 10729), True, 'import pandas as pd\n'), ((817, 851), 'clustviz.chameleon.chameleon.internal_closeness', 'internal_closeness', (['graph', 'cluster'], {}), '(graph, cluster)\n', (835, 851), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((854, 879), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster'], {}), '(graph, cluster)\n', (863, 879), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((1501, 1537), 'clustviz.chameleon.chameleon.internal_closeness', 'internal_closeness', (['graph', 'cluster_i'], {}), '(graph, cluster_i)\n', (1519, 1537), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((1547, 1583), 'clustviz.chameleon.chameleon.internal_closeness', 'internal_closeness', (['graph', 'cluster_j'], {}), '(graph, cluster_j)\n', (1565, 1583), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((2608, 2655), 'clustviz.chameleon.graphtools.connecting_edges', 'connecting_edges', (['(cluster_i, cluster_j)', 'graph'], {}), '((cluster_i, cluster_j), graph)\n', (2624, 2655), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((8427, 8469), 'clustviz.chameleon.graphtools.plot2d_graph', 'plot2d_graph', (['graph_knn'], {'print_clust': '(False)'}), '(graph_knn, print_clust=False)\n', (8439, 8469), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((8744, 8785), 'clustviz.chameleon.graphtools.plot2d_graph', 'plot2d_graph', (['graph_ff'], {'print_clust': '(False)'}), '(graph_ff, print_clust=False)\n', (8756, 8785), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((1448, 1473), 'clustviz.chameleon.graphtools.get_weights', 'get_weights', (['graph', 'edges'], {}), '(graph, edges)\n', (1459, 1473), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((1633, 1660), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_i'], {}), '(graph, cluster_i)\n', (1642, 1660), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((1680, 1707), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_j'], {}), '(graph, cluster_j)\n', (1689, 1707), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((2490, 2517), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_i'], {}), '(graph, cluster_i)\n', (2499, 2517), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((2528, 2555), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_j'], {}), '(graph, cluster_j)\n', (2537, 2555), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((2676, 2703), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_i'], {}), '(graph, cluster_i)\n', (2685, 2703), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((2705, 2732), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_j'], {}), '(graph, cluster_j)\n', (2714, 2732), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((5710, 5731), 'clustviz.chameleon.chameleon.get_cluster', 'get_cluster', (['graph', 'i'], {}), '(graph, i)\n', (5721, 5731), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((5749, 5770), 'clustviz.chameleon.chameleon.get_cluster', 'get_cluster', (['graph', 'j'], {}), '(graph, j)\n', (5760, 5770), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((5791, 5824), 'clustviz.chameleon.graphtools.connecting_edges', 'connecting_edges', (['(gi, gj)', 'graph'], {}), '((gi, gj), graph)\n', (5807, 5824), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((9197, 9216), 'clustviz.chameleon.graphtools.plot2d_data', 'plot2d_data', (['df', 'ci'], {}), '(df, ci)\n', (9208, 9216), False, 'from clustviz.chameleon.graphtools import connecting_edges, get_weights, plot2d_data, plot2d_graph, knn_graph, pre_part_graph\n'), ((9866, 9879), 'collections.deque', 'deque', (['[root]'], {}), '([root])\n', (9871, 9879), False, 'from collections import OrderedDict, deque\n'), ((1792, 1819), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_i'], {}), '(graph, cluster_i)\n', (1801, 1819), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n'), ((1822, 1849), 'clustviz.chameleon.chameleon.len_edges', 'len_edges', (['graph', 'cluster_j'], {}), '(graph, cluster_j)\n', (1831, 1849), False, 'from clustviz.chameleon.chameleon import internal_closeness, get_cluster, len_edges, rebuild_labels\n')] |
"""
# Search View
Some Desc
## Inheritance
SearchView<-BaseView
### BaseView function dependencies
- _format_facets
"""
from urllib.parse import urlencode
from pyramid.httpexceptions import HTTPBadRequest # pylint: disable=import-error
from elasticsearch.helpers import scan # pylint: disable=import-error
from snovault.elasticsearch.interfaces import RESOURCES_INDEX
from snovault.helpers.helper import (
sort_query,
get_filtered_query,
set_sort_order,
get_search_fields,
list_visible_columns_for_schemas,
list_result_fields,
set_filters,
set_facets,
iter_long_json,
format_results,
get_pagination,
prepare_search_term,
normalize_query,
)
from snovault.viewconfigs.base_view import BaseView
class SearchView(BaseView): # pylint: disable=too-few-public-methods
'''Search View'''
view_name = 'search'
def __init__(
self,
context,
request,
search_type=None,
return_generator=False,
default_doc_types=None
):
# pylint: disable=too-many-arguments
super(SearchView, self).__init__(context, request)
self._search_type = search_type
self._return_generator = return_generator
self._default_doc_types = default_doc_types or []
self._context = context
def preprocess_view(self, views=None, search_result_actions=None): # pylint: disable=too-many-statements, too-many-branches, too-many-locals
'''
Main function to construct query and build view results json
* Only publicly accessible function
'''
types = self._types
search_base = normalize_query(self._request)
result = {
'@context': self._request.route_path('jsonld_context'),
'@id': '/search/' + search_base,
'@type': ['Search'],
'title': 'Search',
'filters': [],
}
es_index = RESOURCES_INDEX
search_audit = self._request.has_permission('search_audit')
from_, size = get_pagination(self._request)
search_term = prepare_search_term(self._request)
if (
hasattr(self._context, 'type_info') and
hasattr(self._context.type_info, 'name') and
self._context.type_info.name
):
doc_types = [self._context.type_info.name]
else:
doc_types = self._request.params.getall('type')
if '*' in doc_types:
doc_types = ['Item']
# Normalize to item_type
try:
doc_types = sorted({types[name].name for name in doc_types})
except KeyError:
# Check for invalid types
bad_types = [t for t in doc_types if t not in types]
msg = "Invalid type: {}".format(', '.join(bad_types))
raise HTTPBadRequest(explanation=msg)
searchterm_specs = self._request.params.getall('searchTerm')
searchterm_only = urlencode(
[
("searchTerm", searchterm)
for searchterm in searchterm_specs
]
)
if searchterm_only:
clear_qs = searchterm_only
else:
clear_qs = urlencode([("type", typ) for typ in doc_types])
search_route = self._request.route_path('search', slash='/')
clear_route = '?' + clear_qs if clear_qs else ''
result['clear_filters'] = search_route + clear_route
if not doc_types:
if self._request.params.get('mode') == 'picker':
doc_types = ['Item']
else:
doc_types = self._default_doc_types
else:
for item_type in doc_types:
t_thing = types[item_type]
q_thing = urlencode(
[
(k.encode('utf-8'), v.encode('utf-8'))
for k, v in self._request.params.items()
if not (k == 'type' and types['Item' if v == '*' else v] is t_thing)
]
)
result['filters'].append({
'field': 'type',
'term': t_thing.name,
'remove': '{}?{}'.format(self._request.path, q_thing)
})
if views:
result['views'] = views
search_fields, _ = get_search_fields(self._request, doc_types)
query = get_filtered_query(
search_term,
search_fields,
sorted(list_result_fields(self._request, doc_types)),
self._principals,
doc_types,
)
schemas = [types[doc_type].schema for doc_type in doc_types]
columns = list_visible_columns_for_schemas(self._request, schemas)
if columns:
result['columns'] = columns
if search_term == '*':
del query['query']['query_string']
else:
query['query']['query_string']['fields'].extend(
['_all', '*.uuid', '*.md5sum', '*.submitted_file_name']
)
set_sort_order(self._request, search_term, types, doc_types, query, result)
used_filters = set_filters(self._request, query, result)
facets = [
('type', {'title': 'Data Type'}),
]
if len(doc_types) == 1 and 'facets' in types[doc_types[0]].schema:
facets.extend(types[doc_types[0]].schema['facets'].items())
for audit_facet in self._audit_facets:
if (
search_audit and
'group.submitter' in self._principals or
'INTERNAL_ACTION' not in audit_facet[0]
):
facets.append(audit_facet)
query['aggs'] = set_facets(facets, used_filters, self._principals, doc_types)
query = sort_query(query)
do_scan = size is None or size > 1000
if not self._request.params.get('type') or 'Item' in doc_types:
es_index = RESOURCES_INDEX
else:
es_index = [
types[type_name].item_type
for type_name in doc_types
if hasattr(types[type_name], 'item_type')
]
if do_scan:
es_results = self._elastic_search.search(
body=query,
index=es_index,
search_type='query_then_fetch'
)
else:
es_results = self._elastic_search.search(
body=query,
index=es_index,
from_=from_, size=size,
request_cache=True
)
total = es_results['hits']['total']
result['total'] = total
schemas = (types[item_type].schema for item_type in doc_types)
result['facets'] = self._format_facets(
es_results,
facets,
used_filters,
schemas,
total,
self._principals
)
if search_result_actions:
result.update(
search_result_actions(
self._request, doc_types, es_results
)
)
if size is not None and size < result['total']:
params = [(k, v) for k, v in self._request.params.items() if k != 'limit']
params.append(('limit', 'all'))
result['all'] = '%s?%s' % (
self._request.resource_path(self._context),
urlencode(params)
)
if not result['total']:
self._request.response.status_code = 404
result['notification'] = 'No results found'
result['@graph'] = []
return result if not self._return_generator else []
result['notification'] = 'Success'
if not do_scan:
graph = format_results(
self._request,
es_results['hits']['hits'],
result
)
if self._return_generator:
return graph
result['@graph'] = list(graph)
return result
del query['aggs']
if size is None:
hits = scan(
self._elastic_search,
query=query,
index=es_index,
preserve_order=False
)
else:
hits = scan(
self._elastic_search,
query=query,
index=es_index,
from_=from_,
size=size,
preserve_order=False
)
graph = format_results(self._request, hits, result)
if self._request.__parent__ is not None or self._return_generator:
if self._return_generator:
return graph
result['@graph'] = list(graph)
return result
app_iter = iter_long_json('@graph', graph, result)
self._request.response.content_type = 'application/json'
if str is bytes: # Python 2 vs 3 wsgi differences
self._request.response.app_iter = app_iter # Python 2
else:
self._request.response.app_iter = (
item.encode('utf-8') for item in app_iter
)
return self._request.response
| [
"snovault.helpers.helper.get_search_fields",
"elasticsearch.helpers.scan",
"pyramid.httpexceptions.HTTPBadRequest",
"urllib.parse.urlencode",
"snovault.helpers.helper.set_facets",
"snovault.helpers.helper.normalize_query",
"snovault.helpers.helper.sort_query",
"snovault.helpers.helper.iter_long_json",
"snovault.helpers.helper.list_result_fields",
"snovault.helpers.helper.prepare_search_term",
"snovault.helpers.helper.format_results",
"snovault.helpers.helper.get_pagination",
"snovault.helpers.helper.set_filters",
"snovault.helpers.helper.list_visible_columns_for_schemas",
"snovault.helpers.helper.set_sort_order"
] | [((1677, 1707), 'snovault.helpers.helper.normalize_query', 'normalize_query', (['self._request'], {}), '(self._request)\n', (1692, 1707), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((2066, 2095), 'snovault.helpers.helper.get_pagination', 'get_pagination', (['self._request'], {}), '(self._request)\n', (2080, 2095), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((2118, 2152), 'snovault.helpers.helper.prepare_search_term', 'prepare_search_term', (['self._request'], {}), '(self._request)\n', (2137, 2152), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((3001, 3075), 'urllib.parse.urlencode', 'urlencode', (["[('searchTerm', searchterm) for searchterm in searchterm_specs]"], {}), "([('searchTerm', searchterm) for searchterm in searchterm_specs])\n", (3010, 3075), False, 'from urllib.parse import urlencode\n'), ((4398, 4441), 'snovault.helpers.helper.get_search_fields', 'get_search_fields', (['self._request', 'doc_types'], {}), '(self._request, doc_types)\n', (4415, 4441), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((4746, 4802), 'snovault.helpers.helper.list_visible_columns_for_schemas', 'list_visible_columns_for_schemas', (['self._request', 'schemas'], {}), '(self._request, schemas)\n', (4778, 4802), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((5110, 5185), 'snovault.helpers.helper.set_sort_order', 'set_sort_order', (['self._request', 'search_term', 'types', 'doc_types', 'query', 'result'], {}), '(self._request, search_term, types, doc_types, query, result)\n', (5124, 5185), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((5209, 5250), 'snovault.helpers.helper.set_filters', 'set_filters', (['self._request', 'query', 'result'], {}), '(self._request, query, result)\n', (5220, 5250), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((5781, 5842), 'snovault.helpers.helper.set_facets', 'set_facets', (['facets', 'used_filters', 'self._principals', 'doc_types'], {}), '(facets, used_filters, self._principals, doc_types)\n', (5791, 5842), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((5859, 5876), 'snovault.helpers.helper.sort_query', 'sort_query', (['query'], {}), '(query)\n', (5869, 5876), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((8589, 8632), 'snovault.helpers.helper.format_results', 'format_results', (['self._request', 'hits', 'result'], {}), '(self._request, hits, result)\n', (8603, 8632), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((8864, 8903), 'snovault.helpers.helper.iter_long_json', 'iter_long_json', (['"""@graph"""', 'graph', 'result'], {}), "('@graph', graph, result)\n", (8878, 8903), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((3248, 3295), 'urllib.parse.urlencode', 'urlencode', (["[('type', typ) for typ in doc_types]"], {}), "([('type', typ) for typ in doc_types])\n", (3257, 3295), False, 'from urllib.parse import urlencode\n'), ((7837, 7902), 'snovault.helpers.helper.format_results', 'format_results', (['self._request', "es_results['hits']['hits']", 'result'], {}), "(self._request, es_results['hits']['hits'], result)\n", (7851, 7902), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((8172, 8249), 'elasticsearch.helpers.scan', 'scan', (['self._elastic_search'], {'query': 'query', 'index': 'es_index', 'preserve_order': '(False)'}), '(self._elastic_search, query=query, index=es_index, preserve_order=False)\n', (8176, 8249), False, 'from elasticsearch.helpers import scan\n'), ((8361, 8467), 'elasticsearch.helpers.scan', 'scan', (['self._elastic_search'], {'query': 'query', 'index': 'es_index', 'from_': 'from_', 'size': 'size', 'preserve_order': '(False)'}), '(self._elastic_search, query=query, index=es_index, from_=from_, size=\n size, preserve_order=False)\n', (8365, 8467), False, 'from elasticsearch.helpers import scan\n'), ((2874, 2905), 'pyramid.httpexceptions.HTTPBadRequest', 'HTTPBadRequest', ([], {'explanation': 'msg'}), '(explanation=msg)\n', (2888, 2905), False, 'from pyramid.httpexceptions import HTTPBadRequest\n'), ((4549, 4593), 'snovault.helpers.helper.list_result_fields', 'list_result_fields', (['self._request', 'doc_types'], {}), '(self._request, doc_types)\n', (4567, 4593), False, 'from snovault.helpers.helper import sort_query, get_filtered_query, set_sort_order, get_search_fields, list_visible_columns_for_schemas, list_result_fields, set_filters, set_facets, iter_long_json, format_results, get_pagination, prepare_search_term, normalize_query\n'), ((7479, 7496), 'urllib.parse.urlencode', 'urlencode', (['params'], {}), '(params)\n', (7488, 7496), False, 'from urllib.parse import urlencode\n')] |
from django.contrib import admin
# Register your models here.
from django.contrib import admin
from .models import User
# Register your models here.
# admin.site.register(User)
@admin.register(User)
class AuthorizationUserAdmin(admin.ModelAdmin):
# 不显示open_id
exclude = ['open_id']
pass | [
"django.contrib.admin.register"
] | [((183, 203), 'django.contrib.admin.register', 'admin.register', (['User'], {}), '(User)\n', (197, 203), False, 'from django.contrib import admin\n')] |
# Generate some simple large join queries to evaluate imputedb
import random
# tables from cdc data, share id column for joins
tables = ['demo', 'labs', 'exams']
# use a single column from each table for projection
missing_col = {'demo': 'marital_status', 'labs': 'creatine', 'exams': 'waist_circumference'}
def join_query(tables):
n = len(tables)
tids = range(0, n)
from_clause = ['%s t%d' % (tbl, id) for id, tbl in zip(tids, tables)]
from_clause = ', '.join(from_clause)
where_clause = ['t%d.id = t%d.id' % (t1, t2) for t1, t2 in zip(tids, tids[1:])]
where_clause = " and ".join(where_clause)
select_clause = ['t%d.%s' % (id, missing_col[table]) for id, table in zip(tids, tables)]
select_clause = ', '.join(select_clause)
return "select %s from %s where %s" % (select_clause, from_clause, where_clause)
def self_join(tbl, n):
return join_query([tbl] * n)
def create_join_workload(n_tables, n_queries):
random.seed(1)
workload = []
# n random queries with joins of given size
for _ in range(0, n_queries):
random_tables = [tables[random.randint(0, len(tables) - 1)] for _ in range(0, n_tables)]
workload.append(join_query(random_tables))
return '\n'.join([q + ';' for q in workload])
| [
"random.seed"
] | [((932, 946), 'random.seed', 'random.seed', (['(1)'], {}), '(1)\n', (943, 946), False, 'import random\n')] |
from django.contrib import admin
from .models import *
# Register your models here.
@admin.register(Staff)
class StaffAdmin(admin.ModelAdmin):
# To show in admin app
list_display = (
'staff_id',
'user',
'first_name',
'middle_name',
'last_name',
'contact_no',
'address',
'email_address',
)
# Adding search bar
search_fields = [
'staff_id',
'user',
'first_name',
'middle_name',
'last_name',
'contact_no',
'address',
'email_address',
]
# Categorizing the fields
fieldsets = (
(None, {
'fields': ('profile_picture', ('first_name', 'middle_name', 'last_name'),)
}),
('Contact Information', {
'fields': (('contact_no', 'email_address'), 'address')
})
)
@admin.register(Customer)
class CustomerAdmin(admin.ModelAdmin):
list_display = (
'customer_id',
'first_name',
'middle_name',
'last_name',
'contact_no',
'address',
'email_address',
)
search_fields = [
'customer_id',
'first_name',
'middle_name',
'last_name',
'contact_no',
'address',
'email_address',
]
fieldsets = (
(None, {
'fields': (('first_name', 'middle_name', 'last_name'),)
}),
('Contact Information', {
'fields': (('contact_no', 'email_address'), 'address')
})
)
@admin.register(Reservation)
class ReservationAdmin(admin.ModelAdmin):
list_display = (
'reservation_id',
'customer',
'staff',
'no_of_children',
'no_of_adults',
'reservation_date_time',
'expected_arrival_date_time',
'expected_departure_date_time',
)
@admin.register(Room)
class RoomAdmin(admin.ModelAdmin):
list_display = (
'room_no',
'room_type',
'reservation',
'availability',
'display_facility',
)
# Adding filter
list_filter = ('room_type', 'availability')
filter_horizontal = ('facility',)
fields = (('room_no', 'room_type'), 'reservation', 'facility')
search_fields = [
'reservation__customer__first_name',
'reservation__customer__middle_name',
'reservation__customer__last_name',
]
@admin.register(Facility)
class FacilityAdmin(admin.ModelAdmin):
list_display = ('name', 'price')
@admin.register(RoomType)
class RoomTypeAdmin(admin.ModelAdmin):
list_display = ('name', 'price')
| [
"django.contrib.admin.register"
] | [((89, 110), 'django.contrib.admin.register', 'admin.register', (['Staff'], {}), '(Staff)\n', (103, 110), False, 'from django.contrib import admin\n'), ((874, 898), 'django.contrib.admin.register', 'admin.register', (['Customer'], {}), '(Customer)\n', (888, 898), False, 'from django.contrib import admin\n'), ((1540, 1567), 'django.contrib.admin.register', 'admin.register', (['Reservation'], {}), '(Reservation)\n', (1554, 1567), False, 'from django.contrib import admin\n'), ((1864, 1884), 'django.contrib.admin.register', 'admin.register', (['Room'], {}), '(Room)\n', (1878, 1884), False, 'from django.contrib import admin\n'), ((2401, 2425), 'django.contrib.admin.register', 'admin.register', (['Facility'], {}), '(Facility)\n', (2415, 2425), False, 'from django.contrib import admin\n'), ((2505, 2529), 'django.contrib.admin.register', 'admin.register', (['RoomType'], {}), '(RoomType)\n', (2519, 2529), False, 'from django.contrib import admin\n')] |
import pandas as pd
import json
def read_csv():
res = True
try:
with open('./dataset/market.csv'):
df = pd.read_csv('./dataset/market.csv',index_col=0)
FileRes = True
return df,res
except IOError as err:
print('dont have the market.csv in dataset folder,please use the getMarket.run() fuction')
res = False
return None,res
def set_json():
df,res = read_csv()
if(res == True):
df_json = df.to_json(orient = 'records', force_ascii = False)
fo = open("./dataset/market.json", "w", encoding='utf-8')
fo.write(df_json)
else:
print('have not data')
def get_json():
fo = open("./dataset/market.json", encoding='utf-8')
data = json.loads(fo.read())
return data
def get_id_list():
df,res = read_csv()
if(res == True):
data = df.ID.values
return data
else:
print('have not data')
def get_name_list():
df,res = read_csv()
if(res == True):
data = df.name.values
return data
else:
print('have not data')
| [
"pandas.read_csv"
] | [((119, 167), 'pandas.read_csv', 'pd.read_csv', (['"""./dataset/market.csv"""'], {'index_col': '(0)'}), "('./dataset/market.csv', index_col=0)\n", (130, 167), True, 'import pandas as pd\n')] |
def factorial_recursion(number: int) -> int:
"""
>>> factorial_recursion(5)
120
>>> factorial_recursion(0)
1
>>> import random
>>> import math
>>> numbers = list(range(0, 50))
>>> for num in numbers:
... assert factorial_recursion(num) == math.factorial(num)
>>> factorial_recursion(-1)
Traceback (most recent call last):
...
ValueError: factorial() not defined for negative values
"""
if number < 0:
raise ValueError("factorial() not defined for negative values")
return 1 if number == 0 or number == 1 else number * factorial_recursion(number - 1)
if __name__ == "__main__":
from doctest import testmod
testmod()
| [
"doctest.testmod"
] | [((696, 705), 'doctest.testmod', 'testmod', ([], {}), '()\n', (703, 705), False, 'from doctest import testmod\n')] |
import threading, time
import csmapi, SpecialModel
import db as db1
import ec_config
db1.connect(ec_config.DB_NAME)
excluded_dm = SpecialModel.control_channel_excluded_dm
def get_dm_from_MAC_addr(db, MAC_addr):
s = db1.get_session()
dm_name = (s.query(db1.DeviceModel.dm_name)
.select_from(db1.DeviceModel)
.join(db1.Device)
.filter(db1.DeviceModel.dm_id == db1.Device.dm_id)
.filter(db1.Device.mac_addr == MAC_addr)
.first())
s.close()
if dm_name: return dm_name[0]
else: return None
def get_MAC_addr_from_d_id(db, d_id):
s = db1.get_session()
MAC_addr = (s.query(db1.Device.mac_addr)
.select_from(db1.Device)
.filter(db1.Device.d_id == d_id)
.first())
s.close()
if MAC_addr: return MAC_addr[0]
else: return None
def get_MAC_addr_from_do_id(db, do_id):
s = db1.get_session()
MAC_list = (s.query(db1.Device.mac_addr)
.select_from(db1.Device)
.join(db1.DeviceObject)
.filter(db1.Device.d_id == db1.DeviceObject.d_id)
.filter(db1.DeviceObject.do_id == do_id)
.first())
s.close()
if MAC_list: return MAC_list[0]
else: return None
def get_do_id_list_from_MAC_addr(db, do_id, MAC_addr):
s = db1.get_session()
do_id_list = (s.query(db1.DeviceObject.do_id)
.select_from(db1.DeviceObject)
.join(db1.Device)
.filter(db1.Device.d_id == db1.DeviceObject.d_id)
.filter(db1.Device.mac_addr == MAC_addr))
s.close()
do_id_list = [column.do_id for column in do_id_list]
do_id_list.append(do_id) #因為從mac addr中查到的do_id都都是掛載中的,而這個新增的do_id是尚未存在的,所以要額外再加入列表
return do_id_list
def get_all_MAC_addr_from_p_id(db, p_id):
s = db1.get_session()
all_MAC_addr = (s.query(db1.Device.mac_addr)
.select_from(db1.Device)
.join(db1.DeviceObject)
.join(db1.Project)
.filter(db1.Device.d_id == db1.DeviceObject.d_id)
.filter(db1.DeviceObject.p_id == db1.Project.p_id)
.filter(db1.Project.p_id == p_id)
.all())
s.close()
all_MAC_addr_list = [mac[0] for mac in all_MAC_addr]
return all_MAC_addr_list
def wait_for_SET_DF_STATUS_RSP(MAC_addr, DF_STATUS, timestamp=None):
control_channel_timestamp = None
Command = ['RESUME',{'cmd_params':[]}]
for cycle in range (200):
try:
time.sleep(0.2)
RSP = csmapi.pull(MAC_addr, '__Ctl_I__')
if RSP == []:
#print ( 'threadID:', threading.get_ident(),': No response in cycle:',cycle)
continue
if control_channel_timestamp == RSP[0][0]: continue
control_channel_timestamp = RSP[0][0]
if (RSP[0][1][0] != 'SET_DF_STATUS_RSP'):
print ('cycle: ',cycle, 'threadID:', threading.get_ident(),': It is not SET_DF_STATUS_RSP, got', RSP[0][1][0])
continue
if RSP[0][1][1]['cmd_params'][0] != DF_STATUS:
print('\033[1;33;44m threadID:', threading.get_ident(), ': Wrong SET_DF_STATUS_RSP, keep waiting...\033[0m')
continue
break
except Exception as e:
print('Control Channel error: ', e)
continue
if (cycle != 199): print ( '\033[1;33;44m threadID:', threading.get_ident(),'Got SET_DF_STATIS_RSP, then send RESUME command.\033[0m')
else: print (DF_STATUS, '\033[1;33;44m threadID:', threading.get_ident(), 'Retry 200 times and failed to get SET_DF_STATUS, force send RESUME command.\033[0m')
csmapi.push(MAC_addr, '__Ctl_O__', Command)
def SET_DF_STATUS(db, do_id, d_id=None):
if d_id == None:
MAC_addr = get_MAC_addr_from_do_id(db, do_id)
else:
MAC_addr = get_MAC_addr_from_d_id(db, d_id)
if MAC_addr != None:
do_id_list = get_do_id_list_from_MAC_addr(db, do_id, MAC_addr)
Device_profile = (csmapi.pull(MAC_addr, 'profile'))
Real_df_list = Device_profile['df_list']
if (Device_profile['dm_name'] == 'MorSensor'):
DF_STATUS = ''
x=0
for x in range(len(Real_df_list)):
DF_STATUS = DF_STATUS + '1'
else:
Selected_df_list = []
'''
#Only return the selected df_list from the last binded do_od
Selected_df_list = (session.query(db1.DeviceFeature.df_name)
.join(db1.DFObject)
.filter(db1.DeviceFeature.df_id == db1.DFObject.df_id)
.filter(db1.DFObject.do_id == do_id)
)
'''
#Return the union of selected df_list from all binded do_id
s = db1.get_session()
for do_id in do_id_list:
Selected_df_list += (s.query(db1.DeviceFeature.df_name)
.select_from(db1.DeviceFeature)
.join(db1.DFObject)
.filter(db1.DeviceFeature.df_id == db1.DFObject.df_id)
.filter(db1.DFObject.do_id == do_id))
s.close()
DF_STATUS_list = ['0' for x in range(len(Real_df_list))]
for column in Selected_df_list:
try:
index = Real_df_list.index(column.df_name) #still need to deal with exception, otherwise it will cause crash!
except ValueError:
print('Feature not found: "{}"'.format(column.df_name))
else:
DF_STATUS_list[index] = '1'
DF_STATUS = ''.join(DF_STATUS_list)
Command = ['SET_DF_STATUS',{'cmd_params':[DF_STATUS]}]
print ('push to __Crl_O__:', Command)
csmapi.push(MAC_addr, '__Ctl_O__', Command)
s = db1.get_session()
prj_status = (s.query(db1.Project.status)
.select_from(db1.Project)
.join(db1.DeviceObject)
.filter(db1.DeviceObject.do_id == do_id)
.first())
s.close()
if prj_status != None:
if prj_status[0] == 'off':
print ('Project status == off')
return 200
else:
print ('Project status == on')
dm_name = get_dm_from_MAC_addr(db, MAC_addr)
if dm_name not in excluded_dm:
threading.Thread(target=wait_for_SET_DF_STATUS_RSP, name='Thd-'+MAC_addr, args=(MAC_addr,DF_STATUS)).start()
return 200
else:
print (dm_name, ' cannot handle with RESUME command, no RESUME command.')
return 200
print ('MAC_addr is None.')
return 400
def SUSPEND_device(db, do_id):
MAC_addr = get_MAC_addr_from_do_id(db, do_id)
dm_name = get_dm_from_MAC_addr(db, MAC_addr)
if dm_name not in excluded_dm:
Command = ['SUSPEND',{'cmd_params':[]}]
csmapi.push(MAC_addr, '__Ctl_O__', Command)
print ('SUSPEND_device:', MAC_addr)
return 200
def SUSPEND(db, p_id):
all_MAC_addr = get_all_MAC_addr_from_p_id(db, p_id)
Command = ['SUSPEND',{'cmd_params':[]}]
for MAC_addr in all_MAC_addr:
dm_name = get_dm_from_MAC_addr(db, MAC_addr)
if (dm_name not in excluded_dm):
csmapi.push(MAC_addr, '__Ctl_O__', Command)
print ('SUSPEND all devices', p_id)
return 200
def RESUME(db, p_id):
all_MAC_addr = get_all_MAC_addr_from_p_id(db, p_id)
Command = ['RESUME',{'cmd_params':[]}]
for MAC_addr in all_MAC_addr:
dm_name = get_dm_from_MAC_addr(db, MAC_addr)
if (dm_name not in excluded_dm):
csmapi.push(MAC_addr, '__Ctl_O__', Command)
print ('RESUME all devices', p_id)
return 200
| [
"csmapi.pull",
"time.sleep",
"db.connect",
"db.get_session",
"threading.get_ident",
"threading.Thread",
"csmapi.push"
] | [((85, 115), 'db.connect', 'db1.connect', (['ec_config.DB_NAME'], {}), '(ec_config.DB_NAME)\n', (96, 115), True, 'import db as db1\n'), ((222, 239), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (237, 239), True, 'import db as db1\n'), ((630, 647), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (645, 647), True, 'import db as db1\n'), ((930, 947), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (945, 947), True, 'import db as db1\n'), ((1359, 1376), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (1374, 1376), True, 'import db as db1\n'), ((1871, 1888), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (1886, 1888), True, 'import db as db1\n'), ((3804, 3847), 'csmapi.push', 'csmapi.push', (['MAC_addr', '"""__Ctl_O__"""', 'Command'], {}), "(MAC_addr, '__Ctl_O__', Command)\n", (3815, 3847), False, 'import csmapi, SpecialModel\n'), ((4156, 4188), 'csmapi.pull', 'csmapi.pull', (['MAC_addr', '"""profile"""'], {}), "(MAC_addr, 'profile')\n", (4167, 4188), False, 'import csmapi, SpecialModel\n'), ((6038, 6081), 'csmapi.push', 'csmapi.push', (['MAC_addr', '"""__Ctl_O__"""', 'Command'], {}), "(MAC_addr, '__Ctl_O__', Command)\n", (6049, 6081), False, 'import csmapi, SpecialModel\n'), ((6095, 6112), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (6110, 6112), True, 'import db as db1\n'), ((7251, 7294), 'csmapi.push', 'csmapi.push', (['MAC_addr', '"""__Ctl_O__"""', 'Command'], {}), "(MAC_addr, '__Ctl_O__', Command)\n", (7262, 7294), False, 'import csmapi, SpecialModel\n'), ((2599, 2614), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (2609, 2614), False, 'import threading, time\n'), ((2633, 2667), 'csmapi.pull', 'csmapi.pull', (['MAC_addr', '"""__Ctl_I__"""'], {}), "(MAC_addr, '__Ctl_I__')\n", (2644, 2667), False, 'import csmapi, SpecialModel\n'), ((3555, 3576), 'threading.get_ident', 'threading.get_ident', ([], {}), '()\n', (3574, 3576), False, 'import threading, time\n'), ((3691, 3712), 'threading.get_ident', 'threading.get_ident', ([], {}), '()\n', (3710, 3712), False, 'import threading, time\n'), ((5003, 5020), 'db.get_session', 'db1.get_session', ([], {}), '()\n', (5018, 5020), True, 'import db as db1\n'), ((7618, 7661), 'csmapi.push', 'csmapi.push', (['MAC_addr', '"""__Ctl_O__"""', 'Command'], {}), "(MAC_addr, '__Ctl_O__', Command)\n", (7629, 7661), False, 'import csmapi, SpecialModel\n'), ((7980, 8023), 'csmapi.push', 'csmapi.push', (['MAC_addr', '"""__Ctl_O__"""', 'Command'], {}), "(MAC_addr, '__Ctl_O__', Command)\n", (7991, 8023), False, 'import csmapi, SpecialModel\n'), ((3062, 3083), 'threading.get_ident', 'threading.get_ident', ([], {}), '()\n', (3081, 3083), False, 'import threading, time\n'), ((3276, 3297), 'threading.get_ident', 'threading.get_ident', ([], {}), '()\n', (3295, 3297), False, 'import threading, time\n'), ((6707, 6814), 'threading.Thread', 'threading.Thread', ([], {'target': 'wait_for_SET_DF_STATUS_RSP', 'name': "('Thd-' + MAC_addr)", 'args': '(MAC_addr, DF_STATUS)'}), "(target=wait_for_SET_DF_STATUS_RSP, name='Thd-' + MAC_addr,\n args=(MAC_addr, DF_STATUS))\n", (6723, 6814), False, 'import threading, time\n')] |
#!/usr/bin/env python
import os
import argparse
import tensorflow as tf
import numpy as np
from PIL import Image
def _int64_feature(values):
if not isinstance(values, (tuple, list)):
values = [values]
return tf.train.Feature(int64_list=tf.train.Int64List(value=values))
def _bytes_feature(values):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[values]))
def _float_feature(values):
return tf.train.Feature(float_list=tf.train.FloatList(value=values))
def _create_tfrecord(images, num_images, out_name):
"""Loop over all the images in filenames and create the TFRecord
"""
tfrecords_filename = os.path.join('./', out_name)
writer = tf.io.TFRecordWriter(tfrecords_filename)
with tf.Graph().as_default():
image_placeholder = tf.compat.v1.placeholder(dtype=tf.uint8, name='image_placeholder')
encoded_image = tf.image.encode_jpeg(image_placeholder)
i = 0
with tf.compat.v1.Session('') as sess:
for img_path in images:
print(img_path)
img = np.array(Image.open(img_path), np.uint8)
image_height = img.shape[0]
image_width = img.shape[1]
img_jpeg = sess.run(encoded_image, feed_dict={image_placeholder: img})
print("converting image number {}: {}".format(i, img_path))
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(image_height),
'width': _int64_feature(image_width),
'image_name': _bytes_feature(str.encode(os.path.basename(img_path))),
'image_jpeg': _bytes_feature(img_jpeg),
}))
writer.write(example.SerializeToString())
i += 1
if i > num_images:
break
writer.close()
return i
def get_image_files(data_dir, split_file):
print('data dir: {}'.format(data_dir))
print('split file: {}'.format(split_file))
files = []
if split_file == 'all':
file_list = os.listdir(data_dir)
for f in file_list:
if os.path.isfile(os.path.join(data_dir, f)):
files.append(os.path.join(data_dir, f))
print('file list: {}'.format(files))
else:
with open(split_file, 'r') as f:
for line in f:
line = line.strip()
img = line + '.png'
files.append(os.path.join(data_dir, img))
return files
def run(data_dir, split_file, num_images, out_name):
assert data_dir != '', 'no data directory'
assert split_file != '', 'no split file'
images = get_image_files(data_dir, split_file)
images_num = _create_tfrecord(images, num_images, out_name)
print('Done converting {} images'.format(images_num))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data', help="data directory", type=str, default='')
parser.add_argument('--split', help="split file", type=str, default='')
parser.add_argument('--num-images', help="limit the number of images", type=int, default=127)
parser.add_argument('--out-name', help="name of output file", type=str, default='output.tfrecord')
args = parser.parse_args()
run(args.data, args.split, args.num_images, args.out_name)
| [
"tensorflow.compat.v1.placeholder",
"tensorflow.Graph",
"os.listdir",
"PIL.Image.open",
"argparse.ArgumentParser",
"tensorflow.io.TFRecordWriter",
"os.path.join",
"tensorflow.train.Int64List",
"tensorflow.train.BytesList",
"tensorflow.train.FloatList",
"os.path.basename",
"tensorflow.compat.v1.Session",
"tensorflow.image.encode_jpeg"
] | [((653, 681), 'os.path.join', 'os.path.join', (['"""./"""', 'out_name'], {}), "('./', out_name)\n", (665, 681), False, 'import os\n'), ((695, 735), 'tensorflow.io.TFRecordWriter', 'tf.io.TFRecordWriter', (['tfrecords_filename'], {}), '(tfrecords_filename)\n', (715, 735), True, 'import tensorflow as tf\n'), ((2924, 2949), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2947, 2949), False, 'import argparse\n'), ((799, 865), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'dtype': 'tf.uint8', 'name': '"""image_placeholder"""'}), "(dtype=tf.uint8, name='image_placeholder')\n", (823, 865), True, 'import tensorflow as tf\n'), ((890, 929), 'tensorflow.image.encode_jpeg', 'tf.image.encode_jpeg', (['image_placeholder'], {}), '(image_placeholder)\n', (910, 929), True, 'import tensorflow as tf\n'), ((2129, 2149), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (2139, 2149), False, 'import os\n'), ((255, 287), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': 'values'}), '(value=values)\n', (273, 287), True, 'import tensorflow as tf\n'), ((358, 392), 'tensorflow.train.BytesList', 'tf.train.BytesList', ([], {'value': '[values]'}), '(value=[values])\n', (376, 392), True, 'import tensorflow as tf\n'), ((463, 495), 'tensorflow.train.FloatList', 'tf.train.FloatList', ([], {'value': 'values'}), '(value=values)\n', (481, 495), True, 'import tensorflow as tf\n'), ((957, 981), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', (['""""""'], {}), "('')\n", (977, 981), True, 'import tensorflow as tf\n'), ((746, 756), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (754, 756), True, 'import tensorflow as tf\n'), ((2208, 2233), 'os.path.join', 'os.path.join', (['data_dir', 'f'], {}), '(data_dir, f)\n', (2220, 2233), False, 'import os\n'), ((1090, 1110), 'PIL.Image.open', 'Image.open', (['img_path'], {}), '(img_path)\n', (1100, 1110), False, 'from PIL import Image\n'), ((2265, 2290), 'os.path.join', 'os.path.join', (['data_dir', 'f'], {}), '(data_dir, f)\n', (2277, 2290), False, 'import os\n'), ((2516, 2543), 'os.path.join', 'os.path.join', (['data_dir', 'img'], {}), '(data_dir, img)\n', (2528, 2543), False, 'import os\n'), ((1631, 1657), 'os.path.basename', 'os.path.basename', (['img_path'], {}), '(img_path)\n', (1647, 1657), False, 'import os\n')] |
import os
from datetime import datetime
from typing import Optional
import click
from sqlite_utils import Database
from .clickup_client import Client, Task, TimeEntry
def fetch_teams(db: Database, client: Client) -> None:
teams = client.get_teams()
teams_dict = []
members_dict = []
for team in teams:
team_json = team.dict()
for member in team_json.pop("members"):
members_dict.append({"team_id": team_json["id"], **member["user"]})
teams_dict.append(team_json)
db["teams"].insert_all(teams_dict, pk="id", replace=True)
db["members"].insert_all(members_dict, pk="id", replace=True)
db["members"].add_foreign_key("team_id", "teams", "id", ignore=True)
def fetch_spaces(db: Database, client: Client) -> None:
team_ids = [r["id"] for r in db.query("SELECT id FROM teams")]
space_dicts = []
for team_id in team_ids:
spaces = client.get_spaces(team_id)
for space in spaces:
space_dicts.append(space.dict())
db["spaces"].insert_all(space_dicts, pk="id", replace=True)
def fetch_folders_and_lists(db: Database, client: Client, space_id: str) -> None:
folder_dicts = client.get_folders_raw(space_id)
folders = []
lists = []
def format_list(list_dict: dict, folder_id: Optional[str] = None) -> dict:
if "folder" in list_dict and list_dict["folder"]:
list_dict["folder_id"] = list_dict.pop("folder")["id"]
else:
list_dict["folder_id"] = None
list_dict["space_id"] = list_dict.pop("space")["id"]
return list_dict
for folder_dict in folder_dicts:
folder_dict["space_id"] = folder_dict.pop("space")["id"]
list_dicts = folder_dict.pop("lists")
lists.extend(
format_list(list_dict, folder_dict["id"]) for list_dict in list_dicts
)
folders.append(folder_dict)
folderless_list_dicts = client.get_folderless_lists_raw(space_id)
for list_dict in folderless_list_dicts:
lists.append(format_list(list_dict))
db["folders"].insert_all(folders, pk="id", replace=True)
db["folders"].add_foreign_key("space_id", "spaces", "id", ignore=True)
db["lists"].insert_all(lists, pk="id", replace=True)
db["lists"].add_foreign_key("folder_id", "folders", "id", ignore=True)
db["lists"].add_foreign_key("space_id", "spaces", "id", ignore=True)
def fetch_tasks(db: Database, client: Client, team_id: str) -> None:
tasks = list(
client.get_filtered_team_tasks(
team_id, {"include_closed": True, "subtasks": True}
)
)
def format_task(task: Task) -> dict:
task_dict = task.dict(exclude={"list", "project", "folder", "space"})
task_dict.update(
{
"list_id": task.list.id,
"project_id": task.project.id,
"folder_id": task.folder.id,
"space_id": task.space.id,
}
)
return task_dict
task_dicts = [format_task(t) for t in tasks]
db["tasks"].insert_all(task_dicts, pk="id", replace=True)
db["tasks"].add_foreign_key("list_id", "lists", "id", ignore=True)
db["tasks"].add_foreign_key("folder_id", "folders", "id", ignore=True)
db["tasks"].add_foreign_key("space_id", "spaces", "id", ignore=True)
db["tasks"].add_foreign_key("team_id", "teams", "id", ignore=True)
def fetch_time_entries(
db: Database, client: Client, team_id: str, start_date: datetime, end_date: datetime
) -> None:
time_entries = list(
client.get_time_entries_within_a_date_range(
team_id, start_date=start_date, end_date=end_date
)
)
def format_time_entries(time: TimeEntry) -> dict:
time_dict = time.dict(exclude={"task", "user", "duration"})
time_dict.update(
{
"team_id": team_id,
"user_id": time.user.id,
"task_id": time.task.id if time.task is not None else None,
"duration": time.duration.total_seconds(),
}
)
return time_dict
time_dicts = [format_time_entries(t) for t in time_entries]
db["timeentries"].insert_all(time_dicts, pk="id", replace=True)
db["timeentries"].add_foreign_key("task_id", "tasks", "id", ignore=True)
db["timeentries"].add_foreign_key("user_id", "members", "id", ignore=True)
db["timeentries"].add_foreign_key("team_id", "teams", "id", ignore=True)
@click.group()
def cli():
pass
@cli.command()
@click.argument(
"db_path",
type=click.Path(file_okay=True, dir_okay=False, allow_dash=False),
required=True,
)
@click.option(
"--access-token",
envvar="CLICKUP_ACCESS_TOKEN",
help=(
"Your personal access token. Retrieve from ClickUp at "
"'Settings > My Apps > Apps > API Token'. Will read from "
"CLICKUP_ACCESS_TOKEN environment variable."
),
)
def fetch(db_path: str, access_token: str):
"""
Fetch data from clickup and store into DB_PATH. If the database already
exists, then entries will be updated.
Entries that have been deleted from clickup will not be removed from the
database though.
"""
# TODO: Make this available via some auth flow.
client = Client(access_token)
db = Database(db_path)
fetch_teams(db, client)
fetch_spaces(db, client)
team_ids = [r["id"] for r in db.query("SELECT id FROM teams")]
space_ids = [r["id"] for r in db.query("SELECT id FROM spaces")]
for space_id in space_ids:
fetch_folders_and_lists(db, client, space_id=space_id)
for team_id in team_ids:
fetch_tasks(db, client, team_id=team_id)
now = datetime.utcnow()
start_date = now.replace(year=now.year - 10)
end_date = now.replace(year=now.year + 10)
for team_id in team_ids:
fetch_time_entries(
db, client, team_id=team_id, start_date=start_date, end_date=end_date
)
| [
"datetime.datetime.utcnow",
"click.group",
"click.option",
"click.Path",
"sqlite_utils.Database"
] | [((4469, 4482), 'click.group', 'click.group', ([], {}), '()\n', (4480, 4482), False, 'import click\n'), ((4645, 4876), 'click.option', 'click.option', (['"""--access-token"""'], {'envvar': '"""CLICKUP_ACCESS_TOKEN"""', 'help': '"""Your personal access token. Retrieve from ClickUp at \'Settings > My Apps > Apps > API Token\'. Will read from CLICKUP_ACCESS_TOKEN environment variable."""'}), '(\'--access-token\', envvar=\'CLICKUP_ACCESS_TOKEN\', help=\n "Your personal access token. Retrieve from ClickUp at \'Settings > My Apps > Apps > API Token\'. Will read from CLICKUP_ACCESS_TOKEN environment variable."\n )\n', (4657, 4876), False, 'import click\n'), ((5292, 5309), 'sqlite_utils.Database', 'Database', (['db_path'], {}), '(db_path)\n', (5300, 5309), False, 'from sqlite_utils import Database\n'), ((5689, 5706), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (5704, 5706), False, 'from datetime import datetime\n'), ((4561, 4621), 'click.Path', 'click.Path', ([], {'file_okay': '(True)', 'dir_okay': '(False)', 'allow_dash': '(False)'}), '(file_okay=True, dir_okay=False, allow_dash=False)\n', (4571, 4621), False, 'import click\n')] |
from toapi import Api
from items.pexels import Pexels
from items.pixabay import Pixabay
from settings import MySettings
api = Api(settings=MySettings)
api.register(Pixabay)
api.register(Pexels)
if __name__ == '__main__':
api.serve()
| [
"toapi.Api"
] | [((128, 152), 'toapi.Api', 'Api', ([], {'settings': 'MySettings'}), '(settings=MySettings)\n', (131, 152), False, 'from toapi import Api\n')] |
import unittest
from omrdatasettools.downloaders.EdiromDatasetDownloader import EdiromDatasetDownloader
from omrdatasettools.tests.DatasetDownloaderTest import DatasetDownloaderTest
class EdiromDatasetTest(unittest.TestCase):
def test_download_and_extract_bargheer_edirom_dataset_expect_folder_to_be_created(self):
destination_directory = "Bargheer"
downloader = EdiromDatasetDownloader("Bargheer")
zip_file = downloader.get_dataset_filename()
number_of_samples_in_the_dataset = 9
target_file_extension = "*.xml"
# noinspection PyCallByClass
DatasetDownloaderTest.download_dataset_and_verify_correct_extraction(self, destination_directory,
number_of_samples_in_the_dataset,
target_file_extension, zip_file,
downloader)
def test_download_and_extract_freischuetz_edirom_dataset_expect_folder_to_be_created(self):
destination_directory = "FreischuetzDigital"
downloader = EdiromDatasetDownloader("FreischuetzDigital")
zip_file = downloader.get_dataset_filename()
number_of_samples_in_the_dataset = 15
target_file_extension = "*.xml"
# noinspection PyCallByClass
DatasetDownloaderTest.download_dataset_and_verify_correct_extraction(self, destination_directory,
number_of_samples_in_the_dataset,
target_file_extension, zip_file,
downloader)
| [
"omrdatasettools.tests.DatasetDownloaderTest.DatasetDownloaderTest.download_dataset_and_verify_correct_extraction",
"omrdatasettools.downloaders.EdiromDatasetDownloader.EdiromDatasetDownloader"
] | [((386, 421), 'omrdatasettools.downloaders.EdiromDatasetDownloader.EdiromDatasetDownloader', 'EdiromDatasetDownloader', (['"""Bargheer"""'], {}), "('Bargheer')\n", (409, 421), False, 'from omrdatasettools.downloaders.EdiromDatasetDownloader import EdiromDatasetDownloader\n'), ((606, 790), 'omrdatasettools.tests.DatasetDownloaderTest.DatasetDownloaderTest.download_dataset_and_verify_correct_extraction', 'DatasetDownloaderTest.download_dataset_and_verify_correct_extraction', (['self', 'destination_directory', 'number_of_samples_in_the_dataset', 'target_file_extension', 'zip_file', 'downloader'], {}), '(self,\n destination_directory, number_of_samples_in_the_dataset,\n target_file_extension, zip_file, downloader)\n', (674, 790), False, 'from omrdatasettools.tests.DatasetDownloaderTest import DatasetDownloaderTest\n'), ((1185, 1230), 'omrdatasettools.downloaders.EdiromDatasetDownloader.EdiromDatasetDownloader', 'EdiromDatasetDownloader', (['"""FreischuetzDigital"""'], {}), "('FreischuetzDigital')\n", (1208, 1230), False, 'from omrdatasettools.downloaders.EdiromDatasetDownloader import EdiromDatasetDownloader\n'), ((1416, 1600), 'omrdatasettools.tests.DatasetDownloaderTest.DatasetDownloaderTest.download_dataset_and_verify_correct_extraction', 'DatasetDownloaderTest.download_dataset_and_verify_correct_extraction', (['self', 'destination_directory', 'number_of_samples_in_the_dataset', 'target_file_extension', 'zip_file', 'downloader'], {}), '(self,\n destination_directory, number_of_samples_in_the_dataset,\n target_file_extension, zip_file, downloader)\n', (1484, 1600), False, 'from omrdatasettools.tests.DatasetDownloaderTest import DatasetDownloaderTest\n')] |
import ast
import curses
import json
from argparse import ArgumentParser
from .ctable import Table
def get_args():
parser = ArgumentParser(
description="""
Display a typical API json object (a list of dictionaries) in a curses table.
"""
)
parser.add_argument("data", help="json data.")
parser.add_argument(
"--columns",
"-c",
nargs="+",
help="Specify which fields you want to translate into columns.",
)
return parser.parse_args()
def init_table(stdscr, data, columns=None):
if columns:
columns = columns
else:
columns = list(set().union(*(d.keys() for d in data)))
return Table(stdscr, data, columns).init()
def show_table(data, columns):
return curses.wrapper(init_table, data, columns)
def main():
args = get_args()
if type(args.data) == list:
data = args.data
else:
# data = ast.literal_eval(args.data)
data = json.loads(args.data)
print(show_table(data, args.columns))
if __name__ == "__main__":
main()
| [
"json.loads",
"curses.wrapper",
"argparse.ArgumentParser"
] | [((130, 261), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""\n Display a typical API json object (a list of dictionaries) in a curses table.\n """'}), '(description=\n """\n Display a typical API json object (a list of dictionaries) in a curses table.\n """\n )\n', (144, 261), False, 'from argparse import ArgumentParser\n'), ((759, 800), 'curses.wrapper', 'curses.wrapper', (['init_table', 'data', 'columns'], {}), '(init_table, data, columns)\n', (773, 800), False, 'import curses\n'), ((964, 985), 'json.loads', 'json.loads', (['args.data'], {}), '(args.data)\n', (974, 985), False, 'import json\n')] |
from setuptools import setup, find_packages
version = open("VERSION").read().strip()
PROJECT_DESC = "Tool for injecting attacks into serial data bus datasets"
PROJECT_URL = "https://github.com/matthewRekos/serialNightshade"
PACKAGE_NAME = "nightshade"
setup(
name="serialNightshade-test",
version=version,
description=PROJECT_DESC,
long_description=open("README.md").read(),
#long_description_content_type="text/markdown",
url=PROJECT_URL,
author="matthewRekos",
author_email="redacted_for_submission",
# packages=[PACKAGE_NAME, "{}.tests".format(PACKAGE_NAME), "{}.utils".format(PACKAGE_NAME)],
# Add any non-python package files to this list to be captured in pypi packaging
# package_data={PACKAGE_NAME: ["data/*", "data/configs/*"]},
# install_requires=INSTALL_REQUIRES,
entry_points={
"console_scripts": [
"nightshade = serialNightshade.main:begin_attacks"
],
},
data_files=[(".", ["VERSION"])],
# package_dir={"":"."},
packages=find_packages(where="."),
)
| [
"setuptools.find_packages"
] | [((1034, 1058), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""."""'}), "(where='.')\n", (1047, 1058), False, 'from setuptools import setup, find_packages\n')] |
import unittest
from backend.corpora.common.corpora_orm import CollectionVisibility
from tests.unit.backend.corpora.api_server.base_api_test import BaseAuthAPITest
class TestDeleteCollection(BaseAuthAPITest):
def _test(self, collection_uuid, header, expected_status):
if header == "owner":
headers = self.get_auth_headers()
elif header == "super":
headers = self.make_super_curator_header()
elif header == "not_owner":
headers = self.make_not_owner_header()
elif "noauth":
headers = {}
response = self.app.delete(f"/curation/v1/collections/{collection_uuid}", headers=headers)
self.assertEqual(expected_status, response.status_code)
if response.status_code == 204:
response = self.app.delete(f"/curation/v1/collections/{collection_uuid}", headers=headers)
self.assertEqual(403, response.status_code)
def test__delete_public_collection(self):
tests = [("not_owner", 403), ("noauth", 401), ("owner", 405), ("super", 405)]
public_collection_uuid = self.generate_collection(self.session, visibility=CollectionVisibility.PUBLIC.name).id
for auth, expected_response in tests:
with self.subTest(auth):
self._test(public_collection_uuid, auth, expected_response)
def test__delete_revision_collection(self):
tests = [("not_owner", 403), ("noauth", 401), ("owner", 204), ("super", 204)]
for auth, expected_response in tests:
with self.subTest(auth):
public_collection_uuid = self.generate_collection(
self.session, visibility=CollectionVisibility.PUBLIC.name
).id
revision_collection_uuid = self.generate_collection(
self.session, visibility=CollectionVisibility.PRIVATE.name, revision_of=public_collection_uuid
).id
self._test(revision_collection_uuid, auth, expected_response)
def test__delete_private_collection(self):
tests = [("not_owner", 403), ("noauth", 401), ("owner", 204), ("super", 204)]
for auth, expected_response in tests:
with self.subTest(auth):
private_collection_uuid = self.generate_collection(
self.session, visibility=CollectionVisibility.PRIVATE.name
).id
self._test(private_collection_uuid, auth, expected_response)
def test__delete_tombstone_collection(self):
tests = [("not_owner", 403), ("noauth", 401), ("owner", 403), ("super", 403)]
for auth, expected_response in tests:
with self.subTest(auth):
tombstone_collection_uuid = self.generate_collection(
self.session, visibility=CollectionVisibility.PUBLIC.name, tombstone=True
).id
self._test(tombstone_collection_uuid, auth, expected_response)
if __name__ == "__main__":
unittest.main()
| [
"unittest.main"
] | [((2993, 3008), 'unittest.main', 'unittest.main', ([], {}), '()\n', (3006, 3008), False, 'import unittest\n')] |
from flask_sqlalchemy import SQLAlchemy
from flask import Flask
# Create Flask application
app = Flask(__name__)
app.config.from_pyfile('config.py')
db = SQLAlchemy(app)
roles_users = db.Table(
'roles_users',
db.Column('user_id', db.Integer(), db.ForeignKey('user.id')),
db.Column('role_id', db.Integer(), db.ForeignKey('role.id'))
)
| [
"flask_sqlalchemy.SQLAlchemy",
"flask.Flask"
] | [((99, 114), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (104, 114), False, 'from flask import Flask\n'), ((156, 171), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (166, 171), False, 'from flask_sqlalchemy import SQLAlchemy\n')] |
# -*- coding: utf-8 -*-
"""
progress handler.
Old progress funcs needto be depricated ProgressIter and ProgChunks are pretty
much the only useful things here.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import time
import math
import datetime
from functools import partial
from utool import util_logging
from utool import util_inject
from utool import util_arg
from utool import util_time
from utool import util_iter
from utool import util_cplat
from six.moves import range, zip
import collections
import six # NOQA
print, rrr, profile = util_inject.inject2(__name__)
default_timer = util_time.default_timer
SILENT = util_arg.SILENT
VERBOSE = util_arg.VERBOSE
VALID_PROGRESS_TYPES = ['none', 'dots', 'fmtstr', 'simple']
AGGROFLUSH = util_arg.get_argflag('--aggroflush')
PROGGRESS_BACKSPACE = not util_arg.get_argflag(('--screen', '--progress-backspace'))
NO_PROGRESS = util_arg.get_argflag(('--no-progress', '--noprogress'))
FORCE_ALL_PROGRESS = util_arg.get_argflag(('--force-all-progress',))
# ('--screen' not in sys.argv and '--progress-backspace' not in sys.argv)
DEBUG_FREQ_ADJUST = util_arg.get_argflag('--debug-adjust-freq')
def test_progress():
"""
CommandLine:
python -m utool.util_progress --test-test_progress
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_progress import * # NOQA
>>> test_progress()
"""
import utool as ut
# import time
# ut.rrrr()
print('_________________')
# numiter = 50
# sleeptime = 1E-4
# sleeptime2 = 1E-2
numiter = 20
sleeptime = 1e-7
sleeptime2 = 1e-7
with ut.Timer():
for x in ut.ProgressIter(range(0, numiter), freq=8, adjust=True):
time.sleep(sleeptime)
print('_________________')
numiter = 50
sleeptime = 1e-4
with ut.Timer():
for x in ut.ProgressIter(range(0, numiter), freq=8, adjust=True):
time.sleep(sleeptime)
print('_________________')
print('No frequncy run:')
with ut.Timer():
for x in range(0, numiter):
time.sleep(sleeptime)
print('_________________')
numiter = 500
sleeptime = 8e-7
with ut.Timer():
for x in ut.ProgressIter(range(0, numiter), freq=8, adjust=True):
time.sleep(sleeptime)
print('_________________')
with ut.Timer():
for x in ut.ProgressIter(range(0, numiter), freq=200):
time.sleep(sleeptime)
print('_________________')
print('No frequncy run:')
with ut.Timer():
for x in range(0, numiter):
time.sleep(sleeptime)
print('_________________')
# Test nested iter
# progiter1 = ut.ProgressIter(range(0, 10), lbl='prog1', freq=1, adjust=False)
# for count1 in progiter1:
# progiter_partials = progiter1.get_subindexers(1)
# progiter2 = progiter_partials[0](range(0, 7), lbl='sub_prog1', freq=1, adjust=False)
# for count2 in progiter2:
# pass
for x in ut.ProgressIter(zip(range(10), range(10)), freq=8, adjust=True):
time.sleep(sleeptime)
# progiter3 = progiter_partials[1](range(0, 3), lbl='sub_prog2', freq=1, adjust=False)
# for count3 in progiter3:
# pass
print('Double backspace progress 1')
progiter1 = ut.ProgressIter(
range(0, 10), lbl='prog1', freq=1, adjust=False, backspace=False
)
for count1 in progiter1:
progiter2 = ut.ProgressIter(
range(0, 10), lbl='prog2', freq=1, adjust=False, backspace=True
)
for count2 in progiter2:
time.sleep(sleeptime2)
print('Double backspace progress 2')
progiter1 = ut.ProgressIter(
range(0, 10), lbl='prog1', freq=1, adjust=False, backspace=True
)
for count1 in progiter1:
progiter2 = ut.ProgressIter(
range(0, 10), lbl='prog2', freq=1, adjust=False, backspace=True
)
for count2 in progiter2:
time.sleep(sleeptime2)
def get_num_chunks(length, chunksize):
r"""
Returns the number of chunks that a list will be split into given a
chunksize.
Args:
length (int):
chunksize (int):
Returns:
int: n_chunks
CommandLine:
python -m utool.util_progress --exec-get_num_chunks:0
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_progress import * # NOQA
>>> length = 2000
>>> chunksize = 256
>>> n_chunks = get_num_chunks(length, chunksize)
>>> result = ('n_chunks = %s' % (six.text_type(n_chunks),))
>>> print(result)
n_chunks = 8
"""
n_chunks = int(math.ceil(length / chunksize))
return n_chunks
def ProgChunks(list_, chunksize, nInput=None, **kwargs):
"""
Yeilds an iterator in chunks and computes progress
Progress version of ut.ichunks
Args:
list_ (list):
chunksize (?):
nInput (None): (default = None)
Kwargs:
length, freq
Returns:
ProgressIter
CommandLine:
python -m utool.util_progress ProgChunks --show
Example:
>>> # ENABLE_DOCTEST
>>> from utool.util_progress import * # NOQA
>>> import utool as ut
>>> list_ = range(100)
>>> chunksize = 10
>>> nInput = None
>>> progiter_ = ProgChunks(list_, chunksize, nInput)
>>> iter_ = iter(progiter_)
>>> chunk = six.next(iter_)
>>> assert len(chunk) == 10
>>> rest = ut.flatten(list(progiter_))
>>> assert len(rest) == 90
"""
if nInput is None:
nInput = len(list_)
n_chunks = get_num_chunks(nInput, chunksize)
kwargs['length'] = n_chunks
if 'freq' not in kwargs:
kwargs['freq'] = 1
chunk_iter = util_iter.ichunks(list_, chunksize)
progiter_ = ProgressIter(chunk_iter, **kwargs)
return progiter_
def ProgPartial(*args, **kwargs):
return partial(ProgressIter, *args, **kwargs)
class ProgressIter(object):
"""
Wraps a for loop with progress reporting
lbl='Progress: ', length=0, flushfreq=4, startafter=-1, start=True,
repl=False, approx=False, disable=False, writefreq=1, with_time=False,
backspace=True, pad_stdout=False, wfreq=None, ffreq=None, freq=None,
total=None, num=None, with_totaltime=None
Referencs:
https://github.com/verigak/progress/blob/master/progress/__init__.py
Args:
iterable (): iterable normally passed to for loop
lbl (str): progress label
length (int):
flushfreq (int):
startafter (int):
start (bool):
repl (bool):
approx (bool):
enabled (bool):
writefreq (int):
with_totaltime (bool):
backspace (bool):
pad_stdout (bool):
autoadjust (bool): no adjusting frequency if True (default False)
wfreq (None): alias for write_freq
ffreq (None): alias for flush_freq
total (None): alias for length
num (None): alias for length
Timeit:
>>> import utool as ut
>>> setup = ut.codeblock(
>>> '''
>>> import utool as ut
>>> from six.moves import range, zip
>>> import time
>>> def time_append(size):
>>> start_time = time.time()
>>> last_time = start_time
>>> list2 = []
>>> for x in range(size):
>>> now_time = time.time()
>>> between = now_time - last_time
>>> last_time = now_time
>>> list2.append(between)
>>>
>>> def time_assign(size):
>>> start_time = time.time()
>>> last_time = start_time
>>> list1 = ut.alloc_nones(size)
>>> for x in range(size):
>>> now_time = time.time()
>>> between = now_time - last_time
>>> last_time = now_time
>>> list1[x] = between
>>>
>>> def time_baseline(size):
>>> start_time = time.time()
>>> last_time = start_time
>>> for x in range(size):
>>> now_time = time.time()
>>> between = now_time - last_time
>>> last_time = now_time
>>>
>>> def time_null(size):
>>> for x in range(size):
>>> pass
>>> ''')
>>>
>>> input_sizes = [2 ** count for count in range(7, 12)]
>>> stmt_list = ['time_assign', 'time_append', 'time_baseline', 'time_null']
>>> input_sizes=[100, 1000, 10000]
>>> ut.timeit_grid(stmt_list, setup, input_sizes=input_sizes, show=True)
CommandLine:
python -m utool.util_progress --test-ProgressIter
python -m utool.util_progress --test-ProgressIter:0
python -m utool.util_progress --test-ProgressIter:1
python -m utool.util_progress --test-ProgressIter:2
python -m utool.util_progress --test-ProgressIter:3
Example:
>>> # ENABLE_DOCTEST
>>> import utool as ut
>>> from six.moves import range
>>> num = 1000
>>> num2 = 10001
>>> results1 = [x for x in ut.ProgressIter(range(num), wfreq=10, adjust=True)]
>>> results4 = [x for x in ut.ProgressIter(range(num), wfreq=1, adjust=True)]
>>> results2 = [x for x in range(num)]
>>> results3 = [x for x in ut.progiter((y + 1 for y in range(num2)),
>>> ntotal=num2, wfreq=1000,
>>> backspace=True, adjust=True)]
>>> assert results1 == results2
Example:
>>> # DISABLE_DOCTEST
>>> # SLOW_DOCTEST
>>> import utool as ut
>>> from six.moves import range
>>> num2 = 10001
>>> progiter = ut.ProgressIter(range(num2), lbl='testing primes',
>>> report_unit='seconds', freq=1,
>>> time_thresh=.1, adjust=True)
>>> [ut.get_nth_prime_bruteforce(29) for x in progiter]
Example:
>>> # DISABLE_DOCTEST
>>> # SLOW_DOCTEST
>>> import utool as ut
>>> from six.moves import range
>>> num2 = 100001
>>> progiter = ut.ProgressIter(range(num2), lbl='testing primes',
>>> report_unit='seconds', freq=1,
>>> time_thresh=3, adjust=True, bs=True)
>>> [ut.get_nth_prime_bruteforce(29) for x in progiter]
Example:
>>> # DISABLE_DOCTEST
>>> # SLOW_DOCTEST
>>> import utool as ut
>>> from six.moves import range
>>> import time
>>> crazy_time_list = [.001, .01, .0001] * 1000
>>> crazy_time_iter = (time.sleep(x) for x in crazy_time_list)
>>> progiter = ut.ProgressIter(crazy_time_iter, lbl='crazy times', length=len(crazy_time_list), freq=10)
>>> list(progiter)
"""
def __init__(self, iterable=None, *args, **kwargs):
self.iterable = iterable
if len(args) < 2 and 'nTotal' not in kwargs and 'length' not in kwargs:
try:
length = len(iterable)
kwargs['length'] = length
except Exception:
pass
self.use_rate = kwargs.pop('use_rate', True)
self.use_rate = True # Force
self.lbl = kwargs.get('lbl', 'lbl')
self.lbl = kwargs.get('label', self.lbl)
self.length = kwargs.get('nTotal', kwargs.get('length', 0))
# self.backspace = kwargs.get('backspace', True)
self.backspace = kwargs.get('backspace', kwargs.get('bs', False))
self.freq = kwargs.get('freq', 1)
self.invert_rate = kwargs.get('invert_rate', False)
self.auto_invert_rate = kwargs.get('auto_invert_rate', True)
self.verbose = kwargs.pop('verbose', True) # VERBOSE
# self.report_unit = kwargs.get('report_unit', 'minutes')
self.enabled = kwargs.get('enabled', True)
self.report_unit = kwargs.get('report_unit', 'seconds')
# autoadjust frequency of reporting
self.autoadjust = kwargs.get('autoadjust', kwargs.get('adjust', False))
self.time_thresh = kwargs.pop('time_thresh', None)
self.prog_hook = kwargs.pop('prog_hook', None)
self.prehack = kwargs.pop('prehack', None)
self.freq_est_strat = kwargs.pop('freq_est', 'between')
if 'separate' in kwargs:
print('WARNING separate no longer supported by ProgIter')
# FIXME: get these subinder things working
# ~/code/guitool/guitool/guitool_components.py
# self.substep_min = kwargs.pop('substep_min', 0)
# self.substep_size = kwargs.pop('substep_size', 1)
# self.level = kwargs.pop('level', 0)
self.parent_index = kwargs.pop('parent_index', 0)
self.parent_length = kwargs.pop('parent_length', 1)
self.parent_offset = self.parent_index * self.length
self._cursor_at_newline = True
# Window sizes for estimates
self.est_window = kwargs.pop('est_window', 64)
# self.start_offset = self.substep_min
self.stream = kwargs.pop('stream', None)
self.extra = ''
if FORCE_ALL_PROGRESS:
self.freq = 1
self.autoadjust = False
if self.prog_hook is not None:
# Sets the label of a progress bar to the ProgIter label
self.prog_hook.register_progiter(self)
# self.time_thresh_growth = kwargs.pop('time_thresh_growth', 1.0)
self.time_thresh_growth = kwargs.pop('time_thresh_growth', 1.0)
self.with_totaltime = False
if self.freq is None:
self.freq = 1
if self.use_rate:
# Hacky so hacky. this needs major cleanup
# saving args and kwargs so can wait on log_progress call
# not sure where it is called and dont want to break things
self.args = args
self.kwargs = kwargs
self.mark = None
self.end = None
# else:
# self.mark, self.end = log_progress(*args, **kwargs)
self.count = 0
def __call__(self, iterable):
self.iterable = iterable
return self
def __iter__(self):
if not self.enabled:
return iter(self.iterable)
if NO_PROGRESS:
# IF PROGRESS IS TURNED OFF
msg = 'Iterating ' + self.lbl + ' with no progress'
if self.verbose:
print(msg)
# with ut.Timer(msg):
return iter(self.iterable)
else:
# if self.use_rate:
# STANDARD CALL CASE
return self.iter_rate()
# else:
# return self.iter_without_rate()
# def get_subindexers(prog_iter, num_substeps):
# # FIXME and make this a method of progiter
# step_min = (((prog_iter.count - 1) / prog_iter.length) *
# prog_iter.substep_size + prog_iter.substep_min)
# step_size = (1.0 / prog_iter.length) * prog_iter.substep_size
# substep_size = step_size / num_substeps
# substep_min_list = [(step * substep_size) + step_min
# for step in range(num_substeps)]
# #level = prog_iter.level + 1
# DEBUG = False
# if DEBUG:
# with ut.Indenter(' ' * 4 * prog_iter.level):
# print('\n')
# print('+____<NEW SUBSTEPS>____')
# print('Making %d substeps for prog_iter.lbl = %s' % (
# num_substeps, prog_iter.lbl,))
# print(' * step_min = %.2f' % (step_min,))
# print(' * step_size = %.2f' % (step_size,))
# print(' * substep_size = %.2f' % (substep_size,))
# print(' * substep_min_list = %r' % (substep_min_list,))
# print(r'L____</NEW SUBSTEPS>____')
# print('\n')
# subprog_partial_list = [
# partial(ProgressIter,
# parent_length=prog_iter.length * num_substeps,
# parent_index=(prog_iter.count - 1) + (prog_iter.length * step))
# for step in range(num_substeps)]
# return subprog_partial_list
# def build_msg_fmtstr_time(self, lbl, invert_rate, backspace):
# with_wall = True
# tzname = time.tzname[0]
# if util_cplat.WIN32:
# tzname = tzname.replace('Eastern Standard Time', 'EST')
# msg_fmtstr_time = ''.join((
# 'rate=%3.3f seconds/iter, ' if invert_rate else 'rate=%4.2f Hz,',
# ' etr=%s,',
# ' ellapsed=%s,',
# ' wall=%s ' + tzname if with_wall else '',
# #'' if backspace else '\n',
# '\n' if backspace else '',
# ))
# return msg_fmtstr_time
@staticmethod
def build_msg_fmtstr_head_cols(length, lbl):
nTotal_ = '?' if length == 0 else six.text_type(length)
msg_head_columns = ['', lbl, ' {count:4d}/', nTotal_, '... ']
return msg_head_columns
@staticmethod
def build_msg_fmtstr2(lbl, length, invert_rate, backspace):
r"""
Args:
lbl (str):
invert_rate (bool):
backspace (bool):
Returns:
str: msg_fmtstr_time
CommandLine:
python -m utool.util_progress --exec-ProgressIter.build_msg_fmtstr2
Setup:
>>> from utool.util_progress import * # NOQA
>>> lbl = 'foo'
>>> invert_rate = True
>>> backspace = False
>>> length = None
Example:
>>> # DISABLE_DOCTEST
>>> msg_fmtstr_time = ProgressIter.build_msg_fmtstr2(lbl, length, invert_rate, backspace)
>>> result = ('%s' % (ut.repr2(msg_fmtstr_time),))
>>> print(result)
"""
with_wall = True
tzname = time.tzname[0]
if util_cplat.WIN32:
tzname = tzname.replace('Eastern Standard Time', 'EST')
# ansii/vt100 code for clearline
# CLEARLINE_L2 = '\33[2K'
# BEFORE_PROG = '\r\033[?25l'
CLEARLINE_EL0 = '\33[0K' # clear line to right
# CLEARLINE_EL1 = '\33[1K' # clear line to left
CLEARLINE_EL2 = '\33[2K' # clear line
# DECTCEM_HIDE = '\033[?25l' # hide cursor
CLEAR_BEFORE = '\r' + CLEARLINE_EL2 # + DECTCEM_HIDE
# FIXME: hideing cursor persists if the program crashes
CLEAR_AFTER = CLEARLINE_EL0
msg_head = ProgressIter.build_msg_fmtstr_head_cols(length, lbl)
if backspace:
msg_head = [CLEAR_BEFORE] + msg_head
msg_tail = [
(
'rate={rate:4.2f} iter/sec, '
if invert_rate
else 'rate={rate:4.2f} sec/iter, '
),
('' if length == 0 else ' etr={etr},'),
' ellapsed={ellapsed},',
(' wall={wall} ' + tzname if with_wall else ''),
# backslash-r is a carrage return and undoes all previous output on
# a written line
(' {extra}'),
CLEAR_AFTER if backspace else '\n',
]
msg_fmtstr_time = ''.join((msg_head + msg_tail))
return msg_fmtstr_time
def iter_rate(self):
"""
pun not intended
# TODO: record iteration times for analysis
# TODO Incorporate this better
# FIXME; pad_stdout into subfunctions
import dis
dis.dis(ut.ProgressIter.iter_rate)
"""
# class IterState(object):
# def __init__(state):
# state.freq = 1
# state.freq = 1
# pass
adjust = self.autoadjust
self._cursor_at_newline = not self.backspace
# SETUP VARIABLES
# HACK: reaquire logging print funcs in case they have changed
if self.stream is None:
self.write = util_logging._utool_write()
self.flush = util_logging._utool_flush()
else:
self.write = lambda msg: self.stream.write(msg) # NOQA
self.flush = lambda: self.stream.flush() # NOQA
length = self.length * self.parent_length # hack
freq = self.freq
self.count = 0
between_count = 0
last_count = 0
# how long iterations should be before a flush
# (used for freq adjustment)
time_thresh = (
self._get_timethresh_heuristics()
if self.time_thresh is None
else self.time_thresh
)
time_thresh_growth = self.time_thresh_growth
if time_thresh_growth > 1:
# time_thresh_growth is specified for very long processes
# print out the starting timestamp in that case
timestamp = time.strftime('%Y-%m-%d %H:%M:%S') + ' ' + time.tzname[0]
print('Start progress lbl= %s at %s' % (self.lbl, timestamp))
# time_thresh = 0.5
max_between_time = -1.0
max_between_count = -1.0 # why is this different? # because frequency varies
# TODO: should be kept as a statistic that uses the max time from a
# list of iterations divided by the size of that list that will account
# for buffering issues
iters_per_second = 0
self.iters_per_second = float('nan')
self.est_seconds_left = 0
self.total_seconds = 0
# Write initial message
# force_newlines = not self.backspace
start_msg_fmt = ''.join(self.build_msg_fmtstr_head_cols(length, self.lbl))
self.msg_fmtstr = self.build_msg_fmtstr2(
self.lbl, length, self.invert_rate, self.backspace
)
try:
util_logging._utool_flush()()
except IOError as ex:
# There is some weird error when doing progress in IPython notebook
if util_arg.VERBOSE:
print('IOError flushing %s' % (ex,))
if not self.prehack:
if self.backspace:
self.display_message()
elif self.verbose:
start_msg = start_msg_fmt.format(count=self.parent_offset)
util_logging._utool_write()(start_msg + '\n')
self._cursor_at_newline = not self.backspace
try:
util_logging._utool_flush()()
except IOError as ex:
# There is some weird error when doing progress in IPython notebook
if util_arg.VERBOSE:
print('IOError flushing %s' % (ex,))
else:
self._cursor_at_newline = True
if self.prog_hook is not None:
self.prog_hook(self.count, length)
# TODO: on windows is time.clock better?
# http://exnumerus.blogspot.com/2011/02/how-to-quickly-plot-multiple-line.html
start_time = default_timer()
last_time = start_time
start = 1 + self.parent_offset
if self.freq_est_strat == 'between':
FREQ_EST = 0
elif self.freq_est_strat == 'absolute':
FREQ_EST = 1
else:
FREQ_EST = 1
USE_RECORD = True
USE_RECORD_WINDOWED_AVG = True
USE_RECORD_WINDOWED_WEIGHT = 0.9
# use last 64 times to compute a more stable average rate
measure_between_time = collections.deque([], maxlen=self.est_window)
# Wrap the for loop with a generator
try:
for self.count, item in enumerate(self.iterable, start=start):
if self.prehack:
# hack to print before yeilding
# so much for efficiency
self.set_extra((self.lbl + '=' + self.prehack) % item)
self.display_message()
self.ensure_newline()
# GENERATE
yield item
if self.prehack or (self.count) % freq == 0:
now_time = default_timer()
between_time = now_time - last_time
between_count = self.count - last_count
total_seconds = now_time - start_time
self.total_seconds = total_seconds
if FREQ_EST == 0:
if USE_RECORD:
measure_between_time.append(
between_count / (float(between_time) + 1e-9)
)
if USE_RECORD_WINDOWED_AVG:
iters_per_second = None
for measure_between in measure_between_time:
if iters_per_second is None:
iters_per_second = measure_between
else:
iters_per_second = (
USE_RECORD_WINDOWED_WEIGHT
) * iters_per_second + (
1.0 - USE_RECORD_WINDOWED_WEIGHT
) * measure_between
else:
iters_per_second = sum(measure_between_time) / len(
measure_between_time
)
else:
iters_per_second = between_count / (
float(between_time) + 1e-9
)
elif FREQ_EST == 1:
iters_per_second = (now_time - start_time) / self.count
self.iters_per_second = iters_per_second
# If the future is known
if length is None:
est_seconds_left = -1
else:
iters_left = length - self.count
est_seconds_left = iters_left / (1.0 / iters_per_second + 1e-9)
self.est_seconds_left = est_seconds_left
# /future
last_count = self.count
last_time = now_time
# ADJUST FREQ IF NEEDED
# Adjust frequency if printing too quickly
# so progress doesnt slow down actual function
# TODO: better adjust algorithm
time_thresh *= time_thresh_growth
if adjust and (
between_time < time_thresh or between_time > time_thresh * 2.0
):
max_between_time = max(max(max_between_time, between_time), 1e-9)
max_between_count = max(max_between_count, between_count)
# If progress was uniform and all time estimates were
# perfect this would be the new freq to achieve time_thresh
new_freq = max(
int(time_thresh * max_between_count / max_between_time), 1
)
if DEBUG_FREQ_ADJUST:
print('\n+---')
print('[prog] between_count = %r' % between_count)
print('[prog] between_time = %.8r' % between_time)
print('[prog] time_thresh = %r' % time_thresh)
print('[prog] max_between_count = %r' % max_between_count)
print('[prog] max_between_time = %.8r' % max_between_time)
print('[prog] Adusting frequency from: %r' % freq)
print('[prog] Adusting frequency to: %r' % new_freq)
print('L___')
# But things are not perfect. So, don't make drastic changes
max_freq_change_up = max(256, freq * 2)
max_freq_change_down = freq // 2
if (new_freq - freq) > max_freq_change_up:
freq += max_freq_change_up
elif (freq - new_freq) > max_freq_change_down:
freq -= max_freq_change_down
else:
freq = new_freq
if not self.prehack:
self.display_message()
# DO PROGRESS INFO
if self.prog_hook is not None:
# From the point of view of the progress iter, we are about
# to enter the body of a for loop. (But we may have
# executed the body implicitly in the yeild.... so it is
# ambiguous. In the second case 0 will be executed twice.
self.prog_hook(self.count, length)
except (RuntimeError, StopIteration):
pass
if self.prehack:
self.set_extra('')
# --- end of main loop
# cleanup
if (self.count) % freq != 0:
# If the final line of progress was not written in the loop, write
# it here
self.est_seconds_left = 0
self.total_seconds = default_timer() - start_time
self.display_message()
if self.prog_hook is not None:
# From the point of view of the progress iter, we are about to
# enter the body of a for loop. (But we may have executed the
# body implicitly in the yeild.... so it is ambiguous. In the
# second case 0 will be executed twice.
self.prog_hook(self.count, length)
self.ensure_newline()
def display_message(self):
# HACK to be more like sklearn.extrnals ProgIter version
if self.verbose:
instant_invert_rate = self.iters_per_second < 0.1
if self.auto_invert_rate and self.invert_rate != instant_invert_rate:
self.invert_rate = instant_invert_rate
length = self.length * self.parent_length # hack
self.msg_fmtstr = self.build_msg_fmtstr2(
self.lbl, length, self.invert_rate, self.backspace
)
rate = (
1.0 / (self.iters_per_second + 1e-9)
if self.invert_rate
else self.iters_per_second
)
msg = self.msg_fmtstr.format(
count=self.count,
rate=rate,
etr=six.text_type(datetime.timedelta(seconds=int(self.est_seconds_left))),
ellapsed=six.text_type(
datetime.timedelta(seconds=int(self.total_seconds))
),
wall=time.strftime('%H:%M'),
extra=self.extra,
)
self.write(msg)
self._cursor_at_newline = not self.backspace
try:
self.flush()
except IOError as ex:
if util_arg.VERBOSE:
print('IOError flushing %s' % (ex,))
# print('self.flush = %r' % (self.flush,))
# import utool as ut
# ut.debug_logging_iostreams()
# ut.printex(ex)
# raise
pass
def set_extra(self, extra):
"""
specify a custom info appended to the end of the next message
TODO: come up with a better name and rename
"""
self.extra = extra
def ensure_newline(self):
"""
use before any custom printing when using the progress iter to ensure
your print statement starts on a new line instead of at the end of a
progress line
"""
DECTCEM_SHOW = '\033[?25h' # show cursor
AT_END = DECTCEM_SHOW + '\n'
if not self._cursor_at_newline:
self.write(AT_END)
self._cursor_at_newline = True
def _get_timethresh_heuristics(self):
"""
resonably decent hueristics for how much time to wait before
updating progress.
"""
if self.length > 1e5:
time_thresh = 2.5
elif self.length > 1e4:
time_thresh = 2.0
elif self.length > 1e3:
time_thresh = 1.0
else:
time_thresh = 0.5
return time_thresh
progiter = ProgressIter
class ProgIter(ProgressIter):
"""Thin wrapper with better arg positions"""
def __init__(self, iterable, lbl='Prog', adjust=True, freq=1, bs=True, **kwargs):
import utool as ut
super(ut.ProgIter, self).__init__(
iterable, lbl=lbl, adjust=adjust, freq=freq, bs=bs, **kwargs
)
def progress_str(
max_val, lbl='Progress: ', repl=False, approx=False, backspace=PROGGRESS_BACKSPACE
):
r"""makes format string that prints progress: %Xd/MAX_VAL with backspaces
NOTE: \r can be used instead of backspaces. This function is not very
relevant because of that.
"""
# string that displays max value
max_str = six.text_type(max_val)
if approx:
# denote approximate maximum
max_str = '~' + max_str
dnumstr = six.text_type(len(max_str))
# string that displays current progress
cur_str = '%' + dnumstr + 'd'
# If user passed in the label
if repl:
_fmt_str = lbl.replace('<cur_str>', cur_str).replace('<max_str>', max_str)
else:
_fmt_str = lbl + cur_str + '/' + max_str
if backspace:
# put backspace characters into the progress string
# (looks nice on normal terminals)
# nBackspaces = len(_fmt_str) - len(dnumstr) + len(max_str)
# backspaces = '\b' * nBackspaces
# fmt_str = backspaces + _fmt_str
# FIXME: USE CARAGE RETURN INSTEAD OF BACKSPACES
fmt_str = '\r' + _fmt_str
else:
# FIXME: USE CARAGE RETURN INSTEAD OF BACKSPACES
# this looks better on terminals without backspaces
fmt_str = _fmt_str + '\n'
return fmt_str
def log_progress(
lbl='Progress: ',
length=0,
flushfreq=4,
startafter=-1,
start=True,
repl=False,
approx=False,
disable=False,
writefreq=1,
with_time=False,
backspace=True,
pad_stdout=False,
wfreq=None,
ffreq=None,
freq=None,
total=None,
num=None,
with_totaltime=None,
):
"""
DEPRICATE
FIXME: depricate for ProgressIter.
still used in util_dev
"""
global AGGROFLUSH
# Alias kwargs with simpler names
if num is not None:
length = num
if total is not None:
length = total
if wfreq is not None:
writefreq = wfreq
if ffreq is not None:
flushfreq = ffreq
if freq is not None:
writefreq = flushfreq = freq
if with_totaltime is not None:
with_time = with_totaltime
# flush frequency must be a multiple of write frequency
flushfreq = max(int(round(flushfreq / writefreq)), 1) * writefreq
if length < startafter or disable:
# Do not mark progress if only executing a small number of tasks
def mark_progress(*args):
pass
def end_progress(*args):
pass
return mark_progress, end_progress
else:
write_fn = util_logging._utool_write()
flush_fn = util_logging._utool_flush()
# build format string for displaying progress
fmt_str = progress_str(
length, lbl=lbl, repl=repl, approx=approx, backspace=backspace
)
if AGGROFLUSH:
# Progress function which automatically flushes
def mark_progress(count, flush_fn=flush_fn):
count_ = count + 1
write_fn(fmt_str % (count_))
flush_fn()
else:
# Progress function flushes every <flushfreq> times
def mark_progress(
count,
fmt_str=fmt_str,
flushfreq=flushfreq,
writefreq=writefreq,
write_fn=write_fn,
flush_fn=flush_fn,
):
count_ = count + 1
if count_ % writefreq == 0:
write_fn(fmt_str % count_)
if count_ % flushfreq == 0:
flush_fn()
if pad_stdout:
write_fn('\n')
write_fn('\n')
flush_fn()
if with_time:
tt = util_time.tic(lbl)
def end_progress(count_=length, write_fn=write_fn, flush_fn=flush_fn):
write_fn(fmt_str % (count_))
write_fn('\n')
flush_fn()
if with_time:
util_time.toc(tt)
if pad_stdout:
write_fn('\n\n')
flush_fn()
# mark_progress(0)
if start:
mark_progress(-1)
return mark_progress, end_progress
if __name__ == '__main__':
"""
CommandLine:
python -c "import utool, utool.util_progress; utool.doctest_funcs(utool.util_progress, allexamples=True)"
python -c "import utool, utool.util_progress; utool.doctest_funcs(utool.util_progress)"
python -m utool.util_progress
python -m utool.util_progress --allexamples
"""
import multiprocessing
multiprocessing.freeze_support() # for win32
import utool as ut # NOQA
ut.doctest_funcs()
| [
"utool.util_arg.get_argflag",
"six.moves.range",
"math.ceil",
"collections.deque",
"utool.util_time.tic",
"utool.util_time.toc",
"utool.util_logging._utool_write",
"utool.util_iter.ichunks",
"time.strftime",
"utool.doctest_funcs",
"utool.Timer",
"time.sleep",
"multiprocessing.freeze_support",
"functools.partial",
"utool.util_logging._utool_flush",
"six.text_type",
"utool.util_inject.inject2"
] | [((584, 613), 'utool.util_inject.inject2', 'util_inject.inject2', (['__name__'], {}), '(__name__)\n', (603, 613), False, 'from utool import util_inject\n'), ((782, 818), 'utool.util_arg.get_argflag', 'util_arg.get_argflag', (['"""--aggroflush"""'], {}), "('--aggroflush')\n", (802, 818), False, 'from utool import util_arg\n'), ((918, 973), 'utool.util_arg.get_argflag', 'util_arg.get_argflag', (["('--no-progress', '--noprogress')"], {}), "(('--no-progress', '--noprogress'))\n", (938, 973), False, 'from utool import util_arg\n'), ((995, 1042), 'utool.util_arg.get_argflag', 'util_arg.get_argflag', (["('--force-all-progress',)"], {}), "(('--force-all-progress',))\n", (1015, 1042), False, 'from utool import util_arg\n'), ((1138, 1181), 'utool.util_arg.get_argflag', 'util_arg.get_argflag', (['"""--debug-adjust-freq"""'], {}), "('--debug-adjust-freq')\n", (1158, 1181), False, 'from utool import util_arg\n'), ((845, 903), 'utool.util_arg.get_argflag', 'util_arg.get_argflag', (["('--screen', '--progress-backspace')"], {}), "(('--screen', '--progress-backspace'))\n", (865, 903), False, 'from utool import util_arg\n'), ((5778, 5813), 'utool.util_iter.ichunks', 'util_iter.ichunks', (['list_', 'chunksize'], {}), '(list_, chunksize)\n', (5795, 5813), False, 'from utool import util_iter\n'), ((5933, 5971), 'functools.partial', 'partial', (['ProgressIter', '*args'], {}), '(ProgressIter, *args, **kwargs)\n', (5940, 5971), False, 'from functools import partial\n'), ((33242, 33264), 'six.text_type', 'six.text_type', (['max_val'], {}), '(max_val)\n', (33255, 33264), False, 'import six\n'), ((37469, 37501), 'multiprocessing.freeze_support', 'multiprocessing.freeze_support', ([], {}), '()\n', (37499, 37501), False, 'import multiprocessing\n'), ((37551, 37569), 'utool.doctest_funcs', 'ut.doctest_funcs', ([], {}), '()\n', (37567, 37569), True, 'import utool as ut\n'), ((1646, 1656), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (1654, 1656), True, 'import utool as ut\n'), ((1844, 1854), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (1852, 1854), True, 'import utool as ut\n'), ((2034, 2044), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (2042, 2044), True, 'import utool as ut\n'), ((2063, 2080), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (2068, 2080), False, 'from six.moves import range, zip\n'), ((2195, 2205), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (2203, 2205), True, 'import utool as ut\n'), ((2355, 2365), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (2363, 2365), True, 'import utool as ut\n'), ((2534, 2544), 'utool.Timer', 'ut.Timer', ([], {}), '()\n', (2542, 2544), True, 'import utool as ut\n'), ((2563, 2580), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (2568, 2580), False, 'from six.moves import range, zip\n'), ((3078, 3099), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (3088, 3099), False, 'import time\n'), ((3330, 3342), 'six.moves.range', 'range', (['(0)', '(10)'], {}), '(0, 10)\n', (3335, 3342), False, 'from six.moves import range, zip\n'), ((3704, 3716), 'six.moves.range', 'range', (['(0)', '(10)'], {}), '(0, 10)\n', (3709, 3716), False, 'from six.moves import range, zip\n'), ((4655, 4684), 'math.ceil', 'math.ceil', (['(length / chunksize)'], {}), '(length / chunksize)\n', (4664, 4684), False, 'import math\n'), ((23466, 23511), 'collections.deque', 'collections.deque', (['[]'], {'maxlen': 'self.est_window'}), '([], maxlen=self.est_window)\n', (23483, 23511), False, 'import collections\n'), ((35451, 35478), 'utool.util_logging._utool_write', 'util_logging._utool_write', ([], {}), '()\n', (35476, 35478), False, 'from utool import util_logging\n'), ((35498, 35525), 'utool.util_logging._utool_flush', 'util_logging._utool_flush', ([], {}), '()\n', (35523, 35525), False, 'from utool import util_logging\n'), ((1691, 1708), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (1696, 1708), False, 'from six.moves import range, zip\n'), ((1744, 1765), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (1754, 1765), False, 'import time\n'), ((1889, 1906), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (1894, 1906), False, 'from six.moves import range, zip\n'), ((1942, 1963), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (1952, 1963), False, 'import time\n'), ((2094, 2115), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (2104, 2115), False, 'import time\n'), ((2240, 2257), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (2245, 2257), False, 'from six.moves import range, zip\n'), ((2293, 2314), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (2303, 2314), False, 'import time\n'), ((2400, 2417), 'six.moves.range', 'range', (['(0)', 'numiter'], {}), '(0, numiter)\n', (2405, 2417), False, 'from six.moves import range, zip\n'), ((2442, 2463), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (2452, 2463), False, 'import time\n'), ((2594, 2615), 'time.sleep', 'time.sleep', (['sleeptime'], {}), '(sleeptime)\n', (2604, 2615), False, 'import time\n'), ((3025, 3034), 'six.moves.range', 'range', (['(10)'], {}), '(10)\n', (3030, 3034), False, 'from six.moves import range, zip\n'), ((3036, 3045), 'six.moves.range', 'range', (['(10)'], {}), '(10)\n', (3041, 3045), False, 'from six.moves import range, zip\n'), ((3479, 3491), 'six.moves.range', 'range', (['(0)', '(10)'], {}), '(0, 10)\n', (3484, 3491), False, 'from six.moves import range, zip\n'), ((3598, 3620), 'time.sleep', 'time.sleep', (['sleeptime2'], {}), '(sleeptime2)\n', (3608, 3620), False, 'import time\n'), ((3852, 3864), 'six.moves.range', 'range', (['(0)', '(10)'], {}), '(0, 10)\n', (3857, 3864), False, 'from six.moves import range, zip\n'), ((3971, 3993), 'time.sleep', 'time.sleep', (['sleeptime2'], {}), '(sleeptime2)\n', (3981, 3993), False, 'import time\n'), ((17076, 17097), 'six.text_type', 'six.text_type', (['length'], {}), '(length)\n', (17089, 17097), False, 'import six\n'), ((20077, 20104), 'utool.util_logging._utool_write', 'util_logging._utool_write', ([], {}), '()\n', (20102, 20104), False, 'from utool import util_logging\n'), ((20130, 20157), 'utool.util_logging._utool_flush', 'util_logging._utool_flush', ([], {}), '()\n', (20155, 20157), False, 'from utool import util_logging\n'), ((36619, 36637), 'utool.util_time.tic', 'util_time.tic', (['lbl'], {}), '(lbl)\n', (36632, 36637), False, 'from utool import util_time\n'), ((21862, 21889), 'utool.util_logging._utool_flush', 'util_logging._utool_flush', ([], {}), '()\n', (21887, 21889), False, 'from utool import util_logging\n'), ((36851, 36868), 'utool.util_time.toc', 'util_time.toc', (['tt'], {}), '(tt)\n', (36864, 36868), False, 'from utool import util_time\n'), ((20946, 20980), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d %H:%M:%S"""'], {}), "('%Y-%m-%d %H:%M:%S')\n", (20959, 20980), False, 'import time\n'), ((22447, 22474), 'utool.util_logging._utool_flush', 'util_logging._utool_flush', ([], {}), '()\n', (22472, 22474), False, 'from utool import util_logging\n'), ((30940, 30962), 'time.strftime', 'time.strftime', (['"""%H:%M"""'], {}), "('%H:%M')\n", (30953, 30962), False, 'import time\n'), ((22309, 22336), 'utool.util_logging._utool_write', 'util_logging._utool_write', ([], {}), '()\n', (22334, 22336), False, 'from utool import util_logging\n')] |
Subsets and Splits