| repo_name (string, len 5–92) | path (string, len 4–232) | copies (string, 19 distinct values) | size (string, len 4–7) | content (string, len 721–1.04M) | license (string, 15 distinct values) | hash (int64, -9,223,277,421,539,062,000 – 9,223,102,107B) | line_mean (float64, 6.51–99.9) | line_max (int64, 15–997) | alpha_frac (float64, 0.25–0.97) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
CloCkWeRX/rabbitvcs | rabbitvcs/vcs/git/gittyup/client.py | 1 | 62939 | #
# client.py
#
import os
import os.path
import re
import shutil
import fnmatch
import time
from string import ascii_letters, digits
from datetime import datetime
from mimetypes import guess_type
import subprocess
import dulwich.errors
import dulwich.repo
import dulwich.objects
from dulwich.pack import Pack
from dulwich.index import commit_index, write_index_dict, SHA1Writer
#from dulwich.patch import write_tree_diff
from exceptions import *
import util
from objects import *
from config import GittyupLocalFallbackConfig
from command import GittyupCommand
TZ = -1 * time.timezone
ENCODING = "UTF-8"
def callback_notify_null(val):
pass
def callback_get_user():
from pwd import getpwuid
pwuid = getpwuid(os.getuid())
user = pwuid[0]
fullname = pwuid[4]
host = os.getenv("HOSTNAME")
return (fullname, "%s@%s" % (user, host))
def callback_get_cancel():
return False
def get_tmp_path(filename):
tmpdir = "/tmp/rabbitvcs"
if not os.path.isdir(tmpdir):
os.mkdir(tmpdir)
return os.path.join(tmpdir, filename)
class GittyupClient:
def __init__(self, path=None, create=False):
self.callback_notify = callback_notify_null
self.callback_progress_update = None
self.callback_get_user = callback_get_user
self.callback_get_cancel = callback_get_cancel
self.global_ignore_patterns = []
self.git_version = None
self.numberOfCommandStages = 0
self.numberOfCommandStagesExecuted = 0
if path:
try:
self.repo = dulwich.repo.Repo(path)
self._load_config()
self.global_ignore_patterns = self._get_global_ignore_patterns()
except dulwich.errors.NotGitRepository:
if create:
self.initialize_repository(path)
self.global_ignore_patterns = self._get_global_ignore_patterns()
else:
raise NotRepositoryError()
else:
self.repo = None
#
# Start Private Methods
#
def _initialize_index(self):
index_path = self.repo.index_path()
f = open(index_path, "wb")
try:
f = SHA1Writer(f)
write_index_dict(f, {})
finally:
f.close()
def _get_index(self):
if self.repo.has_index() == False:
self._initialize_index()
return self.repo.open_index()
def _get_tree_at_head(self):
try:
tree = self.repo[self.repo[self.repo.head()].tree]
except KeyError, e:
tree = dulwich.objects.Tree()
return tree
def _get_working_tree(self):
return self.repo[commit_index(self.repo.object_store, self._get_index())]
def _get_tree_from_sha1(self, sha1):
return self.repo[self.repo[sha1].tree]
def _get_tree_index(self, tree=None):
if tree is None:
tree = self._get_tree_at_head()
tree_index = {}
if tree:
for item in self.repo.object_store.iter_tree_contents(tree.id):
tree_index[item[0]] = (item[1], item[2])
return tree_index
def _get_git_version(self):
"""
Gets the local git version
"""
if self.git_version:
return self.git_version
else:
try:
proc = subprocess.Popen(["git", "--version"], stdout=subprocess.PIPE)
response = proc.communicate()[0].split()
version = response[2].split(".")
self.git_version = version
return self.git_version
except Exception, e:
return None
def _version_greater_than(self, version1, version2):
len1 = len(version1)
len2 = len(version2)
max = 5
# Pad the version lists so they are the same length
if max > len1:
version1 += [0] * (max-len1)
if max > len2:
version2 += [0] * (max-len2)
if version1[0] > version2[0]:
return True
if (version1[0] == version2[0]
and version1[1] > version2[1]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] > version2[2]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] == version2[2]
and version1[3] > version2[3]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] == version2[2]
and version1[3] == version2[3]
and version1[4] > version2[4]):
return True
return False
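# Note (illustrative, not part of the original code): because Python
# compares lists lexicographically, the cascade above is equivalent to
# padding both lists to length 5 and using the built-in comparison:
#
#   version1 += [0] * (5 - len(version1))
#   version2 += [0] * (5 - len(version2))
#   return version1 > version2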
def _get_global_ignore_patterns(self):
"""
Get ignore patterns from $GIT_DIR/info/exclude then from
core.excludesfile in gitconfig.
"""
patterns = []
files = self.get_global_ignore_files()
for path in files:
patterns += self.get_ignore_patterns_from_file(path)
return patterns
def get_global_ignore_files(self):
"""
Returns a list of ignore files possible for this repository
"""
try:
git_dir = os.environ["GIT_DIR"]
except KeyError:
git_dir = os.path.join(self.repo.path, ".git")
files = []
excludefile = os.path.join(git_dir, "info", "exclude")
files.append(excludefile)
try:
core_excludesfile = self.config.get("core", "excludesfile")
if core_excludesfile:
files.append(core_excludesfile)
except KeyError:
pass
return files
def get_local_ignore_file(self, path):
if not os.path.exists(path):
return []
if os.path.isfile(path):
# Use the containing directory of a file path
path = os.path.dirname(path)
return os.path.join(path, ".gitignore")
def get_ignore_patterns_from_file(self, path):
"""
Read in an ignore patterns file (i.e. .gitignore, $GIT_DIR/info/exclude)
and return a list of patterns
"""
patterns = []
if os.path.isfile(path):
file = open(path, "r")
try:
for line in file:
line = line.rstrip("\n")
if line == "" or line.startswith("#"):
continue
patterns.append(line)
finally:
file.close()
return patterns
def get_local_config_file(self):
try:
git_dir = os.environ["GIT_DIR"]
except KeyError:
git_dir = os.path.join(self.repo.path, ".git")
return git_dir + "/config"
def _ignore_file(self, patterns, filename):
"""
Determine whether the given file should be ignored
"""
for pattern in patterns:
if fnmatch.fnmatch(filename, pattern) and not pattern.startswith("!"):
return True
return False
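# Example (illustrative): _ignore_file(["*.pyc", "build"], "foo.pyc")
# returns True via the "*.pyc" glob. Negation patterns ("!keep.pyc") are
# simply skipped here rather than re-including files, which is a
# simplification of full gitignore semantics.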
def _read_directory_tree(self, path, show_ignored_files=False):
files = []
directories = []
for root, dirs, filenames in os.walk(path, topdown=True):
try:
dirs.remove(".git")
except ValueError:
pass
# Find the relative root path of this folder
if root == self.repo.path:
rel_root = ""
else:
rel_root = self.get_relative_path(root)
for filename in filenames:
files.append(os.path.join(rel_root, filename))
for _d in dirs:
directories.append(os.path.join(rel_root, _d))
directories.append(rel_root)
#Remove duplicates in list
directories=list(set(directories))
return (sorted(files), directories)
def _get_blob_from_file(self, path):
file = open(path, "rb")
try:
blob = dulwich.objects.Blob.from_string(file.read())
finally:
file.close()
return blob
def _write_blob_to_file(self, path, blob):
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
file = open(path, "wb")
try:
file.write(blob.data)
finally:
file.close()
def _load_config(self):
self.config = GittyupLocalFallbackConfig(self.repo.path)
def _get_config_user(self):
try:
config_user_name = self.config.get("user", "name")
config_user_email = self.config.get("user", "email")
if config_user_name == "" or config_user_email == "":
raise KeyError()
except KeyError:
(config_user_name, config_user_email) = self.callback_get_user()
if config_user_name == None and config_user_email == None:
return None
self.config.set("user", "name", config_user_name)
self.config.set("user", "email", config_user_email)
self.config.write()
return "%s <%s>" % (config_user_name, config_user_email)
def _write_packed_refs(self, refs):
packed_refs_str = ""
for ref,sha in refs.items():
packed_refs_str = "%s %s\n" % (sha, ref)
fd = open(os.path.join(self.repo.controldir(), "packed-refs"), "wb")
fd.write(packed_refs_str)
fd.close()
def _remove_from_index(self, index, key):
del index._byname[key]
#
# Start Public Methods
#
def initialize_repository(self, path, bare=False):
if not os.path.isdir(path):
os.mkdir(path)
cmd = ["git", "init"]
if bare:
cmd.append("--bare")
cmd.append(path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def set_repository(self, path):
try:
self.repo = dulwich.repo.Repo(path)
self._load_config()
except dulwich.errors.NotGitRepository:
raise NotRepositoryError()
def get_repository(self):
return self.repo.path
def find_repository_path(self, path):
path_to_check = path
while path_to_check != "/" and path_to_check != "":
if os.path.isdir(os.path.join(path_to_check, ".git")):
return path_to_check
path_to_check = os.path.split(path_to_check)[0]
return None
def get_relative_path(self, path):
if path == self.repo.path:
return ""
return util.relativepath(self.repo.path, path)
def get_absolute_path(self, path):
return os.path.join(self.repo.path, path).rstrip("/")
def track(self, name):
self.repo.refs.set_symbolic_ref("HEAD", name)
def is_tracking(self, name):
return (self.repo.refs.read_ref("HEAD")[5:] == name)
def tracking(self):
return self.repo.refs.read_ref("HEAD")[5:]
def head(self):
return self.repo.refs["HEAD"]
def get_sha1_from_refspec(self, refspec):
if refspec in self.repo.refs:
return self.repo.refs[refspec]
else:
return None
def stage(self, paths):
"""
Stage files to be committed or tracked
@type paths: list
@param paths: A list of files
"""
index = self._get_index()
if type(paths) in (str, unicode):
paths = [paths]
for path in paths:
relative_path = self.get_relative_path(path)
absolute_path = self.get_absolute_path(path)
blob = self._get_blob_from_file(absolute_path)
if relative_path in index:
(ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags) = index[relative_path]
else:
flags = 0
# make sure mtime and ctime is updated every time a file is staged
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
index[relative_path] = (ctime, mtime, dev, ino, mode, uid, gid, size, blob.id, flags)
index.write()
self.notify({
"action": "Staged",
"path": absolute_path,
"mime_type": guess_type(absolute_path)[0]
})
self.repo.object_store.add_object(blob)
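# Illustrative usage (sketch; the working-copy path is hypothetical):
#
#   client = GittyupClient("/tmp/wc")
#   client.stage("/tmp/wc/foo.py")                   # a single path...
#   client.stage(["/tmp/wc/a.py", "/tmp/wc/b.py"])   # ...or a list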
def stage_all(self):
"""
Stage all files in a repository to be committed or tracked
"""
index = self._get_index()
for status in self.status():
if status in [AddedStatus, RemovedStatus, ModifiedStatus]:
abs_path = self.get_absolute_path(status.path)
if os.path.isfile(abs_path):
self.stage(abs_path)
if status == MissingStatus:
self._remove_from_index(index, status.path)
index.write()
def unstage(self, paths):
"""
Unstage files so they are not committed or tracked
@type paths: list
@param paths: A list of files
"""
index = self._get_index()
tree = self._get_tree_index()
if type(paths) in (str, unicode):
paths = [paths]
for path in paths:
relative_path = self.get_relative_path(path)
if relative_path in index:
if relative_path in tree:
(ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags) = index[relative_path]
(mode, blob_id) = tree[relative_path]
# If the file is locally modified, set these vars to 0
# I'm not sure yet why this needs to happen, but it does
# in order for the file to appear modified and not normal
blob = self._get_blob_from_file(path)
if blob.id != blob_id:
ctime = 0
mtime = 0
dev = 0
ino = 0
uid = 0
gid = 0
size = 0
index[relative_path] = (ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags)
else:
self._remove_from_index(index, relative_path)
else:
if relative_path in tree:
index[relative_path] = (0, 0, 0, 0, tree[relative_path][0], 0, 0, 0, tree[relative_path][1], 0)
self.notify({
"action": "Unstaged",
"path": path,
"mime_type": guess_type(path)[0]
})
index.write()
def unstage_all(self):
"""
Unstage all files so they are not committed or tracked
"""
index = self._get_index()
for status in self.status():
abs_path = self.get_absolute_path(status.path)
if os.path.isfile(abs_path):
self.unstage(abs_path)
def get_staged(self):
"""
Gets a list of files that are staged
"""
staged = []
tree = self._get_tree_at_head()
index = self._get_index()
if len(tree) > 0:
for item in index.changes_from_tree(self.repo.object_store, tree.id):
((old_name, new_name), (old_mode, new_mode), (old_sha, new_sha)) = item
if new_name:
staged.append(new_name)
if old_name and old_name != new_name:
staged.append(old_name)
else:
for path in index:
staged.append(path)
return staged
def is_staged(self, path, staged_files=None):
"""
Determines if the specified path is staged
@type path: string
@param path: A file path
@rtype boolean
"""
if not staged_files:
staged_files = self.get_staged()
relative_path = self.get_relative_path(path)
return (relative_path in staged_files)
def branch(self, name, commit_sha=None, track=False):
"""
Create a new branch
@type name: string
@param name: The name of the new branch
@type commit_sha: string
@param commit_sha: A commit sha to branch from. If None, branches
from head
@type track: boolean
@param track: Whether or not to track the new branch, or just create it
"""
cmd = ["git", "branch"]
if track:
cmd.append("-t")
cmd += [name, commit_sha]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def branch_delete(self, name):
"""
Delete a branch
@type name: string
@param name: The name of the branch
"""
ref_name = "refs/heads/%s" % name
refs = self.repo.get_refs()
if ref_name in refs:
if self.is_tracking(ref_name):
self.track("refs/heads/master")
del self.repo.refs[ref_name]
def branch_rename(self, old_name, new_name):
"""
Rename a branch
@type old_name: string
@param old_name: The name of the branch to be renamed
@type new_name: string
@param new_name: The name of the new branch
"""
old_ref_name = "refs/heads/%s" % old_name
new_ref_name = "refs/heads/%s" % new_name
refs = self.repo.get_refs()
if old_ref_name in refs:
self.repo.refs[new_ref_name] = self.repo.refs[old_ref_name]
if self.is_tracking(old_ref_name):
self.track(new_ref_name)
del self.repo.refs[old_ref_name]
def branch_list(self, commit_sha=None):
"""
List all branches
"""
"""
refs = self.repo.get_refs()
branches = []
for ref,branch_sha in refs.items():
if ref.startswith("refs/heads"):
branch = Branch(ref[11:], branch_sha, self.repo[branch_sha])
branches.append(branch)
return branches
"""
cmd = ["git", "branch", "-lv", "--no-abbrev"]
if commit_sha:
cmd += ["--contains", commit_sha]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
branches = []
for line in stdout:
if not line:
continue
components = line.split()
if components[0] != "*":
components.insert(0, "")
tracking = (components.pop(0) == "*")
if components[0] == "(no":
name = components.pop(0) + " " + components.pop(0)
else:
name = components.pop(0)
revision = components.pop(0)
message = " ".join(components)
branches.append({
"tracking": tracking,
"name": name,
"revision": revision,
"message": message
})
return branches
def checkout(self, paths=[], revision="HEAD"):
"""
Checkout a series of paths from a tree or commit. If no tree or commit
information is given, it will check out the files from head. If no
paths are given, all files will be checked out from head.
@type paths: list
@param paths: A list of files to checkout
@type revision: string
@param revision: The sha or branch to checkout
"""
if len(paths) == 1 and paths[0] == self.repo.path:
paths = []
cmd = ["git", "checkout", "-m", revision] + paths
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def clone(self, host, path, bare=False, origin="origin"):
"""
Clone a repository
@type host: string
@param host: The url of the git repository
@type path: string
@param path: The path to clone to
@type bare: boolean
@param bare: Create a bare repository or not
@type origin: string
@param origin: Specify the origin of the repository
"""
self.numberOfCommandStages = 3
more = ["-o", "origin","--progress"]
if bare:
more.append("--bare")
base_dir = os.path.split(path)[0]
cmd = ["git", "clone", host, path] + more
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=base_dir, notify=self.notify_and_parse_progress, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def commit(self, message, parents=None, committer=None, commit_time=None,
commit_timezone=None, author=None, author_time=None,
author_timezone=None, encoding=None, commit_all=False):
"""
Commit staged files to the local repository
@type message: string
@param message: The log message
@type parents: list
@param parents: A list of parent SHAs. Defaults to head.
@type committer: string
@param committer: The person committing. Defaults to
"user.name <user.email>"
@type commit_time: int
@param commit_time: The commit time. Defaults to time.time()
@type commit_timezone: int
@param commit_timezone: The commit timezone.
Defaults to (-1 * time.timezone)
@type author: string
@param author: The author of the file changes. Defaults to
"user.name <user.email>"
@type author_time: int
@param author_time: The author time. Defaults to time.time()
@type author_timezone: int
@param author_timezone: The author timezone.
Defaults to (-1 * time.timezone)
@type encoding: string
@param encoding: The encoding of the commit. Defaults to UTF-8.
@type commit_all: boolean
@param commit_all: Stage all changed files before committing
"""
if not committer:
committer = self._get_config_user()
if not committer:
raise GittyupCommandError("A committer was not specified")
if not author:
author = self._get_config_user()
if not author:
raise GittyupCommandError("An author was not specified")
if commit_all:
self.stage_all()
commit = dulwich.objects.Commit()
commit.message = message
commit.tree = commit_index(self.repo.object_store, self._get_index())
initial_commit = False
try:
commit.parents = (parents and parents or [self.repo.head()])
except KeyError:
# The initial commit has no parent
initial_commit = True
pass
commit.committer = committer
commit.commit_time = (commit_time and commit_time or int(time.time()))
commit.commit_timezone = (commit_timezone and commit_timezone or TZ)
commit.author = author
commit.author_time = (author_time and author_time or int(time.time()))
commit.author_timezone = (author_timezone and author_timezone or TZ)
commit.encoding = (encoding and encoding or ENCODING)
self.repo.object_store.add_object(commit)
self.repo.refs["HEAD"] = commit.id
if initial_commit:
self.track("refs/heads/master")
# Get the branch for this repository.
branch_full = self.repo.refs.read_ref("HEAD")
if (branch_full != None):
branch_components = re.search("refs/heads/(.+)", branch_full)
if (branch_components != None):
branch = branch_components.group(1)
self.notify("[" + commit.id + "] -> " + branch)
self.notify("To branch: " + branch)
#Print tree changes.
#dulwich.patch.write_tree_diff(sys.stdout, self.repo.object_store, commit.tree, commit.id)
return commit.id
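# Illustrative usage (sketch): commit whatever is currently staged, or
# stage all changed files first with commit_all=True.
#
#   sha = client.commit("Fix crash in parser")
#   sha = client.commit("Release 1.0", commit_all=True)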
def remove(self, paths):
"""
Remove path from the repository. Also deletes the local file.
@type paths: list
@param paths: A list of paths to remove
"""
if type(paths) in (str, unicode):
paths = [paths]
index = self._get_index()
for path in paths:
relative_path = self.get_relative_path(path)
if relative_path in index:
self._remove_from_index(index, relative_path)
os.remove(path)
index.write()
def move(self, source, dest):
"""
Move a file within the repository
@type source: string
@param source: The source file
@type dest: string
@param dest: The destination. If dest exists as a directory, source
will be added as a child. Otherwise, source will be renamed to
dest.
"""
index = self._get_index()
relative_source = self.get_relative_path(source)
relative_dest = self.get_relative_path(dest)
# Get a list of affected files so we can update the index
source_files = []
if os.path.isdir(source):
for name in index:
if name.startswith(relative_source):
source_files.append(name)
else:
source_files.append(self.get_relative_path(source))
# Rename the affected index entries
for source_file in source_files:
new_path = source_file.replace(relative_source, relative_dest)
if os.path.isdir(dest):
new_path = os.path.join(new_path, os.path.basename(source_file))
index[new_path] = index[source_file]
self._remove_from_index(index, source_file)
index.write()
# Actually move the file/folder
shutil.move(source, dest)
def pull(self, repository="origin", refspec="master"):
"""
Fetch objects from a remote repository and merge with the local
repository
@type repository: string
@param repository: The name of the repository
@type refspec: string
@param refspec: The branch name to pull from
"""
self.numberOfCommandStages = 2
cmd = ["git", "pull","--progress", repository, refspec]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify_and_parse_git_pull, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def push(self, repository="origin", refspec="master"):
"""
Push objects from the local repository to the remote repository.
@type repository: string
@param repository: The name of the repository
@type refspec: string
@param refspec: The branch name to push to
"""
self.numberOfCommandStages = 2
cmd = ["git", "push", "--progress", repository, refspec]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify_and_parse_git_push, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def fetch(self, host):
"""
Fetch objects from a remote repository. This will not merge the files
into the local working copy, use pull for that.
@type host: string
@param host: The git url from which to fetch
"""
client, host_path = util.get_transport_and_path(host)
graphwalker = self.repo.get_graph_walker()
f, commit = self.repo.object_store.add_pack()
refs = client.fetch_pack(host_path, self.repo.object_store.determine_wants_all,
graphwalker, f.write, self.callback_notify)
commit()
return refs
def merge(self, branch):
cmd = ["git", "merge", branch]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_add(self, name, host):
"""
Add a remote repository
@type name: string
@param name: The name to give to the remote repository
@type host: string
@param host: The git url to add
"""
cmd = ["git", "remote", "add", name, host]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_rename(self, current_name, new_name):
"""
Rename a remote repository
@type current_name: string
@param current_name: The current name of the repository
@type new_name: string
@param new_name: The name to give to the remote repository
"""
cmd = ["git", "remote", "rename", current_name, new_name]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_set_url(self, name, url):
"""
Change a remote repository's url
@type name: string
@param name: The name of the repository
@type url: string
@param url: The url for the repository
"""
cmd = ["git", "remote", "set-url", name, url]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_delete(self, name):
"""
Remove a remote repository
@type name: string
@param name: The name of the remote repository to remove
"""
cmd = ["git", "remote", "rm", name]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_list(self):
"""
Return a list of the remote repositories
@rtype list
@return A list of dicts with keys: name, host
"""
cmd = ["git", "remote", "-v"]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
returner = []
for line in stdout:
components = line.split()
if components:
name = components[0]
host = components[1]
add = True
for item in returner:
if item["name"] == name:
add = False
if add:
returner.append({
"name": name,
"host": host
})
return returner
def tag(self, name, message, revision="HEAD"):
"""
Create a tag object
@type name: string
@param name: The name to give the tag
@type message: string
@param message: A log message
@type revision: string
@param revision: The revision to tag. Defaults to HEAD
"""
self._get_config_user()
cmd = ["git", "tag", "-m", message, name, revision]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def tag_delete(self, name):
"""
Delete a tag
@type name: string
@param name: The name of the tag to delete
"""
ref_name = "refs/tags/%s" % name
refs = self.repo.get_refs()
if ref_name in refs:
del self.repo.refs[ref_name]
def tag_list(self):
"""
Return a list of Tag objects
"""
refs = self.repo.get_refs()
tags = []
for ref,tag_sha in refs.items():
if ref.startswith("refs/tags"):
if type(self.repo[tag_sha]) == dulwich.objects.Commit:
tag = CommitTag(ref[10:], tag_sha, self.repo[tag_sha])
else:
tag = Tag(tag_sha, self.repo[tag_sha])
tags.append(tag)
return tags
def status_porcelain(self, path):
if os.path.isdir(path):
(files, directories) = self._read_directory_tree(path)
else:
files = [self.get_relative_path(path)]
directories = []
files_hash = {}
for file in files:
files_hash[file] = True
cmd = ["git", "status", "--porcelain", path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
statuses = []
modified_files = []
for line in stdout:
components = re.match("^([\sA-Z\?]+)\s(.*?)$", line)
if components:
status = components.group(1)
strip_status = status.strip()
path = components.group(2)
if status == " D":
statuses.append(MissingStatus(path))
elif strip_status in ["M", "R", "U"]:
statuses.append(ModifiedStatus(path))
elif strip_status in ["A", "C"]:
statuses.append(AddedStatus(path))
elif strip_status == "D":
statuses.append(RemovedStatus(path))
elif strip_status == "??":
statuses.append(UntrackedStatus(path))
modified_files.append(path)
try:
del files_hash[path]
except Exception, e:
pass
# Determine untracked directories
cmd = ["git", "clean", "-nd", self.repo.path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
untracked_directories = []
for line in stdout:
components = re.match("^(Would remove)\s(.*?)$", line)
if not components:
continue
untracked_path = components.group(2)
if untracked_path[-1]=='/':
untracked_directories.append(untracked_path[:-1])
#Determine the ignored files and directories in Repo
cmd = ["git", "clean", "-ndX", self.repo.path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
ignored_directories=[]
for line in stdout:
components = re.match("^(Would remove)\s(.*?)$", line)
if not components:
continue
ignored_path = components.group(2)
if ignored_path[-1]=='/':
ignored_directories.append(ignored_path[:-1])
continue
statuses.append(IgnoredStatus(ignored_path))
self.ignored_paths.append(ignored_path)
try:
del files_hash[ignored_path]
except Exception, e:
pass
for file,data in files_hash.items():
ignore_file=False
untracked_file=False
for ignored_path in ignored_directories:
if ignored_path in file:
ignore_file=True
break
for untracked_path in untracked_directories:
if untracked_path in file:
untracked_file=True
break
if untracked_file==True:
statuses.append(UntrackedStatus(file))
if ignore_file==True:
self.ignored_paths.append(file)
elif ignore_file==True:
statuses.append(IgnoredStatus(file))
self.ignored_paths.append(file)
else:
statuses.append(NormalStatus(file))
# Determine status of folders based on child contents
for d in directories:
d_status = NormalStatus(d)
# Check if directory is untracked or a sub-directory of an untracked directory
for untracked_path in untracked_directories:
if untracked_path in d:
d_status = UntrackedStatus(d)
break
# Check if directory includes modified files
for file in modified_files:
if file.startswith(d):
d_status = ModifiedStatus(d)
break
# Check if directory is ignored
for ignored_path in ignored_directories:
if ignored_path in d:
d_status = IgnoredStatus(d)
break
statuses.append(d_status)
return statuses
def status_dulwich(self, path):
tree = self._get_tree_index()
index = self._get_index()
if os.path.isdir(path):
(files, directories) = self._read_directory_tree(path)
else:
files = [self.get_relative_path(path)]
directories = []
files_hash = {}
for file in files:
files_hash[file] = True
statuses = []
# Calculate statuses for files in the current HEAD
modified_files = []
for name in tree:
try:
if index[name]:
inIndex = True
except Exception, e:
inIndex = False
if inIndex:
absolute_path = self.get_absolute_path(name)
if os.path.isfile(absolute_path):
# Cached, determine if modified or not
blob = self._get_blob_from_file(absolute_path)
if blob.id == tree[name][1]:
statuses.append(NormalStatus(name))
else:
modified_files.append(name)
statuses.append(ModifiedStatus(name))
else:
modified_files.append(name)
statuses.append(MissingStatus(name))
else:
modified_files.append(name)
statuses.append(RemovedStatus(name))
try:
del files_hash[name]
except Exception, e:
pass
# Calculate statuses for untracked files
for name,data in files_hash.items():
try:
inTreeIndex = tree[name]
except Exception, e:
inTreeIndex = False
try:
inIndex = index[name]
except Exception, e:
inIndex = False
if inIndex and not inTreeIndex:
modified_files.append(name)
statuses.append(AddedStatus(name))
continue
# Generate a list of appropriate ignore patterns
patterns = []
path_to_check = os.path.dirname(self.get_absolute_path(name))
while path_to_check != self.repo.path:
patterns += self.get_ignore_patterns_from_file(self.get_local_ignore_file(path_to_check))
path_to_check = os.path.split(path_to_check)[0]
patterns += self.get_ignore_patterns_from_file(self.get_local_ignore_file(self.repo.path))
patterns += self.global_ignore_patterns
if not self._ignore_file(patterns, os.path.basename(name)):
statuses.append(UntrackedStatus(name))
else:
self.ignored_paths.append(name)
# Determine status of folders based on child contents
for d in directories:
d_status = NormalStatus(d)
for file in modified_files:
if os.path.join(d, os.path.basename(file)) == file:
d_status = ModifiedStatus(d)
break
statuses.append(d_status)
return statuses
def get_all_ignore_file_paths(self, path):
return self.ignored_paths
def status(self, path):
# TODO - simply get this from the status implementation / avoid global state
self.ignored_paths = []
version = self._get_git_version()
if version and self._version_greater_than(version, [1,7,-1]):
return self.status_porcelain(path)
else:
return self.status_dulwich(path)
def log(self, path="", skip=0, limit=None, revision="", showtype="all"):
cmd = ["git", "--no-pager", "log", "--numstat", "--parents", "--pretty=fuller",
"--date-order"]
if showtype == "all":
cmd.append("--all")
if limit:
cmd.append("-%s" % limit)
if skip:
cmd.append("--skip=%s" % skip)
if revision:
cmd.append(revision)
if path == self.repo.path:
path = ""
if path:
cmd += ["--", path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return []
revisions = []
revision = {}
changed_file = {}
for line in stdout:
if line == "":
continue
if line[0:6] == "commit":
if revision:
if "changed_paths" not in revision:
revision["changed_paths"] = {}
revisions.append(revision)
revision = {}
changed_file = {}
commit_line = line.split(" ")
revision["commit"] = commit_line[1]
revision["parents"] = []
for parent in commit_line[2:]:
revision["parents"].append(parent)
elif line[0:7] == "Author:":
revision["author"] = line[7:].strip()
elif line[0:11] == "AuthorDate:":
revision["author_date"] = line[11:].strip()
elif line[0:7] == "Commit:":
revision["committer"] = line[7:].strip()
elif line[0:11] == "CommitDate:":
revision["commit_date"] = line[11:].strip()
elif line[0:4] == " ":
message = line[4:]
if "message" not in revision:
revision["message"] = ""
else:
revision["message"] += "\n"
revision["message"] = revision["message"] + message
elif line[0].isdigit() or line[0] in "-":
file_line = line.split("\t")
if not changed_file:
revision["changed_paths"] = []
if len(file_line) == 3:
changed_file = {
"additions": file_line[0],
"removals": file_line[1],
"path": file_line[2]
}
revision["changed_paths"].append(changed_file)
if revision:
revisions.append(revision)
return revisions
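# Illustrative shape of one returned revision dict, as built by the parser
# above: {"commit": <sha>, "parents": [<sha>, ...], "author": <name>,
# "author_date": <date>, "committer": <name>, "commit_date": <date>,
# "message": <text>, "changed_paths": [{"additions", "removals", "path"}, ...]}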
def annotate(self, path, revision_obj="HEAD"):
"""
Returns an annotation for a specified file
@type path: string
@param path: The absolute path to a tracked file
@type revision: string
@param revision: HEAD or a sha1 hash
"""
relative_path = self.get_relative_path(path)
cmd = ["git", "annotate", "-l", revision_obj, relative_path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
returner = []
for line in stdout:
components = re.split("\t", line, 3)
if len(components) < 4:
continue
dt = datetime(*time.strptime(components[2][:-6],"%Y-%m-%d %H:%M:%S")[:-2])
message = components[3].split(")", 1)
code = message[1]
if len(components) == 5:
code = components[4]
returner.append({
"revision": components[0],
"author": components[1][1:],
"date": dt,
"line": code,
"number": message[0]
})
return returner
def show(self, path, revision_obj):
"""
Returns a particular file at a given revision object.
@type path: string
@param path: The absolute path to a file
@type revision_obj: git.Revision()
@param revision_obj: The revision object for path
"""
if not revision_obj:
revision_obj = "HEAD"
relative_path = self.get_relative_path(path)
cmd = ["git", "show", "%s:%s" % (revision_obj, relative_path)]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
return "\n".join(stdout)
def diff(self, path1, revision_obj1, path2=None, revision_obj2=None, summarize=False):
"""
Returns the diff between the path(s)/revision(s)
@type path1: string
@param path1: The absolute path to a file
@type revision_obj1: git.Revision()
@param revision_obj1: The revision object for path1
@type path2: string
@param path2: The absolute path to a file
@type revision_obj2: git.Revision()
@param revision_obj2: The revision object for path2
"""
relative_path1 = None
relative_path2 = None
if path1:
relative_path1 = self.get_relative_path(path1)
if path2:
relative_path2 = self.get_relative_path(path2)
cmd = ["git", "diff"]
if revision_obj1:
cmd += [revision_obj1]
if revision_obj2 and path2:
cmd += [revision_obj2]
if relative_path1:
cmd += [relative_path1]
if relative_path2 and relative_path2 != relative_path1:
cmd += [relative_path2]
if summarize:
cmd.append("--name-status")
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
return "\n".join(stdout)
def diff_summarize(self, path1, revision_obj1, path2=None, revision_obj2=None):
results = self.diff(path1, revision_obj1, path2, revision_obj2, True)
summary = []
for line in results.split("\n"):
if not line:
continue
(action, path) = line.split("\t")
summary.append({
"action": action,
"path": path
})
return summary
def export(self, path, dest_path, revision):
"""
Exports a file or directory from a given revision
@type path: string
@param path: The source file/folder to export
@type dest_path: string
@param dest_path: The path to put the exported file(s)
@type revision: string
@param revision: The revision/tree/commit of the source file being exported
"""
tmp_file = get_tmp_path("rabbitvcs-git-export.tar")
cmd1 = ["git", "archive", "--format", "tar", "-o", tmp_file, revision, path]
cmd2 = ["tar", "-xf", tmp_file, "-C", dest_path]
if not os.path.isdir(dest_path):
os.mkdir(dest_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd1, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
(status, stdout, stderr) = GittyupCommand(cmd2, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
self.notify("%s at %s exported to %s" % (path, revision, dest_path))
return "\n".join(stdout)
def clean(self, path, remove_dir=True, remove_ignored_too=False,
remove_only_ignored=False, dry_run=False, force=True):
cmd = ["git", "clean"]
if remove_dir:
cmd.append("-d")
if remove_ignored_too:
cmd.append("-x")
if remove_only_ignored:
cmd.append("-X")
if dry_run:
cmd.append("-n")
if force:
cmd.append("-f")
relative_path = self.get_relative_path(path)
cmd.append(relative_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def reset(self, path, revision, type=None):
relative_path = self.get_relative_path(path)
cmd = ["git", "reset"]
if type:
cmd.append("--%s" % type)
cmd.append(revision)
if relative_path:
cmd.append(relative_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def set_callback_notify(self, func):
self.callback_notify = func
def set_callback_progress_update(self, func):
self.callback_progress_update = func
def set_callback_get_user(self, func):
self.callback_get_user = func
def set_callback_get_cancel(self, func):
self.callback_get_cancel = func
def notify(self, data):
self.callback_notify(data)
def notify_and_parse_progress(self, data):
# When progress is requested from a git command, it will
# respond with the current operation and that operation's current progress
# in the following format: "<Command>: <percentage>% (<pieces completed>/<num pieces>)".
#
# When a command has reached 100%, the final message assumes the format:
# "<Command>: 100% (<num pieces>/<num pieces>), <total size> <unit>, done."
returnData = {"action":"","path":"","mime_type":""}
#print "parsing message: " + str(data)
# If data is already a dict, we'll assume it's already been parsed, and return.
if isinstance (data, dict):
self.notify (data);
return
# Is this an error?
message_components = re.search("^([eE]rror|[fF]atal): (.+)", data)
if message_components != None:
returnData["action"] = "Error"
returnData["path"] = message_components.group(2)
self.notify (returnData)
return
# Check to see if this is a remote command.
remote_check = re.search("^(remote: )(.+)$", data)
if remote_check != None:
returnData["action"] = "Remote"
message = remote_check.group(2)
else:
message = data
# First, we'll test to see if this is a progress notification.
if "%" not in message:
# No, this is just a regular message.
# Some messages have a strange tendency to append a non-printable character,
# followed by a right square bracket and a capital "K". This tests for, and
# strips, these superfluous characters.
message_components = re.search("^(.+).\[K", message)
if message_components != None:
returnData["path"] = message_components.group(1)
else:
returnData["path"] = message
self.notify (returnData)
return
# Extract the percentage, which will be all numerals directly
# prior to '%'.
message_components = re.search("^(.+): +([0-9]+)%", message)
if message_components == None:
print "Error: failed to parse git string: " + data
return
fraction = float(message_components.group(2)) / 100 # Convert percentage to fraction.
current_action = message_components.group(1)
# If we're at 0%, then we want to notify which action we're performing.
if fraction == 0:
returnData["path"] = current_action
self.notify(returnData)
#print "stage fraction: " + str (fraction)
# If we're using a number of stages, adjust the fraction accordingly.
if self.numberOfCommandStages > 0:
fraction = (self.numberOfCommandStagesExecuted + fraction) / self.numberOfCommandStages
# If we've finished the current stage (100%).
if "done" in message:
self.numberOfCommandStagesExecuted += 1
# If we've registered a callback for progress, update with the new fraction.
if self.callback_progress_update != None:
#print "setting pbar: " + str(fraction)
self.callback_progress_update(fraction)
# If we've finished the whole command (all stages).
if fraction == 1 and "done" in message:
# Reset stage variables.
self.numberOfCommandStages = 0
self.numberOfCommandStagesExecuted = 0
def notify_and_parse_git_pull (self, data):
return_data = {"action":"","path":"","mime_type":""}
message_parsed = False
# Look for "From" line (e.g. "From ssh://server:22/my_project")
message_components = re.search("^From (.+)", data)
if message_components != None:
return_data["action"] = "From"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for "Branch" line (e.g. "* branch master -> FETCH_HEAD")
message_components = re.search("\* branch +([A-z0-9]+) +-> (.+)", data)
if message_components != None:
return_data["action"] = "Branch"
return_data["path"] = message_components.group(1) + " -> " + message_components.group(2)
message_parsed = True
# Look for a file line (e.g. "src/somefile.py | 5 -++++")
message_components = re.search(" +(.+) +\| *([0-9]+) ([+-]+)", data)
if message_components != None:
return_data["action"] = "Modified"
return_data["path"] = message_components.group(1)
return_data["mime_type"] = message_components.group(2) + " " + message_components.group(3)
message_parsed = True
# Look for a updating line (e.g. "Updating ffffff..ffffff")
message_components = re.search("^Updating ([a-f0-9.]+)", data)
if message_components != None:
return_data["action"] = "Updating"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "create mode" line (e.g. "create mode 100755 file.py")
message_components = re.search("create mode ([0-9]+) (.+)", data)
if message_components != None:
return_data["action"] = "Create"
return_data["path"] = message_components.group(2)
return_data["mime_type"] = "mode: " + message_components.group(1)
message_parsed = True
# Look for a "delete mode" line (e.g. "create mode 100755 file.py")
message_components = re.search("delete mode ([0-9]+) (.+)", data)
if message_components != None:
return_data["action"] = "Delete"
return_data["path"] = message_components.group(2)
return_data["mime_type"] = "mode: " + message_components.group(1)
message_parsed = True
# Look for an "Auto-merging" line (e.g. "Auto-merging src/file.py")
message_components = re.search("^Auto-merging (.+)", data)
if message_components != None:
return_data["action"] = "Merging"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "binary" line (e.g. "icons/file.png" | Bin 0 -> 55555 bytes)
message_components = re.search("^[ ](.+) +\| Bin ([0-9]+ -> [0-9]+ bytes)", data)
if message_components != None:
return_data["action"] = "Binary"
return_data["path"] = message_components.group(1)
return_data["mime_type"] = message_components.group(2)
message_parsed = True
# Look for a "rename" line (e.g. "rename src/{foo.py => bar.py} (50%)")
message_components = re.search("rename (.+}) \([0-9]+%\)", data)
if message_components != None:
return_data["action"] = "Rename"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "copy" line (e.g. "copy src/{foo.py => bar.py} (50%)")
message_components = re.search("copy (.+}) \([0-9]+%\)", data)
if message_components != None:
return_data["action"] = "Copy"
return_data["path"] = message_components.group(1)
message_parsed = True
# Prepend "Error" to conflict lines. e.g. :
# CONFLICT (content): Merge conflict in file.py.
# Automatic merge failed; fix conflicts and then commit the result.
message_components = re.search("^CONFLICT \(|Automatic merge failed", data)
if message_components != None:
return_data["action"] = "Error"
return_data["path"] = data
message_parsed = True
if message_parsed == False:
return_data = data
self.notify_and_parse_progress (return_data)
def notify_and_parse_git_push (self, data):
return_data = {"action":"","path":"","mime_type":""}
message_parsed = False
# Look for the "To" line, e.g. "To [email protected]:project.git". Exclude any
# lines that include a space (as this could be a message about something else)
message_components = re.search("^To ([^ ]+$)", data)
if message_components != None:
return_data["action"] = "To"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for "new branch" line. e.g. " * [new branch] master -> master"
message_components = re.search("^ \* \[new branch\] +(.+) -> (.+)", data)
if message_components != None:
return_data["action"] = "New Branch"
return_data["path"] = message_components.group(1) + " -> " + message_components.group(2)
message_parsed = True
# Look for "rejected" line. e.g. " ![rejected] master -> master (non-fast-forward)".
message_components = re.search("!\[rejected\] +(.+)", data)
if message_components != None:
return_data["action"] = "Rejected"
return_data["path"] = message_components.group(1)
message_parsed = True
if message_parsed == False:
return_data = data
self.notify_and_parse_progress (return_data)
def get_cancel(self):
return self.callback_get_cancel
| gpl-2.0 | 601,569,765,097,626,400 | 32.073568 | 151 | 0.524556 | false |
utlco/tcnc | tcnc/cam/offset.py | 1 | 7587 | #-----------------------------------------------------------------------------
# Copyright 2012-2016 Claude Zervas
# email: [email protected]
#-----------------------------------------------------------------------------
"""
Offset Line/Arc segments in a tool path to compensate for tool trail offset.
"""
# Python 3 compatibility boilerplate
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future_builtins import *
import math
import logging
import geom
from . import toolpath
from . import util
logger = logging.getLogger(__name__)
def offset_path(path, offset, min_arc_dist, g1_tolerance=None):
"""Recalculate path to compensate for a trailing tangential offset.
This will shift all of the segments by `offset` amount. Arcs will
be recalculated to correct for the shift offset.
Args:
path: The path to recalculate.
offset: The amount of tangential tool trail.
min_arc_dist: The minimum distance between two connected
segment end points that can be bridged with an arc.
A line will be used if the distance is less than this.
g1_tolerance: The angle tolerance to determine if two segments
are g1 continuous.
Returns:
A new path
Raises:
:class:`cam.toolpath.ToolpathException`: if the path contains segment
types other than Line or Arc.
"""
if geom.float_eq(offset, 0.0):
return path
offset_path = []
prev_seg = None
prev_offset_seg = None
for seg in path:
if seg.p1 == seg.p2:
# Skip zero length segments
continue
if isinstance(seg, geom.Line):
# Line segments are easy - just shift them forward by offset
offset_seg = seg.shift(offset)
elif isinstance(seg, geom.Arc):
offset_seg = offset_arc(seg, offset)
else:
raise toolpath.ToolpathException('Unrecognized path segment type.')
# Fix discontinuities caused by offsetting non-G1 segments
if prev_seg is not None:
if prev_offset_seg.p2 != offset_seg.p1:
seg_distance = prev_offset_seg.p2.distance(offset_seg.p1)
# If the distance between the two segments is less than the
# minimum arc distance or if the segments are G1 continuous
# then just insert a connecting line.
if (seg_distance < min_arc_dist
or geom.segments_are_g1(prev_offset_seg, offset_seg,
g1_tolerance)):
connect_seg = geom.Line(prev_offset_seg.p2, offset_seg.p1)
else:
# Insert an arc in tool path to rotate the tool to the next
# starting tangent when the segments are not G1 continuous.
# TODO: avoid creating tiny segments by extending
# offset segment.
p1 = prev_offset_seg.p2
p2 = offset_seg.p1
angle = prev_seg.p2.angle2(p1, p2)
# TODO: This should be a straight line if the arc is tiny
connect_seg = geom.Arc(p1, p2, offset, angle, prev_seg.p2)
# if connect_seg.length() < 0.01:
# logger.debug('tiny arc! length= %f, radius=%f, angle=%f', connect_seg.length(), connect_seg.radius, connect_seg.angle)
connect_seg.inline_start_angle = prev_seg.end_tangent_angle()
connect_seg.inline_end_angle = seg.start_tangent_angle()
offset_path.append(connect_seg)
prev_offset_seg = connect_seg
elif (geom.segments_are_g1(prev_seg, seg, g1_tolerance) and
not hasattr(prev_seg, 'ignore_g1') and
not hasattr(seg, 'ignore_g1')):
# Add hint for smoothing pass
prev_offset_seg.g1 = True
prev_seg = seg
prev_offset_seg = offset_seg
offset_path.append(offset_seg)
# Compensate for starting angle
start_angle = (offset_path[0].p1 - path[0].p1).angle()
offset_path[0].inline_start_angle = start_angle
return offset_path
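# Illustrative usage (sketch, assuming the geom primitives used above):
#
#   path = [geom.Line(geom.P(0, 0), geom.P(10, 0)),
#           geom.Line(geom.P(10, 0), geom.P(10, 10))]
#   compensated = offset_path(path, offset=0.25, min_arc_dist=0.01)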
def offset_arc(arc, offset):
"""Offset the arc by the specified offset.
"""
start_angle = arc.start_tangent_angle()
end_angle = arc.end_tangent_angle()
p1 = arc.p1 + geom.P.from_polar(offset, start_angle)
p2 = arc.p2 + geom.P.from_polar(offset, end_angle)
radius = math.hypot(offset, arc.radius)
offset_arc = geom.Arc(p1, p2, radius, arc.angle, arc.center)
offset_arc.inline_start_angle = start_angle
offset_arc.inline_end_angle = end_angle
return offset_arc
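# Geometry note (illustrative): the end points are shifted along the arc's
# start/end tangents by the tool trail, so the compensating radius is the
# hypotenuse of the trail offset and the original radius:
#
#   radius' = sqrt(offset**2 + radius**2)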
def fix_G1_path(path, tolerance, line_flatness):
"""
"""
new_path = []
if len(path) < 2:
return path
seg1 = path[0]
cp1 = seg1.p1
for seg2 in path[1:]:
if getattr(seg1, 'g1', False):
arcs, cp1 = smoothing_arcs(seg1, seg2, cp1,
tolerance=tolerance, max_depth=1,
line_flatness=line_flatness)
new_path.extend(arcs)
else:
cp1 = seg2.p1
new_path.append(seg1)
seg1 = seg2
# Process last segment...
if getattr(seg1, 'g1', False):
arcs, cp1 = smoothing_arcs(seg1, None, cp1,
tolerance=tolerance, max_depth=1,
line_flatness=line_flatness)
new_path.extend(arcs)
else:
new_path.append(seg1)
return new_path
def smoothing_arcs(seg1, seg2, cp1=None,
tolerance=0.0001, line_flatness=0.0001,
max_depth=1, match_arcs=True):
"""Create circular smoothing biarcs between two segments
that are not currently G1 continuous.
Args:
seg1: First path segment containing first and second points.
Can be a geom.Line or geom.Arc.
seg2: Second path segment containing second and third points.
Can be a geom.Line or geom.Arc.
cp1: Control point computed from previous invocation.
tolerance: Biarc matching tolerance.
line_flatness: Curve to line tolerance.
max_depth: Max Bezier subdivision recursion depth.
match_arcs: Attempt to more closely match existing arc segments.
Default is True.
Returns:
A tuple containing a list of biarc segments and the control point
for the next curve.
"""
curve, cp1 = geom.bezier.smoothing_curve(seg1, seg2, cp1, match_arcs)
# geom.debug.draw_bezier(curve, color='#00ff44') #DEBUG
biarc_segs = curve.biarc_approximation(tolerance=tolerance,
max_depth=max_depth,
line_flatness=line_flatness)
if not biarc_segs:
return ((seg1,), seg1.p2)
# Compute total arc length of biarc approximation
biarc_length = 0
for seg in biarc_segs:
biarc_length += seg.length()
# Fix inline rotation hints for each new arc segment.
a_start = util.seg_start_angle(seg1)
a_end = a_start
sweep = geom.normalize_angle(util.seg_end_angle(seg1) - a_start, center=0.0)
sweep_scale = sweep / biarc_length
for arc in biarc_segs:
a_end = a_start + (arc.length() * sweep_scale)
arc.inline_start_angle = a_start
arc.inline_end_angle = a_end
a_start = a_end
return (biarc_segs, cp1)
| lgpl-3.0 | 6,676,673,369,152,770,000 | 39.790323 | 143 | 0.57704 | false |
edugrasa/demonstrator | gen_templates.py | 1 | 11003 | #
# Copyright (C) 2014-2017 Nextworks
# Author: Vincenzo Maffione <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Template for a IPCM configuration file
ipcmconf_base = {
"configFileVersion": "1.4.1",
"localConfiguration": {
"installationPath": "%(installpath)s/bin",
"libraryPath": "%(installpath)s/lib",
"logPath": "%(varpath)s/var/log",
"consoleSocket": "%(varpath)s/var/run/ipcm-console.sock",
"system-name": "%(sysname)s",
"pluginsPaths": [
"%(installpath)s/lib/rinad/ipcp",
"/lib/modules/4.9.28-irati/extra"
]
},
"ipcProcessesToCreate": [],
"difConfigurations": [],
}
da_map_base = {
"applicationToDIFMappings": [
{
"encodedAppName": "rina.apps.echotime.server-1--",
"difName": "n.DIF"
},
{
"encodedAppName": "traffic.generator.server-1--",
"difName": "n.DIF"
}
],
}
# Template for a normal DIF configuration file
normal_dif_base = {
"difType" : "normal-ipc",
"dataTransferConstants" : {
"addressLength" : 2,
"cepIdLength" : 2,
"lengthLength" : 2,
"portIdLength" : 2,
"qosIdLength" : 2,
"rateLength" : 4,
"frameLength" : 4,
"sequenceNumberLength" : 4,
"ctrlSequenceNumberLength" : 4,
"maxPduSize" : 1470,
"maxSduSize" : 10000,
"difFragmentation" : True,
"maxPduLifetime" : 60000
},
"qosCubes" : [ {
"name" : "unreliablewithflowcontrol",
"id" : 1,
"partialDelivery" : False,
"orderedDelivery" : True,
"efcpPolicies" : {
"dtpPolicySet" : {
"name" : "default",
"version" : "0"
},
"initialATimer" : 0,
"dtcpPresent" : True,
"dtcpConfiguration" : {
"dtcpPolicySet" : {
"name" : "default",
"version" : "0"
},
"rtxControl" : False,
"flowControl" : True,
"flowControlConfig" : {
"rateBased" : False,
"windowBased" : True,
"windowBasedConfig" : {
"maxClosedWindowQueueLength" : 10,
"initialCredit" : 200
}
}
}
}
}, {
"name" : "reliablewithflowcontrol",
"id" : 2,
"partialDelivery" : False,
"orderedDelivery" : True,
"maxAllowableGap": 0,
"efcpPolicies" : {
"dtpPolicySet" : {
"name" : "default",
"version" : "0"
},
"initialATimer" : 0,
"dtcpPresent" : True,
"dtcpConfiguration" : {
"dtcpPolicySet" : {
"name" : "default",
"version" : "0"
},
"rtxControl" : True,
"rtxControlConfig" : {
"dataRxmsNmax" : 5,
"initialRtxTime" : 1000
},
"flowControl" : True,
"flowControlConfig" : {
"rateBased" : False,
"windowBased" : True,
"windowBasedConfig" : {
"maxClosedWindowQueueLength" : 10,
"initialCredit" : 200
}
}
}
}
} ],
"knownIPCProcessAddresses": [],
"addressPrefixes" : [ {
"addressPrefix" : 0,
"organization" : "N.Bourbaki"
}, {
"addressPrefix" : 16,
"organization" : "IRATI"
} ],
"rmtConfiguration" : {
"pffConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "0"
}
},
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"enrollmentTaskConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1",
"parameters" : [ {
"name" : "enrollTimeoutInMs",
"value" : "10000"
}, {
"name" : "watchdogPeriodInMs",
"value" : "30000"
}, {
"name" : "declaredDeadIntervalInMs",
"value" : "120000"
}, {
"name" : "neighborsEnrollerPeriodInMs",
"value" : "0"
}, {
"name" : "maxEnrollmentRetries",
"value" : "0"
} ]
}
},
"flowAllocatorConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"namespaceManagerConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"securityManagerConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"resourceAllocatorConfiguration" : {
"pduftgConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "0"
}
}
},
"routingConfiguration" : {
"policySet" : {
"name" : "link-state",
"version" : "1",
"parameters" : [ {
"name" : "objectMaximumAge",
"value" : "10000"
},{
"name" : "waitUntilReadCDAP",
"value" : "5001"
},{
"name" : "waitUntilError",
"value" : "5001"
},{
"name" : "waitUntilPDUFTComputation",
"value" : "103"
},{
"name" : "waitUntilFSODBPropagation",
"value" : "101"
},{
"name" : "waitUntilAgeIncrement",
"value" : "997"
},{
"name" : "routingAlgorithm",
"value" : "Dijkstra"
}]
}
}
}
def ps_set(d, k, v, parms):
    # Set the policy-set entry d[k] to policy name v; parms is a list of
    # 'name=value' strings to merge into (or replace) its parameters.
    if k not in d:
        d[k] = {'name': '', 'version': '1'}
    if d[k]["name"] == v and "parameters" in d[k]:
        # Same policy already selected: update matching parameters in
        # place and append any new ones.
        cur_names = [p["name"] for p in d[k]["parameters"]]
        for p in parms:
            name, value = p.split('=')
            if name in cur_names:
                for i in range(len(d[k]["parameters"])):
                    if d[k]["parameters"][i]["name"] == name:
                        d[k]["parameters"][i]["value"] = value
                        break
            else:
                d[k]["parameters"].append({ 'name': name, 'value': value })
    elif len(parms) > 0:
        # New policy name (or no parameter list yet): install the given
        # parameters outright.
        d[k]["parameters"] = [ { 'name': p.split('=')[0], 'value': p.split('=')[1]} for p in parms ]
    d[k]["name"] = v
def dtp_ps_set(d, v, parms):
for i in range(len(d["qosCubes"])):
ps_set(d["qosCubes"][i]["efcpPolicies"], "dtpPolicySet", v, parms)
def dtcp_ps_set(d, v, parms):
for i in range(len(d["qosCubes"])):
ps_set(d["qosCubes"][i]["efcpPolicies"]["dtcpConfiguration"], "dtcpPolicySet", v, parms)
policy_translator = {
'rmt.pff': lambda d, v, p: ps_set(d["rmtConfiguration"]["pffConfiguration"], "policySet", v, p),
'rmt': lambda d, v, p: ps_set(d["rmtConfiguration"], "policySet", v, p),
'enrollment-task': lambda d, v, p: ps_set(d["enrollmentTaskConfiguration"], "policySet", v, p),
'flow-allocator': lambda d, v, p: ps_set(d["flowAllocatorConfiguration"], "policySet", v, p),
'namespace-manager': lambda d, v, p: ps_set(d["namespaceManagerConfiguration"], "policySet", v, p),
'security-manager': lambda d, v, p: ps_set(d["securityManagerConfiguration"], "policySet", v, p),
'routing': lambda d, v, p: ps_set(d["routingConfiguration"], "policySet", v, p),
'resource-allocator.pduftg': lambda d, v, p: ps_set(d["resourceAllocatorConfiguration"], "policySet", v, p),
'efcp.*.dtcp': None,
'efcp.*.dtp': None,
}
def is_security_path(path):
sp = path.split('.')
return (len(sp) == 3) and (sp[0] == 'security-manager') and (sp[1] in ['auth', 'encrypt', 'ttl', 'errorcheck'])
# Do we know this path?
def policy_path_valid(path):
if path in policy_translator:
return True
# Try to validate security configuration
if is_security_path(path):
return True
return False
def translate_security_path(d, path, ps, parms):
u1, component, profile = path.split('.')
if "authSDUProtProfiles" not in d["securityManagerConfiguration"]:
d["securityManagerConfiguration"]["authSDUProtProfiles"] = {}
d = d["securityManagerConfiguration"]["authSDUProtProfiles"]
tr = {'auth': 'authPolicy', 'encrypt': 'encryptPolicy',
'ttl': 'TTLPolicy', 'errorcheck': 'ErrorCheckPolicy'}
if profile == 'default':
if profile not in d:
d["default"] = {}
ps_set(d["default"], tr[component], ps, parms)
else: # profile is the name of a DIF
if "specific" not in d:
d["specific"] = []
j = -1
for i in range(len(d["specific"])):
if d["specific"][i]["underlyingDIF"] == profile + ".DIF":
j = i
break
        if j == -1: # No entry for this DIF yet: create one (j stays -1, which then indexes the newly appended entry)
d["specific"].append({"underlyingDIF" : profile + ".DIF"})
ps_set(d["specific"][j], tr[component], ps, parms)
def translate_policy(difconf, path, ps, parms):
    if path == 'efcp.*.dtcp':
dtcp_ps_set(difconf, ps, parms)
elif path == 'efcp.*.dtp':
dtp_ps_set(difconf, ps, parms)
elif is_security_path(path):
translate_security_path(difconf, path, ps, parms)
else:
policy_translator[path](difconf, ps, parms)
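# Illustrative sketch (not part of the original script): applying a policy to
# a copy of the template above. Parameter strings use the 'name=value' form
# expected by ps_set().
#
#   >>> import copy
#   >>> dif = copy.deepcopy(normal_dif_base)
#   >>> translate_policy(dif, 'routing', 'link-state', ['routingAlgorithm=ECMP'])
#   >>> dif["routingConfiguration"]["policySet"]["parameters"][-1]["value"]
#   'ECMP'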
| gpl-2.0 | -1,483,902,271,451,091,000 | 30.527221 | 115 | 0.468418 | false |
evernote/zing | tests/statistics/proxy.py | 1 | 4163 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from pootle_statistics.models import Submission, SubmissionTypes
from pootle_statistics.proxy import SubmissionProxy
def _test_submission_proxy(proxy, sub, fields):
assert proxy.field == sub.field
if sub.field:
assert proxy.field_name
if sub.suggestion:
assert proxy.suggestion == sub.suggestion.pk
assert proxy.suggestion_target == sub.suggestion.target
if sub.unit and "unit_id" in fields:
assert proxy.unit == sub.unit.pk
assert proxy.unit_source == sub.unit.source
assert proxy.unit_translate_url == sub.unit.get_translate_url()
assert proxy.unit_pootle_path == sub.unit.store.pootle_path
assert proxy.unit_state == sub.unit.state
assert proxy.type == sub.type
is_suggestion = bool(
proxy.suggestion
and proxy.type in (SubmissionTypes.SUGG_ACCEPT, SubmissionTypes.SUGG_REJECT)
)
assert proxy.is_suggestion == is_suggestion
if sub.quality_check:
assert proxy.qc_name == sub.quality_check.name
else:
assert proxy.qc_name is None
with pytest.raises(AttributeError):
proxy.asdf
@pytest.mark.django_db
@pytest.mark.xfail(reason="this test needs to be replaced with snapshot-based one")
def test_submission_proxy_info(submissions):
values = Submission.objects.values(*(("id",) + SubmissionProxy.info_fields))
for v in values.iterator():
proxy = SubmissionProxy(v)
submission = submissions[v["id"]]
_test_submission_proxy(proxy, submission, SubmissionProxy.info_fields)
assert sorted(proxy.get_submission_info().items()) == sorted(
submission.get_submission_info().items()
)
@pytest.mark.django_db
def test_submission_proxy_timeline(submissions):
values = Submission.objects.values(*(("id",) + SubmissionProxy.timeline_fields))
for v in values.iterator():
_test_submission_proxy(
SubmissionProxy(v), submissions[v["id"]], SubmissionProxy.timeline_fields
)
@pytest.mark.django_db
def test_submission_proxy_qc_timeline(quality_check_submission):
subs = Submission.objects.filter(pk=quality_check_submission.pk)
_test_submission_proxy(
SubmissionProxy(subs.values(*SubmissionProxy.timeline_fields).first()),
quality_check_submission,
SubmissionProxy.timeline_fields,
)
@pytest.mark.django_db
@pytest.mark.xfail(reason="this test needs to be replaced with snapshot-based one")
def test_submission_proxy_qc_info(quality_check_submission):
subs = Submission.objects.filter(pk=quality_check_submission.pk)
proxy = SubmissionProxy(subs.values(*SubmissionProxy.info_fields).first())
_test_submission_proxy(proxy, quality_check_submission, SubmissionProxy.info_fields)
assert sorted(proxy.get_submission_info().items()) == sorted(
quality_check_submission.get_submission_info().items()
)
@pytest.mark.django_db
@pytest.mark.xfail(reason="this test needs to be replaced with snapshot-based one")
def test_submission_proxy_timeline_info(quality_check_submission):
"""If you use the timeline fields but call get_submission_info you will
get the sub info without the unit data
"""
subs = Submission.objects.filter(pk=quality_check_submission.pk)
sub = subs.values(*SubmissionProxy.timeline_fields).first()
proxy = SubmissionProxy(sub)
assert proxy.unit_info == {}
assert proxy.unit_translate_url is None
assert proxy.unit_pootle_path is None
assert proxy.unit_state is None
non_unit_fields = [
"username",
"display_datetime",
"displayname",
"mtime",
"type",
"email",
"profile_url",
]
proxy_info = proxy.get_submission_info()
sub_info = quality_check_submission.get_submission_info()
for k in non_unit_fields:
assert proxy_info[k] == sub_info[k]
| gpl-3.0 | 221,443,621,008,085,500 | 36.845455 | 88 | 0.694932 | false |
anomalizer/ngx_aws_auth | reference-impl-py/reference_v2.py | 1 | 2701 | #!/usr/bin/env python
from datetime import datetime
from hashlib import sha1
import hmac
import sys
try:
from urllib.request import Request, urlopen, HTTPError # Python 3
except ImportError:
from urllib2 import Request, urlopen, HTTPError # Python 2
'''
Authorization = "AWS" + " " + AWSAccessKeyId + ":" + Signature;
Signature = Base64( HMAC-SHA1( YourSecretAccessKeyID, UTF-8-Encoding-Of( StringToSign ) ) );
StringToSign = HTTP-Verb + "\n" +
Content-MD5 + "\n" +
Content-Type + "\n" +
Date + "\n" +
CanonicalizedAmzHeaders +
CanonicalizedResource;
CanonicalizedResource = [ "/" + Bucket ] +
<HTTP-Request-URI, from the protocol name up to the query string> +
[ subresource, if present. For example "?acl", "?location", "?logging", or "?torrent"];
CanonicalizedAmzHeaders = <described below>
'''
def canon_resource(vhost_mode, bucket, url):
val = "/%s" % bucket if vhost_mode else ""
val = val+url
return val
def str_to_sign_v2(method, vhost_mode, bucket, url):
cr = canon_resource(vhost_mode, bucket, url)
ctype = ""
cmd5 = ""
dt = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
azh = ""
retval = "%s\n%s\n%s\n%s\n%s%s" % (method,
cmd5, ctype, dt, azh, cr)
headers = {}
headers['Date'] = dt
if vhost_mode:
headers['Host'] = "%s.s3.amazonaws.com" % bucket
return {'s2s': retval, 'headers': headers }
def v2sign(key, method, vhost_mode, bucket, url):
raw = str_to_sign_v2(method, vhost_mode, bucket, url)
print "String to sign is\n----------------------\n%s\n---------------------\n" % raw['s2s']
retval = hmac.new(key, raw['s2s'], sha1)
return {'sign': retval.digest().encode("base64").rstrip("\n"),
'headers': raw['headers']}
def az_h(ak, key, method, vhost_mode, bucket, url):
sig = v2sign(key, method, vhost_mode, bucket, url)
ahv = "AWS %s:%s" % (ak, sig['sign'])
sig['headers']['Authorization'] = ahv
return sig['headers']
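# Example of the resulting headers (illustrative values only; the access key
# and signature below are the well-known AWS documentation samples, not real
# credentials):
#   Date: Tue, 27 Mar 2007 19:36:42 GMT
#   Authorization: AWS AKIAIOSFODNN7EXAMPLE:frJIUN8DYpKDtOLCwo//yllqDzg=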
def get_data(ak, key, method, vhost_mode, bucket, url):
if vhost_mode:
rurl = "http://%s.s3.amazonaws.com%s" % (bucket, url)
else:
rurl = "http://s3.amazonaws.com%s" % (url)
q = Request(rurl)
headers = az_h(ak, key, method, vhost_mode, bucket, url)
print 'About to make a request'
print url
print headers
for k,v in headers.iteritems():
q.add_header(k, v)
try:
return urlopen(q).read()
except HTTPError as e:
print 'Got exception', e
if __name__ == "__main__":
ak = sys.argv[1]
k = sys.argv[2]
print get_data(ak, k, "GET", True, "hw.anomalizer", "/lock.txt")
print get_data(ak, k, "GET", False, "hw.anomalizer", "/hw.anomalizer/nq.c")
| bsd-2-clause | -5,930,532,583,128,790,000 | 29.011111 | 95 | 0.601999 | false |
novapost/workalendar | workalendar/tests/test_registry.py | 1 | 5157 | from unittest import TestCase
from ..core import Calendar
from ..exceptions import ISORegistryError
from ..registry import IsoRegistry
class RegionCalendar(Calendar):
'Region'
class SubRegionCalendar(Calendar):
'Sub Region'
class NotACalendarClass:
"Not a Calendar"
class NonStandardRegistryTest(TestCase):
def setUp(self):
self.region = RegionCalendar
self.subregion = SubRegionCalendar
def test_region_registry(self):
registry = IsoRegistry(load_standard_modules=False)
self.assertEqual(0, len(registry.region_registry))
registry.register('RE', self.region)
self.assertEqual(1, len(registry.region_registry))
self.assertEqual(RegionCalendar, registry.region_registry['RE'])
def test_register_non_calendar(self):
registry = IsoRegistry(load_standard_modules=False)
with self.assertRaises(ISORegistryError):
registry.register("NAC", NotACalendarClass)
def test_get(self):
registry = IsoRegistry(load_standard_modules=False)
registry.register('RE', self.region)
registry.register('RE-SR', self.subregion)
calendar_class = registry.get('RE')
self.assertEqual(calendar_class, RegionCalendar)
# Subregion
calendar_class = registry.get('RE-SR')
self.assertEqual(calendar_class, SubRegionCalendar)
# Unknown code/region
self.assertIsNone(registry.get('XX'))
def test_get_subregions(self):
registry = IsoRegistry(load_standard_modules=False)
registry.register('RE', self.region)
registry.register('RE-SR', self.subregion)
registry.register('OR-SR', self.subregion)
subregions = registry.get_subregions('RE')
# Only one sub-region here
self.assertEqual(1, len(subregions))
self.assertIn('RE-SR', subregions)
def test_get_calendars(self):
registry = IsoRegistry(load_standard_modules=False)
registry.register('RE', self.region)
registry.register('RE-SR', self.subregion)
registry.register('OR-SR', self.subregion)
calendars = registry.get_calendars(['RE'], include_subregions=True)
self.assertEqual(2, len(calendars))
self.assertIn('RE', calendars)
self.assertIn('RE-SR', calendars)
calendars = registry.get_calendars(['RE'], include_subregions=False)
self.assertEqual(1, len(calendars))
self.assertIn('RE', calendars)
def test_get_calendars_unknown(self):
registry = IsoRegistry(load_standard_modules=False)
registry.register('RE', self.region)
calendars = registry.get_calendars(['XX'])
self.assertEqual(calendars, {})
def test_get_calendars_with_subregions(self):
registry = IsoRegistry(load_standard_modules=False)
registry.register('RE', self.region)
registry.register('RE2', self.region)
registry.register('RE-SR', self.subregion)
calendars = registry.get_calendars(
['RE2', "RE-SR"], include_subregions=True)
self.assertEqual(2, len(calendars))
self.assertIn('RE2', calendars)
self.assertIn('RE-SR', calendars)
calendars = registry.get_calendars(
['RE2', "RE-SR"], include_subregions=False)
self.assertEqual(2, len(calendars))
self.assertIn('RE2', calendars)
self.assertIn('RE-SR', calendars)
# Only a subregion
calendars = registry.get_calendars(["RE-SR"], include_subregions=True)
self.assertEqual(1, len(calendars))
self.assertIn('RE-SR', calendars)
def test_get_calendars_empty_arg(self):
registry = IsoRegistry(load_standard_modules=False)
# 3 regions, one sub-region
registry.register('RE', self.region)
registry.register('RE2', self.region)
registry.register('RE3', self.region)
registry.register('RE-SR', self.subregion)
# Empty arg, no subregions
calendars = registry.get_calendars([], include_subregions=False)
self.assertEqual(len(calendars), 3)
self.assertEqual({"RE", "RE2", "RE3"}, set(calendars))
# Empty arg, with subregions
calendars = registry.get_calendars([], include_subregions=True)
self.assertEqual(len(calendars), 4)
self.assertEqual({"RE", "RE2", "RE3", "RE-SR"}, set(calendars))
def test_get_calendars_no_arg(self):
registry = IsoRegistry(load_standard_modules=False)
# 3 regions, one sub-region
registry.register('RE', self.region)
registry.register('RE2', self.region)
registry.register('RE3', self.region)
registry.register('RE-SR', self.subregion)
# Should be equivalent to [] + no subregions
calendars = registry.get_calendars()
self.assertEqual(len(calendars), 3)
self.assertEqual({"RE", "RE2", "RE3"}, set(calendars))
# Should be equivalent to [] + include subregions
calendars = registry.get_calendars(include_subregions=True)
self.assertEqual(len(calendars), 4)
self.assertEqual({"RE", "RE2", "RE3", "RE-SR"}, set(calendars))
| mit | 291,042,332,081,102,500 | 38.068182 | 78 | 0.651929 | false |
CARocha/simasinnovacion | servicios/views.py | 1 | 1928 | # -*- coding: utf-8 -*-
from django.shortcuts import render, get_object_or_404
from .models import Servicios
from .forms import ServiciosForm
import json
from django.http import HttpResponse
def _queryset_filtrado(request):
    """Build a filtered Servicios queryset from the filter values stored in the session."""
    params = {}
if 'tipos_servicios' in request.session:
params['tipos_servicios'] = request.session['tipos_servicios']
if 'temas_abordan' in request.session:
params['temas_abordan'] = request.session['temas_abordan']
if 'org_benefician' in request.session:
params['org_benefician'] = request.session['org_benefician']
if 'fecha' in request.session:
params['fecha'] = request.session['fecha']
unvalid_keys = []
for key in params:
if not params[key]:
unvalid_keys.append(key)
for key in unvalid_keys:
del params[key]
return Servicios.objects.filter(**params)
def servicios_index(request, template="servicios/servicios.html"):
if request.method == 'POST':
form = ServiciosForm(request.POST)
if form.is_valid():
request.session['tipos_servicios'] = form.cleaned_data['tipos_servicios']
request.session['temas_abordan'] = form.cleaned_data['temas_abordan']
request.session['org_benefician'] = form.cleaned_data['org_benefician']
request.session['fecha'] = form.cleaned_data['fecha']
request.session['bandera'] = 1
else:
form = ServiciosForm()
request.session['bandera'] = 0
if request.session['bandera'] == 1:
con = _queryset_filtrado(request)
else:
con = ''
return render(request, template, {'form':form,
'lista_servicios':con})
def servicios_pagina(request, id, template="servicios/ficha_servicios.html"):
servicio = get_object_or_404(Servicios, id=id)
return render(request, template, {'servicio':servicio}) | mit | -5,750,363,850,856,495,000 | 36.096154 | 97 | 0.633817 | false |
lunixbochs/actualvim | lib/asyncio/locks.py | 1 | 14849 | """Synchronization primitives."""
__all__ = ['Lock', 'Event', 'Condition', 'Semaphore', 'BoundedSemaphore']
import collections
from ActualVim.lib.asyncio_inc import compat
from . import events
from . import futures
from .coroutines import coroutine
class _ContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
lock around a block:
with (yield from lock):
<block>
while failing loudly when accidentally using:
with lock:
<block>
"""
def __init__(self, lock):
self._lock = lock
def __enter__(self):
# We have no use for the "as ..." clause in the with
# statement for locks.
return None
def __exit__(self, *args):
try:
self._lock.release()
finally:
self._lock = None # Crudely prevent reuse.
class _ContextManagerMixin:
def __enter__(self):
raise RuntimeError(
'"yield from" should be used as context manager expression')
def __exit__(self, *args):
# This must exist because __enter__ exists, even though that
# always raises; that's how the with-statement works.
pass
@coroutine
def __iter__(self):
# This is not a coroutine. It is meant to enable the idiom:
#
# with (yield from lock):
# <block>
#
# as an alternative to:
#
# yield from lock.acquire()
# try:
# <block>
# finally:
# lock.release()
yield from self.acquire()
return _ContextManager(self)
if compat.PY35:
def __await__(self):
# To make "with await lock" work.
yield from self.acquire()
return _ContextManager(self)
@coroutine
def __aenter__(self):
yield from self.acquire()
# We have no use for the "as ..." clause in the with
# statement for locks.
return None
@coroutine
def __aexit__(self, exc_type, exc, tb):
self.release()
class Lock(_ContextManagerMixin):
"""Primitive lock objects.
A primitive lock is a synchronization primitive that is not owned
by a particular coroutine when locked. A primitive lock is in one
of two states, 'locked' or 'unlocked'.
It is created in the unlocked state. It has two basic methods,
acquire() and release(). When the state is unlocked, acquire()
changes the state to locked and returns immediately. When the
state is locked, acquire() blocks until a call to release() in
another coroutine changes it to unlocked, then the acquire() call
resets it to locked and returns. The release() method should only
be called in the locked state; it changes the state to unlocked
and returns immediately. If an attempt is made to release an
unlocked lock, a RuntimeError will be raised.
When more than one coroutine is blocked in acquire() waiting for
the state to turn to unlocked, only one coroutine proceeds when a
release() call resets the state to unlocked; first coroutine which
is blocked in acquire() is being processed.
acquire() is a coroutine and should be called with 'yield from'.
Locks also support the context management protocol. '(yield from lock)'
should be used as the context manager expression.
Usage:
lock = Lock()
...
yield from lock
try:
...
finally:
lock.release()
Context manager usage:
lock = Lock()
...
with (yield from lock):
...
Lock objects can be tested for locking state:
if not lock.locked():
yield from lock
else:
# lock is acquired
...
"""
def __init__(self, *, loop=None):
self._waiters = collections.deque()
self._locked = False
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self._locked else 'unlocked'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def locked(self):
"""Return True if lock is acquired."""
return self._locked
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
finally:
self._waiters.remove(fut)
def release(self):
"""Release a lock.
When the lock is locked, reset it to unlocked, and return.
If any other coroutines are blocked waiting for the lock to become
unlocked, allow exactly one of them to proceed.
When invoked on an unlocked lock, a RuntimeError is raised.
There is no return value.
"""
if self._locked:
self._locked = False
# Wake up the first waiter who isn't cancelled.
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
else:
raise RuntimeError('Lock is not acquired.')
class Event:
"""Asynchronous equivalent to threading.Event.
Class implementing event objects. An event manages a flag that can be set
to true with the set() method and reset to false with the clear() method.
The wait() method blocks until the flag is true. The flag is initially
false.
"""
def __init__(self, *, loop=None):
self._waiters = collections.deque()
self._value = False
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'set' if self._value else 'unset'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def is_set(self):
"""Return True if and only if the internal flag is true."""
return self._value
def set(self):
"""Set the internal flag to true. All coroutines waiting for it to
become true are awakened. Coroutine that call wait() once the flag is
true will not block at all.
"""
if not self._value:
self._value = True
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
def clear(self):
"""Reset the internal flag to false. Subsequently, coroutines calling
wait() will block until set() is called to set the internal flag
to true again."""
self._value = False
@coroutine
def wait(self):
"""Block until the internal flag is true.
If the internal flag is true on entry, return True
immediately. Otherwise, block until another coroutine calls
set() to set the flag to true, then return True.
"""
if self._value:
return True
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
return True
finally:
self._waiters.remove(fut)
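# Illustrative usage sketch (not part of this module; assumes a running
# event loop named `loop`):
#
#   @coroutine
#   def waiter(event):
#       yield from event.wait()   # suspends until event.set() is called
#       print('event fired')
#
#   event = Event(loop=loop)
#   loop.call_later(1, event.set)
#   loop.run_until_complete(waiter(event))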
class Condition(_ContextManagerMixin):
"""Asynchronous equivalent to threading.Condition.
This class implements condition variable objects. A condition variable
allows one or more coroutines to wait until they are notified by another
coroutine.
A new Lock object is created and used as the underlying lock.
"""
def __init__(self, lock=None, *, loop=None):
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
if lock is None:
lock = Lock(loop=self._loop)
elif lock._loop is not self._loop:
raise ValueError("loop argument must agree with lock")
self._lock = lock
# Export the lock's locked(), acquire() and release() methods.
self.locked = lock.locked
self.acquire = lock.acquire
self.release = lock.release
self._waiters = collections.deque()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self.locked() else 'unlocked'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
@coroutine
def wait(self):
"""Wait until notified.
If the calling coroutine has not acquired the lock when this
method is called, a RuntimeError is raised.
This method releases the underlying lock, and then blocks
until it is awakened by a notify() or notify_all() call for
the same condition variable in another coroutine. Once
awakened, it re-acquires the lock and returns True.
"""
if not self.locked():
raise RuntimeError('cannot wait on un-acquired lock')
self.release()
try:
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
return True
finally:
self._waiters.remove(fut)
finally:
# Must reacquire lock even if wait is cancelled
while True:
try:
yield from self.acquire()
break
except futures.CancelledError:
pass
@coroutine
def wait_for(self, predicate):
"""Wait until a predicate becomes true.
The predicate should be a callable which result will be
interpreted as a boolean value. The final predicate value is
the return value.
"""
result = predicate()
while not result:
yield from self.wait()
result = predicate()
return result
def notify(self, n=1):
"""By default, wake up one coroutine waiting on this condition, if any.
If the calling coroutine has not acquired the lock when this method
is called, a RuntimeError is raised.
This method wakes up at most n of the coroutines waiting for the
condition variable; it is a no-op if no coroutines are waiting.
Note: an awakened coroutine does not actually return from its
wait() call until it can reacquire the lock. Since notify() does
not release the lock, its caller should.
"""
if not self.locked():
raise RuntimeError('cannot notify on un-acquired lock')
idx = 0
for fut in self._waiters:
if idx >= n:
break
if not fut.done():
idx += 1
fut.set_result(False)
def notify_all(self):
"""Wake up all threads waiting on this condition. This method acts
like notify(), but wakes up all waiting threads instead of one. If the
calling thread has not acquired the lock when this method is called,
a RuntimeError is raised.
"""
self.notify(len(self._waiters))
class Semaphore(_ContextManagerMixin):
"""A Semaphore implementation.
A semaphore manages an internal counter which is decremented by each
acquire() call and incremented by each release() call. The counter
can never go below zero; when acquire() finds that it is zero, it blocks,
waiting until some other thread calls release().
Semaphores also support the context management protocol.
The optional argument gives the initial value for the internal
counter; it defaults to 1. If the value given is less than 0,
ValueError is raised.
"""
def __init__(self, value=1, *, loop=None):
if value < 0:
raise ValueError("Semaphore initial value must be >= 0")
self._value = value
self._waiters = collections.deque()
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self.locked() else 'unlocked,value:{}'.format(
self._value)
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def _wake_up_next(self):
while self._waiters:
waiter = self._waiters.popleft()
if not waiter.done():
waiter.set_result(None)
return
def locked(self):
"""Returns True if semaphore can not be acquired immediately."""
return self._value == 0
@coroutine
def acquire(self):
"""Acquire a semaphore.
If the internal counter is larger than zero on entry,
decrement it by one and return True immediately. If it is
zero on entry, block, waiting until some other coroutine has
called release() to make it larger than 0, and then return
True.
"""
while self._value <= 0:
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
except:
# See the similar code in Queue.get.
fut.cancel()
if self._value > 0 and not fut.cancelled():
self._wake_up_next()
raise
self._value -= 1
return True
def release(self):
"""Release a semaphore, incrementing the internal counter by one.
When it was zero on entry and another coroutine is waiting for it to
become larger than zero again, wake up that coroutine.
"""
self._value += 1
self._wake_up_next()
class BoundedSemaphore(Semaphore):
"""A bounded semaphore implementation.
This raises ValueError in release() if it would increase the value
above the initial value.
"""
def __init__(self, value=1, *, loop=None):
self._bound_value = value
super().__init__(value, loop=loop)
def release(self):
if self._value >= self._bound_value:
raise ValueError('BoundedSemaphore released too many times')
super().release()
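# Illustrative usage sketch (not part of this module): bounding concurrency
# with a Semaphore. `fetch` is a placeholder coroutine, not a real API.
#
#   sem = Semaphore(2)            # allow at most two concurrent fetches
#
#   @coroutine
#   def bounded_fetch(url):
#       with (yield from sem):    # acquire; released on block exit
#           return (yield from fetch(url))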
| mit | -7,098,526,981,324,719,000 | 30.064854 | 79 | 0.576739 | false |
podhmo/cssdiff | cssdiff/__init__.py | 1 | 4506 | # -*- coding:utf-8 -*-
import sys
import cssutils
from collections import defaultdict
VERBOSE = False
class DiffObject(object):
def __init__(self, src, dst):
self.src = src
self.dst = dst
self.merged = full_difference(src, dst)
def to_string(self):
buf = []
for style, diff_line_list in sorted(self.merged.items()):
buf.append("{style} {{".format(style=style))
for diff_line in diff_line_list:
op = diff_line[0]
if op == "-" or op == "+":
buf.append("{op} {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[2]))
elif op == "->":
buf.append("- {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[2]))
buf.append("+ {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[3]))
buf.append("}\n")
return "\n".join(buf)
class Element(object):
def __init__(self, sheet, structure=None, verbose=True):
self.sheet = sheet
self.structure = structure or to_dict(self.sheet, verbose)
self.verbose = verbose
def simplify(self):
return self.__class__(self.sheet, simplify(self.structure), verbose=self.verbose)
def difference(self, other):
src = simplify(self.structure)
dst = simplify(other.structure)
return DiffObject(src, dst)
def loads(css, verbose=VERBOSE):
sheet = cssutils.parseString(css, validate=verbose)
return Element(sheet, verbose=verbose)
def load(rf, verbose=VERBOSE):
return loads(rf.read(), verbose=verbose)
def load_from_file(filename, verbose=VERBOSE):
with open(filename) as rf:
        return load(rf, verbose=verbose)
def describe(sheet):
for rule in sheet:
print("S")
for selector in rule.selectorList:
print("\t{}".format(selector.selectorText))
print("R")
for prop in rule.style:
print("\t{} {}".format(prop.name, prop.value))
print("-")
def simplify(structure):
return {k1: {k2: vs[-1] for k2, vs in sd.items()} for k1, sd in structure.items()}
def full_difference(src, dst):
merged = defaultdict(list)
added_or_changed = difference(dst, src, op="+", iterate=lambda x: x.items())
deleted_or_changed = difference(src, dst, op="-", iterate=lambda x: x.items())
for k, vs in added_or_changed.items():
merged[k].extend(vs)
for k, vs in deleted_or_changed.items():
for v in vs:
if v[0] == '-':
merged[k].append(v)
return merged
def difference(s1, s2, op="+", iterate=lambda s: sorted(s.items())):
"""s1 - s2"""
def change(name, x, y):
return ("->", name, x, y)
def add(name, v):
return (op, name, v)
def addall(rules):
return [add(name, value) for name, value in iterate(rules)]
# Dict[style, Dict[name, value]]
d = defaultdict(list)
for style, rules in iterate(s1):
another_rules = s2.get(style)
if another_rules is None:
d[style].extend(addall(rules))
continue
for name, value in iterate(rules):
another_value = another_rules.get(name)
if another_value is None:
d[style].append(add(name, value))
elif value != another_value:
d[style].append(change(name, another_value, value))
return d
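# Illustrative sketch: how difference() reports additions and changes
# between two simplified structures (see simplify() above).
#
#   >>> s1 = {'h1': {'color': 'red', 'margin': '0'}}
#   >>> s2 = {'h1': {'color': 'blue'}}
#   >>> difference(s1, s2)['h1']
#   [('->', 'color', 'blue', 'red'), ('+', 'margin', '0')]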
def to_dict(sheet, verbose=True):
d = defaultdict(lambda: defaultdict(list))
for rule in sheet:
if not hasattr(rule, "selectorList"):
if verbose:
sys.stderr.write("hmm: {}\n".format(type(rule)))
continue
for selector in rule.selectorList:
sd = d[selector.selectorText]
for prop in rule.style:
sd[prop.name].append(prop.value)
return d
# todo: remove
def pp(d):
def default(o):
return o.structure
import json
print(json.dumps(d, indent=2, default=default))
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("src", type=argparse.FileType('r'))
parser.add_argument("dst", type=argparse.FileType('r'))
parser.add_argument("--verbose", action="store_true", default=False)
args = parser.parse_args()
s0 = load(args.src, verbose=args.verbose)
s1 = load(args.dst, verbose=args.verbose)
print(s0.difference(s1).to_string())
if __name__ == "__main__":
main()
| mit | 8,262,920,245,644,976,000 | 29.04 | 109 | 0.580781 | false |
cristian99garcia/showntell-activity | slideshow.py | 1 | 25638 | # -*- mode:python; tab-width:4; indent-tabs-mode:t; -*-
# slideshow.py
#
# Classes to represent a deck of slides, and handle things like file I/O and
# formats
# B. Mayton <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import subprocess
from time import strftime
import xml.dom.minidom
import logging
from gi.repository import Gtk
from gi.repository import GObject
from gi.repository import GdkPixbuf
from path import path
from sugar3.activity import activity
from sugar3.datastore import datastore
class Deck(GObject.GObject):
__gsignals__ = {
'slide-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'decktitle-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'slide-redraw': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'remove-path': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'deck-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'local-ink-added': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING,)),
'remote-ink-added': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING,)),
'instr-state-propagate': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_BOOLEAN,)),
'lock-state-propagate': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_BOOLEAN,)),
'ink-submitted': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING, GObject.TYPE_STRING)),
'ink-broadcast': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE,
(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_STRING)),
'update-submissions': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'instructor-ink-cleared': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'instructor-ink-removed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT, GObject.TYPE_INT)),
}
def __init__(self, sugaractivity, handle, rsrc, base="/nfs/show"):
GObject.GObject.__init__(self)
self.__handle = handle
if self.__handle.object_id is None:
print 'slideshow - from home view'
else:
obj = datastore.get(self.__handle.object_id)
print 'object:', obj.get_file_path()
self.__logger = logging.getLogger('Deck')
self.__base = base
self.__rsrc = rsrc
self.__activity = sugaractivity
self.__is_initiating = True
self.__nav_locked = False
self.__active_sub = -1
self.__self_text = ""
self.__text_tag = None
self.__xmlpath = os.path.join(base, "deck.xml")
# we always create a new presentation and copy over it on resume
if path(base).exists():
# we can't have permissions.info for this to work
subprocess.call(
"cp -r " +
base +
" " +
os.path.expanduser("~/save"),
shell=True)
subprocess.call("rm -rf " + base + '/*', shell=True)
else:
path.mkdir(base)
path.copy(self.__rsrc / 'deck.xml', base / 'deck.xml')
path.copy(self.__rsrc / 'title.html', base / 'title.html')
path.copy(self.__rsrc / 'title_thumb.png', base / 'title_thumb.png')
self.reload()
self.set_title('New')
def set_locked_mode(self, locked):
""" Setter method for the navigation lock flag"""
self.__logger.debug("Lock state: " + str(locked))
self.__nav_locked = locked
self.emit('lock-state-propagate', locked)
def set_is_initiating(self, is_init):
""" Setter method for the instructor flag """
self.__logger.debug("Instructor state: " + str(is_init))
self.__is_initiating = is_init
self.emit('instr-state-propagate', is_init)
def getIsInitiating(self):
return self.__is_initiating
def make_title_slide(self, title):
# open and read title.html
self.__work_path = os.path.join(
activity.get_activity_root(), 'instance')
deckpath = path(activity.get_activity_root()) / 'instance' / 'deck'
slide = open(deckpath / 'title.html', 'r')
txt = slide.read()
slide.close()
# here change title.html - change between <h1> and </h1>
h1pos = txt.find('<h1>')
h1end = txt.find('</h1>')
txtmod = txt[:h1pos + 4] + title + txt[h1end:]
# here change date - change between <h3> and </h3>
h3pos = txtmod.find('<h3>')
h3end = txtmod.find('</h3>')
txt = txtmod[:h3pos + 4] + \
strftime("%a, %b %d, %Y %H:%M") + txtmod[h3end:]
# save title.html and close
slide = open(deckpath / 'title.html', 'w')
slide.write(txt)
slide.close()
print 'title slide changed', title
def set_title(self, title):
nodes = self.__dom.getElementsByTagName("title")
nodes[0].firstChild.data = title
self.make_title_slide(title)
self.save()
self.goToIndex(0, is_local=False)
self.emit('deck-changed')
print 'set_title', self.get_title()
def get_title(self):
nodes = self.__dom.getElementsByTagName("title")
return nodes[0].firstChild.data
def reload(self):
self.__logger.debug("Reading deck")
print 'reload:', self.__xmlpath
if os.path.exists(self.__xmlpath):
self.__dom = xml.dom.minidom.parse(self.__xmlpath)
decks = self.__dom.getElementsByTagName("deck")
self.__deck = decks[0]
# Get the slides from the show
self.__slides = self.__deck.getElementsByTagName("slide")
self.__nslides = len(self.__slides)
self.__logger.debug(str(self.__nslides) + " slides in show")
self.goToIndex(0, is_local=False)
self.emit('deck-changed')
print 'deck reloaded'
def save(self, path=None):
"""Writes the XML DOM in memory out to disk"""
print 'save:', path
if not path:
path = self.__xmlpath
"""
print '***************save************************'
print self.__dom.toprettyxml()
print '***************save************************'
"""
outfile = open(path, "w")
self.__dom.writexml(outfile)
outfile.close()
def rebuild_dom(self, title, slides):
dom = xml.dom.minidom.Document()
deck = dom.createElement("deck")
title = dom.createElement("title")
title.appendChild(dom.createTextNode("new"))
deck.appendChild(title)
for slide in slides:
deck.appendChild(slide)
dom.appendChild(deck)
print '*************rebuild**************************'
print dom.toprettyxml()
print '**********************************************'
return dom
def getDeckPath(self):
"""Returns the path to the folder that stores this slide deck"""
return self.__base
def resizeImage(self, inpath, outpath, w, h):
# resize an image
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(inpath, w, h)
#scaled_buf = pixbuf.scale.simple(w, h, Gtk.gdk.INTERP_BILINEAR)
pixbuf.save(outpath, "png")
def get_SlideTitle(self):
n = self.getIndex()
slide = self.__slides[n]
return slide.getAttribute('title')
def set_SlideTitle(self, slideTitle):
n = self.getIndex()
slide = self.__slides[n]
slide.setAttribute('title', slideTitle)
def addSlide(self, file_path):
INSTANCE = path(activity.get_activity_root()) / 'instance'
filepath = path(file_path)
print 'addSlide file_path', filepath.exists(), filepath
filename = filepath.name
inpath = INSTANCE / 'deck' / filename
print 'inpath', inpath.exists(), inpath
path.copy(filepath, inpath)
outpath = path(activity.get_activity_root()) / \
'instance' / 'deck' / filename
print 'outpath=', outpath.exists(), outpath
self.resizeImage(inpath, outpath, 640, 480)
print 'outpath=', outpath.exists(), outpath
print 'get slide dimensions'
dims = self.getSlideDimensionsFromXML(0)
if not dims:
wf = 640
hf = 480
else:
wf, hf = dims
w = str(int(wf))
h = str(int(hf))
print 'add slide', w, h
newslide = self.__dom.createElement("slide")
newslide.setAttribute("height", h)
newslide.setAttribute("title", "newslide")
newslide.setAttribute("width", w)
newlayer = self.__dom.createElement("layer")
txt = self.__dom.createTextNode(filename)
newlayer.appendChild(txt)
newslide.appendChild(newlayer)
self.__deck.appendChild(newslide)
print '**************addSlide*************'
print self.__dom.toprettyxml()
print '***********************************'
self.save()
def removeSlide(self, n):
del self.__slides[n]
self.__dom = self.rebuild_dom("modified deck", self.__slides)
def moveSlide(self, f, t):
if f < t:
self.__slides.insert(t, self.__slides[f])
del self.__slides[f]
elif t < f:
self.__slides.insert(t, self.__slides[f])
del self.__slides[f + 1]
self.__dom = self.rebuild_dom("modified deck", self.__slides)
def getSlideLayers(self, n=-1):
"""Returns a list of the layers that comprise this slide"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
self.__layers = slide.getElementsByTagName("layer")
layers = []
for l in self.__layers:
p = os.path.join(self.__base, l.firstChild.nodeValue)
layers.append(p)
return layers
def getInstructorInk(self):
self.__instructor_ink = []
instr = self.__slide.getElementsByTagName("instructor")
if len(instr) > 0:
self.__instructor_tag = instr[0]
pathtags = self.__instructor_tag.getElementsByTagName("path")
for pathstr in pathtags:
self.__instructor_ink.append(pathstr.firstChild.nodeValue)
return self.__instructor_ink
def getSelfInkOrSubmission(self):
if self.__active_sub == -1:
return (self.__self_ink, self.__self_text)
subtags = self.__slide.getElementsByTagName("submission")
if self.__active_sub > -1 and self.__active_sub < len(subtags):
active_subtag = subtags[self.__active_sub]
text = ""
texts = active_subtag.getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
pathlist = []
paths = active_subtag.getElementsByTagName("path")
for path in paths:
if path.firstChild:
pathlist.append(path.firstChild.nodeValue)
return (pathlist, text)
return None
def setActiveSubmission(self, sub):
self.__active_sub = sub
self.emit('slide-redraw')
def getActiveSubmission(self):
return self.__active_sub
def getSubmissionList(self, n=None):
if n is None:
n = self.__pos
subtags = self.__slide.getElementsByTagName("submission")
sublist = []
for subtag in subtags:
sublist.append(subtag.getAttribute("from"))
return sublist
def addSubmission(self, whofrom, inks, text="", n=None):
if n is None:
n = self.__pos
if n >= 0 and n < self.getSlideCount():
slide = self.__slides[n]
else:
slide = self.__slides[self.__pos]
newsub = self.__dom.createElement("submission")
newsub.setAttribute("from", whofrom)
substrparts = inks.split("$")
for part in substrparts:
if len(part) > 0:
newpath = self.__dom.createElement("path")
newpath.appendChild(self.__dom.createTextNode(part))
newsub.appendChild(newpath)
subtext = self.__dom.createElement("text")
subtext.appendChild(self.__dom.createTextNode(text))
newsub.appendChild(subtext)
subs = slide.getElementsByTagName("submission")
for sub in subs:
if sub.getAttribute("from") == whofrom:
slide.removeChild(sub)
slide.appendChild(newsub)
subs = slide.getElementsByTagName("submission")
if n == self.__pos:
self.emit('update-submissions', len(subs) - 1)
def addInkToSlide(self, pathstr, islocal, n=None):
"""Adds ink to the current slide, or slide n if given. Instructor ink may be added to any slide;
but it only makes sense to add student ink to the current slide (n will be ignored)"""
if n is None:
slide = self.__slide
instr_tag = self.__instructor_tag
if instr_tag is None:
instr_tag = self.__dom.createElement("instructor")
slide.appendChild(instr_tag)
self.__instructor_tag = instr_tag
else:
            if n < self.getSlideCount() and n >= 0:
slide = self.__slides[n]
else:
slide = self.__slides[self.__pos]
instr_tags = slide.getElementsByTagName("instructor")
if len(instr_tags) > 0:
instr_tag = instr_tags[0]
else:
instr_tag = self.__dom.createElement("instructor")
slide.appendChild(instr_tag)
if not islocal or self.__is_initiating:
self.__instructor_ink.append(pathstr)
path = self.__dom.createElement("path")
path.appendChild(self.__dom.createTextNode(pathstr))
instr_tag.appendChild(path)
else:
self.__self_ink.append(pathstr)
if not self.__self_ink_tag:
self.__self_ink_tag = self.__dom.createElement("self")
self.__slide.appendChild(self.__self_ink_tag)
path = self.__dom.createElement("path")
path.appendChild(self.__dom.createTextNode(pathstr))
self.__self_ink_tag.appendChild(path)
if islocal:
self.emit("local-ink-added", pathstr)
else:
if n is None or n == self.__pos:
self.emit("remote-ink-added", pathstr)
def clearInk(self, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
if self.__is_initiating:
self.clearInstructorInk(n)
self.emit('instructor-ink-cleared', n)
self_tags = slide.getElementsByTagName("self")
for self_tag in self_tags:
slide.removeChild(self_tag)
self.__self_ink = []
self.__self_ink_tag = None
def clearInstructorInk(self, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
instructor_tags = slide.getElementsByTagName("instructor")
for instructor_tag in instructor_tags:
slide.removeChild(instructor_tag)
if n == self.__pos:
self.__instructor_ink = []
self.__instructor_tag = None
self.emit('slide-redraw')
def removeInstructorPathByUID(self, uid, n=None):
if n is None:
n = self.__pos
needs_redraw = False
slide = self.__slides[n]
instructor_tags = slide.getElementsByTagName("instructor")
if len(instructor_tags) > 0:
instructor_tag = instructor_tags[0]
else:
return
path_tags = instructor_tag.getElementsByTagName("path")
for path_tag in path_tags:
if path_tag.firstChild:
pathstr = path_tag.firstChild.nodeValue
path_uid = 0
try:
path_uid = int(pathstr[0:pathstr.find(';')])
except Exception as e:
pass
if path_uid == uid:
instructor_tag.removeChild(path_tag)
needs_redraw = True
if n == self.__pos and needs_redraw:
self.emit('remove-path', uid)
def removeLocalPathByUID(self, uid, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
if self.__is_initiating:
self.emit('instructor_ink_removed', uid, n)
tags = slide.getElementsByTagName("instructor")
else:
tags = slide.getElementsByTagName("self")
if len(tags) > 0:
tag = tags[0]
else:
return
path_tags = tag.getElementsByTagName("path")
for path_tag in path_tags:
if path_tag.firstChild:
pathstr = path_tag.firstChild.nodeValue
path_uid = 0
try:
path_uid = int(pathstr[0:pathstr.find(';')])
except Exception as e:
pass
if path_uid == uid:
tag.removeChild(path_tag)
def doSubmit(self):
inks, text, whofrom = self.getSerializedInkSubmission()
self.__logger.debug("Submitting ink: " + str(inks) + " text: " + text)
self.emit('ink-submitted', inks, text)
def doBroadcast(self):
inks, text, whofrom = self.getSerializedInkSubmission()
self.emit('ink-broadcast', whofrom, inks, text)
def getSerializedInkSubmission(self):
sub = ""
text = ""
if self.__active_sub == -1:
self_tags = self.__slide.getElementsByTagName("self")
if len(self_tags) > 0:
texts = self_tags[0].getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
for path in self_tags[0].getElementsByTagName("path"):
sub = sub + path.firstChild.nodeValue + "$"
return sub, text, "myself"
else:
sub = ""
whofrom = "unknown"
subtags = self.__slide.getElementsByTagName("submission")
if self.__active_sub > -1 and self.__active_sub < len(subtags):
active_subtag = subtags[self.__active_sub]
text = ""
whofrom = active_subtag.getAttribute("from")
texts = active_subtag.getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
pathlist = []
paths = active_subtag.getElementsByTagName("path")
for path in paths:
if path.firstChild:
sub = sub + path.firstChild.nodeValue + "$"
return sub, text, whofrom
def getSlideThumb(self, n=-1):
"""Returns the full path to the thumbnail for this slide if it is defined; otherwise False"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
print slide.toprettyxml()
thumbs = slide.getElementsByTagName("thumb")
if len(thumbs) < 1:
return False
return os.path.join(self.__base, thumbs[0].firstChild.nodeValue)
def setSlideThumb(self, filename, n=-1):
"""Sets the thumbnail for this slide to filename (provide a *relative* path!)"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
thumbs = slide.getElementsByTagName("thumb")
for t in thumbs:
slide.removeChild(t)
thumb = self.__dom.createElement("thumb")
thumb.appendChild(self.__dom.createTextNode(filename))
slide.appendChild(thumb)
def getSlideClip(self, n=-1):
"""Returns the full path to the audio clip for this slide if it is defined; otherwise False"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
clip = slide.getElementsByTagName("clip")
if len(clip) < 1:
return False
return os.path.join(self.__base, clip[0].firstChild.nodeValue)
def setSlideClip(self, filename, n=-1):
"""Sets the clip for this slide to filename (provide a *relative* path!)"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
clips = slide.getElementsByTagName("clip")
for clip in clips:
slide.removeChild(clip)
clip = self.__dom.createElement("clip")
clip.appendChild(self.__dom.createTextNode(filename))
slide.appendChild(clip)
def setSlideText(self, textval):
self.__self_text = textval
if self.__text_tag:
if self.__text_tag.firstChild:
self.__text_tag.firstChild.nodeValue = textval
else:
self.__text_tag.appendChild(self.__dom.createTextNode(textval))
def doNewIndex(self):
"""Updates any necessary state associated with moving to a new slide"""
self.__slide = self.__slides[self.__pos]
# set slide title - entry text from xml
self.set_SlideTitle(self.__slide.getAttribute('title'))
self_ink = self.__slide.getElementsByTagName("self")
self.__instructor_tag = None
self.__self_ink_tag = None
self.__instructor_ink = []
self.__self_ink = []
self.__self_text = ""
self.__text_tag = None
self.__active_sub = -1
if len(self_ink) > 0:
self.__self_ink_tag = self_ink[0]
texttags = self.__self_ink_tag.getElementsByTagName("text")
if len(texttags) > 0:
self.__text_tag = texttags[0]
else:
                self.__text_tag = self.__dom.createElement("text")
                self.__text_tag.appendChild(self.__dom.createTextNode(""))
                self.__self_ink_tag.appendChild(self.__text_tag)
pathtags = self.__self_ink_tag.getElementsByTagName("path")
for pathstr in pathtags:
self.__self_ink.append(pathstr.firstChild.nodeValue)
else:
self.__self_ink_tag = self.__dom.createElement("self")
self.__slide.appendChild(self.__self_ink_tag)
self.__text_tag = self.__dom.createElement("text")
self.__text_tag.appendChild(self.__dom.createTextNode(""))
self.__self_ink_tag.appendChild(self.__text_tag)
if self.__text_tag.firstChild:
self.__self_text = self.__text_tag.firstChild.nodeValue
self.__activity.set_screen(0)
self.emit("slide-changed")
self.emit("update-submissions", self.__active_sub)
self.emit("slide-redraw")
def goToIndex(self, index, is_local):
"""Jumps to the slide at the given index, if it's valid"""
self.__logger.debug(
"Trying to change slides: locked? %u, instructor? %u, is_local? %u",
self.__nav_locked,
self.__is_initiating,
is_local)
in_range = index < self.__nslides and index >= 0
if (self.__is_initiating or not is_local or not self.__nav_locked) and in_range:
self.__logger.debug("Changing slide to index: %u", index)
self.__pos = index
self.doNewIndex()
else:
self.__pos = index
print 'invalid index', index
def getIndex(self):
"""Returns the index of the current slide"""
return self.__pos
def next(self):
"""Moves to the next slide"""
self.goToIndex(self.__pos + 1, is_local=True)
def previous(self):
"""Moves to the previous slide"""
self.goToIndex(self.__pos - 1, is_local=True)
def isAtBeginning(self):
"""Returns true if show is on the first slide in the deck"""
if self.__nslides < 1:
return True
if self.__pos == 0:
return True
else:
return False
def isAtEnd(self):
"""Returns true if the show is at the last slide in the deck"""
if self.__nslides < 1:
return True
if self.__pos == self.__nslides - 1:
return True
else:
return False
def getSlideDimensionsFromXML(self, n=-1):
"""Returns the dimensions for the slide at index n, if they're specified"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
wstring = slide.getAttribute("width")
hstring = slide.getAttribute("height")
if wstring != '' and hstring != '':
return [float(wstring), float(hstring)]
return False
def getSlideCount(self):
return self.__nslides
GObject.type_register(Deck)
| gpl-2.0 | -5,962,084,091,396,877,000 | 37.728097 | 117 | 0.562056 | false |
lornemcintosh/OptAnim | optanim/animation.py | 1 | 21918 | # -------------------------------------------------------------------------
# Copyright (c) 2010-2012 Lorne McIntosh
#
# This file is part of OptAnim.
#
# OptAnim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OptAnim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OptAnim. If not, see <http://www.gnu.org/licenses/>.
# -------------------------------------------------------------------------
'''
OptAnim, animation module
'''
from __future__ import division
import math
import copy
import itertools
import re
import time
import logging
import numpy
import cma
from exporters import *
from joints import *
from specifier import *
from threadpool import *
from utils import *
LOG = logging.getLogger(__name__)
pool = ThreadPool()
class ParameterSpace(object):
"""
A utility class that makes it easy to specify and generate large sets of
character animations using specifiers in a parameterized, combinatorial way
Examples
--------
Create a new ParameterSpace:
>>> space = ParameterSpace(Name='space')
Add 1 dimension to space, with 1 set of 1 specifier
>>> space.add_dimension( [[ a ]] )
Result: 1 animation: (a)
Add 1 dimension to space, with 1 set of 2 specifiers
>>> space.add_dimension( [[ a, b ]] )
Result: 1 animation: (a,b)
Add 1 dimension to space, with 2 sets of specifiers
>>> space.add_dimension( [[ a, b ], [ c ]] )
Result: 2 animations: (a,b) (c)
Add 2 dimensions to space (2 sets, 1 set)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ]] )
Result: 2 animations (2x1): (a,b,d) (c,d)
Add 2 dimensions to space (2 sets, 2 sets)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ], [ e ]] )
Result: 4 animations (2x2): (a,b,d) (a,b,e) (c,d) (c,e)
Add 3 dimensions to space (2 sets, 2 sets, 2 sets)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ], [ e ]] )
>>> space.add_dimension( [[ f ], [ g ]] )
Result: 8 animations (2x2x2): (a,b,d,f) (a,b,d,g) (a,b,e,f) (a,b,e,g)
(c,d,f) (c,d,g) (c,e,f) (c,e,g)
"""
def __init__(self, Name, Length=None, FPS=25):
'''Constructor'''
self.Name = Name
self.Length = Length
self.FPS = FPS
self.ContactTimesDict = None
self.DimensionList = []
self.CharacterList = []
self.AnimationList = []
def set_length(self, length):
self.Length = float(length)
def set_contact_times(self, dict):
self.ContactTimesDict = dict
def get_frame_length(self):
return float(1.0 / self.FPS)
def get_frame_count(self):
return int(round(self.Length * self.FPS))
def add_character(self, character):
self.CharacterList.append(character)
def add_dimension(self, dim):
'''Adds a dimension to the ParameterSpace'''
self.DimensionList.append(dim)
def get_num_combinations(self):
ret = len(self.CharacterList)
for dim in self.DimensionList:
ret *= max(len(dim), 1)
return ret
def get_animations_with_tag(self, tag):
return [anim for anim in self.AnimationList if anim.has_tag(tag)]
def generate(self, solver='ipopt'):
#print a helpful message about the number of combinations generated
LOG.info("Generating %s (%i combinations)" % (self.Name, self.get_num_combinations()))
#make an anim for each combination of characters/specifiers
for character in self.CharacterList:
for index, comb in enumerate(itertools.product(*self.DimensionList)):
#build out constraint and objective lists
paramList = list(itertools.chain.from_iterable(comb))
animSpecifierList = character.SpecifierList + paramList
#create an animation instance
animName = character.Name + "_" + self.Name + "_" + str(index)
anim = Animation(animName, self.Length, self.FPS, character,
                    animSpecifierList, self.ContactTimesDict)
self.AnimationList.append(anim)
anim.optimize(solver) #non-blocking
def wait_for_results(self):
'''Polls the animations and returns when they're all done'''
alldone = False
while(alldone is False):
alldone = True
for anim in self.AnimationList:
if anim.Done is False:
alldone = False
time.sleep(1)
break
def export(self, outdir):
'''Exports all the animations that solved'''
self.wait_for_results()
for anim in self.AnimationList:
if anim.Solved:
anim.export(outdir)
class Animation(object):
'''Represents a specific character animation. The character and constraints
etc. are set in stone. If solved, it also stores the optimization results (the
animation data)'''
def __init__(self, Name, Length, FPS, Character, SpecifierList, ContactTimes):
'''Constructor'''
self.Name = Name
self.Length = Length
self.FPS = FPS
self.Character = Character
self.SpecifierList = SpecifierList
self.ContactTimesDict = ContactTimes
self.Done = False
self.Solved = False
self.ObjectiveValue = numpy.NaN
self.AnimationData = {}
self.CachedConstraintList = []
self.CachedObjectiveList = []
def __str__(self):
return self.Name + " (Length=" + str(self.Length) + ", FPS=" + str(self.FPS) + ", frame_count=" + str(self.get_frame_count()) + ")"
def get_frame_length(self):
return float(1.0 / self.FPS)
def get_frame_count(self):
return int(round(self.Length * self.FPS))
def get_contact_frames(self, joint):
try:
footsteps = self.ContactTimesDict[joint]
contactSet = set()
for step in footsteps:
startTime, intervalTime = [x * self.Length for x in step] #convert from fraction of length to real seconds
#TODO: goofy 'double rounding' here is to avoid small floating-point errors; use decimal package instead?
intervalFrames = int(round(round(intervalTime * self.FPS, 1)))
startFrame = int(round(round(startTime * self.FPS, 1)))
endFrame = startFrame + intervalFrames
contactSet = contactSet | set([x % self.get_frame_count() for x in range(startFrame, endFrame)]) #loop
return contactSet
except KeyError:
            raise BaseException('Character "%s" has contact joint "%s", but no contact timings were specified for "%s".' % (self.Character.Name, joint.Name, joint.Name))
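
    # Worked example (illustrative): with Length=1.0 s and FPS=25, a footstep
    # entry of (0.0, 0.4) maps to startFrame=0 and 0.4 * 25 = 10 contact
    # frames, i.e. the set {0, 1, ..., 9}; the modulo above wraps any step
    # that runs past the clip end back to frame 0 (looping).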
def has_tag(self, tag):
'''Returns True if this animation has the specified tag; False otherwise'''
return tag in self.SpecifierList
def get_frame_slice(self, firstFrame, lastFrame):
'''Returns a new Animation containing just the frames between firstFrame and lastFrame'''
#clamp frames
firstFrame = max(0, firstFrame)
lastFrame = min(self.get_frame_count(), lastFrame)
newName = self.Name + "_" + str(firstFrame) + "to" + str(lastFrame)
newLength = (lastFrame-firstFrame+1)*self.get_frame_length()
ret = Animation(newName, newLength, self.FPS, self.Character, None, None)
#setup animation data
ret.AnimationData = {}
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * ret.get_frame_count()
#copy slice of animation data from original
for k, v in ret.AnimationData.items():
ret.AnimationData[k] = self.AnimationData[k][firstFrame:lastFrame + 1]
ret.Done = True
ret.Solved = True
return ret
def animdata_resample(self, fps):
'''Returns a new Animation, resampled at the specified fps'''
ret = copy.deepcopy(self) #TODO: get rid of this deepcopy (too memory hungry)
ret.FPS = fps
frameCount = ret.get_frame_count()
#clear existing animation data
ret.AnimationData = {}
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * frameCount
#do the resampling
for frame in range(frameCount):
            time = float(frame) / (frameCount - 1)
interpData = self.animdata_get_interpolated(time, self.Character.DefaultRoot)
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)][frame] = interpData[body.Name][0]
return ret
def animdata_get_interpolated(self, time, root):
'''Returns the interpolated state at time, where time is 0 to 1'''
assert(0.0 <= time <= 1.0)
nframes = (len(self.AnimationData.items()[0][1])-1)
frameA = int(math.floor(time * nframes))
frameB = int(math.ceil(time * nframes))
if frameA == frameB:
ret = {}
for k, v in self.AnimationData.items():
ret[k] = self.AnimationData[k][frameA:frameB + 1]
return ret
else:
            timeA = float(frameA) / nframes
            timeB = float(frameB) / nframes
            timeAB = (time - timeA) / (timeB - timeA)
            a, b = {}, {}
for k, v in self.AnimationData.items():
a[k] = self.AnimationData[k][frameA:frameA + 1]
b[k] = self.AnimationData[k][frameB:frameB + 1]
ret = frame_interpolate(self.Character, root, a, b, timeAB)
return ret
def blend(self, other, weight, root=None, fps=25):
'''Returns a new Animation, the result of blending between self and
other at the specified weight, using root as the root point (body), and
sampled at the specified fps'''
if root is None:
root = self.Character.DefaultRoot
LOG.info("Blending " + str(self) + " and " + str(other) + ". Weight = " + str(weight) + ". Root = " + str(root.Name))
#calculate length (in seconds) of new animation clip:
#(formula from Safonova & Hodgins / Analyzing the Physical Correctness of Interpolated Human Motion)
length = math.sqrt(math.pow(self.Length,2)*weight + math.pow(other.Length,2)*(1-weight))
ret = Animation(str(self.Name) + "_and_" + str(other.Name), length, fps, self.Character, None, None)
frameCount = ret.get_frame_count()
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * frameCount
for frame in range(frameCount):
            frameTime = float(frame) / (frameCount - 1)
a = self.animdata_get_interpolated(frameTime, root)
b = other.animdata_get_interpolated(frameTime, root)
tmp = frame_interpolate(self.Character, root, a, b, weight)
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)][frame] = tmp[str(body.Name)][0]
ret.Done = True
ret.Solved = True
return ret
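
    # Worked example (illustrative): blending clips of 1.0 s and 2.0 s at
    # weight 0.5 yields length sqrt(0.5 * 1.0**2 + 0.5 * 2.0**2) ~= 1.58 s,
    # per the Safonova & Hodgins formula used above.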
def _write_header(self):
ret = ''
ret += 'param pH = %f;\n' % self.get_frame_length()
ret += 'param pTimeBegin = 0;\n'
ret += 'param pTimeEnd = %i;\n' % (self.get_frame_count()-1)
ret += 'set sTimeSteps := pTimeBegin .. pTimeEnd;\n'
#write joint timing sets
for j in self.Character.get_joints_contact():
contactSet = self.get_contact_frames(j)
contactStr = '{' + (', '.join(map(str, contactSet))) + '}'
ret += 'set sTimeSteps_%sOn := %s;\n' % (j.Name, contactStr)
ret += 'set sTimeSteps_%sOff := sTimeSteps diff sTimeSteps_%sOn;\n' % (j.Name, j.Name)
ret += '\n'
ret += '\n'
return ret
def _write_specifiers(self):
ret = ''
#write constraints
for eq in self.CachedConstraintList:
ret += str(eq)
#write weighted objectives
if self.CachedObjectiveList:
ret += 'minimize objective: (\n'
for i, obj in enumerate(self.CachedObjectiveList):
ret += str(obj)
if(i == len(self.CachedObjectiveList)-1):
ret += ') / (pTimeEnd+1);\n' #we divide by time so animations of different lengths can be compared fairly
else:
ret += ' +\n'
return ret
def _write_footer(self, solver):
ret = ''
        ret += 'option reset_initial_guesses 1;\n'
#ret += 'option show_stats 1;\n'
ret += 'option solver ' + solver + ';\n'
ret += 'option ipopt_options \'max_iter=10000 print_level=0\';\n' #TODO: what about other solvers? max_cpu_time=1200
ret += 'option snopt_options \'meminc=10000000\';\n' #TODO: what about other solvers?
ret += 'solve;\n'
ret += '\n'
ret += 'display solve_result;\n'
if self.CachedObjectiveList:
ret += 'display objective;\n'
#for interest we can output the values of individual objectives in the solution
for i, obj in enumerate(self.CachedObjectiveList):
ret += obj.write_debug_str()
ret += 'if solve_result = "solved" then {\n'
for frame in range(0, self.get_frame_count()):
for body in self.Character.BodyList:
varstr = ', '.join([str(body.q[x]) + '[' + str(frame) + ']' for x in range(0, dof)])
fstr = ', '.join(['%f'] * dof)
ret += '\tprintf "' + str(body.Name) + '[' + str(frame) + '] = ' + fstr + '\\n", ' + varstr + ';\n'
ret += '}\n'
ret += 'if solve_result = "solved" then{ display {j in 1.._nvars} (_varname[j],_var[j]); }\n'
ret += 'exit;\n'
return ret
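
    # For reference (illustrative): with solver='ipopt' the text emitted by
    # _write_footer() begins roughly like
    #   option reset_initial_guesses 1;
    #   option solver ipopt;
    #   option ipopt_options 'max_iter=10000 print_level=0';
    #   solve;
    # followed by display/printf statements that dump the solved trajectories.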
def _solvedcallback(self, amplresult):
#cache solution to a file
file = open(self.Name + '.amplsol', 'w')
file.write(amplresult)
file.close()
#did it solve correctly?
self.Solved = ("solve_result = solved" in amplresult)
if self.Solved:
if self.CachedObjectiveList:
objectivematch = re.search("(?<=objective = )" + regex_float, amplresult)
self.ObjectiveValue = float(objectivematch.group(0))
#read the solution variables into a dict {indexed on body name}[frame][dof]
self.AnimationData = {}
for body in self.Character.BodyList:
self.AnimationData[str(body.Name)] = [None] * self.get_frame_count()
for frame in range(0, self.get_frame_count()):
regex_float_str = ', '.join([regex_float] * dof)
pattern = str(body.Name) + "\[" + str(frame) + "\] = " + regex_float_str
match = re.findall(pattern, amplresult)[0]
q = [float(match[x * 2]) for x in range(dof)]
self.AnimationData[str(body.Name)][frame] = q
#if looped, append an extra frame (identical to first frame, but offset)
for s in self.SpecifierList:
if isinstance(s, SpecifierPluginLoop):
for frame in range(0, 2): #duplicate first 2 frames
for b in self.Character.BodyList:
q = self.AnimationData[str(b.Name)][frame]
q = s.get_offset(q, 1) #apply offset
q = map(float, q)
self.AnimationData[str(b.Name)].append(q) #append extra frame
LOG.info('%s solved! (Objective = %f)' % (self.Name, self.ObjectiveValue))
self.export('.') #export immediately so we can see the results
else:
LOG.info('%s failed!' % self.Name)
self.Done = True #this must come last to avoid a thread sync issue
def export(self, outdir):
if self.Solved is False:
raise BaseException('Animation is not solved. Cannot export!')
'''filename = outdir + "\\" + self.Name + '.bvh'
LOG.info('Writing %s,' % filename),
file = openfile(filename, 'w')
file.write(export_bvh(self))
file.close()'''
        # os.path.join keeps the path portable (assumes `os` is imported at module top)
        filename = os.path.join(outdir, self.Name + '.flat.bvh')
LOG.info('Writing %s,' % filename),
file = openfile(filename, 'w')
file.write(export_bvh_flat(self))
file.close()
        filename = os.path.join(outdir, self.Name + '.skeleton.xml')
LOG.info('Writing %s' % filename)
xmltree = ogre3d_export_animation(self)
xmltree.write(filename)
def _solve(self, solver, writeAMPL=False):
'''This handles the 'inner' (spacetime) optimization. It assumes that
length and contact timings are set. Use optimize() instead.'''
#reset the solution
self.Done = False
self.Solved = False
self.ObjectiveValue = numpy.NaN
self.AnimationData = {}
self.CachedConstraintList = []
self.CachedObjectiveList = []
#split specifiers into constraints and objectives for easier processing
for s in self.SpecifierList:
#regular constraints/objectives
if isinstance(s, Constraint):
self.CachedConstraintList.append(s)
elif isinstance(s, Objective):
self.CachedObjectiveList.append(s)
#plugins
elif isinstance(s, SpecifierPlugin):
for c in s.get_specifiers(self, self.Character):
if isinstance(c, Constraint):
self.CachedConstraintList.append(c)
elif isinstance(c, Objective):
self.CachedObjectiveList.append(c)
#generate the ampl model
amplcmd = ''
amplcmd += self._write_header()
amplcmd += self.Character.get_model() #character body & physical eq.
amplcmd += self._write_specifiers() #other constraints & objectives
amplcmd += self._write_footer(solver)
#for debugging purposes we'll write out the ampl file
if writeAMPL:
file = open(self.Name + '.ampl', 'w')
file.write(amplcmd)
file.close()
try:
#try to load cached solution
file = open(self.Name + '.amplsol', 'r')
            amplresult = file.read()
file.close()
#pretend it solved, and use the callback
self._solvedcallback(amplresult)
except IOError:
#couldn't load cached solution file, we'll have to solve it with ampl
#use the thread pool for this
pool.add_job(amplsolve, args=[amplcmd], return_callback=self._solvedcallback)
def optimize(self, solver):
'''This handles the 'outer' optimization that's necessary to determine
animation length and contact timings (if they are not explicitly provided).'''
optLength = self.Length is None
optContacts = self.ContactTimesDict is None and len(self.Character.get_joints_contact()) > 0
if optLength or optContacts:
LOG.info("Starting CMA-ES optimization for %s..." % self.Name)
startPoint = []
lowerBounds = []
upperBounds = []
if optLength:
startPoint.append(0.5)
lowerBounds.append(self.get_frame_length() * 3.0) #3 frame minimum
upperBounds.append(1.0)
if optContacts:
f = 1.0 / len(self.Character.get_joints_contact())
for j, joint in enumerate(self.Character.get_joints_contact()):
evenly = (j * f) + (f / 2.0) #space the contacts evenly
startPoint.extend([evenly, 0.5])
lowerBounds.extend([0.0, 0.0])
upperBounds.extend([1.0, 1.0])
#optimize anim length and contact timings with CMA-ES
es = cma.CMAEvolutionStrategy(startPoint, 1.0 / 3.0,
{'maxiter':100, 'bounds':[lowerBounds, upperBounds]})
# iterate until termination
while not es.stop:
X = []
fit = []
for i in range(es.popsize):
curr_fit = numpy.NaN
while curr_fit is numpy.NaN:
x = es.ask(1)[0]
if optLength:
self.Length = x[0] * 3 #TODO: handle scaling better
if optContacts:
m = 1 if optLength else 0
self.ContactTimesDict = {}
for j, joint in enumerate(self.Character.get_joints_contact()):
self.ContactTimesDict[joint] = [(x[j * 2 + 0 + m], x[j * 2 + 1 + m])]
curr_fit = self._solve(solver) #might return numpy.NaN
fit.append(curr_fit)
X.append(x)
print '.',
es.tell(X, fit)
print ''
es.printline(1)
print 'termination: ', es.stopdict
print(es.best[0])
#TODO: Because we don't bother saving the animation data, we have to
#solve the best one (again) to get it. This code is just a re-run
#from above, except it solves the best one found
if optLength:
self.Length = es.best[0][0] * 3 #TODO: handle scaling better
if optContacts:
m = 1 if optLength else 0
self.ContactTimesDict = {}
for j, joint in enumerate(self.Character.get_joints_contact()):
self.ContactTimesDict[joint] = [(es.best[0][j * 2 + 0 + m], es.best[0][j * 2 + 1 + m])]
return self._solve(solver, writeAMPL=True)
else:
LOG.info("CMA-ES optimization unnecessary for %s. Solving..." % self.Name)
return self._solve(solver, writeAMPL=True)
def frame_interpolate(character, root, frameDataA, frameDataB, weight):
'''Given a character, a root body, two frames of animation data, and a
weight, this returns an interpolated frame of animation data'''
assert(0.0 <= weight <= 1.0)
#setup new animation data structure
ret = {}
for body in character.BodyList:
ret[str(body.Name)] = [None]
#traverse character, starting at root
for parent,child,joint in character.traverse_bfs(root):
if parent is None:
#special case: this is the root body
#just do a straight-forward lerp for position and rotation
dataA = frameDataA[str(child.Name)][0]
dataB = frameDataB[str(child.Name)][0]
lerpData = num_q_lerp(dataA, dataB, weight)
ret[str(child.Name)] = [lerpData]
else:
#regular case: child rotation must be handled relative to parent
#frameA
parentDataA, childDataA = frameDataA[str(parent.Name)][0], frameDataA[str(child.Name)][0]
assert(True not in [(math.isnan(x) or math.isinf(x)) for x in parentDataA+childDataA])
parentEulerA, childEulerA = parentDataA[3:dof], childDataA[3:dof]
parentQuatA, childQuatA = num_euler_to_quat(parentEulerA), num_euler_to_quat(childEulerA)
#express child relative to parent
relativeQuatA = parentQuatA.inverse() * childQuatA
#frameB
parentDataB, childDataB = frameDataB[str(parent.Name)][0], frameDataB[str(child.Name)][0]
assert(True not in [(math.isnan(x) or math.isinf(x)) for x in parentDataB+childDataB])
parentEulerB, childEulerB = parentDataB[3:dof], childDataB[3:dof]
parentQuatB, childQuatB = num_euler_to_quat(parentEulerB), num_euler_to_quat(childEulerB)
#express child relative to parent
relativeQuatB = parentQuatB.inverse() * childQuatB
#do the interpolation
relativeQuatA,relativeQuatB = relativeQuatA.normalize(), relativeQuatB.normalize()
newChildQuat = slerp(weight, relativeQuatA, relativeQuatB)
#undo relative transform
newParentData = ret[str(parent.Name)][0]
newParentEuler = newParentData[3:dof]
newParentQuat = num_euler_to_quat(newParentEuler)
newChildQuat = newParentQuat * newChildQuat
newChildEuler = num_quat_to_euler(newChildQuat)
#now calculate the position
pjp, cjp = [], []
if joint.BodyA is parent and joint.BodyB is child:
pjp, cjp = joint.PointA, joint.PointB
elif joint.BodyA is child and joint.BodyB is parent:
pjp, cjp = joint.PointB, joint.PointA
else:
raise BaseException("Output from character.traverse_bfs() makes no sense")
jointPosWorld = num_world_xf(pjp, newParentData)
jointPosWorld = map(float, jointPosWorld)
newChildPos = cgtypes.vec3(jointPosWorld) - newChildQuat.rotateVec(cgtypes.vec3(cjp))
ret[str(child.Name)] = [[newChildPos.x, newChildPos.y, newChildPos.z] + newChildEuler]
return ret | gpl-3.0 | -2,979,587,800,556,766,000 | 33.627172 | 147 | 0.67424 | false |
toogad/PooPyLab_Project | PooPyLab/ASMModel/asm_2d.py | 1 | 26614 | # This file is part of PooPyLab.
#
# PooPyLab is a simulation software for biological wastewater treatment processes using International Water Association
# Activated Sludge Models.
#
# Copyright (C) Kai Zhang
#
# PooPyLab is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# PooPyLab is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with PooPyLab. If not, see
# <http://www.gnu.org/licenses/>.
#
#
# This is the definition of the ASM1 model to be imported as part of the Reactor object
#
#
"""Definition of the IWA Activated Sludge Model #1.
Reference:
Grady Jr. et al, 1999: Biological Wastewater Treatment, 2nd Ed.
IWA Task Group on Math. Modelling for Design and Operation of Biological
Wastewater Treatment, 2000. Activated Sludge Model No. 1, in Activated
Sludge Models ASM1, ASM2, ASM2d, and ASM 3.
"""
## @namespace asm_2d
## @file asm_2d.py
from ..ASMModel import constants
from .asmbase import asm_model
class ASM_1(asm_model):
"""
    Kinetics and stoichiometry of the IWA ASM #1 model.
"""
__id = 0
def __init__(self, ww_temp=20, DO=2):
"""
Initialize the model with water temperature and dissolved O2.
Args:
ww_temp: wastewater temperature, degC;
DO: dissolved oxygen, mg/L
Return:
None
See:
            _set_ideal_kinetics_20C_to_defaults();
_set_params();
_set_stoichs().
"""
asm_model.__init__(self)
self.__class__.__id += 1
self._set_ideal_kinetics_20C_to_defaults()
# wastewater temperature used in the model, degC
self._temperature = ww_temp
# mixed liquor bulk dissolved oxygen, mg/L
self._bulk_DO = DO
# temperature difference b/t what's used and baseline (20C), degC
self._delta_t = self._temperature - 20
self.update(ww_temp, DO)
        # The ASM components IN THE REACTOR
# For ASM #1:
#
# self._comps[0]: S_DO as COD
# self._comps[1]: S_I
# self._comps[2]: S_S
# self._comps[3]: S_NH
# self._comps[4]: S_NS
# self._comps[5]: S_NO
# self._comps[6]: S_ALK
# self._comps[7]: X_I
# self._comps[8]: X_S
# self._comps[9]: X_BH
# self._comps[10]: X_BA
# self._comps[11]: X_D
# self._comps[12]: X_NS
#
# ASM model components
self._comps = [0.0] * constants._NUM_ASM1_COMPONENTS
return None
def _set_ideal_kinetics_20C_to_defaults(self):
"""
Set the kinetic params/consts @ 20C to default ideal values.
See:
update();
_set_params();
_set_stoichs().
"""
# Ideal Growth Rate of Heterotrophs (u_max_H, 1/DAY)
self._kinetics_20C['u_max_H'] = 6.0
# Decay Rate of Heterotrophs (b_H, 1/DAY)
self._kinetics_20C['b_LH'] = 0.62
# Ideal Growth Rate of Autotrophs (u_max_A, 1/DAY)
self._kinetics_20C['u_max_A'] = 0.8
# Decay Rate of Autotrophs (b_A, 1/DAY)
# A wide range exists. Table 6.3 on Grady 1999 shows 0.096 (1/d). IWA's
# ASM report did not even show b_A on its table for typical value. ASIM
# software show a value of "0.000", probably cut off by the print
# function. I can only assume it was < 0.0005 (1/d) at 20C.
#self._kinetics_20C['b_LA'] = 0.096
self._kinetics_20C['b_LA'] = 0.0007
# Half Growth Rate Concentration of Heterotrophs (K_s, mgCOD/L)
self._kinetics_20C['K_S'] = 20.0
# Switch Coefficient for Dissolved O2 of Hetero. (K_OH, mgO2/L)
self._kinetics_20C['K_OH'] = 0.2
# Association Conc. for Dissolved O2 of Auto. (K_OA, mgN/L)
self._kinetics_20C['K_OA'] = 0.4
# Association Conc. for NH3-N of Auto. (K_NH, mgN/L)
self._kinetics_20C['K_NH'] = 1.0
# Association Conc. for NOx of Hetero. (K_NO, mgN/L)
self._kinetics_20C['K_NO'] = 0.5
# Hydrolysis Rate (k_h, mgCOD/mgBiomassCOD-day)
self._kinetics_20C['k_h'] = 3.0
# Half Rate Conc. for Hetero. Growth on Part. COD
# (K_X, mgCOD/mgBiomassCOD)
self._kinetics_20C['K_X'] = 0.03
# Ammonification of Org-N in biomass (k_a, L/mgBiomassCOD-day)
self._kinetics_20C['k_a'] = 0.08
# Yield of Hetero. Growth on COD (Y_H, mgBiomassCOD/mgCODremoved)
self._kinetics_20C['Y_H'] = 0.67
# Yield of Auto. Growth on TKN (Y_A, mgBiomassCOD/mgTKNoxidized)
self._kinetics_20C['Y_A'] = 0.24
# Fract. of Debris in Lysed Biomass(f_D, gDebrisCOD/gBiomassCOD)
self._kinetics_20C['f_D'] = 0.08
# Correction Factor for Hydrolysis (cf_h, unitless)
self._kinetics_20C['cf_h'] = 0.4
# Correction Factor for Anoxic Heterotrophic Growth (cf_g, unitless)
self._kinetics_20C['cf_g'] = 0.8
# Ratio of N in Active Biomass (i_N_XB, mgN/mgActiveBiomassCOD)
self._kinetics_20C['i_N_XB'] = 0.086
# Ratio of N in Debris Biomass (i_N_XD, mgN/mgDebrisBiomassCOD)
self._kinetics_20C['i_N_XD'] = 0.06
return None
def _set_params(self):
"""
Set the kinetic parameters/constants @ project temperature.
        This function updates self._params based on the model temperature
and DO.
See:
update();
            _set_ideal_kinetics_20C_to_defaults();
_set_stoichs().
"""
# Ideal Growth Rate of Heterotrophs (u_max_H, 1/DAY)
self._params['u_max_H'] = self._kinetics_20C['u_max_H']\
* pow(1.072, self._delta_t)
# Decay Rate of Heterotrophs (b_H, 1/DAY)
self._params['b_LH'] = self._kinetics_20C['b_LH']\
* pow(1.12, self._delta_t)
# Ideal Growth Rate of Autotrophs (u_max_A, 1/DAY)
self._params['u_max_A'] = self._kinetics_20C['u_max_A']\
* pow(1.103, self._delta_t)
# Decay Rate of Autotrophs (b_A, 1/DAY)
self._params['b_LA'] = self._kinetics_20C['b_LA']\
* pow(1.114, self._delta_t)
# Half Growth Rate Concentration of Heterotrophs (K_s, mgCOD/L)
self._params['K_S'] = self._kinetics_20C['K_S']
# Switch Coefficient for Dissolved O2 of Hetero. (K_OH, mgO2/L)
self._params['K_OH'] = self._kinetics_20C['K_OH']
# Association Conc. for Dissolved O2 of Auto. (K_OA, mgN/L)
self._params['K_OA'] = self._kinetics_20C['K_OA']
# Association Conc. for NH3-N of Auto. (K_NH, mgN/L)
self._params['K_NH'] = self._kinetics_20C['K_NH']
# Association Conc. for NOx of Hetero. (K_NO, mgN/L)
self._params['K_NO'] = self._kinetics_20C['K_NO']
# Hydrolysis Rate (k_h, mgCOD/mgBiomassCOD-day)
self._params['k_h'] = self._kinetics_20C['k_h']\
* pow(1.116, self._delta_t)
# Half Rate Conc. for Hetero. Growth on Part. COD
# (K_X, mgCOD/mgBiomassCOD)
self._params['K_X'] = self._kinetics_20C['K_X']\
* pow(1.116, self._delta_t)
# Ammonification of Org-N in biomass (k_a, L/mgBiomassCOD-day)
self._params['k_a'] = self._kinetics_20C['k_a']\
* pow(1.072, self._delta_t)
# Yield of Hetero. Growth on COD (Y_H, mgBiomassCOD/mgCODremoved)
self._params['Y_H'] = self._kinetics_20C['Y_H']
# Yield of Auto. Growth on TKN (Y_A, mgBiomassCOD/mgTKNoxidized)
self._params['Y_A'] = self._kinetics_20C['Y_A']
# Fract. of Debris in Lysed Biomass(f_D, gDebrisCOD/gBiomassCOD)
self._params['f_D'] = self._kinetics_20C['f_D']
# Correction Factor for Hydrolysis (cf_h, unitless)
self._params['cf_h'] = self._kinetics_20C['cf_h']
# Correction Factor for Anoxic Heterotrophic Growth (cf_g, unitless)
self._params['cf_g'] = self._kinetics_20C['cf_g']
# Ratio of N in Active Biomass (i_N_XB, mgN/mgActiveBiomassCOD)
self._params['i_N_XB'] = self._kinetics_20C['i_N_XB']
# Ratio of N in Debris Biomass (i_N_XD, mgN/mgDebrisBiomassCOD)
self._params['i_N_XD'] = self._kinetics_20C['i_N_XD']
return None
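
    # Worked example (illustrative): the pow(theta, delta_t) factors apply an
    # Arrhenius-type temperature correction, e.g. at 10 degC
    # u_max_H = 6.0 * 1.072**(10 - 20) ~= 3.0 1/d, about half the 20 degC
    # value.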
    # STOICHIOMETRIC MATRIX
def _set_stoichs(self):
"""
Set the stoichiometrics for the model.
Note:
            Make sure to match the .csv model template file in the
            model_builder folder (as of Sep 04, 2019):
_stoichs['x_y'] ==> x is process rate id, and y is component id
See:
_set_params();
            _set_ideal_kinetics_20C_to_defaults();
update().
"""
# S_O for aerobic hetero. growth, as O2
self._stoichs['0_0'] = (self._params['Y_H'] - 1.0) \
/ self._params['Y_H']
# S_O for aerobic auto. growth, as O2
self._stoichs['2_0'] = (self._params['Y_A'] - 4.57) \
/ self._params['Y_A']
# S_S for aerobic hetero. growth, as COD
self._stoichs['0_2'] = -1.0 / self._params['Y_H']
# S_S for anoxic hetero. growth, as COD
self._stoichs['1_2'] = -1.0 / self._params['Y_H']
# S_S for hydrolysis of part. substrate
self._stoichs['6_2'] = 1.0
# S_NH required for aerobic hetero. growth, as N
self._stoichs['0_3'] = -self._params['i_N_XB']
# S_NH required for anoxic hetero. growth, as N
self._stoichs['1_3'] = -self._params['i_N_XB']
# S_NH required for aerobic auto. growth, as N
self._stoichs['2_3'] = -self._params['i_N_XB'] \
- 1.0 / self._params['Y_A']
# S_NH from ammonification, as N
self._stoichs['5_3'] = 1.0
# S_NS used by ammonification, as N
self._stoichs['5_4'] = -1.0
# S_NS from hydrolysis of part.TKN, as N
self._stoichs['7_4'] = 1.0
# S_NO for anoxic hetero. growth, as N
self._stoichs['1_5'] = (self._params['Y_H'] - 1.0) \
/ (2.86 * self._params['Y_H'])
# S_NO from nitrification, as N
self._stoichs['2_5'] = 1.0 / self._params['Y_A']
# S_ALK consumed by aerobic hetero. growth, as mM CaCO3
self._stoichs['0_6'] = -self._params['i_N_XB'] / 14.0
# S_ALK generated by anoxic hetero. growth, as mM CaCO3
self._stoichs['1_6'] = (1.0 - self._params['Y_H']) \
/ (14.0 * 2.86 * self._params['Y_H']) \
- self._params['i_N_XB'] / 14.0
# S_ALK consumed by aerobic auto. growth, as mM CaCO3
self._stoichs['2_6'] = -self._params['i_N_XB'] / 14 \
- 1.0 / (7.0 * self._params['Y_A'])
# S_ALK generated by ammonification, as mM CaCO3
self._stoichs['5_6'] = 1.0 / 14.0
# X_S from hetero. decay, as COD
self._stoichs['3_8'] = 1.0 - self._params['f_D']
# X_S from auto. decay, as COD
self._stoichs['4_8'] = 1.0 - self._params['f_D']
# X_S consumed by hydrolysis of biomass
self._stoichs['6_8'] = -1.0
# X_BH from aerobic hetero. growth, as COD
self._stoichs['0_9'] = 1.0
# X_BH from anoxic hetero. growth, as COD
self._stoichs['1_9'] = 1.0
# X_BH lost in hetero. decay, as COD
self._stoichs['3_9'] = -1.0
# X_BA from aerobic auto. growth, as COD
self._stoichs['2_10'] = 1.0
# X_BA lost in auto. decay, as COD
self._stoichs['4_10'] = -1.0
# X_D from hetero. decay, as COD
self._stoichs['3_11'] = self._params['f_D']
# X_D from auto. decay, as COD
self._stoichs['4_11'] = self._params['f_D']
# X_NS from hetero. decay, as N
self._stoichs['3_12'] = self._params['i_N_XB'] - self._params['f_D'] \
* self._params['i_N_XD']
# X_NS from auto. decay, as COD
self._stoichs['4_12'] = self._params['i_N_XB'] - self._params['f_D'] \
* self._params['i_N_XD']
# X_NS consumed in hydrolysis of part. TKN, as N
self._stoichs['7_12'] = -1.0
return None
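
    # Reading the matrix (illustrative): _stoichs['0_2'] = -1/Y_H means that
    # process 0 (aerobic heterotrophic growth) consumes 1/Y_H g of S_S per g
    # of new biomass COD formed; each overall component rate below is the sum
    # of stoichiometric coefficient times process rate over all processes
    # that touch that component.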
# PROCESS RATE DEFINITIONS (Rj, M/L^3/T):
#
def _r0_AerGH(self, comps):
"""
Aerobic Growth Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_H'] \
* self._monod(comps[2], self._params['K_S']) \
* self._monod(comps[0], self._params['K_OH']) \
* comps[9]
def _r1_AxGH(self, comps):
"""
Anoxic Growth Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_H'] \
* self._monod(comps[2], self._params['K_S']) \
* self._monod(self._params['K_OH'], comps[0]) \
* self._monod(comps[5], self._params['K_NO']) \
* self._params['cf_g'] \
* comps[9]
def _r2_AerGA(self, comps):
"""
Aerobic Growth Rate of Autotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_A'] \
* self._monod(comps[3], self._params['K_NH']) \
* self._monod(comps[0], self._params['K_OA']) \
* comps[10]
def _r3_DLH(self, comps):
"""
Death and Lysis Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['b_LH'] * comps[9]
def _r4_DLA(self, comps):
"""
Death and Lysis Rate of Autotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['b_LA'] * comps[10]
def _r5_AmmSN(self, comps):
"""
        Ammonification Rate of Soluble Organic N (mgN/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['k_a'] \
* comps[4] \
* comps[9]
def _r6_HydX(self, comps):
"""
Hydrolysis Rate of Particulate Organics (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['k_h'] \
* self._monod(comps[8] / comps[9], \
self._params['K_X']) \
* (self._monod(comps[0], self._params['K_OH']) \
+ self._params['cf_h'] \
* self._monod(self._params['K_OH'], comps[0]) \
* self._monod(comps[5], self._params['K_NO'])) \
* comps[9]
def _r7_HydXN(self, comps):
"""
Hydrolysis Rate of Particulate Organic N (mgN/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._r6_HydX(comps) * comps[12] / comps[8]
# OVERALL PROCESS RATE EQUATIONS FOR INDIVIDUAL COMPONENTS
def _rate0_S_DO(self, comps):
"""
Overall process rate for dissolved O2 (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_0'] * self._r0_AerGH(comps)\
+ self._stoichs['2_0'] * self._r2_AerGA(comps)
def _rate1_S_I(self, comps):
"""
Overall process rate for inert soluble COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
0.0
"""
return 0.0
def _rate2_S_S(self, comps):
"""
Overall process rate for soluble biodegradable COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_2'] * self._r0_AerGH(comps)\
+ self._stoichs['1_2'] * self._r1_AxGH(comps)\
+ self._stoichs['6_2'] * self._r6_HydX(comps)
def _rate3_S_NH(self, comps):
"""
Overall process rate for ammonia nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_3'] * self._r0_AerGH(comps)\
+ self._stoichs['1_3'] * self._r1_AxGH(comps)\
+ self._stoichs['2_3'] * self._r2_AerGA(comps)\
+ self._stoichs['5_3'] * self._r5_AmmSN(comps)
def _rate4_S_NS(self, comps):
"""
Overall process rate for soluble organic nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['5_4'] * self._r5_AmmSN(comps)\
+ self._stoichs['7_4'] * self._r7_HydXN(comps)
def _rate5_S_NO(self, comps):
"""
Overall process rate for nitrite/nitrate nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['1_5'] * self._r1_AxGH(comps)\
+ self._stoichs['2_5'] * self._r2_AerGA(comps)
def _rate6_S_ALK(self, comps):
"""
Overall process rate for alkalinity (mg/L/d as CaCO3)
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_6'] * self._r0_AerGH(comps)\
+ self._stoichs['1_6'] * self._r1_AxGH(comps)\
+ self._stoichs['2_6'] * self._r2_AerGA(comps)\
+ self._stoichs['5_6'] * self._r5_AmmSN(comps)
def _rate7_X_I(self, comps):
"""
Overall process rate for inert particulate COD (mgCOD/L/d)
Args:
comps: list of current model components (concentrations).
Return:
0.0
"""
return 0.0
def _rate8_X_S(self, comps):
"""
Overall process rate for particulate biodegradable COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_8'] * self._r3_DLH(comps)\
+ self._stoichs['4_8'] * self._r4_DLA(comps)\
+ self._stoichs['6_8'] * self._r6_HydX(comps)
def _rate9_X_BH(self, comps):
"""
Overall process rate for heterotrophic biomass (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_9'] * self._r0_AerGH(comps)\
+ self._stoichs['1_9'] * self._r1_AxGH(comps)\
+ self._stoichs['3_9'] * self._r3_DLH(comps)
def _rate10_X_BA(self, comps):
"""
Overall process rate for autotrophic biomass (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['2_10'] * self._r2_AerGA(comps)\
+ self._stoichs['4_10'] * self._r4_DLA(comps)
def _rate11_X_D(self, comps):
"""
Overall process rate for biomass debris (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_11'] * self._r3_DLH(comps)\
+ self._stoichs['4_11'] * self._r4_DLA(comps)
def _rate12_X_NS(self, comps):
"""
Overall process rate for particulate organic nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_12'] * self._r3_DLH(comps)\
+ self._stoichs['4_12'] * self._r4_DLA(comps)\
+ self._stoichs['7_12'] * self._r7_HydXN(comps)
def _dCdt(self, t, mo_comps, vol, flow, in_comps, fix_DO, DO_sat_T):
'''
Defines dC/dt for the reactor based on mass balance.
Overall mass balance:
dComp/dt == InfFlow / Actvol * (in_comps - mo_comps) + GrowthRate
== (in_comps - mo_comps) / HRT + GrowthRate
Args:
t: time for use in ODE integration routine, d
mo_comps: list of model component for mainstream outlet, mg/L.
vol: reactor's active volume, m3;
flow: reactor's total inflow, m3/d
in_comps: list of model components for inlet, mg/L;
            fix_DO: whether to use a fixed DO setpoint, bool
DO_sat_T: saturation DO of the project elev. and temp, mg/L
Return:
dC/dt of the system ([float])
ASM1 Components:
0_S_DO, 1_S_I, 2_S_S, 3_S_NH, 4_S_NS, 5_S_NO, 6_S_ALK,
7_X_I, 8_X_S, 9_X_BH, 10_X_BA, 11_X_D, 12_X_NS
'''
_HRT = vol / flow
        # set the DO rate to zero since DO is held at a fixed conc., which is
        # recommended for steady state simulation; alternatively, use the given
        # KLa to dynamically estimate residual DO
if fix_DO or self._bulk_DO == 0:
result = [0.0]
        else: # TODO: what if the user provides a fixed scfm of air?
result = [(in_comps[0] - mo_comps[0] ) / _HRT
+ self._KLa * (DO_sat_T - mo_comps[0])
+ self._rate0_S_DO(mo_comps)]
result.append((in_comps[1] - mo_comps[1]) / _HRT
+ self._rate1_S_I(mo_comps))
result.append((in_comps[2] - mo_comps[2]) / _HRT
+ self._rate2_S_S(mo_comps))
result.append((in_comps[3] - mo_comps[3]) / _HRT
+ self._rate3_S_NH(mo_comps))
result.append((in_comps[4] - mo_comps[4]) / _HRT
+ self._rate4_S_NS(mo_comps))
result.append((in_comps[5] - mo_comps[5]) / _HRT
+ self._rate5_S_NO(mo_comps))
result.append((in_comps[6] - mo_comps[6]) / _HRT
+ self._rate6_S_ALK(mo_comps))
result.append((in_comps[7] - mo_comps[7]) / _HRT
+ self._rate7_X_I(mo_comps))
result.append((in_comps[8] - mo_comps[8]) / _HRT
+ self._rate8_X_S(mo_comps))
result.append((in_comps[9] - mo_comps[9]) / _HRT
+ self._rate9_X_BH(mo_comps))
result.append((in_comps[10] - mo_comps[10]) / _HRT
+ self._rate10_X_BA(mo_comps))
result.append((in_comps[11] - mo_comps[11]) / _HRT
+ self._rate11_X_D(mo_comps))
result.append((in_comps[12] - mo_comps[12]) / _HRT
+ self._rate12_X_NS(mo_comps))
return result[:]
def _dCdt_kz(self, mo_comps, vol, flow, in_comps):
'''
Defines dC/dt for the reactor based on mass balance.
Overall mass balance:
dComp/dt == InfFlow / Actvol * (in_comps - mo_comps) + GrowthRate
== (in_comps - mo_comps) / HRT + GrowthRate
Args:
mo_comps: list of model component for mainstream outlet, mg/L.
vol: reactor's active volume, m3;
flow: reactor's total inflow, m3/d
in_comps: list of model components for inlet, mg/L;
Return:
dC/dt of the system ([float])
ASM1 Components:
0_S_DO, 1_S_I, 2_S_S, 3_S_NH, 4_S_NS, 5_S_NO, 6_S_ALK,
7_X_I, 8_X_S, 9_X_BH, 10_X_BA, 11_X_D, 12_X_NS
'''
_HRT = vol / flow
        # set the DO rate to zero since DO is held at a fixed conc.
result = [0.0]
result.append((in_comps[1] - mo_comps[1]) / _HRT
+ self._rate1_S_I(mo_comps))
result.append((in_comps[2] - mo_comps[2]) / _HRT
+ self._rate2_S_S(mo_comps))
result.append((in_comps[3] - mo_comps[3]) / _HRT
+ self._rate3_S_NH(mo_comps))
result.append((in_comps[4] - mo_comps[4]) / _HRT
+ self._rate4_S_NS(mo_comps))
result.append((in_comps[5] - mo_comps[5]) / _HRT
+ self._rate5_S_NO(mo_comps))
result.append((in_comps[6] - mo_comps[6]) / _HRT
+ self._rate6_S_ALK(mo_comps))
result.append((in_comps[7] - mo_comps[7]) / _HRT
+ self._rate7_X_I(mo_comps))
result.append((in_comps[8] - mo_comps[8]) / _HRT
+ self._rate8_X_S(mo_comps))
result.append((in_comps[9] - mo_comps[9]) / _HRT
+ self._rate9_X_BH(mo_comps))
result.append((in_comps[10] - mo_comps[10]) / _HRT
+ self._rate10_X_BA(mo_comps))
result.append((in_comps[11] - mo_comps[11]) / _HRT
+ self._rate11_X_D(mo_comps))
result.append((in_comps[12] - mo_comps[12]) / _HRT
+ self._rate12_X_NS(mo_comps))
return result[:]
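

# --- Illustrative sketch (not part of the original module) -------------------
# Minimal usage example. All concentrations below are made-up placeholder
# values chosen only to show the component ordering and call signature; they
# are not calibrated influent or mixed-liquor data.
def _example_asm1_rates():
    model = ASM_1(ww_temp=20, DO=2.0)
    # 0_S_DO, 1_S_I, 2_S_S, 3_S_NH, 4_S_NS, 5_S_NO, 6_S_ALK,
    # 7_X_I, 8_X_S, 9_X_BH, 10_X_BA, 11_X_D, 12_X_NS (all mg/L)
    influent = [2.0, 30.0, 200.0, 25.0, 6.0, 0.0, 250.0,
                25.0, 100.0, 0.1, 0.1, 0.0, 5.0]
    mixed_liquor = [2.0, 30.0, 5.0, 1.0, 0.5, 8.0, 150.0,
                    1000.0, 50.0, 1500.0, 100.0, 300.0, 3.0]
    # a 3000 m3 tank fed at 10000 m3/d gives HRT = 0.3 d
    return model._dCdt_kz(mixed_liquor, 3000.0, 10000.0, influent)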
| gpl-3.0 | -344,751,125,513,634,200 | 30.683333 | 119 | 0.512399 | false |
MISP/MISP-TAXII--broken- | taxii_client.py | 1 | 3442 | #!flask/bin/python
__version__ = '0.2'
import os
import argparse
import datetime
from dateutil.tz import tzutc
import libtaxii as t
import libtaxii.messages as tm
import libtaxii.clients as tc
try:
import simplejson as json
except ImportError:
import json
PID_FILE = '/tmp/taxii_client.pid'
PROXY_ENABLED = False
PROXY_SCHEME = 'http'
PROXY_STRING = '127.0.0.1:8008'
ATTACHMENTS_PATH_OUT = '/var/tmp/files_out'
"""Search for attachments in this path and attach them to the attribute"""
TAXII_SERVICE_HOST = '127.0.0.1'
TAXII_SERVICE_PORT = 4242
TAXII_SERVICE_PATH = '/inbox'
def is_process_running(pid):
    try:
        os.kill(pid, 0)
    except OSError:
        return False
    return True
def check_process(path):
if os.path.exists(path):
pid = int(open(path).read())
if is_process_running(pid):
print "Process %d is still running" % pid
raise SystemExit
else:
os.remove(path)
pid = os.getpid()
open(path, 'w').write(str(pid))
return pid
def create_inbox_message(data, content_binding=t.VID_CERT_EU_JSON_10):
"""Creates TAXII message from data"""
content_block = tm.ContentBlock(
content_binding=content_binding,
content=data,
timestamp_label=datetime.datetime.now(tzutc()))
msg_id = tm.generate_message_id()
inbox_message = tm.InboxMessage(
message_id=msg_id,
content_blocks=[content_block])
return msg_id, inbox_message.to_json()
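
# Illustrative only (hypothetical payload): given a JSON string such as
# '{"event": 1}', the call
#   msg_id, payload = create_inbox_message('{"event": 1}')
# returns a generated message id plus the TAXII inbox message serialized as
# JSON, ready to POST to the inbox service below.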
def main(**args):
check_process(PID_FILE)
client = tc.HttpClient()
if PROXY_ENABLED:
client.proxy_type = PROXY_SCHEME
client.proxy_string = PROXY_STRING
msg_id, msg = '', ''
if args['data_type'] == 'string':
msg_id, msg = create_inbox_message(args['data'])
else:
print '[-] Please use a JSON string'
raise SystemExit
http_response = client.callTaxiiService2(
args['host'], args['path'],
t.VID_CERT_EU_JSON_10, msg, args['port'])
taxii_response = t.get_message_from_http_response(http_response, msg_id)
print(taxii_response.to_json())
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='TAXII Client',
epilog='DO NOT USE IN PRODUCTION',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"-t", "--type",
dest="data_type",
choices=['string'],
default='string',
        help='Data type you are posting; only "string" is currently supported')
parser.add_argument(
"-d", "--data",
dest="data",
required=False,
help='Data to be posted to TAXII Service')
parser.add_argument(
"-th", "--taxii_host",
dest="host",
default=TAXII_SERVICE_HOST,
help='TAXII Service Host')
parser.add_argument(
"-tp", "--taxii_port",
dest="port",
default=TAXII_SERVICE_PORT,
help='TAXII Service Port')
parser.add_argument(
"-tpath", "--taxii_path",
dest="path",
default=TAXII_SERVICE_PATH,
help='TAXII Service Path')
parser.add_argument(
"-key", "--api_key",
dest="api_key",
help='MISP API Key')
parser.add_argument(
"-v", "--version",
action='version',
version='%(prog)s {version}'.format(version=__version__))
args = parser.parse_args()
main(**vars(args))
| agpl-3.0 | 7,958,191,412,627,987,000 | 23.06993 | 76 | 0.600813 | false |
palmer159/openstack-test | packstack/plugins/serverprep_001.py | 1 | 27202 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Plugin responsible for Server Preparation.
"""
import os
import re
import logging
import platform
from packstack.installer import exceptions
from packstack.installer import utils
from packstack.installer import validators
from packstack.modules.common import filtered_hosts
from packstack.modules.common import is_all_in_one
# ------------ Server Preparation Packstack Plugin Initialization -------------
PLUGIN_NAME = "OS-SERVERPREPARE"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')
def initConfig(controller):
conf_params = {
"SERVERPREPARE": [
{"CMD_OPTION": "use-epel",
"USAGE": "To subscribe each server to EPEL enter \"y\"",
"PROMPT": "To subscribe each server to EPEL enter \"y\"",
"OPTION_LIST": ["y", "n"],
"VALIDATORS": [validators.validate_options],
"DEFAULT_VALUE": "n",
"MASK_INPUT": False,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_USE_EPEL",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "additional-repo",
"USAGE": ("A comma separated list of URLs to any additional yum "
"repositories to install"),
"PROMPT": ("Enter a comma separated list of URLs to any "
"additional yum repositories to install"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": False,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_REPO",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
],
"RHEL": [
{"CMD_OPTION": "rh-username",
"USAGE": ("To subscribe each server with Red Hat subscription "
"manager, include this with CONFIG_RH_PW"),
"PROMPT": "To subscribe each server to Red Hat enter a username ",
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": False,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_RH_USER",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-server",
"USAGE": ("To subscribe each server with RHN Satellite,fill "
"Satellite's URL here. Note that either satellite's "
"username/password or activation key has "
"to be provided"),
"PROMPT": ("To subscribe each server with RHN Satellite enter "
"RHN Satellite server URL"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": False,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_URL",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
],
"RHSM": [
{"CMD_OPTION": "rh-password",
"USAGE": ("To subscribe each server with Red Hat subscription "
"manager, include this with CONFIG_RH_USER"),
"PROMPT": ("To subscribe each server to Red Hat enter your "
"password"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_RH_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rh-enable-optional",
"USAGE": "To enable RHEL optional repos use value \"y\"",
"PROMPT": "To enable RHEL optional repos use value \"y\"",
"OPTION_LIST": ["y", "n"],
"VALIDATORS": [validators.validate_options],
"DEFAULT_VALUE": "y",
"MASK_INPUT": False,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_RH_OPTIONAL",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rh-proxy-host",
"USAGE": ("Specify a HTTP proxy to use with Red Hat subscription "
"manager"),
"PROMPT": ("Specify a HTTP proxy to use with Red Hat subscription"
" manager"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_RH_PROXY",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
],
"RHSM_PROXY": [
{"CMD_OPTION": "rh-proxy-port",
"USAGE": ("Specify port of Red Hat subscription manager HTTP "
"proxy"),
"PROMPT": ("Specify port of Red Hat subscription manager HTTP "
"proxy"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": False,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_RH_PROXY_PORT",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rh-proxy-user",
"USAGE": ("Specify a username to use with Red Hat subscription "
"manager HTTP proxy"),
"PROMPT": ("Specify a username to use with Red Hat subscription "
"manager HTTP proxy"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_RH_PROXY_USER",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rh-proxy-password",
"USAGE": ("Specify a password to use with Red Hat subscription "
"manager HTTP proxy"),
"PROMPT": ("Specify a password to use with Red Hat subscription "
"manager HTTP proxy"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_RH_PROXY_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
],
"SATELLITE": [
{"CMD_OPTION": "rhn-satellite-username",
"USAGE": "Username to access RHN Satellite",
"PROMPT": ("Enter RHN Satellite username or leave plain if you "
"will use activation key instead"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": False,
"LOOSE_VALIDATION": True,
"CONF_NAME": "CONFIG_SATELLITE_USER",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-password",
"USAGE": "Password to access RHN Satellite",
"PROMPT": ("Enter RHN Satellite password or leave plain if you "
"will use activation key instead"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-activation-key",
"USAGE": "Activation key for subscription to RHN Satellite",
"PROMPT": ("Enter RHN Satellite activation key or leave plain if "
"you used username/password instead"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_AKEY",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-cacert",
"USAGE": "Specify a path or URL to a SSL CA certificate to use",
"PROMPT": "Specify a path or URL to a SSL CA certificate to use",
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_CACERT",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-profile",
"USAGE": ("If required specify the profile name that should be "
"used as an identifier for the system "
"in RHN Satellite"),
"PROMPT": ("If required specify the profile name that should be "
"used as an identifier for the system "
"in RHN Satellite"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_PROFILE",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-flags",
"USAGE": ("Comma separated list of flags passed to rhnreg_ks. "
"Valid flags are: novirtinfo, norhnsd, nopackages"),
"PROMPT": ("Enter comma separated list of flags passed "
"to rhnreg_ks"),
"OPTION_LIST": ['novirtinfo', 'norhnsd', 'nopackages'],
"VALIDATORS": [validators.validate_multi_options],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_FLAGS",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-proxy-host",
"USAGE": "Specify a HTTP proxy to use with RHN Satellite",
"PROMPT": "Specify a HTTP proxy to use with RHN Satellite",
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_PROXY",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
],
"SATELLITE_PROXY": [
{"CMD_OPTION": "rhn-satellite-proxy-username",
"USAGE": ("Specify a username to use with an authenticated "
"HTTP proxy"),
"PROMPT": ("Specify a username to use with an authenticated "
"HTTP proxy"),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_PROXY_USER",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False},
{"CMD_OPTION": "rhn-satellite-proxy-password",
"USAGE": ("Specify a password to use with an authenticated "
"HTTP proxy."),
"PROMPT": ("Specify a password to use with an authenticated "
"HTTP proxy."),
"OPTION_LIST": [],
"DEFAULT_VALUE": "",
"MASK_INPUT": True,
"LOOSE_VALIDATION": False,
"CONF_NAME": "CONFIG_SATELLITE_PROXY_PW",
"USE_DEFAULT": False,
"NEED_CONFIRM": False,
"CONDITION": False}
]
}
def filled_rhsm(config):
return bool(config.get('CONFIG_RH_USER'))
def filled_rhsm_proxy(config):
return bool(config.get('CONFIG_RH_PROXY'))
def filled_satellite(config):
return bool(config.get('CONFIG_SATELLITE_URL'))
def filled_satellite_proxy(config):
return bool(config.get('CONFIG_SATELLITE_PROXY'))
conf_groups = [
{"GROUP_NAME": "SERVERPREPARE",
"DESCRIPTION": "Server Prepare Configs ",
"PRE_CONDITION": lambda x: 'yes',
"PRE_CONDITION_MATCH": "yes",
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True},
]
config = controller.CONF
if (is_all_in_one(config) and is_rhel()) or not is_all_in_one(config):
conf_groups.extend([
{"GROUP_NAME": "RHEL",
"DESCRIPTION": "RHEL config",
"PRE_CONDITION": lambda x: 'yes',
"PRE_CONDITION_MATCH": "yes",
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True},
{"GROUP_NAME": "RHSM",
"DESCRIPTION": "RH subscription manager config",
"PRE_CONDITION": filled_rhsm,
"PRE_CONDITION_MATCH": True,
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True},
{"GROUP_NAME": "RHSM_PROXY",
"DESCRIPTION": "RH subscription manager proxy config",
"PRE_CONDITION": filled_rhsm_proxy,
"PRE_CONDITION_MATCH": True,
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True},
{"GROUP_NAME": "SATELLITE",
"DESCRIPTION": "RHN Satellite config",
"PRE_CONDITION": filled_satellite,
"PRE_CONDITION_MATCH": True,
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True},
{"GROUP_NAME": "SATELLITE_PROXY",
"DESCRIPTION": "RHN Satellite proxy config",
"PRE_CONDITION": filled_satellite_proxy,
"PRE_CONDITION_MATCH": True,
"POST_CONDITION": False,
"POST_CONDITION_MATCH": True}
])
for group in conf_groups:
params = conf_params[group["GROUP_NAME"]]
controller.addGroup(group, params)
def initSequences(controller):
preparesteps = [
{'title': 'Preparing servers', 'functions': [server_prep]}
]
controller.addSequence("Preparing servers", [], [], preparesteps)
# ------------------------- helper functions -------------------------
def is_rhel():
return 'Red Hat Enterprise Linux' in platform.linux_distribution()[0]
def run_rhn_reg(host, server_url, username=None, password=None,
cacert=None, activation_key=None, profile_name=None,
proxy_host=None, proxy_user=None, proxy_pass=None,
flags=None):
"""
Registers given host to given RHN Satellite server. To successfully
register either activation_key or username/password is required.
"""
logging.debug('Setting RHN Satellite server: %s.' % locals())
mask = []
cmd = ['/usr/sbin/rhnreg_ks']
server = utils.ScriptRunner(host)
# check satellite server url
    server_url = (server_url if server_url.rstrip('/').endswith('/XMLRPC')
                  else '%s/XMLRPC' % server_url)
cmd.extend(['--serverUrl', server_url])
if activation_key:
cmd.extend(['--activationkey', activation_key])
elif username:
cmd.extend(['--username', username])
if password:
cmd.extend(['--password', password])
mask.append(password)
else:
raise exceptions.InstallError('Either RHN Satellite activation '
'key or username/password must '
'be provided.')
if cacert:
# use and if required download given certificate
location = "/etc/sysconfig/rhn/%s" % os.path.basename(cacert)
if not os.path.isfile(location):
logging.debug('Downloading cacert from %s.' % server_url)
wget_cmd = ('ls %(location)s &> /dev/null && echo -n "" || '
'wget -nd --no-check-certificate --timeout=30 '
'--tries=3 -O "%(location)s" "%(cacert)s"' %
locals())
server.append(wget_cmd)
cmd.extend(['--sslCACert', location])
if profile_name:
cmd.extend(['--profilename', profile_name])
if proxy_host:
cmd.extend(['--proxy', proxy_host])
if proxy_user:
cmd.extend(['--proxyUser', proxy_user])
if proxy_pass:
cmd.extend(['--proxyPassword', proxy_pass])
mask.append(proxy_pass)
flags = flags or []
flags.append('force')
for i in flags:
cmd.append('--%s' % i)
server.append(' '.join(cmd))
server.append('yum clean metadata')
server.execute(mask_list=mask)
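
# For reference (illustrative, placeholder values): with an activation key the
# command assembled by run_rhn_reg() resembles
#   /usr/sbin/rhnreg_ks --serverUrl https://sat.example.com/XMLRPC \
#       --activationkey 1-abcdef --force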
def run_rhsm_reg(host, username, password, config, optional=False,
                 proxy_server=None, proxy_port=None, proxy_user=None,
                 proxy_password=None):
"""
Registers given host to Red Hat Repositories via subscription manager.
"""
releasever = config['HOST_DETAILS'][host]['release'].split('.')[0]
server = utils.ScriptRunner(host)
# configure proxy if it is necessary
if proxy_server:
cmd = ('subscription-manager config '
'--server.proxy_hostname=%(proxy_server)s '
'--server.proxy_port=%(proxy_port)s')
if proxy_user:
cmd += (' --server.proxy_user=%(proxy_user)s '
'--server.proxy_password=%(proxy_password)s')
server.append(cmd % locals())
# register host
cmd = ('subscription-manager register --username=\"%s\" '
'--password=\"%s\" --autosubscribe || true')
server.append(cmd % (username, password.replace('"', '\\"')))
# subscribe to required channel
cmd = ('subscription-manager list --consumed | grep -i openstack || '
'subscription-manager subscribe --pool %s')
pool = ("$(subscription-manager list --available"
" | grep -m1 -A15 'Red Hat Enterprise Linux OpenStack Platform'"
" | grep -i 'Pool ID:' | awk '{print $3}')")
server.append(cmd % pool)
if optional:
server.append("subscription-manager repos "
"--enable rhel-%s-server-optional-rpms" % releasever)
server.append("subscription-manager repos "
"--enable rhel-%s-server-openstack-5.0-rpms" % releasever)
# mrg channel naming is a big mess
if releasever == '7':
mrg_prefix = 'rhel-x86_64-server-7'
elif releasever == '6':
mrg_prefix = 'rhel-6-server'
server.append("subscription-manager repos "
"--enable %s-mrg-messaging-2-rpms" % mrg_prefix)
server.append("yum clean all")
server.append("rpm -q --whatprovides yum-utils || "
"yum install -y yum-utils")
server.append("yum clean metadata")
server.execute(mask_list=[password])
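
# For reference (illustrative): registration reduces to
#   subscription-manager register --username=... --password=... --autosubscribe
# followed by subscribing to the first available OpenStack pool and enabling
# the release-specific optional, OpenStack and MRG messaging repositories.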
def manage_epel(host, config):
"""
Installs and/or enables EPEL repo if it is required or disables it if it
is not required.
"""
if config['HOST_DETAILS'][host]['os'] in ('Fedora', 'Unknown'):
return
# yum's $releasever can be non numeric on RHEL, so interpolate here
releasever = config['HOST_DETAILS'][host]['release'].split('.')[0]
mirrors = ('https://mirrors.fedoraproject.org/metalink?repo=epel-%s&'
'arch=$basearch' % releasever)
server = utils.ScriptRunner(host)
if config['CONFIG_USE_EPEL'] == 'y':
server.append('REPOFILE=$(mktemp)')
server.append('cat /etc/yum.conf > $REPOFILE')
server.append("echo -e '[packstack-epel]\nname=packstack-epel\n"
"enabled=1\nmirrorlist=%(mirrors)s' >> $REPOFILE"
% locals())
server.append('( rpm -q --whatprovides epel-release ||'
' yum install -y --nogpg -c $REPOFILE epel-release ) '
'|| true')
server.append('rm -rf $REPOFILE')
try:
server.execute()
except exceptions.ScriptRuntimeError as ex:
msg = 'Failed to set EPEL repo on host %s:\n%s' % (host, ex)
raise exceptions.ScriptRuntimeError(msg)
# if there's an epel repo explicitly enables or disables it
# according to: CONFIG_USE_EPEL
if config['CONFIG_USE_EPEL'] == 'y':
cmd = 'enable'
enabled = '(1|True)'
else:
cmd = 'disable'
enabled = '(0|False)'
server.clear()
server.append('rpm -q yum-utils || yum -y install yum-utils')
server.append('yum-config-manager --%(cmd)s epel' % locals())
rc, out = server.execute()
# yum-config-manager returns 0 always, but returns current setup
# if succeeds
match = re.search('enabled\s*\=\s*%(enabled)s' % locals(), out)
if match:
return
msg = 'Failed to set EPEL repo on host %s:\n'
if cmd == 'enable':
# fail in case user wants to have EPEL enabled
msg += ('RPM file seems to be installed, but appropriate repo file is '
'probably missing in /etc/yum.repos.d/')
raise exceptions.ScriptRuntimeError(msg % host)
else:
# just warn in case disabling failed which might happen when EPEL repo
# is not installed at all
msg += 'This is OK in case you don\'t want EPEL installed and enabled.'
        # TODO: fill in the logger name once logging is refactored.
logger = logging.getLogger()
logger.warn(msg % host)
def manage_rdo(host, config):
"""
Installs and enables RDO repo on host in case it is installed locally.
"""
try:
cmd = "rpm -q rdo-release --qf='%{version}-%{release}.%{arch}\n'"
rc, out = utils.execute(cmd, use_shell=True)
except exceptions.ExecuteRuntimeError:
# RDO repo is not installed, so we don't need to continue
return
# We are installing RDO. EPEL is a requirement, so enable it, overriding
# any configured option
config['CONFIG_USE_EPEL'] = 'y'
match = re.match(r'^(?P<version>\w+)\-(?P<release>\d+\.[\d\w]+)\n', out)
version, release = match.group('version'), match.group('release')
rdo_url = ("http://rdo.fedorapeople.org/openstack/openstack-%(version)s/"
"rdo-release-%(version)s-%(release)s.rpm" % locals())
server = utils.ScriptRunner(host)
server.append("(rpm -q 'rdo-release-%(version)s' ||"
" yum install -y --nogpg %(rdo_url)s) || true"
% locals())
try:
server.execute()
except exceptions.ScriptRuntimeError as ex:
msg = 'Failed to set RDO repo on host %s:\n%s' % (host, ex)
raise exceptions.ScriptRuntimeError(msg)
reponame = 'openstack-%s' % version
server.clear()
server.append('yum-config-manager --enable %(reponame)s' % locals())
# yum-config-manager returns 0 always, but returns current setup
# if succeeds
rc, out = server.execute()
match = re.search('enabled\s*=\s*(1|True)', out)
if not match:
msg = ('Failed to set RDO repo on host %s:\nRPM file seems to be '
'installed, but appropriate repo file is probably missing '
'in /etc/yum.repos.d/' % host)
raise exceptions.ScriptRuntimeError(msg)
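# Editor's addition (hedged sketch): how the rdo-release version parsing in
# manage_rdo() behaves on a hypothetical `rpm -q` output line. Safe to run
# standalone; it touches no host.
def _example_parse_rdo_version():
    out = 'havana-7.fc20\n'  # hypothetical output of the rpm query above
    match = re.match(r'^(?P<version>\w+)\-(?P<release>\d+\.[\d\w]+)\n', out)
    return match.group('version'), match.group('release')  # ('havana', '7.fc20')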
# -------------------------- step functions --------------------------
def server_prep(config, messages):
rh_username = None
sat_url = None
if is_rhel():
rh_username = config.get("CONFIG_RH_USER")
rh_password = config.get("CONFIG_RH_PW")
sat_registered = set()
sat_url = config["CONFIG_SATELLITE_URL"].strip()
if sat_url:
flag_list = config["CONFIG_SATELLITE_FLAGS"].split(',')
sat_flags = [i.strip() for i in flag_list if i.strip()]
sat_proxy_user = config.get("CONFIG_SATELLITE_PROXY_USER", '')
sat_proxy_pass = config.get("CONFIG_SATELLITE_PROXY_PW", '')
sat_args = {
'username': config["CONFIG_SATELLITE_USER"].strip(),
'password': config["CONFIG_SATELLITE_PW"].strip(),
'cacert': config["CONFIG_SATELLITE_CACERT"].strip(),
'activation_key': config["CONFIG_SATELLITE_AKEY"].strip(),
'profile_name': config["CONFIG_SATELLITE_PROFILE"].strip(),
'proxy_host': config["CONFIG_SATELLITE_PROXY"].strip(),
'proxy_user': sat_proxy_user.strip(),
'proxy_pass': sat_proxy_pass.strip(),
'flags': sat_flags
}
for hostname in filtered_hosts(config):
# Subscribe to Red Hat Repositories if configured
if rh_username:
run_rhsm_reg(hostname, rh_username, rh_password,
optional=(config.get('CONFIG_RH_OPTIONAL') == 'y'),
proxy_server=config.get('CONFIG_RH_PROXY'),
proxy_port=config.get('CONFIG_RH_PROXY_PORT'),
proxy_user=config.get('CONFIG_RH_PROXY_USER'),
proxy_password=config.get('CONFIG_RH_PROXY_PASSWORD'))
# Subscribe to RHN Satellite if configured
if sat_url and hostname not in sat_registered:
run_rhn_reg(hostname, sat_url, **sat_args)
sat_registered.add(hostname)
server = utils.ScriptRunner(hostname)
server.append('rpm -q --whatprovides yum-utils || '
'yum install -y yum-utils')
if is_rhel():
# Installing rhos-log-collector if it is available from yum.
server.append('yum list available rhos-log-collector && '
'yum -y install rhos-log-collector || '
'echo "no rhos-log-collector available"')
server.execute()
# enable RDO if it is installed locally
manage_rdo(hostname, config)
# enable or disable EPEL according to configuration
manage_epel(hostname, config)
# Add yum repositories if configured
CONFIG_REPO = config["CONFIG_REPO"].strip()
if CONFIG_REPO:
for i, repourl in enumerate(CONFIG_REPO.split(',')):
reponame = 'packstack_%d' % i
server.append('echo "[%(reponame)s]\nname=%(reponame)s\n'
'baseurl=%(repourl)s\nenabled=1\n'
'priority=1\ngpgcheck=0"'
' > /etc/yum.repos.d/%(reponame)s.repo'
% locals())
server.append("yum clean metadata")
server.execute()
| apache-2.0 | 8,605,950,510,959,501,000 | 38.710949 | 79 | 0.535144 | false |
turdusmerula/kipartman | kipartman/dialogs/dialog_edit_category.py | 1 | 2893 | # -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Jul 12 2017)
## http://www.wxformbuilder.org/
##
## PLEASE DO "NOT" EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class DialogEditCategory
###########################################################################
class DialogEditCategory ( wx.Dialog ):
def __init__( self, parent ):
wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 332,159 ), style = wx.DEFAULT_DIALOG_STYLE )
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer4 = wx.BoxSizer( wx.VERTICAL )
fgSizer2 = wx.FlexGridSizer( 0, 2, 0, 0 )
fgSizer2.AddGrowableCol( 1 )
fgSizer2.SetFlexibleDirection( wx.BOTH )
fgSizer2.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )
self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, u"Category", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText1.Wrap( -1 )
fgSizer2.Add( self.m_staticText1, 1, wx.RIGHT|wx.LEFT|wx.ALIGN_CENTER_VERTICAL, 5 )
self.text_name = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
fgSizer2.Add( self.text_name, 1, wx.ALL|wx.EXPAND, 5 )
self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"Description", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText2.Wrap( -1 )
fgSizer2.Add( self.m_staticText2, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.text_description = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
fgSizer2.Add( self.text_description, 1, wx.ALL|wx.EXPAND, 5 )
bSizer4.Add( fgSizer2, 1, wx.EXPAND, 5 )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
bSizer2 = wx.BoxSizer( wx.HORIZONTAL )
self.button_validate = wx.Button( self, wx.ID_OK, u"Add", wx.DefaultPosition, wx.DefaultSize, 0 )
self.button_validate.SetDefault()
bSizer2.Add( self.button_validate, 0, wx.ALL, 5 )
self.button_cancel = wx.Button( self, wx.ID_CANCEL, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer2.Add( self.button_cancel, 0, wx.ALL, 5 )
bSizer1.Add( bSizer2, 0, wx.TOP|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer4.Add( bSizer1, 0, wx.EXPAND|wx.ALIGN_RIGHT, 5 )
self.SetSizer( bSizer4 )
self.Layout()
self.Centre( wx.BOTH )
# Connect Events
self.button_validate.Bind( wx.EVT_BUTTON, self.onValidateClick )
self.button_cancel.Bind( wx.EVT_BUTTON, self.onCancelClick )
def __del__( self ):
pass
	# Virtual event handlers, override them in your derived class
def onValidateClick( self, event ):
event.Skip()
def onCancelClick( self, event ):
event.Skip()
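# Editor's addition (hedged usage sketch): wxFormBuilder-generated classes
# are meant to be subclassed rather than edited; override the virtual
# handlers declared above. The subclass below is illustrative only.
class _ExampleEditCategory(DialogEditCategory):
    def onValidateClick(self, event):
        name = self.text_name.GetValue()
        description = self.text_description.GetValue()
        print('category: %s (%s)' % (name, description))
        event.Skip()  # let the wx.ID_OK default handling close the dialog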
| gpl-3.0 | -646,511,858,100,546,600 | 32.639535 | 163 | 0.617007 | false |
FrodeSolheim/fs-uae-launcher | launcher/ui/floppyselector.py | 1 | 7141 | import os
import fsui
from fsbc.paths import Paths
from fsgamesys.context import fsgs
from fsgamesys.options.option import Option
from fsgamesys.platforms import PLATFORM_ATARI
from launcher.context import get_config
from launcher.helpers.cdmanager import CDManager
from launcher.helpers.floppymanager import FloppyManager
from launcher.i18n import gettext
from launcher.ui.behaviors.platformbehavior import AMIGA_PLATFORMS
from launcher.ui.IconButton import IconButton
from launcher.ui.LauncherFilePicker import LauncherFilePicker
class FloppySelector(fsui.Panel):
FLOPPY_MODE = 0
CD_MODE = 1
TAPE_MODE = 2
CARTRIDGE_MODE = 3
def __init__(self, parent, drive, show_path=True):
fsui.Panel.__init__(self, parent)
self.mode = FloppySelector.FLOPPY_MODE
self.show_path = show_path
self.drive = drive
self.config_key = ""
self.config_key_sha1 = ""
self.config_key_implicit = ""
self.config_value_implicit = ""
self.__platform = ""
self.text_field = fsui.TextField(self, "", read_only=True)
self.browse_button = IconButton(self, "browse_folder_16.png")
self.browse_button.set_tooltip(gettext("Browse for file"))
self.browse_button.activated.connect(self.on_browse)
self.eject_button = IconButton(self, "eject_button.png")
# AmigaEnableBehavior(self.eject_button)
self.eject_button.set_tooltip(gettext("Eject"))
self.eject_button.activated.connect(self.on_eject)
self.layout = fsui.HorizontalLayout()
self.layout.add(self.text_field, expand=True)
self.layout.add_spacer(5)
self.layout.add(self.eject_button, fill=True)
self.layout.add_spacer(5)
self.layout.add(self.browse_button, fill=True)
# Config.add_listener(self)
# fsgs.signal.connect(self.on_config,
# "fsgs:config:floppy_drive_{0}".format(self.drive),
# "fsgs:config:cdrom_drive_{0}".format(self.drive))
# fsgs.signal.connect("config", self.on_config)
get_config(self).attach(self.__on_config)
self.on_config(Option.PLATFORM, fsgs.config.get(Option.PLATFORM))
self.update_config_key()
def on_browse(self):
if self.mode == self.CD_MODE:
title = gettext("Choose CD-ROM Image")
# default_dir = FSGSDirectories.get_cdroms_dir()
media_type = "cd"
elif self.mode == self.TAPE_MODE:
title = gettext("Choose Tape Image")
media_type = "tape"
elif self.mode == self.CARTRIDGE_MODE:
title = gettext("Choose Cartridge Image")
media_type = "cartridge"
else:
title = gettext("Choose Floppy Image")
# default_dir = FSGSDirectories.get_floppies_dir()
media_type = "floppy"
dialog = LauncherFilePicker(
self.window,
title,
media_type,
get_config(self).get(self.config_key),
)
if not dialog.show_modal():
return
path = dialog.get_path()
if self.mode == self.CD_MODE:
fsgs.amiga.insert_cd(self.drive, path)
elif self.mode == self.FLOPPY_MODE:
fsgs.amiga.insert_floppy(self.drive, path)
else:
fsgs.config.set(self.config_key, Paths.contract_path(path))
def __on_config(self, event):
self.on_config(event.key, event.value)
def on_config(self, key, value):
if key == self.config_key:
self.text_field.set_text(value)
dir_path, name = os.path.split(value)
if dir_path:
if self.show_path:
path = "{0} ({1})".format(name, dir_path)
else:
path = name
else:
path = name
self.text_field.set_text(path)
self.text_field.set_cursor_position(0)
self.eject_button.set_enabled(bool(value))
elif key == self.config_key_implicit:
self.config_value_implicit = value
self.update_enable()
elif key == Option.PLATFORM:
self.__platform = value
self.update_enable()
def onDestroy(self):
# fsgs.signal.disconnect(
# "fsgs:config:floppy_drive_{0}".format(self.drive),
# self.on_config_floppy_drive)
# fsgs.signal.disconnect("config", self.on_config)
get_config(self).detach(self.__on_config)
super().onDestroy()
def on_eject(self):
config = get_config(self)
if self.mode == self.CD_MODE:
CDManager.eject(self.drive, config=config)
elif self.mode == self.FLOPPY_MODE:
FloppyManager.eject(self.drive, config=config)
else:
fsgs.config.set(self.config_key, "")
def set_mode(self, mode):
self.mode = mode
self.update_config_key()
def update_config_key(self):
if self.mode == self.CD_MODE:
self.config_key = "cdrom_drive_{}".format(self.drive)
self.config_key_sha1 = "x_cdrom_drive_{}_sha1".format(self.drive)
self.config_key_implicit = "__implicit_cdrom_drive_count"
elif self.mode == self.TAPE_MODE:
self.config_key = "tape_drive_{}".format(self.drive)
self.config_key_sha1 = "x_tape_drive_{}_sha1".format(self.drive)
self.config_key_implicit = "__implicit_tape_drive_count"
elif self.mode == self.CARTRIDGE_MODE:
if self.drive == 0:
self.config_key = Option.CARTRIDGE_SLOT
self.config_key_sha1 = "x_cartridge_slot_sha1"
else:
self.config_key = "cartridge_drive_{}".format(self.drive)
self.config_key_sha1 = "x_cartridge_drive_{}_sha1".format(
self.drive
)
self.config_key_implicit = "__implicit_cartridge_drive_count"
else:
self.config_key = "floppy_drive_{}".format(self.drive)
self.config_key_sha1 = "x_floppy_drive_{}_sha1".format(self.drive)
self.config_key_implicit = "__implicit_uae_floppy{}type".format(
self.drive
)
config = get_config(self)
self.on_config(self.config_key, config.get(self.config_key))
self.on_config(
self.config_key_implicit,
config.get(self.config_key_implicit),
)
def update_enable(self):
if self.__platform in AMIGA_PLATFORMS:
if self.mode == self.CD_MODE:
self.text_field.set_enabled(self.config_value_implicit != "0")
elif self.mode == self.CARTRIDGE_MODE:
pass
else:
self.text_field.set_enabled(self.config_value_implicit != "-1")
else:
if (
self.__platform == PLATFORM_ATARI
and self.mode == self.FLOPPY_MODE
):
self.text_field.set_enabled(self.drive < 2)
else:
self.text_field.set_enabled(self.drive == 0)
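# Editor's addition (hedged sketch): the config keys update_config_key()
# derives for drive index 0, per mode (read off the mapping above).
def _example_config_keys():
    return {
        FloppySelector.FLOPPY_MODE: "floppy_drive_0",
        FloppySelector.CD_MODE: "cdrom_drive_0",
        FloppySelector.TAPE_MODE: "tape_drive_0",
        FloppySelector.CARTRIDGE_MODE: Option.CARTRIDGE_SLOT,
    }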
| gpl-2.0 | 8,819,031,893,213,337,000 | 37.187166 | 79 | 0.581711 | false |
the-virtual-brain/tvb-hpc | phase_plane_interactive/hackathon_cuda.py | 1 | 4443 | # Copyright 2017 TVB-HPC contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division, print_function
import math as m
import time
import numpy as _lpy_np
import numba.cuda as _lpy_ncu
import numba as _lpy_numba
from tvb_hpc import utils, network, model
from typing import List
# TODO Add call to the generated numbacuda code
LOG = utils.getLogger('tvb_hpc')
def make_data():
c = network.Connectivity.hcp0()
return c.nnode, c.lengths, c.nnz, c.row, c.col, c.wnz, c.nz, c.weights
def prep_arrays(nsims, nnode: int) -> List[_lpy_np.ndarray]:
"""
Prepare arrays for use with this model.
"""
dtype = _lpy_np.float32
arrs: List[_lpy_np.ndarray] = []
for key in 'input drift diffs'.split():
shape = nsims * nnode * 1
arrs.append(_lpy_np.zeros(shape, dtype))
for i, (lo, hi) in enumerate([(0, 2 * _lpy_np.pi)]):
        state = _lpy_np.ones(nsims * nnode)
        # alternative: _lpy_np.random.uniform(float(lo), float(hi), size=nsims * nnode)
arrs.append(state)
param = _lpy_np.ones((nnode * 1), dtype)
arrs.append(param)
return arrs
def run_all(args):
j, speed, coupling, nnode, lengths, nz, nnz, row, col, wnz = args
dt = 1.0
lnz = []
for i in range(len(speed)):
lnz.append((lengths[nz] / speed[i] / dt).astype(_lpy_np.uintc))
#print(_lpy_np.shape(lnz))
#flat_lnz = [item for sublist in lnz for item in sublist]
#flat_lnz = _lpy_np.asarray(flat_lnz)
flat_lnz = _lpy_np.reshape(lnz, (nnz*len(speed)))
input, drift, diffs, state, param = prep_arrays(len(coupling)*len(speed),nnode)
obsrv = _lpy_np.zeros((len(coupling)*len(speed) * (max(flat_lnz) + 3 + 4000) * nnode * 2), _lpy_np.float32)
trace = _lpy_np.zeros((len(coupling)*len(speed), 400, nnode), _lpy_np.float32)
threadsperblock = len(coupling)
blockspergrid = len(speed)
for i in range(400):
Kuramoto_and_Network_and_EulerStep_inner[blockspergrid, threadsperblock](10, nnode, (max(flat_lnz) + 3 + 4000), state, input, param, drift, diffs, obsrv, nnz, flat_lnz, row, col, wnz, coupling, i * 10)
o = obsrv
o =_lpy_np.reshape(o,(len(coupling)*len(speed), (max(flat_lnz) + 3 + 4000), nnode, 2))
trace[:,i,:] = o[:,i * 10:(i + 1) * 10, :, 0].sum(axis=1)
return trace
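# Editor's addition (hedged sketch): run_all() maps the parameter grid onto
# the CUDA launch so each block covers one conduction speed and each thread
# one coupling strength; the sizes below are hypothetical.
def _example_launch_shape(n_speeds=8, n_couplings=8):
    blockspergrid, threadsperblock = n_speeds, n_couplings
    return blockspergrid, threadsperblock, blockspergrid * threadsperblock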
def run():
_lpy_ncu.select_device(0)
LOG.info(_lpy_ncu.gpus)
#utils.default_target = NumbaCudaTarget
nnode, lengths, nnz, row, col, wnz, nz, weights = make_data()
# choose param space
nc, ns = 8, 8
couplings = _lpy_np.logspace(0, 1.0, nc)
speeds = _lpy_np.logspace(0.0, 2.0, ns)
# Make parallel over speed anc coupling
start = time.time()
trace = run_all((0, speeds, couplings, nnode, lengths, nz, nnz, row, col, wnz))
end = time.time()
print ("Finished simulation successfully in:")
print(end - start)
print ("Checking correctness of results")
# check correctness
n_work_items = nc * ns
r, c = _lpy_np.triu_indices(nnode, 1)
win_size = 200 # 2s
tavg = _lpy_np.transpose(trace, (1, 2, 0))
win_tavg = tavg.reshape((-1, win_size) + tavg.shape[1:])
err = _lpy_np.zeros((len(win_tavg), n_work_items))
for i, tavg_ in enumerate(win_tavg):
for j in range(n_work_items):
fc = _lpy_np.corrcoef(tavg_[:, :, j].T)
err[i, j] = ((fc[r, c] - weights[r, c])**2).sum()
# look at 2nd 2s window (converges quickly)
err_ = err[-1].reshape((speeds.size, couplings.size))
# change on fc-sc metric wrt. speed & coupling strength
derr_speed = _lpy_np.diff(err_.mean(axis=1)).sum()
derr_coupl = _lpy_np.diff(err_.mean(axis=0)).sum()
LOG.info('derr_speed=%f, derr_coupl=%f', derr_speed, derr_coupl)
print (derr_speed)
assert derr_speed > 350.0
assert derr_coupl < -500.0
print ("Results are correct")
if __name__ == '__main__':
run()
| apache-2.0 | -1,578,581,880,672,073,700 | 38.318584 | 209 | 0.622327 | false |
ovnicraft/openerp-server | openerp/tools/amount_to_text_en.py | 1 | 5115 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from translate import _
_logger = logging.getLogger(__name__)
#-------------------------------------------------------------
#ENGLISH
#-------------------------------------------------------------
to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen',
'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' )
tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety')
denom = ( '',
'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn(val):
"""convert a value < 100 to English.
"""
if val < 20:
return to_19[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19[val % 10]
return dcap
def _convert_nnn(val):
"""
convert a value < 1000 to english, special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19[rem] + ' Hundred'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn(mod)
return word
def english_number(val):
if val < 100:
return _convert_nn(val)
if val < 1000:
return _convert_nnn(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn(l) + ' ' + denom[didx]
if r > 0:
ret = ret + ', ' + english_number(r)
return ret
def amount_to_text(number, currency):
number = '%.2f' % number
units_name = currency
    parts = str(number).split('.')
    start_word = english_number(int(parts[0]))
    end_word = english_number(int(parts[1]))
    cents_number = int(parts[1])
    cents_name = (cents_number > 1) and 'Cents' or 'Cent'
    return ' '.join(filter(None, [start_word, units_name, (start_word or units_name) and (end_word or cents_name) and 'and', end_word, cents_name]))
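# Editor's addition (hedged): hand-checked examples of the converters above.
# Note that the module-level name `amount_to_text` is rebound by the generic
# dispatcher defined below; `_translate_funcs` captures this English version.
def _example_english_number():
    assert english_number(52) == 'Fifty-Two'
    assert english_number(1654) == 'One Thousand, Six Hundred Fifty-Four'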
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'en' : amount_to_text}
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='en', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: thousands six cent cinquante-quatre.
"""
import openerp.loglevels as loglevels
# if nbr > 10000000:
# _logger.warning(_("Number too large '%d', can not translate it"))
# return str(nbr)
    if lang not in _translate_funcs:
_logger.warning(_("no translation function found for lang: '%s'"), lang)
#TODO: (default should be en) same as above
lang = 'en'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
    if len(argv) < 2:
        for i in range(1, 200):
            print i, ">>", amount_to_text(i, lang)
        for i in range(200, 999999, 139):
            print i, ">>", amount_to_text(i, lang)
    else:
        print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,974,910,350,022,178,300 | 37.171642 | 148 | 0.539198 | false |
indautgrp/erpnext | erpnext/accounts/report/balance_sheet/balance_sheet.py | 1 | 4666 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)
def execute(filters=None):
period_list = get_period_list(filters.from_fiscal_year, filters.to_fiscal_year, filters.periodicity)
asset = get_data(filters.company, "Asset", "Debit", period_list, only_current_fiscal_year=False)
liability = get_data(filters.company, "Liability", "Credit", period_list, only_current_fiscal_year=False)
equity = get_data(filters.company, "Equity", "Credit", period_list, only_current_fiscal_year=False)
provisional_profit_loss, total_credit = get_provisional_profit_loss(asset, liability, equity,
period_list, filters.company)
message, opening_balance = check_opening_balance(asset, liability, equity)
data = []
data.extend(asset or [])
data.extend(liability or [])
data.extend(equity or [])
if opening_balance and round(opening_balance,2) !=0:
unclosed ={
"account_name": "'" + _("Unclosed Fiscal Years Profit / Loss (Credit)") + "'",
"account": "'" + _("Unclosed Fiscal Years Profit / Loss (Credit)") + "'",
"warn_if_negative": True,
"currency": frappe.db.get_value("Company", filters.company, "default_currency")
}
for period in period_list:
unclosed[period.key] = opening_balance
if provisional_profit_loss:
provisional_profit_loss[period.key] = provisional_profit_loss[period.key] - opening_balance
unclosed["total"]=opening_balance
data.append(unclosed)
if provisional_profit_loss:
data.append(provisional_profit_loss)
if total_credit:
data.append(total_credit)
columns = get_columns(filters.periodicity, period_list, company=filters.company)
chart = get_chart_data(columns, asset, liability, equity)
return columns, data, message, chart
def get_provisional_profit_loss(asset, liability, equity, period_list, company):
provisional_profit_loss = {}
total_row = {}
if asset and (liability or equity):
total = total_row_total=0
currency = frappe.db.get_value("Company", company, "default_currency")
total_row = {
"account_name": "'" + _("Total (Credit)") + "'",
"account": "'" + _("Total (Credit)") + "'",
"warn_if_negative": True,
"currency": currency
}
has_value = False
for period in period_list:
effective_liability = 0.0
if liability:
effective_liability += flt(liability[-2].get(period.key))
if equity:
effective_liability += flt(equity[-2].get(period.key))
provisional_profit_loss[period.key] = flt(asset[-2].get(period.key)) - effective_liability
total_row[period.key] = effective_liability + provisional_profit_loss[period.key]
if provisional_profit_loss[period.key]:
has_value = True
total += flt(provisional_profit_loss[period.key])
provisional_profit_loss["total"] = total
total_row_total += flt(total_row[period.key])
total_row["total"] = total_row_total
if has_value:
provisional_profit_loss.update({
"account_name": "'" + _("Provisional Profit / Loss (Credit)") + "'",
"account": "'" + _("Provisional Profit / Loss (Credit)") + "'",
"warn_if_negative": True,
"currency": currency
})
return provisional_profit_loss, total_row
def check_opening_balance(asset, liability, equity):
# Check if previous year balance sheet closed
opening_balance = 0
float_precision = cint(frappe.db.get_default("float_precision")) or 2
if asset:
opening_balance = flt(asset[0].get("opening_balance", 0), float_precision)
if liability:
opening_balance -= flt(liability[0].get("opening_balance", 0), float_precision)
if equity:
opening_balance -= flt(equity[0].get("opening_balance", 0), float_precision)
if opening_balance:
return _("Previous Financial Year is not closed"),opening_balance
return None,None
def get_chart_data(columns, asset, liability, equity):
x_intervals = ['x'] + [d.get("label") for d in columns[2:]]
asset_data, liability_data, equity_data = [], [], []
for p in columns[2:]:
if asset:
asset_data.append(asset[-2].get(p.get("fieldname")))
if liability:
liability_data.append(liability[-2].get(p.get("fieldname")))
if equity:
equity_data.append(equity[-2].get(p.get("fieldname")))
columns = [x_intervals]
if asset_data:
columns.append(["Assets"] + asset_data)
if liability_data:
columns.append(["Liabilities"] + liability_data)
if equity_data:
columns.append(["Equity"] + equity_data)
return {
"data": {
'x': 'x',
'columns': columns
}
}
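# Editor's note (hedged): the dict returned above follows the x/columns
# series layout frappe's charting used at the time, e.g.:
# {'data': {'x': 'x', 'columns': [['x', 'FY2015', 'FY2016'],
#                                 ['Assets', 100.0, 120.0],
#                                 ['Liabilities', 40.0, 50.0]]}}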
| gpl-3.0 | 1,444,389,777,122,514,700 | 33.308824 | 106 | 0.692885 | false |
hmenke/espresso | testsuite/python/interactions_bonded_interface.py | 1 | 5394 | #
# Copyright (C) 2013-2018 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
from __future__ import print_function
import unittest as ut
import espressomd
import numpy as np
from tests_common import abspath
class ParticleProperties(ut.TestCase):
system = espressomd.System(box_l=[10.0, 10.0, 10.0])
# Particle id to work on
pid = 17
# Error tolerance when comparing arrays/tuples...
tol = 1E-9
def bondsMatch(self, inType, outType, inParams, outParams):
"""Check, if the bond type set and gotten back as well as the bond
parameters set and gotten back match. Only check keys present in
inParams.
"""
if inType != outType:
return False
for k in list(inParams.keys()):
if k not in outParams:
return False
if outParams[k] != inParams[k]:
return False
return True
def setUp(self):
if not self.system.part.exists(self.pid):
            self.system.part.add(id=self.pid, pos=(0, 0, 0))
def generateTestForBondParams(_bondId, _bondClass, _params):
"""Generates test cases for checking bond parameters set and gotten back
from Es actually match. Only keys which are present in _params are checked
1st arg: Id of the bonded ia in Espresso to test on, i.e., 0,2,1...
2nd: Class of the bond potential to test, ie.e, FeneBond, HarmonicBond
3rd: Bond parameters as dictionary, i.e., {"k"=1.,"r_0"=0.
"""
bondId = _bondId
bondClass = _bondClass
params = _params
def func(self):
# This code is run at the execution of the generated function.
# It will use the state of the variables in the outer function,
# which was there, when the outer function was called
self.system.bonded_inter[bondId] = bondClass(**params)
outBond = self.system.bonded_inter[bondId]
tnIn = bondClass(**params).type_number()
tnOut = outBond.type_number()
outParams = outBond.params
self.assertTrue(
self.bondsMatch(
tnIn,
tnOut,
params,
outParams),
bondClass(
**params).type_name() +
": value set and value gotten back differ for bond id " +
str(bondId) +
": " +
params.__str__() +
" vs. " +
outParams.__str__())
return func
test_fene = generateTestForBondParams(
0, espressomd.interactions.FeneBond, {"r_0": 1.1, "k": 5.2, "d_r_max": 3.})
test_fene2 = generateTestForBondParams(
1, espressomd.interactions.FeneBond, {"r_0": 1.1, "k": 5.2, "d_r_max": 3.})
test_harmonic = generateTestForBondParams(
0, espressomd.interactions.HarmonicBond, {"r_0": 1.1, "k": 5.2})
test_harmonic2 = generateTestForBondParams(
0, espressomd.interactions.HarmonicBond, {"r_0": 1.1, "k": 5.2, "r_cut": 1.3})
if espressomd.has_features(["ROTATION"]):
test_harmonic_dumbbell = generateTestForBondParams(
0, espressomd.interactions.HarmonicDumbbellBond, {"k1": 1.1, "k2": 2.2, "r_0": 1.5})
test_harmonic_dumbbell2 = generateTestForBondParams(
0, espressomd.interactions.HarmonicDumbbellBond, {"k1": 1.1, "k2": 2.2, "r_0": 1.5, "r_cut": 1.9})
test_dihedral = generateTestForBondParams(
0, espressomd.interactions.Dihedral, {"mult": 3.0, "bend": 5.2, "phase": 3.})
if espressomd.has_features(["BOND_ANGLE"]):
test_angle_harm = generateTestForBondParams(
0, espressomd.interactions.AngleHarmonic, {"bend": 5.2, "phi0": 3.2})
test_angle_cos = generateTestForBondParams(
0, espressomd.interactions.AngleCosine, {"bend": 5.2, "phi0": 3.2})
test_angle_cossquare = generateTestForBondParams(
0, espressomd.interactions.AngleCossquare, {"bend": 5.2, "phi0": 0.})
if espressomd.has_features(["LENNARD_JONES"]):
test_subt_lj = generateTestForBondParams(
0, espressomd.interactions.SubtLJ, {})
if espressomd.has_features(["TABULATED"]):
test_tabulated = generateTestForBondParams(
0, espressomd.interactions.Tabulated, {"type": "distance",
"min": 1.,
"max": 2.,
"energy": [1., 2., 3.],
"force": [3., 4., 5.]})
if __name__ == "__main__":
ut.main()
| gpl-3.0 | -476,454,923,069,754,240 | 40.175573 | 110 | 0.586207 | false |
JohnHowland/kivy | kivy/core/window/window_pygame.py | 1 | 16443 | '''
Window Pygame: windowing provider based on Pygame
'''
__all__ = ('WindowPygame', )
# fail early if possible
import pygame
from kivy.compat import PY2
from kivy.core.window import WindowBase
from kivy.core import CoreCriticalException
from os import environ
from os.path import exists, join
from kivy.config import Config
from kivy import kivy_data_dir
from kivy.base import ExceptionManager
from kivy.logger import Logger
from kivy.base import stopTouchApp, EventLoop
from kivy.utils import platform, deprecated
from kivy.resources import resource_find
from kivy.clock import Clock
try:
android = None
if platform == 'android':
import android
except ImportError:
pass
# late binding
glReadPixels = GL_RGBA = GL_UNSIGNED_BYTE = None
class WindowPygame(WindowBase):
def create_window(self, *largs):
# ensure the mouse is still not up after window creation, otherwise, we
# have some weird bugs
self.dispatch('on_mouse_up', 0, 0, 'all', [])
# force display to show (available only for fullscreen)
displayidx = Config.getint('graphics', 'display')
        if 'SDL_VIDEO_FULLSCREEN_HEAD' not in environ and displayidx != -1:
environ['SDL_VIDEO_FULLSCREEN_HEAD'] = '%d' % displayidx
# init some opengl, same as before.
self.flags = pygame.HWSURFACE | pygame.OPENGL | pygame.DOUBLEBUF
# right now, activate resizable window only on linux.
        # on Windows / macOS, the OpenGL context is lost on resize, and we
        # need to reconstruct everything. Check #168 for the state of this work.
if platform in ('linux', 'macosx', 'win') and \
Config.getboolean('graphics', 'resizable'):
self.flags |= pygame.RESIZABLE
try:
pygame.display.init()
except pygame.error as e:
raise CoreCriticalException(e.message)
multisamples = Config.getint('graphics', 'multisamples')
if multisamples > 0:
pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLEBUFFERS, 1)
pygame.display.gl_set_attribute(pygame.GL_MULTISAMPLESAMPLES,
multisamples)
pygame.display.gl_set_attribute(pygame.GL_DEPTH_SIZE, 16)
pygame.display.gl_set_attribute(pygame.GL_STENCIL_SIZE, 1)
pygame.display.set_caption(self.title)
if self.position == 'auto':
self._pos = None
elif self.position == 'custom':
self._pos = self.left, self.top
else:
raise ValueError('position token in configuration accept only '
'"auto" or "custom"')
if self._fake_fullscreen:
if not self.borderless:
self.fullscreen = self._fake_fullscreen = False
elif not self.fullscreen or self.fullscreen == 'auto':
self.borderless = self._fake_fullscreen = False
if self.fullscreen == 'fake':
self.borderless = self._fake_fullscreen = True
Logger.warning("The 'fake' fullscreen option has been "
"deprecated, use Window.borderless or the "
"borderless Config option instead.")
if self.fullscreen == 'fake' or self.borderless:
Logger.debug('WinPygame: Set window to borderless mode.')
self.flags |= pygame.NOFRAME
# If no position set in borderless mode, we always need
# to set the position. So use 0, 0.
if self._pos is None:
self._pos = (0, 0)
environ['SDL_VIDEO_WINDOW_POS'] = '%d,%d' % self._pos
elif self.fullscreen in ('auto', True):
Logger.debug('WinPygame: Set window to fullscreen mode')
self.flags |= pygame.FULLSCREEN
elif self._pos is not None:
environ['SDL_VIDEO_WINDOW_POS'] = '%d,%d' % self._pos
        # never keep a None pos; applications using w.center would break.
self._pos = (0, 0)
# prepare keyboard
repeat_delay = int(Config.get('kivy', 'keyboard_repeat_delay'))
repeat_rate = float(Config.get('kivy', 'keyboard_repeat_rate'))
pygame.key.set_repeat(repeat_delay, int(1000. / repeat_rate))
# set window icon before calling set_mode
try:
filename_icon = self.icon or Config.get('kivy', 'window_icon')
if filename_icon == '':
logo_size = 32
if platform == 'macosx':
logo_size = 512
elif platform == 'win':
logo_size = 64
filename_icon = 'kivy-icon-{}.png'.format(logo_size)
filename_icon = resource_find(
join(kivy_data_dir, 'logo', filename_icon))
self.set_icon(filename_icon)
except:
Logger.exception('Window: cannot set icon')
# try to use mode with multisamples
try:
self._pygame_set_mode()
except pygame.error as e:
if multisamples:
Logger.warning('WinPygame: Video: failed (multisamples=%d)' %
multisamples)
Logger.warning('WinPygame: trying without antialiasing')
pygame.display.gl_set_attribute(
pygame.GL_MULTISAMPLEBUFFERS, 0)
pygame.display.gl_set_attribute(
pygame.GL_MULTISAMPLESAMPLES, 0)
multisamples = 0
try:
self._pygame_set_mode()
except pygame.error as e:
raise CoreCriticalException(e.message)
else:
raise CoreCriticalException(e.message)
info = pygame.display.Info()
self._size = (info.current_w, info.current_h)
#self.dispatch('on_resize', *self._size)
        # in order to debug future issues with pygame/display, show
        # more debug output.
Logger.debug('Window: Display driver ' + pygame.display.get_driver())
Logger.debug('Window: Actual window size: %dx%d',
info.current_w, info.current_h)
if platform != 'android':
# unsupported platform, such as android that doesn't support
# gl_get_attribute.
Logger.debug(
'Window: Actual color bits r%d g%d b%d a%d',
pygame.display.gl_get_attribute(pygame.GL_RED_SIZE),
pygame.display.gl_get_attribute(pygame.GL_GREEN_SIZE),
pygame.display.gl_get_attribute(pygame.GL_BLUE_SIZE),
pygame.display.gl_get_attribute(pygame.GL_ALPHA_SIZE))
Logger.debug(
'Window: Actual depth bits: %d',
pygame.display.gl_get_attribute(pygame.GL_DEPTH_SIZE))
Logger.debug(
'Window: Actual stencil bits: %d',
pygame.display.gl_get_attribute(pygame.GL_STENCIL_SIZE))
Logger.debug(
'Window: Actual multisampling samples: %d',
pygame.display.gl_get_attribute(pygame.GL_MULTISAMPLESAMPLES))
super(WindowPygame, self).create_window()
# set mouse visibility
pygame.mouse.set_visible(
Config.getboolean('graphics', 'show_cursor'))
        # if we are on the android platform, automatically create hooks
if android:
from kivy.support import install_android
install_android()
def close(self):
pygame.display.quit()
self.dispatch('on_close')
def on_title(self, instance, value):
if self.initialized:
pygame.display.set_caption(self.title)
def set_icon(self, filename):
if not exists(filename):
return False
try:
if platform == 'win':
try:
if self._set_icon_win(filename):
return True
except:
# fallback on standard loading then.
pass
            # for all other platforms, or if the ico is not available, use
            # the default way to set it.
self._set_icon_standard(filename)
super(WindowPygame, self).set_icon(filename)
except:
Logger.exception('WinPygame: unable to set icon')
def _set_icon_standard(self, filename):
if PY2:
try:
im = pygame.image.load(filename)
except UnicodeEncodeError:
im = pygame.image.load(filename.encode('utf8'))
else:
im = pygame.image.load(filename)
if im is None:
raise Exception('Unable to load window icon (not found)')
pygame.display.set_icon(im)
def _set_icon_win(self, filename):
        # ensure the window icon filename ends with .ico
if not filename.endswith('.ico'):
filename = '{}.ico'.format(filename.rsplit('.', 1)[0])
if not exists(filename):
return False
import win32api
import win32gui
import win32con
hwnd = pygame.display.get_wm_info()['window']
icon_big = win32gui.LoadImage(
None, filename, win32con.IMAGE_ICON,
48, 48, win32con.LR_LOADFROMFILE)
icon_small = win32gui.LoadImage(
None, filename, win32con.IMAGE_ICON,
16, 16, win32con.LR_LOADFROMFILE)
win32api.SendMessage(
hwnd, win32con.WM_SETICON, win32con.ICON_SMALL, icon_small)
win32api.SendMessage(
hwnd, win32con.WM_SETICON, win32con.ICON_BIG, icon_big)
return True
def screenshot(self, *largs, **kwargs):
global glReadPixels, GL_RGBA, GL_UNSIGNED_BYTE
filename = super(WindowPygame, self).screenshot(*largs, **kwargs)
if filename is None:
return None
if glReadPixels is None:
from kivy.graphics.opengl import (glReadPixels, GL_RGBA,
GL_UNSIGNED_BYTE)
width, height = self.system_size
data = glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE)
if PY2:
data = str(buffer(data))
else:
data = bytes(bytearray(data))
surface = pygame.image.fromstring(data, (width, height), 'RGBA', True)
pygame.image.save(surface, filename)
Logger.debug('Window: Screenshot saved at <%s>' % filename)
return filename
def flip(self):
pygame.display.flip()
super(WindowPygame, self).flip()
@deprecated
def toggle_fullscreen(self):
if self.flags & pygame.FULLSCREEN:
self.flags &= ~pygame.FULLSCREEN
else:
self.flags |= pygame.FULLSCREEN
self._pygame_set_mode()
def _mainloop(self):
EventLoop.idle()
for event in pygame.event.get():
# kill application (SIG_TERM)
if event.type == pygame.QUIT:
if self.dispatch('on_request_close'):
continue
EventLoop.quit = True
self.close()
# mouse move
elif event.type == pygame.MOUSEMOTION:
x, y = event.pos
self.mouse_pos = x, self.system_size[1] - y
                # don't dispatch motion if no buttons are pressed
if event.buttons == (0, 0, 0):
continue
self._mouse_x = x
self._mouse_y = y
self._mouse_meta = self.modifiers
self.dispatch('on_mouse_move', x, y, self.modifiers)
# mouse action
elif event.type in (pygame.MOUSEBUTTONDOWN,
pygame.MOUSEBUTTONUP):
self._pygame_update_modifiers()
x, y = event.pos
btn = 'left'
if event.button == 3:
btn = 'right'
elif event.button == 2:
btn = 'middle'
elif event.button == 4:
btn = 'scrolldown'
elif event.button == 5:
btn = 'scrollup'
elif event.button == 6:
btn = 'scrollright'
elif event.button == 7:
btn = 'scrollleft'
eventname = 'on_mouse_down'
if event.type == pygame.MOUSEBUTTONUP:
eventname = 'on_mouse_up'
self._mouse_x = x
self._mouse_y = y
self._mouse_meta = self.modifiers
self._mouse_btn = btn
self._mouse_down = eventname == 'on_mouse_down'
self.dispatch(eventname, x, y, btn, self.modifiers)
# keyboard action
elif event.type in (pygame.KEYDOWN, pygame.KEYUP):
self._pygame_update_modifiers(event.mod)
# atm, don't handle keyup
if event.type == pygame.KEYUP:
self.dispatch('on_key_up', event.key,
event.scancode)
continue
                # don't dispatch further keys if the down event was accepted
if self.dispatch('on_key_down', event.key,
event.scancode, event.unicode,
self.modifiers):
continue
self.dispatch('on_keyboard', event.key,
event.scancode, event.unicode,
self.modifiers)
# video resize
elif event.type == pygame.VIDEORESIZE:
self._size = event.size
self.update_viewport()
elif event.type == pygame.VIDEOEXPOSE:
self.canvas.ask_update()
# ignored event
elif event.type == pygame.ACTIVEEVENT:
pass
# drop file (pygame patch needed)
elif event.type == pygame.USEREVENT and \
hasattr(pygame, 'USEREVENT_DROPFILE') and \
event.code == pygame.USEREVENT_DROPFILE:
self.dispatch('on_dropfile', event.filename)
'''
# unhandled event !
else:
Logger.debug('WinPygame: Unhandled event %s' % str(event))
'''
def mainloop(self):
while not EventLoop.quit and EventLoop.status == 'started':
try:
self._mainloop()
if not pygame.display.get_active():
pygame.time.wait(100)
except BaseException as inst:
# use exception manager first
r = ExceptionManager.handle_exception(inst)
if r == ExceptionManager.RAISE:
stopTouchApp()
raise
else:
pass
#
# Pygame wrapper
#
def _pygame_set_mode(self, size=None):
if size is None:
size = self.size
if self.fullscreen == 'auto':
pygame.display.set_mode((0, 0), self.flags)
else:
pygame.display.set_mode(size, self.flags)
def _pygame_update_modifiers(self, mods=None):
# Available mod, from dir(pygame)
# 'KMOD_ALT', 'KMOD_CAPS', 'KMOD_CTRL', 'KMOD_LALT',
# 'KMOD_LCTRL', 'KMOD_LMETA', 'KMOD_LSHIFT', 'KMOD_META',
# 'KMOD_MODE', 'KMOD_NONE'
if mods is None:
mods = pygame.key.get_mods()
self._modifiers = []
if mods & (pygame.KMOD_SHIFT | pygame.KMOD_LSHIFT):
self._modifiers.append('shift')
if mods & (pygame.KMOD_ALT | pygame.KMOD_LALT):
self._modifiers.append('alt')
if mods & (pygame.KMOD_CTRL | pygame.KMOD_LCTRL):
self._modifiers.append('ctrl')
if mods & (pygame.KMOD_META | pygame.KMOD_LMETA):
self._modifiers.append('meta')
def request_keyboard(self, callback, target, input_type='text'):
keyboard = super(WindowPygame, self).request_keyboard(
callback, target, input_type)
if android and not self.allow_vkeyboard:
android.show_keyboard(target, input_type)
return keyboard
def release_keyboard(self, *largs):
super(WindowPygame, self).release_keyboard(*largs)
if android:
android.hide_keyboard()
return True
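# Editor's addition (hedged sketch): the SDL environment conventions used by
# create_window() above, with hypothetical values; safe to run standalone.
def _example_sdl_env(left=100, top=50, displayidx=1):
    environ['SDL_VIDEO_WINDOW_POS'] = '%d,%d' % (left, top)
    environ['SDL_VIDEO_FULLSCREEN_HEAD'] = '%d' % displayidx
    return environ['SDL_VIDEO_WINDOW_POS']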
| mit | 8,564,566,295,590,561,000 | 37.0625 | 79 | 0.545399 | false |
eshijia/magnum | magnum/tests/unit/conductor/test_template_definition.py | 1 | 12192 | # Copyright 2015 Rackspace Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from magnum.common import exception
from magnum.conductor import template_definition as tdef
from magnum.tests import base
class TemplateDefinitionTestCase(base.TestCase):
@mock.patch.object(tdef, 'iter_entry_points')
def test_load_entry_points(self, mock_iter_entry_points):
mock_entry_point = mock.MagicMock()
mock_entry_points = [mock_entry_point]
mock_iter_entry_points.return_value = mock_entry_points.__iter__()
entry_points = tdef.TemplateDefinition.load_entry_points()
for (expected_entry_point,
(actual_entry_point, loaded_cls)) in zip(mock_entry_points,
entry_points):
self.assertEqual(expected_entry_point, actual_entry_point)
expected_entry_point.load.assert_called_once_with(require=False)
def test_get_template_definitions(self):
defs = tdef.TemplateDefinition.get_template_definitions()
vm_atomic_k8s = defs[('vm', 'fedora-atomic', 'kubernetes')]
vm_coreos_k8s = defs[('vm', 'coreos', 'kubernetes')]
self.assertEqual(len(vm_atomic_k8s), 1)
self.assertEqual(vm_atomic_k8s['magnum_vm_atomic_k8s'],
tdef.AtomicK8sTemplateDefinition)
self.assertEqual(len(vm_coreos_k8s), 1)
self.assertEqual(vm_coreos_k8s['magnum_vm_coreos_k8s'],
tdef.CoreOSK8sTemplateDefinition)
def test_get_vm_atomic_kubernetes_definition(self):
definition = tdef.TemplateDefinition.get_template_definition(
'vm',
'fedora-atomic',
'kubernetes')
self.assertIsInstance(definition,
tdef.AtomicK8sTemplateDefinition)
def test_get_vm_coreos_kubernetes_definition(self):
definition = tdef.TemplateDefinition.get_template_definition(
'vm',
'coreos',
'kubernetes')
self.assertIsInstance(definition,
tdef.CoreOSK8sTemplateDefinition)
def test_get_vm_atomic_swarm_definition(self):
definition = tdef.TemplateDefinition.get_template_definition(
'vm',
'fedora-atomic',
'swarm')
self.assertIsInstance(definition,
tdef.AtomicSwarmTemplateDefinition)
def test_get_vm_ubuntu_mesos_definition(self):
definition = tdef.TemplateDefinition.get_template_definition(
'vm',
'ubuntu',
'mesos')
self.assertIsInstance(definition,
tdef.UbuntuMesosTemplateDefinition)
def test_get_definition_not_supported(self):
self.assertRaises(exception.BayTypeNotSupported,
tdef.TemplateDefinition.get_template_definition,
'vm', 'not_supported', 'kubernetes')
def test_get_definition_not_enabled(self):
cfg.CONF.set_override('enabled_definitions',
['magnum_vm_atomic_k8s'],
group='bay')
self.assertRaises(exception.BayTypeNotEnabled,
tdef.TemplateDefinition.get_template_definition,
'vm', 'coreos', 'kubernetes')
def test_required_param_not_set(self):
param = tdef.ParameterMapping('test', baymodel_attr='test',
required=True)
mock_baymodel = mock.MagicMock()
mock_baymodel.test = None
self.assertRaises(exception.RequiredParameterNotProvided,
param.set_param, {}, mock_baymodel, None)
def test_output_mapping(self):
heat_outputs = [
{
"output_value": "value1",
"description": "No description given",
"output_key": "key1"
},
{
"output_value": ["value2", "value3"],
"description": "No description given",
"output_key": "key2"
}
]
mock_stack = mock.MagicMock()
mock_stack.outputs = heat_outputs
output = tdef.OutputMapping('key1')
value = output.get_output_value(mock_stack)
self.assertEqual(value, 'value1')
output = tdef.OutputMapping('key2')
value = output.get_output_value(mock_stack)
self.assertEqual(value, ["value2", "value3"])
output = tdef.OutputMapping('key3')
value = output.get_output_value(mock_stack)
self.assertIsNone(value)
def test_update_outputs(self):
definition = tdef.TemplateDefinition.get_template_definition(
'vm',
'fedora-atomic',
'kubernetes')
expected_api_address = 'api_address'
expected_node_addresses = ['ex_minion', 'address']
outputs = [
{"output_value": expected_node_addresses,
"description": "No description given",
"output_key": "kube_minions_external"},
{"output_value": expected_api_address,
"description": "No description given",
"output_key": "api_address"},
{"output_value": ['any', 'output'],
"description": "No description given",
"output_key": "kube_minions"}
]
mock_stack = mock.MagicMock()
mock_stack.outputs = outputs
mock_bay = mock.MagicMock()
definition.update_outputs(mock_stack, mock_bay)
self.assertEqual(mock_bay.api_address, expected_api_address)
self.assertEqual(mock_bay.node_addresses, expected_node_addresses)
class AtomicK8sTemplateDefinitionTestCase(base.TestCase):
@mock.patch('magnum.conductor.template_definition'
'.AtomicK8sTemplateDefinition.get_discovery_url')
@mock.patch('magnum.conductor.template_definition.BaseTemplateDefinition'
'.get_params')
@mock.patch('magnum.conductor.template_definition.TemplateDefinition'
'.get_output')
def test_k8s_get_params(self, mock_get_output, mock_get_params,
mock_get_discovery_url):
mock_context = mock.MagicMock()
mock_baymodel = mock.MagicMock()
mock_bay = mock.MagicMock()
mock_scale_manager = mock.MagicMock()
removal_nodes = ['node1', 'node2']
mock_scale_manager.get_removal_nodes.return_value = removal_nodes
mock_get_discovery_url.return_value = 'fake_discovery_url'
flannel_cidr = mock_baymodel.labels.get('flannel_network_cidr')
flannel_subnet = mock_baymodel.labels.get('flannel_network_subnetlen')
flannel_vxlan = mock_baymodel.labels.get('flannel_use_vxlan')
k8s_def = tdef.AtomicK8sTemplateDefinition()
k8s_def.get_params(mock_context, mock_baymodel, mock_bay,
scale_manager=mock_scale_manager)
        expected_kwargs = {'extra_params': {
            'minions_to_remove': removal_nodes,
            'discovery_url': 'fake_discovery_url',
            'flannel_network_cidr': flannel_cidr,
            'flannel_network_subnetlen': flannel_subnet,
            'flannel_use_vxlan': flannel_vxlan}}
mock_get_params.assert_called_once_with(mock_context, mock_baymodel,
mock_bay, **expected_kwargs)
@mock.patch('requests.get')
def test_k8s_get_discovery_url(self, mock_get):
cfg.CONF.set_override('etcd_discovery_service_endpoint_format',
'http://etcd/test?size=%(size)d',
group='bay')
expected_discovery_url = 'http://etcd/token'
mock_resp = mock.MagicMock()
mock_resp.text = expected_discovery_url
mock_get.return_value = mock_resp
mock_bay = mock.MagicMock()
mock_bay.master_count = 10
mock_bay.discovery_url = None
k8s_def = tdef.AtomicK8sTemplateDefinition()
discovery_url = k8s_def.get_discovery_url(mock_bay)
mock_get.assert_called_once_with('http://etcd/test?size=10')
self.assertEqual(mock_bay.discovery_url, expected_discovery_url)
self.assertEqual(discovery_url, expected_discovery_url)
def test_k8s_get_heat_param(self):
k8s_def = tdef.AtomicK8sTemplateDefinition()
heat_param = k8s_def.get_heat_param(bay_attr='node_count')
self.assertEqual(heat_param, 'number_of_minions')
@mock.patch('requests.get')
def test_k8s_get_discovery_url_not_found(self, mock_get):
mock_resp = mock.MagicMock()
mock_resp.text = ''
mock_get.return_value = mock_resp
fake_bay = mock.MagicMock()
fake_bay.discovery_url = None
self.assertRaises(exception.InvalidDiscoveryURL,
tdef.AtomicK8sTemplateDefinition().get_discovery_url,
fake_bay)
class AtomicSwarmTemplateDefinitionTestCase(base.TestCase):
@mock.patch('requests.post')
def test_swarm_discovery_url_public_token(self, mock_post):
mock_resp = mock.MagicMock()
mock_resp.text = 'some_token'
mock_post.return_value = mock_resp
mock_bay = mock.MagicMock()
mock_bay.discovery_url = None
mock_bay.id = 1
mock_bay.uuid = 'some_uuid'
swarm_def = tdef.AtomicSwarmTemplateDefinition()
actual_url = swarm_def.get_discovery_url(mock_bay)
self.assertEqual('token://some_token', actual_url)
def test_swarm_discovery_url_format_bay_id(self):
cfg.CONF.set_override('public_swarm_discovery', False, group='bay')
cfg.CONF.set_override('swarm_discovery_url_format',
'etcd://test.com/bay-%(bay_id)s', group='bay')
mock_bay = mock.MagicMock()
mock_bay.discovery_url = None
mock_bay.id = 1
mock_bay.uuid = 'some_uuid'
swarm_def = tdef.AtomicSwarmTemplateDefinition()
actual_url = swarm_def.get_discovery_url(mock_bay)
self.assertEqual('etcd://test.com/bay-1', actual_url)
def test_swarm_discovery_url_format_bay_uuid(self):
cfg.CONF.set_override('public_swarm_discovery', False, group='bay')
cfg.CONF.set_override('swarm_discovery_url_format',
'etcd://test.com/bay-%(bay_uuid)s', group='bay')
mock_bay = mock.MagicMock()
mock_bay.discovery_url = None
mock_bay.id = 1
mock_bay.uuid = 'some_uuid'
swarm_def = tdef.AtomicSwarmTemplateDefinition()
actual_url = swarm_def.get_discovery_url(mock_bay)
self.assertEqual('etcd://test.com/bay-some_uuid', actual_url)
def test_swarm_discovery_url_from_bay(self):
mock_bay = mock.MagicMock()
mock_bay.discovery_url = 'token://some_token'
mock_bay.id = 1
mock_bay.uuid = 'some_uuid'
swarm_def = tdef.AtomicSwarmTemplateDefinition()
actual_url = swarm_def.get_discovery_url(mock_bay)
self.assertEqual(mock_bay.discovery_url, actual_url)
def test_swarm_get_heat_param(self):
swarm_def = tdef.AtomicSwarmTemplateDefinition()
heat_param = swarm_def.get_heat_param(bay_attr='node_count')
self.assertEqual(heat_param, 'number_of_nodes')
class UbuntuMesosTemplateDefinitionTestCase(base.TestCase):
def test_mesos_get_heat_param(self):
mesos_def = tdef.UbuntuMesosTemplateDefinition()
heat_param = mesos_def.get_heat_param(bay_attr='node_count')
self.assertEqual(heat_param, 'number_of_slaves')
| apache-2.0 | -5,164,326,885,403,137,000 | 37.219436 | 79 | 0.612041 | false |
gmrandazzo/PyLSS | pylss/lsscoltransfer.py | 1 | 5550 | '''
@package ssengine
lsscoltransfer was written by Giuseppe Marco Randazzo <[email protected]>
Geneva, Dec 2015
'''
#from scipy.optimize import fmin
from optimizer import simplex as fmin
from math import sqrt, pi, log10, log, exp, fabs, isnan, isinf, erf
from optseparation import drange
from time import sleep
def square(val):
""" return the square of val"""
return val*val
class LSSColTrans(object):
"""Perform the generation of LSS parameters logKw, S, alhpa1 and alpha 2
to be used in a column transfer.
Parameters
----------
c_length: float
Define the column lenght.
c_diameter: float
Define the column diameter.
c_porosity: float
Define the column particule porisity.
t0: float
Define the dead time for the unretained compounds.
v_d: float
Define the instrumental dead volume.
flow: float
Define the flow rate.
    init_B: list(float)
        Define the initial % of organic modifier of each gradient.
    final_B: list(float)
        Define the final % of organic modifier of each gradient.
    tg: list(float)
        Define the gradient times.
Returns
------
lss_logkw: float
Return the LSS logaritmic retention factor in water (logKw)
lss_s: float
Return the LSS S molecular parameter
alpha: list(float)
        Return the column interaction factors, one per column/gradient
References
----------
Lloyd R. Snyder, John W. Dolan
High-Performance Gradient Elution:
The Practical Application of the Linear-Solvent-Strength Model
ISBN: 978-0-471-70646-5
January 2007
"""
def __init__(self, c_length, c_diameter, c_porosity, t0, v_d, flow):
self.logkw = []
self.s = []
self.alpha = []
if c_length != None and c_diameter != None and c_porosity != None:
#Column Parameters
self.c_length = float(c_length)
self.c_diameter = float(c_diameter)
self.c_porosity = float(c_porosity)
self.v0 = ((square(self.c_diameter)*self.c_length*pi*self.c_porosity)/4.)/1000.
else:
self.v0 = None
#System Parameters
self.v_d = v_d # Dwell Volume
# Gradient Parameters
self.flow = flow
self.init_B = []
self.final_B = []
self.tg = []
self.trtab = [] #table of retention times
self.tr = [] #row of retention times
self.t0 = []
if c_length != None and c_diameter != None and c_porosity != None:
for i in range(len(self.flow)):
self.t0.append(self.v0/self.flow[i])
else:
self.t0 = t0
self.td = []
for i in range(len(self.v_d)):
self.td.append(self.v_d[i]/self.flow[i])
def rtpred(self, logkw, S, tg, init_B, final_B, alpha, t0, td):
#print logkw, S, tg, alpha, t0, td
        if logkw is not None and S is not None and alpha > 0:
            DeltaFi = final_B - init_B
            b = (t0 * DeltaFi) / tg
            if b > 0:
                try:
                    kw = exp(logkw)
                    # alpha is the scalar column interaction factor passed by
                    # the callers below; editor's fix: the original indexed
                    # alpha[0]/alpha[1], which fails for a scalar. It is
                    # assumed here to scale both kw and S.
                    lnk0 = log(kw * alpha) - S * alpha * (init_B / 100.)
                    k0 = exp(lnk0)
                    tr_pred = log(b * k0 * S * t0 + 1) / (b * S * alpha) + t0 + td
                    return tr_pred
except:
return 9999
else:
return 9999
else:
return 9999
def iterfun(self, lss):
res = 0.
for i in range(len(self.tr)):
            tr_pred = self.rtpred(lss[0], lss[1], self.tg[i], self.init_B[i], self.final_B[i], self.alpha[i % len(self.alpha)], self.t0[i % len(self.t0)], self.td[i % len(self.td)])
res += square(self.tr[i]-tr_pred)
rmsd = sqrt(res)
return rmsd
def iterfunalpha(self, alpha):
""" Return the logKw and S parameters """
self.alpha = alpha
rmsd = 0.
for i in range(len(self.trtab)):
self.tr = self.trtab[i]
lssinit = [0.1, 0.1]
#simplex optimization
logkw, s = fmin(self.iterfun, lssinit, side=[0.1, 0.1], tol=1e-10)
#calcualte retention time of all compounds with this alpha
sz_grad = len(self.flow)
for j in range(len(self.trtab[i])):
trpred = self.rtpred(logkw, s, self.tg[j % sz_grad], self.init_B[j % sz_grad], self.final_B[j % sz_grad], self.alpha[j % sz_grad], self.t0[j % sz_grad], self.td[j % sz_grad])
rmsd += square(self.trtab[i][j] - trpred)
print("%.2f %.2f [%f %f]") % (self.trtab[i][j], trpred, self.t0[j % sz_grad], self.td[j % sz_grad])
#print alpha
print ("-"*20)
sleep(1)
rmsd /= float(len(self.trtab))
rmsd = sqrt(rmsd)
return rmsd
def getlssparameters(self, trtab, tg, init_B, final_B, alpha):
self.trtab = trtab
self.tg = tg
self.init_B = init_B
self.final_B = final_B
alphainit = []
asides = []
for i in range(len(alpha)):
alphainit.append(1.0)
asides.append(0.1)
self.alpha = fmin(self.iterfunalpha, alphainit, side=asides, tol=1e-10)
for i in range(len(self.trtab)):
self.tr = trtab[i]
lssinit = [0.1, 0.1]
logkw, s = fmin(self.iterfun, lssinit, side=[0.1, 0.1], tol=1e-3)
self.logkw.append(logkw)
self.s.append(s)
return self.logkw, self.s, self.alpha
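# Editor's addition (hedged sketch): predicting a single gradient retention
# time with hypothetical values (15 cm x 0.46 cm column, 60% porosity,
# 1 mL/min flow, logkw=2, S=5, 5->95 %B in 20 min, alpha=1), assuming the
# scalar-alpha form of rtpred() above.
def _example_rtpred():
    col = LSSColTrans(15.0, 0.46, 0.6, None, [0.0], [1.0])
    return col.rtpred(2.0, 5.0, 20.0, 5.0, 95.0, 1.0, col.t0[0], col.td[0])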
| lgpl-3.0 | 8,786,438,165,989,413,000 | 30.005587 | 190 | 0.546126 | false |
LACMTA/loader | ott/loader/otp/graph/run.py | 1 | 2773 | """ Run
"""
import sys
import time
import logging
log = logging.getLogger(__file__)
from ott.utils import otp_utils
from ott.utils import web_utils
from ott.utils.cache_base import CacheBase
class Run(CacheBase):
""" run OTP graph
"""
graphs = None
def __init__(self):
super(Run, self).__init__('otp')
self.graphs = otp_utils.get_graphs(self)
@classmethod
def get_args(cls):
''' run the OTP server
examples:
bin/otp_run -s call (run the call server)
bin/otp_run -v test (run the vizualizer with the test graph)
'''
parser = otp_utils.get_initial_arg_parser()
parser.add_argument('--server', '-s', required=False, action='store_true', help="run 'named' graph in server mode")
parser.add_argument('--all', '-a', required=False, action='store_true', help="run all graphs in server mode")
parser.add_argument('--viz', '-v', required=False, action='store_true', help="run 'named' graph with the vizualizer client")
parser.add_argument('--mem', '-lm', required=False, action='store_true', help="set the jvm heap memory for the graph")
args = parser.parse_args()
return args, parser
@classmethod
def run(cls):
#import pdb; pdb.set_trace()
success = False
r = Run()
args, parser = r.get_args()
graph = otp_utils.find_graph(r.graphs, args.name)
java_mem = "-Xmx1236m" if args.mem else None
if args.all or 'all' == args.name or 'a' == args.name:
success = True
for z in r.graphs:
print "running {}".format(z)
time.sleep(2)
s = otp_utils.run_otp_server(java_mem=java_mem, **z)
if s == False:
success = False
elif args.server:
success = otp_utils.run_otp_server(java_mem=java_mem, **graph)
elif args.viz:
success = otp_utils.vizualize_graph(graph_dir=graph['dir'], java_mem=java_mem)
else:
print "PLEASE select a option to either serve or vizualize graph {}".format(graph['name'])
parser.print_help()
return success
@classmethod
def static_server_cfg(cls):
r = Run()
port = r.config.get('port', 'web', '50080')
dir = r.config.get('dir', 'web', 'ott/loader/otp/graph')
return port, dir
@classmethod
def static_server(cls):
        ''' start a static web server for the configured directory and port
        '''
success = False
port, dir = Run.static_server_cfg()
success = web_utils.background_web_server(dir, port)
return success
def main(argv=sys.argv):
Run.run()
if __name__ == '__main__':
main()
| mpl-2.0 | 7,585,301,656,605,814,000 | 31.244186 | 136 | 0.567616 | false |
ckaus/EpiPy | epipy/ui/view/mainwindow.py | 1 | 2749 | # -*- coding: utf-8 -*-
from PyQt4 import QtGui
from PyQt4.uic import loadUi
from epipy.ui.view import cwd
from epipy.ui.view.aboutdialog import AboutDialog
from epipy.ui.view.infogroupbox import InfoGroupBox
from epipy.ui.view.plotwidget import PlotWidget
class MainWindow(QtGui.QMainWindow):
"""This class represents the main window.
:returns: an instance of *MainWindow*
"""
def __init__(self):
super(MainWindow, self).__init__()
loadUi(cwd + '/mainwindow.ui', self)
# Menu
self.about_dialog = AboutDialog()
self.exit_action.triggered.connect(self.close)
self.clear_information_action.triggered.connect(self.clear_information)
self.show_fullscreen_action.triggered.connect(self.show_full_screen)
self.exit_fullscreen_action.triggered.connect(self.exit_full_screen)
self.exit_fullscreen_action.setVisible(False)
self.show_sidebar_action.triggered.connect(self.show_sidebar)
self.hide_sidebar_action.triggered.connect(self.hide_sidebar)
self.show_sidebar_action.setVisible(False)
self.about_action.triggered.connect(self.about_dialog.show)
# Top Left
self.plot_view = PlotWidget()
self.h_splitter.insertWidget(0, self.plot_view)
# Bottom
self.info_group_box = InfoGroupBox()
self.v_splitter.insertWidget(1, self.info_group_box)
def clear_information(self):
"""Clears the information of *InfoGroupBox*"""
self.info_group_box.info_plain_text_edit.clear()
def exit_full_screen(self):
"""Stops full screen mode of *MainWindow*."""
self.show_fullscreen_action.setVisible(True)
self.exit_fullscreen_action.setVisible(False)
self.showNormal()
def hide_sidebar(self):
"""Hides the side bar view."""
self.h_splitter.widget(1).setVisible(False)
self.show_sidebar_action.setVisible(True)
self.hide_sidebar_action.setVisible(False)
def show_notification_information(self, text):
"""Shows a notification window *QMessageBox*.
:param text: the showing text
:type text: str
"""
QtGui.QMessageBox.information(self, 'Information', text,
QtGui.QMessageBox.Ok)
def show_full_screen(self):
"""Executes the full screen mode of *MainWindow*."""
self.show_fullscreen_action.setVisible(False)
self.exit_fullscreen_action.setVisible(True)
self.showFullScreen()
def show_sidebar(self):
"""Shows the side bar view."""
self.h_splitter.widget(1).setVisible(True)
self.show_sidebar_action.setVisible(False)
self.hide_sidebar_action.setVisible(True)
| mit | 7,415,836,705,658,588,000 | 35.171053 | 79 | 0.663514 | false |
Juan-Mateos/coll_int_ai_case | notebooks/sankey.py | 1 | 7090 | # -*- coding: utf-8 -*-
"""
Produces simple Sankey Diagrams with matplotlib.
@author: Anneya Golob & marcomanz & pierre-sassoulas
.-.
.--.( ).--.
<-. .-.-.(.-> )_ .--.
`-`( )-' `) )
(o o ) `)`-'
( ) ,)
( () ) )
`---"\ , , ,/`
`--' `--' `--'
| | | |
| | | |
' | ' |
"""
from collections import defaultdict
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
def sankey(left, right, leftWeight=None, rightWeight=None, colorDict=None,
leftLabels=None, rightLabels=None, aspect=4, rightColor=False,
           fontsize=14, figure_name=None, closePlot=False):
'''
Make Sankey Diagram showing flow from left-->right
Inputs:
left = NumPy array of object labels on the left of the diagram
right = NumPy array of corresponding labels on the right of the diagram
len(right) == len(left)
leftWeight = NumPy array of weights for each strip starting from the
left of the diagram, if not specified 1 is assigned
rightWeight = NumPy array of weights for each strip starting from the
right of the diagram, if not specified the corresponding leftWeight
is assigned
colorDict = Dictionary of colors to use for each label
{'label':'color'}
leftLabels = order of the left labels in the diagram
rightLabels = order of the right labels in the diagram
aspect = vertical extent of the diagram in units of horizontal extent
        rightColor = If true, each strip in the diagram will be colored
                    according to its left label
        Output:
None
'''
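    # Minimal usage sketch (toy data, not from the original notebook):
    #     sankey(left=['a', 'a', 'b'], right=['x', 'y', 'y'],
    #            aspect=4, fontsize=12, figure_name='demo_sankey')
    # writes 'demo_sankey.png' with three strips flowing left to right.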
if leftWeight is None:
leftWeight = []
if rightWeight is None:
rightWeight = []
if leftLabels is None:
leftLabels = []
if rightLabels is None:
rightLabels = []
# Check weights
if len(leftWeight) == 0:
leftWeight = np.ones(len(left))
if len(rightWeight) == 0:
rightWeight = leftWeight
plt.figure()
plt.rc('text', usetex=False)
plt.rc('font', family='serif')
# Create Dataframe
df = pd.DataFrame({'left': left, 'right': right, 'leftWeight': leftWeight,
'rightWeight': rightWeight}, index=range(len(left)))
# Identify all labels that appear 'left' or 'right'
allLabels = pd.Series(np.r_[df.left.unique(), df.right.unique()]).unique()
# Identify left labels
if len(leftLabels) == 0:
leftLabels = pd.Series(df.left.unique()).unique()
# Identify right labels
if len(rightLabels) == 0:
rightLabels = pd.Series(df.right.unique()).unique()
# If no colorDict given, make one
if colorDict is None:
colorDict = {}
pal = "hls"
cls = sns.color_palette(pal, len(allLabels))
for i, l in enumerate(allLabels):
colorDict[l] = cls[i]
# Determine widths of individual strips
ns_l = defaultdict()
ns_r = defaultdict()
for l in leftLabels:
myD_l = {}
myD_r = {}
for l2 in rightLabels:
myD_l[l2] = df[(df.left == l) & (df.right == l2)].leftWeight.sum()
myD_r[l2] = df[(df.left == l) & (df.right == l2)].rightWeight.sum()
ns_l[l] = myD_l
ns_r[l] = myD_r
# Determine positions of left label patches and total widths
widths_left = defaultdict()
for i, l in enumerate(leftLabels):
myD = {}
myD['left'] = df[df.left == l].leftWeight.sum()
if i == 0:
myD['bottom'] = 0
myD['top'] = myD['left']
else:
myD['bottom'] = widths_left[leftLabels[i - 1]]['top'] + 0.02 * df.leftWeight.sum()
myD['top'] = myD['bottom'] + myD['left']
topEdge = myD['top']
widths_left[l] = myD
# Determine positions of right label patches and total widths
widths_right = defaultdict()
for i, l in enumerate(rightLabels):
myD = {}
myD['right'] = df[df.right == l].rightWeight.sum()
if i == 0:
myD['bottom'] = 0
myD['top'] = myD['right']
else:
myD['bottom'] = widths_right[rightLabels[i - 1]]['top'] + 0.02 * df.rightWeight.sum()
myD['top'] = myD['bottom'] + myD['right']
topEdge = myD['top']
widths_right[l] = myD
# Total vertical extent of diagram
xMax = topEdge / aspect
# Draw vertical bars on left and right of each label's section & print label
for l in leftLabels:
plt.fill_between(
[-0.02 * xMax, 0],
2 * [widths_left[l]['bottom']],
2 * [widths_left[l]['bottom'] + widths_left[l]['left']],
color=colorDict[l],
alpha=0.99
)
plt.text(
-0.05 * xMax,
widths_left[l]['bottom'] + 0.5 * widths_left[l]['left'],
l,
{'ha': 'right', 'va': 'center'},
fontsize=fontsize
)
for l in rightLabels:
plt.fill_between(
[xMax, 1.02 * xMax], 2 * [widths_right[l]['bottom']],
2 * [widths_right[l]['bottom'] + widths_right[l]['right']],
color=colorDict[l],
alpha=0.99
)
plt.text(
1.05 * xMax, widths_right[l]['bottom'] + 0.5 * widths_right[l]['right'],
l,
{'ha': 'left', 'va': 'center'},
fontsize=fontsize
)
# Plot strips
for l in leftLabels:
for l2 in rightLabels:
lc = l
if rightColor:
lc = l2
if len(df[(df.left == l) & (df.right == l2)]) > 0:
# Create array of y values for each strip, half at left value, half at right, convolve
ys_d = np.array(50 * [widths_left[l]['bottom']] + 50 * [widths_right[l2]['bottom']])
ys_d = np.convolve(ys_d, 0.05 * np.ones(20), mode='valid')
ys_d = np.convolve(ys_d, 0.05 * np.ones(20), mode='valid')
ys_u = np.array(50 * [widths_left[l]['bottom'] + ns_l[l][l2]] + 50 * [widths_right[l2]['bottom'] + ns_r[l][l2]])
ys_u = np.convolve(ys_u, 0.05 * np.ones(20), mode='valid')
ys_u = np.convolve(ys_u, 0.05 * np.ones(20), mode='valid')
# Update bottom edges at each label so next strip starts at the right place
widths_left[l]['bottom'] += ns_l[l][l2]
widths_right[l2]['bottom'] += ns_r[l][l2]
plt.fill_between(
np.linspace(0, xMax, len(ys_d)), ys_d, ys_u, alpha=0.65,
color=colorDict[lc]
)
plt.gca().axis('off')
plt.gcf().set_size_inches(6, 6)
    if figure_name is not None:
plt.savefig("{}.png".format(figure_name), bbox_inches='tight', dpi=150)
if closePlot:
plt.close()
| mit | 4,818,465,458,635,935,000 | 35.546392 | 128 | 0.517772 | false |
wooga/airflow | airflow/example_dags/example_external_task_marker_dag.py | 1 | 3327 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and
ExternalTaskMarker
In this example, child_task1 in example_external_task_marker_child depends on parent_task in
example_external_task_marker_parent. When parent_task is cleared with "Recursive" selected,
the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its
downstream tasks.
ExternalTaskSensor will keep poking for the status of the remote ExternalTaskMarker task at a regular
interval until one of the following happens:
1. ExternalTaskMarker reaches the states mentioned in the allowed_states list
    In this case, ExternalTaskSensor will exit with a success status code
2. ExternalTaskMarker reaches the states mentioned in the failed_states list
    In this case, ExternalTaskSensor will raise an AirflowException and the user
    needs to handle it, for example with multiple downstream tasks
3. ExternalTaskSensor times out
In this case, ExternalTaskSensor will raise AirflowSkipException or AirflowSensorTimeout
exception
"""
import datetime
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.sensors.external_task_sensor import ExternalTaskMarker, ExternalTaskSensor
start_date = datetime.datetime(2015, 1, 1)
with DAG(
dag_id="example_external_task_marker_parent",
start_date=start_date,
schedule_interval=None,
tags=['example'],
) as parent_dag:
# [START howto_operator_external_task_marker]
parent_task = ExternalTaskMarker(task_id="parent_task",
external_dag_id="example_external_task_marker_child",
external_task_id="child_task1")
# [END howto_operator_external_task_marker]
with DAG(
dag_id="example_external_task_marker_child",
start_date=start_date,
schedule_interval=None,
tags=['example'],
) as child_dag:
# [START howto_operator_external_task_sensor]
child_task1 = ExternalTaskSensor(task_id="child_task1",
external_dag_id=parent_dag.dag_id,
external_task_id=parent_task.task_id,
timeout=600,
allowed_states=['success'],
failed_states=['failed', 'skipped'],
mode="reschedule")
# [END howto_operator_external_task_sensor]
child_task2 = DummyOperator(task_id="child_task2")
child_task1 >> child_task2
| apache-2.0 | -6,449,447,133,075,926,000 | 42.776316 | 97 | 0.704839 | false |
gonicus/gosa | client/src/gosa/client/mqtt_service.py | 1 | 10323 | # This file is part of the GOsa project.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
"""
The *MQTTClientService* is responsible for connecting the *client* to the MQTT
bus, registering the required queues, listening for commands on those queues
and dispatching incoming commands to the :class:`clacks.client.command.CommandRegistry`.
**Queues**
Every client has an individual queue. It is constructed from these components::
{domain}.client.{uuid}
The client can publish and subscribe to this queue.
There is another broadcasting queue for all clients::
{domain}.client.broadcast
The client can subscribe to this queue, but only the server can publish to that queue.
**Events**
The gosa client produces a **ClientAnnounce** event on startup which tells
the backend about the client capabilities (commands it provides) and
some hardware information.
This information is re-sent when the client receives a **ClientPoll** event,
which is generated by the backend.
On client shutdown, a **ClientLeave** is emitted to tell the backend that
the client has passed away.
"""
import sys
import netifaces #@UnresolvedImport
import traceback
import logging
import random
import time
import zope.event
import datetime
from lxml import objectify, etree
from threading import Timer
from netaddr import IPNetwork
from zope.interface import implementer
from gosa.common.gjson import loads, dumps
from gosa.common.components.jsonrpc_utils import BadServiceRequest
from gosa.common.handler import IInterfaceHandler
from gosa.common.components.registry import PluginRegistry
from gosa.common.event import EventMaker
from gosa.common import Environment
from gosa.client.event import Resume
@implementer(IInterfaceHandler)
class MQTTClientService(object):
"""
Internal class to serve all available queues and commands to
the MQTT broker.
"""
_priority_ = 10
# Time instance that helps us preventing re-announce-event flooding
time_obj = None
time_int = 3
client = None
__last_announce = None
_ping_job = None
def __init__(self):
env = Environment.getInstance()
self.log = logging.getLogger(__name__)
self.log.debug("initializing MQTT service provider")
self.env = env
self.__cr = None
e = EventMaker()
self.goodbye = e.Event(e.ClientLeave(
e.Id(Environment.getInstance().uuid)
))
def _handle_message(self, topic, message):
if message[0:1] == "{":
# RPC command
self.commandReceived(topic, message)
else:
# event received
try:
xml = objectify.fromstring(message)
if hasattr(xml, "ClientPoll"):
self.__handleClientPoll()
else:
self.log.debug("unhandled event received '%s'" % xml.getchildren()[0].tag)
except etree.XMLSyntaxError as e:
self.log.error("Message parsing error: %s" % e)
def serve(self):
""" Start MQTT service for this gosa service provider. """
# Load MQTT and Command registry instances
self.client = PluginRegistry.getInstance('MQTTClientHandler')
self.client.get_client().add_connection_listener(self._on_connection_change)
self.__cr = PluginRegistry.getInstance('ClientCommandRegistry')
self.client.set_subscription_callback(self._handle_message)
def _on_connection_change(self, connected):
if connected is True:
if self.__last_announce is None or self.__last_announce < (datetime.datetime.now() - datetime.timedelta(minutes=5)):
self.__announce(send_client_announce=True, send_user_session=True)
# Send a ping on a regular base
if self._ping_job is None:
timeout = float(self.env.config.get('client.ping-interval', default=600))
sched = PluginRegistry.getInstance("SchedulerService").getScheduler()
self._ping_job = sched.add_interval_job(self.__ping, seconds=timeout, start_date=datetime.datetime.now() + datetime.timedelta(seconds=1))
else:
if self._ping_job is not None:
sched = PluginRegistry.getInstance("SchedulerService").getScheduler()
sched.unschedule_job(self._ping_job)
self._ping_job = None
def stop(self):
self.client.send_event(self.goodbye, qos=1)
self.client.close()
def __ping(self):
e = EventMaker()
info = e.Event(e.ClientPing(e.Id(self.env.uuid)))
self.client.send_event(info)
def reAnnounce(self):
"""
Re-announce signature changes to the agent.
        This method waits a given amount of time and then re-sends
the client method-signatures.
"""
if self.__cr:
# Cancel running jobs
if self.time_obj:
self.time_obj.cancel()
self.time_obj = Timer(self.time_int, self._reAnnounce)
self.time_obj.start()
def _reAnnounce(self):
"""
Re-announces the client signatures
"""
self.__announce(send_client_announce=False, send_user_session=False)
def commandReceived(self, topic, message):
"""
Process incoming commands, coming in with session and message
information.
================= ==========================
Parameter Description
================= ==========================
message Received MQTT message
================= ==========================
Incoming messages are coming from an
:class:`gosa.common.components.mqtt_proxy.MQTTServiceProxy`. The command
result is written to the '<domain>.client.<client-uuid>' queue.
"""
err = None
res = None
name = None
args = None
kwargs = None
id_ = ''
response_topic = "%s/response" % "/".join(topic.split("/")[0:4])
try:
req = loads(message)
except Exception as e:
err = str(e)
self.log.error("ServiceRequestNotTranslatable: %s" % err)
req = {'id': topic.split("/")[-2]}
if err is None:
try:
id_ = req['id']
name = req['method']
args = req['params']
kwargs = req['kwparams']
except KeyError as e:
self.log.error("KeyError: %s" % e)
err = str(BadServiceRequest(message))
self.log.debug("received call [%s] for %s: %s(%s,%s)" % (id_, topic, name, args, kwargs))
# Try to execute
if err is None:
try:
res = self.__cr.dispatch(name, *args, **kwargs)
except Exception as e:
err = str(e)
# Write exception to log
exc_type, exc_value, exc_traceback = sys.exc_info()
self.log.error(traceback.format_exception(exc_type, exc_value, exc_traceback))
self.log.debug("returning call [%s]: %s / %s" % (id_, res, err))
response = dumps({"result": res, "id": id_})
# Get rid of it...
self.client.send_message(response, topic=response_topic)
def __handleClientPoll(self):
delay = random.randint(0, 30)
self.log.debug("received client poll - will answer in %d seconds" % delay)
time.sleep(delay)
self.__announce(send_client_announce=True, send_user_session=True)
# Send a resume to all registered plugins
zope.event.notify(Resume())
def __announce(self, send_client_announce=False, send_user_session=True):
e = EventMaker()
# Assemble network information
more = []
netinfo = []
self.__last_announce = datetime.datetime.now()
for interface in netifaces.interfaces():
i_info = netifaces.ifaddresses(interface)
# Skip lo interfaces
if not netifaces.AF_INET in i_info:
continue
# Skip lo interfaces
if not netifaces.AF_LINK in i_info:
continue
if i_info[netifaces.AF_LINK][0]['addr'] == '00:00:00:00:00:00':
continue
# Assemble ipv6 information
ip6 = ""
if netifaces.AF_INET6 in i_info:
ip = IPNetwork("%s/%s" % (i_info[netifaces.AF_INET6][0]['addr'].split("%", 1)[0],
i_info[netifaces.AF_INET6][0]['netmask']))
ip6 = str(ip)
netinfo.append(
e.NetworkDevice(
e.Name(interface),
e.IPAddress(i_info[netifaces.AF_INET][0]['addr']),
e.IPv6Address(ip6),
e.MAC(i_info[netifaces.AF_LINK][0]['addr']),
e.Netmask(i_info[netifaces.AF_INET][0]['netmask']),
e.Broadcast(i_info[netifaces.AF_INET][0]['broadcast'])))
more.append(e.NetworkInformation(*netinfo))
# Build event
if send_client_announce is True:
info = e.Event(
e.ClientAnnounce(
e.Id(self.env.uuid),
e.Name(self.env.id),
*more))
self.client.send_event(info, qos=1)
# Assemble capabilities
more = []
caps = []
for command, dsc in self.__cr.commands.items():
caps.append(
e.ClientMethod(
e.Name(command),
e.Path(dsc['path']),
e.Signature(','.join(dsc['sig'])),
e.Documentation(dsc['doc'])))
more.append(e.ClientCapabilities(*caps))
info = e.Event(
e.ClientSignature(
e.Id(self.env.uuid),
e.Name(self.env.id),
*more))
self.client.send_event(info, qos=1)
if send_user_session is True:
try:
sk = PluginRegistry.getInstance('SessionKeeper')
sk.sendSessionNotification()
except: # pragma: nocover
pass
| lgpl-2.1 | -6,830,260,586,721,705,000 | 33.182119 | 153 | 0.576577 | false |
guillaume-philippon/aquilon | tests/broker/test_add_static_route.py | 1 | 14153 | #!/usr/bin/env python
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2011,2012,2013,2014,2015,2016 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the add_static_route command."""
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
from machinetest import MachineTestMixin
class TestAddStaticRoute(MachineTestMixin, TestBrokerCommand):
def test_001_add_test_host(self):
eth0_ip = self.net["unknown0"].usable[37]
eth1_ip = self.net["routing1"].usable[1]
self.create_host("unittest27.aqd-unittest.ms.com", eth0_ip, "ut3c5n9",
model="hs21-8853", chassis="ut3c5", slot=9,
eth0_mac=eth0_ip.mac,
eth1_mac=eth1_ip.mac, eth1_ip=eth1_ip,
eth1_fqdn="unittest27-e1.aqd-unittest.ms.com",
zebra=False, personality="inventory")
def test_100_add_route1(self):
gw = self.net["routing1"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.250.0", "--prefixlen", "23",
"--comments", "Some route comments"]
self.statustest(command)
def test_100_add_route1_personality(self):
gw = self.net["routing1"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.248.0", "--prefixlen", "24",
"--personality", "inventory"]
self.statustest(command)
def test_100_add_route2(self):
gw = self.net["routing2"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.252.0", "--prefixlen", "23"]
self.noouttest(command)
def test_100_add_route2_guess(self):
net = self.net["routing2"]
command = ["add", "static", "route", "--networkip", net.ip,
"--ip", "192.168.254.0", "--prefixlen", "24"]
out = self.statustest(command)
self.matchoutput(out, "Gateway %s taken from default offset "
"1 for network %s." % (net.gateway, str(net)),
command)
def test_100_add_route3(self):
net = self.net["routing3"]
ip = net[3]
command = ["add", "static", "route", "--networkip", net.ip,
"--ip", "192.168.254.0", "--prefixlen", "24"]
out = self.statustest(command)
self.matchoutput(out, "Gateway %s taken from router address "
"of network %s." % (ip, str(net)), command)
def test_110_add_overlap(self):
net = self.net["routing2"]
gw = net.usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.252.128", "--prefixlen", "25"]
out = self.badrequesttest(command)
self.matchoutput(out,
"Network %s [%s] already has an overlapping route to "
"192.168.252.0/23 using gateway %s." %
(net.name, net, gw),
command)
def test_120_add_default(self):
gw = self.net["unknown0"].gateway
command = ["add", "static", "route", "--gateway", gw,
"--ip", "250.250.0.0", "--prefixlen", "16"]
self.statustest(command)
def test_130_add_non_network_ip(self):
gw = self.net["unknown0"].gateway
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.95.150", "--prefixlen", "24"]
out = self.badrequesttest(command)
self.matchoutput(out,
"192.168.95.150 is not a network address; "
"did you mean 192.168.95.0.",
command)
def test_200_show_host(self):
gw = self.net["routing1"].usable[-1]
command = ["show", "host", "--hostname", "unittest26.aqd-unittest.ms.com"]
out = self.commandtest(command)
self.matchoutput(out, "Static Route: 192.168.250.0/23 gateway %s" % gw,
command)
self.matchoutput(out, "Comments: Some route comments", command)
self.matchclean(out, "192.168.252.0", command)
def test_200_show_network(self):
gw = self.net["routing1"].usable[-1]
command = ["show", "network", "--ip", self.net["routing1"].ip]
out = self.commandtest(command)
self.searchoutput(out,
r'Static Route: 192\.168\.248\.0/24 gateway %s'
r'\s*Personality: inventory Archetype: aquilon$' % gw,
command)
self.searchoutput(out,
r'Static Route: 192\.168\.250\.0/23 gateway %s'
r'\s*Comments: Some route comments' % gw,
command)
self.matchclean(out, "192.168.252.0", command)
def test_210_make_unittest26(self):
command = ["make", "--hostname", "unittest26.aqd-unittest.ms.com"]
err = self.statustest(command)
self.matchoutput(err, "3/3 compiled", command)
def test_220_verify_unittest26(self):
eth0_net = self.net["unknown0"]
eth0_ip = eth0_net.usable[23]
eth1_net = self.net["routing1"]
eth1_ip = eth1_net.usable[0]
eth1_gw = eth1_net.usable[-1]
command = ["cat", "--hostname", "unittest26.aqd-unittest.ms.com", "--data"]
out = self.commandtest(command)
self.searchoutput(out,
r'"system/network/interfaces/eth0" = nlist\(\s*'
r'"bootproto", "static",\s*'
r'"broadcast", "%s",\s*'
r'"fqdn", "unittest26.aqd-unittest.ms.com",\s*'
r'"gateway", "%s",\s*'
r'"ip", "%s",\s*'
r'"netmask", "%s",\s*'
r'"network_environment", "internal",\s*'
r'"network_type", "unknown",\s*'
r'"route", list\(\s*'
r'nlist\(\s*'
r'"address", "250.250.0.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.0.0"\s*\)\s*'
r'\)\s*\)' %
(eth0_net.broadcast, eth0_net.gateway, eth0_ip,
eth0_net.netmask, eth0_net.gateway),
command)
self.searchoutput(out,
r'"system/network/interfaces/eth1" = nlist\(\s*'
r'"bootproto", "static",\s*'
r'"broadcast", "%s",\s*'
r'"fqdn", "unittest26-e1.aqd-unittest.ms.com",\s*'
r'"gateway", "%s",\s*'
r'"ip", "%s",\s*'
r'"netmask", "%s",\s*'
r'"network_environment", "internal",\s*'
r'"network_type", "unknown",\s*'
r'"route", list\(\s*'
r'nlist\(\s*'
r'"address", "192.168.250.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.254.0"\s*\)\s*'
r'\)\s*\)' %
(eth1_net.broadcast, eth1_net.gateway, eth1_ip,
eth1_net.netmask, eth1_gw),
command)
def test_220_verify_unittest27(self):
eth0_net = self.net["unknown0"]
eth0_ip = eth0_net.usable[37]
eth1_net = self.net["routing1"]
eth1_ip = eth1_net.usable[1]
eth1_gw = eth1_net.usable[-1]
command = ["cat", "--hostname", "unittest27.aqd-unittest.ms.com", "--data"]
out = self.commandtest(command)
self.searchoutput(out,
r'"system/network/interfaces/eth0" = nlist\(\s*'
r'"bootproto", "static",\s*'
r'"broadcast", "%s",\s*'
r'"fqdn", "unittest27.aqd-unittest.ms.com",\s*'
r'"gateway", "%s",\s*'
r'"ip", "%s",\s*'
r'"netmask", "%s",\s*'
r'"network_environment", "internal",\s*'
r'"network_type", "unknown",\s*'
r'"route", list\(\s*'
r'nlist\(\s*'
r'"address", "250.250.0.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.0.0"\s*\)\s*'
r'\)\s*\)' %
(eth0_net.broadcast, eth0_net.gateway, eth0_ip,
eth0_net.netmask, eth0_net.gateway),
command)
self.searchoutput(out,
r'"system/network/interfaces/eth1" = nlist\(\s*'
r'"bootproto", "static",\s*'
r'"broadcast", "%s",\s*'
r'"fqdn", "unittest27-e1.aqd-unittest.ms.com",\s*'
r'"gateway", "%s",\s*'
r'"ip", "%s",\s*'
r'"netmask", "%s",\s*'
r'"network_environment", "internal",\s*'
r'"network_type", "unknown",\s*'
r'"route", list\(\s*'
r'nlist\(\s*'
r'"address", "192.168.248.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.255.0"\s*'
r'\),\s*'
r'nlist\(\s*'
r'"address", "192.168.250.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.254.0"\s*'
r'\)\s*\)\s*\)' %
(eth1_net.broadcast, eth1_net.gateway, eth1_ip,
eth1_net.netmask, eth1_gw, eth1_gw),
command)
def test_230_verify_show_unittest02(self):
command = ["show", "host", "--hostname", "unittest02.one-nyp.ms.com"]
out = self.commandtest(command)
self.matchoutput(out, "Static Route: 250.250.0.0/16 gateway %s" %
self.net["unknown0"].gateway, command)
def test_240_verify_cat_unittest02(self):
net = self.net["unknown0"]
eth0_ip = net.usable[0]
command = ["cat", "--hostname", "unittest02.one-nyp.ms.com", "--data"]
out = self.commandtest(command)
self.searchoutput(out,
r'"system/network/interfaces/eth0" = nlist\(\s*'
r'"bootproto", "static",\s*'
r'"broadcast", "%s",\s*'
r'"fqdn", "unittest02.one-nyp.ms.com",\s*'
r'"gateway", "%s",\s*'
r'"ip", "%s",\s*'
r'"netmask", "%s",\s*'
r'"network_environment", "internal",\s*'
r'"network_type", "unknown",\s*'
r'"route", list\(\s*'
r'nlist\(\s*'
r'"address", "250.250.0.0",\s*'
r'"gateway", "%s",\s*'
r'"netmask", "255.255.0.0"\s*\)\s*'
r'\)\s*\)' %
(net.broadcast, net.gateway,
eth0_ip, net.netmask, net.gateway),
command)
def test_300_missing_personality(self):
gw = self.net["routing1"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.250.0", "--prefixlen", "23",
"--personality", "personality-does-not-exist",
"--archetype", "aquilon"]
out = self.notfoundtest(command)
self.matchoutput(out,
"Personality personality-does-not-exist, "
"archetype aquilon not found.",
command)
def test_300_missing_personality_stage(self):
gw = self.net["routing1"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.250.0", "--prefixlen", "23",
"--personality", "nostage", "--archetype", "aquilon",
"--personality_stage", "previous"]
out = self.notfoundtest(command)
self.matchoutput(out,
"Personality aquilon/nostage does not have stage "
"previous.",
command)
def test_300_bad_personality_stage(self):
gw = self.net["routing1"].usable[-1]
command = ["add", "static", "route", "--gateway", gw,
"--ip", "192.168.250.0", "--prefixlen", "23",
"--personality", "nostage", "--archetype", "aquilon",
"--personality_stage", "no-such-stage"]
out = self.badrequesttest(command)
self.matchoutput(out, "'no-such-stage' is not a valid personality "
"stage.", command)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestAddStaticRoute)
unittest.TextTestRunner(verbosity=2).run(suite)
| apache-2.0 | 6,306,508,862,787,653,000 | 45.864238 | 83 | 0.456652 | false |
YacineKhamis/Polymerge | polymerge.py | 1 | 2413 | import sys
import os
import binascii
import zipfile
import argparse
def createZipFile(fileToArchive):
path = fileToArchive + '.zip'
with zipfile.ZipFile(path, mode='w') as myZip:
myZip.write(fileToArchive, arcname=fileToArchive.split('/')[-1])
def stats(fileToAnalyze):
return os.stat(fileToAnalyze)
def appendTo(fileCombined, fileToAppend, pathToOutputFile):
f1 = open(fileCombined, 'rb')
fileData = f1.read()
f1.close()
f2 = open(fileToAppend, 'rb')
toAppendData = f2.read()
f2.close()
output = open(pathToOutputFile, 'wb')
output.write(fileData)
output.write(toAppendData)
output.close()
def printHexa(fileToRead):
"""
    Print the content of the file passed as a parameter in a user-friendly format.
    View inspired by modern hex editors: numbered lines, each containing 16 bytes.
"""
with open(fileToRead, 'rb') as binFile:
binFile.seek(0, 2)
numberBytes = binFile.tell()
j = 0
print('')
print('_____________________________________________________________')
for i in range(numberBytes):
if i % 16 == 0:
print('')
j += 1
print(format(j, '02X') + " : ", end='')
binFile.seek(i, 0)
data = binFile.read(1)
text = binascii.hexlify(data)
print(text.decode('utf-8'), end=' ')
print('')
print('__________________________________________________________________')
def MergingProcess(frontFile, toHideFile, outputFilename):
    createZipFile(toHideFile)
    # append the zip archive (not the raw file) so the output also opens as a
    # valid zip, matching the behaviour described in the argparse help below
    appendTo(frontFile, toHideFile + '.zip', outputFilename)
    os.remove(toHideFile + '.zip')
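# Example invocation (hypothetical file names):
#     python polymerge.py cover.jpg secret.txt -o merged.jpg
# merged.jpg should still open as a JPEG while also being readable as a zip
# archive containing secret.txt.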
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Polymerge script. Outputs a file that preserves its properties and \
embeds another file as a Zip archive.')
parser.add_argument('facadeFile')
parser.add_argument('hiddenFile')
parser.add_argument('-o', '--output')
parser.add_argument('-p', '--printFile', action="store_true")
args = parser.parse_args()
if args.printFile:
printHexa(args.facadeFile)
#printHexa(args.hiddenFile)
if args.output:
MergingProcess(args.facadeFile, args.hiddenFile, args.output.split('/')[-1])
else:
MergingProcess(args.facadeFile, args.hiddenFile, 'Polymerged_' + args.facadeFile.split('/')[-1])
| gpl-3.0 | 6,495,256,162,737,528,000 | 29.935897 | 116 | 0.593038 | false |
koakumaping/simple-blog | mysite.py | 1 | 3305 | #coding=utf-8
import web
import markdown
import model
from settings import *
from admin import check_login
########################################################################
class redirect:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
#----------------------------------------------------------------------
def GET(self):
""""""
web.seeother('/blog/1')
########################################################################
class index:
"""SHow Home Page"""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
pass
#----------------------------------------------------------------------
def GET(self):
""""""
active = 1
context = "Welcom to my Blog."
return model.render_template('main.html', context = context, active = active)
########################################################################
class show_scrap_all:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
#----------------------------------------------------------------------
def GET(self, id):
""""""
active = 2
NavNum = 7
id = int(id)
if id is None:
id = 1
results = db.query("SELECT COUNT(*) AS numbers FROM scrap WHERE is_deleted = 0")
pages_all = results[0].numbers
if pages_all % NavNum == 0:
pages = pages_all / NavNum
else:
pages = pages_all / NavNum + 1
offset = (id - 1) * NavNum
scrap = db.select('scrap', where = 'is_deleted = 0', limit=NavNum, offset = offset, order = 'id desc')
if len(scrap) == 0:
return 'No scrap!'
return model.render_template('blog/index.html', scrap = scrap, pages = pages, active = active, id = id)
########################################################################
class show_scrap:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
pass
#----------------------------------------------------------------------
def GET(self, title):
""""""
active = 2
#if web.ctx.ip != '127.0.0.1':
try:
results = db.select('scrap', what = 'file_type,counter,content', where = 'title = $title and is_deleted = 0', vars = locals())
results = results[0]
path = results.file_type
counter = results.counter
content = results.content
if 'md' in path:
scrap = markdown.markdown(content, extensions=['markdown.extensions.extra'])
#scrap = model.md2html(path)
else:
scrap = content
db.query('UPDATE scrap SET scrap.counter=scrap.counter+1 WHERE title=$title', vars = locals())
except Exception as e:
print str(e)
return "Markdown file not found!"
return model.render_template('blog/show_scrap.html', scrap = scrap, active = active, counter = counter) | gpl-2.0 | 1,463,311,195,832,444,700 | 27.747826 | 138 | 0.378215 | false |
simonkrogmann/planets | gui/scrollbar.py | 1 | 3472 | # -*- coding: cp1252 -*-
import Tkinter
class Scrollbar:
"""stellt eine Scrollbar für ein Canvas-Objekt bereit,
Parent muss dieses Canvas-Objekt als Attribut besitzen."""
def __init__(self, Parent, X):
self.Parent = Parent
self.X = X
self.Bar = self.Parent.Canvas.create_rectangle(0, 0, 2, 2, state = Tkinter.HIDDEN,
fill = "#444444", outline = "")
self.Parent.Canvas.bind("<Configure>", self.UpdateRegion)
self.Parent.Canvas.bind("<MouseWheel>", self.Wheel)
self.Parent.Canvas.tag_bind(self.Bar, "<Button-1>", self.ScrollBegin)
self.Parent.Canvas.tag_bind(self.Bar, "<B1-Motion>", self.ScrollMotion)
self.Scrolling = False
def Wheel(self, e):
"""scrollt die Ansicht entsprechend der Mausradbewegung"""
if self.Scrollable:
self.Parent.Canvas.yview_scroll(-e.delta/120, "units")
if self.Parent.Active:
self.Parent.Active[0].Motion(e)
self.UpdateBar()
def Offset(self):
"""gibt die Höhe des Bereiches zurück, der nach oben aus der Ansicht herausgescrollt ist"""
return self.Parent.Canvas.yview()[0] * self.Region
def UpdateRegion(self, e = None):
"""aktualisiert den scrollbaren Bereich"""
# Die Zahlen, die in dieser Methode addiert,
# werden gleichen Ungenauigkeiten im Canvas-Objekt aus.
# ein vorhandenes e weist auf Aufruf durch "configure"-event hin
# und eine Höhenveränderung des Canvas hin
if e:
self.Height = e.height + 8
        # determine the required height of the list
self.Region = self.Parent.Height() + 1
        # check whether a scrollbar is needed
if self.Region + 3 <= self.Height:
self.Parent.Canvas.config(scrollregion = (0, 0, 0, self.Height - 8))
self.Scrollable = False
self.Show(0)
self.Parent.Canvas.itemconfig(self.Bar, state = Tkinter.HIDDEN)
else:
self.Scrollable = True
self.Parent.Canvas.itemconfig(self.Bar, state = Tkinter.NORMAL)
self.Parent.Canvas.config(scrollregion = (0, 0, 0, self.Region))
self.UpdateBar()
def UpdateBar(self):
"""zeichnet die Scrollbar neu"""
Position = self.Parent.Canvas.yview()
Begin = self.Height * Position[0] + self.Offset()
End = self.Height * Position[1] + self.Offset()
self.Parent.Canvas.coords(self.Bar, self.X - 11, Begin, self.X - 3, End)
self.Parent.Canvas.tag_raise(self.Bar)
def ScrollBegin(self, e):
"""speichert die Position des Mausklicks beim Beginnen des Scrollens"""
if self.Scrollable:
self.DragHeight = float(e.y) / self.Height - self.Parent.Canvas.yview()[0]
def ScrollMotion(self, e):
"""zieht die neue Mausposition von der gepeicherten ab und
legt danach die Scrollrichtung und -weite fest"""
if self.Scrollable:
self.Parent.Canvas.yview_moveto(float(e.y) / self.Height - self.DragHeight)
self.UpdateBar()
def Show(self, Position):
"""scrollt zum Listenelement mit dem Index Position"""
if self.Scrollable:
self.Parent.Canvas.yview_moveto(Position / float(self.Region))
self.UpdateBar()
def Raise(self):
"""zeigt die Scrollbar im Vordergrund an"""
self.Parent.Canvas.tag_raise(self.Bar)
| mit | -3,531,107,996,626,620,400 | 40.831325 | 99 | 0.613767 | false |
globocom/database-as-a-service | dbaas/dashboard/views.py | 1 | 2317 | import logging
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from physical.models import DatabaseInfra
from logical.models import Database
from util import get_credentials_for
from dbaas_credentials.models import CredentialType
from physical.models import Environment
LOG = logging.getLogger(__name__)
@login_required
def dashboard(request):
env_id = request.GET.get('env_id')
engine_type = request.GET.get('engine_type')
dbinfra_list = DatabaseInfra.objects.all().order_by('name')
url_par = "?"
if env_id or engine_type:
if env_id:
url_par += "env_id=" + str(env_id) + "&"
dbinfra_list = dbinfra_list.filter(environment__id=env_id)
if engine_type:
url_par += "engine_type=" + str(engine_type) + "&"
dbinfra_list = dbinfra_list.filter(engine__engine_type__name=engine_type)
    paginator = Paginator(dbinfra_list, 100)
    try:
        page = int(request.GET.get('page', '1'))
    except:
        page = 1
try:
dbinfra = paginator.page(page)
except(EmptyPage, InvalidPage):
dbinfra = paginator.page(paginator.num_pages)
return render_to_response("dashboard/dashboard.html", {'dbinfra': dbinfra, 'url_par': url_par}, context_instance=RequestContext(request))
@login_required
def databaseinfra(request, infra_id):
dbinfra = DatabaseInfra.objects.get(pk=infra_id)
databases = Database.objects.filter(databaseinfra=dbinfra)
return render_to_response("dashboard/databaseinfra.html", {'infra': dbinfra, 'databases': databases}, context_instance=RequestContext(request))
@login_required
def sofia_dashboard(request):
credential = get_credentials_for(
environment=Environment.objects.first(),
credential_type=CredentialType.GRAFANA
)
sofia_dashboard = "{}/{}?var-datasource={}".format(
credential.endpoint,
credential.get_parameter_by_name('sofia_dbaas_dashboard'),
credential.get_parameter_by_name('datasource')
)
return render_to_response("dashboard/sofia_dashboard.html", {'sofia_dashboard':sofia_dashboard}, context_instance=RequestContext(request))
| bsd-3-clause | -7,126,134,115,814,491,000 | 34.646154 | 147 | 0.702201 | false |
mpfeppat/mpfeppat | tools/simplegraph.py | 1 | 1860 | # -*- coding: utf-8 -*-
import random
import numpy
def createCGraph(n):
Edges = []
Vertices = []
counter = n
while counter > 0:
if Vertices == []:
v0 = vertex('0')
v1 = vertex('1')
e0 = edge(v0,v1)
Vertices.append(v0)
Vertices.append(v1)
Edges.append(e0)
else :
vs = random.choice(Vertices)
ve = random.choice(Vertices)
while ve == vertex('0') :
ve = random.choice(Vertices)
e = edge(vs,ve)
prob = random.randint(0,100)
if vs == ve or e in Edges or prob > 75 :
l = len(Vertices)
name = str(l)
nv = vertex(name)
ne = edge(vs,nv)
Vertices.append(nv)
Edges.append(ne)
else :
Edges.append(e)
counter = counter - 1
k = len(Vertices)
M = numpy.zeros((k,k),dtype = object)
for ed in Edges:
vs = int(ed.startingvertex.name)
ve = int(ed.endingvertex.name)
M[vs,ve] = 1
return Vertices, Edges, M
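# Usage sketch (illustrative): grow a random connected digraph and get back
# its vertex list, edge list and adjacency matrix:
#     Vertices, Edges, M = createCGraph(10)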
class vertex:
def __init__(self,name):
self.name = name
def __eq__(self,other):
return self.name == other.name
def __str__(self):
return self.name
def __repr__(self):
return self.name
class edge:
def __init__(self,startingvertex,endingvertex):
self.startingvertex = startingvertex
self.endingvertex = endingvertex
def __eq__(self,other):
return self.startingvertex == other.startingvertex and self.endingvertex == other.endingvertex
def __str__(self):
return self.startingvertex.name+'-->'+self.endingvertex.name
def __repr__(self):
return self.startingvertex.name+'-->'+self.endingvertex.name
| apache-2.0 | 7,364,874,894,004,989,000 | 25.571429 | 102 | 0.52043 | false |
anselmobd/fo2 | src/manutencao/migrations/0005_maquina.py | 1 | 1069 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2019-07-12 18:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('manutencao', '0004_fo2_man_unidade_tempo_loaddata'),
]
operations = [
migrations.CreateModel(
name='Maquina',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(db_index=True, max_length=50)),
('slug', models.SlugField()),
('descricao', models.CharField(max_length=250, verbose_name='Descrição')),
('tipo_maquina', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manutencao.TipoMaquina', verbose_name='Tipo de máquina')),
],
options={
'verbose_name': 'Máquina',
'db_table': 'fo2_man_maquina',
},
),
]
| mit | 3,103,723,040,811,429,400 | 34.5 | 158 | 0.578404 | false |
cheral/orange3 | Orange/widgets/utils/concurrent.py | 2 | 23465 | """\
OWConcurrent
============
General helper functions and classes for Orange Canvas
concurrent programming
"""
import threading
import atexit
import logging
from contextlib import contextmanager
from AnyQt.QtCore import (
Qt, QObject, QMetaObject, QThreadPool, QThread, QRunnable,
QEventLoop, QCoreApplication, QEvent, Q_ARG
)
from AnyQt.QtCore import pyqtSignal as Signal, pyqtSlot as Slot
_log = logging.getLogger(__name__)
@contextmanager
def locked(mutex):
"""
A context manager for locking an instance of a QMutex.
"""
mutex.lock()
try:
yield
finally:
mutex.unlock()
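# Usage sketch (QMutex is not imported by this module; the import below is
# an assumption for illustration):
#     from AnyQt.QtCore import QMutex
#     mutex = QMutex()
#     with locked(mutex):
#         ...  # critical section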
class _TaskDepotThread(QThread):
"""
    A special 'depot' thread used to transfer Task instances into threads
started by a QThreadPool.
"""
_lock = threading.Lock()
_instance = None
def __new__(cls):
if _TaskDepotThread._instance is not None:
raise RuntimeError("Already exists")
return QThread.__new__(cls)
def __init__(self):
QThread.__init__(self)
self.start()
# Need to handle queued method calls from this thread.
self.moveToThread(self)
atexit.register(self._cleanup)
def _cleanup(self):
self.quit()
self.wait()
@staticmethod
def instance():
with _TaskDepotThread._lock:
if _TaskDepotThread._instance is None:
_TaskDepotThread._instance = _TaskDepotThread()
return _TaskDepotThread._instance
@Slot(object, object)
def transfer(self, obj, thread):
"""
Transfer `obj` (:class:`QObject`) instance from this thread to the
target `thread` (a :class:`QThread`).
"""
assert obj.thread() is self
assert QThread.currentThread() is self
obj.moveToThread(thread)
def __del__(self):
self._cleanup()
class _TaskRunnable(QRunnable):
"""
    A QRunnable for running a :class:`Task` by a :class:`ThreadExecutor`.
"""
def __init__(self, future, task, args, kwargs):
QRunnable.__init__(self)
self.future = future
self.task = task
self.args = args
self.kwargs = kwargs
self.eventLoop = None
def run(self):
"""
Reimplemented from `QRunnable.run`
"""
self.eventLoop = QEventLoop()
self.eventLoop.processEvents()
        # Move the task to the current thread so its events, signals, slots
# are triggered from this thread.
assert self.task.thread() is _TaskDepotThread.instance()
QMetaObject.invokeMethod(
self.task.thread(), "transfer", Qt.BlockingQueuedConnection,
Q_ARG(object, self.task),
Q_ARG(object, QThread.currentThread())
)
self.eventLoop.processEvents()
# Schedule task.run from the event loop.
self.task.start()
# Quit the loop and exit when task finishes or is cancelled.
self.task.finished.connect(self.eventLoop.quit)
self.task.cancelled.connect(self.eventLoop.quit)
self.eventLoop.exec_()
class _Runnable(QRunnable):
"""
    A QRunnable for running plain functions by a :class:`ThreadExecutor`.
"""
def __init__(self, future, func, args, kwargs):
QRunnable.__init__(self)
self.future = future
self.func = func
self.args = args
self.kwargs = kwargs
def run(self):
"""
Reimplemented from QRunnable.run
"""
try:
if not self.future.set_running_or_notify_cancel():
# Was cancelled
return
try:
result = self.func(*self.args, **self.kwargs)
except BaseException as ex:
self.future.set_exception(ex)
else:
self.future.set_result(result)
except BaseException:
_log.critical("Exception in worker thread.", exc_info=True)
class ThreadExecutor(QObject):
"""
    ThreadExecutor provides an interface for running tasks
in a thread pool.
:param QObject parent:
Executor's parent instance.
:param QThreadPool threadPool:
Thread pool to be used by the instance of the Executor. If `None`
then ``QThreadPool.globalInstance()`` will be used.
"""
def __init__(self, parent=None, threadPool=None):
QObject.__init__(self, parent)
if threadPool is None:
threadPool = QThreadPool.globalInstance()
self._threadPool = threadPool
self._depot_thread = None
self._futures = []
self._shutdown = False
self._state_lock = threading.Lock()
def _get_depot_thread(self):
if self._depot_thread is None:
self._depot_thread = _TaskDepotThread.instance()
return self._depot_thread
def submit(self, func, *args, **kwargs):
"""
        Schedule the `func(*args, **kwargs)` to be executed and return a
:class:`Future` instance representing the result of the computation.
"""
with self._state_lock:
if self._shutdown:
raise RuntimeError("Cannot schedule new futures after " +
"shutdown.")
if isinstance(func, Task):
f, runnable = self.__make_task_runnable(func)
else:
f = Future()
runnable = _Runnable(f, func, args, kwargs)
self._futures.append(f)
f._watchers.append(self._future_state_change)
self._threadPool.start(runnable)
return f
def submit_task(self, task):
with self._state_lock:
if self._shutdown:
raise RuntimeError("Cannot schedule new futures after " +
"shutdown.")
f, runnable = self.__make_task_runnable(task)
self._futures.append(f)
f._watchers.append(self._future_state_change)
self._threadPool.start(runnable)
return f
def __make_task_runnable(self, task):
if task.thread() is not QThread.currentThread():
raise ValueError("Can only submit Tasks from it's own " +
"thread.")
if task.parent() is not None:
raise ValueError("Can not submit Tasks with a parent.")
task.moveToThread(self._get_depot_thread())
# Use the Task's own Future object
f = task.future()
runnable = _TaskRunnable(f, task, (), {})
return (f, runnable)
def map(self, func, *iterables):
futures = [self.submit(func, *args) for args in zip(*iterables)]
for f in futures:
yield f.result()
def shutdown(self, wait=True):
"""
Shutdown the executor and free all resources. If `wait` is True then
wait until all pending futures are executed or cancelled.
"""
with self._state_lock:
self._shutdown = True
if wait:
# Wait until all futures have completed.
for future in list(self._futures):
try:
future.exception()
except (TimeoutError, CancelledError):
pass
def _future_state_change(self, future, state):
# Remove futures when finished.
if state == Future.Finished:
self._futures.remove(future)
class ExecuteCallEvent(QEvent):
"""
    Represents a function call from the event loop (used by :class:`Task`
to schedule the :func:`Task.run` method to be invoked)
"""
ExecuteCall = QEvent.registerEventType()
def __init__(self):
QEvent.__init__(self, ExecuteCallEvent.ExecuteCall)
class Task(QObject):
"""
"""
started = Signal()
finished = Signal()
cancelled = Signal()
resultReady = Signal(object)
exceptionReady = Signal(Exception)
def __init__(self, parent=None, function=None):
QObject.__init__(self, parent)
self.function = function
self._future = Future()
def run(self):
if self.function is None:
raise NotImplementedError
else:
return self.function()
def start(self):
QCoreApplication.postEvent(self, ExecuteCallEvent())
def future(self):
return self._future
def result(self, timeout=None):
return self._future.result(timeout)
def _execute(self):
try:
if not self._future.set_running_or_notify_cancel():
self.cancelled.emit()
return
self.started.emit()
try:
result = self.run()
except BaseException as ex:
self._future.set_exception(ex)
self.exceptionReady.emit(ex)
else:
self._future.set_result(result)
self.resultReady.emit(result)
self.finished.emit()
except BaseException:
_log.critical("Exception in Task", exc_info=True)
def customEvent(self, event):
if event.type() == ExecuteCallEvent.ExecuteCall:
self._execute()
else:
QObject.customEvent(self, event)
def futures_iter(futures):
for f in futures:
yield f.result()
class TimeoutError(Exception):
pass
class CancelledError(Exception):
pass
class Future(object):
"""
Represents a result of an asynchronous computation.
"""
Pending, Canceled, Running, Finished = 1, 2, 4, 8
def __init__(self):
self._watchers = []
self._state = Future.Pending
self._condition = threading.Condition()
self._result = None
self._exception = None
self._done_callbacks = []
def _set_state(self, state):
if self._state != state:
self._state = state
for watcher in self._watchers:
watcher(self, state)
def cancel(self):
"""
        Attempt to cancel the call. Return `False` if the call is
already in progress and cannot be canceled, otherwise return `True`.
"""
with self._condition:
if self._state in [Future.Running, Future.Finished]:
return False
elif self._state == Future.Canceled:
return True
else:
self._set_state(Future.Canceled)
self._condition.notify_all()
self._invoke_callbacks()
return True
def cancelled(self):
"""
        Return `True` if the call was successfully cancelled.
"""
with self._condition:
return self._state == Future.Canceled
def done(self):
"""
Return `True` if the call was successfully cancelled or finished
running.
"""
with self._condition:
return self._state in [Future.Canceled, Future.Finished]
def running(self):
"""
Return True if the call is currently being executed.
"""
with self._condition:
return self._state == Future.Running
def _get_result(self):
if self._exception:
raise self._exception
else:
return self._result
def result(self, timeout=None):
"""
        Return the result of the :class:`Future` computation. If `timeout`
        is `None` the call will block until the computation has finished
        or been cancelled.
"""
with self._condition:
if self._state == Future.Finished:
return self._get_result()
elif self._state == Future.Canceled:
raise CancelledError()
self._condition.wait(timeout)
if self._state == Future.Finished:
return self._get_result()
elif self._state == Future.Canceled:
raise CancelledError()
else:
raise TimeoutError()
def exception(self, timeout=None):
"""
Return the exception instance (if any) resulting from the execution
of the :class:`Future`. Can raise a :class:`CancelledError` if the
computation was cancelled.
"""
with self._condition:
if self._state == Future.Finished:
return self._exception
elif self._state == Future.Canceled:
raise CancelledError()
self._condition.wait(timeout)
if self._state == Future.Finished:
return self._exception
elif self._state == Future.Canceled:
raise CancelledError()
else:
raise TimeoutError()
def set_result(self, result):
"""
Set the result of the computation (called by the worker thread).
"""
with self._condition:
self._result = result
self._set_state(Future.Finished)
self._condition.notify_all()
self._invoke_callbacks()
def set_exception(self, exception):
"""
Set the exception instance that was raised by the computation
(called by the worker thread).
"""
with self._condition:
self._exception = exception
self._set_state(Future.Finished)
self._condition.notify_all()
self._invoke_callbacks()
def add_done_callback(self, fn):
with self._condition:
if self._state not in [Future.Finished, Future.Canceled]:
self._done_callbacks.append(fn)
return
# Already done
fn(self)
def set_running_or_notify_cancel(self):
with self._condition:
if self._state == Future.Canceled:
return False
elif self._state == Future.Pending:
self._set_state(Future.Running)
return True
else:
raise Exception()
def _invoke_callbacks(self):
for callback in self._done_callbacks:
try:
callback(self)
except Exception:
pass
class StateChangedEvent(QEvent):
"""
Represents a change in the internal state of a :class:`Future`.
"""
StateChanged = QEvent.registerEventType()
def __init__(self, state):
QEvent.__init__(self, StateChangedEvent.StateChanged)
self._state = state
def state(self):
"""
Return the new state (Future.Pending, Future.Cancelled, ...).
"""
return self._state
class FutureWatcher(QObject):
"""
A `FutureWatcher` class provides a convenient interface to the
:class:`Future` instance using Qt's signals.
:param :class:`Future` future:
A :class:`Future` instance to watch.
:param :class:`QObject` parent:
Object's parent instance.
"""
#: The future was cancelled.
cancelled = Signal()
#: The future has finished.
finished = Signal()
#: The future has started computation.
started = Signal()
def __init__(self, future, parent=None):
QObject.__init__(self, parent)
self._future = future
self._future._watchers.append(self._stateChanged)
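        # Typical wiring (sketch; `future` comes from a ThreadExecutor and
        # `on_finished` is a slot of your own):
        #     watcher = FutureWatcher(future)
        #     watcher.finished.connect(on_finished)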
def isCancelled(self):
"""
Was the future cancelled.
"""
return self._future.cancelled()
def isDone(self):
"""
Is the future done (was cancelled or has finished).
"""
return self._future.done()
def isRunning(self):
"""
Is the future running (i.e. has started).
"""
return self._future.running()
def isStarted(self):
"""
Has the future computation started.
"""
return self._future.running()
def result(self):
"""
Return the result of the computation.
"""
return self._future.result()
def exception(self):
"""
Return the exception instance or `None` if no exception was raised.
"""
return self._future.exception()
def customEvent(self, event):
"""
Reimplemented from `QObject.customEvent`.
"""
if event.type() == StateChangedEvent.StateChanged:
if event.state() == Future.Canceled:
self.cancelled.emit()
elif event.state() == Future.Running:
self.started.emit()
elif event.state() == Future.Finished:
self.finished.emit()
return
return QObject.customEvent(self, event)
def _stateChanged(self, future, state):
"""
The `future` state has changed (called by :class:`Future`).
"""
ev = StateChangedEvent(state)
if self.thread() is QThread.currentThread():
QCoreApplication.sendEvent(self, ev)
else:
QCoreApplication.postEvent(self, ev)
class methodinvoke(object):
"""
    Create a QObject method wrapper that invokes the method asynchronously
in the object's own thread.
:param obj:
A QObject instance.
:param str method:
The method name.
:param tuple arg_types:
A tuple of positional argument types.
"""
def __init__(self, obj, method, arg_types=()):
self.obj = obj
self.method = method
self.arg_types = tuple(arg_types)
def __call__(self, *args):
args = [Q_ARG(atype, arg) for atype, arg in zip(self.arg_types, args)]
QMetaObject.invokeMethod(
self.obj, self.method, Qt.QueuedConnection,
*args
)
import unittest
class TestFutures(unittest.TestCase):
def test_futures(self):
f = Future()
self.assertEqual(f.done(), False)
self.assertEqual(f.running(), False)
self.assertTrue(f.cancel())
self.assertTrue(f.cancelled())
with self.assertRaises(CancelledError):
f.result()
with self.assertRaises(CancelledError):
f.exception()
f = Future()
f.set_running_or_notify_cancel()
with self.assertRaises(TimeoutError):
f.result(0.1)
with self.assertRaises(TimeoutError):
f.exception(0.1)
f = Future()
f.set_running_or_notify_cancel()
f.set_result("result")
self.assertEqual(f.result(), "result")
self.assertEqual(f.exception(), None)
f = Future()
f.set_running_or_notify_cancel()
f.set_exception(Exception("foo"))
with self.assertRaises(Exception):
f.result()
class Ref():
def __init__(self, ref):
self.ref = ref
def set(self, ref):
self.ref = ref
# Test that done callbacks are called.
called = Ref(False)
f = Future()
f.add_done_callback(lambda f: called.set(True))
f.set_result(None)
self.assertTrue(called.ref)
# Test that callbacks are called when cancelled.
called = Ref(False)
f = Future()
f.add_done_callback(lambda f: called.set(True))
f.cancel()
self.assertTrue(called.ref)
# Test that callbacks are called immediately when the future is
# already done.
called = Ref(False)
f = Future()
f.set_result(None)
f.add_done_callback(lambda f: called.set(True))
self.assertTrue(called.ref)
count = Ref(0)
f = Future()
f.add_done_callback(lambda f: count.set(count.ref + 1))
f.add_done_callback(lambda f: count.set(count.ref + 1))
f.set_result(None)
self.assertEqual(count.ref, 2)
# Test that the callbacks are called with the future as argument.
done_future = Ref(None)
f = Future()
f.add_done_callback(lambda f: done_future.set(f))
f.set_result(None)
self.assertIs(f, done_future.ref)
class TestExecutor(unittest.TestCase):
def setUp(self):
self.app = QCoreApplication([])
def test_executor(self):
executor = ThreadExecutor()
f1 = executor.submit(pow, 100, 100)
f2 = executor.submit(lambda: 1 / 0)
f3 = executor.submit(QThread.currentThread)
        self.assertEqual(f1.result(), pow(100, 100))
with self.assertRaises(ZeroDivisionError):
f2.result()
self.assertIsInstance(f2.exception(), ZeroDivisionError)
self.assertIsNot(f3.result(), QThread.currentThread())
def test_methodinvoke(self):
executor = ThreadExecutor()
state = [None, None]
class StateSetter(QObject):
@Slot(object)
def set_state(self, value):
state[0] = value
state[1] = QThread.currentThread()
def func(callback):
callback(QThread.currentThread())
obj = StateSetter()
f1 = executor.submit(func, methodinvoke(obj, "set_state", (object,)))
f1.result()
# So invoked method can be called
QCoreApplication.processEvents()
self.assertIs(state[1], QThread.currentThread(),
"set_state was called from the wrong thread")
self.assertIsNot(state[0], QThread.currentThread(),
"set_state was invoked in the main thread")
executor.shutdown(wait=True)
def test_executor_map(self):
executor = ThreadExecutor()
r = executor.map(pow, list(range(1000)), list(range(1000)))
results = list(r)
self.assertTrue(len(results) == 1000)
class TestFutureWatcher(unittest.TestCase):
def setUp(self):
self.app = QCoreApplication([])
def test_watcher(self):
executor = ThreadExecutor()
f = executor.submit(QThread.currentThread)
watcher = FutureWatcher(f)
if f.cancel():
self.assertTrue(watcher.isCancelled())
executor.shutdown()
class TestTask(unittest.TestCase):
def setUp(self):
self.app = QCoreApplication([])
def test_task(self):
results = []
task = Task(function=QThread.currentThread)
task.resultReady.connect(results.append)
task.start()
self.app.processEvents()
self.assertSequenceEqual(results, [QThread.currentThread()])
results = []
thread = QThread()
thread.start()
task = Task(function=QThread.currentThread)
task.moveToThread(thread)
self.assertIsNot(task.thread(), QThread.currentThread())
self.assertIs(task.thread(), thread)
task.resultReady.connect(results.append, Qt.DirectConnection)
task.start()
f = task.future()
self.assertIsNot(f.result(3), QThread.currentThread())
self.assertIs(f.result(3), results[-1])
def test_executor(self):
executor = ThreadExecutor()
f = executor.submit(QThread.currentThread)
self.assertIsNot(f.result(3), QThread.currentThread())
f = executor.submit(lambda: 1 / 0)
with self.assertRaises(ZeroDivisionError):
f.result()
results = []
task = Task(function=QThread.currentThread)
task.resultReady.connect(results.append, Qt.DirectConnection)
f = executor.submit(task)
self.assertIsNot(f.result(3), QThread.currentThread())
executor.shutdown()
| bsd-2-clause | 1,586,268,903,746,335,700 | 26.444444 | 78 | 0.572555 | false |
vrkrishn/FBHacks | src/Common/FacebookVideo.py | 1 | 2094 | class FacebookVideo(object):
# [FBProperty: length]
def getLength(self):
return self.videoLength
# [FBProperty: place]
def getPlace(self):
return self.place
# [FBProperty: source]
def getSource(self):
return self.source
# [FBProperty: title]
def getTitle(self):
return self.title
# [FBProperty: content_category]
# BEAUTY_FASHION, BUSINESS, CARS_TRUCKS, COMEDY, CUTE_ANIMALS, ENTERTAINMENT, FAMILY, FOOD_HEALTH, HOME, LIFESTYLE, MUSIC, NEWS, POLITICS, SCIENCE, SPORTS, TECHNOLOGY, VIDEO_GAMING, OTHER
def getContentCategory(self):
return self.contentCategory
# [FBProperty: universal_video_id]
def getUniversalVideoId(self):
return self.universalVideoId
# [FBProperty: live_status]
def getLiveStatus(self):
return self.liveStatus
# [FBProperty: created_time]
def getCreatedTime(self):
return self.createdTime
# [FBProperty: description]
def getDescription(self):
return self.description
# [FBProperty: id]
def getId(self):
return self.id
# Edge Methods
# [FBProperty: auto_generated_captions]
def getAutoGeneratedCaptions(self):
return self.autoGeneratedCaptions
# [FBProperty: captions]
def getCaptions(self):
return self.captions
# [FBProperty: comments]
def getComments(self):
return self.comments
# [FBProperty: likes]
def getLikes(self):
return self.likes
# [FBProperty: reactions]
def getReactions(self):
return self.reactions
# [FBProperty: sponsor_tags]
def getSponsorTags(self):
return self.sponsorTags
# [FBProperty: tags]
def getTags(self):
return self.tags
    # [FBProperty: insights]
    def getInsights(self):
        return self.insights
    # [FBProperty: video_insights]
    def getVideoInsights(self):
        return self.videoInsights
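# --- Illustrative usage sketch (not part of the original module) ---
# FacebookVideo defines no __init__, so attributes such as videoLength or
# contentCategory are assumed to be populated elsewhere (e.g. by a Graph API
# response parser); the assignments below are hypothetical stand-ins.
def _example_facebook_video():
    video = FacebookVideo()
    video.title = "Launch highlights"
    video.videoLength = 95.0
    video.contentCategory = "TECHNOLOGY"
    return "%s (%ss, %s)" % (video.getTitle(), video.getLength(),
                             video.getContentCategory())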
| mit | 2,399,925,756,954,293,000 | 24.864198 | 191 | 0.602197 | false |
RedHatQE/cfme_tests | cfme/tests/containers/test_reports.py | 1 | 15145 | # -*- coding: utf-8 -*-
import re
from traceback import format_exc
import pytest
from wrapanapi.utils import eval_strings
from cfme.containers.provider import ContainersProvider
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.wait import TimedOutError
pytestmark = [
pytest.mark.usefixtures('setup_provider'),
pytest.mark.meta(
server_roles='+ems_metrics_coordinator +ems_metrics_collector +ems_metrics_processor'),
pytest.mark.tier(1),
pytest.mark.long_running_env,
pytest.mark.provider([ContainersProvider], scope='function')
]
@pytest.fixture(scope='module')
def node_hardwares_db_data(appliance):
"""Grabbing hardwares table data for nodes"""
db = appliance.db.client
hardwares_table = db['hardwares']
container_nodes = db['container_nodes']
out = {}
for node in db.session.query(container_nodes).all():
out[node.name] = hardwares_table.__table__.select().where(
hardwares_table.id == node.id
).execute().fetchone()
return out
def get_vpor_data_by_name(vporizer_, name):
return [vals for vals in vporizer_ if vals.resource_name == name]
def get_report(appliance, menu_name, candu=False):
"""Queue a report by menu name , wait for finish and return it"""
try:
saved_report = appliance.collections.reports.instantiate(
type='Configuration Management',
subtype='Containers',
menu_name=menu_name,
is_candu=candu
).queue(wait_for_finish=True)
except TimedOutError:
pytest.skip('Could not find report "{}" in containers.\nTraceback:\n{}'
.format(menu_name, format_exc()))
return saved_report
def test_container_reports_base_on_options(soft_assert, appliance):
"""This test verifies that all containers options are available in the report 'based on'
Dropdown in the report creation
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
view = navigate_to(appliance.collections.reports, 'Add')
for base_on in (
'Chargeback for Images',
'Container Images',
'Container Services',
'Container Templates',
'Containers',
re.compile(r'Performance - Container\s*Nodes'),
re.compile(r'Performance - Container\s*Projects'),
'Performance - Containers'
):
compare = (base_on.match if hasattr(base_on, 'match') else base_on.__eq__)
option = [opt for opt in view.base_report_on.all_options
if compare(str(opt.text))]
soft_assert(option, 'Could not find option "{}" for base report on.'.format(base_on))
def test_report_pods_per_ready_status(appliance, soft_assert, provider):
"""Testing 'Pods per Ready Status' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
pods_per_ready_status = provider.pods_per_ready_status()
report = get_report(appliance, 'Pods per Ready Status')
for row in report.data.rows:
name = row['# Pods per Ready Status']
readiness_ui = bool(eval_strings([row['Ready Condition Status']]).pop())
if soft_assert(name in pods_per_ready_status, # this check based on BZ#1435958
'Could not find pod "{}" in openshift.'
.format(name)):
expected_readiness = bool(all(pod for pod in pods_per_ready_status.get(name, False)))
soft_assert(expected_readiness == readiness_ui,
'For pod "{}" expected readiness is "{}" Found "{}"'
.format(name, expected_readiness, readiness_ui))
def test_report_nodes_by_capacity(appliance, soft_assert, node_hardwares_db_data):
"""Testing 'Nodes By Capacity' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By Capacity')
for row in report.data.rows:
hw = node_hardwares_db_data[row['Name']]
soft_assert(hw.cpu_total_cores == int(row['CPU Cores']),
'Number of CPU cores is wrong: expected {}'
' got {}'.format(hw.cpu_total_cores, row['CPU Cores']))
# The following block is to convert whatever we have to MB
memory_ui = float(re.sub(r'[a-zA-Z,]', '', row['Memory']))
if 'gb' in row['Memory'].lower():
memory_mb_ui = memory_ui * 1024
# Shift hw.memory_mb to GB, round to the number of decimals of memory_mb_db
# and shift back to MB:
memory_mb_db = round(hw.memory_mb / 1024.0,
len(str(memory_mb_ui).split('.')[1])) * 1024
else: # Assume it's MB
memory_mb_ui = memory_ui
memory_mb_db = hw.memory_mb
soft_assert(memory_mb_ui == memory_mb_db,
'Memory (MB) is wrong for node "{}": expected {} got {}'
                    .format(row['Name'], memory_mb_db, memory_mb_ui))
def test_report_nodes_by_cpu_usage(appliance, soft_assert, vporizer):
"""Testing 'Nodes By CPU Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By CPU Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_cpu_usage_rate_average, 2)
usage_report = round(float(row['CPU Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for node "{}": expected {} got {}'
.format(row['Name'], usage_db, usage_report))
def test_report_nodes_by_memory_usage(appliance, soft_assert, vporizer):
"""Testing 'Nodes By Memory Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By Memory Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_mem_usage_absolute_average, 2)
usage_report = round(float(row['Memory Usage (%)']), 2)
soft_assert(usage_db == usage_report,
                    'Memory usage is wrong for node "{}": expected {} got {}.'
.format(row['Name'], usage_db, usage_report))
def test_report_number_of_nodes_per_cpu_cores(appliance, soft_assert, node_hardwares_db_data):
"""Testing 'Number of Nodes per CPU Cores' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes by Number of CPU Cores')
for row in report.data.rows:
hw = node_hardwares_db_data[row['Name']]
soft_assert(hw.cpu_total_cores == int(row['Hardware Number of CPU Cores']),
'Hardware Number of CPU Cores is wrong for node "{}": expected {} got {}.'
.format(row['Name'], hw.cpu_total_cores, row['Hardware Number of CPU Cores']))
def test_report_projects_by_number_of_pods(appliance, soft_assert):
"""Testing 'Projects by Number of Pods' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
container_projects = appliance.db.client['container_projects']
container_pods = appliance.db.client['container_groups']
report = get_report(appliance, 'Projects by Number of Pods')
for row in report.data.rows:
pods_count = len(container_pods.__table__.select().where(
container_pods.container_project_id ==
container_projects.__table__.select().where(
container_projects.name == row['Project Name']).execute().fetchone().id
).execute().fetchall())
soft_assert(pods_count == int(row['Number of Pods']),
'Number of pods is wrong for project "{}". expected {} got {}.'
.format(row['Project Name'], pods_count, row['Number of Pods']))
def test_report_projects_by_cpu_usage(appliance, soft_assert, vporizer):
"""Testing 'Projects By CPU Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects By CPU Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_cpu_usage_rate_average, 2)
usage_report = round(float(row['CPU Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for project "{}": expected {} got {}'
.format(row['Name'], usage_db, usage_report))
def test_report_projects_by_memory_usage(appliance, soft_assert, vporizer):
"""Testing 'Projects By Memory Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects By Memory Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_mem_usage_absolute_average, 2)
usage_report = round(float(row['Memory Usage (%)']), 2)
soft_assert(usage_db == usage_report,
                    'Memory usage is wrong for project "{}": expected {} got {}.'
.format(row['Name'], usage_db, usage_report))
def test_report_pod_counts_for_container_images_by_project(appliance, provider, soft_assert):
"""Testing 'Pod counts For Container Images by Project' report,\
see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Pod counts For Container Images by Project', candu=True)
pods_api = provider.mgmt.list_pods()
pods_per_project = {}
for project in provider.mgmt.list_project_names():
pods_per_project[project] = [
pd for pd in pods_api if pd.metadata.namespace == project]
rows = list(report.data.rows)
for row in rows:
project_name, pod_name = row['Project Name'], row['Pod Name']
        pod = [pd for pd in pods_per_project[project_name]
               if pd.metadata.name == pod_name]
soft_assert(pod, 'Could not find pod "{}" of project "{}" in the report.'
.format(pod_name, project_name))
pod = pod.pop()
for pd in pods_per_project[project_name]:
expected_image = pd.spec.containers[0].image
pod_images = [r['Image Name'] for r in rows if r['Pod Name'] == pod_name]
# Use 'in' since the image name in the API may include also registry and tag
            soft_assert([img_nm for img_nm in pod_images if img_nm in expected_image],
'Could not find image "{}" in pod "{}". Pod images in report: {}'
.format(expected_image, pod_name, pod_images))
def test_report_recently_discovered_pods(appliance, provider, soft_assert):
"""Testing 'Recently Discovered Pods' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Recently Discovered Pods')
pods_in_report = [row['Name'] for row in report.data.rows]
pods_per_ready_status = provider.pods_per_ready_status()
for pod in pods_per_ready_status.keys():
soft_assert(pod in pods_in_report,
'Could not find pod "{}" in report.'.format(pod))
def test_report_number_of_images_per_node(appliance, provider, soft_assert):
"""Testing 'Number of Images per Node' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
pods_api = provider.mgmt.list_pods()
report = get_report(appliance, 'Number of Images per Node', candu=True)
report_data = list(report.data.rows)
for pod in pods_api:
expected_image = pod.spec.containers[0].image
node = pod.spec.node_name
pod_name = pod.metadata.name
pod_images = [row['Image Name'] for row in report_data
if row['Pod Name'] == pod_name and
row['Node Name'] == node]
# Use 'in' since the image name in the API may include also registry and tag
        is_image = [img_nm for img_nm in pod_images if img_nm in expected_image]
soft_assert(is_image,
'Expected image for pod "{0}" in node {1} is "{2}". found images: {3}'
.format(pod_name, node, expected_image, pod_images))
def test_report_projects_by_number_of_containers(appliance, provider, soft_assert):
"""Testing 'Projects by Number of Containers' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects by Number of Containers')
pods_api = provider.mgmt.list_pods()
# Since there is no provider column, in case of more than 1 provider we get some projects
# multiple times in the report. Because of that for each project name we are collecting
# all the 'Containers Count' columns and then checking that the containers count that we
# fetched from the API is found _in_ the counts under this project name
projects_containers_count = {}
for row in report.data.rows:
if row['Project Name'] not in projects_containers_count:
projects_containers_count[row['Project Name']] = []
projects_containers_count[row['Project Name']].append(int(row['Containers Count']))
for project_name, containers_counts in projects_containers_count.items():
containers_counts_api = sum(
[len(pod.spec.containers) for pod in pods_api
if pod.metadata.namespace == project_name]
)
soft_assert(containers_counts_api in containers_counts,
'Expected containers count for project {} should be {}. Found {} instead.'
                    .format(project_name, containers_counts_api, containers_counts))
| gpl-2.0 | -4,475,754,717,884,814,300 | 38.440104 | 98 | 0.624497 | false |
glehmann/uptodate | uptodate/plugins/copy.py | 1 | 1817 | #!/usr/bin/env python
#coding: iso-8859-15
#
# Copyright (C) 2005 Gaëtan Lehmann <[email protected]>
#
# this file is part of uptodate
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
from uptodate import *
usage = _("uptodate [options] copy name name")
summary = _("Copy a module")
description = _("""Copy is used in order to copy a module.
Example:
uptodate copy itk-app InsightToolkit-Applications""")
names = ['copy', 'cp']
options = []
def runCommand(opts, args, conf, out) :
if len(args) != 2 :
raise InvalidNbOfArgsException(usage)
module, new = args
if module not in conf.sections() :
raise ModuleNotFoundException(module)
if not opts.force and conf.has_section(new) :
if opts.batch or not yes(_("Do you want to remove the module %s?") % new, False) :
raise ModuleExistsException(new)
else :
opts.force = True
# remove new section if it already exist and --force is used
if opts.force and conf.has_section(new) :
conf.remove_section(new)
conf.add_section(new)
for prop in conf.options(module) :
conf.set(new, prop, conf.get(module, prop))
if opts.verbose :
printModule(conf, new, sys.stderr, True)
| gpl-2.0 | 1,046,055,466,410,766,600 | 29.283333 | 84 | 0.72262 | false |
leihaha/Django-By-Example_demo | bookmarks/images/forms.py | 1 | 1308 | from django import forms
from .models import Image
from urllib import request
from django.core.files.base import ContentFile
from django.utils.text import slugify
class ImageCreateForm(forms.ModelForm):
class Meta:
model = Image
fields = ('title', 'url', 'description')
widgets = {
'url': forms.HiddenInput,
}
def clean_url(self):
url = self.cleaned_data['url']
valid_extensions = ['jpg', 'jpeg']
extension = url.rsplit('.', 1)[1].lower()
if extension not in valid_extensions:
raise forms.ValidationError('The given URL does not match valid image extensions.')
return url
def save(self, force_insert=False,
force_update=False,
commit=True):
image = super(ImageCreateForm, self).save(commit=False)
image_url = self.cleaned_data['url']
image_name = '{}.{}'.format(slugify(image.title),
image_url.rsplit('.', 1)[1].lower())
        # Download the image from the given URL
response = request.urlopen(image_url)
image.image.save(image_name,
ContentFile(response.read()),
save=False)
if commit:
image.save()
        return image
 | gpl-2.0 | 4,444,864,217,305,913,000 | 31.948718 | 95 | 0.570872 | false |
rbarrois/xelpaste | libpaste/conf.py | 1 | 1199 | # -*- coding: utf-8 -*-
"""Default settings for libpaste."""
from __future__ import unicode_literals
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import appconf
from . import enums
class LibPasteConf(appconf.AppConf):
class Meta:
prefix = 'libpaste'
BASE_URL = 'https://example.org'
SITENAME = 'example.org'
# Expiry
EXPIRE_CHOICES = (
(enums.EXPIRE_ONETIME, _(u'One Time Snippet')),
(enums.EXPIRE_ONE_HOUR, _(u'In one hour')),
(enums.EXPIRE_ONE_WEEK, _(u'In one week')),
(enums.EXPIRE_ONE_MONTH, _(u'In one month')),
# ('never', _(u'Never')),
)
EXPIRE_DEFAULT = enums.EXPIRE_ONE_MONTH
# Lexer
LEXER_DEFAULT = 'python'
LEXER_LIST = enums.DEFAULT_LEXER_LIST
LEXER_WORDWRAP = ('freetext', 'text', 'rst')
# Snippets
SLUG_LENGTH = 4
SLUG_CHOICES = 'abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNOPQRSTUVWXYZ1234567890'
MAX_CONTENT_LENGTH = 250 * 1024 * 1024
BADWORD_TRIGGERS = {
'http': 5,
}
MAX_FILE_LENGTH = 10 * 1024 * 1024 # 10MB
UPLOAD_TO = 'snippets'
# Users
MAX_SNIPPETS_PER_USER = 15
ONETIME_LIMIT = 2
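# --- Illustrative usage sketch (not part of the original module) ---
# django-appconf merges these defaults into django.conf.settings under the
# "LIBPASTE_" prefix once the app is loaded, so consumers read them like any
# other Django setting. The attribute names below follow that convention.
def _example_read_settings():
    from django.conf import settings
    return (settings.LIBPASTE_EXPIRE_DEFAULT,
            settings.LIBPASTE_MAX_SNIPPETS_PER_USER)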
| mit | -8,430,553,452,395,966,000 | 23.469388 | 81 | 0.619683 | false |
tarunbod/dotfiles | scripts/todo.py | 1 | 1774 | #!/usr/bin/env python3
import os.path, sys, json
from collections import OrderedDict
todo_list = OrderedDict()
file_path = os.path.expanduser('~/.todo_list.json')
if os.path.isfile(file_path):
with open(file_path, 'r') as todo_list_file:
todo_list.update(json.load(todo_list_file))
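# Illustrative on-disk format, inferred from the code below (keys are 1-based
# task numbers stored as strings):
# {"1": {"name": "buy milk", "completed": false},
#  "2": {"name": "write report", "completed": true}}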
args = sys.argv[1:]
def usage():
usage = """Usage:
todo.py add <task>
todo.py list
todo.py del <task number>
todo.py done <task number>"""
print(usage)
sys.exit(0)
if len(args) < 1:
args = ["list"]
task_count = len(todo_list)
if args[0] == "add":
if len(args) != 2:
usage()
name = args[1]
todo_list[str(task_count + 1)] = {
"name": name,
"completed": False
}
print("Added " + args[1] + " to todo list")
elif args[0] == "list":
if task_count == 0:
print("Woohoo, nothing to do!")
else:
for i in range(1, task_count + 1):
task = todo_list[str(i)]
print("%d) %s (%s)" % (i, task["name"], "✔" if task["completed"] else "╳"))
elif args[0] == "del":
if len(args) != 2:
usage()
idx = args[1]
if idx in todo_list:
del todo_list[idx]
        keys = sorted(todo_list, key=int)  # numeric order; a plain sort would put "10" before "2"
for i in range(0, task_count - 1):
key = keys[i]
todo_list[str(i + 1)] = todo_list[key]
if int(key) >= task_count:
del todo_list[key]
else:
print("Task #%s does not exist" % idx)
elif args[0] == "done":
if len(args) != 2:
usage()
idx = args[1]
if idx in todo_list:
todo_list[idx]["completed"] = True
else:
print("Task #%s does not exist" % idx)
else:
usage()
with open(file_path, 'w') as todo_list_file:
json.dump(todo_list, todo_list_file)
| mit | 8,956,292,231,730,313,000 | 22.918919 | 88 | 0.537853 | false |
att-comdev/armada | armada/exceptions/tiller_exceptions.py | 1 | 5060 | # Copyright 2017 The Armada Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from armada.exceptions.base_exception import ArmadaBaseException as ex
class TillerException(ex):
'''Base class for Tiller exceptions and error handling.'''
message = 'An unknown Tiller error occurred.'
class TillerServicesUnavailableException(TillerException):
'''
Exception for tiller service being unavailable.
**Troubleshoot:**
*Coming Soon*
'''
message = 'Tiller services unavailable.'
class ChartCleanupException(TillerException):
'''Exception that occurs during chart cleanup.'''
def __init__(self, chart_name):
message = 'An error occurred during cleanup while removing {}'.format(
chart_name)
super(ChartCleanupException, self).__init__(message)
class ListChartsException(TillerException):
'''Exception that occurs when listing charts'''
message = 'There was an error listing the Helm chart releases.'
class PostUpdateJobDeleteException(TillerException):
    '''Exception that occurs when a job deletion fails.'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobDeleteException, self).__init__(message)
class PostUpdateJobCreateException(TillerException):
'''
Exception that occurs when a job creation fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobCreateException, self).__init__(message)
class PreUpdateJobDeleteException(TillerException):
'''
    Exception that occurs when a job deletion fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobDeleteException, self).__init__(message)
class PreUpdateJobCreateException(TillerException):
'''Exception that occurs when a job creation fails.'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobCreateException, self).__init__(message)
class ReleaseException(TillerException):
'''
Exception that occurs when a release fails to install, upgrade, delete,
or test.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, status, action):
til_msg = getattr(status.info, 'Description').encode()
message = 'Failed to {} release: {} - Tiller Message: {}'.format(
action, name, til_msg)
super(ReleaseException, self).__init__(message)
class ChannelException(TillerException):
'''
Exception that occurs during a failed gRPC channel creation
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to create gRPC channel.'
class GetReleaseStatusException(TillerException):
'''
Exception that occurs during a failed Release Testing.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, release, version):
        message = 'Failed to get status of release {} (version {})'.format(
            release, version)
super(GetReleaseStatusException, self).__init__(message)
class GetReleaseContentException(TillerException):
'''Exception that occurs during a failed Release Testing'''
def __init__(self, release, version):
        message = 'Failed to get content of release {} (version {})'.format(
            release, version)
super(GetReleaseContentException, self).__init__(message)
class TillerPodNotFoundException(TillerException):
'''
Exception that occurs when a tiller pod cannot be found using the labels
specified in the Armada config.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, labels):
message = 'Could not find Tiller pod with labels "{}"'.format(labels)
super(TillerPodNotFoundException, self).__init__(message)
class TillerPodNotRunningException(TillerException):
'''
Exception that occurs when no tiller pod is found in a running state.
**Troubleshoot:**
*Coming Soon*
'''
message = 'No Tiller pods found in running state'
class TillerVersionException(TillerException):
'''
Exception that occurs during a failed Release Testing
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to get Tiller Version'
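# --- Illustrative usage sketch (not part of the original module) ---
# Every class above derives from TillerException, so callers can catch the
# base class to handle any Tiller failure uniformly. The `tiller` handle and
# its get_release_status() method below are hypothetical.
def _example_handle_release(tiller, release):
    try:
        return tiller.get_release_status(release)
    except GetReleaseStatusException:
        return None   # the specific status-lookup failure
    except TillerException:
        raise         # any other Tiller error bubbles up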
| apache-2.0 | -5,653,126,201,895,981,000 | 25.492147 | 78 | 0.669763 | false |
caogecym/muer | muer/settings_stage.py | 1 | 6764 | # Django settings for forum project.
import os.path
import forum
DEBUG = False
TEMPLATE_DEBUG = False
# for OpenID auth
ugettext = lambda s: s
LOGIN_URL = '/%s' % (ugettext('login/'))
ADMINS = (
('Yuming Cao', '[email protected]'),
)
SEND_BROKEN_LINK_EMAILS = True
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'muer_db',
'USER': 'caogecym',
'PASSWORD': '',
'HOST': 'muer-stage.herokuapp.com',
'PORT': '5432',
'OPTIONS': {
'autocommit': True,
}
},
}
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['OPTIONS'] = {
'autocommit': True,
}
# Registration regulation
MIN_USERNAME_LENGTH = 4
EMAIL_UNIQUE = True
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh'
PROJECT_PATH = os.path.abspath(os.path.dirname(__name__))
LOCALE_PATHS = (
'%s/locale' % PROJECT_PATH,
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# /home/my_site/forum
FORUM_ROOT = os.path.abspath(forum.__path__[0])
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_PATH, 'public'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# AMAZON S3 config
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_STORAGE_BUCKET_NAME = 'muer-stage'
# fix manage.py collectstatic command to only upload changed files instead of all files
AWS_PRELOAD_METADATA = True
STATIC_URL = 'https://muer-stage.s3.amazonaws.com/'
ADMIN_MEDIA_PREFIX = 'https://muer-stage.s3.amazonaws.com/static/admin/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.environ['SECRET_KEY']
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'muer.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'muer.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'templates').replace('\\', '/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.admin',
'django.contrib.admindocs',
'forum',
'south',
'storages',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'muer': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
}
}
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
| gpl-2.0 | -5,950,150,926,862,549,000 | 29.062222 | 95 | 0.672235 | false |
jcu-eresearch/TDH-rich-data-capture | jcudc24provisioning/controllers/method_schema_scripts.py | 1 | 7697 | """
Converts MethodSchema's (data configurations) into Deform schemas. There is also a helper function
(get_method_schema_preview) for turning the created schema into HTML for preview purposes on the methods page.
"""
from collections import namedtuple
from datetime import date
from beaker.cache import cache_region
from deform.form import Form
import random
import string
import colander
import deform
from jcudc24provisioning.models import DBSession
from jcudc24provisioning.models.ca_model import CAModel
from jcudc24provisioning.models.project import MethodSchema, field_types, Project
from jcudc24provisioning.models.file_upload import upload_widget
__author__ = 'xjc01266'
# Indexes of custom_field's, field_types.
INTEGER_INDEX = 0
DECIMAL_INDEX = 1
TEXT_INPUT_INDEX = 2
TEXT_AREA_INDEX = 3
CHECKBOX_INDEX = 4
SELECT_INDEX = 5
RADIO_INDEX = 6
FILE_INDEX = 7
WEBSITE_INDEX = 8
EMAIL_INDEX = 9
PHONE_INDEX = 10
DATE_INDEX = 11
HIDDEN_INDEX = 12
class DummySession(object):
"""
Pretend/dummy session that allows file upload widgets to work in the HTML output from get_method_schema_preview.
"""
def setdefault(self, arg1, arg2):
pass
def changed(self):
pass
#@cache_region('long_term')
def get_method_schema_preview(method_schema_id):
"""
Create and render the method schema identified by method_schema_id as HTML.
:param method_schema_id: ID of the MethodSchema to preview.
:return: Deform rendered HTML form for the identified MethodSchema (Note: The <form> element is removed).
"""
method_schema = DBSession.query(MethodSchema).filter_by(id=method_schema_id).first()
if method_schema is None:
return "<p>Please create your data mapping using the standardised and custom fields.</p>"
model_schema = DataTypeSchema(method_schema)
# Create a dummy request object to make file upload widgets display properly for preview purposes.
settings = {'workflows.files': "./"}
Registry = namedtuple('registry', 'settings')
Request = namedtuple('registry', ['registry', 'session'])
dummy_request = Request(registry=Registry(settings=settings), session=DummySession())
model_schema._bind({'request': dummy_request}) # Use _bind instead of bind so the schema isn't cloned
form = Form(model_schema, action="")
display = form.render({})
display = display[display.index(">")+1:].replace("</form>", "").strip()
return display
class DataTypeModel(CAModel):
def __init__(self, schema=None, appstruct=None):
self.id = None
if schema is not None:
attrs = {child.name: None for child in schema.children}
self.__dict__.update(attrs)
model_class = type(schema.name, (DataTypeModel,), attrs)
self._model_class = model_class
super(DataTypeModel, self).__init__(schema=schema, appstruct=appstruct)
class DataTypeSchema(colander.SchemaNode):
"""
Base Deform schema that dynamically adds all elements of a MethodSchema (including parent schema elements).
"""
def __init__(self, method_schema):
params = {}
self.__dict__['params'] = params
super(DataTypeSchema, self).__init__(colander.Mapping('ignore'), **params)
if isinstance(method_schema, MethodSchema):
fields = get_schema_fields(method_schema)
for field in fields:
self.add(field)
#def method_schema_to_model(method_schema):
# """
# This is another way of generating the schema from MethodSchema models.
# """
# fields = get_schema_fields(method_schema)
# model_schema = colander._SchemaMeta(str(method_schema.name), (colander._SchemaNode,), fields)
# return model_schema
#@cache_region('long_term')
def get_schema_fields(method_schema):
"""
Create all fields/elements of the MethodSchema, this includes:
- Hierarchically add all elements of parent schemas.
- Add Deform element display attributes such as description and placeholder.
- Dynamically create the correct widget with associated settings such as select values and mask regex.
:param method_schema: MethodSchema to generate a Deform schema from.
:return: Deform schema (that can be rendered to HTML as a form).
"""
fields = []
for parent in method_schema.parents:
fields.extend(get_schema_fields(parent))
for field in method_schema.custom_fields:
field_type = field.type == field_types[CHECKBOX_INDEX][0] and colander.Boolean() or \
field.type == field_types[DATE_INDEX][0] and colander.DateTime() or \
colander.String()
python_type = field.type == field_types[INTEGER_INDEX][0] and int or\
field.type == field_types[DECIMAL_INDEX][0] and float or\
field.type == field_types[FILE_INDEX][0] and file or\
field.type == field_types[DATE_INDEX][0] and date or\
str
if field.values is not None:
value_items = field.values.split(",")
values = ()
for value in value_items:
values = values + ((value.strip(", ").lower().replace(" ", "_"), value),)
# Website regex is basic but should validate blatant mistakes such as user misinterpreting the field for email
widget = field.type == field_types[INTEGER_INDEX][0] and deform.widget.TextInputWidget(regex_mask="^\\\\d*$", strip=False) or\
field.type == field_types[DECIMAL_INDEX][0] and deform.widget.TextInputWidget(regex_mask="^(((\\\\.\\\\d*)?)|(\\\\d+(\\\\.\\\\d*)?))$", strip=False) or\
field.type == field_types[TEXT_AREA_INDEX][0] and deform.widget.TextAreaWidget() or\
field.type == field_types[CHECKBOX_INDEX][0] and deform.widget.CheckboxWidget() or\
field.type == field_types[SELECT_INDEX][0] and deform.widget.SelectWidget(values=values) or\
field.type == field_types[RADIO_INDEX][0] and deform.widget.RadioChoiceWidget(values=values) or\
field.type == field_types[FILE_INDEX][0] and upload_widget or\
field.type == field_types[WEBSITE_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="(http://)?(www\.)?([^@. ]+)(\.[^.@ ]+)(\.[^@. ]+)?(\.[^@. ]+)?(\.[^@. ]+)?", strip=False) or \
field.type == field_types[EMAIL_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="[^@ ]+@[^@ ]+\.[^@ ]+", strip=False) or\
field.type == field_types[PHONE_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="(\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4})", strip=False) or\
field.type == field_types[DATE_INDEX][0] and deform.widget.DateInputWidget() or\
field.type == field_types[HIDDEN_INDEX][0] and deform.widget.HiddenWidget() or\
deform.widget.TextInputWidget()
children = []
params = {
'name': field.internal_name,
'title': "%s%s" % (field.name, field.units and " (%s)" % field.units or ""),
'widget': widget,
'description': field.description,
'placeholder': field.placeholder,
'default': field.default,
'python_type': python_type,
}
fields.append(colander.SchemaNode(field_type, *children, **params))
return fields
| bsd-3-clause | -6,555,447,298,200,760,000 | 41.732955 | 169 | 0.621021 | false |
cretaceous-creature/jsk_mbzirc_task3 | jsk_network_tools/scripts/silverhammer_lowspeed_receiver.py | 1 | 4109 | #!/usr/bin/env python
from jsk_network_tools.msg import FC2OCS, OCS2FC
from jsk_network_tools.silverhammer_util import *
from threading import Lock, Thread
from socket import *
from struct import Struct
import os
import rospy
import signal
import sys
import roslib
from roslib.message import get_message_class
from std_msgs.msg import Time
import diagnostic_updater
from diagnostic_msgs.msg import DiagnosticStatus
class SilverHammerUDPListener():
def __init__(self, server, buffer_size, format, message, pub):
self.server = server
self.format = format
self.pub = pub
self.message = message
self.buffer_size = buffer_size
def run(self):
recv_data, addr = self.server.recvfrom(self.buffer_size)
msg = unpackMessage(recv_data, self.format, self.message)
self.pub.publish(msg)
print "received:", msg
class SilverHammerLowspeedReceiver():
def __init__(self):
message_class_str = rospy.get_param("~message",
"jsk_network_tools/FC2OCS")
try:
self.receive_message = get_message_class(message_class_str)
except:
raise Exception("invalid topic type: %s"%message_class_str)
self.lock = Lock()
self.launched_time = rospy.Time.now()
self.diagnostic_updater = diagnostic_updater.Updater()
self.diagnostic_updater.setHardwareID("none")
self.diagnostic_updater.add("LowspeedReceiver", self.diagnosticCallback)
self.received_num = 0
self.receive_port = rospy.get_param("~receive_port", 1024)
self.receive_ip = rospy.get_param("~receive_ip", "192.168.8.101")
self.receive_buffer = rospy.get_param("~receive_buffer_size", 250)
self.socket_server = socket(AF_INET, SOCK_DGRAM)
self.socket_server.settimeout(None)
self.socket_server.bind((self.receive_ip, self.receive_port))
self.receive_format = msgToStructFormat(self.receive_message())
self.pub = rospy.Publisher("~output", self.receive_message)
self.last_received_time = rospy.Time(0)
self.last_received_time_pub = rospy.Publisher(
"~last_received_time", Time)
self.last_publish_output_time = rospy.Time(0)
self.last_publish_output_time_pub = rospy.Publisher(
"~last_publish_output_time", Time)
self.diagnostic_timer = rospy.Timer(rospy.Duration(1.0 / 10),
self.diagnosticTimerCallback)
def diagnosticTimerCallback(self, event):
self.diagnostic_updater.update()
# and publish time
with self.lock:
self.last_publish_output_time_pub.publish(self.last_publish_output_time)
self.last_received_time_pub.publish(self.last_received_time)
def diagnosticCallback(self, stat):
# always OK
stat.summary(DiagnosticStatus.OK, "OK")
with self.lock:
now = rospy.Time.now()
stat.add("Uptime [sec]",
(now - self.launched_time).to_sec())
stat.add("Time from the last reception [sec]",
(now - self.last_received_time).to_sec())
stat.add("Time from the last publish ~output [sec]",
(now - self.last_publish_output_time).to_sec())
stat.add("UDP address", self.receive_ip)
stat.add("UDP port", self.receive_port)
return stat
def run(self):
while not rospy.is_shutdown():
recv_data, addr = self.socket_server.recvfrom(self.receive_buffer)
msg = unpackMessage(recv_data, self.receive_format,
self.receive_message)
with self.lock:
self.last_received_time = rospy.Time.now()
self.pub.publish(msg)
with self.lock:
self.last_publish_output_time = rospy.Time.now()
rospy.logdebug("received:", msg)
if __name__ == "__main__":
rospy.init_node("silverhammer_lowspeed_receiver")
rc = SilverHammerLowspeedReceiver()
rc.run()
| apache-2.0 | -3,182,837,485,987,124,000 | 41.802083 | 84 | 0.619615 | false |
openqt/algorithms | leetcode/python/ac/lc872-leaf-similar-trees.py | 1 | 1464 | # coding=utf-8
import unittest
"""872. Leaf-Similar Trees
https://leetcode.com/problems/leaf-similar-trees/description/
Consider all the leaves of a binary tree. From left to right order, the
values of those leaves form a _leaf value sequence._

For example, in the given tree above, the leaf value sequence is `(6, 7, 4, 9,
8)`.
Two binary trees are considered _leaf-similar_ if their leaf value sequence
is the same.
Return `true` if and only if the two given trees with head nodes `root1` and
`root2` are leaf-similar.
**Note:**
* Both of the given trees will have between `1` and `100` nodes.
Similar Questions:
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def leafSimilar(self, root1, root2):
"""
:type root1: TreeNode
:type root2: TreeNode
:rtype: bool
"""
l, r = [], []
self._similar(root1, l)
self._similar(root2, r)
return l == r
def _similar(self, node, leaf: list):
if node:
if not (node.left or node.right):
leaf.append(node.val)
else:
self._similar(node.left, leaf)
self._similar(node.right, leaf)
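# --- Illustrative usage sketch (not part of the original file) ---
# Rebuilds the sample tree from the problem statement; TreeNode is a minimal
# stand-in for the commented-out LeetCode definition above.
def _example_leaf_similar():
    class TreeNode(object):
        def __init__(self, x, left=None, right=None):
            self.val, self.left, self.right = x, left, right
    t1 = TreeNode(3,
                  TreeNode(5, TreeNode(6), TreeNode(2, TreeNode(7), TreeNode(4))),
                  TreeNode(1, TreeNode(9), TreeNode(8)))   # leaves: 6, 7, 4, 9, 8
    t2 = TreeNode(1, TreeNode(6), TreeNode(2, TreeNode(7), TreeNode(4)))  # leaves: 6, 7, 4
    s = Solution()
    return s.leafSimilar(t1, t1), s.leafSimilar(t1, t2)  # (True, False)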
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | -7,882,174,381,876,940,000 | 22.612903 | 78 | 0.603825 | false |
uname/bleproxy | PC/BleProxyDesk/view/BleListItem.py | 1 | 1110 | #-*- coding: utf-8 -*-
from PyQt4 import QtGui
from ui.AppIcons import *
class BleListItem(QtGui.QListWidgetItem):
def __init__(self, name, address, rssi):
QtGui.QListWidgetItem.__init__(self)
self.name, self.address, self.rssi = name, address, rssi
self.setBleInfo(rssi)
self.conflag = False
def setConnected(self, flag):
self.conflag = flag
self.setBackgroundColor(QtGui.QColor(flag and 0x00ff00 or 0xffffff))
def isConnected(self):
return self.conflag
def setBleInfo(self, rssi):
iconPath = ":app/icons/app/sig_1.png"
if rssi > -45:
iconPath = ":app/icons/app/sig_4.png"
elif rssi > -60:
iconPath = ":app/icons/app/sig_3.png"
elif rssi > -80:
iconPath = ":app/icons/app/sig_2.png"
self.setIcon(QtGui.QIcon(iconPath))
        self.setText("%s\n%s %ddBm\n" % (self.name, self.address, rssi))
def updateRssi(self, rssi):
self.setBleInfo(rssi)
def getAddress(self):
        return self.address
 | apache-2.0 | 3,593,046,291,192,624,600 | 29.861111 | 76 | 0.579279 | false |
Tintri/tintri-python-sdk | examples/set_qos_tgc_service_groups.py | 1 | 5668 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Tintri, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from tintri.common import TintriServerError
from tintri.v310 import Tintri
from tintri.v310 import VirtualMachineQoSConfig
"""
This Python script sets the QoS of the VMs in the first TGC service group with
more than 2 VMs.
Command usage:
set_qos_tgc_service_groups.py server_name user_name password min_value max_value
Where:"
server_name - name of a TGC server
user_name - user name used to login into the TGC server
password - password for the user
min_value - the QoS minimum value for the VM
max_value - the QoS maximum value for the VM
"""
# For exhaustive messages on console, make it to True; otherwise keep it False
debug_mode = False
def print_with_prefix(prefix, out):
print(prefix + out)
return
def print_debug(out):
if debug_mode:
print_with_prefix("[DEBUG] : ", out)
return
def print_info(out):
print_with_prefix("[INFO] : ", out)
return
def print_error(out):
print_with_prefix("[ERROR] : ", out)
return
# Sets the Minimum and maximum QoS values on a TGC service group.
def set_qos(tintri, sg_uuid, new_min_value, new_max_value):
# Create new QoS object with the fields to be changed
modify_qos_info = VirtualMachineQoSConfig()
modify_qos_info.minNormalizedIops = int(new_min_value)
modify_qos_info.maxNormalizedIops = int(new_max_value)
print_debug("IOPS: " + str(modify_qos_info.minNormalizedIops) + ", " + str(modify_qos_info.maxNormalizedIops))
# Set the QoS in the service group.
tintri.update_service_group_qos_config(modify_qos_info, sg_uuid)
# Apply the QoS values that were set for the service group above.
tintri.apply_service_group_qos_config(sg_uuid)
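# Illustrative call (assuming a logged-in `tintri` handle and a service group
# UUID obtained as in the main section below):
#
#     set_qos(tintri, sg_uuid, 100, 1000)   # min 100, max 1000 normalized IOPS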
# main
if len(sys.argv) < 6:
print("\nsets the QoS of the VMs in a TGC service group with more than 2 VMs.\n")
print("Usage: " + sys.argv[0] + " server_name user_name password min_value max_value\n")
print("Where:")
print(" server_name - name of a TGC server")
print(" user_name - user name used to login into the TGC server")
print(" password - password for the TGC and VMstore users")
print(" min_value - the QoS minimum value for the VM")
print(" max_value - the QoS maximum value for the VM")
sys.exit(-1)
server_name = sys.argv[1]
user_name = sys.argv[2]
password = sys.argv[3]
new_min_value = sys.argv[4]
new_max_value = sys.argv[5]
try:
# instantiate the Tintri server.
tintri = Tintri(server_name)
# Get version and product
version_info = tintri.version
if (not tintri.is_tgc()):
raise TintriServerError(0, cause="Tintri server needs to be Tintri Global Center, not a " + product_name)
preferred_version = version_info.preferredVersion
print("API Version: " + preferred_version)
versions = preferred_version.split(".")
major_version = versions[0]
minor_version = int(versions[1])
if major_version != "v310":
raise TintriServerError(0, cause="Incorrect major version: " + major_version + ". Should be v310.")
if minor_version < 31:
raise TintriServerError(0, cause="Incorrect minor Version: " + minor_version + ". Should be 31 or greater")
# Login to TGC
tintri.login(user_name, password)
except TintriServerError as tse:
print_error(tse.__str__())
sys.exit(2)
try:
# Get a list of service groups
service_groups = tintri.get_service_groups()
num_service_groups = service_groups.absoluteTotal
if num_service_groups == 0:
raise TintriServerError(0, cause="No Service Groups present")
print_info(str(num_service_groups) + " Service Groups present")
# Initialze the member list
sg_uuid = ""
found = False
# Look for a qualifying service group
count = 1
for sg in service_groups:
sg_name = sg.name
sg_uuid = sg.uuid.uuid
sg_member_count = sg.memberCount
print_info(str(count) + ": " + sg_name + "(" + str(sg_member_count) + "): " + sg_uuid)
if sg_member_count >= 2:
found = True
break
count += 1
if not found:
raise TintriServerError(0, cause="No service groups matching the criertia, member count >= 2.")
# Set the QoS on the service group.
set_qos(tintri, sg_uuid, new_min_value, new_max_value)
except TintriServerError as tse:
print_error(tse.__str__())
tintri.logout()
sys.exit(3)
# All pau, log out
tintri.logout()
| bsd-3-clause | -6,672,245,908,238,499,000 | 32.341176 | 116 | 0.678899 | false |
exobrain-wisekb/wisekb-management-platform | wisekb-uima-ducc/bin/properties.py | 2 | 8155 | #!/usr/bin/python
import os
import re
import platform
import string
import zipfile
# -----------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -----------------------------------------------------------------------
class PropertiesException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class Property:
def __init__(self, k, v, c):
self.k = k # key
self.v = v # value
self.c = c # comments
self.orig_v = v
def reset(self):
self.v = self.orig_v
def __str__(self):
return str(self.k) + '=' + str(self.v)
class Properties:
def __init__(self):
self.props = {}
self.builtin = {}
self.keys = []
self.comments = []
#
# Create builtins corresponding to some of the java properties.
#
# We allow expansion on java system properties. It's obviously not possible to
# do most of them but these guys may have a use e.g. to put ducc_ling into
# architecture-specific places.
#
(system, node, release, version, machine, processor) = platform.uname()
if ( system == 'Darwin' ):
self.builtin['os.arch'] = 'x86_64'
self.builtin['os.name'] = 'Mac OS X'
elif ( system == 'Linux' ):
if ( machine == 'ppc64' ):
self.builtin['os.arch'] = 'ppc64'
self.builtin['os.name'] = 'Linux'
elif ( machine == 'x86_64' ):
self.builtin['os.arch'] = 'amd64'
self.builtin['os.name'] = 'Linux'
elif ( machine == 'ppc64le' ):
self.builtin['os.arch'] = 'ppc64le'
self.builtin['os.name'] = 'Linux'
#
# Expand all ${} values. The search order is:
# 1 look in this properties file
# 2 look in the environment
# 3 look in a subset of the Java system properties (os.name & os.arch)
#
def do_subst(self, st):
key = None
p = re.compile("\\$\\{[a-zA-Z0-9_\\.\\-]+\\}")
ndx = 0
response = st.strip()
m = p.search(response, ndx)
while ( m != None ):
key = m.group()[2:-1]
val = None
if ( self.has_key(key) ):
val = self.get(key)
elif ( os.environ.has_key(key) ):
val = os.environ[key]
elif (self.builtin.has_key(key) ):
val = self.builtin[key]
if ( val != None ):
response = string.replace(response, m.group() , val)
ndx = m.start()+1
m = p.search(response, ndx)
return response
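    # Illustrative expansion (assumed values): with this file defining
    # root = /opt and the environment providing USER=alice, on a Linux
    # x86_64 host do_subst('${root}/${USER}/${os.arch}') yields
    # '/opt/alice/amd64'.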
def mkitem(self, line):
#
# First deal with line comments so we can preserve them on write
#
if ( line.startswith('#') ):
self.comments.append(line)
return False
if ( line.startswith('//') ):
self.comments.append(line)
return False
if ( line == '' ):
return False
#
# Now strip off embedded comments, these are lost, but they're not valid
# for java props anyway.
#
ndx = line.find('#') # remove comments - like the java DuccProperties
if ( ndx >= 0 ):
line = line[0:ndx] # strip the comment
ndx = line.find('//') # remove comments - like the java DuccProperties
if ( ndx >= 0 ):
line = line[0:ndx] # strip the comment
line = line.strip() # clear leading and trailing whitespace
if ( line == '' ):
            return False
mobj = re.search('[ =:]+', line)
if ( mobj ):
key = line[:mobj.start()].strip()
val = line[mobj.end():].strip()
# print 'NEXT', mobj.start(), 'END', mobj.end(), 'KEY', key, 'VAL', val
# val = self.do_subst(val) # we'll do lazy subst on get instead
self.props[key] = Property(key, val, self.comments)
if ( key in self.keys ):
self.keys.remove(key)
self.keys.append(key)
self.comments = []
else:
self.props[line] = Property(line, '', self.comments)
self.keys.append(line)
self.comments = []
#
    # Load reads a properties file and adds its contents to the
    # hash. It may be called several times; each call updates
    # the internal hash, thus building it up. The input file is
# in the form of a java-like properties file.
#
def load(self, propsfile):
if ( not os.path.exists(propsfile) ):
raise PropertiesException(propsfile + ' does not exist and cannot be loaded.')
f = open(propsfile);
for line in f:
self.mkitem(line.strip())
f.close()
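    # Example (hypothetical file names): load() may be called repeatedly
    # to layer configuration, later files overriding earlier keys:
    #   p = Properties()
    #   p.load('default.properties')
    #   p.load_if_exists('site.properties')   # silently skipped if absent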
# read a jar manifest into a properties entity
def load_from_manifest(self, jarfile):
z = zipfile.ZipFile(jarfile)
items = z.read('META-INF/MANIFEST.MF').split('\n')
for item in items:
self.mkitem(item)
#
# Try to load a properties file. Just be silent if it doesn't exist.
#
def load_if_exists(self, propsfile):
if ( os.path.exists(propsfile) ):
return self.load(propsfile)
#
# Put something into the hash with an optional comment
#
def put(self, key, value, comment=[]):
self.props[key] = Property(key, value, comment)
self.keys.append(key)
#
# Put a Property object into the map
#
def put_property(self, p):
self.props[p.k] = p
self.keys.append(p.k)
#
# Get a value from the hash
#
def get(self, key):
if ( self.props.has_key(key) ):
return self.do_subst(self.props[key].v) # we'll do lazy subst on get instead
return None
#
# Get a Property object for manipulation (k, v, comment)
#
def get_property(self, key):
if ( self.props.has_key(key) ):
return self.props[key] # note no expansion.
return None
#
# Remove an item if it exists
#
def delete(self, key):
if ( self.props.has_key(key) ):
del self.props[key]
self.keys.remove(key)
#
    # Write the hash as a Java-like properties file
#
def write(self, propsfile):
f = open(propsfile, 'w')
for k in self.keys:
p = self.props[k]
v = p.v
c = p.c
for cc in c:
f.write(cc + '\n')
f.write(k + ' = ' + str(v) + '\n\n')
f.close()
#
# return a shallow copy of the dictionary
#
def copy_dictionary(self):
return self.props.copy()
#
# Return the entries (Property list) in the dictionary
#
def items(self):
return self.props.items()
#
# The keys, in the order as defined in the input file
#
def get_keys(self):
return self.keys
#
# check to see if the key exists in the dictionary
#
def has_key(self, key):
return self.props.has_key(key)
#
# Return the length of the dictionary
#
def __len__(self):
return len(self.props)
| apache-2.0 | 5,952,035,096,709,450,000 | 29.657895 | 91 | 0.526548 | false |
mxgnene01/itester | itester/common/termlogcolor.py | 1 | 5853 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Meng xiangguo <[email protected]>
#
# H A P P Y H A C K I N G !
# _____ ______
# ____==== ]OO|_n_n__][. | |]
# [________]_|__|________)< |MENG|
# oo oo 'oo OOOO-| oo\_ ~o~~~o~'
# +--+--+--+--+--+--+--+--+--+--+--+--+--+
# 17/5/27 7:54 PM
from __future__ import print_function
import os
import sys
import datetime
'''
Reference: https://pypi.python.org/pypi/termcolor
'''
__all__ = [ 'colored', 'cprint' ]
VERSION = (1, 1, 0)
ATTRIBUTES = dict(
list(zip([
'bold',
'dark',
'',
'underline',
'blink',
'',
'reverse',
'concealed'
],
list(range(1, 9))
))
)
del ATTRIBUTES['']
HIGHLIGHTS = dict(
list(zip([
'on_grey',
'on_red',
'on_green',
'on_yellow',
'on_blue',
'on_magenta',
'on_cyan',
'on_white'
],
list(range(40, 48))
))
)
COLORS = dict(
list(zip(['grey','red','green','yellow'],[47, 41, 42, 43]))
)
END = '\033[0m'
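# ANSI escape layout used below (assumes an ANSI-capable terminal):
#   '\033[<code>;30;1m' selects <code> (a color/highlight/attribute code),
#   a black foreground and bold; END resets all attributes. For example,
#   colored('x', 'red') yields '\033[41;30;1mx\033[0m'.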
def colored(text, color=None, on_color=None, attrs=None):
"""Colorize text.
Available text colors:
red, green, yellow, grey.
Available text highlights:
        on_grey, on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
Available attributes:
bold, dark, underline, blink, reverse, concealed.
Example:
        colored('Hello, World!', 'red', 'on_grey', ['bold', 'blink'])
colored('Hello, World!', 'green')
"""
if os.getenv('ANSI_COLORS_DISABLED') is None:
fmt_str = '\033[%d;30;1m%s'
if color is not None:
text = fmt_str % (COLORS[color], text)
if on_color is not None:
text = fmt_str % (HIGHLIGHTS[on_color], text)
if attrs is not None:
for attr in attrs:
text = fmt_str % (ATTRIBUTES[attr], text)
text += END
return text
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
print((colored(text, color, on_color, attrs)), **kwargs)
# next bit filched from 1.5.2's inspect.py
def currentframe():
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
except:
return sys.exc_info()[2].tb_frame.f_back
def findCaller():
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
"""
if hasattr(sys, 'frozen'): # support for py2exe
_srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
_srcfile = __file__[:-4] + '.py'
else:
_srcfile = __file__
_srcfile = os.path.normcase(_srcfile)
f = currentframe()
# On some versions of IronPython, currentframe() returns None if
# IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)"
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
rv = (co.co_filename, f.f_lineno, co.co_name)
break
return rv
class TermColor():
'''
    Supports colored output under Jenkins
'''
colormap = dict(
concern=dict(color='green', attrs=['bold']),
info=dict(color='grey'),
warn=dict(color='yellow', attrs=['bold']),
warning=dict(color='yellow', attrs=['bold']),
error=dict(color='red'),
critical=dict(color='red', attrs=['bold']),
)
def msg_format(self, mode, msg):
'''
        Get the caller's file name, calling function, and line number
'''
fn, lineno, co_name = findCaller()
filename = fn.split('/')[-1]
now_date = str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
msg_simple = ('[-] %s - %s(%s:%s@%s): %s') % (now_date, mode, filename, co_name, str(lineno), msg)
return msg_simple
def info(self, msg):
self._log("info", msg)
def concern(self, msg):
self._log("concern", msg)
def error(self, msg):
self._log("error", msg)
def warn(self, msg):
self._log("warn", msg)
def _log(self, funcname, msg):
print(colored(self.msg_format(funcname, msg), **self.colormap[funcname]))
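# Example usage of the module-level logger defined below:
#   log.info("starting run")     # grey line with file/line context
#   log.error("request failed")  # red line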
log = TermColor()
if __name__ == '__main__':
print('Current terminal type: %s' % os.getenv('TERM'))
print('Test basic colors:')
cprint('Grey color', 'grey')
cprint('Red color', 'red')
cprint('Green color', 'green')
cprint('Yellow color', 'yellow')
print(('-' * 78))
print('Test highlights:')
cprint('On grey color', on_color='on_grey')
cprint('On red color', on_color='on_red')
cprint('On green color', on_color='on_green')
cprint('On yellow color', on_color='on_yellow')
cprint('On blue color', on_color='on_blue')
cprint('On magenta color', on_color='on_magenta')
cprint('On cyan color', on_color='on_cyan')
cprint('On white color', color='grey', on_color='on_white')
print('-' * 78)
print('Test attributes:')
cprint('Bold grey color', 'grey', attrs=['bold'])
cprint('Dark red color', 'red', attrs=['dark'])
cprint('Underline green color', 'green', attrs=['underline'])
cprint('Blink yellow color', 'yellow', attrs=['blink'])
print(('-' * 78))
print('Test mixing:')
cprint('Underline red on grey color', 'red', 'on_grey',
['underline'])
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
| gpl-3.0 | 925,905,783,110,960,900 | 25.686636 | 106 | 0.516491 | false |
mckinseyacademy/xblock-diagnosticfeedback | diagnostic_feedback/helpers/helper.py | 1 | 1063 | from __future__ import absolute_import
from . import Category, Question, Range
class MainHelper(object):
@classmethod
def save_filtered_data(cls, quiz, data):
"""
filter out & save the posted data to match our required schema for each quiz step
"""
step = data['step']
if step == 1:
quiz.title = data['title']
quiz.description = data['description']
if not quiz.quiz_type and data.get('type'):
quiz.quiz_type = data['type']
if step == 2 and quiz.quiz_type == quiz.BUZZFEED_QUIZ_VALUE:
results = Category.filter_results(data)
quiz.results = results
elif step == 2 and quiz.quiz_type == quiz.DIAGNOSTIC_QUIZ_VALUE:
results = Range.filter_results(data)
quiz.results = results
elif step == 3:
questions = Question.filter_question(data, quiz.quiz_type)
quiz.questions = questions
else:
pass
return "step {} data saved".format(step)
| agpl-3.0 | 775,066,402,183,640,600 | 29.371429 | 93 | 0.571966 | false |
ZeitOnline/zeit.objectlog | src/zeit/objectlog/source.py | 1 | 1802 | from zeit.objectlog.i18n import MessageFactory as _
import zc.sourcefactory.contextual
import zeit.objectlog.interfaces
import zope.app.form.browser.interfaces
import zope.i18n
import zope.interface.common.idatetime
class LogEntrySource(
zc.sourcefactory.contextual.BasicContextualSourceFactory):
def getValues(self, context):
log = zeit.objectlog.interfaces.ILog(context)
return log.get_log()
def createTerm(self, context, source, value, title, token, request):
        # We have to create the title here as we don't have the request in
        # `getTitle` :(
if value.principal is None:
principal = _('System')
else:
p_source = zeit.objectlog.interfaces.ILogEntry['principal'].source
principal_terms = zope.component.getMultiAdapter(
(p_source, request), zope.app.form.browser.interfaces.ITerms)
try:
principal = principal_terms.getTerm(value.principal).title
except LookupError:
principal = value.principal
formatter = request.locale.dates.getFormatter('dateTime', 'medium')
tzinfo = zope.interface.common.idatetime.ITZInfo(request, None)
time = value.time
if tzinfo is not None:
time = time.astimezone(tzinfo)
time = formatter.format(time)
message = zope.i18n.translate(value.message, context=request)
title = _("${time} [${principal}]: ${message}",
mapping=dict(
time=time,
principal_id=value.principal,
principal=principal,
message=message))
return super(LogEntrySource, self).createTerm(
context, source, value, title, token, request)
| bsd-3-clause | -7,002,345,452,534,313,000 | 36.541667 | 78 | 0.624861 | false |
rajashreer7/autotest-client-tests | linux-tools/socat/socat.py | 3 | 1132 | #!/bin/python
import os, subprocess
import logging
from autotest.client import test
from autotest.client.shared import error
class socat(test.test):
"""
Autotest module for testing basic functionality
of socat
@author
"""
version = 1
nfail = 0
path = ''
def initialize(self):
"""
Sets the overall failure counter for the test.
"""
self.nfail = 0
        logging.info('\n Test initialized successfully')
def run_once(self, test_path=''):
"""
Trigger test run
"""
try:
os.environ["LTPBIN"] = "%s/shared" %(test_path)
ret_val = subprocess.call(test_path + '/socat' + '/socat_test.sh', shell=True)
if ret_val != 0:
self.nfail += 1
except error.CmdError, e:
self.nfail += 1
logging.error("Test Failed: %s", e)
def postprocess(self):
if self.nfail != 0:
logging.info('\n nfails is non-zero')
raise error.TestError('\nTest failed')
else:
logging.info('\n Test completed successfully ')
| gpl-2.0 | -1,116,519,213,480,656,300 | 23.085106 | 90 | 0.547703 | false |
dungvtdev/upsbayescpm | bayespy/inference/vmp/nodes/wishart.py | 1 | 9440 | ################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
import scipy.special as special
from bayespy.utils import misc, linalg
from .expfamily import ExponentialFamily
from .expfamily import ExponentialFamilyDistribution
from .expfamily import useconstructor
from .constant import Constant
from .deterministic import Deterministic
from .gamma import GammaMoments
from .node import Moments, Node
class WishartPriorMoments(Moments):
def __init__(self, k):
self.k = k
self.dims = ( (), () )
return
def compute_fixed_moments(self, n):
""" Compute moments for fixed x. """
u0 = np.asanyarray(n)
u1 = special.multigammaln(0.5*u0, self.k)
return [u0, u1]
@classmethod
def from_values(cls, x, d):
""" Compute the dimensions of phi or u. """
return cls(d)
class WishartMoments(Moments):
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
self.dims = ( 2 * shape, () )
return
def compute_fixed_moments(self, Lambda, gradient=None):
""" Compute moments for fixed x. """
L = linalg.chol(Lambda, ndim=self.ndim)
ldet = linalg.chol_logdet(L, ndim=self.ndim)
u = [Lambda,
ldet]
if gradient is None:
return u
du0 = gradient[0]
du1 = (
misc.add_trailing_axes(gradient[1], 2*self.ndim)
* linalg.chol_inv(L, ndim=self.ndim)
)
du = du0 + du1
return (u, du)
def plates_from_shape(self, shape):
if self.ndim == 0:
return shape
else:
return shape[:-2*self.ndim]
def shape_from_plates(self, plates):
return plates + self.shape + self.shape
def get_instance_conversion_kwargs(self):
return dict(ndim=self.ndim)
def get_instance_converter(self, ndim):
if ndim != self.ndim:
raise NotImplementedError(
"No conversion between different ndim implemented for "
"WishartMoments yet"
)
return None
@classmethod
def from_values(cls, x, ndim):
""" Compute the dimensions of phi and u. """
if np.ndim(x) < 2 * ndim:
raise ValueError("Values for Wishart distribution must be at least "
"2-D arrays.")
if np.shape(x)[-ndim:] != np.shape(x)[-2*ndim:-ndim]:
raise ValueError("Values for Wishart distribution must be square "
"matrices, thus the two last axes must have equal "
"length.")
shape = np.shape(x)[-ndim:]
return cls(shape)
class WishartDistribution(ExponentialFamilyDistribution):
"""
Sub-classes implement distribution specific computations.
Distribution for :math:`k \times k` symmetric positive definite matrix.
.. math::
\Lambda \sim \mathcal{W}(n, V)
Note: :math:`V` is inverse scale matrix.
.. math::
        p(\Lambda | n, V) =
        \frac{|V|^{n/2} |\Lambda|^{(n-k-1)/2}}
             {2^{nk/2} \Gamma_k(\frac{n}{2})}
        \exp\left( -\frac{1}{2} \mathrm{tr}(V \Lambda) \right)
"""
def compute_message_to_parent(self, parent, index, u_self, u_n, u_V):
if index == 0:
raise NotImplementedError("Message from Wishart to degrees of "
"freedom parameter (first parent) "
"not yet implemented")
elif index == 1:
Lambda = u_self[0]
n = u_n[0]
return [-0.5 * Lambda,
0.5 * n]
else:
raise ValueError("Invalid parent index {0}".format(index))
def compute_phi_from_parents(self, u_n, u_V, mask=True):
r"""
Compute natural parameters
.. math::
\phi(n, V) =
\begin{bmatrix}
-\frac{1}{2} V
\\
\frac{1}{2} n
\end{bmatrix}
"""
return [-0.5 * u_V[0],
0.5 * u_n[0]]
def compute_moments_and_cgf(self, phi, mask=True):
r"""
Return moments and cgf for given natural parameters
.. math::
\langle u \rangle =
\begin{bmatrix}
\phi_2 (-\phi_1)^{-1}
\\
-\log|-\phi_1| + \psi_k(\phi_2)
\end{bmatrix}
\\
g(\phi) = \phi_2 \log|-\phi_1| - \log \Gamma_k(\phi_2)
"""
U = linalg.chol(-phi[0])
k = np.shape(phi[0])[-1]
#k = self.dims[0][0]
logdet_phi0 = linalg.chol_logdet(U)
u0 = phi[1][...,np.newaxis,np.newaxis] * linalg.chol_inv(U)
u1 = -logdet_phi0 + misc.multidigamma(phi[1], k)
u = [u0, u1]
g = phi[1] * logdet_phi0 - special.multigammaln(phi[1], k)
return (u, g)
def compute_cgf_from_parents(self, u_n, u_V):
r"""
CGF from parents
.. math::
g(n, V) = \frac{n}{2} \log|V| - \frac{nk}{2} \log 2 -
\log \Gamma_k(\frac{n}{2})
"""
n = u_n[0]
gammaln_n = u_n[1]
V = u_V[0]
logdet_V = u_V[1]
k = np.shape(V)[-1]
g = 0.5*n*logdet_V - 0.5*k*n*np.log(2) - gammaln_n
return g
def compute_fixed_moments_and_f(self, Lambda, mask=True):
r"""
Compute u(x) and f(x) for given x.
.. math:
u(\Lambda) =
\begin{bmatrix}
\Lambda
\\
\log |\Lambda|
\end{bmatrix}
"""
k = np.shape(Lambda)[-1]
ldet = linalg.chol_logdet(linalg.chol(Lambda))
u = [Lambda,
ldet]
f = -(k+1)/2 * ldet
return (u, f)
class Wishart(ExponentialFamily):
r"""
Node for Wishart random variables.
The random variable :math:`\mathbf{\Lambda}` is a :math:`D\times{}D`
positive-definite symmetric matrix.
.. math::
p(\mathbf{\Lambda}) = \mathrm{Wishart}(\mathbf{\Lambda} | N,
\mathbf{V})
Parameters
----------
n : scalar or array
:math:`N`, degrees of freedom, :math:`N>D-1`.
V : Wishart-like node or (...,D,D)-array
:math:`\mathbf{V}`, scale matrix.
"""
_distribution = WishartDistribution()
def __init__(self, n, V, **kwargs):
"""
Create Wishart node.
"""
super().__init__(n, V, **kwargs)
@classmethod
def _constructor(cls, n, V, **kwargs):
"""
Constructs distribution and moments objects.
"""
# Make V a proper parent node and get the dimensionality of the matrix
V = cls._ensure_moments(V, WishartMoments, ndim=1)
D = V.dims[0][-1]
n = cls._ensure_moments(n, WishartPriorMoments, d=D)
moments = WishartMoments((D,))
# Parent node message types
parent_moments = (n._moments, V._moments)
parents = [n, V]
return (parents,
kwargs,
moments.dims,
cls._total_plates(kwargs.get('plates'),
cls._distribution.plates_from_parent(0, n.plates),
cls._distribution.plates_from_parent(1, V.plates)),
cls._distribution,
moments,
parent_moments)
def scale(self, scalar, **kwargs):
return _ScaledWishart(self, scalar, **kwargs)
def __str__(self):
n = 2*self.phi[1]
A = 0.5 * self.u[0] / self.phi[1][...,np.newaxis,np.newaxis]
return ("%s ~ Wishart(n, A)\n"
" n =\n"
"%s\n"
" A =\n"
"%s\n"
% (self.name, n, A))
class _ScaledWishart(Deterministic):
def __init__(self, Lambda, alpha, ndim=None, **kwargs):
if ndim is None:
try:
ndim = Lambda._moments.ndim
except AttributeError:
raise ValueError("Give explicit ndim argument. (ndim=1 for normal matrix)")
Lambda = self._ensure_moments(Lambda, WishartMoments, ndim=ndim)
alpha = self._ensure_moments(alpha, GammaMoments)
dims = Lambda.dims
self._moments = Lambda._moments
self._parent_moments = (Lambda._moments, alpha._moments)
return super().__init__(Lambda, alpha, dims=dims, **kwargs)
def _compute_moments(self, u_Lambda, u_alpha):
Lambda = u_Lambda[0]
logdet_Lambda = u_Lambda[1]
alpha = misc.add_trailing_axes(u_alpha[0], 2*self._moments.ndim)
logalpha = u_alpha[1]
u0 = Lambda * alpha
u1 = logdet_Lambda + np.prod(self._moments.shape) * logalpha
return [u0, u1]
def _compute_message_to_parent(self, index, m, u_Lambda, u_alpha):
if index == 0:
alpha = misc.add_trailing_axes(u_alpha[0], 2*self._moments.ndim)
logalpha = u_alpha[1]
m0 = m[0] * alpha
m1 = m[1]
return [m0, m1]
if index == 1:
Lambda = u_Lambda[0]
logdet_Lambda = u_Lambda[1]
m0 = linalg.inner(m[0], Lambda, ndim=2*self._moments.ndim)
m1 = m[1] * np.prod(self._moments.shape)
return [m0, m1]
raise IndexError()
| mit | 6,343,873,845,033,821,000 | 25.591549 | 91 | 0.504661 | false |
LePtitLilou/vcsmp | Lib/utils.py | 1 | 16904 | # Adapted for numpy/ma/cdms2 by convertcdms.py
import numpy
import cdtime
class VCSUtilsError (Exception):
def __init__ (self, args=None):
"""Create an exception"""
self.args = args
def __str__(self):
"""Calculate the string representation"""
return str(self.args)
__repr__ = __str__
def minmax(*data):
'''
Function : minmax
Description of Function
Return the minimum and maximum of a serie of array/list/tuples (or combination of these)
Values those absolute value are greater than 1.E20, are masked
You can combined list/tuples/... pretty much any combination is allowed
Examples of Use
>>> s=range(7)
>>> vcs.minmax(s)
(0.0, 6.0)
>>> vcs.minmax([s,s])
(0.0, 6.0)
>>> vcs.minmax([[s,s*2],4.,[6.,7.,s]],[5.,-7.,8,(6.,1.)])
(-7.0, 8.0)
'''
mx=-1.E77
mn=1.E77
if len(data)==1 : data=data[0]
global myfunction
def myfunction(d,mx,mn):
if d is None:
return mx,mn
from numpy.ma import maximum,minimum,masked_where,absolute,greater,count
try:
d=masked_where(greater(absolute(d),9.9E19),d)
if count(d)==0 : return mx,mn
mx=float(maximum(mx,float(maximum(d))))
mn=float(minimum(mn,float(minimum(d))))
except:
for i in d:
mx,mn=myfunction(i,mx,mn)
return mx,mn
mx,mn=myfunction(data,mx,mn)
if mn==1.E77 and mx==-1.E77 :mn,mx=1.E20,1.E20
return mn,mx
def mkevenlevels(n1,n2,nlev=10):
'''
Function : mkevenlevels
Description of Function:
Return a serie of evenly spaced levels going from n1 to n2
by default 10 intervals will be produced
Examples of use:
>>> vcs.mkevenlevels(0,100)
[0.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
>>> vcs.mkevenlevels(0,100,nlev=5)
[0.0, 20.0, 40.0, 60.0, 80.0, 100.0]
>>> vcs.mkevenlevels(100,0,nlev=5)
[100.0, 80.0, 60.0, 40.0, 20.0, 0.0]
'''
import numpy.ma
lev=numpy.ma.arange(nlev+1,dtype=numpy.float)
factor=float(n2-n1)/nlev
lev=factor*lev
lev=lev+n1
return list(lev)
def mkscale(n1,n2,nc=12,zero=1):
'''
Function: mkscale
Description of function:
This function return a nice scale given a min and a max
option:
nc # Maximum number of intervals (default=12)
zero # Not all implemented yet so set to 1 but values will be:
-1: zero MUST NOT be a contour
0: let the function decide # NOT IMPLEMENTED
1: zero CAN be a contour (default)
2: zero MUST be a contour
Examples of Use:
>>> vcs.mkscale(0,100)
[0.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
>>> vcs.mkscale(0,100,nc=5)
[0.0, 20.0, 40.0, 60.0, 80.0, 100.0]
>>> vcs.mkscale(-10,100,nc=5)
[-25.0, 0.0, 25.0, 50.0, 75.0, 100.0]
>>> vcs.mkscale(-10,100,nc=5,zero=-1)
[-20.0, 20.0, 60.0, 100.0]
>>> vcs.mkscale(2,20)
[2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
>>> vcs.mkscale(2,20,zero=2)
[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
'''
if n1==n2 : return [n1]
import numpy
nc=int(nc)
cscale=0 # ???? May be later
min, max=minmax(n1,n2)
if zero>1.:
if min>0. : min=0.
if max<0. : max=0.
rg=float(max-min) # range
delta=rg/nc # basic delta
# scale delta to be >10 and <= 100
lg=-numpy.log10(delta)+2.
il=numpy.floor(lg)
delta=delta*(10.**il)
max=max*(10.**il)
min=min*(10.**il)
if zero>-0.5:
if delta<=20.:
delta=20
elif delta<=25. :
delta=25
elif delta<=40. :
delta=40
elif delta<=50. :
delta=50
elif delta<=101. :
delta=100
first = numpy.floor(min/delta)-1.
else:
if delta<=20.:
delta=20
elif delta<=40. :
delta=40
elif delta<=60. :
delta=60
elif delta<=101. :
delta=100
first=numpy.floor(min/delta)-1.5
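    # Worked example: mkscale(0,100,nc=12) gives a basic delta of 100/12
    # = 8.33, scaled by 10**il (il=1) to 83.3 and bumped to the nice step
    # 100; after rescaling back this yields contours every 10 from 0 to 100.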
scvals=delta*(numpy.arange(2*nc)+first)
a=0
for j in range(len(scvals)):
if scvals[j]>min :
a=j-1
break
b=0
for j in range(len(scvals)):
if scvals[j]>=max :
b=j+1
break
if cscale==0:
cnt=scvals[a:b]/10.**il
else:
#not done yet...
raise VCSUtilsError,'ERROR scale not implemented in this function'
return list(cnt)
def __split2contiguous(levels):
""" Function __split2contiguous(levels)
takes list of split intervals and make it contiguous if possible
"""
tmplevs=[]
for il in range(len(levels)):
lv=levels[il]
if not (isinstance(lv,list) or isinstance(lv,tuple)):
raise VCSUtilsError,"Error levels must be a set of intervals"
if not len(lv)==2: raise VCSUtilsError,"Error intervals can only have 2 elements"
if il!=0:
lv2=levels[il-1]
if lv2[1]!=lv[0]:
raise VCSUtilsError,"Error intervals are NOT contiguous from "+str(lv2[1])+" to "+str(lv[0])
tmplevs.append(lv[0])
tmplevs.append(levels[-1][1])
return tmplevs
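# Example (hypothetical input): __split2contiguous([[0, 10], [10, 20]])
# returns [0, 10, 20].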
def mklabels(vals,output='dict'):
'''
Function : mklabels
Description of Function:
    This function takes levels and outputs strings for a nice display of the level values; it returns a dictionary unless output="list" is specified
Examples of use:
>>> a=vcs.mkscale(2,20,zero=2)
>>> vcs.mklabels (a)
{20.0: '20', 18.0: '18', 16.0: '16', 14.0: '14', 12.0: '12', 10.0: '10', 8.0: '8', 6.0: '6', 4.0: '4', 2.0: '2', 0.0: '0'}
>>> vcs.mklabels ( [5,.005])
{0.0050000000000000001: '0.005', 5.0: '5.000'}
>>> vcs.mklabels ( [.00002,.00005])
{2.0000000000000002e-05: '2E-5', 5.0000000000000002e-05: '5E-5'}
>>> vcs.mklabels ( [.00002,.00005],output='list')
['2E-5', '5E-5']
'''
import string,numpy.ma
if isinstance(vals[0],list) or isinstance(vals[0],tuple):
vals=__split2contiguous(vals)
vals=numpy.ma.asarray(vals)
nvals=len(vals)
ineg=0
ext1=0
ext2=0
# Finds maximum number to write
amax=float(numpy.ma.maximum(numpy.ma.absolute(vals)))
if amax==0 :
if string.lower(output[:3])=='dic' :
return {0:'0'}
else:
return ['0']
amin,amax=minmax(numpy.ma.masked_equal(numpy.ma.absolute(vals),0))
ratio=amax/amin
if int(numpy.ma.floor(numpy.ma.log10(ratio)))+1>6:
lbls=[]
for i in range(nvals):
if vals[i]!=0:
lbls.append(mklabels([vals[i]],output='list')[0])
else:
lbls.append('0')
if string.lower(output[:3])=='dic':
dic={}
for i in range(len(vals)):
dic[float(vals[i])]=lbls[i]
return dic
else:
return lbls
tmax=float(numpy.ma.maximum(vals))
if tmax<0. :
ineg=1
vals=-vals
amax=float(numpy.ma.maximum(vals))
# Number of digit on the left of decimal point
idigleft=int(numpy.ma.floor(numpy.ma.log10(amax)))+1
# Now determine the number of significant figures
idig=0
for i in range(nvals):
aa=numpy.ma.power(10.,-idigleft)
while abs(round(aa*vals[i])-aa*vals[i])>.000001 : aa=aa*10.
idig=numpy.ma.maximum(idig,numpy.ma.floor(numpy.ma.log10(aa*numpy.ma.power(10.,idigleft))))
idig=int(idig)
# Now does the writing part
lbls=[]
# First if we need an E format
if idigleft>5 or idigleft<-2:
if idig==1:
for i in range(nvals):
aa=int(round(vals[i]/numpy.ma.power(10.,idigleft-1)))
lbls.append(str(aa)+'E'+str(idigleft-1))
else:
for i in range(nvals):
aa=str(vals[i]/numpy.ma.power(10.,idigleft-1))
ii=1
if vals[i]<0. : ii=2
aa=string.ljust(aa,idig+ii)
aa=string.replace(aa,' ','0')
lbls.append(aa+'E'+str(idigleft-1))
elif idigleft>0 and idigleft>=idig: #F format
for i in range(nvals):
lbls.append(str(int(round(vals[i]))))
else:
for i in range(nvals):
ii=1
if vals[i]<0.: ii=2
ndig=idig+ii
rdig=idig-idigleft
if idigleft<0 : ndig=idig-idigleft+1+ii
aa='%'+str(ndig)+'.'+str(rdig)+'f'
aa=aa % vals[i]
lbls.append(aa)
if ineg:
vals=-vals
for i in range(len(lbls)):
lbls[i]='-'+lbls[i]
if string.lower(output[:3])=='dic':
dic={}
for i in range(len(vals)):
dic[float(vals[i])]=str(lbls[i])
return dic
else:
return lbls
def getcolors(levs,colors=range(16,240),split=1,white=240):
'''
Function : getcolors(levs,colors=range(16,240),split=1,white=240)
Description of Function:
For isofill/boxfill purposes
Given a list of levels this function returns the colors that would best spread a list of "user-defined" colors (default is 16 to 239 , i.e 224 colors), always using the first and last color. Optionally the color range can be split into 2 equal domain to represent <0 and >0 values.
If the colors are split an interval goes from <0 to >0 then this is assigned the "white" color
Usage:
levs : levels defining the color ranges
    colors (default= range(16,240) ) : A list/tuple of the colors you wish to use
split # parameter to split the colors between 2 equal domain:
one for positive values and one for negative values
0 : no split
1 : split if the levels go from <0 to >0
2 : split even if all the values are positive or negative
white (=240) # If split is on and an interval goes from <0 to >0 this color number will be used within this interval (240 is white in the default VCS palette color)
Examples of Use:
>>> a=[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
>>> vcs.getcolors (a)
[16, 41, 66, 90, 115, 140, 165, 189, 214, 239]
>>> vcs.getcolors (a,colors=range(16,200))
[16, 36, 57, 77, 97, 118, 138, 158, 179, 199]
>>> vcs.getcolors(a,colors=[16,25,15,56,35,234,12,11,19,32,132,17])
[16, 25, 15, 35, 234, 12, 11, 32, 132, 17]
>>> a=[-6.0, -2.0, 2.0, 6.0, 10.0, 14.0, 18.0, 22.0, 26.0]
>>> vcs.getcolors (a,white=241)
[72, 241, 128, 150, 172, 195, 217, 239]
>>> vcs.getcolors (a,white=241,split=0)
[16, 48, 80, 112, 143, 175, 207, 239]
'''
import string
if len(levs)==1: return [colors[0]]
if isinstance(levs[0],list) or isinstance(levs[0],tuple):
tmplevs=[levs[0][0]]
for i in range(len(levs)):
if i!=0:
if levs[i-1][1]*levs[i][0]<0.:
tmplevs[-1]=0.
tmplevs.append(levs[i][1])
levs=tmplevs
# Take care of the input argument split
if isinstance(split,str):
if split.lower()=='no' :
split=0
elif split.lower()=='force' :
split=2
else :
split=1
# Take care of argument white
if isinstance(white,str): white=string.atoi(white)
# Gets first and last value, and adjust if extensions
mn=levs[0]
mx=levs[-1]
# If first level is < -1.E20 then use 2nd level for mn
if levs[0]<=-9.E19 and levs[1]>0. : mn=levs[1]
# If last level is > 1.E20 then use 2nd to last level for mx
if levs[-1]>=9.E19 and levs[-2]<0. : mx=levs[-2]
# Do we need to split the palette in 2 ?
sep=0
if mx*mn<0. and split==1 : sep=1
if split==2 : sep=1
# Determine the number of colors to use
nc=len(levs)-1
## In case only 2 levels, i.e only one color to return
if nc==1:
if split>0 and levs[0]*levs[1]<=0: # Change of sign
return white
else:
return colors[0]
# Number of colors passed
ncols=len(colors)
k=0 #???
col=[]
# Counts the number of negative colors
nn=0 # initialize
#if (mn<=0.) and (levs[0]<=-9.E19) : nn=nn+1 # Ext is one more <0 box
zr=0 # Counter to know if you stop by zero or it is included in a level
for i in range(nc):
if levs[i]<0.: nn=nn+1 # Count nb of <0 box
if levs[i]==0.: zr=1 # Do we stop at zero ?
np=nc-nn # Nb of >0 box is tot - neg -1 for the blank box
if mx*mn<0. and zr==0 :nn=nn-1 # we have a split cell bet + and - so remove a -
# Determine the interval (in colors) between each level
cinc=(ncols-1.)/float(nc-1.)
# Determine the interval (in colors) between each level (neg)
cincn=0.
if nn!=0 and nn!=1 : cincn=(ncols/2.-1.)/float(nn-1.)
# Determine the interval (in colors) between each level (pos)
cincp=0
isplit=0
if np!=0 and np!=1 : cincp=(ncols/2.-1.)/float(np-1.)
if sep!=1:
for i in xrange(nc):
cv=i*cinc
col.append(colors[int(round(cv))])
else:
colp=[]
coln=[]
col=[]
for i in xrange(nc):
if levs[i] < 0 :
cv=i*cincn
# if nn==1 : cv=len(colors)/4. # if only 1 neg then use the middle of the neg colors
if (levs[i])*(levs[i+1])<0 :
col.append(white)
isplit=1
else:
col.append(colors[int(round(cv))])
else:
if np==1 : cv=3*len(colors)/4. # if only 1 pos then use the middle of the pos colors
cv=ncols/2.+(i-nn-isplit)*cincp
col.append(colors[int(round(cv))])
if col[0]==white and levs[0]<-9.E19: col[0]=colors[0]
return col
def generate_time_labels(d1,d2,units,calendar=cdtime.DefaultCalendar):
""" generate_time_labels(self,d1,d2,units,calendar=cdtime.DefaultCalendar)
returns a dictionary of time labels for an interval of time, in a user defined units system
d1 and d2 must be cdtime object, if not they will be assumed to be in "units"
Example:
lbls = generate_time_labels(cdtime.reltime(0,'months since 2000'),
cdtime.reltime(12,'months since 2000'),
'days since 1800',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'days since 1800'
lbls = generate_time_labels(cdtime.reltime(0,'months since 2000'),
cdtime.comptime(2001),
'days since 1800',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'days since 1800'
lbls = generate_time_labels(0,
12,
'months since 2000',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'months since 2000'
"""
if isinstance(d1,(int,long,float)):
d1=cdtime.reltime(d1,units)
if isinstance(d2,(int,long,float)):
d2=cdtime.reltime(d2,units)
d1r=d1.torel(units,calendar)
d2r=d2.torel(units,calendar)
d1,d2=minmax(d1r.value,d2r.value)
u=units.split('since')[0].strip().lower()
dic={}
if u in ['month','months']:
delta=(d2-d1)*30
elif u in ['year','years']:
delta=(d2-d1)*365
elif u in ['hours','hour']:
delta=(d2-d1)/24.
elif u in ['minute','minutes']:
delta=(d2-d1)/24./60.
elif u in ['second','seconds']:
        delta=(d2-d1)/24./60./60. # seconds to days
else:
delta=d2-d1
if delta<.042: # less than 1 hour
levs=mkscale(d1,d2)
for l in levs:
dic[l]=str(cdtime.reltime(l,units).tocomp(calendar))
    elif delta<1: # Less than a day, put a label every hour
d1=d1r.torel('hours since 2000').value
d2=d2r.torel('hours since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'hours since 2000').tocomp(calendar)
if t.minute>30:
t=t.add(1,cdtime.Hour)
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split(':')[0]
    elif delta<90: # Less than 3 months, put a label every day
d1=d1r.torel('days since 2000').value
d2=d2r.torel('days since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'days since 2000').tocomp(calendar)
if t.hour>12:
t=t.add(1,cdtime.Day)
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split(' ')[0]
    elif delta<800: # ~ Less than 24 months, put a label every month
d1=d1r.torel('months since 2000').value
d2=d2r.torel('months since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'months since 2000').tocomp(calendar)
if t.day>15:
t=t.add(1,cdtime.Month)
t.day=1
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]='-'.join(str(t).split('-')[:2])
else: # ok lots of years, let auto decide but always puts at Jan first
d1=d1r.torel('years since 2000').value
d2=d2r.torel('years since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'years since 2000').tocomp(calendar)
if t.month>6:
t=t.add(1,cdtime.Year)
t.month=1
t.day=1
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split('-')[0]
return dic
| lgpl-3.0 | 8,316,699,669,609,183,000 | 31.383142 | 284 | 0.585424 | false |
sonymoon/algorithm | src/main/python/leetcode-python/easy/206.Reverse Linked List.py | 1 | 1534 | class ListNode:
def __init__(self, x):
self.val = x
self.next = None
    def __str__(self):
        return str(self.val)
class Solution:
def reverseList(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if not head:
return head
pre = None
while head is not None:
temp = head.next
head.next = pre
pre = head
head = temp
return pre
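    # Example (trace): for 1->2->3 the loop re-points each node's next
    # backwards while pre trails head, so the returned list is 3->2->1.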
def stringToIntegerList(input):
import json
return json.loads(input)
def stringToListNode(input):
# Generate list from the input
numbers = stringToIntegerList(input)
# Now convert that list into linked list
dummyRoot = ListNode(0)
ptr = dummyRoot
for number in numbers:
ptr.next = ListNode(number)
ptr = ptr.next
ptr = dummyRoot.next
return ptr
def listNodeToString(node):
if not node:
return "[]"
result = ""
while node:
result += str(node.val) + ", "
node = node.next
return "[" + result[:-2] + "]"
def main():
import sys
def readlines():
for line in sys.stdin:
yield line.strip('\n')
lines = readlines()
while True:
try:
line = next(lines)
head = stringToListNode(line);
ret = Solution().reverseList(head)
out = listNodeToString(ret);
print(out)
except StopIteration:
break
if __name__ == '__main__':
main()
| apache-2.0 | -5,421,362,284,684,240,000 | 18.417722 | 46 | 0.52412 | false |
bregmanstudio/BregmanToolkit | bregman/examples/6_dissonance.py | 1 | 2827 | # MUSIC014/102 - Music, Information, Neuroscience,
# Week 1 Lab
# Using the Plompt and Levelt dissonance function
#
# Professor Michael Casey, 1/7/2015
from pylab import *
from bregman.suite import *
import scipy.signal as signal
import pdb
def ideal_chromatic_dissonance(num_harmonics=7, f0=440):
"""
One octave of chromatic dissonance values
"""
harms = arange(num_harmonics)+1
freqs = [f0*i for i in harms]
amps = [exp(-.5*i) for i in harms]
freqs2 = array([[f0*2**(k/12.)*i for i in harms] for k in range(0,13)])
all_amps = r_[amps,amps]
diss = []
for f in freqs2:
all_freqs = r_[freqs,f]
idx = all_freqs.argsort()
diss.append(dissonance_fun(all_freqs[idx], all_amps[idx]))
return array(diss)
def get_peaks(F):
"""
Extract peaks from linear spectrum in F
Algorithm 1: zero-crossings of derivative of smoothed spectrum
"""
X = F.X.copy()
b,a = signal.butter(10, .25) # lp filter coefficients
# Smoothing
    X = signal.filtfilt(b,a,X,axis=0)
# Derivative
Xd = diff(X,axis=0)
# Zero crossing
thresh=1e-9
peak_idx = []
for i,x in enumerate(Xd.T):
idx = where((x[:-1]>thresh)&(x[1:]<-thresh))[0] + 1
if len(idx):
idx = idx[X[idx,i].argsort()][::-1]
peak_idx.append(idx)
return peak_idx
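# Example (hypothetical): for a spectrum with partials near bins 40 and 80,
# get_peaks returns those bin indices for each frame, strongest first.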
def audio_chromatic_scale(f0=440, num_harmonics=7):
N = 11025
nH = num_harmonics
H = vstack([harmonics(f0=f0*2**(k/12.),num_harmonics=nH, num_points=N) for k in arange(13)])
return H
def audio_chromatic_dissonance(f0=440, num_harmonics=7, num_peaks=10):
sr = 44100
nfft = 8192
afreq = sr/nfft
H = audio_chromatic_scale(f0=f0, num_harmonics=num_harmonics)
h0 = H[0]
diss = []
for i,h in enumerate(H):
F = LinearFrequencySpectrum((h0+h)/2.,nfft=nfft,wfft=nfft/2,nhop=nfft/4)
P = get_peaks(F)
frame = []
for j,p in enumerate(P):
freqs = afreq*p[:num_peaks] # take middle frame as reference
mags = F.X[p[:num_peaks],j]
idx = freqs.argsort()
frame.append(dissonance_fun(freqs[idx],mags[idx]))
diss.append(array(frame).mean())
return array(diss)
def dissonance_plot(f0=440, num_harmonics=7, num_peaks=10):
figure()
diss_i = ideal_chromatic_dissonance(f0=f0, num_harmonics=num_harmonics)
diss = audio_chromatic_dissonance(f0=f0, num_harmonics=num_harmonics, num_peaks=num_peaks)
plot(diss_i / diss_i.max(), linestyle='--', linewidth=2)
plot(diss / diss.max())
t_str = 'f0=%d, partials=%d, peaks=%d'%(f0,num_harmonics,num_peaks)
title('Dissonance (chromatic): '+t_str,fontsize=16)
legend(['ideal','estimated'])
xlabel('Pitch class (chroma)',fontsize=14)
ylabel('Dissonance',fontsize=14)
grid()
| mit | 2,914,253,153,146,188,300 | 31.494253 | 96 | 0.615493 | false |
cherbib/fofix | src/Guitar.py | 1 | 95964 | #####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyostila #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Glorandwarf #
# 2008 QQStarS #
# 2008 Blazingamer #
# 2008 evilynux <[email protected]> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Song import Note, Tempo
from Mesh import Mesh
from Neck import Neck
import random
from copy import deepcopy
from Shader import shaders
from OpenGL.GL import *
import math
#myfingershurt: needed for multi-OS file fetching
import os
import Log
import Song #need the base song defines as well
from Instrument import *
class Guitar(Instrument):
def __init__(self, engine, playerObj, editorMode = False, player = 0, bass = False):
Instrument.__init__(self, engine, playerObj, player)
self.isDrum = False
self.isBassGuitar = bass
self.isVocal = False
self.debugMode = False
self.gameMode2p = self.engine.world.multiMode
self.matchingNotes = []
self.starSpinFrameIndex = 0
self.starSpinFrames = 16
self.logClassInits = self.engine.config.get("game", "log_class_inits")
if self.logClassInits == 1:
Log.debug("Guitar class init...")
#death_au: fixed neck size
#if self.engine.theme.twoDnote == False or self.engine.theme.twoDkeys == False:
#self.boardWidth = 3.6
#self.boardLength = 9.0
self.lastPlayedNotes = [] #MFH - for reverting when game discovers it implied incorrectly
self.missedNotes = []
self.missedNoteNums = []
self.editorMode = editorMode
#########For Animations
self.Animspeed = 30#Lower value = Faster animations
#For Animated Starnotes
self.indexCount = 0
#Alarian, For animated hitglow
self.HCountAni = False
#myfingershurt:
self.hopoStyle = self.engine.config.get("game", "hopo_system")
self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
if self.gh2sloppy == 1:
self.hopoStyle = 4
self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
#blazingamer
self.killfx = self.engine.config.get("performance", "killfx")
self.killCount = 0
self.bigMax = 1
#Get theme
themename = self.engine.data.themeLabel
#now theme determination logic is only in data.py:
self.theme = self.engine.data.theme
self.oFlash = None
#myfingershurt:
self.bassGrooveNeckMode = self.engine.config.get("game", "bass_groove_neck")
self.starspin = self.engine.config.get("performance", "starspin")
if self.twoDnote == True:
#Spinning starnotes or not?
#myfingershurt: allowing any non-Rock Band theme to have spinning starnotes if the SpinNotes.png is available in that theme's folder
if self.starspin == True and self.theme < 2:
#myfingershurt: check for SpinNotes, if not there then no animation
if self.gameMode2p == 6:
if engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotesbattle.png")):
self.starSpinFrames = 8
else:
self.starspin = False
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotes.png")):
self.starspin = False
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
#mfh - adding fallback for beta option
else:
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "note.dae")):
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "note.dae")))
else:
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("note.dae")))
for i in range(5):
if engine.loadImgDrawing(self, "notetex"+chr(97+i), os.path.join("themes", themename, "notetex_"+chr(97+i)+".png")):
self.notetex = True
else:
self.notetex = False
break
if self.engine.fileExists(os.path.join("themes", themename, "star.dae")):
engine.resource.load(self, "starMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "star.dae")))
else:
self.starMesh = None
for i in range(5):
if engine.loadImgDrawing(self, "startex"+chr(97+i), os.path.join("themes", themename, "startex_"+chr(97+i)+".png")):
self.startex = True
else:
self.startex = False
break
for i in range(5):
if engine.loadImgDrawing(self, "staratex"+chr(97+i), os.path.join("themes", themename, "staratex_"+chr(97+i)+".png")):
self.staratex = True
else:
self.staratex = False
break
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "battleFrets", os.path.join("themes", themename,"battle_frets.png")):
self.battleFrets = None
if self.twoDkeys == True:
engine.loadImgDrawing(self, "fretButtons", os.path.join("themes",themename,"fretbuttons.png"))
else:
defaultKey = False
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "key.dae")):
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "key.dae")))
else:
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("key.dae")))
defaultKey = True
if defaultKey:
self.keytex = False
else:
for i in range(5):
if engine.loadImgDrawing(self, "keytex"+chr(97+i), os.path.join("themes", themename, "keytex_"+chr(97+i)+".png")):
self.keytex = True
else:
self.keytex = False
break
#inkk: loading theme-dependant tail images
#myfingershurt: must ensure the new tails don't affect the Rock Band mod...
self.simpleTails = False
for i in range(0,7):
if not engine.loadImgDrawing(self, "tail"+str(i), os.path.join("themes",themename,"tails","tail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "taile"+str(i), os.path.join("themes",themename,"tails","taile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btail"+str(i), os.path.join("themes",themename,"tails","btail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btaile"+str(i), os.path.join("themes",themename,"tails","btaile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if self.simpleTails:
Log.debug("Simple tails used; complex tail loading error...")
if not engine.loadImgDrawing(self, "tail1", os.path.join("themes",themename,"tail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail1", "tail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "tail2", os.path.join("themes",themename,"tail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail2", "tail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail1", os.path.join("themes",themename,"bigtail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail1", "bigtail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail2", os.path.join("themes",themename,"bigtail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail2", "bigtail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill1", os.path.join("themes", themename, "kill1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill1", "kill1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill2", os.path.join("themes", themename, "kill2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill2", "kill2.png", textureSize = (128, 128))
#MFH - freestyle tails (for drum fills & BREs)
if not engine.loadImgDrawing(self, "freestyle1", os.path.join("themes", themename, "freestyletail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle1", "freestyletail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "freestyle2", os.path.join("themes", themename, "freestyletail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle2", "freestyletail2.png", textureSize = (128, 128))
self.twoChordMax = False
self.rockLevel = 0.0
self.neck = Neck(self.engine, self, playerObj)
def selectPreviousString(self):
self.selectedString = (self.selectedString - 1) % self.strings
def selectString(self, string):
self.selectedString = string % self.strings
def selectNextString(self):
self.selectedString = (self.selectedString + 1) % self.strings
def noteBeingHeld(self):
noteHeld = False
for i in range(0,5):
if self.hit[i] == True:
noteHeld = True
return noteHeld
def isKillswitchPossible(self):
possible = False
for i in range(0,5):
if self.hit[i] == True:
possible = True
return possible
def renderTail(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False, freestyleTail = 0, pos = 0):
#volshebnyi - if freestyleTail == 0, act normally.
    # if freestyleTail == 1, render a freestyle tail
    # if freestyleTail == 2, render a highlighted freestyle tail
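    # Example (hypothetical call): a plain held green-fret tail one board
    # unit long would be drawn with
    #   self.renderTail(1.0, sustain = True, kill = False, color = c,
    #                   fret = 0, freestyleTail = 0, pos = pos)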
if not self.simpleTails:#Tail Colors
tailcol = (1,1,1, color[3])
else:
if big == False and tailOnly == True:
tailcol = (.6, .6, .6, color[3])
else:
tailcol = (color)
#volshebnyi - tail color when sp is active
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):#8bit
c = self.fretColors[5]
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], color[3])
if flat:
tailscale = (1, .1, 1)
else:
tailscale = None
if sustain:
if not length == None:
size = (.08, length)
if size[1] > self.boardLength:
s = self.boardLength
else:
s = length
# if freestyleTail == 1, render freestyle tail
if freestyleTail == 0: #normal tail rendering
#myfingershurt: so any theme containing appropriate files can use new tails
if not self.simpleTails:
if big == True and tailOnly == True:
if kill and self.killfx == 0:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.17, s - zsize)
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.btail6
tex2 = self.btaile6
else:
if fret == 0:
tex1 = self.btail1
tex2 = self.btaile1
elif fret == 1:
tex1 = self.btail2
tex2 = self.btaile2
elif fret == 2:
tex1 = self.btail3
tex2 = self.btaile3
elif fret == 3:
tex1 = self.btail4
tex2 = self.btaile4
elif fret == 4:
tex1 = self.btail5
tex2 = self.btaile5
else:
zsize = .15
size = (.1, s - zsize)
if tailOnly:#Note let go
tex1 = self.tail0
tex2 = self.taile0
else:
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.tail6
tex2 = self.taile6
else:
if fret == 0:
tex1 = self.tail1
tex2 = self.taile1
elif fret == 1:
tex1 = self.tail2
tex2 = self.taile2
elif fret == 2:
tex1 = self.tail3
tex2 = self.taile3
elif fret == 3:
tex1 = self.tail4
tex2 = self.taile4
elif fret == 4:
tex1 = self.tail5
tex2 = self.taile5
else:
if big == True and tailOnly == True:
if kill:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.11, s - zsize)
tex1 = self.bigTail1
tex2 = self.bigTail2
else:
zsize = .15
size = (.08, s - zsize)
tex1 = self.tail1
tex2 = self.tail2
else: #freestyleTail > 0
# render an inactive freestyle tail (self.freestyle1 & self.freestyle2)
zsize = .25
if self.freestyleActive:
size = (.30, s - zsize) #was .15
else:
size = (.15, s - zsize)
tex1 = self.freestyle1
tex2 = self.freestyle2
if freestyleTail == 1:
#glColor4f(*color)
c1, c2, c3, c4 = color
tailGlow = 1 - (pos - self.freestyleLastFretHitTime[fret] ) / self.freestylePeriod
if tailGlow < 0:
tailGlow = 0
color = (c1 + c1*2.0*tailGlow, c2 + c2*2.0*tailGlow, c3 + c3*2.0*tailGlow, c4*0.6 + c4*0.4*tailGlow) #MFH - this fades inactive tails' color darker
tailcol = (color)
if self.theme == 2 and freestyleTail == 0 and big and tailOnly and shaders.enable("tail"):
color = (color[0]*1.5,color[1]*1.5,color[2]*1.5,1.0)
shaders.setVar("color",color)
if kill and self.killfx == 0:
h = shaders.getVar("height")
shaders.modVar("height",0.5,0.06/h-0.1)
shaders.setVar("offset",(5.0-size[1],0.0))
size=(size[0]*15,size[1])
self.engine.draw3Dtex(tex1, vertex = (-size[0], 0, size[0], size[1]), texcoord = (0.0, 0.0, 1.0, 1.0),
scale = tailscale, color = tailcol)
self.engine.draw3Dtex(tex2, vertex = (-size[0], size[1], size[0], size[1] + (zsize)),
scale = tailscale, texcoord = (0.0, 0.05, 1.0, 0.95), color = tailcol)
shaders.disable()
#MFH - this block of code renders the tail "beginning" - before the note, for freestyle "lanes" only
#volshebnyi
if freestyleTail > 0 and pos < self.freestyleStart + self.freestyleLength:
self.engine.draw3Dtex(tex2, vertex = (-size[0], 0-(zsize), size[0], 0 + (.05)),
scale = tailscale, texcoord = (0.0, 0.95, 1.0, 0.05), color = tailcol)
if tailOnly:
return
def renderNote(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False):
if flat:
glScalef(1, .1, 1)
if tailOnly:
return
if self.twoDnote == True:
#myfingershurt: this should be retrieved once at init, not repeatedly in-game whenever tails are rendered.
if self.notedisappear == True:#Notes keep on going when missed
notecol = (1,1,1)#capo
else:
if flat:#Notes disappear when missed
notecol = (.1,.1,.1)
else:
notecol = (1,1,1)
tailOnly == True
if self.theme < 2:
if self.starspin:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.150+self.starSpinFrameIndex*0.05, 0.175+self.starSpinFrameIndex*0.05)
else:
texY = (0.125+self.starSpinFrameIndex*0.05, 0.150+self.starSpinFrameIndex*0.05)
else:
if isTappable:
texY = (0.025,0.05)
else:
texY = (0,0.025)
if self.starPowerActive:
texY = (0.10,0.125) #QQstarS
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
else:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.6, 0.8)
else:
texY = (0.4,0.6)
else:
if isTappable:
texY = (0.2,0.4)
else:
texY = (0,0.2)
if self.starPowerActive:
texY = (0.8,1)
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
elif self.theme == 2:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (3*0.166667, 4*0.166667)
else:
texY = (2*0.166667, 3*0.166667)
else:
if isTappable:
texY = (1*0.166667, 2*0.166667)
else:
texY = (0, 1*0.166667)
#myfingershurt: adding spNote==False conditional so that star notes can appear in overdrive
if self.starPowerActive and spNote == False:
if isTappable:
texY = (5*0.166667, 1)
else:
texY = (4*0.166667, 5*0.166667)
self.engine.draw3Dtex(self.noteButtons, vertex = (-size[0],size[1],size[0],-size[1]), texcoord = (texSize[0],texY[0],texSize[1],texY[1]),
scale = (1,1,0), rot = (30,1,0,0), multiples = True, color = color, vertscale = .27)
else:
shaders.setVar("Material",color,"notes")
#mesh = outer ring (black)
#mesh_001 = main note (key color)
#mesh_002 = top (spot or hopo if no mesh_003)
#mesh_003 = hopo bump (hopo color)
if spNote == True and self.starMesh is not None:
meshObj = self.starMesh
else:
meshObj = self.noteMesh
glPushMatrix()
glEnable(GL_DEPTH_TEST)
glDepthMask(1)
glShadeModel(GL_SMOOTH)
if self.noterotate:
glRotatef(90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
if spNote == True and self.threeDspin == True:
glRotate(90 + self.time/3, 0, 1, 0)
#death_au: fixed 3D note colours
#volshebnyi - note color when sp is active
glColor4f(*color)
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):
c = self.fretColors[5]
glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
if fret == 0: # green note
glRotate(self.engine.theme.noterot[0], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[0], 0)
elif fret == 1: # red note
glRotate(self.engine.theme.noterot[1], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[1], 0)
elif fret == 2: # yellow
glRotate(self.engine.theme.noterot[2], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[2], 0)
elif fret == 3:# blue note
glRotate(self.engine.theme.noterot[3], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[3], 0)
elif fret == 4:# blue note
glRotate(self.engine.theme.noterot[4], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[4], 0)
if self.staratex == True and self.starPowerActive and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"staratex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.notetex == True and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"notetex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.startex == True and spNote == True:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"startex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
if shaders.enable("notes"):
shaders.setVar("isTextured",False)
meshObj.render("Mesh_001")
shaders.disable()
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
if isTappable:
if self.hopoColor[0] == -2:
glColor4f(*color)
else:
glColor3f(self.hopoColor[0], self.hopoColor[1], self.hopoColor[2])
if(meshObj.find("Mesh_003")) == True:
meshObj.render("Mesh_003")
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
meshObj.render("Mesh_002")
glColor3f(self.meshColor[0], self.meshColor[1], self.meshColor[2])
meshObj.render("Mesh")
glDepthMask(0)
glPopMatrix()
def renderFreestyleLanes(self, visibility, song, pos):
if not song:
return
if not song.readyToGo:
return
#boardWindowMin = pos - self.currentPeriod * 2
boardWindowMax = pos + self.currentPeriod * self.beatsPerBoard
track = song.midiEventTrack[self.player]
#MFH - render 5 freestyle tails when Song.freestyleMarkingNote comes up
if self.freestyleEnabled:
freestyleActive = False
#for time, event in track.getEvents(boardWindowMin, boardWindowMax):
for time, event in track.getEvents(pos - self.freestyleOffset , boardWindowMax + self.freestyleOffset):
if isinstance(event, Song.MarkerNote):
if event.number == Song.freestyleMarkingNote:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
w = self.boardWidth / self.strings
self.freestyleLength = event.length #volshebnyi
self.freestyleStart = time # volshebnyi
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#MFH - must extend the tail past the first fretboard section dynamically so we don't have to render the entire length at once
#volshebnyi - allow tail to move under frets
if time - self.freestyleOffset < pos:
freestyleActive = True
if z < -1.5:
length += z +1.5
z = -1.5
#MFH - render 5 freestyle tails
for theFret in range(0,5):
x = (self.strings / 2 - theFret) * w
c = self.fretColors[theFret]
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (theFret + 1), z)
freestyleTailMode = 1
self.renderTail(length, sustain = True, kill = False, color = color, flat = False, tailOnly = True, isTappable = False, big = True, fret = theFret, spNote = False, freestyleTail = freestyleTailMode, pos = pos)
glPopMatrix()
self.freestyleActive = freestyleActive
def renderNotes(self, visibility, song, pos, killswitch):
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
starEventsInView = False
renderedNotes = reversed(self.getRequiredNotesForRender(song,pos))
for time, event in renderedNotes:
#for time, event in reversed(track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)): #MFH - reverse order of note rendering
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
if self.lastBpmChange > 0 and self.disableVBPM == True:
continue
if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
self.targetBpm = event.bpm
self.lastBpmChange = time
self.neck.lastBpmChange = time
self.neck.baseBeat = self.baseBeat
# self.setBPM(self.targetBpm) # glorandwarf: was setDynamicBPM(self.targetBpm)
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue #can't break. Tempo.
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#volshebnyi - hide notes in BRE zone if BRE enabled
if self.freestyleEnabled and self.freestyleStart > 0:
if time >= self.freestyleStart-self.freestyleOffset and time < self.freestyleStart + self.freestyleLength+self.freestyleOffset:
z = -2.0
if self.twoDnote == True and not self.useFretColors:
color = (1,1,1, 1 * visibility * f)
else:
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if self.starPowerActive:
if self.spRefillMode == 0: #mode 0 = no starpower / overdrive refill notes
self.spEnabled = False
elif self.spRefillMode == 1 and self.theme != 2: #mode 1 = overdrive refill notes in RB themes only
self.spEnabled = False
elif self.spRefillMode == 2 and song.midiStyle != 1: #mode 2 = refill based on MIDI type
self.spEnabled = False
if event.star:
#self.isStarPhrase = True
starEventsInView = True
if event.finalStar:
self.finalStarSeen = True
starEventsInView = True
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
Log.debug("star power added")
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
if event.tappable < 2:
isTappable = False
else:
isTappable = True
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
            #MFH - filter out this tail whitening when starpower notes have been disabled from a screwup
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
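            # sustain threshold sketch: 60000.0/noteBpm is ms per beat, /4 a sixteenth,
            # *1.4 adds slack - e.g. at 120 BPM: 60000/120 = 500ms, /4 = 125ms, *1.4 = 175ms,
            # so only notes longer than ~175ms get a sustain tail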
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if shaders.turnon:
shaders.setVar("note_position",(x, (1.0 - visibility) ** (event.number + 1), z),"notes")
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote)
else:
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote)
glPopMatrix()
if (not starEventsInView and self.finalStarSeen):
self.spEnabled = True
self.finalStarSeen = False
self.isStarPhrase = False
def renderTails(self, visibility, song, pos, killswitch):
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
#for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard):
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
self.neck.ocount = 0
if event.tappable < 2:
isTappable = False
else:
isTappable = True
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote, pos = pos)
else:
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote, pos = pos)
glPopMatrix()
if killswitch and self.killfx == 1:
glBlendFunc(GL_SRC_ALPHA, GL_ONE)
for time, event in self.playedNotes:
step = self.currentPeriod / 16
t = time + event.length
x = (self.strings / 2 - event.number) * w
c = self.fretColors[event.number]
s = t
proj = 1.0 / self.currentPeriod / self.beatsPerUnit
zStep = step * proj
def waveForm(t):
u = ((t - time) * -.1 + pos - time) / 64.0 + .0001
return (math.sin(event.number + self.time * -.01 + t * .03) + math.cos(event.number + self.time * .01 + t * .02)) * .1 + .1 + math.sin(u) / (5 * u)
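                # waveForm sketch: two sinusoids keyed to the string number give the
                # flutter, and sin(u)/(5u) adds a ripple that decays away from the pick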
glBegin(GL_TRIANGLE_STRIP)
f1 = 0
while t > time:
                    z = min((t - pos) * proj, self.boardLength)
if z < 0:
break
f2 = min((s - t) / (6 * step), 1.0)
a1 = waveForm(t) * f1
a2 = waveForm(t - step) * f2
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x - a1, 0, z)
glVertex3f(x - a2, 0, z - zStep)
glColor4f(1, 1, 1, .75)
glVertex3f(x, 0, z)
glVertex3f(x, 0, z - zStep)
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x + a1, 0, z)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x - a2, 0, z - zStep)
t -= step
f1 = f2
glEnd()
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
def renderFrets(self, visibility, song, controls):
w = self.boardWidth / self.strings
size = (.22, .22)
v = 1.0 - visibility
glEnable(GL_DEPTH_TEST)
        #Hitglow color option - myfingershurt sez this should be a Guitar class global, not retrieved every fret render in-game...
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.fretPress:
y = v + f / 6
else:
y = v / 6
x = (self.strings / 2 - n) * w
if self.twoDkeys == True:
if self.battleStatus[4]:
fretWhamOffset = self.battleWhammyNow * .15
fretColor = (1,1,1,.5)
else:
fretWhamOffset = 0
fretColor = (1,1,1,1)
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2.4)
if self.battleStatus[3] and self.battleFrets != None and self.battleBreakString == n:
texSize = (n/5.0+.042,n/5.0+0.158)
size = (.30, .40)
fretPos = 8 - round((self.battleBreakNow/self.battleBreakLimit) * 8)
texY = (fretPos/8.0,(fretPos + 1.0)/8)
self.engine.draw3Dtex(self.battleFrets, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + .08 + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
texSize = (n/5.0,n/5.0+0.2)
texY = (0.0,1.0/3.0)
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]):
texY = (1.0/3.0,2.0/3.0)
if self.hit[n] or (self.battleStatus[3] and self.battleBreakString == n):
texY = (2.0/3.0,1.0)
self.engine.draw3Dtex(self.fretButtons, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
if self.keyMesh:
glPushMatrix()
glDepthMask(1)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glRotatef(90, 0, 1, 0)
glLightfv(GL_LIGHT0, GL_POSITION, (5.0, 10.0, -10.0, 0.0))
glLightfv(GL_LIGHT0, GL_AMBIENT, (.2, .2, .2, 0.0))
glLightfv(GL_LIGHT0, GL_DIFFUSE, (1.0, 1.0, 1.0, 0.0))
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
                    # per-fret key rotation/offset from the theme (0=green, 1=red, 2=yellow, 3=blue, 4=orange)
                    if 0 <= n <= 4:
                        glRotate(self.engine.theme.keyrot[n], 0, 1, 0)
                        glTranslatef(0, 0, self.engine.theme.keypos[n])
#Mesh - Main fret
#Key_001 - Top of fret (key_color)
#Key_002 - Bottom of fret (key2_color)
#Glow_001 - Only rendered when a note is hit along with the glow.svg
#if self.complexkey == True:
# glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], visibility)
# if self.battleStatus[4]:
# glTranslatef(x, y + self.battleWhammyNow * .15, 0)
# else:
# glTranslatef(x, y, 0)
if self.keytex == True:
glColor4f(1,1,1,visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glEnable(GL_TEXTURE_2D)
getattr(self,"keytex"+chr(97+n)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if f and not self.hit[n]:
self.keyMesh.render("Mesh_001")
elif self.hit[n]:
self.keyMesh.render("Mesh_002")
else:
self.keyMesh.render("Mesh")
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15 + v * 6, 0)
else:
glTranslatef(x, y + v * 6, 0)
key = self.keyMesh
if(key.find("Glow_001")) == True:
key.render("Mesh")
if(key.find("Key_001")) == True:
glColor3f(self.keyColor[0], self.keyColor[1], self.keyColor[2])
key.render("Key_001")
if(key.find("Key_002")) == True:
glColor3f(self.key2Color[0], self.key2Color[1], self.key2Color[2])
key.render("Key_002")
else:
key.render()
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glDepthMask(0)
glPopMatrix()
######################
f = self.fretActivity[n]
if f and self.disableFretSFX != True:
if self.glowColor[0] == -1:
s = 1.0
else:
s = 0.0
while s < 1:
ms = s * (math.sin(self.time) * .25 + 1)
if self.glowColor[0] == -2:
glColor3f(c[0] * (1 - ms), c[1] * (1 - ms), c[2] * (1 - ms))
else:
glColor3f(self.glowColor[0] * (1 - ms), self.glowColor[1] * (1 - ms), self.glowColor[2] * (1 - ms))
glPushMatrix()
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glScalef(.1 + .02 * ms * f, .1 + .02 * ms * f, .1 + .02 * ms * f)
glRotatef( 90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
if self.twoDkeys == False and self.keytex == False:
if(self.keyMesh.find("Glow_001")) == True:
key.render("Glow_001")
else:
key.render()
glPopMatrix()
s += 0.2
#Hitglow color
if self.hitglow_color == 0:
glowcol = (c[0], c[1], c[2])#Same as fret
elif self.hitglow_color == 1:
glowcol = (1, 1, 1)#Actual color in .svg-file
f += 2
if self.battleStatus[4]:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y + self.battleWhammyNow * .15, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
else:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
#self.hit[n] = False #MFH -- why? This prevents frets from being rendered under / before the notes...
glDisable(GL_DEPTH_TEST)
def renderFreestyleFlames(self, visibility, controls):
if self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
#track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
for fretNum in range(self.strings):
if controls.getState(self.keys[fretNum]) or controls.getState(self.keys[fretNum+5]):
if self.freestyleHitFlameCounts[fretNum] < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - fretNum) * w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
#flameSize = self.flameSizes[self.scoreMultiplier - 1][fretNum]
flameSize = self.flameSizes[self.cappedScoreMult - 1][fretNum]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else: #MFH - fixing crash!
#try:
# flameColor = self.flameColors[self.scoreMultiplier - 1][fretNum]
#except IndexError:
flameColor = self.fretColors[fretNum]
if flameColor[0] == -2:
flameColor = self.fretColors[fretNum]
ff += 1.5 #ff first time is 2.75 after this
if self.freestyleHitFlameCounts[fretNum] < flameLimitHalf:
flamecol = tuple([flameColor[ifc] for ifc in range(3)])
rbStarColor = (.1, .1, .2, .3)
xOffset = (.0, - .005, .005, .0)
yOffset = (.20, .255, .255, .255)
scaleMod = .6 * ms * ff
scaleFix = (6.0, 5.5, 5.0, 4.7)
for step in range(4):
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (rbStarColor[step],)*3
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - self.freestyleHitFlameCounts[fretNum])
flamecol = tuple([flameColor[ifc]*flameColorMod for ifc in range(3)])
xOffset = (.0, - .005, .005, .005)
yOffset = (.35, .405, .355, .355)
scaleMod = .6 * ms * ff
scaleFix = (3.0, 2.5, 2.0, 1.7)
for step in range(4):
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
else:
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.4+.1*step,)*3
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
self.freestyleHitFlameCounts[fretNum] += 1
else: #MFH - flame count is done - reset it!
self.freestyleHitFlameCounts[fretNum] = 0 #MFH
def renderFlames(self, visibility, song, pos, controls):
if not song or self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True and (self.HCountAni == True and self.HCount2 > 12):
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
y = v + f / 6
x = (self.strings / 2 - n) * w
f = self.fretActivity[n]
if f:
ms = math.sin(self.time) * .25 + 1
ff = f
ff += 1.2
#myfingershurt: need to cap flameSizes use of scoreMultiplier to 4x, the 5x and 6x bass groove mults cause crash:
self.cappedScoreMult = min(self.scoreMultiplier,4)
flameSize = self.flameSizes[self.cappedScoreMult - 1][n]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][n]
flameColorMod = (1.19, 1.97, 10.59)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.9,.9,.9)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglowDrawing, coord = (x, y + .125, 0), rot = (90, 1, 0, 0),
scale = (0.5 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
#Alarian: Animated hitflames
else:
self.HCount = self.HCount + 1
if self.HCount > self.Animspeed-1:
self.HCount = 0
HIndex = (self.HCount * 16 - (self.HCount * 16) % self.Animspeed) / self.Animspeed
if HIndex > 15:
HIndex = 0
texX = (HIndex*(1/16.0), HIndex*(1/16.0)+(1/16.0))
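                        # HIndex = floor(HCount * 16 / Animspeed); texX then selects a
                        # 1/16-wide strip of the 16-frame sprite sheet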
self.engine.draw3Dtex(self.hitglowAnim, coord = (x, y + .225, 0), rot = (90, 1, 0, 0), scale = (2.4, 1, 3.3),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
ff += .3
flameColorMod = (1.19, 1.78, 12.22)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.8,.8,.8)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglow2Drawing, coord = (x, y + .25, .05), rot = (90, 1, 0, 0),
scale = (.40 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
if isinstance(event, Tempo):
continue
if not isinstance(event, Note):
continue
if (event.played or event.hopod) and event.flameCount < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - event.number) * w
xlightning = (self.strings / 2 - event.number)*2.2*w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
flameSize = self.flameSizes[self.cappedScoreMult - 1][event.number]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][event.number]
if flameColor[0] == -2:
flameColor = self.fretColors[event.number]
ff += 1.5 #ff first time is 2.75 after this
if self.Hitanim2 == True:
self.HCount2 = self.HCount2 + 1
self.HCountAni = False
if self.HCount2 > 12:
if not event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
self.HCount2 = 0
else:
self.HCountAni = True
if event.flameCount < flameLimitHalf:
HIndex = (self.HCount2 * 13 - (self.HCount2 * 13) % 13) / 13
if HIndex > 12 and self.HCountAni != True:
HIndex = 0
texX = (HIndex*(1/13.0), HIndex*(1/13.0)+(1/13.0))
self.engine.draw3Dtex(self.hitflamesAnim, coord = (x, y + .665, 0), rot = (90, 1, 0, 0), scale = (1.6, 1.6, 4.9),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
if self.hitFlamesPresent == True:
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
elif self.hitFlamesPresent == True and self.Hitanim2 == False:
self.HCount2 = 13
self.HCountAni = True
if event.flameCount < flameLimitHalf:
flamecol = flameColor
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = .3
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (.1,.1,.1)
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x, y + .20, 0), rot = (90, 1, 0, 0),
scale = (.25 + .6 * ms * ff, event.flameCount/6.0 + .6 * ms * ff, event.flameCount / 6.0 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
for i in range(3):
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = 0.4+i*0.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (0.1+i*0.1,)*3
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x-.005, y + .255, 0), rot = (90, 1, 0, 0),
scale = (.30 + i*0.05 + .6 * ms * ff, event.flameCount/(5.5 - i*0.4) + .6 * ms * ff, event.flameCount / (5.5 - i*0.4) + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
event.flameCount += 1
def render(self, visibility, song, pos, controls, killswitch):
if shaders.turnon:
shaders.globals["dfActive"] = self.drumFillsActive
shaders.globals["breActive"] = self.freestyleActive
shaders.globals["rockLevel"] = self.rockLevel
if shaders.globals["killswitch"] != killswitch:
shaders.globals["killswitchPos"] = pos
shaders.globals["killswitch"] = killswitch
shaders.modVar("height",0.2,0.2,1.0,"tail")
if not self.starNotesSet == True:
self.totalNotes = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
self.totalNotes += 1
stars = []
maxStars = []
maxPhrase = self.totalNotes/120
for q in range(0,maxPhrase):
for n in range(0,10):
stars.append(self.totalNotes/maxPhrase*(q)+n+maxPhrase/4)
maxStars.append(self.totalNotes/maxPhrase*(q)+10+maxPhrase/4)
i = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for a in stars:
if i == a:
self.starNotes.append(time)
event.star = True
for a in maxStars:
if i == a:
self.maxStars.append(time)
event.finalStar = True
i += 1
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for q in self.starNotes:
if time == q:
event.star = True
for q in self.maxStars:
#if time == q and not event.finalStar:
# event.star = True
if time == q: #MFH - no need to mark only the final SP phrase note as the finalStar as in drums, they will be hit simultaneously here.
event.finalStar = True
self.starNotesSet = True
if not (self.coOpFailed and not self.coOpRestart):
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
if self.freestyleActive:
self.renderTails(visibility, song, pos, killswitch)
self.renderNotes(visibility, song, pos, killswitch)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
self.renderFrets(visibility, song, controls)
if self.hitFlamesPresent: #MFH - only if present!
self.renderFreestyleFlames(visibility, controls) #MFH - freestyle hit flames
else:
self.renderTails(visibility, song, pos, killswitch)
if self.fretsUnderNotes: #MFH
if self.twoDnote == True:
self.renderFrets(visibility, song, controls)
self.renderNotes(visibility, song, pos, killswitch)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
if self.hitFlamesPresent: #MFH - only if present!
self.renderFlames(visibility, song, pos, controls) #MFH - only when freestyle inactive!
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
#return notes
#MFH - corrected and optimized:
#def getRequiredNotesMFH(self, song, pos):
def getRequiredNotesMFH(self, song, pos, hopoTroubleCheck = False):
if self.battleStatus[2] and self.difficulty != 0:
if pos < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or pos > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
else:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track = song.track[self.player]
if hopoTroubleCheck:
notes = [(time, event) for time, event in track.getEvents(pos, pos + (self.earlyMargin*2)) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not time==pos] #MFH - filter out the problem note that caused this check!
else:
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + self.earlyMargin) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
notes = [(time, event) for time, event in notes if (time >= (pos - self.lateMargin)) and (time <= (pos + self.earlyMargin))]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
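    # hit-window sketch (margins are config-dependent, values here are hypothetical):
    # with lateMargin=140ms and earlyMargin=70ms, a pick at pos=10000 matches
    # unplayed notes whose times fall in [9860, 10070]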
def getDoubleNotes(self, notes):
if self.battleStatus[7] and notes != []:
notes = sorted(notes, key=lambda x: x[0])
curTime = 0
tempnotes = []
tempnumbers = []
tempnote = None
curNumbers = []
noteCount = 0
for time, note in notes:
noteCount += 1
if not isinstance(note, Note):
if noteCount == len(notes) and len(curNumbers) < 3 and len(curNumbers) > 0:
                        maxNote = max(curNumbers)
                        minNote = min(curNumbers)
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
continue
if time != curTime:
if curTime != 0 and len(curNumbers) < 3:
                        maxNote = max(curNumbers)
                        minNote = min(curNumbers)
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif (curTime != 0 or noteCount == len(notes)) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
tempnotes.append((time,deepcopy(note)))
curTime = time
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
                        maxNote = max(curNumbers)
                        minNote = min(curNumbers)
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
else:
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
                        maxNote = max(curNumbers)
                        minNote = min(curNumbers)
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
noteCount = 0
for time, note in tempnotes:
if tempnumbers[noteCount] != -1:
note.number = tempnumbers[noteCount]
noteCount += 1
if time > self.battleStartTimes[7] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[7] - self.currentPeriod * self.beatsPerBoard + self.battleDoubleLength:
notes.append((time,note))
else:
noteCount += 1
return sorted(notes, key=lambda x: x[0])
def getRequiredNotesForRender(self, song, pos):
if self.battleStatus[2] and self.difficulty != 0:
Log.debug(self.battleDiffUpValue)
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
track0 = song.track[self.player]
notes0 = [(time, event) for time, event in track0.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track1 = song.track[self.player]
notes1 = [(time, event) for time, event in track1.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
notes = []
for time,note in notes0:
if time < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or time > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
for time,note in notes1:
if time > self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
notes0 = None
notes1 = None
track0 = None
track1 = None
notes = sorted(notes, key=lambda x: x[0])
#Log.debug(notes)
else:
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return notes
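    # battle diff-up splicing above: inside the battleStartTimes[2] window the harder
    # difficulty's notes are rendered, the easier track's notes everywhere else
    # (hence the mirrored window tests on notes0 vs notes1)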
#MFH - corrected and optimized:
def getRequiredNotesForJurgenOnTime(self, song, pos):
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + 30) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def controlsMatchNotes(self, controls, notes):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
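        # e.g. a green+red chord at t=1000 arrives as [(1000, G), (1000, R)] and is
        # grouped under chords[1000]; sorting by each chord's first timestamp restores
        # event order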
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for k in self.keys:
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if n > max(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
def controlsMatchNotes2(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if hopo == False and n >= min(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
def controlsMatchNotes3(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
#chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
chordlist.sort(key=lambda a: a[0][0])
self.missedNotes = []
self.missedNoteNums = []
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, requiredKeys, hopo)):
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
break
if hopo == True:
break
self.missedNotes.append(chord)
else:
self.missedNotes = []
self.missedNoteNums = []
for chord in self.missedNotes:
for time, note in chord:
if self.debugMode:
self.missedNoteNums.append(note.number)
note.skipped = True
note.played = False
if twochord == 2:
self.twoChord += skipped
return True
#MFH - special function for HOPO intentions checking
def controlsMatchNextChord(self, controls, notes):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
self.requiredKeys = [note.number for time, note in chord]
self.requiredKeys = self.uniqify(self.requiredKeys)
if len(self.requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
self.twoChordApply = True
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(self.requiredKeys) - 2
self.requiredKeys = [min(self.requiredKeys), max(self.requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, self.requiredKeys, False)):
return True
else:
return False
def uniqify(self, seq, idfun=None):
# order preserving
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
# in old Python versions:
# if seen.has_key(marker)
# but in new ones:
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
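    # e.g. uniqify([2, 0, 2, 4]) -> [2, 0, 4]: duplicates dropped, first-seen order kept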
def controlsMatchNote3(self, controls, chordTuple, requiredKeys, hopo):
if len(chordTuple) > 1:
#Chords must match exactly
for n in range(self.strings):
if (n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))) or (n not in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))):
return False
else:
#Single Note must match that note
requiredKey = requiredKeys[0]
if not controls.getState(self.keys[requiredKey]) and not controls.getState(self.keys[requiredKey+5]):
return False
#myfingershurt: this is where to filter out higher frets held when HOPOing:
if hopo == False or self.hopoStyle == 2 or self.hopoStyle == 3:
#Check for higher numbered frets if not a HOPO or if GH2 strict mode
for n, k in enumerate(self.keys):
if (n > requiredKey and n < 5) or (n > 4 and n > requiredKey + 5):
#higher numbered frets cannot be held
if controls.getState(k):
return False
return True
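    # e.g. a lone yellow note (requiredKey=2) outside a HOPO: holding green+yellow
    # passes (lower frets may be anchored), but holding orange fails the higher-fret check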
def areNotesTappable(self, notes):
if not notes:
return
for time, note in notes:
if note.tappable > 1:
return True
return False
def startPick(self, song, pos, controls, hopo = False):
if hopo == True:
            res = self.startPick2(song, pos, controls, hopo)
return res
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes(song, pos)
if self.controlsMatchNotes(controls, self.matchingNotes):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
note.played = True
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
return True
return False
def startPick2(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes2(song, pos, hopo)
if self.controlsMatchNotes2(controls, self.matchingNotes, hopo):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
self.hopoLast = note.number
return True
return False
def startPick3(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.lastPlayedNotes = self.playedNotes
self.playedNotes = []
self.matchingNotes = self.getRequiredNotesMFH(song, pos)
self.controlsMatchNotes3(controls, self.matchingNotes, hopo)
#myfingershurt
for time, note in self.matchingNotes:
if note.played != True:
continue
if shaders.turnon:
shaders.var["fret"][self.player][note.number]=shaders.time()
shaders.var["fretpos"][self.player][note.number]=pos
self.pickStartPos = pos
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
#self.wasLastNoteHopod = False
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
if hopo: #MFH - you just tapped a 3 - make a note of it. (har har)
self.hopoProblemNoteNum = note.number
self.sameNoteHopoString = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.hopoLast = note.number
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
#myfingershurt: be sure to catch when a chord is played
if len(self.playedNotes) > 1:
lastPlayedNote = None
for time, note in self.playedNotes:
if isinstance(lastPlayedNote, Note):
if note.tappable == 1 and lastPlayedNote.tappable == 1:
self.LastStrumWasChord = True
#self.sameNoteHopoString = False
else:
self.LastStrumWasChord = False
lastPlayedNote = note
elif len(self.playedNotes) > 0: #ensure at least that a note was played here
self.LastStrumWasChord = False
if len(self.playedNotes) != 0:
return True
return False
def soloFreestylePick(self, song, pos, controls):
numHits = 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret+5])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
#MFH - TODO - handle freestyle picks here
def freestylePick(self, song, pos, controls):
numHits = 0
#if not song:
# return numHits
if not controls.getState(self.actions[0]) and not controls.getState(self.actions[1]):
return 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
def endPick(self, pos):
for time, note in self.playedNotes:
if time + note.length > pos + self.noteReleaseMargin:
self.playedNotes = []
return False
self.playedNotes = []
return True
def getPickLength(self, pos):
if not self.playedNotes:
return 0.0
# The pick length is limited by the played notes
pickLength = pos - self.pickStartPos
for time, note in self.playedNotes:
pickLength = min(pickLength, note.length)
return pickLength
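    # e.g. pickStartPos=1000 and a held note of length 250: getPickLength(1100)
    # returns min(1100 - 1000, 250) = 100, the sustain credited so far (in ms)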
def coOpRescue(self, pos):
self.coOpRestart = True #initializes Restart Timer
self.coOpRescueTime = pos
self.starPower = 0
Log.debug("Rescued at " + str(pos))
def run(self, ticks, pos, controls):
if not self.paused:
self.time += ticks
#MFH - Determine which frame to display for starpower notes
if self.starspin:
self.indexCount = self.indexCount + 1
if self.indexCount > self.Animspeed-1:
self.indexCount = 0
self.starSpinFrameIndex = (self.indexCount * self.starSpinFrames - (self.indexCount * self.starSpinFrames) % self.Animspeed) / self.Animspeed
if self.starSpinFrameIndex > self.starSpinFrames - 1:
self.starSpinFrameIndex = 0
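            # frame math: indexCount in [0, Animspeed) is scaled to a frame index via
            # floor(indexCount * starSpinFrames / Animspeed)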
#myfingershurt: must not decrease SP if paused.
if self.starPowerActive == True and self.paused == False:
self.starPower -= ticks/self.starPowerDecreaseDivisor
if self.starPower <= 0:
self.starPower = 0
self.starPowerActive = False
#MFH - call to play star power deactivation sound, if it exists (if not play nothing)
if self.engine.data.starDeActivateSoundFound:
#self.engine.data.starDeActivateSound.setVolume(self.sfxVolume)
self.engine.data.starDeActivateSound.play()
# update frets
if self.editorMode:
            if (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
                activeFrets = []  # must be initialized before use; it was referenced before assignment
                for i in range(self.strings):
                    if controls.getState(self.keys[i]) or controls.getState(self.keys[i+5]):
                        activeFrets.append(i)
                activeFrets = activeFrets or [self.selectedString]
else:
activeFrets = []
else:
activeFrets = [note.number for time, note in self.playedNotes]
for n in range(self.strings):
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]) or (self.editorMode and self.selectedString == n):
self.fretWeight[n] = 0.5
else:
self.fretWeight[n] = max(self.fretWeight[n] - ticks / 64.0, 0.0)
if n in activeFrets:
self.fretActivity[n] = min(self.fretActivity[n] + ticks / 32.0, 1.0)
else:
self.fretActivity[n] = max(self.fretActivity[n] - ticks / 64.0, 0.0)
#MFH - THIS is where note sustains should be determined... NOT in renderNotes / renderFrets / renderFlames -.-
if self.fretActivity[n]:
self.hit[n] = True
else:
self.hit[n] = False
if self.vbpmLogicType == 0: #MFH - VBPM (old)
if self.currentBpm != self.targetBpm:
diff = self.targetBpm - self.currentBpm
if (round((diff * .03), 4) != 0):
self.currentBpm = round(self.currentBpm + (diff * .03), 4)
else:
self.currentBpm = self.targetBpm
self.setBPM(self.currentBpm) # glorandwarf: was setDynamicBPM(self.currentBpm)
for time, note in self.playedNotes:
if pos > time + note.length:
return False
return True
| gpl-2.0 | -2,108,389,822,770,771,500 | 37.978067 | 223 | 0.549018 | false |
gustavofoa/pympm | apps/mpm/models/Musica.py | 1 | 2085 | from django.db import models
class Musica(models.Model):
slug = models.SlugField(primary_key=True, max_length=100)
nome = models.CharField(max_length=255)
letra = models.TextField()
cifra = models.TextField()
info = models.TextField()
link_video = models.URLField(blank=True, null=True)
categorias = models.ManyToManyField("Categoria")
rating = models.FloatField(blank=True, null=True)
votes = models.PositiveIntegerField(blank=True, null=True)
link_lpsalmo = models.URLField(blank=True, null=True)
tem_imagem = models.BooleanField(default=False)
banner_lateral = models.ForeignKey("Banner", related_name="banner_lateral_mus", blank=True, null=True)
banner_footer = models.ForeignKey("Banner", related_name="banner_footer_mus", blank=True, null=True)
class Meta:
app_label = "mpm"
def __str__(self):
return self.nome.encode('utf-8')
def get_video_code(self):
if self.link_video:
try:
return self.link_video[self.link_video.rindex('/'):].replace("embed",'').replace('watch?v=','').replace('v=','')
except ValueError:
return ""
else:
return ""
def add_rate(self, rate):
#weighted average
self.rating = (self.rating * self.votes + rate*100/5) / (self.votes + 1)
self.votes += 1
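        # rating is kept on a 0-100 scale (rate*100/5 maps a 0-5 star vote onto it);
        # e.g. rating=80.0 over 4 votes plus one 5-star vote: (80*4 + 100)/5 = 84.0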
def get_rating_per_5(self):
return self.rating * 5 / 100.0
def get_formated_rating(self):
return "%.2f" % self.rating
def get_legend(self):
plural = ""
if(self.votes > 1):
plural = "s"
retorno = "<span property='ratingValue'>%.2f</span> em <span property='ratingCount'>%d</span> voto%s"
return retorno % (self.get_rating_per_5(), self.votes, plural)
def get_absolute_url(self):
return "/musica/%s/" % self.slug
def get_inicio(self):
        # strip whole or truncated <strong> tags (the 140-char slice may cut one in half);
        # fragments are removed in the same order as the original chained replace() calls
        retorno = self.letra[:140]
        for frag in ("<strong>", "<strong", "<stron", "<stro", "<str", "<st", "<s",
                     "</strong>", "</strong", "</stron", "</stro", "</str", "</st", "</s",
                     "</", "<"):
            retorno = retorno.replace(frag, '')
        return retorno
| apache-2.0 | -3,247,626,394,395,597,000 | 41.55102 | 168 | 0.668106 | false |
frerepoulet/ZeroNet | src/Config.py | 1 | 20105 | import argparse
import sys
import os
import locale
import re
import ConfigParser
class Config(object):
def __init__(self, argv):
self.version = "0.5.4"
self.rev = 2054
self.argv = argv
self.action = None
self.config_file = "zeronet.conf"
self.createParser()
self.createArguments()
def createParser(self):
# Create parser
self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.parser.register('type', 'bool', self.strToBool)
self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action")
def __str__(self):
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
# Convert string to bool
def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1")
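    # e.g. strToBool("Yes") -> True, strToBool("1") -> True, strToBool("0") -> False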
# Create command line arguments
def createArguments(self):
trackers = [
"zero://boot3rdez4rzn36x.onion:15441",
"zero://boot.zeronet.io#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:15441",
"udp://tracker.coppersurfer.tk:6969",
"udp://tracker.leechers-paradise.org:6969",
"udp://9.rarbg.com:2710",
"http://tracker.opentrackr.org:1337/announce",
"http://explodie.org:6969/announce",
"http://tracker1.wasabii.com.tw:6969/announce"
]
# Platform specific
if sys.platform.startswith("win"):
coffeescript = "type %s | tools\\coffee\\coffee.cmd"
else:
coffeescript = None
try:
language, enc = locale.getdefaultlocale()
language = language.split("_")[0]
except Exception:
language = "en"
use_openssl = True
if repr(1483108852.565) != "1483108852.565":
fix_float_decimals = True
else:
fix_float_decimals = False
this_file = os.path.abspath(__file__).replace("\\", "/")
if this_file.endswith("/Contents/Resources/core/src/Config.py"):
# Running as ZeroNet.app
if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
                # Running from a non-writeable directory, put data in Application Support
start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet").decode(sys.getfilesystemencoding())
else:
                # Running from a writeable directory, put data next to the .app
start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file).decode(sys.getfilesystemencoding())
config_file = start_dir + "/zeronet.conf"
data_dir = start_dir + "/data"
log_dir = start_dir + "/log"
elif this_file.endswith("/core/src/Config.py"):
            # Running as exe, or source is in the Application Support directory; put var files outside of the core dir
start_dir = this_file.replace("/core/src/Config.py", "").decode(sys.getfilesystemencoding())
config_file = start_dir + "/zeronet.conf"
data_dir = start_dir + "/data"
log_dir = start_dir + "/log"
else:
config_file = "zeronet.conf"
data_dir = "data"
log_dir = "log"
ip_local = ["127.0.0.1"]
# Main
action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
# SiteCreate
action = self.subparsers.add_parser("siteCreate", help='Create a new site')
# SiteNeedFile
action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
action.add_argument('address', help='Site address')
action.add_argument('inner_path', help='File inner path')
# SiteDownload
action = self.subparsers.add_parser("siteDownload", help='Download a new site')
action.add_argument('address', help='Site address')
# SiteSign
action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
action.add_argument('address', help='Site to sign')
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
default="content.json", metavar="inner_path")
        action.add_argument('--remove_missing_optional', help='Remove optional files that are not present in the directory', action='store_true')
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
# SitePublish
action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
action.add_argument('address', help='Site to publish')
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
default=None, nargs='?')
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
# SiteVerify
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action.add_argument('address', help='Site to verify')
# dbRebuild
action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
action.add_argument('address', help='Site to rebuild')
# dbQuery
action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
action.add_argument('address', help='Site to query')
action.add_argument('query', help='Sql query')
# PeerPing
action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port', nargs='?')
# PeerGetFile
action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('site', help='Site address')
action.add_argument('filename', help='File name to request')
        action.add_argument('--benchmark', help='Request the file 10x, then display the total time', action='store_true')
# PeerCmd
action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('cmd', help='Command to execute')
action.add_argument('parameters', help='Parameters to command', nargs='?')
# CryptSign
action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
action.add_argument('message', help='Message to sign')
action.add_argument('privatekey', help='Private key')
# Config parameters
self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
self.parser.add_argument('--debug', help='Debug mode', action='store_true')
self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
self.parser.add_argument('--debug_gevent', help='Debug gevent functions', action='store_true')
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
nargs='?', const="default_browser", metavar='browser_name')
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
metavar='address')
self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp',
metavar='address')
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='size')
        self.parser.add_argument('--connected_limit', help='Max connected peers per site', default=8, type=int, metavar='connected_limit')
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
        self.parser.add_argument('--ip_local', help='My local IPs', default=ip_local, metavar='ip', nargs='*')
self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip')
        self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path')
        self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
type='bool', choices=[True, False], default=use_openssl)
self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
type='bool', choices=[True, False], default=True)
self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
default=2048, type=int, metavar='limit')
self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
type='bool', choices=[True, False], default=True)
self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
type='bool', choices=[True, False], default=fix_float_decimals)
self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
metavar='executable_path')
self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services', metavar='limit', type=int, default=10)
self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
return self.parser
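    # Illustrative sketch (assumption): typical invocations parsed by the
    # arguments defined above; zeronet.py stands in for the entry script and
    # <address> is a placeholder.
    #
    #     python zeronet.py                              # implicit "main" action
    #     python zeronet.py --ui_port 43111 main
    #     python zeronet.py siteSign <address> --publish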
def loadTrackersFile(self):
self.trackers = []
for tracker in open(self.trackers_file):
if "://" in tracker:
self.trackers.append(tracker.strip())
# Find arguments specified for current action
def getActionArguments(self):
back = {}
arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # First is --version
for argument in arguments:
back[argument.dest] = getattr(self, argument.dest)
return back
# Try to find action from argv
def getAction(self, argv):
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
found_action = False
for action in actions: # See if any in argv
if action in argv:
found_action = action
break
return found_action
# Move plugin parameters to end of argument list
def moveUnknownToEnd(self, argv, default_action):
valid_actions = sum([action.option_strings for action in self.parser._actions], [])
valid_parameters = []
plugin_parameters = []
plugin = False
for arg in argv:
if arg.startswith("--"):
if arg not in valid_actions:
plugin = True
else:
plugin = False
elif arg == default_action:
plugin = False
if plugin:
plugin_parameters.append(arg)
else:
valid_parameters.append(arg)
return valid_parameters + plugin_parameters
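    # Worked example (illustrative; "--plugin_opt" is a made-up plugin flag):
    # with default_action "main",
    #     ["zeronet.py", "--debug", "--plugin_opt", "x", "main"]
    # is reordered to
    #     ["zeronet.py", "--debug", "main", "--plugin_opt", "x"]
    # so argparse sees the known options first and plugin options last.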
# Parse arguments from config file and command line
def parse(self, silent=False, parse_config=True):
if silent: # Don't display messages or quit on unknown parameter
original_print_message = self.parser._print_message
original_exit = self.parser.exit
def silencer(parser, function_name):
parser.exited = True
return None
self.parser.exited = False
self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message")
self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit")
argv = self.argv[:] # Copy command line arguments
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if parse_config:
argv = self.parseConfig(argv) # Add arguments from config file
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if not silent:
if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
self.ip_local.append(self.fileserver_ip)
if silent: # Restore original functions
if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
self.action = None
self.parser._print_message = original_print_message
self.parser.exit = original_exit
# Parse command line arguments
def parseCommandline(self, argv, silent=False):
        # Find out if an action is specified on start
action = self.getAction(argv)
if not action:
argv.append("main")
action = "main"
argv = self.moveUnknownToEnd(argv, action)
if silent:
res = self.parser.parse_known_args(argv[1:])
if res:
self.arguments = res[0]
else:
self.arguments = {}
else:
self.arguments = self.parser.parse_args(argv[1:])
# Parse config file
def parseConfig(self, argv):
# Find config file path from parameters
if "--config_file" in argv:
self.config_file = argv[argv.index("--config_file") + 1]
# Load config file
if os.path.isfile(self.config_file):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(self.config_file)
for section in config.sections():
for key, val in config.items(section):
if section != "global": # If not global prefix key with section
key = section + "_" + key
if val:
for line in val.strip().split("\n"): # Allow multi-line values
argv.insert(1, line)
argv.insert(1, "--%s" % key)
return argv
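    # Illustrative sketch (assumption): a config file such as
    #
    #     [global]
    #     ui_port = 43111
    #     [tor]
    #     password = secret
    #
    # is injected into argv as "--ui_port 43111 --tor_password secret";
    # keys outside the [global] section get their section name as a prefix.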
# Expose arguments as class attributes
def setAttributes(self):
# Set attributes from arguments
if self.arguments:
args = vars(self.arguments)
for key, val in args.items():
setattr(self, key, val)
def loadPlugins(self):
from Plugin import PluginManager
@PluginManager.acceptPlugins
class ConfigPlugin(object):
def __init__(self, config):
self.parser = config.parser
self.createArguments()
def createArguments(self):
pass
ConfigPlugin(self)
def saveValue(self, key, value):
if not os.path.isfile(self.config_file):
content = ""
else:
content = open(self.config_file).read()
lines = content.splitlines()
global_line_i = None
key_line_i = None
i = 0
for line in lines:
if line.strip() == "[global]":
global_line_i = i
if line.startswith(key + " = "):
key_line_i = i
i += 1
if value is None: # Delete line
            if key_line_i is not None:  # 0 is a valid line index
del lines[key_line_i]
else: # Add / update
new_line = "%s = %s" % (key, str(value).replace("\n", "").replace("\r", ""))
            if key_line_i is not None:  # Already in the config, change the line
lines[key_line_i] = new_line
elif global_line_i is None: # No global section yet, append to end of file
lines.append("[global]")
lines.append(new_line)
else: # Has global section, append the line after it
lines.insert(global_line_i + 1, new_line)
open(self.config_file, "w").write("\n".join(lines))
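# Illustrative sketch (assumption): given a zeronet.conf containing only
# "[global]", config.saveValue("ui_port", 43111) appends the line
# "ui_port = 43111" right after the [global] header, and a later
# config.saveValue("ui_port", None) deletes that line again.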
config = Config(sys.argv)
| gpl-2.0 | 7,540,716,014,782,910,000 | 49.515075 | 177 | 0.608953 | false |
OCA/l10n-brazil | l10n_br_fiscal/tests/test_ibpt_service.py | 1 | 3486 | # Copyright 2019 Akretion - Renato Lima <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo.tests import SavepointCase
class TestIbptService(SavepointCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
cls.company = cls._create_compay()
cls._switch_user_company(cls.env.user, cls.company)
cls.nbs_115069000 = cls.env.ref("l10n_br_fiscal.nbs_115069000")
cls.nbs_124043300 = cls.env.ref("l10n_br_fiscal.nbs_124043300")
cls.product_tmpl_model = cls.env["product.template"]
cls.product_tmpl_1 = cls._create_product_tmpl(
name="Service Test 1 - With NBS: 1.1506.90.00", nbs=cls.nbs_115069000
)
cls.product_tmpl_2 = cls._create_product_tmpl(
name="Product Test 2 - With NBS: 1.1506.90.00", nbs=cls.nbs_115069000
)
cls.product_tmpl_3 = cls._create_product_tmpl(
name="Product Test 3 - With NBS: 1.2404.33.00", nbs=cls.nbs_124043300
)
cls.tax_estimate_model = cls.env["l10n_br_fiscal.tax.estimate"]
cls.nbs_model = cls.env["l10n_br_fiscal.nbs"]
@classmethod
def _switch_user_company(cls, user, company):
""" Add a company to the user's allowed & set to current. """
user.write(
{
"company_ids": [(6, 0, (company + user.company_ids).ids)],
"company_id": company.id,
}
)
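        # Note (illustrative): the (6, 0, ids) triple is Odoo's many2many
        # "replace" command: it sets company_ids to exactly the given ids.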
@classmethod
def _create_compay(cls):
# Creating a company
company = cls.env["res.company"].create(
{
"name": "Company Test Fiscal BR",
"cnpj_cpf": "02.960.895/0002-12",
"country_id": cls.env.ref("base.br").id,
"state_id": cls.env.ref("base.state_br_es").id,
"ibpt_api": True,
"ibpt_update_days": 0,
"ibpt_token": (
"dsaaodNP5i6RCu007nPQjiOPe5XIefnx"
"StS2PzOV3LlDRVNGdVJ5OOUlwWZhjFZk"
),
}
)
return company
@classmethod
def _create_product_tmpl(cls, name, nbs):
# Creating a product
product = cls.product_tmpl_model.create({"name": name, "nbs_id": nbs.id})
return product
def test_update_ibpt_service(self):
"""Check tax estimate update"""
self.nbs_115069000.action_ibpt_inquiry()
self.assertTrue(self.nbs_115069000.tax_estimate_ids)
self.nbs_124043300.action_ibpt_inquiry()
self.assertTrue(self.nbs_124043300.tax_estimate_ids)
self.tax_estimate_model.search(
[("nbs_id", "in", (self.nbs_115069000.id, self.nbs_124043300.id))]
).unlink()
def test_nbs_count_product_template(self):
"""Check product template relation with NBS"""
self.assertEqual(self.nbs_115069000.product_tmpl_qty, 2)
self.assertEqual(self.nbs_124043300.product_tmpl_qty, 1)
def test_update_scheduled(self):
"""Check NBS update scheduled"""
nbss = self.nbs_model.search(
[("id", "in", (self.nbs_115069000.id, self.nbs_124043300.id))]
)
nbss._scheduled_update()
self.assertTrue(self.nbs_115069000.tax_estimate_ids)
self.assertTrue(self.nbs_124043300.tax_estimate_ids)
self.tax_estimate_model.search(
[("nbs_id", "in", (self.nbs_115069000.id, self.nbs_124043300.id))]
).unlink()
| agpl-3.0 | -3,261,681,379,221,557,000 | 35.3125 | 81 | 0.582616 | false |
waqasbhatti/wcs2kml | python/fitsimage.py | 1 | 16860 | #!/usr/bin/env python
# Library for treating FITS files as Python Imaging Library objects
# Copyright (c) 2005, 2006, 2007, Jeremy Brewer
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * The names of the contributors may not be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Changelog:
#
# 3/31/08  Fixed overflow errors that were occurring when zscale_range was
# returning weird types for zmin and zmax. Now we force zmin & zmax
# to be of builtin type float for safety.
#
# 10/17/07 Added manual range selection to FitsImage. Fixed typecode for
# numpy to use unsigned 8 bit integers.
#
# 9/25/07 Added call to fits_simple_verify() to verify input file is FITS.
# Removed kwargs from FitsImage() because pyfits doesn't use them.
#
# 9/14/07 Changed array usage from Numeric to numpy. Changed underlying
# FITS I/O library from fitslib to pyfits. Modifications made
# by Christopher Hanley.
#
# 8/20/07 Write arcsinh scaling algorithm and adding scaling options.
# Updated documentation. Dropped number of channels check on
# color -- PIL should handle this instead.
#
# 8/17/07 Wrote new scaling algorithm, percentile_range(), that determines
# the range to use from a configurable percentile cut. Now
# FitsImage() takes optional named arguments to configure which
# contrast algorithm to use. In addition, keyword arguments are
# passed on to Fits() to configure how minor errors are handled.
#
# 7/4/07 Updated to use Numeric. Improved speed of zscale_range().
#
# 10/10/06 Increased accuracy of draw_circle().
#
# 2/7/06 Updated documentation.
#
# 1/4/06 Fixed bug in zscale_range() where num_points and num_pixels
# sometimes differed, resulting in the sigma iteration failing because
# the arrays would differ in length. Now the arrays are both of
# size num_pixels. Some additional checks were also added.
#
# 12/10/05 Updated documentation.
#
# 12/8/05 Now draw_circle will not draw points that lie outside of the image.
#
# 12/7/05  Wrote zscale_range() function which implements the ds9 zscale
#          autocontrast algorithm for FITS images.  Wrote a new version of
#          asImage(), now called FitsImage(), that returns a PIL Image object
#          without use of the convert commandline utility.  Rewrote convert()
#          and resize() methods so that they do not have to use the convert
#          command externally.  Removed all of the other asImage() methods
#          that weren't working.
"""
Module for treating a FITS image as a Python Imaging Library (PIL) object.
This is extremely useful if you want to convert a FITS image to jpeg and/or
perform various operations on it (such as drawing).
The contrast for FITS images is determined using the zscale algorithm by
default, but this can be configured with various options. See the
documentation for zscale_range() and percentile_range() for more information.
Example Usage:
# image is a full PIL object
image = fitsimage.FitsImage("foo.fits")
image.save("foo.jpg")
"""
__author__ = "Jeremy Brewer ([email protected])"
__copyright__ = "Copyright 2005, 2006, 2007 Jeremy Brewer"
__license__ = "BSD"
__version__ = "1.1"
import os
import sys
import cmath
import fitslib
import pyfits
import pointarray
import Image
import ImageDraw
import numpy
def zscale_range(image_data, contrast=0.25, num_points=600, num_per_row=120):
"""
Computes the range of pixel values to use when adjusting the contrast
    of FITS images using the zscale algorithm.  The zscale algorithm
    originates in IRAF.  More information about it can be found in the help
    section for DISPLAY in IRAF.
Briefly, the zscale algorithm uses an evenly distributed subsample of the
input image instead of a full histogram. The subsample is sorted by
intensity and then fitted with an iterative least squares fit algorithm.
The endpoints of this fit give the range of pixel values to use when
adjusting the contrast.
    Input:  image_data -- the array of data contained in the FITS image
(must have 2 dimensions)
contrast -- the contrast parameter for the zscale algorithm
num_points -- the number of points to use when sampling the
image data
num_per_row -- number of points per row when sampling
Return: 1.) The minimum pixel value to use when adjusting contrast
2.) The maximum pixel value to use when adjusting contrast
"""
# check input shape
if len(image_data.shape) != 2:
raise ValueError("input data is not an image")
# check contrast
if contrast <= 0.0:
contrast = 1.0
# check number of points to use is sane
if num_points > numpy.size(image_data) or num_points < 0:
num_points = 0.5 * numpy.size(image_data)
# determine the number of points in each column
num_per_col = int(float(num_points) / float(num_per_row) + 0.5)
# integers that determine how to sample the control points
xsize, ysize = image_data.shape
row_skip = float(xsize - 1) / float(num_per_row - 1)
col_skip = float(ysize - 1) / float(num_per_col - 1)
# create a regular subsampled grid which includes the corners and edges,
# indexing from 0 to xsize - 1, ysize - 1
data = []
for i in xrange(num_per_row):
x = int(i * row_skip + 0.5)
for j in xrange(num_per_col):
y = int(j * col_skip + 0.5)
data.append(image_data[x, y])
# actual number of points selected
num_pixels = len(data)
# sort the data by intensity
data.sort()
# check for a flat distribution of pixels
data_min = min(data)
data_max = max(data)
center_pixel = (num_pixels + 1) / 2
if data_min == data_max:
return data_min, data_max
    # compute the median (averaging the two middle values for an even count)
    if num_pixels % 2 == 0:
        median = 0.5 * (data[center_pixel - 1] + data[center_pixel])
    else:
        median = data[center_pixel - 1]
# compute an iterative fit to intensity
pixel_indeces = map(float, xrange(num_pixels))
points = pointarray.PointArray(pixel_indeces, data, min_err=1.0e-4)
fit = points.sigmaIterate()
num_allowed = 0
for pt in points.allowedPoints():
num_allowed += 1
if num_allowed < int(num_pixels / 2.0):
return data_min, data_max
# compute the limits
z1 = median - (center_pixel - 1) * (fit.slope / contrast)
z2 = median + (num_pixels - center_pixel) * (fit.slope / contrast)
if z1 > data_min:
zmin = z1
else:
zmin = data_min
if z2 < data_max:
zmax = z2
else:
zmax = data_max
# last ditch sanity check
if zmin >= zmax:
zmin = data_min
zmax = data_max
return zmin, zmax
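# Illustrative sketch (not part of the original module): exercising
# zscale_range() on synthetic pixel data; the array contents are assumptions
# chosen only for demonstration.
def _zscale_range_demo():
    # flat 1000-count "sky" with a bright square patch in the middle
    pixels = numpy.zeros((200, 200), dtype="d") + 1000.0
    pixels[90:110, 90:110] = 5000.0
    zmin, zmax = zscale_range(pixels, contrast=0.25)
    # clip the raw data to the computed display range
    return numpy.clip(pixels, zmin, zmax)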
def percentile_range(image_data, min_percent=3.0, max_percent=99.0,
num_points=5000, num_per_row=250):
"""
Computes the range of pixel values to use when adjusting the contrast
    of FITS images using a simple percentile cut.  For efficiency reasons,
only a subsample of the input image data is used.
    Input:  image_data -- the array of data contained in the FITS image
(must have 2 dimensions)
min_percent -- min percent value between (0, 100)
max_percent -- max percent value between (0, 100)
num_points -- the number of points to use when sampling the
image data
num_per_row -- number of points per row when sampling
Return: 1.) The minimum pixel value to use when adjusting contrast
2.) The maximum pixel value to use when adjusting contrast
"""
if not 0 <= min_percent <= 100:
raise ValueError("invalid value for min percent '%s'" % min_percent)
elif not 0 <= max_percent <= 100:
raise ValueError("invalid value for max percent '%s'" % max_percent)
min_percent = float(min_percent) / 100.0
max_percent = float(max_percent) / 100.0
# check input shape
if len(image_data.shape) != 2:
raise ValueError("input data is not an image")
# check number of points to use is sane
if num_points > numpy.size(image_data) or num_points < 0:
num_points = 0.5 * numpy.size(image_data)
# determine the number of points in each column
num_per_col = int(float(num_points) / float(num_per_row) + 0.5)
# integers that determine how to sample the control points
xsize, ysize = image_data.shape
row_skip = float(xsize - 1) / float(num_per_row - 1)
col_skip = float(ysize - 1) / float(num_per_col - 1)
# create a regular subsampled grid which includes the corners and edges,
# indexing from 0 to xsize - 1, ysize - 1
data = []
for i in xrange(num_per_row):
x = int(i * row_skip + 0.5)
for j in xrange(num_per_col):
y = int(j * col_skip + 0.5)
data.append(image_data[x, y])
# perform a simple percentile cut
data.sort()
zmin = data[int(min_percent * len(data))]
    zmax = data[min(int(max_percent * len(data)), len(data) - 1)]  # stay in range when max_percent is 100
return zmin, zmax
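# Illustrative sketch (assumption): a percentile cut on a smooth synthetic
# ramp; with these arguments the faintest 3% and brightest 1% of the sampled
# pixels are trimmed.
def _percentile_range_demo():
    pixels = numpy.arange(10000.0).reshape(100, 100)
    return percentile_range(pixels, min_percent=3.0, max_percent=99.0)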
def FitsImage(fitsfile, contrast="zscale", contrast_opts={}, scale="linear",
scale_opts={}):
"""
Constructor-like function that returns a Python Imaging Library (PIL)
Image object. This allows extremely easy and powerful manipulation of
FITS files as images. The contrast is automatically adjusted using the
zscale algorithm (see zscale_range() above).
Input: fitsfile -- a FITS image filename
contrast -- the algorithm for determining the min/max
values in the FITS pixel data to use when
compressing the dynamic range of the FITS
data to something visible by the eye, either
"zscale", "percentile", or "manual"
contrast_opts -- options for the contrast algorithm, see
the optional args of [contrast]_range()
for what to name the keys
scale -- how to scale the pixel values between the
min/max values from the contrast
                               algorithm when converting to a raster
format, either "linear" or "arcsinh"
scale_opts -- options for the scaling algorithm, currently
only "nonlinearity" is supported for arcsinh,
which has a default value of 3
"""
if contrast not in ("zscale", "percentile", "manual"):
raise ValueError("invalid contrast algorithm '%s'" % contrast)
if scale not in ("linear", "arcsinh"):
raise ValueError("invalid scale value '%s'" % scale)
# open the fits file and read the image data and size
fitslib.fits_simple_verify(fitsfile)
fits = pyfits.open(fitsfile)
try:
hdr = fits[0].header
xsize = hdr["NAXIS1"]
ysize = hdr["NAXIS2"]
fits_data = fits[0].data
finally:
fits.close()
# compute the proper scaling for the image
if contrast == "zscale":
contrast_value = contrast_opts.get("contrast", 0.25)
num_points = contrast_opts.get("num_points", 600)
num_per_row = contrast_opts.get("num_per_row", 120)
zmin, zmax = zscale_range(fits_data, contrast=contrast_value,
num_points=num_points,
num_per_row=num_per_row)
elif contrast == "percentile":
min_percent = contrast_opts.get("min_percent", 3.0)
max_percent = contrast_opts.get("max_percent", 99.0)
num_points = contrast_opts.get("num_points", 5000)
num_per_row = contrast_opts.get("num_per_row", 250)
zmin, zmax = percentile_range(fits_data, min_percent=min_percent,
max_percent=max_percent,
num_points=num_points,
num_per_row=num_per_row)
elif contrast == "manual":
zmin = contrast_opts.get("min", None)
zmax = contrast_opts.get("max", None)
        if zmin is None:
            zmin = fits_data.min()
        if zmax is None:
            zmax = fits_data.max()
# sometimes the zscale_range or other numpy routines return different types
# for zmin and zmax (e.g. float32 and float64), which results in overflow
# errors below
zmin = float(zmin)
zmax = float(zmax)
fits_data = numpy.where(fits_data > zmin, fits_data, zmin)
fits_data = numpy.where(fits_data < zmax, fits_data, zmax)
if scale == "linear":
scaled_data = (fits_data - zmin) * (255.0 / (zmax - zmin)) + 0.5
elif scale == "arcsinh":
# nonlinearity sets the range over which we sample values of the
# asinh function; values near 0 are linear and values near infinity
# are logarithmic
nonlinearity = scale_opts.get("nonlinearity", 3.0)
nonlinearity = max(nonlinearity, 0.001)
max_asinh = cmath.asinh(nonlinearity).real
scaled_data = (255.0 / max_asinh) * \
(numpy.arcsinh((fits_data - zmin) * \
(nonlinearity / (zmax - zmin))))
# convert to 8 bit unsigned int
scaled_data = scaled_data.astype("B")
# create the image
image = Image.frombuffer("L", (xsize, ysize), scaled_data, "raw", "L", 0, 0)
return image
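# Illustrative sketch (assumption): the contrast and scale options can be
# combined; "foo.fits" is a placeholder filename and the numeric values are
# arbitrary.
#
#     image = FitsImage("foo.fits", contrast="manual",
#                       contrast_opts={"min": 0.0, "max": 1000.0},
#                       scale="arcsinh", scale_opts={"nonlinearity": 5.0})
#     image.convert("RGB").save("foo.png")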
def draw_circle(image, x, y, radius, color):
"""
Draws a circle on image at position x, y with the given radius and
color.
Input: image -- the image object to draw the circle on
x -- the x position of the center of the circle
y -- the y position of the center of the circle
radius -- the radius of the circle in pixels
color -- a tuple containing the color of the border of the
circle, ranging from 0 to 255 for each channel
"""
# arc takes the upper left and lower right corners of a box bounding the
# circle as arguments. Here (x1, y1) gives the coordinates of the upper left
# corner and (x2, y2) gives the lower right corner of the bounding box.
x1 = int(x - radius + 0.5)
y1 = int(y - radius + 0.5)
x2 = int(x + radius + 0.5)
y2 = int(y + radius + 0.5)
xsize, ysize = image.size
# draw the circle
draw = ImageDraw.Draw(image)
draw.arc((x1, y1, x2, y2), 0, 360, fill=color)
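# Illustrative sketch (assumption): overlaying a marker on a converted image;
# the position, radius and color are arbitrary example values.
def _draw_circle_demo(image):
    # red circle of radius 10 pixels centered at (50, 50) on an RGB image
    draw_circle(image, 50, 50, 10, (255, 0, 0))
    return image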
def main(argv):
import time
if len(argv) != 2:
print "Usage: %s <fits-file>" % os.path.basename(argv[0])
print "Input file will be converted to JPEG"
sys.exit(2)
# FITS image to open and JPEG counterpart
fitsfile = argv[1]
name, ext = os.path.splitext(fitsfile)
jpegfile = "%s.jpg" % name
# open as PIL object
start = time.time()
image = FitsImage(fitsfile).convert("RGB")
stop = time.time()
print "Converting to PIL object took %f sec" % (stop - start)
# save as a jpeg
start = time.time()
image.save(jpegfile)
stop = time.time()
print "Saving to '%s' took %f sec" % (jpegfile, stop - start)
if __name__ == "__main__":
main(sys.argv)
| bsd-3-clause | -7,967,885,862,647,529,000 | 37.581236 | 80 | 0.629419 | false |
Akrog/cinder | cinder/tests/test_dellscapi.py | 1 | 155946 | # Copyright (c) 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder import context
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import test
from cinder.volume.drivers.dell import dell_storagecenter_api
import mock
from requests import models
import uuid
LOG = logging.getLogger(__name__)
# We patch these here as they are used by every test, to keep the tests
# from trying to contact a Dell Storage Center.
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'__init__',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'open_connection')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'close_connection')
class DellSCSanAPITestCase(test.TestCase):
'''DellSCSanAPITestCase
Class to test the Storage Center API using Mock.
'''
SC = {u'IPv6ManagementIPPrefix': 128,
u'connectionError': u'',
u'instanceId': u'64702',
u'scSerialNumber': 64702,
u'dataProgressionRunning': False,
u'hostOrIpAddress': u'192.168.0.80',
u'userConnected': True,
u'portsBalanced': True,
u'managementIp': u'192.168.0.80',
u'version': u'6.5.1.269',
u'location': u'',
u'objectType': u'StorageCenter',
u'instanceName': u'Storage Center 64702',
u'statusMessage': u'',
u'status': u'Up',
u'flashOptimizedConfigured': False,
u'connected': True,
u'operationMode': u'Normal',
u'userName': u'Admin',
u'nonFlashOptimizedConfigured': True,
u'name': u'Storage Center 64702',
u'scName': u'Storage Center 64702',
u'notes': u'',
u'serialNumber': 64702,
u'raidRebalanceRunning': False,
u'userPasswordExpired': False,
u'contact': u'',
u'IPv6ManagementIP': u'::'}
VOLUME = {u'instanceId': u'64702.3494',
u'scSerialNumber': 64702,
u'replicationSource': False,
u'liveVolume': False,
u'vpdId': 3496,
u'objectType': u'ScVolume',
u'index': 3494,
u'volumeFolderPath': u'devstackvol/fcvm/',
u'hostCacheEnabled': False,
u'usedByLegacyFluidFsNasVolume': False,
u'inRecycleBin': False,
u'volumeFolderIndex': 17,
u'instanceName': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea',
u'statusMessage': u'',
u'status': u'Up',
u'storageType': {u'instanceId': u'64702.1',
u'instanceName': u'Assigned - Redundant - 2 MB',
u'objectType': u'ScStorageType'},
u'cmmDestination': False,
u'replicationDestination': False,
u'volumeFolder': {u'instanceId': u'64702.17',
u'instanceName': u'fcvm',
u'objectType': u'ScVolumeFolder'},
u'deviceId': u'6000d31000fcbe000000000000000da8',
u'active': True,
u'portableVolumeDestination': False,
u'deleteAllowed': True,
u'name': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea',
u'scName': u'Storage Center 64702',
u'secureDataUsed': False,
u'serialNumber': u'0000fcbe-00000da8',
u'replayAllowed': True,
u'flashOptimized': False,
u'configuredSize': u'1.073741824E9 Bytes',
u'mapped': False,
u'cmmSource': False}
INACTIVE_VOLUME = \
{u'instanceId': u'64702.3494',
u'scSerialNumber': 64702,
u'replicationSource': False,
u'liveVolume': False,
u'vpdId': 3496,
u'objectType': u'ScVolume',
u'index': 3494,
u'volumeFolderPath': u'devstackvol/fcvm/',
u'hostCacheEnabled': False,
u'usedByLegacyFluidFsNasVolume': False,
u'inRecycleBin': False,
u'volumeFolderIndex': 17,
u'instanceName': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea',
u'statusMessage': u'',
u'status': u'Up',
u'storageType': {u'instanceId': u'64702.1',
u'instanceName': u'Assigned - Redundant - 2 MB',
u'objectType': u'ScStorageType'},
u'cmmDestination': False,
u'replicationDestination': False,
u'volumeFolder': {u'instanceId': u'64702.17',
u'instanceName': u'fcvm',
u'objectType': u'ScVolumeFolder'},
u'deviceId': u'6000d31000fcbe000000000000000da8',
u'active': False,
u'portableVolumeDestination': False,
u'deleteAllowed': True,
u'name': u'volume-37883deb-85cd-426a-9a98-62eaad8671ea',
u'scName': u'Storage Center 64702',
u'secureDataUsed': False,
u'serialNumber': u'0000fcbe-00000da8',
u'replayAllowed': True,
u'flashOptimized': False,
u'configuredSize': u'1.073741824E9 Bytes',
u'mapped': False,
u'cmmSource': False}
SCSERVER = {u'scName': u'Storage Center 64702',
u'volumeCount': 0,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 4,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'Server_21000024ff30441d',
u'instanceId': u'64702.47',
u'serverFolderPath': u'devstacksrv/',
u'portType': [u'FibreChannel'],
u'type': u'Physical',
u'statusMessage': u'Only 5 of 6 expected paths are up',
u'status': u'Degraded',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.4',
u'instanceName': u'devstacksrv',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Partial',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 5,
u'name': u'Server_21000024ff30441d',
u'hbaPresent': True,
u'hbaCount': 2,
u'notes': u'Created by Dell Cinder Driver',
u'mapped': False,
u'operatingSystem': {u'instanceId': u'64702.38',
u'instanceName': u'Red Hat Linux 6.x',
u'objectType': u'ScServerOperatingSystem'}
}
# ScServer where deletedAllowed=False (not allowed to be deleted)
SCSERVER_NO_DEL = {u'scName': u'Storage Center 64702',
u'volumeCount': 0,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 4,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'Server_21000024ff30441d',
u'instanceId': u'64702.47',
u'serverFolderPath': u'devstacksrv/',
u'portType': [u'FibreChannel'],
u'type': u'Physical',
u'statusMessage': u'Only 5 of 6 expected paths are up',
u'status': u'Degraded',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.4',
u'instanceName': u'devstacksrv',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Partial',
u'hostCacheIndex': 0,
u'deleteAllowed': False,
u'pathCount': 5,
u'name': u'Server_21000024ff30441d',
u'hbaPresent': True,
u'hbaCount': 2,
u'notes': u'Created by Dell Cinder Driver',
u'mapped': False,
u'operatingSystem':
{u'instanceId': u'64702.38',
u'instanceName': u'Red Hat Linux 6.x',
u'objectType': u'ScServerOperatingSystem'}
}
SCSERVERS = [{u'scName': u'Storage Center 64702',
u'volumeCount': 5,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 0,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'openstack4',
u'instanceId': u'64702.1',
u'serverFolderPath': u'',
u'portType': [u'Iscsi'],
u'type': u'Physical',
u'statusMessage': u'',
u'status': u'Up',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.0',
u'instanceName': u'Servers',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Up',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 0,
u'name': u'openstack4',
u'hbaPresent': True,
u'hbaCount': 1,
u'notes': u'',
u'mapped': True,
u'operatingSystem':
{u'instanceId': u'64702.3',
u'instanceName': u'Other Multipath',
u'objectType': u'ScServerOperatingSystem'}},
{u'scName': u'Storage Center 64702',
u'volumeCount': 1,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 0,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'openstack5',
u'instanceId': u'64702.2',
u'serverFolderPath': u'',
u'portType': [u'Iscsi'],
u'type': u'Physical',
u'statusMessage': u'',
u'status': u'Up',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.0',
u'instanceName': u'Servers',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Up',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 0, u'name': u'openstack5',
u'hbaPresent': True,
u'hbaCount': 1,
u'notes': u'',
u'mapped': True,
u'operatingSystem':
{u'instanceId': u'64702.2',
u'instanceName': u'Other Singlepath',
u'objectType': u'ScServerOperatingSystem'}}]
# ScServers list where status = Down
SCSERVERS_DOWN = \
[{u'scName': u'Storage Center 64702',
u'volumeCount': 5,
u'removeHbasAllowed': True,
u'legacyFluidFs': False,
u'serverFolderIndex': 0,
u'alertOnConnectivity': True,
u'objectType': u'ScPhysicalServer',
u'instanceName': u'openstack4',
u'instanceId': u'64702.1',
u'serverFolderPath': u'',
u'portType': [u'Iscsi'],
u'type': u'Physical',
u'statusMessage': u'',
u'status': u'Down',
u'scSerialNumber': 64702,
u'serverFolder': {u'instanceId': u'64702.0',
u'instanceName': u'Servers',
u'objectType': u'ScServerFolder'},
u'parentIndex': 0,
u'connectivity': u'Up',
u'hostCacheIndex': 0,
u'deleteAllowed': True,
u'pathCount': 0,
u'name': u'openstack4',
u'hbaPresent': True,
u'hbaCount': 1,
u'notes': u'',
u'mapped': True,
u'operatingSystem':
{u'instanceId': u'64702.3',
u'instanceName': u'Other Multipath',
u'objectType': u'ScServerOperatingSystem'}}]
MAP_PROFILES = [{u'instanceId': u'64702.2941',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'lunUsed': [1],
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume':
{u'instanceId': u'64702.6025',
u'instanceName': u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'connectivity': u'Up',
u'readOnly': False,
u'objectType': u'ScMappingProfile',
u'hostCache': False,
u'mappedVia': u'Server',
u'mapCount': 3,
u'instanceName': u'6025-47',
u'lunRequested': u'N/A'}]
MAP_PROFILE = {u'instanceId': u'64702.2941',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'lunUsed': [1],
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume':
{u'instanceId': u'64702.6025',
u'instanceName': u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'connectivity': u'Up',
u'readOnly': False,
u'objectType': u'ScMappingProfile',
u'hostCache': False,
u'mappedVia': u'Server',
u'mapCount': 3,
u'instanceName': u'6025-47',
u'lunRequested': u'N/A'}
MAPPINGS = [{u'profile': {u'instanceId': u'64702.104',
u'instanceName': u'92-30',
u'objectType': u'ScMappingProfile'},
u'status': u'Down',
u'statusMessage': u'',
u'instanceId': u'64702.969.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.30',
u'instanceName':
u'Server_iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.92',
u'instanceName':
u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'lunUsed': [1],
u'serverHba': {u'instanceId': u'64702.3454975614',
u'instanceName':
u'iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScServerHba'},
u'path': {u'instanceId': u'64702.64702.64702.31.8',
u'instanceName':
u'iqn.1993-08.org.debian:'
'01:3776df826e4f-5000D31000FCBE43',
u'objectType': u'ScServerHbaPath'},
u'controllerPort': {u'instanceId':
u'64702.5764839588723736131.91',
u'instanceName': u'5000D31000FCBE43',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-969',
u'transport': u'Iscsi',
u'objectType': u'ScMapping'}]
# Multiple mappings to test find_iscsi_properties with multiple portals
MAPPINGS_MULTI_PORTAL = \
[{u'profile': {u'instanceId': u'64702.104',
u'instanceName': u'92-30',
u'objectType': u'ScMappingProfile'},
u'status': u'Down',
u'statusMessage': u'',
u'instanceId': u'64702.969.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.30',
u'instanceName':
u'Server_iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.92',
u'instanceName':
u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'lunUsed': [1],
u'serverHba': {u'instanceId': u'64702.3454975614',
u'instanceName':
u'iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScServerHba'},
u'path': {u'instanceId': u'64702.64702.64702.31.8',
u'instanceName':
u'iqn.1993-08.org.debian:'
'01:3776df826e4f-5000D31000FCBE43',
u'objectType': u'ScServerHbaPath'},
u'controllerPort': {u'instanceId':
u'64702.5764839588723736131.91',
u'instanceName': u'5000D31000FCBE43',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-969',
u'transport': u'Iscsi',
u'objectType': u'ScMapping'},
{u'profile': {u'instanceId': u'64702.104',
u'instanceName': u'92-30',
u'objectType': u'ScMappingProfile'},
u'status': u'Down',
u'statusMessage': u'',
u'instanceId': u'64702.969.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.30',
u'instanceName':
u'Server_iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.92',
u'instanceName':
u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'lunUsed': [1],
u'serverHba': {u'instanceId': u'64702.3454975614',
u'instanceName':
u'iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScServerHba'},
u'path': {u'instanceId': u'64702.64702.64702.31.8',
u'instanceName':
u'iqn.1993-08.org.debian:'
'01:3776df826e4f-5000D31000FCBE43',
u'objectType': u'ScServerHbaPath'},
u'controllerPort': {u'instanceId':
u'64702.5764839588723736131.91',
u'instanceName': u'5000D31000FCBE43',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-969',
u'transport': u'Iscsi',
u'objectType': u'ScMapping'}]
MAPPINGS_READ_ONLY = \
[{u'profile': {u'instanceId': u'64702.104',
u'instanceName': u'92-30',
u'objectType': u'ScMappingProfile'},
u'status': u'Down',
u'statusMessage': u'',
u'instanceId': u'64702.969.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.30',
u'instanceName':
u'Server_iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.92',
u'instanceName':
u'volume-74a21934-60ad-4cf2-b89b-1f0dda309ddf',
u'objectType': u'ScVolume'},
u'readOnly': True,
u'lun': 1,
u'lunUsed': [1],
u'serverHba': {u'instanceId': u'64702.3454975614',
u'instanceName':
u'iqn.1993-08.org.debian:01:3776df826e4f',
u'objectType': u'ScServerHba'},
u'path': {u'instanceId': u'64702.64702.64702.31.8',
u'instanceName':
u'iqn.1993-08.org.debian:'
'01:3776df826e4f-5000D31000FCBE43',
u'objectType': u'ScServerHbaPath'},
u'controllerPort': {u'instanceId':
u'64702.5764839588723736131.91',
u'instanceName':
u'5000D31000FCBE43',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-969',
u'transport': u'Iscsi',
u'objectType': u'ScMapping'}]
FC_MAPPINGS = [{u'profile': {u'instanceId': u'64702.2941',
u'instanceName': u'6025-47',
u'objectType': u'ScMappingProfile'},
u'status': u'Up',
u'statusMessage': u'',
u'instanceId': u'64702.7639.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.6025',
u'instanceName':
u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'serverHba': {u'instanceId': u'64702.3282218607',
u'instanceName': u'21000024FF30441C',
u'objectType': u'ScServerHba'},
u'path': {u'instanceId': u'64702.64702.64703.27.73',
u'instanceName':
u'21000024FF30441C-5000D31000FCBE36',
u'objectType': u'ScServerHbaPath'},
u'controllerPort':
{u'instanceId': u'64702.5764839588723736118.50',
u'instanceName': u'5000D31000FCBE36',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-7639',
u'transport': u'FibreChannel',
u'objectType': u'ScMapping'},
{u'profile': {u'instanceId': u'64702.2941',
u'instanceName': u'6025-47',
u'objectType': u'ScMappingProfile'},
u'status': u'Up',
u'statusMessage': u'',
u'instanceId': u'64702.7640.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume':
{u'instanceId': u'64702.6025',
u'instanceName': u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'serverHba': {u'instanceId': u'64702.3282218606',
u'instanceName': u'21000024FF30441D',
u'objectType': u'ScServerHba'},
u'path':
{u'instanceId': u'64702.64702.64703.27.78',
u'instanceName': u'21000024FF30441D-5000D31000FCBE36',
u'objectType': u'ScServerHbaPath'},
u'controllerPort':
{u'instanceId': u'64702.5764839588723736118.50',
u'instanceName': u'5000D31000FCBE36',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-7640',
u'transport': u'FibreChannel',
u'objectType': u'ScMapping'},
{u'profile': {u'instanceId': u'64702.2941',
u'instanceName': u'6025-47',
u'objectType': u'ScMappingProfile'},
u'status': u'Up',
u'statusMessage': u'',
u'instanceId': u'64702.7638.64702',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'volume': {u'instanceId': u'64702.6025',
u'instanceName':
u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'readOnly': False,
u'lun': 1,
u'serverHba': {u'instanceId': u'64702.3282218606',
u'instanceName': u'21000024FF30441D',
u'objectType': u'ScServerHba'},
u'path':
{u'instanceId': u'64702.64702.64703.28.76',
u'instanceName': u'21000024FF30441D-5000D31000FCBE3E',
u'objectType': u'ScServerHbaPath'},
u'controllerPort': {u'instanceId':
u'64702.5764839588723736126.60',
u'instanceName': u'5000D31000FCBE3E',
u'objectType': u'ScControllerPort'},
u'instanceName': u'64702-7638',
u'transport': u'FibreChannel',
u'objectType': u'ScMapping'}]
RPLAY = {u'scSerialNumber': 64702,
u'globalIndex': u'64702-46-250',
u'description': u'Cinder Clone Replay',
u'parent': {u'instanceId': u'64702.46.249',
u'instanceName': u'64702-46-249',
u'objectType': u'ScReplay'},
u'instanceId': u'64702.46.250',
u'scName': u'Storage Center 64702',
u'consistent': False,
u'expires': True,
u'freezeTime': u'12/09/2014 03:52:08 PM',
u'createVolume': {u'instanceId': u'64702.46',
u'instanceName':
u'volume-ff9589d3-2d41-48d5-9ef5-2713a875e85b',
u'objectType': u'ScVolume'},
u'expireTime': u'12/09/2014 04:52:08 PM',
u'source': u'Manual',
u'spaceRecovery': False,
u'writesHeldDuration': 7910,
u'active': False,
u'markedForExpiration': False,
u'objectType': u'ScReplay',
u'instanceName': u'12/09/2014 03:52:08 PM',
u'size': u'0.0 Bytes'
}
RPLAYS = [{u'scSerialNumber': 64702,
u'globalIndex': u'64702-6025-5',
u'description': u'Manually Created',
u'parent': {u'instanceId': u'64702.6025.4',
u'instanceName': u'64702-6025-4',
u'objectType': u'ScReplay'},
u'instanceId': u'64702.6025.5',
u'scName': u'Storage Center 64702',
u'consistent': False,
u'expires': True,
u'freezeTime': u'02/02/2015 08:23:55 PM',
u'createVolume': {u'instanceId': u'64702.6025',
u'instanceName':
u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'expireTime': u'02/02/2015 09:23:55 PM',
u'source': u'Manual',
u'spaceRecovery': False,
u'writesHeldDuration': 7889,
u'active': False,
u'markedForExpiration': False,
u'objectType': u'ScReplay',
u'instanceName': u'02/02/2015 08:23:55 PM',
u'size': u'0.0 Bytes'},
{u'scSerialNumber': 64702,
u'globalIndex': u'64702-6025-4',
u'description': u'Cinder Test Replay012345678910',
u'parent': {u'instanceId': u'64702.6025.3',
u'instanceName': u'64702-6025-3',
u'objectType': u'ScReplay'},
u'instanceId': u'64702.6025.4',
u'scName': u'Storage Center 64702',
u'consistent': False,
u'expires': True,
u'freezeTime': u'02/02/2015 08:23:47 PM',
u'createVolume': {u'instanceId': u'64702.6025',
u'instanceName':
u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'expireTime': u'02/02/2015 09:23:47 PM',
u'source': u'Manual',
u'spaceRecovery': False,
u'writesHeldDuration': 7869,
u'active': False,
u'markedForExpiration': False,
u'objectType': u'ScReplay',
u'instanceName': u'02/02/2015 08:23:47 PM',
u'size': u'0.0 Bytes'}]
TST_RPLAY = {u'scSerialNumber': 64702,
u'globalIndex': u'64702-6025-4',
u'description': u'Cinder Test Replay012345678910',
u'parent': {u'instanceId': u'64702.6025.3',
u'instanceName': u'64702-6025-3',
u'objectType': u'ScReplay'},
u'instanceId': u'64702.6025.4',
u'scName': u'Storage Center 64702',
u'consistent': False,
u'expires': True,
u'freezeTime': u'02/02/2015 08:23:47 PM',
u'createVolume': {u'instanceId': u'64702.6025',
u'instanceName':
u'Server_21000024ff30441d Test Vol',
u'objectType': u'ScVolume'},
u'expireTime': u'02/02/2015 09:23:47 PM',
u'source': u'Manual',
u'spaceRecovery': False,
u'writesHeldDuration': 7869,
u'active': False,
u'markedForExpiration': False,
u'objectType': u'ScReplay',
u'instanceName': u'02/02/2015 08:23:47 PM',
u'size': u'0.0 Bytes'}
FLDR = {u'status': u'Up',
u'instanceName': u'opnstktst',
u'name': u'opnstktst',
u'parent':
{u'instanceId': u'64702.0',
u'instanceName': u'Volumes',
u'objectType': u'ScVolumeFolder'},
u'instanceId': u'64702.43',
u'scName': u'Storage Center 64702',
u'notes': u'Folder for OpenStack Cinder Driver',
u'scSerialNumber': 64702,
u'parentIndex': 0,
u'okToDelete': True,
u'folderPath': u'',
u'root': False,
u'statusMessage': u'',
u'objectType': u'ScVolumeFolder'}
SVR_FLDR = {u'status': u'Up',
u'instanceName': u'devstacksrv',
u'name': u'devstacksrv',
u'parent': {u'instanceId': u'64702.0',
u'instanceName': u'Servers',
u'objectType': u'ScServerFolder'},
u'instanceId': u'64702.4',
u'scName': u'Storage Center 64702',
u'notes': u'Folder for OpenStack Cinder Driver',
u'scSerialNumber': 64702,
u'parentIndex': 0,
u'okToDelete': False,
u'folderPath': u'',
u'root': False,
u'statusMessage': u'',
u'objectType': u'ScServerFolder'}
ISCSI_HBA = {u'portWwnList': [],
u'iscsiIpAddress': u'0.0.0.0',
u'pathCount': 1,
u'name': u'iqn.1993-08.org.debian:01:52332b70525',
u'connectivity': u'Down',
u'instanceId': u'64702.3786433166',
u'scName': u'Storage Center 64702',
u'notes': u'',
u'scSerialNumber': 64702,
u'server':
{u'instanceId': u'64702.38',
u'instanceName':
u'Server_iqn.1993-08.org.debian:01:52332b70525',
u'objectType': u'ScPhysicalServer'},
u'remoteStorageCenter': False,
u'iscsiName': u'',
u'portType': u'Iscsi',
u'instanceName': u'iqn.1993-08.org.debian:01:52332b70525',
u'objectType': u'ScServerHba'}
FC_HBAS = [{u'portWwnList': [],
u'iscsiIpAddress': u'0.0.0.0',
u'pathCount': 2,
u'name': u'21000024FF30441C',
u'connectivity': u'Up',
u'instanceId': u'64702.3282218607',
u'scName': u'Storage Center 64702',
u'notes': u'',
u'scSerialNumber': 64702,
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'remoteStorageCenter': False,
u'iscsiName': u'',
u'portType': u'FibreChannel',
u'instanceName': u'21000024FF30441C',
u'objectType': u'ScServerHba'},
{u'portWwnList': [],
u'iscsiIpAddress': u'0.0.0.0',
u'pathCount': 3,
u'name': u'21000024FF30441D',
u'connectivity': u'Partial',
u'instanceId': u'64702.3282218606',
u'scName': u'Storage Center 64702',
u'notes': u'',
u'scSerialNumber': 64702,
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'remoteStorageCenter': False,
u'iscsiName': u'',
u'portType': u'FibreChannel',
u'instanceName': u'21000024FF30441D',
u'objectType': u'ScServerHba'}]
FC_HBA = {u'portWwnList': [],
u'iscsiIpAddress': u'0.0.0.0',
u'pathCount': 3,
u'name': u'21000024FF30441D',
u'connectivity': u'Partial',
u'instanceId': u'64702.3282218606',
u'scName': u'Storage Center 64702',
u'notes': u'',
u'scSerialNumber': 64702,
u'server': {u'instanceId': u'64702.47',
u'instanceName': u'Server_21000024ff30441d',
u'objectType': u'ScPhysicalServer'},
u'remoteStorageCenter': False,
u'iscsiName': u'',
u'portType': u'FibreChannel',
u'instanceName': u'21000024FF30441D',
u'objectType': u'ScServerHba'}
SVR_OS_S = [{u'allowsLunGaps': True,
u'product': u'Red Hat Linux',
u'supportsActiveMappingDeletion': True,
u'version': u'6.x',
u'requiresLunZero': False,
u'scName': u'Storage Center 64702',
u'virtualMachineGuest': True,
u'virtualMachineHost': False,
u'allowsCrossTransportMapping': False,
u'objectType': u'ScServerOperatingSystem',
u'instanceId': u'64702.38',
u'lunCanVaryAcrossPaths': False,
u'scSerialNumber': 64702,
u'maximumVolumeSize': u'0.0 Bytes',
u'multipath': True,
u'instanceName': u'Red Hat Linux 6.x',
u'supportsActiveMappingCreation': True,
u'name': u'Red Hat Linux 6.x'}]
ISCSI_FLT_DOMAINS = [{u'headerDigestEnabled': False,
u'classOfServicePriority': 0,
u'wellKnownIpAddress': u'192.168.0.21',
u'scSerialNumber': 64702,
u'iscsiName':
u'iqn.2002-03.com.compellent:5000d31000fcbe42',
u'portNumber': 3260,
u'subnetMask': u'255.255.255.0',
u'gateway': u'192.168.0.1',
u'objectType': u'ScIscsiFaultDomain',
u'chapEnabled': False,
u'instanceId': u'64702.6.5.3',
u'childStatus': u'Up',
u'defaultTimeToRetain': u'SECONDS_20',
u'dataDigestEnabled': False,
u'instanceName': u'iSCSI 10G 2',
u'statusMessage': u'',
u'status': u'Up',
u'transportType': u'Iscsi',
u'vlanId': 0,
u'windowSize': u'131072.0 Bytes',
u'defaultTimeToWait': u'SECONDS_2',
u'scsiCommandTimeout': u'MINUTES_1',
u'deleteAllowed': False,
u'name': u'iSCSI 10G 2',
u'immediateDataWriteEnabled': False,
u'scName': u'Storage Center 64702',
u'notes': u'',
u'mtu': u'MTU_1500',
u'bidirectionalChapSecret': u'',
u'keepAliveTimeout': u'SECONDS_30'}]
# For testing find_iscsi_properties where multiple portals are found
ISCSI_FLT_DOMAINS_MULTI_PORTALS = \
[{u'headerDigestEnabled': False,
u'classOfServicePriority': 0,
u'wellKnownIpAddress': u'192.168.0.21',
u'scSerialNumber': 64702,
u'iscsiName':
u'iqn.2002-03.com.compellent:5000d31000fcbe42',
u'portNumber': 3260,
u'subnetMask': u'255.255.255.0',
u'gateway': u'192.168.0.1',
u'objectType': u'ScIscsiFaultDomain',
u'chapEnabled': False,
u'instanceId': u'64702.6.5.3',
u'childStatus': u'Up',
u'defaultTimeToRetain': u'SECONDS_20',
u'dataDigestEnabled': False,
u'instanceName': u'iSCSI 10G 2',
u'statusMessage': u'',
u'status': u'Up',
u'transportType': u'Iscsi',
u'vlanId': 0,
u'windowSize': u'131072.0 Bytes',
u'defaultTimeToWait': u'SECONDS_2',
u'scsiCommandTimeout': u'MINUTES_1',
u'deleteAllowed': False,
u'name': u'iSCSI 10G 2',
u'immediateDataWriteEnabled': False,
u'scName': u'Storage Center 64702',
u'notes': u'',
u'mtu': u'MTU_1500',
u'bidirectionalChapSecret': u'',
u'keepAliveTimeout': u'SECONDS_30'},
{u'headerDigestEnabled': False,
u'classOfServicePriority': 0,
u'wellKnownIpAddress': u'192.168.0.25',
u'scSerialNumber': 64702,
u'iscsiName':
u'iqn.2002-03.com.compellent:5000d31000fcbe42',
u'portNumber': 3260,
u'subnetMask': u'255.255.255.0',
u'gateway': u'192.168.0.1',
u'objectType': u'ScIscsiFaultDomain',
u'chapEnabled': False,
u'instanceId': u'64702.6.5.3',
u'childStatus': u'Up',
u'defaultTimeToRetain': u'SECONDS_20',
u'dataDigestEnabled': False,
u'instanceName': u'iSCSI 10G 2',
u'statusMessage': u'',
u'status': u'Up',
u'transportType': u'Iscsi',
u'vlanId': 0,
u'windowSize': u'131072.0 Bytes',
u'defaultTimeToWait': u'SECONDS_2',
u'scsiCommandTimeout': u'MINUTES_1',
u'deleteAllowed': False,
u'name': u'iSCSI 10G 2',
u'immediateDataWriteEnabled': False,
u'scName': u'Storage Center 64702',
u'notes': u'',
u'mtu': u'MTU_1500',
u'bidirectionalChapSecret': u'',
u'keepAliveTimeout': u'SECONDS_30'}]
ISCSI_FLT_DOMAIN = {u'headerDigestEnabled': False,
u'classOfServicePriority': 0,
u'wellKnownIpAddress': u'192.168.0.21',
u'scSerialNumber': 64702,
u'iscsiName':
u'iqn.2002-03.com.compellent:5000d31000fcbe42',
u'portNumber': 3260,
u'subnetMask': u'255.255.255.0',
u'gateway': u'192.168.0.1',
u'objectType': u'ScIscsiFaultDomain',
u'chapEnabled': False,
u'instanceId': u'64702.6.5.3',
u'childStatus': u'Up',
u'defaultTimeToRetain': u'SECONDS_20',
u'dataDigestEnabled': False,
u'instanceName': u'iSCSI 10G 2',
u'statusMessage': u'',
u'status': u'Up',
u'transportType': u'Iscsi',
u'vlanId': 0,
u'windowSize': u'131072.0 Bytes',
u'defaultTimeToWait': u'SECONDS_2',
u'scsiCommandTimeout': u'MINUTES_1',
u'deleteAllowed': False,
u'name': u'iSCSI 10G 2',
u'immediateDataWriteEnabled': False,
u'scName': u'Storage Center 64702',
u'notes': u'',
u'mtu': u'MTU_1500',
u'bidirectionalChapSecret': u'',
u'keepAliveTimeout': u'SECONDS_30'}
CTRLR_PORT = {u'status': u'Up',
u'iscsiIpAddress': u'0.0.0.0',
u'WWN': u'5000D31000FCBE06',
u'name': u'5000D31000FCBE06',
u'iscsiGateway': u'0.0.0.0',
u'instanceId': u'64702.5764839588723736070.51',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'transportType': u'FibreChannel',
u'virtual': False,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'iscsiName': u'',
u'purpose': u'FrontEnd',
u'iscsiSubnetMask': u'0.0.0.0',
u'faultDomain':
{u'instanceId': u'64702.4.3',
u'instanceName': u'Domain 1',
u'objectType': u'ScControllerPortFaultDomain'},
u'instanceName': u'5000D31000FCBE06',
u'statusMessage': u'',
u'objectType': u'ScControllerPort'}
ISCSI_CTRLR_PORT = {u'preferredParent':
{u'instanceId': u'64702.5764839588723736074.69',
u'instanceName': u'5000D31000FCBE0A',
u'objectType': u'ScControllerPort'},
u'status': u'Up',
u'iscsiIpAddress': u'10.23.8.235',
u'WWN': u'5000D31000FCBE43',
u'name': u'5000D31000FCBE43',
u'parent':
{u'instanceId': u'64702.5764839588723736074.69',
u'instanceName': u'5000D31000FCBE0A',
u'objectType': u'ScControllerPort'},
u'iscsiGateway': u'0.0.0.0',
u'instanceId': u'64702.5764839588723736131.91',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'transportType': u'Iscsi',
u'virtual': True,
u'controller': {u'instanceId': u'64702.64702',
u'instanceName': u'SN 64702',
u'objectType': u'ScController'},
u'iscsiName':
u'iqn.2002-03.com.compellent:5000d31000fcbe43',
u'purpose': u'FrontEnd',
u'iscsiSubnetMask': u'0.0.0.0',
u'faultDomain':
{u'instanceId': u'64702.6.5',
u'instanceName': u'iSCSI 10G 2',
u'objectType': u'ScControllerPortFaultDomain'},
u'instanceName': u'5000D31000FCBE43',
u'childStatus': u'Up',
u'statusMessage': u'',
u'objectType': u'ScControllerPort'}
FC_CTRLR_PORT = {u'preferredParent':
{u'instanceId': u'64702.5764839588723736093.57',
u'instanceName': u'5000D31000FCBE1D',
u'objectType': u'ScControllerPort'},
u'status': u'Up',
u'iscsiIpAddress': u'0.0.0.0',
u'WWN': u'5000D31000FCBE36',
u'name': u'5000D31000FCBE36',
u'parent':
{u'instanceId': u'64702.5764839588723736093.57',
u'instanceName': u'5000D31000FCBE1D',
u'objectType': u'ScControllerPort'},
u'iscsiGateway': u'0.0.0.0',
u'instanceId': u'64702.5764839588723736118.50',
u'scName': u'Storage Center 64702',
u'scSerialNumber': 64702,
u'transportType': u'FibreChannel',
u'virtual': True,
u'controller': {u'instanceId': u'64702.64703',
u'instanceName': u'SN 64703',
u'objectType': u'ScController'},
u'iscsiName': u'',
u'purpose': u'FrontEnd',
u'iscsiSubnetMask': u'0.0.0.0',
u'faultDomain':
{u'instanceId': u'64702.1.0',
u'instanceName': u'Domain 0',
u'objectType': u'ScControllerPortFaultDomain'},
u'instanceName': u'5000D31000FCBE36',
u'childStatus': u'Up',
u'statusMessage': u'',
u'objectType': u'ScControllerPort'}
STRG_USAGE = {u'systemSpace': u'7.38197504E8 Bytes',
u'freeSpace': u'1.297659461632E13 Bytes',
u'oversubscribedSpace': u'0.0 Bytes',
u'instanceId': u'64702',
u'scName': u'Storage Center 64702',
u'savingVsRaidTen': u'1.13737990144E11 Bytes',
u'allocatedSpace': u'1.66791217152E12 Bytes',
u'usedSpace': u'3.25716017152E11 Bytes',
u'configuredSpace': u'9.155796533248E12 Bytes',
u'alertThresholdSpace': u'1.197207956992E13 Bytes',
u'availableSpace': u'1.3302310633472E13 Bytes',
u'badSpace': u'0.0 Bytes',
u'time': u'02/02/2015 02:23:39 PM',
u'scSerialNumber': 64702,
u'instanceName': u'Storage Center 64702',
u'storageAlertThreshold': 10,
u'objectType': u'StorageCenterStorageUsage'}
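    # Initiator identifiers (iSCSI IQN, FC WWNs) and the REST folder path
    # shared by the tests below.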
IQN = 'iqn.2002-03.com.compellent:5000D31000000001'
WWN = u'21000024FF30441C'
WWNS = [u'21000024FF30441C',
u'21000024FF30441D']
FLDR_PATH = 'StorageCenter/ScVolumeFolder/'
# Create a Response object that indicates OK
response_ok = models.Response()
response_ok.status_code = 200
response_ok.reason = u'ok'
RESPONSE_200 = response_ok
# Create a Response object that indicates created
response_created = models.Response()
response_created.status_code = 201
response_created.reason = u'created'
RESPONSE_201 = response_created
# Create a Response object that indicates a failure (no content)
response_nc = models.Response()
response_nc.status_code = 204
response_nc.reason = u'duplicate'
RESPONSE_204 = response_nc
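    # Note: every test method below also receives mock_close_connection,
    # mock_open_connection and mock_init arguments. These are presumably
    # injected by class-level @mock.patch.object decorators defined earlier
    # in this file, which apply to each test in the class.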
def setUp(self):
super(DellSCSanAPITestCase, self).setUp()
        # self.configuration is a bare mock.Mock(): a blank slate whose
        # attributes spring into existence on access. Rather than patching
        # configuration in setUp (which, unlike decorator-based patches, is
        # not automatically undone between tests), we simply assign the
        # driver config values the tests need.
self.configuration = mock.Mock()
self.configuration.san_is_local = False
self.configuration.san_ip = "192.168.0.1"
self.configuration.san_login = "admin"
self.configuration.san_password = "mmm"
self.configuration.dell_sc_ssn = 12345
self.configuration.dell_sc_server_folder = 'opnstktst'
self.configuration.dell_sc_volume_folder = 'opnstktst'
self.configuration.dell_sc_api_port = 3033
self.configuration.iscsi_ip_address = '192.168.1.1'
self.configuration.iscsi_port = 3260
self._context = context.get_admin_context()
# Set up the StorageCenterApi
self.scapi = dell_storagecenter_api.StorageCenterApi(
self.configuration.san_ip,
self.configuration.dell_sc_api_port,
self.configuration.san_login,
self.configuration.san_password)
self.volid = str(uuid.uuid4())
self.volume_name = "volume" + self.volid
def test_path_to_array(self,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._path_to_array(u'folder1/folder2/folder3')
expected = [u'folder1', u'folder2', u'folder3']
self.assertEqual(expected, res, 'Unexpected folder path')
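    # A minimal sketch of what _path_to_array presumably does; the real
    # implementation may differ, e.g. in how it handles leading or
    # trailing slashes:
    #
    #     def _path_to_array(self, path):
    #         return path.split('/')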
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_result',
return_value=SC)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_sc(self,
mock_get,
mock_get_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_sc(64702)
mock_get.assert_called_once_with('StorageCenter/StorageCenter')
mock_get_result.assert_called()
self.assertEqual(u'64702', res, 'Unexpected SSN')
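    # Note on ordering: stacked @mock.patch.object decorators apply
    # bottom-up, so the bottom-most patch maps to the first mock argument
    # (here 'get' -> mock_get) and the top-most to the next
    # (mock_get_result), with the class-level mocks following.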
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_result',
return_value=None)
def test_find_sc_failure(self,
mock_get_result,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
self.assertRaises(exception.VolumeBackendAPIException,
self.scapi.find_sc, 12345)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_folder(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._create_folder(
'StorageCenter/ScVolumeFolder', 12345, '',
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.FLDR, res, 'Unexpected Folder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_folder_with_parent(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where parent folder name is specified
res = self.scapi._create_folder(
'StorageCenter/ScVolumeFolder', 12345, 'parentFolder',
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.FLDR, res, 'Unexpected Folder')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_create_folder_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._create_folder(
'StorageCenter/ScVolumeFolder', 12345, '',
self.configuration.dell_sc_volume_folder)
self.assertIsNone(res, 'Test Create folder - None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_path_to_array',
return_value=['Cinder_Test_Folder'])
def test_create_folder_path(self,
mock_path_to_array,
mock_find_folder,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._create_folder_path(
'StorageCenter/ScVolumeFolder', 12345,
self.configuration.dell_sc_volume_folder)
mock_path_to_array.assert_called_once_with(
self.configuration.dell_sc_volume_folder)
mock_find_folder.assert_called()
self.assertEqual(self.FLDR, res, 'Unexpected ScFolder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_path_to_array',
return_value=['Cinder_Test_Folder'])
def test_create_folder_path_create_fldr(self,
mock_path_to_array,
mock_find_folder,
mock_create_folder,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where folder is not found and must be created
res = self.scapi._create_folder_path(
'StorageCenter/ScVolumeFolder', 12345,
self.configuration.dell_sc_volume_folder)
mock_path_to_array.assert_called_once_with(
self.configuration.dell_sc_volume_folder)
mock_find_folder.assert_called()
mock_create_folder.assert_called()
self.assertEqual(self.FLDR, res, 'Unexpected ScFolder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_path_to_array',
return_value=['Cinder_Test_Folder'])
def test_create_folder_path_failure(self,
mock_path_to_array,
mock_find_folder,
mock_create_folder,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where folder is not found, must be created
# and creation fails
res = self.scapi._create_folder_path(
'StorageCenter/ScVolumeFolder', 12345,
self.configuration.dell_sc_volume_folder)
mock_path_to_array.assert_called_once_with(
self.configuration.dell_sc_volume_folder)
mock_find_folder.assert_called()
mock_create_folder.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_result',
return_value=u'devstackvol/fcvm/')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_folder(self,
mock_post,
mock_get_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_folder(
'StorageCenter/ScVolumeFolder', 12345,
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_get_result.assert_called()
self.assertEqual(u'devstackvol/fcvm/', res, 'Unexpected folder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_result',
return_value=u'devstackvol/fcvm/')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_folder_multi_fldr(self,
mock_post,
mock_get_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case for folder path with multiple folders
res = self.scapi._find_folder(
'StorageCenter/ScVolumeFolder', 12345,
u'testParentFolder/opnstktst')
mock_post.assert_called()
mock_get_result.assert_called()
self.assertEqual(u'devstackvol/fcvm/', res, 'Unexpected folder')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_find_folder_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_folder(
'StorageCenter/ScVolumeFolder', 12345,
self.configuration.dell_sc_volume_folder)
self.assertIsNone(res, 'Test find folder - None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_folder_path',
return_value=FLDR)
def test_create_volume_folder_path(self,
mock_create_vol_fldr_path,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._create_volume_folder_path(
12345,
self.configuration.dell_sc_volume_folder)
mock_create_vol_fldr_path.assert_called_once_with(
'StorageCenter/ScVolumeFolder',
12345,
self.configuration.dell_sc_volume_folder)
self.assertEqual(self.FLDR, res, 'Unexpected ScFolder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_folder',
return_value=FLDR)
def test_find_volume_folder(self,
mock_find_folder,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_volume_folder(
12345,
self.configuration.dell_sc_volume_folder)
mock_find_folder.assert_called_once_with(
'StorageCenter/ScVolumeFolder/GetList',
12345,
self.configuration.dell_sc_volume_folder)
self.assertEqual(self.FLDR, res, 'Unexpected Folder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPINGS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=SCSERVERS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_init_volume(self,
mock_post,
mock_get_json,
mock_map_volume,
mock_unmap_volume,
mock_close_connection,
mock_open_connection,
mock_init):
self.scapi._init_volume(self.VOLUME)
mock_map_volume.assert_called()
mock_unmap_volume.assert_called()
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_init_volume_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where ScServer list fails
self.scapi._init_volume(self.VOLUME)
mock_post.assert_called()
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'unmap_volume',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'map_volume',
return_value=MAPPINGS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=SCSERVERS_DOWN)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_init_volume_servers_down(self,
mock_post,
mock_get_json,
mock_map_volume,
mock_unmap_volume,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where ScServer Status = Down
self.scapi._init_volume(self.VOLUME)
mock_map_volume.assert_called()
mock_unmap_volume.assert_called()
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_volume(self,
mock_post,
mock_find_volume_folder,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_volume(
self.volume_name,
1,
12345,
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_get_json.assert_called()
mock_find_volume_folder.assert_called_once_with(
12345, self.configuration.dell_sc_volume_folder)
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_volume_folder_path',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_vol_and_folder(self,
mock_post,
mock_find_volume_folder,
mock_create_vol_folder_path,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test calling create_volume where volume folder has to be created
res = self.scapi.create_volume(
self.volume_name,
1,
12345,
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_get_json.assert_called()
mock_create_vol_folder_path.assert_called_once_with(
12345,
self.configuration.dell_sc_volume_folder)
mock_find_volume_folder.assert_called_once_with(
12345, self.configuration.dell_sc_volume_folder)
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_volume_folder_path',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_vol_folder_fail(self,
mock_post,
mock_find_volume_folder,
mock_create_vol_folder_path,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test calling create_volume where volume folder does not exist and
# fails to be created
res = self.scapi.create_volume(
self.volume_name,
1,
12345,
self.configuration.dell_sc_volume_folder)
mock_post.assert_called()
mock_get_json.assert_called()
mock_create_vol_folder_path.assert_called_once_with(
12345,
self.configuration.dell_sc_volume_folder)
mock_find_volume_folder.assert_called_once_with(
12345, self.configuration.dell_sc_volume_folder)
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_create_volume_failure(self,
mock_post,
mock_find_volume_folder,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_volume(
self.volume_name,
1,
12345,
self.configuration.dell_sc_volume_folder)
mock_find_volume_folder.assert_called_once_with(
12345, self.configuration.dell_sc_volume_folder)
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_volume_by_name(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case to find volume by name
res = self.scapi.find_volume(12345,
self.volume_name)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected volume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
    # Test case to find volume by InstanceId
def test_find_volume_by_instanceid(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_volume(12345,
None,
'64702.3494')
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected volume')
def test_find_volume_no_name_or_instance(self,
mock_close_connection,
mock_open_connection,
mock_init):
# Test calling find_volume with no name or instanceid
res = self.scapi.find_volume(12345)
        self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_find_volume_not_found(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test calling find_volume with result of no volume found
res = self.scapi.find_volume(12345,
self.volume_name)
        self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=True)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_200)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
def test_delete_volume(self,
mock_find_volume,
mock_delete,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.delete_volume(12345,
self.volume_name)
mock_delete.assert_called()
mock_find_volume.assert_called_once_with(12345, self.volume_name, None)
mock_get_json.assert_called()
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_204)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=VOLUME)
def test_delete_volume_failure(self,
mock_find_volume,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
self.assertRaises(exception.VolumeBackendAPIException,
self.scapi.delete_volume, 12345, self.volume_name)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_volume',
return_value=None)
def test_delete_volume_no_vol_found(self,
mock_find_volume,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where volume to be deleted does not exist
res = self.scapi.delete_volume(12345,
self.volume_name)
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_folder_path',
return_value=SVR_FLDR)
def test_create_server_folder_path(self,
mock_create_svr_fldr_path,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._create_server_folder_path(
12345,
self.configuration.dell_sc_server_folder)
mock_create_svr_fldr_path.assert_called_once_with(
'StorageCenter/ScServerFolder',
12345,
self.configuration.dell_sc_server_folder)
self.assertEqual(self.SVR_FLDR, res, 'Unexpected server folder')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_folder',
return_value=SVR_FLDR)
def test_find_server_folder(self,
mock_find_folder,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_server_folder(
12345,
self.configuration.dell_sc_server_folder)
mock_find_folder.assert_called_once_with(
'StorageCenter/ScServerFolder/GetList',
12345,
self.configuration.dell_sc_server_folder)
self.assertEqual(self.SVR_FLDR, res, 'Unexpected server folder')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_add_hba(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._add_hba(self.SCSERVER,
self.IQN,
False)
mock_post.assert_called()
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_add_hba_fc(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._add_hba(self.SCSERVER,
self.WWN,
True)
mock_post.assert_called()
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_add_hba_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._add_hba(self.SCSERVER,
self.IQN,
False)
mock_post.assert_called()
self.assertFalse(res, 'Expected False')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=SVR_OS_S)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_serveros(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_serveros(12345, 'Red Hat Linux 6.x')
mock_get_json.assert_called()
mock_post.assert_called()
self.assertEqual('64702.38', res, 'Wrong InstanceId')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=SVR_OS_S)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_serveros_not_found(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test requesting a Server OS that will not be found
res = self.scapi._find_serveros(12345, 'Non existent OS')
mock_get_json.assert_called()
mock_post.assert_called()
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_find_serveros_failed(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_serveros(12345, 'Red Hat Linux 6.x')
        self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=FC_HBA)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_server',
return_value=SCSERVER)
def test_create_server_multiple_hbas(self,
mock_create_server,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_server_multiple_hbas(
12345,
self.configuration.dell_sc_server_folder,
self.WWNS)
mock_create_server.assert_called()
mock_add_hba.assert_called()
self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=SVR_FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value='64702.38')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_server(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_first_result,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
mock_find_serveros.assert_called()
mock_find_server_folder.assert_called()
mock_first_result.assert_called()
mock_add_hba.assert_called()
self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=SVR_FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_server_os_not_found(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_first_result,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
mock_find_serveros.assert_called()
self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_server_folder_path',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value='64702.38')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_server_fldr_not_found(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_create_svr_fldr_path,
mock_first_result,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
mock_find_server_folder.assert_called()
mock_create_svr_fldr_path.assert_called()
self.assertEqual(self.SCSERVER, res, 'Unexpected ScServer')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_server_folder_path',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value='64702.38')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_create_server_failure(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_create_svr_fldr_path,
mock_first_result,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=True)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_server_folder_path',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value='64702.38')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_server_not_found(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_create_svr_fldr_path,
mock_first_result,
mock_add_hba,
mock_close_connection,
mock_open_connection,
mock_init):
# Test create server where _first_result is None
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_delete_server',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_add_hba',
return_value=False)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_server_folder',
return_value=SVR_FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serveros',
return_value='64702.38')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_201)
def test_create_server_addhba_fail(self,
mock_post,
mock_find_serveros,
mock_find_server_folder,
mock_first_result,
mock_add_hba,
mock_delete_server,
mock_close_connection,
mock_open_connection,
mock_init):
# Tests create server where add hba fails
res = self.scapi.create_server(
12345,
self.configuration.dell_sc_server_folder,
self.IQN,
False)
mock_delete_server.assert_called()
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=SCSERVER)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serverhba',
return_value=ISCSI_HBA)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_server(self,
mock_post,
mock_find_serverhba,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_server(12345,
self.IQN)
mock_find_serverhba.assert_called()
mock_first_result.assert_called()
self.assertIsNotNone(res, 'Expected ScServer')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serverhba',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_server_no_hba(self,
mock_post,
mock_find_serverhba,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where a ScServer HBA does not exist with the specified IQN
# or WWN
res = self.scapi.find_server(12345,
self.IQN)
mock_find_serverhba.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_serverhba',
return_value=ISCSI_HBA)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_find_server_failure(self,
mock_post,
mock_find_serverhba,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where a ScServer does not exist with the specified
# ScServerHba
res = self.scapi.find_server(12345,
self.IQN)
mock_find_serverhba.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=ISCSI_HBA)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_find_serverhba(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_server(12345,
self.IQN)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertIsNotNone(res, 'Expected ScServerHba')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_find_serverhba_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where a ScServer does not exist with the specified
# ScServerHba
res = self.scapi.find_server(12345,
self.IQN)
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_domains(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_domains(u'64702.5764839588723736074.69')
        mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual(
self.ISCSI_FLT_DOMAINS, res, 'Unexpected ScIscsiFaultDomain')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_domains_error(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where get of ScControllerPort FaultDomainList fails
res = self.scapi._find_domains(u'64702.5764839588723736074.69')
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_domain(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_domain(u'64702.5764839588723736074.69',
u'192.168.0.21')
        mock_get.assert_called()
mock_get_json.assert_called()
self.assertIsNotNone(res, 'Expected ScIscsiFaultDomain')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_domain_error(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where get of ScControllerPort FaultDomainList fails
res = self.scapi._find_domain(u'64702.5764839588723736074.69',
u'192.168.0.21')
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_domain_not_found(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where the domain IP does not match the
        # wellKnownIpAddress of any fault domain
res = self.scapi._find_domain(u'64702.5764839588723736074.69',
u'192.168.0.22')
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=FC_HBAS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_fc_initiators(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_fc_initiators(self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertIsNotNone(res, 'Expected WWN list')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_fc_initiators_error(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where get of ScServer HbaList fails
res = self.scapi._find_fc_initiators(self.SCSERVER)
self.assertListEqual([], res, 'Expected empty list')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=MAPPINGS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_get_volume_count(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.get_volume_count(self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual(len(self.MAPPINGS), res, 'Mapping count mismatch')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_get_volume_count_failure(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where get of the ScServer MappingList fails
res = self.scapi.get_volume_count(self.SCSERVER)
mock_get.assert_called()
self.assertEqual(-1, res, 'Mapping count not -1')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=[])
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_get_volume_count_no_volumes(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.get_volume_count(self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual(len([]), res, 'Mapping count mismatch')
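    # Taken together, the three tests above show get_volume_count returning
    # the length of the server's mapping list on success, an empty list
    # counting as zero volumes, and -1 serving as the error sentinel when
    # the HTTP get fails.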
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=MAPPINGS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_mappings(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_mappings(self.VOLUME)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual(self.MAPPINGS, res, 'Mapping mismatch')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_mappings_inactive_vol(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
# Test getting volume mappings on inactive volume
res = self.scapi._find_mappings(self.INACTIVE_VOLUME)
mock_get.assert_called()
self.assertEqual([], res, 'No mappings expected')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_mappings_failure(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where get of the ScVolume MappingList fails
res = self.scapi._find_mappings(self.VOLUME)
mock_get.assert_called()
self.assertEqual([], res, 'Mapping count not empty')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=[])
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_mappings_no_mappings(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where ScVolume has no mappings
res = self.scapi._find_mappings(self.VOLUME)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual([], res, 'Mapping count mismatch')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_controller_port(self,
mock_get,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._find_controller_port(u'64702.5764839588723736070.51')
mock_get.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.CTRLR_PORT, res, 'ScControllerPort mismatch')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_controller_port_failure(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where get of the ScControllerPort fails
res = self.scapi._find_controller_port(self.VOLUME)
mock_get.assert_called()
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=FC_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=FC_MAPPINGS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_fc_initiators',
return_value=WWNS)
def test_find_wwns(self,
mock_find_fc_initiators,
mock_find_mappings,
mock_find_controller_port,
mock_close_connection,
mock_open_connection,
mock_init):
lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME,
self.SCSERVER)
mock_find_fc_initiators.assert_called()
mock_find_mappings.assert_called()
mock_find_controller_port.assert_called()
        # _find_controller_port is mocked, so every mapping pair will
        # report the same WWN for its ScControllerPort
itmapCompare = {u'21000024FF30441C': [u'5000D31000FCBE36'],
u'21000024FF30441D':
[u'5000D31000FCBE36', u'5000D31000FCBE36']}
self.assertEqual(1, lun, 'Incorrect LUN')
self.assertIsNotNone(wwns, 'WWNs is None')
self.assertEqual(itmapCompare, itmap, 'WWN mapping incorrect')
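    # find_wwns evidently returns a (lun, target_wwns, initiator_target_map)
    # tuple, where the map keys are initiator WWNs and the values are lists
    # of target WWNs reachable from that initiator.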
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=[])
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_fc_initiators',
return_value=FC_HBAS)
def test_find_wwns_no_mappings(self,
mock_find_fc_initiators,
mock_find_mappings,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where there are no ScMapping(s)
lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME,
self.SCSERVER)
mock_find_fc_initiators.assert_called()
mock_find_mappings.assert_called()
        self.assertIsNone(lun, 'Incorrect LUN')
self.assertEqual([], wwns, 'WWNs is not empty')
self.assertEqual({}, itmap, 'WWN mapping not empty')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=FC_MAPPINGS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_fc_initiators',
return_value=WWNS)
def test_find_wwns_no_ctlr_port(self,
mock_find_fc_initiators,
mock_find_mappings,
mock_find_controller_port,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where ScControllerPort is none
lun, wwns, itmap = self.scapi.find_wwns(self.VOLUME,
self.SCSERVER)
mock_find_fc_initiators.assert_called()
mock_find_mappings.assert_called()
mock_find_controller_port.assert_called()
        self.assertIsNone(lun, 'Incorrect LUN')
self.assertEqual([], wwns, 'WWNs is not empty')
self.assertEqual({}, itmap, 'WWN mapping not empty')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS)
def test_find_iscsi_properties_mappings(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns':
[u'iqn.2002-03.com.compellent:5000d31000fcbe43'],
'target_luns': [1],
'target_portals': [u'192.168.0.21:3260']}
self.assertEqual(expected, res, 'Wrong Target Info')
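    # The expected dict matches the iSCSI connection properties Cinder
    # consumes, with target_iqns, target_luns and target_portals acting as
    # parallel lists describing each discovered path.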
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS)
def test_find_iscsi_properties_by_address(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case to find iSCSI mappings by IP Address & port
res = self.scapi.find_iscsi_properties(
self.VOLUME, '192.168.0.21', 3260)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns':
[u'iqn.2002-03.com.compellent:5000d31000fcbe43'],
'target_luns': [1],
'target_portals': [u'192.168.0.21:3260']}
self.assertEqual(expected, res, 'Wrong Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS)
def test_find_iscsi_properties_by_address_not_found(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where no iSCSI mappings are found for the given
        # IP address and port
res = self.scapi.find_iscsi_properties(
self.VOLUME, '192.168.1.21', 3260)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns': [],
'target_luns': [],
'target_portals': []}
self.assertEqual(expected, res, 'Wrong Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=[])
def test_find_iscsi_properties_no_mapping(self,
mock_find_mappings,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where there are no ScMapping(s)
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns': [],
'target_luns': [],
'target_portals': []}
self.assertEqual(expected, res, 'Expected empty Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS)
def test_find_iscsi_properties_no_domain(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where there are no ScFaultDomain(s)
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns': [],
'target_luns': [],
'target_portals': []}
self.assertEqual(expected, res, 'Expected empty Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS)
def test_find_iscsi_properties_no_ctrl_port(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
        # Test case where no ScControllerPort is found
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns': [],
'target_luns': [],
'target_portals': []}
self.assertEqual(expected, res, 'Expected empty Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS_READ_ONLY)
def test_find_iscsi_properties_ro(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where Read Only mappings are found
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'ro',
'target_discovered': False,
'target_iqns':
[u'iqn.2002-03.com.compellent:5000d31000fcbe43'],
'target_luns': [1],
'target_portals': [u'192.168.0.21:3260']}
self.assertEqual(expected, res, 'Wrong Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_controller_port',
return_value=ISCSI_CTRLR_PORT)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_domains',
return_value=ISCSI_FLT_DOMAINS_MULTI_PORTALS)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_mappings',
return_value=MAPPINGS_MULTI_PORTAL)
def test_find_iscsi_properties_multi_portals(self,
mock_find_mappings,
mock_find_domain,
mock_find_ctrl_port,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where there are multiple portals
res = self.scapi.find_iscsi_properties(self.VOLUME)
mock_find_mappings.assert_called()
mock_find_domain.assert_called()
mock_find_ctrl_port.assert_called()
expected = {'access_mode': 'rw',
'target_discovered': False,
'target_iqns':
[u'iqn.2002-03.com.compellent:5000d31000fcbe43'],
'target_luns': [1],
'target_portals':
[u'192.168.0.21:3260', u'192.168.0.25:3260']}
self.assertEqual(expected, res, 'Wrong Target Info')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=MAP_PROFILE)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_map_volume(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.map_volume(self.VOLUME,
self.SCSERVER)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.MAP_PROFILE, res, 'Incorrect ScMappingProfile')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_map_volume_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where mapping volume to server fails
res = self.scapi.map_volume(self.VOLUME,
self.SCSERVER)
mock_post.assert_called()
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_200)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=MAP_PROFILES)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_unmap_volume(self,
mock_get,
mock_get_json,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.unmap_volume(self.VOLUME,
self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
mock_delete.assert_called()
self.assertTrue(res, 'Expected True')
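    # unmap_volume apparently works by fetching the volume's mapping
    # profile list and issuing a delete per profile; the three tests that
    # follow cover the get failing, an empty profile list, and the delete
    # itself failing.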
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_unmap_volume_failure(self,
mock_get,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.unmap_volume(self.VOLUME,
self.SCSERVER)
mock_get.assert_called()
self.assertFalse(res, 'Expected False')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_200)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=[])
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_unmap_volume_no_map_profile(self,
mock_get,
mock_get_json,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.unmap_volume(self.VOLUME,
self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
mock_delete.assert_called()
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_204)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=MAP_PROFILES)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_unmap_volume_del_fail(self,
mock_get,
mock_get_json,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.unmap_volume(self.VOLUME,
self.SCSERVER)
mock_get.assert_called()
mock_get_json.assert_called()
mock_delete.assert_called()
self.assertFalse(res, 'Expected False')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=STRG_USAGE)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_get_storage_usage(self,
mock_get,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.get_storage_usage(64702)
mock_get.assert_called()
mock_get_json.assert_called()
self.assertEqual(self.STRG_USAGE, res, 'Unexpected ScStorageUsage')
def test_get_storage_usage_no_ssn(self,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where SSN is none
res = self.scapi.get_storage_usage(None)
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
    def test_get_storage_usage_failure(self,
                                       mock_get,
                                       mock_close_connection,
                                       mock_open_connection,
                                       mock_init):
        # Test case where get of Storage Usage fails
res = self.scapi.get_storage_usage(64702)
mock_get.assert_called()
self.assertIsNone(res, 'None expected')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=RPLAY)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_replay(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_replay(self.VOLUME,
'Test Replay',
60)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=RPLAY)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_init_volume')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_replay_inact_vol(self,
mock_post,
mock_init_volume,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where the specified volume is inactive
res = self.scapi.create_replay(self.INACTIVE_VOLUME,
'Test Replay',
60)
mock_post.assert_called()
mock_init_volume.assert_called_once_with(self.INACTIVE_VOLUME)
mock_first_result.assert_called()
self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=RPLAY)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_replay_no_expire(self,
mock_post,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.create_replay(self.VOLUME,
'Test Replay',
0)
mock_post.assert_called()
mock_first_result.assert_called()
self.assertEqual(self.RPLAY, res, 'Unexpected ScReplay')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_replay_no_volume(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where no ScVolume is specified
res = self.scapi.create_replay(None,
'Test Replay',
60)
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_create_replay_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where create ScReplay fails
res = self.scapi.create_replay(self.VOLUME,
'Test Replay',
60)
mock_post.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=RPLAYS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_replay(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.find_replay(self.VOLUME,
u'Cinder Test Replay012345678910')
mock_post.assert_called()
mock_get_json.assert_called()
self.assertEqual(self.TST_RPLAY, res, 'Unexpected ScReplay')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=[])
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_200)
def test_find_replay_no_replays(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where no replays are found
res = self.scapi.find_replay(self.VOLUME,
u'Cinder Test Replay012345678910')
mock_post.assert_called()
mock_get_json.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'get',
return_value=RESPONSE_204)
def test_find_replay_failure(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where None is returned for replays
res = self.scapi.find_replay(self.VOLUME,
u'Cinder Test Replay012345678910')
mock_post.assert_called()
mock_get_json.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_replay',
return_value=RPLAYS)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_delete_replay(self,
mock_post,
mock_find_replay,
mock_close_connection,
mock_open_connection,
mock_init):
replayId = u'Cinder Test Replay012345678910'
res = self.scapi.delete_replay(self.VOLUME,
replayId)
mock_post.assert_called()
mock_find_replay.assert_called_once_with(self.VOLUME, replayId)
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_replay',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_delete_replay_no_replay(self,
mock_post,
mock_find_replay,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where specified ScReplay does not exist
replayId = u'Cinder Test Replay012345678910'
res = self.scapi.delete_replay(self.VOLUME,
replayId)
mock_post.assert_called()
mock_find_replay.assert_called_once_with(self.VOLUME, replayId)
self.assertTrue(res, 'Expected True')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'find_replay',
return_value=TST_RPLAY)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_delete_replay_failure(self,
mock_post,
mock_find_replay,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where delete ScReplay results in an error
replayId = u'Cinder Test Replay012345678910'
res = self.scapi.delete_replay(self.VOLUME,
replayId)
mock_post.assert_called()
mock_find_replay.assert_called_once_with(self.VOLUME, replayId)
self.assertFalse(res, 'Expected False')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_view_volume(self,
mock_post,
mock_find_volume_folder,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
vol_name = u'Test_create_vol'
res = self.scapi.create_view_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.TST_RPLAY)
mock_post.assert_called()
mock_find_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
mock_first_result.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_volume_folder_path',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_view_volume_create_fldr(self,
mock_post,
mock_find_volume_folder,
mock_create_volume_folder,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where volume folder does not exist and must be created
vol_name = u'Test_create_vol'
res = self.scapi.create_view_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.TST_RPLAY)
mock_post.assert_called()
mock_find_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
mock_create_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
mock_first_result.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_first_result',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_create_volume_folder_path',
return_value=None)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=None)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_create_view_volume_no_vol_fldr(self,
mock_post,
mock_find_volume_folder,
mock_create_volume_folder,
mock_first_result,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where volume folder does not exist and cannot be created
vol_name = u'Test_create_vol'
res = self.scapi.create_view_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.TST_RPLAY)
mock_post.assert_called()
mock_find_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
mock_create_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
mock_first_result.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_find_volume_folder',
return_value=FLDR)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_create_view_volume_failure(self,
mock_post,
mock_find_volume_folder,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where view volume create fails
vol_name = u'Test_create_vol'
res = self.scapi.create_view_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.TST_RPLAY)
mock_post.assert_called()
mock_find_volume_folder.assert_called_once_with(
64702,
self.configuration.dell_sc_volume_folder)
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_view_volume',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_replay',
return_value=RPLAY)
def test_create_cloned_volume(self,
mock_create_replay,
mock_create_view_volume,
mock_close_connection,
mock_open_connection,
mock_init):
vol_name = u'Test_create_clone_vol'
res = self.scapi.create_cloned_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.VOLUME)
mock_create_replay.assert_called_once_with(self.VOLUME,
'Cinder Clone Replay',
60)
mock_create_view_volume.assert_called_once_with(
vol_name,
self.configuration.dell_sc_volume_folder,
self.RPLAY)
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'create_replay',
return_value=None)
def test_create_cloned_volume_failure(self,
mock_create_replay,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where create cloned volumes fails because create_replay
# fails
vol_name = u'Test_create_clone_vol'
res = self.scapi.create_cloned_volume(
vol_name,
self.configuration.dell_sc_volume_folder,
self.VOLUME)
mock_create_replay.assert_called_once_with(self.VOLUME,
'Cinder Clone Replay',
60)
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.StorageCenterApi,
'_get_json',
return_value=VOLUME)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_expand_volume(self,
mock_post,
mock_get_json,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.expand_volume(self.VOLUME, 550)
mock_post.assert_called()
mock_get_json.assert_called()
self.assertEqual(self.VOLUME, res, 'Unexpected ScVolume')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_expand_volume_failure(self,
mock_post,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi.expand_volume(self.VOLUME, 550)
mock_post.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_200)
def test_delete_server(self,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
res = self.scapi._delete_server(self.SCSERVER)
mock_delete.assert_called()
self.assertIsNone(res, 'Expected None')
@mock.patch.object(dell_storagecenter_api.HttpClient,
'delete',
return_value=RESPONSE_200)
def test_delete_server_del_not_allowed(self,
mock_delete,
mock_close_connection,
mock_open_connection,
mock_init):
# Test case where delete of ScServer not allowed
res = self.scapi._delete_server(self.SCSERVER_NO_DEL)
mock_delete.assert_called()
self.assertIsNone(res, 'Expected None')
class DellSCSanAPIConnectionTestCase(test.TestCase):
'''DellSCSanAPIConnectionTestCase
Class to test the Storage Center API connection using Mock.
'''
# Create a Response object that indicates OK
response_ok = models.Response()
response_ok.status_code = 200
response_ok.reason = u'ok'
RESPONSE_200 = response_ok
# Create a Response object that indicates a failure (no content)
response_nc = models.Response()
response_nc.status_code = 204
response_nc.reason = u'duplicate'
RESPONSE_204 = response_nc
def setUp(self):
super(DellSCSanAPIConnectionTestCase, self).setUp()
        # Configuration is a mock. A mock is pretty much a blank
        # slate, and mocks created in setUp carry no driver settings of
        # their own, so we set the config values the tests need here.
self.configuration = mock.Mock()
self.configuration.san_is_local = False
self.configuration.san_ip = "192.168.0.1"
self.configuration.san_login = "admin"
self.configuration.san_password = "mmm"
self.configuration.dell_sc_ssn = 12345
self.configuration.dell_sc_server_folder = 'opnstktst'
self.configuration.dell_sc_volume_folder = 'opnstktst'
self.configuration.dell_sc_api_port = 3033
self.configuration.iscsi_ip_address = '192.168.1.1'
self.configuration.iscsi_port = 3260
self._context = context.get_admin_context()
# Set up the StorageCenterApi
self.scapi = dell_storagecenter_api.StorageCenterApi(
self.configuration.san_ip,
self.configuration.dell_sc_api_port,
self.configuration.san_login,
self.configuration.san_password)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_open_connection(self,
mock_post):
self.scapi.open_connection()
mock_post.assert_called()
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_open_connection_failure(self,
mock_post):
self.assertRaises(exception.VolumeBackendAPIException,
self.scapi.open_connection)
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_204)
def test_close_connection(self,
mock_post):
self.scapi.close_connection()
mock_post.assert_called()
@mock.patch.object(dell_storagecenter_api.HttpClient,
'post',
return_value=RESPONSE_200)
def test_close_connection_failure(self,
mock_post):
self.scapi.close_connection()
mock_post.assert_called()
| apache-2.0 | -992,609,185,727,740,800 | 45.620628 | 79 | 0.476005 | false |
gertingold/lit2016 | examples/unittests/test_pascal_v5.py | 1 | 1282 | from itertools import chain
from unittest import main, TestCase
from pascal_v2 import pascal
class TestExplicit(TestCase):
def test_n0(self):
self.assertEqual(list(pascal(0)), [1])
def test_n1(self):
self.assertEqual(list(pascal(1)), [1, 1])
def test_n5(self):
self.assertEqual(list(pascal(5)), [1, 5, 10, 10, 5, 1])
class TestSums(TestCase):
def test_sum(self):
for n in (10, 100, 1000, 10000):
self.assertEqual(sum(pascal(n)), 2**n)
def test_alternate_sum(self):
for n in (10, 100, 1000, 10000):
self.assertEqual(sum(alternate(pascal(n))), 0)
class TestAdjacent(TestCase):
def test_generate_next_line(self):
for n in (10, 100, 1000, 10000):
expected = [a+b for a, b
in zip(chain(zero(), pascal(n)),
chain(pascal(n), zero()))]
result = list(pascal(n+1))
self.assertEqual(result, expected)
class TestParameters(TestCase):
def test_negative_int(self):
with self.assertRaises(ValueError):
next(pascal(-1))
def alternate(g):
sign = 1
for elem in g:
yield sign*elem
sign = -sign
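# Illustrative usage of alternate() (a doctest-style sketch):
#
#     >>> list(alternate([1, 2, 3, 4]))
#     [1, -2, 3, -4]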
def zero():
yield 0
if __name__ == '__main__':
main()
| mit | 4,772,268,082,943,603,000 | 25.708333 | 63 | 0.567863 | false |
jj0hns0n/geonode | geonode/layers/models.py | 1 | 18480 | # -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import uuid
import logging
from datetime import datetime
from django.db import models
from django.db.models import signals
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.files.storage import FileSystemStorage
from geonode.base.models import ResourceBase, ResourceBaseManager, resourcebase_post_save
from geonode.people.utils import get_valid_user
from agon_ratings.models import OverallRating
from geonode.utils import check_shp_columnnames
from geonode.security.models import remove_object_permissions
logger = logging.getLogger("geonode.layers.models")
shp_exts = ['.shp', ]
csv_exts = ['.csv']
kml_exts = ['.kml']
vec_exts = shp_exts + csv_exts + kml_exts
cov_exts = ['.tif', '.tiff', '.geotiff', '.geotif', '.asc']
TIME_REGEX = (
('[0-9]{8}', _('YYYYMMDD')),
('[0-9]{8}T[0-9]{6}', _("YYYYMMDD'T'hhmmss")),
('[0-9]{8}T[0-9]{6}Z', _("YYYYMMDD'T'hhmmss'Z'")),
)
TIME_REGEX_FORMAT = {
'[0-9]{8}': '%Y%m%d',
'[0-9]{8}T[0-9]{6}': '%Y%m%dT%H%M%S',
'[0-9]{8}T[0-9]{6}Z': '%Y%m%dT%H%M%SZ'
}
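# Illustrative sketch: a granule timestamp such as '20160101T120000Z' matches
# the third pattern above and parses with the corresponding format string:
#
#     >>> datetime.strptime('20160101T120000Z', '%Y%m%dT%H%M%SZ')
#     datetime.datetime(2016, 1, 1, 12, 0)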
class Style(models.Model):
"""Model for storing styles.
"""
name = models.CharField(_('style name'), max_length=255, unique=True)
sld_title = models.CharField(max_length=255, null=True, blank=True)
sld_body = models.TextField(_('sld text'), null=True, blank=True)
sld_version = models.CharField(
_('sld version'),
max_length=12,
null=True,
blank=True)
sld_url = models.CharField(_('sld url'), null=True, max_length=1000)
workspace = models.CharField(max_length=255, null=True, blank=True)
def __str__(self):
return "%s" % self.name.encode('utf-8')
def absolute_url(self):
if self.sld_url:
if self.sld_url.startswith(settings.OGC_SERVER['default']['LOCATION']):
return self.sld_url.split(settings.OGC_SERVER['default']['LOCATION'], 1)[1]
elif self.sld_url.startswith(settings.OGC_SERVER['default']['PUBLIC_LOCATION']):
return self.sld_url.split(settings.OGC_SERVER['default']['PUBLIC_LOCATION'], 1)[1]
return self.sld_url
else:
logger.error("SLD URL is empty for Style %s" % self.name.encode('utf-8'))
return None
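    # Illustrative sketch (hypothetical URLs): with the default location set
    # to 'http://localhost:8080/geoserver/', a style whose sld_url is
    # 'http://localhost:8080/geoserver/rest/styles/point.sld' resolves to
    # the site-relative path 'rest/styles/point.sld' via absolute_url().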
class LayerManager(ResourceBaseManager):
def __init__(self):
models.Manager.__init__(self)
class Layer(ResourceBase):
"""
Layer (inherits ResourceBase fields)
"""
# internal fields
objects = LayerManager()
workspace = models.CharField(max_length=128)
store = models.CharField(max_length=128)
storeType = models.CharField(max_length=128)
name = models.CharField(max_length=128)
typename = models.CharField(max_length=128, null=True, blank=True)
is_mosaic = models.BooleanField(default=False)
has_time = models.BooleanField(default=False)
has_elevation = models.BooleanField(default=False)
time_regex = models.CharField(max_length=128, null=True, blank=True, choices=TIME_REGEX)
elevation_regex = models.CharField(max_length=128, null=True, blank=True)
default_style = models.ForeignKey(
Style,
related_name='layer_default_style',
null=True,
blank=True)
styles = models.ManyToManyField(Style, related_name='layer_styles')
charset = models.CharField(max_length=255, default='UTF-8')
upload_session = models.ForeignKey('UploadSession', blank=True, null=True)
@property
def is_remote(self):
return self.storeType == "remoteStore"
@property
def service(self):
"""Get the related service object dynamically
"""
service_layers = self.servicelayer_set.all()
if len(service_layers) == 0:
return None
else:
return service_layers[0].service
def is_vector(self):
return self.storeType == 'dataStore'
@property
def display_type(self):
return ({
"dataStore": "Vector Data",
"coverageStore": "Raster Data",
}).get(self.storeType, "Data")
@property
def data_model(self):
if hasattr(self, 'modeldescription_set'):
lmd = self.modeldescription_set.all()
if lmd.exists():
return lmd.get().get_django_model()
return None
@property
def data_objects(self):
if self.data_model is not None:
return self.data_model.objects.using('datastore')
return None
@property
def service_type(self):
if self.storeType == 'coverageStore':
return "WCS"
if self.storeType == 'dataStore':
return "WFS"
@property
def ows_url(self):
if self.is_remote:
return self.service.base_url
else:
return settings.OGC_SERVER['default']['PUBLIC_LOCATION'] + "wms"
@property
def ptype(self):
if self.is_remote:
return self.service.ptype
else:
return "gxp_wmscsource"
@property
def service_typename(self):
if self.is_remote:
return "%s:%s" % (self.service.name, self.typename)
else:
return self.typename
@property
def attributes(self):
return self.attribute_set.exclude(attribute='the_geom')
def get_base_file(self):
"""Get the shp or geotiff file for this layer.
"""
# If there was no upload_session return None
if self.upload_session is None:
return None, None
base_exts = [x.replace('.', '') for x in cov_exts + vec_exts]
base_files = self.upload_session.layerfile_set.filter(
name__in=base_exts)
base_files_count = base_files.count()
# If there are no files in the upload_session return None
if base_files_count == 0:
return None, None
msg = 'There should only be one main file (.shp or .geotiff or .asc), found %s' % base_files_count
assert base_files_count == 1, msg
# we need to check, for shapefile, if column names are valid
list_col = None
if self.storeType == 'dataStore':
valid_shp, wrong_column_name, list_col = check_shp_columnnames(self)
if wrong_column_name:
msg = 'Shapefile has an invalid column name: %s' % wrong_column_name
else:
msg = _('File cannot be opened, maybe check the encoding')
assert valid_shp, msg
# no error, let's return the base files
return base_files.get(), list_col
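    # Illustrative sketch: for a shapefile upload, get_base_file() returns
    # the single LayerFile whose extension is 'shp' together with list_col,
    # the (possibly sanitized) column names; for raster uploads list_col
    # stays None.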
def get_absolute_url(self):
return reverse('layer_detail', args=(self.service_typename,))
def attribute_config(self):
# Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute, l.attribute_label) for l in visible_attributes])
}
return cfg
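    # Illustrative result (hypothetical attributes): with visible attributes
    # 'name' and 'pop' labelled 'Name' and 'Population', attribute_config()
    # returns:
    #
    #     {'getFeatureInfo': {'fields': ['name', 'pop'],
    #                         'propertyNames': {'name': 'Name',
    #                                           'pop': 'Population'}}}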
def __str__(self):
if self.typename is not None:
return "%s Layer" % self.service_typename.encode('utf-8')
elif self.name is not None:
return "%s Layer" % self.name
else:
return "Unamed Layer"
class Meta:
# custom permissions,
# change and delete are standard in django-guardian
permissions = (
('change_layer_data', 'Can edit layer data'),
('change_layer_style', 'Can change layer style'),
)
# Permission Level Constants
# LEVEL_NONE inherited
LEVEL_READ = 'layer_readonly'
LEVEL_WRITE = 'layer_readwrite'
LEVEL_ADMIN = 'layer_admin'
def maps(self):
from geonode.maps.models import MapLayer
return MapLayer.objects.filter(name=self.typename)
@property
def class_name(self):
return self.__class__.__name__
@property
def geogig_enabled(self):
return (len(self.link_set.geogig()) > 0)
@property
def geogig_link(self):
if(self.geogig_enabled):
return getattr(self.link_set.filter(name__icontains='clone in geogig').first(), 'url', None)
return None
class UploadSession(models.Model):
"""Helper class to keep track of uploads.
"""
date = models.DateTimeField(auto_now=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
processed = models.BooleanField(default=False)
error = models.TextField(blank=True, null=True)
traceback = models.TextField(blank=True, null=True)
context = models.TextField(blank=True, null=True)
def successful(self):
        return self.processed and self.error is None
class LayerFile(models.Model):
"""Helper class to store original files.
"""
upload_session = models.ForeignKey(UploadSession)
name = models.CharField(max_length=255)
base = models.BooleanField(default=False)
file = models.FileField(upload_to='layers',
storage=FileSystemStorage(base_url=settings.LOCAL_MEDIA_URL), max_length=255)
class AttributeManager(models.Manager):
"""Helper class to access filtered attributes
"""
def visible(self):
return self.get_queryset().filter(
visible=True).order_by('display_order')
class Attribute(models.Model):
"""
Auxiliary model for storing layer attributes.
This helps reduce the need for runtime lookups
to other servers, and lets users customize attribute titles,
sort order, and visibility.
"""
layer = models.ForeignKey(
Layer,
blank=False,
null=False,
unique=False,
related_name='attribute_set')
attribute = models.CharField(
_('attribute name'),
help_text=_('name of attribute as stored in shapefile/spatial database'),
max_length=255,
blank=False,
null=True,
unique=False)
description = models.CharField(
_('attribute description'),
help_text=_('description of attribute to be used in metadata'),
max_length=255,
blank=True,
null=True)
attribute_label = models.CharField(
_('attribute label'),
help_text=_('title of attribute as displayed in GeoNode'),
max_length=255,
blank=True,
null=True,
unique=False)
attribute_type = models.CharField(
_('attribute type'),
help_text=_('the data type of the attribute (integer, string, geometry, etc)'),
max_length=50,
blank=False,
null=False,
default='xsd:string',
unique=False)
visible = models.BooleanField(
_('visible?'),
help_text=_('specifies if the attribute should be displayed in identify results'),
default=True)
display_order = models.IntegerField(
_('display order'),
help_text=_('specifies the order in which attribute should be displayed in identify results'),
default=1)
# statistical derivations
count = models.IntegerField(
_('count'),
help_text=_('count value for this field'),
default=1)
min = models.CharField(
_('min'),
help_text=_('minimum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
max = models.CharField(
_('max'),
help_text=_('maximum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
average = models.CharField(
_('average'),
help_text=_('average value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
median = models.CharField(
_('median'),
help_text=_('median value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
stddev = models.CharField(
_('standard deviation'),
help_text=_('standard deviation for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
sum = models.CharField(
_('sum'),
help_text=_('sum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
unique_values = models.TextField(
_('unique values for this field'),
null=True,
blank=True,
default='NA')
last_stats_updated = models.DateTimeField(
_('last modified'),
default=datetime.now,
        help_text=_('date when attribute statistics were last updated'))
    # passing the method itself, not the result, so the timestamp is
    # evaluated when the default is applied
objects = AttributeManager()
def __str__(self):
return "%s" % self.attribute_label.encode(
"utf-8") if self.attribute_label else self.attribute.encode("utf-8")
def unique_values_as_list(self):
return self.unique_values.split(',')
def pre_save_layer(instance, sender, **kwargs):
if kwargs.get('raw', False):
instance.owner = instance.resourcebase_ptr.owner
instance.uuid = instance.resourcebase_ptr.uuid
instance.bbox_x0 = instance.resourcebase_ptr.bbox_x0
instance.bbox_x1 = instance.resourcebase_ptr.bbox_x1
instance.bbox_y0 = instance.resourcebase_ptr.bbox_y0
instance.bbox_y1 = instance.resourcebase_ptr.bbox_y1
if instance.abstract == '' or instance.abstract is None:
instance.abstract = unicode(_('No abstract provided'))
if instance.title == '' or instance.title is None:
instance.title = instance.name
# Set a default user for accountstream to work correctly.
if instance.owner is None:
instance.owner = get_valid_user()
if instance.uuid == '':
instance.uuid = str(uuid.uuid1())
if instance.typename is None:
# Set a sensible default for the typename
instance.typename = 'geonode:%s' % instance.name
base_file, info = instance.get_base_file()
if info:
instance.info = info
if base_file is not None:
extension = '.%s' % base_file.name
if extension in vec_exts:
instance.storeType = 'dataStore'
elif extension in cov_exts:
instance.storeType = 'coverageStore'
# Set sane defaults for None in bbox fields.
if instance.bbox_x0 is None:
instance.bbox_x0 = -180
if instance.bbox_x1 is None:
instance.bbox_x1 = 180
if instance.bbox_y0 is None:
instance.bbox_y0 = -90
if instance.bbox_y1 is None:
instance.bbox_y1 = 90
bbox = [
instance.bbox_x0,
instance.bbox_x1,
instance.bbox_y0,
instance.bbox_y1]
instance.set_bounds_from_bbox(bbox)
def pre_delete_layer(instance, sender, **kwargs):
"""
    Remove any style associated with the layer, if it is not used by other
    layers. The default style is deleted in post_delete_layer.
"""
if instance.is_remote:
# we need to delete the maplayers here because in the post save layer.service is not available anymore
# REFACTOR
from geonode.maps.models import MapLayer
if instance.typename:
logger.debug(
"Going to delete associated maplayers for [%s]",
instance.typename.encode('utf-8'))
MapLayer.objects.filter(
name=instance.typename,
ows_url=instance.ows_url).delete()
return
logger.debug(
"Going to delete the styles associated for [%s]",
instance.typename.encode('utf-8'))
ct = ContentType.objects.get_for_model(instance)
OverallRating.objects.filter(
content_type=ct,
object_id=instance.id).delete()
default_style = instance.default_style
for style in instance.styles.all():
if style.layer_styles.all().count() == 1:
if style != default_style:
style.delete()
# Delete object permissions
remove_object_permissions(instance)
def post_delete_layer(instance, sender, **kwargs):
"""
    Remove the layer from any associated maps, and delete the layer's
    default style.
"""
if instance.is_remote:
return
from geonode.maps.models import MapLayer
if instance.typename:
logger.debug(
"Going to delete associated maplayers for [%s]",
instance.typename.encode('utf-8'))
MapLayer.objects.filter(
name=instance.typename,
ows_url=instance.ows_url).delete()
if instance.typename:
logger.debug(
"Going to delete the default style for [%s]",
instance.typename.encode('utf-8'))
if instance.default_style and Layer.objects.filter(
default_style__id=instance.default_style.id).count() == 0:
instance.default_style.delete()
try:
if instance.upload_session:
for lf in instance.upload_session.layerfile_set.all():
lf.file.delete()
except UploadSession.DoesNotExist:
pass
signals.pre_save.connect(pre_save_layer, sender=Layer)
signals.post_save.connect(resourcebase_post_save, sender=Layer)
signals.pre_delete.connect(pre_delete_layer, sender=Layer)
signals.post_delete.connect(post_delete_layer, sender=Layer)
| gpl-3.0 | -5,706,157,803,603,171,000 | 30.752577 | 110 | 0.613528 | false |
procangroup/edx-platform | common/djangoapps/student/views/dashboard.py | 1 | 33146 | """
Dashboard view and supporting methods
"""
import datetime
import logging
from collections import defaultdict
from completion.exceptions import UnavailableCompletionData
from completion.utilities import get_key_to_last_completed_course_block
from django.conf import settings
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import NoReverseMatch, reverse, reverse_lazy
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from opaque_keys.edx.keys import CourseKey
from pytz import UTC
from six import text_type, iteritems
import track.views
from bulk_email.models import BulkEmailFlag, Optout # pylint: disable=import-error
from course_modes.models import CourseMode
from courseware.access import has_access
from edxmako.shortcuts import render_to_response, render_to_string
from entitlements.models import CourseEntitlement
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from openedx.core.djangoapps import monitoring_utils
from openedx.core.djangoapps.catalog.utils import (
get_programs,
get_pseudo_session_for_entitlement,
get_visible_sessions_for_entitlement
)
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.programs.utils import ProgramDataExtender, ProgramProgressMeter
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.waffle_utils import WaffleFlag, WaffleFlagNamespace
from openedx.features.enterprise_support.api import get_dashboard_consent_notification
from shoppingcart.api import order_history
from shoppingcart.models import CourseRegistrationCode, DonationConfiguration
from student.cookies import set_user_info_cookie
from student.helpers import cert_info, check_verify_status_by_course
from student.models import (
CourseEnrollment,
CourseEnrollmentAttribute,
DashboardConfiguration,
UserProfile
)
from util.milestones_helpers import get_pre_requisite_courses_not_completed
from xmodule.modulestore.django import modulestore
log = logging.getLogger("edx.student")
def get_org_black_and_whitelist_for_site():
"""
Returns the org blacklist and whitelist for the current site.
Returns:
(org_whitelist, org_blacklist): A tuple of lists of orgs that serve as
either a blacklist or a whitelist of orgs for the current site. The
whitelist takes precedence, and the blacklist is used if the
whitelist is None.
"""
# Default blacklist is empty.
org_blacklist = None
# Whitelist the orgs configured for the current site. Each site outside
# of edx.org has a list of orgs associated with its configuration.
org_whitelist = configuration_helpers.get_current_site_orgs()
if not org_whitelist:
# If there is no whitelist, the blacklist will include all orgs that
# have been configured for any other sites. This applies to edx.org,
# where it is easier to blacklist all other orgs.
org_blacklist = configuration_helpers.get_all_orgs()
return org_whitelist, org_blacklist
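# Illustrative sketch (hypothetical orgs): a site configured for ['FooX']
# yields (['FooX'], None), while a site with no org whitelist yields
# (None, [<orgs configured for all other sites>]).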
def _get_recently_enrolled_courses(course_enrollments):
"""
Given a list of enrollments, filter out all but recent enrollments.
Args:
course_enrollments (list[CourseEnrollment]): A list of course enrollments.
Returns:
list[CourseEnrollment]: A list of recent course enrollments.
"""
seconds = DashboardConfiguration.current().recent_enrollment_time_delta
time_delta = (datetime.datetime.now(UTC) - datetime.timedelta(seconds=seconds))
return [
enrollment for enrollment in course_enrollments
# If the enrollment has no created date, we are explicitly excluding the course
# from the list of recent enrollments.
if enrollment.is_active and enrollment.created > time_delta
]
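# Illustrative sketch: with recent_enrollment_time_delta set to 600 seconds,
# only active enrollments created within the last ten minutes survive this
# filter.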
def _allow_donation(course_modes, course_id, enrollment):
"""
Determines if the dashboard will request donations for the given course.
Check if donations are configured for the platform, and if the current course is accepting donations.
Args:
course_modes (dict): Mapping of course ID's to course mode dictionaries.
course_id (str): The unique identifier for the course.
enrollment(CourseEnrollment): The enrollment object in which the user is enrolled
Returns:
True if the course is allowing donations.
"""
if course_id not in course_modes:
flat_unexpired_modes = {
text_type(course_id): [mode for mode in modes]
for course_id, modes in iteritems(course_modes)
}
flat_all_modes = {
text_type(course_id): [mode.slug for mode in modes]
for course_id, modes in iteritems(CourseMode.all_modes_for_courses([course_id]))
}
log.error(
u'Can not find `%s` in course modes.`%s`. All modes: `%s`',
course_id,
flat_unexpired_modes,
flat_all_modes
)
donations_enabled = configuration_helpers.get_value(
'ENABLE_DONATIONS',
DonationConfiguration.current().enabled
)
return (
donations_enabled and
enrollment.mode in course_modes[course_id] and
course_modes[course_id][enrollment.mode].min_price == 0
)
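# Illustrative sketch (hypothetical course): a donation prompt appears only
# when donations are enabled platform-wide and the learner's current mode is
# free, e.g. an 'audit' enrollment whose mode has min_price == 0.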
def _create_recent_enrollment_message(course_enrollments, course_modes): # pylint: disable=invalid-name
"""
Builds a recent course enrollment message.
Constructs a new message template based on any recent course enrollments
for the student.
Args:
course_enrollments (list[CourseEnrollment]): a list of course enrollments.
course_modes (dict): Mapping of course ID's to course mode dictionaries.
Returns:
A string representing the HTML message output from the message template.
None if there are no recently enrolled courses.
"""
recently_enrolled_courses = _get_recently_enrolled_courses(course_enrollments)
if recently_enrolled_courses:
enrollments_count = len(recently_enrolled_courses)
course_name_separator = ', '
        # If exactly two courses were enrolled, join their names with 'and'
if enrollments_count == 2:
course_name_separator = _(' and ')
course_names = course_name_separator.join(
[enrollment.course_overview.display_name for enrollment in recently_enrolled_courses]
)
allow_donations = any(
_allow_donation(course_modes, enrollment.course_overview.id, enrollment)
for enrollment in recently_enrolled_courses
)
platform_name = configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
return render_to_string(
'enrollment/course_enrollment_message.html',
{
'course_names': course_names,
'enrollments_count': enrollments_count,
'allow_donations': allow_donations,
'platform_name': platform_name,
'course_id': recently_enrolled_courses[0].course_overview.id if enrollments_count == 1 else None
}
)
def get_course_enrollments(user, org_whitelist, org_blacklist):
"""
Given a user, return a filtered set of his or her course enrollments.
Arguments:
user (User): the user in question.
org_whitelist (list[str]): If not None, ONLY courses of these orgs will be returned.
org_blacklist (list[str]): Courses of these orgs will be excluded.
Returns:
generator[CourseEnrollment]: a sequence of enrollments to be displayed
on the user's dashboard.
"""
for enrollment in CourseEnrollment.enrollments_for_user_with_overviews_preload(user):
# If the course is missing or broken, log an error and skip it.
course_overview = enrollment.course_overview
if not course_overview:
log.error(
"User %s enrolled in broken or non-existent course %s",
user.username,
enrollment.course_id
)
continue
# Filter out anything that is not in the whitelist.
if org_whitelist and course_overview.location.org not in org_whitelist:
continue
# Conversely, filter out any enrollments in the blacklist.
elif org_blacklist and course_overview.location.org in org_blacklist:
continue
# Else, include the enrollment.
else:
yield enrollment
def complete_course_mode_info(course_id, enrollment, modes=None):
"""
We would like to compute some more information from the given course modes
and the user's current enrollment
Returns the given information:
- whether to show the course upsell information
- numbers of days until they can't upsell anymore
"""
if modes is None:
modes = CourseMode.modes_for_course_dict(course_id)
mode_info = {'show_upsell': False, 'days_for_upsell': None}
# we want to know if the user is already enrolled as verified or credit and
# if verified is an option.
if CourseMode.VERIFIED in modes and enrollment.mode in CourseMode.UPSELL_TO_VERIFIED_MODES:
mode_info['show_upsell'] = True
mode_info['verified_sku'] = modes['verified'].sku
mode_info['verified_bulk_sku'] = modes['verified'].bulk_sku
# if there is an expiration date, find out how long from now it is
if modes['verified'].expiration_datetime:
today = datetime.datetime.now(UTC).date()
mode_info['days_for_upsell'] = (modes['verified'].expiration_datetime.date() - today).days
return mode_info
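# Illustrative result (hypothetical values): an 'audit' enrollment in a
# course whose verified mode has sku 'ABC123' and expires in ten days yields
#
#     {'show_upsell': True, 'days_for_upsell': 10,
#      'verified_sku': 'ABC123', 'verified_bulk_sku': <bulk sku or None>}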
def is_course_blocked(request, redeemed_registration_codes, course_key):
"""
Checking if registration is blocked or not.
"""
blocked = False
for redeemed_registration in redeemed_registration_codes:
# registration codes may be generated via Bulk Purchase Scenario
# we have to check only for the invoice generated registration codes
# that their invoice is valid or not
if redeemed_registration.invoice_item:
if not redeemed_registration.invoice_item.invoice.is_valid:
blocked = True
# disabling email notifications for unpaid registration courses
Optout.objects.get_or_create(user=request.user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
request.user.username,
request.user.email,
course_key,
)
track.views.server_track(
request,
"change-email1-settings",
{"receive_emails": "no", "course": text_type(course_key)},
page='dashboard',
)
break
return blocked
def get_verification_error_reasons_for_display(verification_error_codes):
    """
    Map each known verification error code to a translated, human-readable
    message for display; unrecognized codes are skipped.
    """
    verification_errors = []
verification_error_map = {
'photos_mismatched': _('Photos are mismatched'),
'id_image_missing_name': _('Name missing from ID photo'),
'id_image_missing': _('ID photo not provided'),
'id_invalid': _('ID is invalid'),
'user_image_not_clear': _('Learner photo is blurry'),
'name_mismatch': _('Name on ID does not match name on account'),
'user_image_missing': _('Learner photo not provided'),
'id_image_not_clear': _('ID photo is blurry'),
}
for error in verification_error_codes:
error_text = verification_error_map.get(error)
if error_text:
verification_errors.append(error_text)
return verification_errors
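# Illustrative usage (a sketch; exact reprs depend on locale and Python
# version):
#
#     >>> get_verification_error_reasons_for_display(['id_invalid', 'bogus'])
#     [u'ID is invalid']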
def reverification_info(statuses):
"""
Returns reverification-related information for *all* of user's enrollments whose
reverification status is in statuses.
Args:
statuses (list): a list of reverification statuses we want information for
example: ["must_reverify", "denied"]
Returns:
dictionary of lists: dictionary with one key per status, e.g.
dict["must_reverify"] = []
dict["must_reverify"] = [some information]
"""
reverifications = defaultdict(list)
# Sort the data by the reverification_end_date
for status in statuses:
if reverifications[status]:
reverifications[status].sort(key=lambda x: x.date)
return reverifications
def _credit_statuses(user, course_enrollments):
"""
Retrieve the status for credit courses.
A credit course is a course for which a user can purchased
college credit. The current flow is:
1. User becomes eligible for credit (submits verifications, passes the course, etc.)
2. User purchases credit from a particular credit provider.
3. User requests credit from the provider, usually creating an account on the provider's site.
4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.
The dashboard is responsible for communicating the user's state in this flow.
Arguments:
user (User): The currently logged-in user.
course_enrollments (list[CourseEnrollment]): List of enrollments for the
user.
Returns: dict
The returned dictionary has keys that are `CourseKey`s and values that
are dictionaries with:
* eligible (bool): True if the user is eligible for credit in this course.
* deadline (datetime): The deadline for purchasing and requesting credit for this course.
* purchased (bool): Whether the user has purchased credit for this course.
* provider_name (string): The display name of the credit provider.
* provider_status_url (string): A URL the user can visit to check on their credit request status.
* request_status (string): Either "pending", "approved", or "rejected"
* error (bool): If true, an unexpected error occurred when retrieving the credit status,
so the user should contact the support team.
Example:
>>> _credit_statuses(user, course_enrollments)
{
CourseKey.from_string("edX/DemoX/Demo_Course"): {
"course_key": "edX/DemoX/Demo_Course",
"eligible": True,
"deadline": 2015-11-23 00:00:00 UTC,
"purchased": True,
"provider_name": "Hogwarts",
"provider_status_url": "http://example.com/status",
"request_status": "pending",
"error": False
}
}
"""
from openedx.core.djangoapps.credit import api as credit_api
# Feature flag off
if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
return {}
request_status_by_course = {
request["course_key"]: request["status"]
for request in credit_api.get_credit_requests_for_user(user.username)
}
credit_enrollments = {
enrollment.course_id: enrollment
for enrollment in course_enrollments
if enrollment.mode == "credit"
}
# When a user purchases credit in a course, the user's enrollment
# mode is set to "credit" and an enrollment attribute is set
# with the ID of the credit provider. We retrieve *all* such attributes
# here to minimize the number of database queries.
purchased_credit_providers = {
attribute.enrollment.course_id: attribute.value
for attribute in CourseEnrollmentAttribute.objects.filter(
namespace="credit",
name="provider_id",
enrollment__in=credit_enrollments.values()
).select_related("enrollment")
}
provider_info_by_id = {
provider["id"]: provider
for provider in credit_api.get_credit_providers()
}
statuses = {}
for eligibility in credit_api.get_eligibilities_for_user(user.username):
course_key = CourseKey.from_string(text_type(eligibility["course_key"]))
providers_names = get_credit_provider_display_names(course_key)
status = {
"course_key": text_type(course_key),
"eligible": True,
"deadline": eligibility["deadline"],
"purchased": course_key in credit_enrollments,
"provider_name": make_providers_strings(providers_names),
"provider_status_url": None,
"provider_id": None,
"request_status": request_status_by_course.get(course_key),
"error": False,
}
# If the user has purchased credit, then include information about the credit
# provider from which the user purchased credit.
# We retrieve the provider's ID from the an "enrollment attribute" set on the user's
# enrollment when the user's order for credit is fulfilled by the E-Commerce service.
if status["purchased"]:
provider_id = purchased_credit_providers.get(course_key)
if provider_id is None:
status["error"] = True
log.error(
u"Could not find credit provider associated with credit enrollment "
u"for user %s in course %s. The user will not be able to see his or her "
u"credit request status on the student dashboard. This attribute should "
u"have been set when the user purchased credit in the course.",
user.id, course_key
)
else:
provider_info = provider_info_by_id.get(provider_id, {})
status["provider_name"] = provider_info.get("display_name")
status["provider_status_url"] = provider_info.get("status_url")
status["provider_id"] = provider_id
statuses[course_key] = status
return statuses
def _get_urls_for_resume_buttons(user, enrollments):
    '''
    For each enrollment, return the URL of the last completed course block
    the user can resume from, or an empty string when no completion data
    exists.
    '''
resume_button_urls = []
for enrollment in enrollments:
try:
block_key = get_key_to_last_completed_course_block(user, enrollment.course_id)
url_to_block = reverse(
'jump_to',
kwargs={'course_id': enrollment.course_id, 'location': block_key}
)
except UnavailableCompletionData:
url_to_block = ''
resume_button_urls.append(url_to_block)
return resume_button_urls
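# Illustrative sketch (hypothetical IDs): for three enrollments where only
# the second has completion data, this returns something like
# ['', '/courses/<course_id>/jump_to/<block_key>', ''].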
@login_required
@ensure_csrf_cookie
def student_dashboard(request):
"""
Provides the LMS dashboard view
TODO: This is lms specific and does not belong in common code.
Arguments:
request: The request object.
Returns:
The dashboard response.
"""
user = request.user
if not UserProfile.objects.filter(user=user).exists():
return redirect(reverse('account_settings'))
platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)
enable_verified_certificates = configuration_helpers.get_value(
'ENABLE_VERIFIED_CERTIFICATES',
settings.FEATURES.get('ENABLE_VERIFIED_CERTIFICATES')
)
display_course_modes_on_dashboard = configuration_helpers.get_value(
'DISPLAY_COURSE_MODES_ON_DASHBOARD',
settings.FEATURES.get('DISPLAY_COURSE_MODES_ON_DASHBOARD', True)
)
activation_email_support_link = configuration_helpers.get_value(
'ACTIVATION_EMAIL_SUPPORT_LINK', settings.ACTIVATION_EMAIL_SUPPORT_LINK
) or settings.SUPPORT_SITE_LINK
# Get the org whitelist or the org blacklist for the current site
site_org_whitelist, site_org_blacklist = get_org_black_and_whitelist_for_site()
course_enrollments = list(get_course_enrollments(user, site_org_whitelist, site_org_blacklist))
# Get the entitlements for the user and a mapping to all available sessions for that entitlement
# If an entitlement has no available sessions, pass through a mock course overview object
course_entitlements = list(CourseEntitlement.get_active_entitlements_for_user(user))
course_entitlement_available_sessions = {}
unfulfilled_entitlement_pseudo_sessions = {}
for course_entitlement in course_entitlements:
course_entitlement.update_expired_at()
available_sessions = get_visible_sessions_for_entitlement(course_entitlement)
course_entitlement_available_sessions[str(course_entitlement.uuid)] = available_sessions
if not course_entitlement.enrollment_course_run:
# Unfulfilled entitlements need a mock session for metadata
pseudo_session = get_pseudo_session_for_entitlement(course_entitlement)
unfulfilled_entitlement_pseudo_sessions[str(course_entitlement.uuid)] = pseudo_session
# Record how many courses there are so that we can get a better
# understanding of usage patterns on prod.
monitoring_utils.accumulate('num_courses', len(course_enrollments))
# Sort the enrollment pairs by the enrollment date
course_enrollments.sort(key=lambda x: x.created, reverse=True)
# Retrieve the course modes for each course
enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
__, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
course_modes_by_course = {
course_id: {
mode.slug: mode
for mode in modes
}
for course_id, modes in iteritems(unexpired_course_modes)
}
# Check to see if the student has recently enrolled in a course.
# If so, display a notification message confirming the enrollment.
enrollment_message = _create_recent_enrollment_message(
course_enrollments, course_modes_by_course
)
course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
sidebar_account_activation_message = ''
banner_account_activation_message = ''
display_account_activation_message_on_sidebar = configuration_helpers.get_value(
'DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR',
settings.FEATURES.get('DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR', False)
)
# Display activation message in sidebar if DISPLAY_ACCOUNT_ACTIVATION_MESSAGE_ON_SIDEBAR
# flag is active. Otherwise display existing message at the top.
if display_account_activation_message_on_sidebar and not user.is_active:
sidebar_account_activation_message = render_to_string(
'registration/account_activation_sidebar_notice.html',
{
'email': user.email,
'platform_name': platform_name,
'activation_email_support_link': activation_email_support_link
}
)
elif not user.is_active:
banner_account_activation_message = render_to_string(
'registration/activate_account_notice.html',
{'email': user.email}
)
enterprise_message = get_dashboard_consent_notification(request, user, course_enrollments)
# Disable lookup of Enterprise consent_required_course due to ENT-727
# Will re-enable after fixing WL-1315
consent_required_courses = set()
enterprise_customer_name = None
# Account activation message
account_activation_messages = [
message for message in messages.get_messages(request) if 'account-activation' in message.tags
]
# Global staff can see what courses encountered an error on their dashboard
staff_access = False
errored_courses = {}
if has_access(user, 'staff', 'global'):
# Show any courses that encountered an error on load
staff_access = True
errored_courses = modulestore().get_errored_courses()
show_courseware_links_for = frozenset(
enrollment.course_id for enrollment in course_enrollments
if has_access(request.user, 'load', enrollment.course_overview)
)
# Find programs associated with course runs being displayed. This information
# is passed in the template context to allow rendering of program-related
# information on the dashboard.
meter = ProgramProgressMeter(request.site, user, enrollments=course_enrollments)
ecommerce_service = EcommerceService()
inverted_programs = meter.invert_programs()
urls, programs_data = {}, {}
bundles_on_dashboard_flag = WaffleFlag(WaffleFlagNamespace(name=u'student.experiments'), u'bundles_on_dashboard')
# TODO: Delete this code and the relevant HTML code after testing LEARNER-3072 is complete
if bundles_on_dashboard_flag.is_enabled() and inverted_programs and inverted_programs.items():
if len(course_enrollments) < 4:
for program in inverted_programs.values():
try:
program_uuid = program[0]['uuid']
program_data = get_programs(request.site, uuid=program_uuid)
program_data = ProgramDataExtender(program_data, request.user).extend()
skus = program_data.get('skus')
checkout_page_url = ecommerce_service.get_checkout_page_url(*skus)
program_data['completeProgramURL'] = checkout_page_url + '&bundle=' + program_data.get('uuid')
programs_data[program_uuid] = program_data
except: # pylint: disable=bare-except
pass
# Construct a dictionary of course mode information
# used to render the course list. We re-use the course modes dict
# we loaded earlier to avoid hitting the database.
course_mode_info = {
enrollment.course_id: complete_course_mode_info(
enrollment.course_id, enrollment,
modes=course_modes_by_course[enrollment.course_id]
)
for enrollment in course_enrollments
}
# Determine the per-course verification status
# This is a dictionary in which the keys are course locators
# and the values are one of:
#
# VERIFY_STATUS_NEED_TO_VERIFY
# VERIFY_STATUS_SUBMITTED
# VERIFY_STATUS_APPROVED
# VERIFY_STATUS_MISSED_DEADLINE
#
# Each of which correspond to a particular message to display
# next to the course on the dashboard.
#
# If a course is not included in this dictionary,
# there is no verification messaging to display.
verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
cert_statuses = {
enrollment.course_id: cert_info(request.user, enrollment.course_overview)
for enrollment in course_enrollments
}
# only show email settings for Mongo course and when bulk email is turned on
show_email_settings_for = frozenset(
enrollment.course_id for enrollment in course_enrollments if (
BulkEmailFlag.feature_enabled(enrollment.course_id)
)
)
# Verification Attempts
# Used to generate the "you must reverify for course x" banner
verification_status, verification_error_codes = SoftwareSecurePhotoVerification.user_status(user)
verification_errors = get_verification_error_reasons_for_display(verification_error_codes)
# Gets data for midcourse reverifications, if any are necessary or have failed
statuses = ["approved", "denied", "pending", "must_reverify"]
reverifications = reverification_info(statuses)
block_courses = frozenset(
enrollment.course_id for enrollment in course_enrollments
if is_course_blocked(
request,
CourseRegistrationCode.objects.filter(
course_id=enrollment.course_id,
registrationcoderedemption__redeemed_by=request.user
),
enrollment.course_id
)
)
enrolled_courses_either_paid = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.is_paid_course()
)
# If there are *any* denied reverifications that have not been toggled off,
# we'll display the banner
denied_banner = any(item.display for item in reverifications["denied"])
# Populate the Order History for the side-bar.
order_history_list = order_history(
user,
course_org_filter=site_org_whitelist,
org_filter_out_set=site_org_blacklist
)
# get list of courses having pre-requisites yet to be completed
courses_having_prerequisites = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.course_overview.pre_requisite_courses
)
courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)
if 'notlive' in request.GET:
redirect_message = _("The course you are looking for does not start until {date}.").format(
date=request.GET['notlive']
)
elif 'course_closed' in request.GET:
redirect_message = _("The course you are looking for is closed for enrollment as of {date}.").format(
date=request.GET['course_closed']
)
else:
redirect_message = ''
valid_verification_statuses = ['approved', 'must_reverify', 'pending', 'expired']
display_sidebar_on_dashboard = len(order_history_list) or verification_status in valid_verification_statuses
# Filter out any course enrollment course cards that are associated with fulfilled entitlements
for entitlement in [e for e in course_entitlements if e.enrollment_course_run is not None]:
course_enrollments = [
enr for enr in course_enrollments if entitlement.enrollment_course_run.course_id != enr.course_id
]
context = {
'urls': urls,
'programs_data': programs_data,
'enterprise_message': enterprise_message,
'consent_required_courses': consent_required_courses,
'enterprise_customer_name': enterprise_customer_name,
'enrollment_message': enrollment_message,
'redirect_message': redirect_message,
'account_activation_messages': account_activation_messages,
'course_enrollments': course_enrollments,
'course_entitlements': course_entitlements,
'course_entitlement_available_sessions': course_entitlement_available_sessions,
'unfulfilled_entitlement_pseudo_sessions': unfulfilled_entitlement_pseudo_sessions,
'course_optouts': course_optouts,
'banner_account_activation_message': banner_account_activation_message,
'sidebar_account_activation_message': sidebar_account_activation_message,
'staff_access': staff_access,
'errored_courses': errored_courses,
'show_courseware_links_for': show_courseware_links_for,
'all_course_modes': course_mode_info,
'cert_statuses': cert_statuses,
'credit_statuses': _credit_statuses(user, course_enrollments),
'show_email_settings_for': show_email_settings_for,
'reverifications': reverifications,
'verification_status': verification_status,
'verification_status_by_course': verify_status_by_course,
'verification_errors': verification_errors,
'block_courses': block_courses,
'denied_banner': denied_banner,
'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
'user': user,
'logout_url': reverse('logout'),
'platform_name': platform_name,
'enrolled_courses_either_paid': enrolled_courses_either_paid,
'provider_states': [],
'order_history_list': order_history_list,
'courses_requirements_not_met': courses_requirements_not_met,
'nav_hidden': True,
'inverted_programs': inverted_programs,
'show_program_listing': ProgramsApiConfig.is_enabled(),
'show_dashboard_tabs': True,
'disable_courseware_js': True,
'display_course_modes_on_dashboard': enable_verified_certificates and display_course_modes_on_dashboard,
'display_sidebar_on_dashboard': display_sidebar_on_dashboard,
}
if ecommerce_service.is_enabled(request.user):
context.update({
'use_ecommerce_payment_flow': True,
'ecommerce_payment_page': ecommerce_service.payment_page_url(),
})
# Gather urls for course card resume buttons.
resume_button_urls = _get_urls_for_resume_buttons(user, course_enrollments)
    # The template needs one resume URL per course card, and dashboard.html
    # renders cards for enrollments plus entitlements, so pad with empty
    # entries for the entitlement cards.
resume_button_urls += ['' for entitlement in course_entitlements]
context.update({
'resume_button_urls': resume_button_urls
})
response = render_to_response('dashboard.html', context)
set_user_info_cookie(response, request)
return response
| agpl-3.0 | -8,119,874,705,585,200,000 | 40.85101 | 117 | 0.672962 | false |
xorpaul/shinken | test/test_properties_defaults.py | 1 | 26460 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012:
# Hartmut Goebel <[email protected]>
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
"""
Test default values for item types.
"""
import unittest
import __import_shinken
from shinken.property import UnusedProp, none_object
import shinken.daemon
from shinken_test import *
class PropertiesTester(object):
def test_unused_properties(self):
item = self.item # shortcut
for name in self.unused_props:
self.assertIn(name, item.properties,
msg='property %r not found in %s' % (name, self.item.my_type))
self.assertIsInstance(item.properties[name], UnusedProp)
def test_properties_without_default(self):
item = self.item # shortcut
for name in self.without_default:
self.assertIn(name, item.properties,
msg='property %r not found in %s' % (name, self.item.my_type))
self.assertIs(item.properties[name].default, none_object,
msg='property %r is not `none_object` but %r' % (name, item.properties[name]))
self.assertTrue(item.properties[name].required)
def test_default_values(self):
item = self.item # shortcut
for name, value in self.properties.iteritems():
self.assertIn(name, item.properties,
msg='property %r not found in %s' % (name, self.item.my_type))
if hasattr(item.properties[name], 'default'):
if item.properties[name].default != value:
print "%s, %s: %s, %s" % (name, value, item.properties[name].default, value)
self.assertEqual(item.properties[name].default, value)
def test_all_props_are_tested(self):
item = self.item # shortcut
prop_names = set(list(self.properties.keys()) + self.unused_props + self.without_default)
for name in item.properties:
if name.startswith('$') and name.endswith('$'):
continue
self.assertIn(name, prop_names,
msg='unknown property %r found' % name)
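# Each concrete test case below only declares the three class attributes that
# the PropertiesTester mixin consumes (unused_props, without_default and
# properties, mapping name to expected default), plus a setUp() that
# instantiates the item under test.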
class TestConfig(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = [
'log_file', 'object_cache_file', 'precached_object_file',
'temp_file', 'status_file', 'status_update_interval',
'command_check_interval', 'external_command_buffer_slots',
'check_for_updates', 'bare_update_checks',
'retain_state_information', 'use_retained_program_state',
'use_retained_scheduling_info',
'retained_host_attribute_mask',
'retained_service_attribute_mask',
'retained_process_host_attribute_mask',
'retained_process_service_attribute_mask',
'retained_contact_host_attribute_mask',
'retained_contact_service_attribute_mask', 'sleep_time',
'service_inter_check_delay_method',
'service_interleave_factor', 'max_concurrent_checks',
'check_result_reaper_frequency',
'max_check_result_reaper_time', 'check_result_path',
'max_check_result_file_age', 'host_inter_check_delay_method',
'free_child_process_memory', 'child_processes_fork_twice',
'admin_email', 'admin_pager', 'event_broker_options',
'debug_file', 'debug_level', 'debug_verbosity',
'max_debug_file_size']
without_default = []
properties = dict([
('prefix', '/usr/local/shinken/'),
('workdir', '/var/run/shinken/'),
('config_base_dir', ''),
('modulesdir', '/var/lib/shinken/modules'),
('use_local_log', '1'),
('log_level', 'WARNING'),
('local_log', 'arbiterd.log'),
('resource_file', '/tmp/resources.txt'),
('shinken_user', shinken.daemon.get_cur_user()),
('shinken_group', shinken.daemon.get_cur_group()),
('enable_notifications', '1'),
('execute_service_checks', '1'),
('accept_passive_service_checks', '1'),
('execute_host_checks', '1'),
('accept_passive_host_checks', '1'),
('enable_event_handlers', '1'),
('log_rotation_method', 'd'),
('log_archive_path', '/usr/local/shinken/var/archives'),
('check_external_commands', '1'),
('command_file', ''),
('lock_file', 'arbiterd.pid'),
('state_retention_file', ''),
('retention_update_interval', '60'),
('use_syslog', '0'),
('log_notifications', '1'),
('log_service_retries', '1'),
('log_host_retries', '1'),
('log_event_handlers', '1'),
('log_initial_states', '1'),
('log_external_commands', '1'),
('log_passive_checks', '1'),
('global_host_event_handler', ''),
('global_service_event_handler', ''),
('max_service_check_spread', '30'),
('max_host_check_spread', '30'),
('interval_length', '60'),
('auto_reschedule_checks', '1'),
('auto_rescheduling_interval', '1'),
('auto_rescheduling_window', '180'),
('use_aggressive_host_checking', '0'),
('translate_passive_host_checks', '1'),
('passive_host_checks_are_soft', '1'),
('enable_predictive_host_dependency_checks', '1'),
('enable_predictive_service_dependency_checks', '1'),
('cached_host_check_horizon', '0'),
('cached_service_check_horizon', '0'),
('use_large_installation_tweaks', '0'),
('enable_environment_macros', '1'),
('enable_flap_detection', '1'),
('low_service_flap_threshold', '20'),
('high_service_flap_threshold', '30'),
('low_host_flap_threshold', '20'),
('high_host_flap_threshold', '30'),
('soft_state_dependencies', '0'),
('service_check_timeout', '60'),
('host_check_timeout', '30'),
('event_handler_timeout', '30'),
('notification_timeout', '30'),
('ocsp_timeout', '15'),
('ochp_timeout', '15'),
('perfdata_timeout', '5'),
('obsess_over_services', '0'),
('ocsp_command', ''),
('obsess_over_hosts', '0'),
('ochp_command', ''),
('process_performance_data', '1'),
('host_perfdata_command', ''),
('service_perfdata_command', ''),
('host_perfdata_file', ''),
('service_perfdata_file', ''),
('host_perfdata_file_template', '/tmp/host.perf'),
('service_perfdata_file_template', '/tmp/host.perf'),
('host_perfdata_file_mode', 'a'),
('service_perfdata_file_mode', 'a'),
('host_perfdata_file_processing_interval', '15'),
('service_perfdata_file_processing_interval', '15'),
('host_perfdata_file_processing_command', ''),
('service_perfdata_file_processing_command', None),
('check_for_orphaned_services', '1'),
('check_for_orphaned_hosts', '1'),
('check_service_freshness', '1'),
('service_freshness_check_interval', '60'),
('check_host_freshness', '1'),
('host_freshness_check_interval', '60'),
('additional_freshness_latency', '15'),
('enable_embedded_perl', '1'),
('use_embedded_perl_implicitly', '0'),
('date_format', None),
('use_timezone', ''),
('illegal_object_name_chars', '`~!$%^&*"|\'<>?,()='),
('illegal_macro_output_chars', ''),
('use_regexp_matching', '0'),
('use_true_regexp_matching', None),
('broker_module', ''),
('modified_attributes', 0L),
('daemon_enabled', '1'),
# Shinken specific
('idontcareaboutsecurity', '0'),
('flap_history', '20'),
('max_plugins_output_length', '8192'),
('no_event_handlers_during_downtimes', '0'),
('cleaning_queues_interval', '900'),
('disable_old_nagios_parameters_whining', '0'),
('enable_problem_impacts_states_change', '0'),
('resource_macros_names', []),
# SSL part
('use_ssl', '0'),
('server_key', 'etc/certs/server.key'),
('ca_cert', 'etc/certs/ca.pem'),
('server_cert', 'etc/certs/server.cert'),
('hard_ssl_name_check', '0'),
('human_timestamp_log', '0'),
# Discovery part
('strip_idname_fqdn', '1'),
('runners_timeout', '3600'),
('pack_distribution_file', 'pack_distribution.dat'),
# WebUI part
('webui_lock_file', 'webui.pid'),
('webui_port', '8080'),
('webui_host', '0.0.0.0'),
('use_multiprocesses_serializer', '0'),
('daemon_thread_pool_size', '8'),
('timeout_exit_status', '2'),
])
def setUp(self):
from shinken.objects.config import Config
self.item = Config()
class TestCommand(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['command_name', 'command_line']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('poller_tag', 'None'),
('reactionner_tag', 'None'),
('module_type', None),
('timeout', '-1'),
('enable_environment_macros', 0),
])
def setUp(self):
from shinken.objects.command import Command
self.item = Command()
class TestContactgroup(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['members', 'contactgroup_name', 'alias']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('unknown_members', []),
('id', 0),
])
def setUp(self):
from shinken.objects.contactgroup import Contactgroup
self.item = Contactgroup()
class TestContact(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'contact_name',
'host_notification_period', 'service_notification_period',
'host_notification_options', 'service_notification_options',
'host_notification_commands', 'service_notification_commands'
]
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('alias', 'none'),
('contactgroups', ''),
('host_notifications_enabled', '1'),
('service_notifications_enabled', '1'),
('min_business_impact', '0'),
('email', 'none'),
('pager', 'none'),
('address1', 'none'),
('address2', 'none'),
('address3', 'none'),
('address4', 'none'),
('address5', 'none'),
('address6', 'none'),
('can_submit_commands', '0'),
('is_admin', '0'),
('retain_status_information', '1'),
('notificationways', ''),
('password', 'NOPASSWORDSET'),
])
def setUp(self):
from shinken.objects.contact import Contact
self.item = Contact()
class TestDiscoveryrule(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['discoveryrule_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('creation_type', 'service'),
('discoveryrule_order', '0'),
])
def setUp(self):
from shinken.objects.discoveryrule import Discoveryrule
self.item = Discoveryrule()
class TestDiscoveryrun(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['discoveryrun_name', 'discoveryrun_command']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
])
def setUp(self):
from shinken.objects.discoveryrun import Discoveryrun
self.item = Discoveryrun()
class TestEscalation(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['escalation_name', 'first_notification', 'last_notification', 'first_notification_time', 'last_notification_time', 'contacts', 'contact_groups']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('notification_interval', '-1'),
('escalation_period', ''),
('escalation_options', 'd,u,r,w,c'),
])
def setUp(self):
from shinken.objects.escalation import Escalation
self.item = Escalation()
class TestHostdependency(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['dependent_host_name', 'host_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('dependent_hostgroup_name', ''),
('hostgroup_name', ''),
('inherits_parent', '0'),
('execution_failure_criteria', 'n'),
('notification_failure_criteria', 'n'),
('dependency_period', ''),
])
def setUp(self):
from shinken.objects.hostdependency import Hostdependency
self.item = Hostdependency()
class TestHostescalation(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'host_name', 'hostgroup_name',
'first_notification', 'last_notification',
'contacts', 'contact_groups'
]
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('notification_interval', '30'),
('escalation_period', ''),
('escalation_options', 'd,u,r,w,c'),
])
def setUp(self):
from shinken.objects.hostescalation import Hostescalation
self.item = Hostescalation()
class TestHostextinfo(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['host_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('notes', ''),
('notes_url', ''),
('icon_image', ''),
('icon_image_alt', ''),
('vrml_image', ''),
('statusmap_image', ''),
('2d_coords', ''),
('3d_coords', ''),
])
def setUp(self):
from shinken.objects.hostextinfo import HostExtInfo
self.item = HostExtInfo()
class TestHostgroup(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['members', 'hostgroup_name', 'alias']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('unknown_members', []),
('id', 0),
('notes', ''),
('notes_url', ''),
('action_url', ''),
('realm', ''),
])
def setUp(self):
from shinken.objects.hostgroup import Hostgroup
self.item = Hostgroup()
class TestHost(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'host_name', 'alias', 'address',
'max_check_attempts', 'check_period', 'notification_period']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('display_name', ''),
('parents', ''),
('hostgroups', ''),
('check_command', '_internal_host_up'),
('initial_state', 'u'),
('check_interval', '0'),
('retry_interval', '0'),
('active_checks_enabled', '1'),
('passive_checks_enabled', '1'),
('obsess_over_host', '0'),
('check_freshness', '0'),
('freshness_threshold', '0'),
('event_handler', ''),
('event_handler_enabled', '0'),
('low_flap_threshold', '25'),
('high_flap_threshold', '50'),
('flap_detection_enabled', '1'),
('flap_detection_options', 'o,d,u'),
('process_perf_data', '1'),
('retain_status_information', '1'),
('retain_nonstatus_information', '1'),
('contacts', ''),
('contact_groups', ''),
('notification_interval', '60'),
('first_notification_delay', '0'),
('notification_options', 'd,u,r,f'),
('notifications_enabled', '1'),
('stalking_options', ''),
('notes', ''),
('notes_url', ''),
('action_url', ''),
('icon_image', ''),
('icon_image_alt', ''),
('icon_set', ''),
('vrml_image', ''),
('statusmap_image', ''),
('2d_coords', ''),
('3d_coords', ''),
('failure_prediction_enabled', '0'),
('realm', None),
('poller_tag', 'None'),
('reactionner_tag', 'None'),
('resultmodulations', ''),
('business_impact_modulations', ''),
('escalations', ''),
('maintenance_period', ''),
('business_impact', '2'),
('trigger', ''),
('trigger_name', ''),
('time_to_orphanage', '300'),
('trending_policies', ''),
('checkmodulations', ''),
('macromodulations', ''),
('custom_views', ''),
('service_overrides', ''),
('business_rule_output_template', ''),
('business_rule_smart_notifications', '0'),
('business_rule_downtime_as_ack', '0'),
('labels', ''),
])
def setUp(self):
from shinken.objects.host import Host
self.item = Host()
class TestModule(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['module_name', 'module_type']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('modules', ''),
])
def setUp(self):
from shinken.objects.module import Module
self.item = Module()
class TestNotificationway(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'notificationway_name',
'host_notification_period', 'service_notification_period',
'host_notification_options', 'service_notification_options',
'host_notification_commands', 'service_notification_commands']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('host_notifications_enabled', '1'),
('service_notifications_enabled', '1'),
('min_business_impact', '0'),
])
def setUp(self):
from shinken.objects.notificationway import NotificationWay
self.item = NotificationWay()
class TestPack(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['pack_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
])
def setUp(self):
from shinken.objects.pack import Pack
self.item = Pack()
class TestRealm(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['members', 'realm_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('unknown_members', []),
('id', 0),
('realm_members', ''),
('higher_realms', ''),
('default', '0'),
('broker_complete_links', '0'),
])
def setUp(self):
from shinken.objects.realm import Realm
self.item = Realm()
class TestResultmodulation(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['resultmodulation_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('exit_codes_match', ''),
('exit_code_modulation', None),
('modulation_period', None),
])
def setUp(self):
from shinken.objects.resultmodulation import Resultmodulation
self.item = Resultmodulation()
class TestServicedependency(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['dependent_host_name', 'dependent_service_description', 'host_name', 'service_description']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('dependent_hostgroup_name', ''),
('hostgroup_name', ''),
('inherits_parent', '0'),
('execution_failure_criteria', 'n'),
('notification_failure_criteria', 'n'),
('dependency_period', ''),
('explode_hostgroup', '0'),
])
def setUp(self):
from shinken.objects.servicedependency import Servicedependency
self.item = Servicedependency()
class TestServiceescalation(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'host_name', 'hostgroup_name',
'service_description',
'first_notification', 'last_notification',
'contacts', 'contact_groups']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('notification_interval', '30'),
('escalation_period', ''),
('escalation_options', 'd,u,r,w,c'),
])
def setUp(self):
from shinken.objects.serviceescalation import Serviceescalation
self.item = Serviceescalation()
class TestServiceextinfo(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['host_name', 'service_description']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('notes', ''),
('notes_url', ''),
('icon_image', ''),
('icon_image_alt', ''),
])
def setUp(self):
from shinken.objects.serviceextinfo import ServiceExtInfo
self.item = ServiceExtInfo()
class TestServicegroup(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['members', 'servicegroup_name', 'alias']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('unknown_members', []),
('id', 0),
('notes', ''),
('notes_url', ''),
('action_url', ''),
])
def setUp(self):
from shinken.objects.servicegroup import Servicegroup
self.item = Servicegroup()
class TestService(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = [
'host_name', 'service_description',
'check_command', 'max_check_attempts', 'check_interval',
'retry_interval', 'check_period', 'notification_period']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('hostgroup_name', ''),
('display_name', ''),
('servicegroups', ''),
('is_volatile', '0'),
('initial_state', 'o'),
('active_checks_enabled', '1'),
('passive_checks_enabled', '1'),
('obsess_over_service', '0'),
('check_freshness', '0'),
('freshness_threshold', '0'),
('event_handler', ''),
('event_handler_enabled', '0'),
('low_flap_threshold', '-1'),
('high_flap_threshold', '-1'),
('flap_detection_enabled', '1'),
('flap_detection_options', 'o,w,c,u'),
('process_perf_data', '1'),
('retain_status_information', '1'),
('retain_nonstatus_information', '1'),
('notification_interval', '60'),
('first_notification_delay', '0'),
('notification_options', 'w,u,c,r,f,s'),
('notifications_enabled', '1'),
('contacts', ''),
('contact_groups', ''),
('stalking_options', ''),
('notes', ''),
('notes_url', ''),
('action_url', ''),
('icon_image', ''),
('icon_image_alt', ''),
('icon_set', ''),
('failure_prediction_enabled', '0'),
('parallelize_check', '1'),
('poller_tag', 'None'),
('reactionner_tag', 'None'),
('resultmodulations', ''),
('business_impact_modulations', ''),
('escalations', ''),
('maintenance_period', ''),
('duplicate_foreach', ''),
('default_value', ''),
('business_impact', '2'),
('trigger', ''),
('trigger_name', ''),
('time_to_orphanage', '300'),
('trending_policies', ''),
('checkmodulations', ''),
('macromodulations', ''),
('aggregation', ''),
('service_dependencies', ''),
('custom_views', ''),
('merge_host_contacts', '0'),
('business_rule_output_template', ''),
('business_rule_smart_notifications', '0'),
('business_rule_downtime_as_ack', '0'),
('labels', ''),
])
def setUp(self):
from shinken.objects.service import Service
self.item = Service()
class TestTimeperiod(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['timeperiod_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('alias', ''),
('register', '1'),
('dateranges', []),
('exclude', []),
('is_active', '0'),
])
def setUp(self):
from shinken.objects.timeperiod import Timeperiod
self.item = Timeperiod()
class TestTrigger(PropertiesTester, ShinkenTest, unittest.TestCase):
unused_props = []
without_default = ['trigger_name']
properties = dict([
('imported_from', 'unknown'),
('use', ''),
('name', ''),
('code_src', ''),
])
def setUp(self):
from shinken.objects.trigger import Trigger
self.item = Trigger()
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | 3,819,793,554,402,088,400 | 29.767442 | 167 | 0.543197 | false |
chryswoods/SireTests | unittests/SireMove/rigidbodymd.py | 1 | 1966 |
from Sire.Mol import *
from Sire.IO import *
from Sire.Vol import *
from Sire.FF import *
from Sire.MM import *
from Sire.CAS import *
from Sire.Maths import *
from Sire.Qt import *
from Sire.Units import *
from Sire.System import *
from Sire.Move import *
from Sire.Stream import *
import sys
mols = PDB().read("test/io/water.pdb")
print("Read in %d molecules!" % mols.nMolecules())
mol = mols.moleculeAt(0).molecule()
mol = mol.edit().atom( AtomName("O00") ) \
.setProperty("LJ", LJParameter(3.15363*angstrom, \
0.15500*kcal_per_mol)).molecule() \
.atom( AtomName("H01") ) \
.setProperty("charge", 0.520 * mod_electron).molecule() \
.atom( AtomName("H02") ) \
.setProperty("charge", 0.520 * mod_electron).molecule() \
.atom( AtomName("M03") ) \
.setProperty("charge", -1.04 * mod_electron).molecule() \
.commit()
charges = mol.property("charge")
ljs = mol.property("LJ")
cljff = InterCLJFF("water-water")
cljff.add(mol)
solvent = MoleculeGroup("solvent")
solvent.add(mol)
for i in range(1,7):
mol = mols.moleculeAt(i).molecule()
mol = mol.edit().rename("T4P") \
.setProperty("charge", charges) \
.setProperty("LJ", ljs) \
.commit()
solvent.add(mol)
cljff.add(mol)
system = System()
system.add(solvent)
system.add(cljff)
print(system.energy())
rbmove = MolecularDynamics( solvent, DLMRigidBody(), 1*femtosecond )
#rbmove.setEnergyComponent( cljff.components().coulomb() )
PDB().write(system.molecules(), "test0000.pdb")
for i in range(1,1000):
rbmove.move(system, 10)
print(i, system.energy())
print(rbmove.kineticEnergy(), (system.energy() + rbmove.kineticEnergy()))
PDB().write(system.molecules(), "test%0004d.pdb" % i)
| gpl-2.0 | 7,023,810,924,964,318,000 | 25.931507 | 86 | 0.581384 | false |
houssemFat/bloodOn | bloodon/accounts/social/providers/google/views.py | 1 | 1089 |
import requests
from bloodon.accounts.social.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import GoogleProvider
class GoogleOAuth2Adapter(OAuth2Adapter):
provider_id = GoogleProvider.id
access_token_url = 'https://accounts.google.com/o/oauth2/token'
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
profile_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
def complete_login(self, request, token, **kwargs):
resp = requests.get(self.profile_url,
params={'access_token': token.token,
'alt': 'json'})
extra_data = resp.json()
provider = self.get_provider()
login = provider.social_login_from_response(request, extra_data)
return login
oauth2_login = OAuth2LoginView.adapter_view(GoogleOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(GoogleOAuth2Adapter)
| mit | -3,685,332,758,195,466,000 | 39.333333 | 79 | 0.613407 | false |
dtimes6/JustForFun | hihi.sleekxmpp.aiml.py | 1 | 2851 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os.path
import sys
import logging
import getpass
import aiml
from optparse import OptionParser
import sleekxmpp
if sys.version_info < (3, 0):
from sleekxmpp.util.misc_ops import setdefaultencoding
setdefaultencoding('utf8')
else:
raw_input = input
class HiHiBot(sleekxmpp.ClientXMPP):
def __init__(self, jid, password):
self.aiml = aiml.Kernel()
if os.path.isfile("standard.brn"):
self.aiml.bootstrap(brainFile="standard.brn")
else:
self.aiml.bootstrap(learnFiles="std-startup.xml", commands="load aiml b")
self.aiml.saveBrain("standard.brn")
self.aiml.setBotPredicate("name", "海洋")
sleekxmpp.ClientXMPP.__init__(self, jid, password)
self.add_event_handler("session_start", self.start)
self.add_event_handler("message", self.message)
def start(self, event):
self.send_presence()
self.get_roster()
def message(self, msg):
if msg['type'] in ('chat', 'normal'):
result = self.aiml.respond(msg["body"], msg["from"])
if result:
msg.reply(result).send()
if __name__ == '__main__':
# Setup the command line arguments.
optp = OptionParser()
# Output verbosity options.
optp.add_option('-q', '--quiet', help='set logging to ERROR',
action='store_const', dest='loglevel',
const=logging.ERROR, default=logging.INFO)
optp.add_option('-d', '--debug', help='set logging to DEBUG',
action='store_const', dest='loglevel',
const=logging.DEBUG, default=logging.INFO)
optp.add_option('-v', '--verbose', help='set logging to COMM',
action='store_const', dest='loglevel',
const=5, default=logging.INFO)
# JID and password options.
optp.add_option("-j", "--jid", dest="jid",
help="JID to use")
optp.add_option("-p", "--password", dest="password",
help="password to use")
opts, args = optp.parse_args()
# Setup logging.
logging.basicConfig(level=opts.loglevel,
format='%(levelname)-8s %(message)s')
if opts.jid is None:
opts.jid = raw_input("Username: ")
if opts.password is None:
opts.password = getpass.getpass("Password: ")
xmpp = HiHiBot(opts.jid, opts.password)
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0004') # Data Forms
xmpp.register_plugin('xep_0060') # PubSub
xmpp.register_plugin('xep_0199') # XMPP Ping
# Connect to the XMPP server and start processing XMPP stanzas.
if xmpp.connect():
xmpp.process(block=True)
print("Done")
else:
print("Unable to connect.")
| mit | -3,853,410,436,045,851,000 | 31.724138 | 85 | 0.596066 | false |
SeanEstey/Bravo | app/tests/main/test_leaderboard.py | 1 | 1190 |
'''app.tests.main.test_leaderboard'''
import logging, unittest, json
from flask import g
from app.tests.__init__ import *
from app import get_keys
from app.main import leaderboard
from logging import getLogger
log = getLogger(__name__)
class LeaderboardTests(unittest.TestCase):
def setUp(self):
init(self)
login_self(self)
login_client(self.client)
def tearDown(self):
logout(self.client)
def _test_get_all_ytd(self):
leaderboard.get_all_ytd('vec')
def test_get_rank(self):
leaderboard.get_rank('Deer Ridge', 'vec')
leaderboard.get_rank('Bowness', 'vec')
leaderboard.get_rank('Citadel', 'vec')
leaderboard.get_rank('Varsity', 'vec')
leaderboard.get_rank('Hawkwood', 'vec')
def _test_update_accts(self):
query = 'foo'
group = 'vec'
leaderboard.update_accts(query, group)
def _test_update_leaderboard_task(self):
from app.main import tasks
try:
tasks.update_leaderboard_accts.delay(group='vec')
except Exception as e:
log.debug('exc=%s', str(e), exc_info=True)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 6,378,706,726,770,708,000 | 27.333333 | 61 | 0.621849 | false |
SalesforceFoundation/mrbelvedereci | metaci/testresults/tests/test_importer.py | 1 | 1694 |
import pytest
from metaci.testresults.importer import import_test_results, populate_limit_fields
from metaci.testresults.models import TestClass, TestMethod, TestResult
@pytest.mark.django_db
class TestImporter:
def test_import_test_results(self, data):
num_test_classes = TestClass.objects.all().count()
num_test_methods = TestMethod.objects.all().count()
num_test_results = TestResult.objects.all().count()
with open("metaci/testresults/tests/junit_output.xml", "r") as f:
results = data["buildflow"].load_junit(f)
import_test_results(data["buildflow"], results, "Apex")
assert TestClass.objects.all().count() == num_test_classes + 1
assert TestMethod.objects.all().count() == num_test_methods + 2
assert TestResult.objects.all().count() == num_test_results + 2
test_result = TestResult.objects.get(method__name="test_method1")
assert test_result.duration == 5.99
assert test_result.outcome == "Pass"
def test_populate_limit_fields(self, data):
test_result = data["testresult"]
code_unit = {
"TESTING_LIMITS: Number of SOQL queries": {"used": 10, "allowed": 100},
"TESTING_LIMITS: Number of DML rows": {"used": 20, "allowed": 100},
}
populate_limit_fields(test_result, code_unit)
worst_limit = "test_dml_rows_percent"
worst_limit_percent = 20.0
assert test_result.worst_limit == worst_limit
assert test_result.worst_limit_percent == worst_limit_percent
assert test_result.worst_limit_test == worst_limit
assert test_result.worst_limit_test_percent == worst_limit_percent
| bsd-3-clause | -6,614,269,496,293,821,000 | 41.35 | 83 | 0.656434 | false |
gary-pickens/HouseMonitor | housemonitor/outputs/xmlrpc/outputthread.py | 1 | 3101 |
'''
Created on 2012-10-20
@author: Gary
'''
from housemonitor.lib.base import Base
from housemonitor.lib.constants import Constants
from SimpleXMLRPCServer import SimpleXMLRPCServer
import pprint
import threading
import time
import os
from housemonitor.inputs.dataenvelope import DataEnvelope
class XmlRpcOutputThread( Base, threading.Thread ):
'''
'''
__host = '0.0.0.0'
__port = 9002
__current_values = None
__input_queue = None
def __init__( self, current_values, input_queue ):
'''
'''
super( XmlRpcOutputThread, self ).__init__()
threading.Thread.__init__( self )
self.__current_values = current_values
self.__input_queue = input_queue
    @property
    def logger_name( self ):
        ''' Logging key for this output; make sure the matching entry exists
        in the logging configuration file. '''
return Constants.LogKeys.outputsXMLRPC
def change_dio( self, value, device, port, steps ):
try:
env = DataEnvelope( type=Constants.EnvelopeTypes.COMMAND, value=value,
device=device, port=port, steps=steps )
self.__input_queue.transmit( env, self.__input_queue.HIGH_PRIORITY )
self.logger.debug(
"send command: value = {} device = {} port = {} steps = {}".
format( value, device, port, steps ) )
except Exception as ex:
self.logger.exception( "Exception in {}".format( __name__ ) )
return value
def send_command( self, value, device, port, steps ):
try:
env = DataEnvelope( type=Constants.EnvelopeTypes.COMMAND, value=value,
device=device, port=port, steps=steps )
self.__input_queue.transmit( env, self.__input_queue.MID_PRIORITY )
self.logger.debug(
"send command: value = {} device = {} port = {} steps = {}".
format( value, device, port, steps ) )
except Exception as ex:
self.logger.exception( "Exception in {}".format( __name__ ) )
return value
def get_current_value( self, device, port ):
value = self.__current_values.get( device, port )
self.logger.debug(
"get current value: device = {} port = {} value = {}".
format( device, port, value ) )
return value
def get_current_values( self ):
self.logger.debug( 'get_current_values called' )
cv = self.__current_values.get()
self.logger.debug( 'current_values = ', pprint.pformat( cv ) )
return cv
def run( self ):
server = SimpleXMLRPCServer( ( self.__host, self.__port ), logRequests=False )
server.register_introspection_functions()
server.register_function( self.get_current_value )
server.register_function( self.get_current_values )
server.register_function( self.send_command )
server.register_function( self.change_dio )
server.serve_forever()
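        # Client-side sketch, assumed for illustration only (endpoint and
        # argument values are examples, not project configuration):
        #   import xmlrpclib
        #   proxy = xmlrpclib.ServerProxy( 'http://localhost:9002' )
        #   print proxy.get_current_values()
        #   proxy.send_command( 1, 'device', 'port', 0 )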
| mit | -2,624,293,600,049,674,000 | 34.238636 | 86 | 0.5911 | false |
florensacc/snn4hrl | regressors/latent_regressor.py | 1 | 11889 |
import numpy as np
from rllab.core.serializable import Serializable
from rllab.core.parameterized import Parameterized
from rllab.misc import logger
# the regressor will be choosen to be from the same distribution as the latents
from rllab.regressors.gaussian_mlp_regressor import GaussianMLPRegressor
from rllab.regressors.categorical_mlp_regressor import CategoricalMLPRegressor # could be Categorical_oneAxis
from sandbox.snn4hrl.regressors.categorical_recurrent_regressor import CategoricalRecurrentRegressor
from sandbox.snn4hrl.regressors.bernoulli_mlp_regressor import BernoulliMLPRegressor
from sandbox.snn4hrl.regressors.bernoulli_recurrent_regressor import BernoulliRecurrentRegressor
from rllab.optimizers.first_order_optimizer import FirstOrderOptimizer
class Latent_regressor(Parameterized, Serializable):
def __init__(
self,
env_spec,
policy,
recurrent=False,
predict_all=True,
obs_regressed='all',
act_regressed='all',
use_only_sign=False,
noisify_traj_coef=0,
optimizer=None, # this defaults to LBFGS
regressor_args=None, # here goes all args straight to the regressor: hidden_sizes, TR, step_size....
):
"""
:param predict_all: this is only for the recurrent case, to use all hidden states as predictions
:param obs_regressed: list of index of the obs variables used to fit the regressor. default string 'all'
:param act_regressed: list of index of the act variables used to fit the regressor. default string 'all'
:param regressor_args:
"""
self.env_spec = env_spec
self.policy = policy
self.latent_dim = policy.latent_dim
self.recurrent = recurrent
self.predict_all = predict_all
self.use_only_sign = use_only_sign
self.noisify_traj_coef = noisify_traj_coef
self.regressor_args = regressor_args
# decide what obs variables will be regressed upon
if obs_regressed == 'all':
self.obs_regressed = list(range(env_spec.observation_space.flat_dim))
else:
self.obs_regressed = obs_regressed
# decide what action variables will be regressed upon
if act_regressed == 'all':
self.act_regressed = list(range(env_spec.action_space.flat_dim))
else:
self.act_regressed = act_regressed
# shape the input dimension of the NN for the above decisions.
self.obs_act_dim = len(self.obs_regressed) + len(self.act_regressed)
Serializable.quick_init(self, locals()) # ??
if regressor_args is None:
regressor_args = dict()
if optimizer == 'first_order':
self.optimizer = FirstOrderOptimizer(
max_epochs=10, # both of these are to match Rocky's 10
batch_size=128,
)
elif optimizer is None:
self.optimizer = None
else:
raise NotImplementedError
if policy.latent_name == 'bernoulli':
if self.recurrent:
self._regressor = BernoulliRecurrentRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
predict_all=self.predict_all,
**regressor_args
)
else:
self._regressor = BernoulliMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
elif policy.latent_name == 'categorical':
if self.recurrent:
self._regressor = CategoricalRecurrentRegressor( # not implemented
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
# predict_all=self.predict_all,
**regressor_args
)
else:
self._regressor = CategoricalMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
elif policy.latent_name == 'normal':
self._regressor = GaussianMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
else:
raise NotImplementedError
def fit(self, paths):
logger.log('fitting the regressor...')
if self.recurrent:
observations = np.array([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.array([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0,
scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
latents = np.array([p['agent_infos']['latents'] for p in paths])
self._regressor.fit(obs_actions, latents) # the input shapes are (traj, time, dim)
else:
observations = np.concatenate([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate([p['agent_infos']["latents"] for p in paths])
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0,
scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
self._regressor.fit(obs_actions, latents.reshape((-1, self.latent_dim))) # why reshape??
logger.log('done fitting the regressor')
def predict(self, path):
if self.recurrent:
obs_actions = [np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]],
axis=1)] # is this the same??
else:
obs_actions = np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]], axis=1)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0, scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
return self._regressor.predict(obs_actions).flatten()
def get_output_p(self, path): # this gives the p_dist for every step: the latent posterior wrt obs_act
if self.recurrent:
obs_actions = [np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]],
axis=1)] # is this the same??
else:
obs_actions = np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]], axis=1)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0, scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
if self.policy.latent_name == 'bernoulli':
return self._regressor._f_p(obs_actions).flatten()
elif self.policy.latent_name == 'normal':
return self._regressor._f_pdists(obs_actions).flatten()
def get_param_values(self, **tags):
return self._regressor.get_param_values(**tags)
def set_param_values(self, flattened_params, **tags):
self._regressor.set_param_values(flattened_params, **tags)
def predict_log_likelihood(self, paths, latents):
if self.recurrent:
observations = np.array([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.array([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2) # latents must match first 2dim: (batch,time)
else:
observations = np.concatenate([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate(latents, axis=0)
if self.noisify_traj_coef:
noise = np.random.multivariate_normal(mean=np.zeros_like(np.mean(obs_actions, axis=0)),
cov=np.diag(np.mean(np.abs(obs_actions),
axis=0) * self.noisify_traj_coef),
size=np.shape(obs_actions)[0])
obs_actions += noise
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
return self._regressor.predict_log_likelihood(obs_actions, latents) # see difference with fit above...
def lowb_mutual(self, paths, times=(0, None)):
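        """Variational lower bound on the mutual information I(latents; trajectory):
        I >= H(latents) + E[log q(latents | obs, act)], with q given by this
        regressor (a Barber-Agakov style bound); `times` selects which timesteps
        of each path contribute.
        """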
if self.recurrent:
observations = np.array([p["observations"][times[0]:times[1], self.obs_regressed] for p in paths])
actions = np.array([p["actions"][times[0]:times[1], self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2)
latents = np.array([p['agent_infos']['latents'][times[0]:times[1]] for p in paths])
else:
observations = np.concatenate([p["observations"][times[0]:times[1], self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][times[0]:times[1], self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate([p['agent_infos']["latents"][times[0]:times[1]] for p in paths])
if self.noisify_traj_coef:
obs_actions += np.random.multivariate_normal(mean=np.zeros_like(np.mean(obs_actions,axis=0)),
cov=np.diag(np.mean(np.abs(obs_actions),
axis=0) * self.noisify_traj_coef),
size=np.shape(obs_actions)[0])
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
H_latent = self.policy.latent_dist.entropy(self.policy.latent_dist_info) # sum of entropies latents in
return H_latent + np.mean(self._regressor.predict_log_likelihood(obs_actions, latents))
def log_diagnostics(self, paths):
logger.record_tabular(self._regressor._name + 'LowerB_MI', self.lowb_mutual(paths))
logger.record_tabular(self._regressor._name + 'LowerB_MI_5first', self.lowb_mutual(paths, times=(0, 5)))
logger.record_tabular(self._regressor._name + 'LowerB_MI_5last', self.lowb_mutual(paths, times=(-5, None)))
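# Hypothetical usage sketch (argument values are assumptions for illustration,
# not taken from an actual snn4hrl config):
#   regressor = Latent_regressor(env_spec=env.spec, policy=policy,
#                                regressor_args=dict(hidden_sizes=(32, 32)))
#   regressor.fit(paths)                       # fit q(latent | obs, act)
#   logli = regressor.predict_log_likelihood(paths, latents)
#   regressor.log_diagnostics(paths)           # records the MI lower bounds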
| mit | 5,297,087,816,919,702,000 | 52.075893 | 120 | 0.568761 | false |
tomosoft-jp/SainSmartLcd | Graphic.py | 1 | 5968 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ST7735 import ST7735
class Graphic:
def __init__(self, pst7735):
self._st7735 = pst7735
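    # Bresenham's line algorithm: step along the major axis pixel by pixel and
    # use an integer error term to decide when to advance the minor axis. Note
    # that the end point is exclusive (range(x0, x1)); fillcirclehelper() below
    # relies on this when drawing spans of an exact length.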
def drawline(self, x0p, y0p, x1p, y1p, color):
if (x0p >= self._st7735.width) or (y0p >= self._st7735.height):
print " drawline x0, y0 Range error"
return
if (x1p >= self._st7735.width) or (y1p >= self._st7735.height):
print " drawline x1, y1 Range error"
return
x0 = x0p
y0 = y0p
x1 = x1p
y1 = y1p
steep = abs(y1 - y0) > abs(x1 - x0)
if steep:
x0, y0 = y0, x0
x1, y1 = y1, x1
if x0 > x1:
x0, x1 = x1, x0
y0, y1 = y1, y0
dx = x1 - x0
dy = abs(y1 - y0)
err = dx / 2
ystep = -1
if y0 < y1:
ystep = 1
for xx0 in range(x0, x1):
if steep:
self._st7735.dot(y0, xx0, color)
else:
self._st7735.dot(xx0, y0, color)
err -= dy
if err < 0:
y0 += ystep
err += dx
def drawrect(self, x, y, w, h, color):
if (x >= self._st7735.width) or (y >= self._st7735.height):
print " drawrect x, y Range error"
return
if ((x + w) >= self._st7735.width) or ((y + h) >= self._st7735.height):
print " drawrect w, h Range error"
return
self.drawline(x, y, x + w - 1, y, color)
self.drawline(x, y + h - 1, x + w - 1, y + h - 1, color)
self.drawline(x, y, x, y + h - 1, color)
self.drawline(x + w - 1, y, x + w - 1, y + h - 1, color)
def fillrect(self, x, y, w, h, color):
if (x >= self._st7735.width) or (y >= self._st7735.height):
print " fillrect x, y Range error"
return
# print " fillrect:{0:X}".format(x)
if (x + w - 1) >= self._st7735.width:
w = self._st7735.width - x
if (y + h - 1) >= self._st7735.height:
h = self._st7735.height - y
for xx in range(x, x + w):
for yy in range(y, y + h):
self._st7735.dot(xx, yy, color)
def fillscreen(self, color):
self.fillrect(0, 0, self._st7735.width, self._st7735.height, color)
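    # Midpoint circle algorithm: trace one octant with integer arithmetic only
    # and mirror each point into the other seven octants.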
def drawcircle(self, x0, y0, r, color):
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
self._st7735.dot(x0, y0 + r, color)
self._st7735.dot(x0, y0 - r, color)
self._st7735.dot(x0 + r, y0, color)
self._st7735.dot(x0 - r, y0, color)
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
self._st7735.dot(x0 + x, y0 + y, color)
self._st7735.dot(x0 - x, y0 + y, color)
self._st7735.dot(x0 + x, y0 - y, color)
self._st7735.dot(x0 - x, y0 - y, color)
self._st7735.dot(x0 + y, y0 + x, color)
self._st7735.dot(x0 - y, y0 + x, color)
self._st7735.dot(x0 + y, y0 - x, color)
self._st7735.dot(x0 - y, y0 - x, color)
def drawcirclehelper(self, x0, y0, r, cornername, color):
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
            if cornername & 0x4:
                self._st7735.dot(x0 + x, y0 + y, color)
                self._st7735.dot(x0 + y, y0 + x, color)
            if cornername & 0x2:
                self._st7735.dot(x0 + x, y0 - y, color)
                self._st7735.dot(x0 + y, y0 - x, color)
            if cornername & 0x8:
                self._st7735.dot(x0 - y, y0 + x, color)
                self._st7735.dot(x0 - x, y0 + y, color)
            if cornername & 0x1:
                self._st7735.dot(x0 - y, y0 - x, color)
                self._st7735.dot(x0 - x, y0 - y, color)
def fillcirclehelper(self, x0, y0, r, cornername, delta, color):
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
if cornername & 0x1:
self.drawline(x0 + x, y0 - y, x0 + x, y0 - y + (2 * y + 1 + delta), color)
self.drawline(x0 + y, y0 - x, x0 + y, y0 - x + (2 * x + 1 + delta), color)
if cornername & 0x2:
self.drawline(x0 - x, y0 - y, x0 - x, y0 - y + (2 * y + 1 + delta), color)
self.drawline(x0 - y, y0 - x, x0 - y, y0 - x + (2 * x + 1 + delta), color)
def fillcircle(self, x0, y0, r, color):
self.drawline(x0, y0 - r, x0, y0 - r + (2 * r + 1), color)
self.fillcirclehelper(x0, y0, r, 3, 0, color)
if __name__ == "__main__":
ST7735_TFTWIDTH = 128
ST7735_TFTHEIGHT = 160
ST7735_BLACK = 0x000000
ST7735_BLUE = 0x0000FF
ST7735_RED = 0xFF0000
ST7735_GREEN = 0x008000
ST7735_CYAN = 0x00FFFF
ST7735_MAGENTA = 0xFF00FF
ST7735_YELLOW = 0xFFFF00
ST7735_WHITE = 0xFFFFFF
st7735 = ST7735(ST7735_TFTWIDTH, ST7735_TFTHEIGHT)
graphic = Graphic(st7735)
try:
graphic.fillscreen(ST7735_RED)
graphic.drawline(10, 10, ST7735_TFTWIDTH - 10, ST7735_TFTHEIGHT - 10, ST7735_BLACK)
graphic.drawrect(0, 40, 20, 40, ST7735_CYAN)
graphic.fillrect(80, 60, 40, 20, ST7735_YELLOW)
graphic.drawcircle(64, 40, 15, ST7735_MAGENTA)
graphic.fillcircle(64, 120, 30, ST7735_GREEN)
st7735.sendbuf()
except KeyboardInterrupt:
print '\nbreak'
# GPIO.cleanup()
| mit | 2,563,048,761,483,400,700 | 30.083333 | 91 | 0.453586 | false |
noyainrain/micro | micro/templates.py | 1 | 1222 |
# micro
# Copyright (C) 2021 micro contributors
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU
# Lesser General Public License as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with this program.
# If not, see <http://www.gnu.org/licenses/>.
"""Server templates."""
MESSAGE_TEMPLATES = {
'email_auth': """
Subject: [{{ app.settings.title }}] Add email address
Hi there!
Here is the verification code to add your email address to {{ app.settings.title }}:
{{ auth }}
---
If you did not request to add an email address to {{ app.settings.title }}, someone else may
have entered your email address by mistake. In that case, please ignore this message, we
will not bother you again.
"""
}
| lgpl-3.0 | 4,537,489,739,628,792,300 | 36.030303 | 100 | 0.698854 | false |
ArnaudBelcour/Workflow_GeneList_Analysis | pathway_extraction/uniprot_retrieval_data.py | 1 | 4827 |
#!/usr/bin/env python3
import math
import pandas as pa
import six
from SPARQLWrapper import SPARQLWrapper, JSON
from tqdm import tqdm
from . import *
def extract_information_from_uniprot(results_dataframe):
    '''
    Requests the SPARQL endpoint of UniProt to retrieve, from Ensembl transcript IDs, GO terms, InterPro entries, Pfam/SUPFAM families and PROSITE profiles.
    The input dataframe associates each gene with the result of a BLAST search (hence the 'hypothetical protein' handling below).
    '''
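    # For every transcript ID in the Blast column, three SPARQL queries are sent
    # to the UniProt endpoint: (1) GO terms via up:classifiedWith, (2) EC numbers
    # via up:enzyme, and (3) rdfs:seeAlso cross-references, which are then sorted
    # into InterPro, SUPFAM, Pfam and PROSITE buckets.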
if any(results_dataframe['Blast'].str.contains('hypothetical protein')):
results_dataframe['Blast'] = results_dataframe['Blast'].str[len('CEP03957.1hypothetical protein '):]
results_dataframe['Blast'] = results_dataframe['Blast'].str.replace(', partial', '')
results_dataframe.set_index("Gene_Name", inplace=True)
for gene, row in tqdm(results_dataframe.iterrows(), total=len(results_dataframe.index)):
gos_found = []
datas_found = []
enzymes_found = []
interpros = []
supfams = []
pfams = []
prosites = []
for transcript_id in row['Blast'].split(','):
transcript = 'ensembl:' + transcript_id
sparql = SPARQLWrapper('http://beta.sparql.uniprot.org/sparql')
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?go
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
?protein up:classifiedWith ?go .
FILTER (regex(str(?go), "GO")) .
VALUES ?ensemblName {""" + transcript + """}
}
""")
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
for result in results["results"]["bindings"]:
gos_found.append(result["go"]["value"][31:].replace("_", ":"))
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?enzyme
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
?protein up:enzyme ?ec .
VALUES ?ensemblName {""" + transcript + """}
}
""")
results = sparql.query().convert()
for result in results["results"]["bindings"]:
if "enzyme" in result:
enzymes_found.append('ec:' + result["enzyme"]["value"][len('http://purl.uniprot.org/enzyme/'):])
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?data
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
?protein rdfs:seeAlso ?data .
VALUES ?ensemblName {""" + transcript + """}
}
""")
results = sparql.query().convert()
for result in results["results"]["bindings"]:
datas_found.append(result["data"]["value"][len('http://purl.uniprot.org/'):])
for data in datas_found:
if 'interpro' in data:
data = data[len('interpro/'):]
interpros.append(data)
if 'supfam' in data:
data = data[len('supfam/'):]
supfams.append(data)
if 'pfam' in data and 'supfam' not in data:
data = data[len('pfam/'):]
pfams.append(data)
if 'prosite' in data:
data = data[len('prosite/'):]
prosites.append(data)
if row['GOs'] == '':
results_dataframe.set_value(gene, 'GOs', ','.join(gos_found))
#if row['EnzymeCodes'] == '':
#results_dataframe.set_value(gene, 'EnzymeCodes', ','.join(enzymes_found))
if row['InterProScan'] == '':
results_dataframe.set_value(gene, 'InterProScan', ','.join(interpros))
#results_dataframe.set_value(gene, 'supFams', str(supfams))
#results_dataframe.set_value(gene, 'pfams', str(pfams))
#results_dataframe.set_value(gene, 'prosites', str(prosites))
results_dataframe.reset_index(inplace=True)
return results_dataframe
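# Hypothetical usage sketch (the file name is an assumption; the dataframe must
# provide the Gene_Name, Blast, GOs and InterProScan columns used above):
#   results = pa.read_csv('blast_results.tsv', sep='\t')
#   annotated = extract_information_from_uniprot(results)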
| agpl-3.0 | 201,268,150,668,236,600 | 37.309524 | 141 | 0.542987 | false |
killer923/alarm | Alarm.py | 1 | 5182 |
import os
import time
from Tkinter import Tk
from tkFileDialog import askopenfilename
def change_settings(first_time):
if first_time==0:
customizations=read_settings()
tone=customizations[0]
snooze=customizations[1]
settings=open("settings.txt","w")
settings.write("Please change only if you know what you are doing.\n")
settings.write("If you make a mistake simply delete this file.\n")
#set alarm tone
if first_time:
print "Select the alarm tone alarm tone: "
try:
Tk().withdraw()
except Exception as e:
print e
new_tone= askopenfilename()
print new_tone
settings.write("Alarm tone : "+new_tone+"\n")
else:
print "Current alarm tone: "+tone
print "Do you want to change the alarm tone:(Y|N) ",
response=raw_input()
if response=="y" or response=="Y":
try:
Tk().withdraw()
except Exception as e:
print e
new_tone=askopenfilename()
print new_tone
settings.write("Alarm tone : "+new_tone+"\n")
else:
settings.write("Alarm tone : "+tone+"\n")
#set snooze time
if first_time:
print "Enter the snooze time ( in minutes) :",
snooze=int(raw_input())
  if snooze<1 or snooze>10:
   check=0
  else:
   check=1
while(check<1):
print "The range for snooze time is 1 minute to 10 minutes."
print "Please enter snooze time again :",
snooze=int(raw_input())
if snooze>=1 and snooze<=10:
check=1
settings.write("Snooze time : "+str(snooze)+"\n")
else:
print "Current snooze time is :"+str(snooze)
print "Do you want to change the snooze time? (Y|N) ",
    response=raw_input()
    if response=="y" or response=="Y":
      print "Enter the new snooze time : ",
      snooze=int(raw_input())
      check=0
      if snooze>=1 and snooze<=10:
        check=1
      while(check<1):
        print "The range for snooze time is 1 minute to 10 minutes."
        print "Please enter snooze time again : ",
        snooze=int(raw_input())
        if snooze>=1 and snooze<=10:
          check=1
    settings.write("Snooze time : "+str(snooze)+"\n")
settings.close()
def create_settings():
print "Looks like you are using the program for the first time."
print "Thank you for choosing my program."
print "Please create settings for the program, you will be able to change them in the start of new run of the program."
change_settings(1)
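
# The settings.txt file written above ends up looking like this
# (illustrative values only):
#   Please change only if you know what you are doing.
#   If you make a mistake simply delete this file.
#   Alarm tone : C:/Users/me/Music/tone.mp3
#   Snooze time : 5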
def read_settings():
try:
settings=open("settings.txt","r")
except:
create_settings()
#print"ji"
settings=open("settings.txt","r")
try:
count=0
for line in settings:
#print count," ...",line
if count<2:
count=count+1
elif count==2:
tone=line
tone=tone.split(":")
#print "1==",tone
tone[1]=tone[1].split()[0]
tone1=tone[-1].split("/")
#print "2==",tone1
tone=tone[1]+":"
#print "3==",tone
tone1[-1]=tone1[-1].split("\\")[0]
if len(tone1)==1:
tone=tone+"\\"+str(tone1[0])
else:
for i in range(1,(len(tone1))):
tone=tone+"\\"+str(tone1[i])
#print "i=",i," ",tone
#tone=tone1.split()
#print tone
#tone=tone[0]
#print "tone="+tone
tone=tone.split("\n")[0]
count=count+1
#print count,tone
elif count==3: #read snooze time
snooze=line
snooze=snooze.split(":")
snooze=snooze[1].split()
snooze=int(snooze[0])
#print count,snooze
return [tone,snooze]
except Exception as x:
print count,x
print "There seems to be a problem with your settings file."
print "We will need to recreate it."
create_settings()
read_settings()
def ring(tone,snooze):
#print tone,str(snooze)
#print "Time to ring the alarm"
while 1:
os.startfile(tone)
time.sleep(snooze*60)
#ring(tone,snooze)
print "Come on Wake up... You are Getting Late ...."
def main():
print "Welcome"
print "Do you want to change settings? (Y|N) ",
response=raw_input()
if response=="y" or response=="Y":
change_settings(0)
customizations=read_settings()
#Get time to ring
print "Set time for alarm: "
#get hours
print " HH : ",
hh=int(raw_input())
check = 0
if hh<0 or hh>23:
check = -1
while check<0:
print " Hours does not exist, please enter again: ",
hh=int(raw_input())
    if hh<0 or hh>23:
check = -1
else:
check = 0
#get time
print " MM : ",
mm=int(raw_input())
check = 0
if mm<0 or mm>59:
check = -1
while check<0:
print " Minutes does not exist, please enter again: ",
mm=int(raw_input())
    if mm<0 or mm>59:
check = -1
else:
check = 0
#Get current time
sys_time=time.ctime()
sys_time=sys_time.split()
sys_time=sys_time[3].split(":")
sys_hh=int(sys_time[0])
sys_mm=int(sys_time[1])
#calculate sleeping time
if hh<sys_hh:
minutes=(60-sys_mm)+mm
hours=(23-sys_hh)+hh
elif hh==sys_hh:
if mm<sys_mm:
hours=23
minutes=(60-sys_mm)+mm
else:
hours=0
minutes=mm-sys_mm
else:
hours=hh-sys_hh-1
minutes=(60-sys_mm)+mm
  if minutes >= 60:
hours=hours+1
minutes=minutes-60
elif minutes<0:
hours=hours-1
minutes=minutes+60
print "Alarm will ring after "+str(hours)+" hours and "+str(minutes)+" minutes."
seconds=(hours*3600)+(minutes*60)
#print "Alarm will ring after "+str(seconds)+" seconds."
time.sleep(seconds)
print "The program woke up :) \n Time for you to wake up too."
#print customizations
ring(customizations[0],customizations[1])
if __name__=='__main__':
main()
| apache-2.0 | 738,152,008,914,051,300 | 24.653465 | 120 | 0.648784 | false |
pliz/gunfolds | tools/pathtreetools.py | 1 | 14198 | import sys
sys.path.append('./tools/')
from pathtree import PathTree
from ortools.constraint_solver import pywrapcp
from matplotlib.cbook import flatten
from functools import wraps
import numpy as np
import bisect
from sortedcontainers import SortedDict
import ipdb
class SolutionNotFoundInTime(Exception):
pass
def ptloopnum(pt):
"""
Given a PathTree object returns the number of loops in it
:param pt: PathTree object
:return: number of loops (n)
"""
def ptn(pt, n=0):
for e in pt.loopset:
if type(e) is int:
n += 1
continue
n += ptn(e, n=1)
return n
return ptn(pt)
def ptnodenum(pt):
"""
Given a PathTree object returns the number of latents that comprise it
:param pt: PathTree object
:return: number of nodes (n)
"""
n = pt.preset - 1
def ptn(pt, n=0):
for e in pt.loopset:
if type(e) is int:
n += e - 1
continue
n += ptn(e, n=1)
return n
return n + ptn(pt)
def ptelement(pt, w):
"""
An element generated by a PathTree with a given weight setting
:param pt: PathTree
:param w: a list of weights
:return: an integer
"""
n = pt.preset
def sumloops(pt, w):
n = 0
ls = list(pt.loopset)
for i in range(len(ls)):
if type(ls[i]) is int:
n += w[i] * ls[i]
continue
n += w[i][0] * ls[i].preset \
+ min(1, w[i][0]) * sumloops(ls[i], w[i][1])
return n
return n + sumloops(pt, w)
def weights_pt(pt, weights):
c = [0]
def crawl(pt, w, c):
wl = []
for e in pt.loopset:
if type(e) is int:
wl.append(w[c[0]])
c[0] += 1
continue
ww = w[c[0]]
c[0] += 1
wl.append([ww, crawl(e, w, c)])
return wl
return crawl(pt, weights, c)
def extraloops_pt(pt, loops): # loops are tuples (loop, weight)
c = [0]
def crawl(pt, l, c):
first = [l[c[0]]]
wl = []
for e in pt.loopset:
c[0] += 1
if type(e) is int:
wl.append(l[c[0]])
continue
wl.append(crawl(e, l, c))
return first + [wl]
return crawl(pt, loops, c)
def ptelement_extraloop(pt, w, eloops):
"""
An element generated by a PathTree with a given weight setting and extra loops on each level
:param pt: PathTree
:param w: a list of list of weights
:param eloops: a list of tuples with lengths of extra loops and their weights
:return: an integer
"""
n = pt.preset + eloops[0][0] * eloops[0][1]
def sumloops(pt, w, lps):
ls = list(pt.loopset)
n = 0
for i in range(len(ls)):
if type(ls[i]) is int:
n += w[i] * ls[i] + min(1, w[i]) * lps[i][0] * lps[i][1]
continue
n += w[i][0] * ls[i].preset \
+ min(1, w[i][0]) * (lps[i][0][0] * lps[i][0][1] + sumloops(ls[i], w[i][1], lps[i][1]))
return n
return n + sumloops(pt, w, eloops[1])
def isptelement_el(el, pt, w, eloops):
return el == ptelement_extraloop(pt, w, eloops)
def isptsubset_el(elist, pt, w, eloops):
for i in range(elist[-1]):
if isptelement_el(i, pt, w, eloops):
if not i in elist:
return False
return True
def isrightpt(el, elist, pt, w, eloops):
for i in range(elist[-1]):
if isptelement_el(i, pt, w, eloops):
if not i in elist:
return False
if i == el and not isptelement_el(i, pt, w, eloops):
return False
return True
def ptelements(pt, seqlen=100, verbose=False, maxloop=100):
"""
Generate first `seqlen` elements from a pathtree
:param pt: a path tree object from pathtree.py
:param seqlen: number of elements to generate in ascending order
:param verbose: whether to print debugging information
:return: a list of elements
"""
solver = pywrapcp.Solver("pt-elements")
# declare variables
weights = []
N = ptloopnum(pt)
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
# declare constraints
# solver.Add()
# run the solver
solution = solver.Assignment()
solution.Add(weights)
db = solver.Phase(weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
num_solutions = 0
els = set()
while solver.NextSolution():
w = [x.Value() for x in weights]
num_solutions += 1
els.add(ptelement(pt, w))
if len(els) == seqlen:
break
solver.EndSearch()
# output solutions
if verbose:
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
return list(els)
def isptelement(pt, element, verbose=False, maxloop=100):
"""
Check if an integer element is in the weight set represented by the path tree
:param pt: a path tree object from pathtree.py
:param element: an integer to check for presence in the weight
:param verbose: whether to print debugging information
:return: True or False
"""
solver = pywrapcp.Solver("isptelement")
# declare variables
weights = []
N = ptloopnum(pt)
if not N:
return element == pt.preset
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
wpt = weights_pt(pt, weights)
# declare constraints
solver.Add(element == ptelement(pt, wpt))
# run the solver
solution = solver.Assignment()
solution.Add(weights)
db = solver.Phase(weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
solution_exists = False
while solver.NextSolution():
solution_exists = True
break
solver.EndSearch()
# output solutions
if verbose:
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
return solution_exists
def loops_and_weights(solver, loops, weights):
"""
Add constraints to solver that make sure loops are not generated if subtree is not active due to a zero weight upstream
:param solver:
:param loops:
:param weights:
:return:
"""
def recurse(s, l, w):
for ww, ll in zip(w, l):
if type(ww) is list:
for e in flatten(ll):
s.Add((ww[0] == 0) <= (e == 0))
recurse(s, ll[1:], ww[1:])
else:
for e in flatten(ll):
s.Add((ww == 0) <= (e == 0))
recurse(solver, loops[1], weights)
def eloops_simplify(eloops):
l = []
for e in eloops:
if type(e) is list:
l.append(eloops_simplify(e))
else:
l.append(int(e[0].Value()))
return l
def ptaugmented(pt, eloops):
def augment(pt, ls):
pre = pt.preset
loop = pt.loopset
s = set()
if ls[0]:
s.add(ls[0])
for l, el in zip(loop, ls[1]):
if type(l) is int:
if not el:
s.add(l)
else:
s.add(PathTree({el}, pre=l))
continue
s.add(augment(l, el))
return PathTree(s, pre=pre)
t = augment(pt, eloops)
return t
def ptsubset(pt, elist):
for i in range(elist[-1]):
if isptelement(pt, i) and not i in elist:
return False
return True
def smallest_pt(ptlist):
if ptlist:
idx = np.argsort(map(ptnodenum, ptlist))
sol = ptlist[idx[0]]
else:
sol = None
return sol
def pairprint(pt1, pt2, k=40):
print np.c_[pt2seq(pt1, k), pt2seq(pt2, k)]
def etesteq(pt1, pt2, k=100):
a1 = np.asarray(pt2seq(pt1, k))
a2 = np.asarray(pt2seq(pt2, k))
return np.sum(a1 - a2) == 0
def keeptreegrow(pt, e, seq, cutoff=10, cap=1000):
t = None
while t is None:
t = growtree(pt, e, seq, cutoff=cutoff)
cutoff += 10
if cutoff > cap:
raise SolutionNotFoundInTime("Cannot keep the tree growing")
return t
def add_element(d, pt):
"""
Add a PathTree to dictionary d such that it is either appended to the list or added anew
Args:
d: a dictionary
pt: a PathTree
Returns:
"""
key = ptnodenum(pt)
if key in d:
d[key].append(pt)
else:
        d[key] = [pt]
def del_element(d, pt, key=None):
"""
Delete a PathTree from dictionary d such that it is either removed from the list or the list that only contains one element is removed
Args:
d: a dictionary
pt: a PathTree
Returns:
"""
if key is None:
key = ptnodenum(pt)
if len(d[key]) == 1:
del d[key]
else:
d[key].remove(pt)
def swap_elements(d, pt1, pt2, key=None):
del_element(d, pt1, key=key)
add_element(d, pt2)
def seq2pt(seq, verbose=False, cutoff=100):
if not seq:
return None
pt = PathTree({}, pre=seq[0])
pts = SortedDict() # PathTrees
pts[ptnodenum(pt)] = [pt]
for e in seq[1:]:
e_is_in = False
for key in pts:
for pt in pts[key]:
if verbose:
print e
try:
newpt = keeptreegrow(pt, e, seq, cutoff=cutoff)
swap_elements(pts, pt, newpt, key=key)
e_is_in = True
break
except SolutionNotFoundInTime:
continue
if not e_is_in:
newpt = PathTree({}, pre=e)
            add_element(pts, newpt)
return pt
def growtree(pt, element, ref_elements, verbose=False, maxloop=100, cutoff=100):
"""
Add a loop with the minimal length to a path tree to enable it to generate a given element and still be a subset of a given list
:param pt: a path tree object from pathtree.py
:param element: an integer to check for presence in the weight
    :param ref_elements: a (finite) list that should be a superset of numbers generated by the new path tree, for numbers smaller than ref_elements[-1]
:param verbose: whether to print debugging information
:return: a PathTree augmented with a new loop
"""
solver = pywrapcp.Solver("loop_an_element")
# PathTree already can generate that number. Just to foolproof
if isptelement(pt, element):
return pt
# declare variables
weights = [] # weights denoting how many times a loop is active (marginalized)
loops = [] # extra loops that can be potentially added
lweights = [] # weights for the extra loops (marginalized out in the end)
ltuples = [] # tuple list to hold loops and weights together
N = ptloopnum(pt) # number of loops in the PathTree
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
for i in range(N + 1):
w = solver.IntVar(0, maxloop, "lw[%04i]" % i)
l = solver.IntVar(0, maxloop, "l[%04i]" % i)
lweights.append(w) # loop related weight
loops.append(l)
ltuples.append((l, w))
eloops = extraloops_pt(pt, ltuples)
ws = weights_pt(pt, weights)
# declare constraints
solver.Add(solver.MemberCt(ptelement_extraloop(pt, ws, eloops), ref_elements))
solver.Add(element == ptelement_extraloop(pt, ws, eloops)) # make sure the element can be generated
solver.Add(solver.Count(loops, 0, len(loops) - 1)) # only one loop is on
solver.Add(solver.Count(lweights, 0, len(lweights) - 1)) # only one loop is weighted
for i in range(len(lweights)):
solver.Add((lweights[i] == 0) <= (loops[i] == 0)) # if a loop has weight zero then it can't be active
# solver.Add(lweights[i] >= loops[i])
loops_and_weights(solver, eloops, ws) # if a subtree is off (weight zero) no need to add loops
# run the solver
solution = solver.Assignment()
solution.Add(loops)
db = solver.Phase(loops + lweights + weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
numsol = 0
pts = []
while solver.NextSolution():
# print numsol,
new_pt = ptaugmented(pt, eloops_simplify(eloops))
if verbose:
print "trying PathTree: ", new_pt
if ptsubset(new_pt, ref_elements):
pts.append(new_pt)
if verbose:
print "OK PathTree: ", pts[-1]
numsol += 1
if numsol >= cutoff:
break
solver.EndSearch()
# output solutions
if verbose:
print "solutions:", numsol
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
print "for ", element, "solutions found ", numsol
return smallest_pt(pts)
def pt2seq(pt, num):
if not pt.loopset:
return [pt.preset]
i = 0
s = set()
while len(s) < num:
if isptelement(pt, i, maxloop=10 * num):
s.add(i)
i += 1
l = list(s)
l.sort()
return l
def s2spt(s): # convert edge set to pt
ss = set()
for e in s:
if type(e) is int:
ss.add(PathTree({0}, pre={e}))
continue
ss.add(e)
return ss
def spt_elements(spt, num):
"""
Generate numbers from a set of PathTrees
:param spt: set of PathTrees
:param num: number of elements (from the first) to generate
:return: list of num numbers
"""
i = 0
s = set()
while len(s) < num:
if issptelement(spt, i):
s.add(i)
i += 1
return list(s)
def issptelement(spt, element):
a = False
for pt in s2spt(spt):
a = a or isptelement(pt, element)
return a
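
if __name__ == '__main__':
    # Hedged usage sketch: a PathTree with preset 1 and a single loop of
    # length 2 should generate 1, 3, 5, ...  This assumes PathTree({2}, pre=1)
    # constructs that tree, as the calls elsewhere in this module suggest.
    _pt = PathTree({2}, pre=1)
    print 'number of loops:', ptloopnum(_pt)
    print 'first elements: ', pt2seq(_pt, 5)
    print 'is 7 generated? ', isptelement(_pt, 7)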
| gpl-3.0 | 8,853,819,413,874,525,000 | 25.538318 | 147 | 0.557191 | false |
fabawi/QuestionAnswering | qa/answering_engines/babi_ann/challenges.py | 1 | 5299 | # Challenges:
# Returns the name of a challenge given a number. The number of epochs is predefined
class Challenges:
def __init__(self):
self.challenge_en10k_filename = {
# all challenges
1: '{}tasks_1-20_v1-2/en-10k/qa1_single-supporting-fact_{}.txt',
2: '{}tasks_1-20_v1-2/en-10k/qa2_two-supporting-facts_{}.txt',
3: '{}tasks_1-20_v1-2/en-10k/qa3_three-supporting-facts_{}.txt',
4: '{}tasks_1-20_v1-2/en-10k/qa4_two-arg-relations_{}.txt',
5: '{}tasks_1-20_v1-2/en-10k/qa5_three-arg-relations_{}.txt',
6: '{}tasks_1-20_v1-2/en-10k/qa6_yes-no-questions_{}.txt',
7: '{}tasks_1-20_v1-2/en-10k/qa7_counting_{}.txt',
8: '{}tasks_1-20_v1-2/en-10k/qa8_lists-sets_{}.txt',
9: '{}tasks_1-20_v1-2/en-10k/qa9_simple-negation_{}.txt',
10: '{}tasks_1-20_v1-2/en-10k/qa10_indefinite-knowledge_{}.txt',
11: '{}tasks_1-20_v1-2/en-10k/qa11_basic-coreference_{}.txt',
12: '{}tasks_1-20_v1-2/en-10k/qa12_conjunction_{}.txt',
13: '{}tasks_1-20_v1-2/en-10k/qa13_compound-coreference_{}.txt',
14: '{}tasks_1-20_v1-2/en-10k/qa14_time-reasoning_{}.txt',
15: '{}tasks_1-20_v1-2/en-10k/qa15_basic-deduction_{}.txt',
16: '{}tasks_1-20_v1-2/en-10k/qa16_basic-induction_{}.txt',
17: '{}tasks_1-20_v1-2/en-10k/qa17_positional-reasoning_{}.txt',
18: '{}tasks_1-20_v1-2/en-10k/qa18_size-reasoning_{}.txt',
19: '{}tasks_1-20_v1-2/en-10k/qa19_path-finding_{}.txt',
20: '{}tasks_1-20_v1-2/en-10k/qa20_agents-motivations_{}.txt'
}
self.epochs_en10k_number = {
1: 38,
2: 48,
3: 94,
4: 65,
5: 83,
6: 100,
7: 63,
8: 70,
9: 99,
10: 54,
11: 32,
12: 51,
13: 43,
14: 96,
15: 37,
16: 23,
17: 96,
18: 95,
19: 100,
20: 33
}
self.challenge_en_filename = {
        # all challenges
1: '{}tasks_1-20_v1-2/en/qa1_single-supporting-fact_{}.txt',
2: '{}tasks_1-20_v1-2/en/qa2_two-supporting-facts_{}.txt',
3: '{}tasks_1-20_v1-2/en/qa3_three-supporting-facts_{}.txt',
4: '{}tasks_1-20_v1-2/en/qa4_two-arg-relations_{}.txt',
5: '{}tasks_1-20_v1-2/en/qa5_three-arg-relations_{}.txt',
6: '{}tasks_1-20_v1-2/en/qa6_yes-no-questions_{}.txt',
7: '{}tasks_1-20_v1-2/en/qa7_counting_{}.txt',
8: '{}tasks_1-20_v1-2/en/qa8_lists-sets_{}.txt',
9: '{}tasks_1-20_v1-2/en/qa9_simple-negation_{}.txt',
10: '{}tasks_1-20_v1-2/en/qa10_indefinite-knowledge_{}.txt',
11: '{}tasks_1-20_v1-2/en/qa11_basic-coreference_{}.txt',
12: '{}tasks_1-20_v1-2/en/qa12_conjunction_{}.txt',
13: '{}tasks_1-20_v1-2/en/qa13_compound-coreference_{}.txt',
14: '{}tasks_1-20_v1-2/en/qa14_time-reasoning_{}.txt',
15: '{}tasks_1-20_v1-2/en/qa15_basic-deduction_{}.txt',
16: '{}tasks_1-20_v1-2/en/qa16_basic-induction_{}.txt',
17: '{}tasks_1-20_v1-2/en/qa17_positional-reasoning_{}.txt',
18: '{}tasks_1-20_v1-2/en/qa18_size-reasoning_{}.txt',
19: '{}tasks_1-20_v1-2/en/qa19_path-finding_{}.txt',
20: '{}tasks_1-20_v1-2/en/qa20_agents-motivations_{}.txt'
}
self.epochs_en_number = {
1: 40,
2: 40,
3: 40,
4: 40,
5: 40,
6: 40,
7: 40,
8: 40,
9: 40,
10: 40,
11: 40,
12: 40,
13: 40,
14: 40,
15: 40,
16: 40,
17: 40,
18: 40,
19: 40,
20: 40
}
# In this list, each question task is defined as having the answer literal in the passage itself or a constant answer
# True means an answer is found in the passage, False means the answer is not in the passage
self.answer_in_passage = {
1: True,
2: True,
3: True,
4: True,
5: True,
6: False,
7: False,
8: True, #careful: this has two answers
9: False,
10: False,
11: True,
12: True,
13: True,
14: True,
15: True,
16: True,
17: False,
18: False,
19: False,
20: False
}
    def get_challenge(self, challenge_id, challenge_type):
        if challenge_type == 'en10k':
            challenge_filename = self.challenge_en10k_filename[challenge_id]
            number_epochs = self.epochs_en10k_number[challenge_id]
        elif challenge_type == 'en':
            challenge_filename = self.challenge_en_filename[challenge_id]
            number_epochs = self.epochs_en_number[challenge_id]
        else:
            raise ValueError('unknown challenge type: %s' % challenge_type)
        answer_in_passage = self.answer_in_passage[challenge_id]
        return challenge_filename, number_epochs, answer_in_passage
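
if __name__ == '__main__':
    # Minimal usage sketch: look up the file pattern, tuned epoch count and
    # answer-location flag for bAbI task 1 ('data/' is a hypothetical root).
    fname, epochs, in_passage = Challenges().get_challenge(1, 'en10k')
    print(fname.format('data/', 'train'))
    print('epochs: %d, answer in passage: %s' % (epochs, in_passage))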
| mit | 7,969,111,048,269,106,000 | 38.544776 | 125 | 0.483487 | false |
ArneBab/PyHurd | examples/showtrans.py | 1 | 2983 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
showtrans.py - show files` passive translator.
Copyright (C) 2008 Anatoly A. Kazantsev
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
import sys, errno, os
from optparse import OptionParser
from hurd import Port, O_NOTRANS, error
from mach import MACH_PORT_NULL
usage = 'Usage: %prog [OPTION...] FILE...'
description = """Show the passive translator of FILE...
A file argument of `-' prints the translator on the node attached to standard
input.
"""
parser = OptionParser(usage=usage, description=description)
parser.add_option('-p', '--prefix', dest='print_prefix',
action='store_true', default=None,
help="Always display `FILENAME: ' before translators")
parser.add_option('-P', '--no-prefix', dest='print_prefix',
action='store_false',
help="Never display `FILENAME: ' before translators")
parser.add_option('-s', '--silent', dest='silent', action='store_true',
default=False,
help='No output; useful when checking error status')
parser.add_option('-t', '--translated', dest='show_untrans',
action='store_false', default=True,
help='Only display files that have translators')
def print_node_trans (node, name):
if node is MACH_PORT_NULL:
error(0, -1, name)
else:
err, trans = node.get_translator()
if not err:
if not silent:
if print_prefix:
print '%s: %s' % (name, trans)
else:
print trans
global status
status = 0
elif err == errno.EINVAL:
if not silent and print_prefix and show_untrans:
print name
else:
error(0, err, name)
def main ():
options, args = parser.parse_args()
if len(args) == 0:
print usage
print "Try `%s --help' for more information." % sys.argv[0]
sys.exit()
global print_prefix, silent, show_untrans, status
status = 1
print_prefix = options.print_prefix
silent = options.silent
show_untrans = options.show_untrans
if not print_prefix:
print_prefix = len(args) > 1;
for arg in args:
if arg != '-':
print_node_trans (Port.lookup(arg, O_NOTRANS), arg)
else:
print_node_trans (Port.getdport(0), arg)
sys.exit(status)
if __name__ == "__main__":
main()
| gpl-2.0 | -56,086,262,759,934,080 | 28.245098 | 77 | 0.65471 | false |
alaeddine10/ggrc-core | src/ggrc/builder/json.py | 1 | 12563 | #i Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
import ggrc.builder
import ggrc.services
import iso8601
from datetime import datetime
from flask import _request_ctx_stack
from ggrc import db
from ggrc.models.reflection import AttributeInfo
from ggrc.services.util import url_for
from sqlalchemy.ext.associationproxy import AssociationProxy
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm.properties import RelationshipProperty
from werkzeug.exceptions import BadRequest
"""JSON resource state representation handler for gGRC models."""
def view_url_for(obj):
view = getattr(ggrc.views, obj.__class__.__name__, None)
return view.url_for(obj) if view else None
def get_json_builder(obj):
"""Instantiate or retrieve a JSON representation builder for the given
object.
"""
if type(obj) is type:
cls = obj
else:
cls = obj.__class__
# Lookup the builder instance in the builder module
builder = getattr(ggrc.builder, cls.__name__, None)
if not builder:
# Create the builder and cache it in the builder module
builder = Builder(cls)
setattr(ggrc.builder, cls.__name__, builder)
return builder
def publish(obj, inclusions=()):
"""Translate ``obj`` into a valid JSON value. Objects with properties are
translated into a ``dict`` object representing a JSON object while simple
values are returned unchanged or specially formatted if needed.
"""
publisher = get_json_builder(obj)
if publisher and hasattr(publisher, '_publish_attrs') \
and publisher._publish_attrs:
ret = {}
self_url = url_for(obj)
if self_url:
ret['selfLink'] = self_url
view_url = view_url_for(obj)
if view_url:
ret['viewLink'] = view_url
ret.update(publisher.publish_contribution(obj, inclusions))
return ret
# Otherwise, just return the value itself by default
return obj
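
# Hedged sketch of the dictionary publish() produces for a typical model
# instance (attribute names below are illustrative, not from a real model):
#   {'selfLink': '/api/widgets/1', 'viewLink': '/widgets/1',
#    'title': 'Some widget',
#    'owner': {'id': 7, 'href': '/api/people/7'}}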
def update(obj, json_obj):
"""Translate the state represented by ``json_obj`` into update actions
performed upon the model object ``obj``. After performing the update ``obj``
and ``json_obj`` should be equivalent representations of the model state.
"""
updater = get_json_builder(obj)
if updater:
updater.update(obj, json_obj)
#FIXME what to do if no updater??
#Nothing, perhaps log, assume omitted by design
def create(obj, json_obj):
"""Translate the state represented by ``json_obj`` into update actions
performed upon the new model object ``obj``. After performing the update
``obj`` and ``json_obj`` should be equivalent representations of the model
state.
"""
creator = get_json_builder(obj)
if creator:
creator.create(obj, json_obj)
class UpdateAttrHandler(object):
"""Performs the translation of a JSON state representation into update
actions performed on a model object instance.
"""
@classmethod
def do_update_attr(cls, obj, json_obj, attr):
"""Perform the update to ``obj`` required to make the attribute attr
equivalent in ``obj`` and ``json_obj``.
"""
if (hasattr(attr, '__call__')):
# The attribute has been decorated with a callable, grab the name and
# invoke the callable to get the value
attr_name = attr.attr_name
value = attr(cls, obj, json_obj)
else:
# Lookup the method to use to perform the update. Use reflection to
# key off of the type of the attribute and invoke the method of the
# same name.
attr_name = attr
class_attr = getattr(obj.__class__, attr_name)
method = getattr(cls, class_attr.__class__.__name__)
value = method(obj, json_obj, attr_name, class_attr)
setattr(obj, attr_name, value)
@classmethod
def InstrumentedAttribute(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an ``InstrumentedAttribute``"""
method = getattr(cls, class_attr.property.__class__.__name__)
return method(obj, json_obj, attr_name, class_attr)
@classmethod
def ColumnProperty(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``ColumnProperty``"""
method = getattr(
cls,
class_attr.property.expression.type.__class__.__name__,
cls.default_column_handler)
return method(obj, json_obj, attr_name, class_attr)
@classmethod
def default_column_handler(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a simple value column"""
return json_obj.get(attr_name)
@classmethod
def DateTime(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``Datetime`` column."""
value = json_obj.get(attr_name)
try:
return iso8601.parse_date(value) if value else None
except iso8601.ParseError as e:
raise BadRequest(
'Malformed DateTime {0} for parameter {1}. '
'Error message was: {2}'.format(value, attr_name, e.message)
)
@classmethod
def Date(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``Date`` column."""
value = json_obj.get(attr_name)
try:
return datetime.strptime(value, "%Y-%m-%d") if value else None
except ValueError as e:
raise BadRequest(
'Malformed Date {0} for parameter {1}. '
'Error message was: {2}'.format(value, attr_name, e.message)
)
@classmethod
def query_for(cls, rel_class, json_obj, attr_name, uselist):
"""Resolve the model object instance referred to by the JSON value."""
if uselist:
# The value is a collection of links, resolve the collection of objects
value = json_obj.get(attr_name)
rel_ids = [o[u'id'] for o in value] if value else []
if rel_ids:
return db.session.query(rel_class).filter(
rel_class.id.in_(rel_ids)).all()
else:
return []
else:
rel_obj = json_obj.get(attr_name)
if rel_obj:
try:
return db.session.query(rel_class).filter(
rel_class.id == rel_obj[u'id']).one()
        except TypeError:
raise TypeError(''.join(['Failed to convert attribute ', attr_name]))
return None
@classmethod
def RelationshipProperty(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``RelationshipProperty``."""
rel_class = class_attr.property.mapper.class_
return cls.query_for(
rel_class, json_obj, attr_name, class_attr.property.uselist)
@classmethod
def AssociationProxy(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an ``AssociationProxy``."""
rel_class = class_attr.remote_attr.property.mapper.class_
return cls.query_for(rel_class, json_obj, attr_name, True)
@classmethod
def property(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an object method decorated as a
``property``.
"""
#FIXME need a way to decide this. Require link? Use URNs?
# reflective approaches won't work as this is used for polymorphic
# properties
# rel_class = None
# return cls.query_for(rel_class, json_obj, attr_name, True)
if attr_name in json_obj:
url = json_obj[attr_name]['href']
rel_class_name = _request_ctx_stack.top.url_adapter.match(url, 'GET')[0]
from ggrc import models
rel_class = getattr(models, rel_class_name)
return cls.query_for(rel_class, json_obj, attr_name, False)
return None
class Builder(AttributeInfo):
"""JSON Dictionary builder for ggrc.models.* objects and their mixins."""
def generate_link_object_for(self, obj, inclusions, include):
"""Generate a link object for this object. If there are property paths
to be included specified in the ``inclusions`` parameter, those properties
will be added to the object representation. If the ``include`` parameter
is ``True`` the entire object will be represented in the result.
"""
if include:
return publish(obj, inclusions)
result = {'id': obj.id, 'href': url_for(obj)}
for path in inclusions:
if type(path) is not str and type(path) is not unicode:
attr_name, remaining_path = path[0], path[1:]
else:
attr_name, remaining_path = path, ()
result[attr_name] = self.publish_attr(obj, attr_name, remaining_path)
return result
def publish_link_collection(self, obj, attr_name, inclusions, include):
"""The ``attr_name`` attribute is a collection of object references;
translate the collection of object references into a collection of link
objects for the JSON dictionary representation.
"""
# FIXME: Remove the "if o is not None" when we can guarantee referential
# integrity
return [self.generate_link_object_for(o, inclusions, include)
for o in getattr(obj, attr_name) if o is not None]
def publish_link(self, obj, attr_name, inclusions, include):
"""The ``attr_name`` attribute is an object reference; translate the object
reference into a link object for the JSON dictionary representation.
"""
attr_value = getattr(obj, attr_name)
if attr_value:
return self.generate_link_object_for(attr_value, inclusions, include)
return None
def publish_attr(self, obj, attr_name, inclusions, include):
class_attr = getattr(obj.__class__, attr_name)
if isinstance(class_attr, AssociationProxy):
return self.publish_link_collection(obj, attr_name, inclusions, include)
elif isinstance(class_attr, InstrumentedAttribute) and \
isinstance(class_attr.property, RelationshipProperty):
if class_attr.property.uselist:
return self.publish_link_collection(
obj, attr_name, inclusions, include)
else:
return self.publish_link(obj, attr_name, inclusions, include)
elif isinstance(class_attr, property):
return self.publish_link(obj, attr_name, inclusions, include)
else:
return getattr(obj, attr_name)
def publish_attrs(self, obj, json_obj, inclusions):
"""Translate the state represented by ``obj`` into the JSON dictionary
``json_obj``.
The ``inclusions`` parameter can specify a tree of property paths to be
inlined into the representation. Leaf attributes will be inlined completely
if they are links to other objects. The inclusions data structure is a
list where the first segment of a path is a string and the next segment
is a list of segment paths. Here are some examples:
..
('directives')
[('directives'),('cycles')]
[('directives', ('audit_frequency','organization')),('cycles')]
"""
for attr in self._publish_attrs:
if hasattr(attr, '__call__'):
attr_name = attr.attr_name
else:
attr_name = attr
local_inclusion = ()
for inclusion in inclusions:
if inclusion[0] == attr_name:
local_inclusion = inclusion
break
json_obj[attr_name] = self.publish_attr(
obj, attr_name, local_inclusion[1:], len(local_inclusion) > 0)
@classmethod
def do_update_attrs(cls, obj, json_obj, attrs):
"""Translate every attribute in ``attrs`` from the JSON dictionary value
to a value or model object instance for references set for the attribute
in ``obj``.
"""
for attr_name in attrs:
UpdateAttrHandler.do_update_attr(obj, json_obj, attr_name)
def update_attrs(self, obj, json_obj):
"""Translate the state representation given by ``json_obj`` into the
model object ``obj``.
"""
self.do_update_attrs(obj, json_obj, self._update_attrs)
def create_attrs(self, obj, json_obj):
"""Translate the state representation given by ``json_obj`` into the new
model object ``obj``.
"""
self.do_update_attrs(obj, json_obj, self._create_attrs)
def publish_contribution(self, obj, inclusions):
"""Translate the state represented by ``obj`` into a JSON dictionary"""
json_obj = {}
self.publish_attrs(obj, json_obj, inclusions)
return json_obj
def update(self, obj, json_obj):
"""Update the state represented by ``obj`` to be equivalent to the state
represented by the JSON dictionary ``json_obj``.
"""
self.update_attrs(obj, json_obj)
def create(self, obj, json_obj):
"""Update the state of the new model object ``obj`` to be equivalent to the
state represented by the JSON dictionary ``json_obj``.
"""
self.create_attrs(obj, json_obj)
| apache-2.0 | 2,375,897,023,168,792,600 | 37.774691 | 79 | 0.675316 | false |
sonofeft/XYmath | xymath/examples/fit_Patmos.py | 1 | 4252 | """
the website
http://www.engineeringtoolbox.com/air-altitude-pressure-d_462.html
Calculates air pressure above sea level as:
pressure(Pa) = 101325 * (1 - 2.25577E-5 * h)**5.25588
where x=altitude(m)
1) run script and get slightly improved answer from web site
(Note that Percent Error fit has better %StdDev but worse StdDev)
..........Total Error............
y = A*(c - d*x)**n
A = 101071.995075
c = 1.00050869652
d = 2.22270597814e-05
n = 5.34803672307
x = altitude (m)
y = pressure (Pa)
Correlation Coefficient = 0.999996164903
Standard Deviation = 88.4289426009
Percent Standard Deviation = 0.0992975318388%
y = 101071.995075*(1.00050869652 - 2.22270597814e-05*x)**5.34803672307
=======================================================
..........Percent Error............
y = A*(c - d*x)**n
A = 101749.173838
c = 0.999255692815
d = 2.2309845172e-05
n = 5.31856519674
x = altitude (m)
y = pressure (Pa)
Correlation Coefficient = 0.999996135864
Standard Deviation = 88.7614426959
Percent Standard Deviation = 0.0944017487367%
y = 101749.173838*(0.999255692815 - 2.2309845172e-05*x)**5.31856519674
2) Set constants to website values and see slightly higher StdDev and Percent StdDev
than the XYmath fit.
y = A*(c - d*x)**n
A = 101325
c = 1
d = 2.25577e-05
n = 5.25588
x = altitude (m)
y = pressure (Pa)
Correlation Coefficient = 0.999995821319
Standard Deviation = 93.2374437135
Percent Standard Deviation = 0.106440180482%
y = 101325*(1 - 2.25577e-05*x)**5.25588
"""
try:
from matplotlib import pyplot as plt
got_plt = True
except:
got_plt = False
from numpy import array, double
from xymath.dataset import DataSet
from xymath.nonlinfit import NonLinCurveFit
alt_mArr = array([-1524,-1372,-1219,-1067,-914,-762,-610,-457,-305,-152,0,152,305,457,
610,762,914,1067,1219,1372,1524,1829,2134,2438,2743,3048,4572,6096,7620,
9144,10668,12192,13716,15240], dtype=double)
PaArr = 1000.0 * array([121,119,117,115,113,111,109,107,105,103,101,99.5,97.7,96,94.2,92.5,90.8,
89.1,87.5,85.9,84.3,81.2,78.2,75.3,72.4,69.7,57.2,46.6,37.6,30.1,23.8,
18.7,14.5,11.1], dtype=double)
DS = DataSet(alt_mArr, PaArr, xName='altitude', yName='pressure', xUnits='m', yUnits='Pa')
guessD = {'A':101325, 'c':1, 'd':2.25577E-5, 'n':5.25588 }
print( 'guessD Before',guessD )
CFit_toterr = NonLinCurveFit(DS, rhs_eqnStr='A*(c - d*x)**n',
constDinp=guessD, fit_best_pcent=0) # 0=fit best total error
print( 'guessD After',guessD )
print('='*55)
print('..........Total Error............')
print(CFit_toterr.get_full_description())
print('='*55)
print('..........Percent Error............')
CFit_pcterr = NonLinCurveFit(DS, rhs_eqnStr='A*(c - d*x)**n',
constDinp=guessD, fit_best_pcent=1) # 1=fit best percent error
print(CFit_pcterr.get_full_description())
print('='*55)
# To set parameters to reference values from www.engineeringtoolbox.com do this:
print('..........Reference Curve Fit............')
CFit_ref = NonLinCurveFit(DS, rhs_eqnStr='A*(c - d*x)**n', constDinp=guessD)
CFit_ref.constD.update( {'A':101325, 'c':1, 'd':2.25577E-5, 'n':5.25588 } )
CFit_ref.calc_std_values()
print(CFit_ref.get_full_description())
if got_plt:
plt.plot( alt_mArr, PaArr, 'o', markersize=10 )
xPlotArr, yPlotArr = CFit_ref.get_xy_plot_arrays( Npoints=100, logScale=False)
plt.plot( xPlotArr, yPlotArr, '--', linewidth=5, label='Reference' )
xPlotArr, yPlotArr = CFit_toterr.get_xy_plot_arrays( Npoints=100, logScale=False)
plt.plot( xPlotArr, yPlotArr, '-', label='Total Error' , linewidth=3 )
xPlotArr, yPlotArr = CFit_pcterr.get_xy_plot_arrays( Npoints=100, logScale=False)
plt.plot( xPlotArr, yPlotArr, '-', label='Percent Error' )
plt.title('Atmospheric Pressure')
plt.legend()
plt.show()
| gpl-3.0 | -6,385,122,869,755,342,000 | 38.009174 | 96 | 0.590781 | false |
ningirsu/stepmania-server | test/test_models/test_song_stat.py | 1 | 2412 | """ Module to test the song_stat model """
import datetime
from test.factories.song_stat_factory import SongStatFactory
from test.factories.user_factory import UserFactory
from test import utils
class SongStatTest(utils.DBTest):
""" test SongStat model"""
def test_lit_difficulty(self):
""" Test lit_difficulty property """
song_stat = SongStatFactory(difficulty=0)
self.assertEqual(song_stat.lit_difficulty, "BEGINNER")
song_stat = SongStatFactory(difficulty=1)
self.assertEqual(song_stat.lit_difficulty, "EASY")
song_stat = SongStatFactory(difficulty=2)
self.assertEqual(song_stat.lit_difficulty, "MEDIUM")
song_stat = SongStatFactory(difficulty=3)
self.assertEqual(song_stat.lit_difficulty, "HARD")
song_stat = SongStatFactory(difficulty=4)
self.assertEqual(song_stat.lit_difficulty, "EXPERT")
song_stat = SongStatFactory(difficulty=67)
self.assertEqual(song_stat.lit_difficulty, "67")
def test_full_difficulty(self):
""" Test full_difficulty property """
song_stat = SongStatFactory(difficulty=0, feet=4)
self.assertEqual(song_stat.full_difficulty, "BEGINNER (4)")
song_stat = SongStatFactory(difficulty=3, feet=78)
self.assertEqual(song_stat.full_difficulty, "HARD (78)")
def test_lit_grade(self):
""" Test lit_difficulty property """
song_stat = SongStatFactory(grade=0)
self.assertEqual(song_stat.lit_grade, "AAAA")
song_stat = SongStatFactory(grade=1)
self.assertEqual(song_stat.lit_grade, "AAA")
song_stat = SongStatFactory(grade=3)
self.assertEqual(song_stat.lit_grade, "A")
song_stat = SongStatFactory(grade=4)
self.assertEqual(song_stat.lit_grade, "B")
song_stat = SongStatFactory(grade=6)
self.assertEqual(song_stat.lit_grade, "D")
def test_pretty_result(self):
""" Test pretty_result property """
date = datetime.datetime(2017, 10, 13, 11, 42)
song_stat = SongStatFactory(
difficulty=3, #HARD
feet=9,
grade=3, #A
user=UserFactory(name="José Prout"),
percentage=78.327,
created_at=date
)
self.assertEqual(
song_stat.pretty_result(),
r"HARD (9): José Prout A (78.33%) on 13/10/17"
)
| mit | -2,328,918,741,349,060,600 | 32.013699 | 67 | 0.63278 | false |
hpcugent/vsc-mympirun | bin/mytaskprolog.py | 1 | 2185 | #!/usr/bin/env python
#
# Copyright 2009-2021 Ghent University
#
# This file is part of vsc-mympirun,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/hpcugent/vsc-mympirun
#
# vsc-mympirun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# vsc-mympirun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vsc-mympirun. If not, see <http://www.gnu.org/licenses/>.
#
"""
Generate preferred CUDA_VISIBLE_DEVICES as part of srun task prolog
Work around some slurm issues
"""
from __future__ import print_function
from vsc.utils.affinity import sched_getaffinity
def export(key, value):
"""print export key=value, which is picked up by the task prolog"""
print("export %s=%s" % (key, value))
def get_preferred_gpu_map():
# issue #158: make generic or wait for schedmd fix, eg python nvml bindings
# this is the joltik map: 32 cores, even cores for gpu 0-1, odd for gpus 2-3
# so we prefer first 8 even cores for gpu 0, first 8 odd cores for gpu 1 etc etc
GPU_MAP = [0, 2] * 8 + [1, 3] * 8
return GPU_MAP
def preferred_cvd():
"""Generate the CUDA_VISIBLE_DEVICES value"""
gpu_map = get_preferred_gpu_map()
current_idx = [idx for idx, bit in enumerate(sched_getaffinity().get_cpus()) if bit and idx < len(gpu_map)]
gpus = set([gpu_map[idx] for idx in current_idx])
export('CUDA_VISIBLE_DEVICES', ','.join([str(x) for x in sorted(gpus)]))
def main():
preferred_cvd()
if __name__ == '__main__':
main()
| gpl-2.0 | -7,128,351,076,482,336,000 | 33.68254 | 111 | 0.701144 | false |
NPWR/Year-2042 | data.py | 1 | 17122 | from genericFunctions import *
import pygame as pg
from pygame.locals import *
from pygame import gfxdraw
from math import *
from random import randrange
from rigidBody import *
from levels import *
import sys
PI = pi
class Spaceship(rigidBody):
def __init__(self,pos,d = [0.,0.]):
rigidBody.__init__(self,pos,d)
self.c = (255,255,255)
self.c1 = (0,0,0)
self.bullets = []
self.fuel = MAX_FUEL_1
self.turretLevel = 0
self.hpLevel = 0
self.speedLevel = 0
self.LEVEL = 0
self.turretReady = True
self.turretCoolDown = TURRET_COOLDOWN[self.turretLevel]
self.turretCoolDownTime = 0
self.boosterReady = True
self.boosterCoolDown = 60
self.boosterCoolDownTime = 0
self.HP = HEALTH[self.hpLevel]
self.XP = 0
self.xpToNextLevel = LEVELS_XP[self.LEVEL]
self.speed = SPEED[self.speedLevel]
self.rocketParticles = ParticleSystem(ROCKET_COLOR,ROCKET_COLOR_VAR,ROCKET_LS,ROCKET_LS_VAR,ROCKET_MINSIZE,ROCKET_MAXSIZE)
self.boosterParticles = ParticleSystem(BOOSTER_COLOR,BOOSTER_COLOR_VAR,BOOSTER_LS,BOOSTER_LS_VAR,BOOSTER_MINSIZE,BOOSTER_MAXSIZE)
self.boosterParticles.setDrag(1.0)
self.rocketParticles.setDrag(DRAG)
self.shootAng = 0.
self.growth = 1.0
self.upgraded = False
self.bodySize = int(10*self.growth)
self.rearSize = int(4*self.growth)
def levelUp(self,upg):
self.LEVEL += 1
if upg == 'turret':
self.turretLevel += 1
if upg == 'health':
self.hpLevel += 1
if upg == 'speed':
self.speedLevel += 1
self.turretReady = True
self.turretCoolDown = TURRET_COOLDOWN[self.turretLevel]
self.turretCoolDownTime = 0
self.HP = HEALTH[self.hpLevel]
self.XP = 0
self.xpToNextLevel = LEVELS_XP[self.LEVEL]
self.speed = SPEED[self.speedLevel]
self.upgraded = True
def followMouse(self):
self.normalMove(self.shootAng)
self.upgraded = False
def addFuel(self):
self.fuel += FUEL_VALUE
if self.fuel > MAX_FUEL_1:
self.fuel = MAX_FUEL_1
if self.fuel < 0:
self.fuel = 0
self.XP += 20
def boost(self):
if self.fuel >= 10 and self.boosterReady:
x = cos(self.ang) * BOOST_SPEED
            y = sin(self.ang) * BOOST_SPEED
self.addMov([x,y])
self.boosterParticles.start(BOOSTER_FLUX,1)
self.fuel -= BOOST_COST
self.boosterReady = False
self.boosterCoolDownTime = self.boosterCoolDown
def normalMove(self,ang):
if self.fuel > 0:
spd = self.speed
x = cos(ang) * spd
y = sin(ang) * spd
self.addMov([x,y])
self.rocketParticles.start(ROCKET_FLUX)
self.fuel -= 1
def actAngle(self):
self.ang = atan2(self.d[1],self.d[0])
self.shootAng = atan2(pg.mouse.get_pos()[1] - CNTR[1], pg.mouse.get_pos()[0] - CNTR[0])
def coolDown(self):
if self.turretCoolDownTime > 0 and not self.turretReady:
self.turretCoolDownTime -= 1
else:
self.turretReady = True
if self.boosterCoolDownTime > 0 and not self.boosterReady:
self.boosterCoolDownTime -= 1
else:
self.boosterReady = True
def shoot(self):
if self.turretReady:
NB = {}
NB['POS'] = [self.pos[0],self.pos[1]]
x = cos(self.shootAng) * BULLET_SPEED + self.d[0]
y = sin(self.shootAng) * BULLET_SPEED + self.d[1]
NB['D'] = [x,y]
NB['AGE'] = 0
self.bullets.append(NB)
self.turretReady = False
self.turretCoolDownTime = self.turretCoolDown
def actuate(self):
self.move()
self.actAngle()
self.actBullets()
self.actParticles()
def actBullets(self):
for i,B in enumerate(self.bullets):
B['POS'][0] += B['D'][0]
B['POS'][1] += B['D'][1]
B['POS'][0] = int(B['POS'][0])
B['POS'][1] = int(B['POS'][1])
B['AGE'] += 1
if B['AGE'] > BULLET_LS:
self.bullets.pop(i)
self.coolDown()
def actParticles(self):
mang = atan2(self.d[1],self.d[0])
pmx = cos(mang)*30
pmy = sin(mang)*30
self.rocketParticles.actuate(self.pos,self.d,[pmx,pmy],ROCKET_SPREAD)
self.boosterParticles.actuate(self.pos,self.d,[pmx,pmy],BOOSTER_SPREAD)
def draw(self, SF, camPos):
#Particles drawing
self.rocketParticles.draw(SF,camPos)
self.boosterParticles.draw(SF,camPos)
#Calculating screen pos
pos = [self.pos[0]-camPos[0],self.pos[1]-camPos[1]]
#Ship Drawing
ang1 = self.ang + PI + PI/4.
ang2 = self.ang + PI - PI/4.
bodySize = int(10*self.growth)
rearSize = int(4*self.growth)
self.bodySize = bodySize
self.rearSize = rearSize
p1 = (int(pos[0] + cos(ang1)*bodySize), int(pos[1] + sin(ang1)*bodySize))
p2 = (int(pos[0] + cos(ang2)*bodySize), int(pos[1] + sin(ang2)*bodySize))
pg.gfxdraw.aacircle(SF,p1[0],p1[1],rearSize,self.c)
pg.gfxdraw.aacircle(SF,p2[0],p2[1],rearSize,self.c)
pg.draw.circle(SF,self.c1,pos,bodySize)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],bodySize,self.c)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],rearSize,ROCKET_COLOR)
for B in self.bullets:
p = (B['POS'][0] - camPos[0], B['POS'][1] - camPos[1])
pg.draw.circle(SF,self.c1,p,4)
pg.gfxdraw.aacircle(SF,p[0],p[1],4,self.c)
class Scene:
def __init__(self,BgDensity,BgDepth):
self.pos = (-CNTR[0],-CNTR[1])
self.vpos = [0.,0.]
self.dx, self.dy = 0.,0.
self.viewSize = WINSIZE
self.background = Background(BgDensity,BgDepth)
self.player = Spaceship(self.pos)
self.playerCell = [0,0]
self.cellStackTest = {}
self.cellStack = {}
self.genFuel()
self.previousCell = [0,0]
self.UI = {}
self.iUI = []
self.focus = 'GAME'
def signal(self,signal):
if signal == 'L':
self.player.normalMove(PI)
if signal == 'R':
self.player.normalMove(0)
if signal == 'U':
self.player.normalMove(-PI/2.)
if signal == 'D':
self.player.normalMove(PI/2.)
if signal == 'LCLICK':
if self.focus == 'UI':
choice = self.iUI[0].upgradeChoice()
if choice != None:
self.focus = 'GAME'
self.player.levelUp(BOX_TO_UPG[choice])
if signal == 'LCLICKH':
if self.focus == 'GAME':
self.player.shoot()
if signal == 'RCLICK':
if self.focus == 'UI':
choice = self.iUI[0].upgradeChoice()
if choice != None:
self.focus = 'GAME'
self.player.levelUp(BOX_TO_UPG[choice])
if signal == 'RCLICKH':
if self.focus == 'GAME':
self.player.followMouse()
if signal == 'SPACE':
if self.focus == 'GAME':
self.player.boost()
def addMov(self,vec):
self.dx += vec[0]
self.dy += vec[1]
def genFuel(self):
"""
Using dict for fuel cell notation:
fuel = {"x":x,
"y":y,
"dx":dx,
"dy":dy}
"""
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+":"+str(cell[1])
been = False
try:
been = self.cellStackTest[key]
except:
been = False
if not been:
fuel = []
for i in range(FUEL_PER_CELL):
x = randrange(W)
y = randrange(H)
c = {'x':x, 'y':y, 'dx':0., 'dy':0.}
fuel.append(c)
self.cellStack[key] = fuel
self.cellStackTest[key] = True
def redefCell(self):
x = int(floor(self.player.pos[0] / W))
y = int(floor(self.player.pos[1] / H))
self.playerCell = [x,y]
if self.playerCell != self.previousCell:
self.previousCell = self.playerCell
self.genFuel()
def moveFuelCells(self):
for nb in AROUND:
cell = MOVE(self.playerCell, nb)
key = str(cell[0])+':'+str(cell[1])
for fuel in self.cellStack[key]:
fuel['x'] += fuel['dx']
fuel['y'] += fuel['dy']
fuel['dx'] *= DRAG
fuel['dy'] *= DRAG
def checkFuelCellsAttraction(self):
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+':'+str(cell[1])
for i,fuel in enumerate(self.cellStack[key]):
x = (cell[0] * W + fuel['x']) - self.pos[0]
y = (cell[1] * H + fuel['y']) - self.pos[1]
if onScreen((x,y)):
dx = x - CNTR[0]
dy = y - CNTR[1]
d = hypot(dx,dy)
if d <= FUEL_MAGNET_RANGE:
g = FUEL_MAGNET_STRENGHT/(d)
ang = atan2(dy,dx) + PI
x = cos(ang)*g
y = sin(ang)*g
fuel['dx'] += x
fuel['dy'] += y
if d <= self.player.bodySize*2:
self.player.addFuel()
self.cellStack[key].pop(i)
def refreshUI(self):
self.UI['FUEL'].setCount(self.player.fuel)
self.UI['XP'].setCount(self.player.XP)
if self.player.XP >= self.player.xpToNextLevel:
self.player.XP = 0
self.iUI[0].appear()
self.focus = 'UI'
self.UI['XP'].setMax(LEVELS_XP[self.player.LEVEL])
self.UI['HP'].setMax(HEALTH[self.player.hpLevel])
def move(self):
self.vpos[0] += self.dx
self.vpos[1] += self.dy
self.dx *= DRAG
self.dy *= DRAG
self.actPos()
self.redefCell()
self.checkFuelCellsAttraction()
self.moveFuelCells()
self.refreshUI()
def addUI(self,key,ui,independant = False):
if not independant:
self.UI[key] = ui
else:
self.iUI.append(ui)
def followPlayer(self):
self.vpos[0] = self.player.vpos[0] - CNTR[0]
self.vpos[1] = self.player.vpos[1] - CNTR[1]
def actPos(self):
self.pos = (int(self.vpos[0]),int(self.vpos[1]))
def drawFuel(self,SF,cp):
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+":"+str(cell[1])
for fp in self.cellStack[key]:
dx = cell[0] * W
dy = cell[1] * H
pos = (int((fp['x']+ dx)-cp[0]),int((fp['y']+dy)-cp[1]))
if onScreen(pos):
pg.draw.circle(SF,(0,0,0),pos,FUEL_SIZE)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],FUEL_SIZE,FUEL_COLOR)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],int(FUEL_SIZE/2.),FUEL_COLOR)
def drawUI(self,SF):
for i,key in enumerate(self.UI):
self.UI[key].draw(SF,UI_POS,i)
for ui in self.iUI:
ui.draw(SF)
def draw(self,SF):
self.background.draw(SF,self.pos)
self.drawFuel(SF,self.pos)
self.player.draw(SF,self.pos)
self.drawUI(SF)
class ParticleSystem:
def __init__(self, color, colorRange, medLs, varLs, minSize, maxSize):
self.baseCol = color
self.colorMod = colorRange
self.baseLifespan = medLs
self.lifespanVariation = varLs
self.minSize = minSize
self.maxSize = maxSize
self.active = False
self.particles = []
"""
Particles are modelised by a dict:
{"Px":x,
"Py":y,
"Dx":dx,
"Dy":dy,
"AGE":age,
"COLOR":(r,g,b),
"SIZE":s,
"BSIZE":s}
"""
self.time = 0
self.stopTime = 0
self.spawnRate = 0
self.DRAG = 1.0
def setDrag(self,drag):
self.DRAG = drag
def start(self,flux,stop = None):
if not self.active:
self.active = True
self.time = 0
if stop != None:
self.stopTime = stop
self.spawnRate = flux # particles/s
def stop(self):
if self.active:
self.active = False
self.time = 0
self.stopTime = 0
self.spawnRate = 0
def actuate(self, opos, omov, pmov, spread):
#Move existing particles and delete old ones
toDel = []
for i,particle in enumerate(self.particles):
particle["Px"] += particle["Dx"]
particle["Py"] += particle["Dy"]
particle["Dx"] *= self.DRAG
particle["Dy"] *= self.DRAG
particle["AGE"] += 1
particle["SIZE"] = int((float(particle["BSIZE"])/float(self.baseLifespan))*(float(self.baseLifespan)-float(particle["AGE"])))
if particle["SIZE"] < 1:
particle["SIZE"] = 1
rnd = randrange(-self.lifespanVariation,self.lifespanVariation)
if particle["AGE"] > self.baseLifespan + rnd:
toDel.append(i)
toDel.reverse()
for i in toDel:
self.particles.pop(i)
if self.active:
#Stop the system if necessary
if self.stopTime != 0:
if self.time >= self.stopTime:
self.stop()
#Spawn new particles
for particle in range(self.spawnRate):
newP = {}
r = randrange(self.baseCol[0] - self.colorMod, self.baseCol[0] + self.colorMod)
g = randrange(self.baseCol[1] - self.colorMod, self.baseCol[1] + self.colorMod)
b = randrange(self.baseCol[2] - self.colorMod, self.baseCol[2] + self.colorMod)
angleDev = int(degrees(spread)/2.)
angleDev = randrange(-angleDev,angleDev)
angleDev = radians(angleDev)
oAngle = atan2(pmov[1],pmov[0]) + PI
spd = hypot(pmov[0],pmov[1]) * (randrange(50,100)/100.)
nAngle = oAngle + angleDev
dx = cos(nAngle) * spd
dy = sin(nAngle) * spd
newP["Px"] = opos[0]
newP["Py"] = opos[1]
newP["Dx"] = omov[0] + dx
newP["Dy"] = omov[1] + dy
newP["AGE"] = 0
newP["COLOR"] = verifyColor((r,g,b))
newP["SIZE"] = randrange(self.minSize,self.maxSize)
newP["BSIZE"] = newP["SIZE"]
self.particles.append(newP)
self.time += 1
def draw(self,SF,cP):
for p in self.particles:
pos = (int(p["Px"])-cP[0],int(p["Py"])-cP[1])
pg.draw.circle(SF,p["COLOR"],pos,p["SIZE"])
class Background:
def __init__(self,density,depth):
self.density = density
self.depth = depth
self.initPlanes()
def initPlanes(self):
self.planes = []
for i in range(self.depth):
self.planes.append([])
for j in range(self.density*(i+1)):
star = (randrange(W),randrange(H))
self.planes[i].append(star)
self.planes.reverse()
self.surfaces = []
for j,plane in enumerate(self.planes):
i = (self.depth-1)-j
c = int((255/self.depth) * (self.depth - i))
c = (c,c,c)
newSF = pg.Surface((W*2,H*2))
smlSF = pg.Surface((W,H))
for star in plane:
pg.draw.circle(smlSF,c,star,2)
pg.gfxdraw.aacircle(smlSF,star[0],star[1],2,c)
newSF.blit(smlSF,(0,0))
newSF.blit(smlSF,(W,0))
newSF.blit(smlSF,(0,H))
newSF.blit(smlSF,(W,H))
newSF.set_colorkey((0,0,0),pg.RLEACCEL)
self.surfaces.append(newSF)
self.surfaces.reverse()
def draw(self,SF,camPos):
for i,surface in enumerate(self.surfaces):
dmod = (i+1)*(i+1)
pos = (int(camPos[0]/dmod),int(camPos[1]/dmod))
x = pos[0] % W
y = pos[1] % H
rct = ((x,y),(W,H))
SF.blit(surface,(0,0),rct)
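
if __name__ == '__main__':
    # Hedged smoke test: step a ParticleSystem for a few frames without
    # opening a window (draw() is never called, so no display is needed).
    # The colour/lifespan/size arguments below are made-up demo values.
    ps = ParticleSystem((200, 80, 0), 20, 30, 5, 1, 3)
    ps.start(5)
    for _ in range(10):
        ps.actuate([0., 0.], [0., 0.], [1., 0.], PI / 4.)
    print len(ps.particles), 'particles alive after 10 frames'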
| gpl-2.0 | -6,190,535,035,971,781,000 | 29.250883 | 137 | 0.493634 | false |
fugufisch/wholecell | state/metabolite.py | 1 | 4402 | import math
from data.knowledgebase import Knowledgebase
from state import State
__author__ = 'max'
__author__ = 'Sebastian'
class SingleMetabolite(State, object):
""""""
def __init__(self, metabolite_by_row):
"""Constructor for SingleMetabolite"""
super(SingleMetabolite, self).__init__(metabolite_by_row["WholeCellModelID"], metabolite_by_row["Name"])
self.__charge = float('nan')
self.__molecularWeightCalc = None #float('nan')
self.__exchangeLowerBound = float('nan')
self.__exchangeUpperBound = float('nan')
self.__reactions = None
self.__volume = float('nan')
self.__category = None
self._set_information(metabolite_by_row)
@property
def charge(self):
return self.__charge
@charge.setter
def charge(self, charge):
self.__charge = charge
@property
def molecularWeightCalc(self):
return self.__molecularWeightCalc
@molecularWeightCalc.setter
def molecularWeightCalc(self, molecularWeightCalc):
self.__molecularWeightCalc = molecularWeightCalc
@property
def exchangeLowerBound(self):
return self.__exchangeLowerBound
@exchangeLowerBound.setter
def exchangeLowerBound(self, exchangeLowerBound):
self.__exchangeLowerBound = exchangeLowerBound
@property
def exchangeUpperBound(self):
return self.__exchangeUpperBound
@exchangeUpperBound.setter
def exchangeUpperBound(self, exchangeUpperBound):
self.__exchangeUpperBound = exchangeUpperBound
@property
def reactions(self):
return self.__reactions
@reactions.setter
    def reactions(self, reactions):
        self.__reactions = reactions
@property
def volume(self):
return self.__volume
@volume.setter
def volume(self, volume):
self.__volume = volume
@property
def category(self):
return self.__category
@category.setter
def category(self, category):
self.__category = category
def _set_information(self, metabolite_by_row):
if not math.isnan(metabolite_by_row.Charge):
self.charge = metabolite_by_row.Charge
if not math.isnan(metabolite_by_row.MolecularWeightCalc):
self.molecularWeightCalc = metabolite_by_row.MolecularWeightCalc
if not math.isnan(metabolite_by_row.ExchangeLowerBound):
self.exchangeLowerBound = metabolite_by_row.ExchangeLowerBound
if not math.isnan(metabolite_by_row.ExchangeUpperBound):
self.exchangeUpperBound = metabolite_by_row.ExchangeUpperBound
if isinstance(metabolite_by_row.Reactions, str):
            self.reactions = metabolite_by_row.Reactions.split(";")
if not math.isnan(metabolite_by_row.Volume):
self.volume = metabolite_by_row.Volume
if metabolite_by_row.Category:
self.category = metabolite_by_row.Category
class Metabolite(State, dict, object):
"""
Metabolites
"""
def __init__(self, init_dict):
super(Metabolite, self).__init__(init_dict["ID"], init_dict["name"])
        self.kb = Knowledgebase(data_dir='../data', select_states=["metabolites"]) # get only the metabolite information
        for i in range(len(self.kb.states.metabolites["WholeCellModelID"])): # iter over all metabolites
            print self.kb.states.metabolites.transpose()[i] # print the row information of the metabolite
            self.add_metabolite(self.kb.states.metabolites.transpose()[i]) # get the complete ith row
def add_metabolite(self, metabolite_by_row):
"""
This function adds a metabolite to the metabolite dictionary
        @param metabolite_by_row: panda object containing the row information of a metabolite
@return: None
"""
if metabolite_by_row.WholeCellModelID not in self and isinstance(metabolite_by_row.WholeCellModelID, str):
            self[metabolite_by_row.WholeCellModelID] = SingleMetabolite(metabolite_by_row)  # store each SingleMetabolite keyed by its WholeCellModelID
elif isinstance(metabolite_by_row.WholeCellModelID, str):
print "WholeCellModelID {0} already known".format(metabolite_by_row.WholeCellModelID)
else:
print "Something strange WholeCellModelID: {0}".format(metabolite_by_row.WholeCellModelID)
if __name__ == "__main__":
    Metabolite({"ID": 2, "name": "metabolite"})
smart-techs/you-get | src/you_get/extractors/iqiyi.py | 1 | 8162 | #!/usr/bin/env python
from ..common import *
from ..extractor import VideoExtractor
from uuid import uuid4
from random import random,randint
import json
from math import floor
from zlib import decompress
import hashlib
'''
Changelog:
-> http://www.iqiyi.com/common/flashplayer/20150916/MainPlayer_5_2_28_c3_3_7_4.swf
use @fffonion 's method in #617.
Add trace AVM(asasm) code in Iqiyi's encode function where the salt is put into the encode array and reassemble by RABCDasm(or WinRABCDasm),then use Fiddler to response modified file to replace the src file with its AutoResponder function ,set browser Fiddler proxy and play with !debug version! Flash Player ,finially get result in flashlog.txt(its location can be easily found in search engine).
Code Like (without letters after #comment:),it just do the job : trace("{IQIYI_SALT}:"+salt_array.join(""))
```(Postion After getTimer)
findpropstrict QName(PackageNamespace(""), "trace")
pushstring "{IQIYI_SALT}:" #comment for you to locate the salt
getscopeobject 1
getslot 17 #comment: 17 is the salt slots number defined in code
pushstring ""
callproperty QName(Namespace("http://adobe.com/AS3/2006/builtin"), "join"), 1
add
callpropvoid QName(PackageNamespace(""), "trace"), 1
```
-> http://www.iqiyi.com/common/flashplayer/20150820/MainPlayer_5_2_27_2_c3_3_7_3.swf
some small changes in Zombie.bite function
'''
'''
com.qiyi.player.core.model.def.DefinitonEnum
bid meaning for quality
0 none
1 standard
2 high
3 super
4 suprt-high
5 fullhd
10 4k
96 topspeed
'''
def mix(tvid):
salt = '4a1caba4b4465345366f28da7c117d20'
tm = str(randint(2000,4000))
sc = hashlib.new('md5', bytes(salt + tm + tvid, 'utf-8')).hexdigest()
return tm, sc, 'eknas'
def getVRSXORCode(arg1,arg2):
loc3=arg2 %3
if loc3 == 1:
return arg1^121
if loc3 == 2:
return arg1^72
return arg1^103
def getVrsEncodeCode(vlink):
loc6=0
loc2=''
loc3=vlink.split("-")
loc4=len(loc3)
# loc5=loc4-1
for i in range(loc4-1,-1,-1):
loc6=getVRSXORCode(int(loc3[loc4-i-1],16),i)
loc2+=chr(loc6)
return loc2[::-1]
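# Illustrative only: getVRSXORCode XORs each byte with a constant chosen by
# position, so applying it twice with the same index is the identity. The
# hypothetical encoder below (not part of you-get) demonstrates the round
# trip: getVrsEncodeCode(_vrs_encode_demo(s)) == s for ASCII input s.
def _vrs_encode_demo(plain):
    n = len(plain)
    return "-".join('%x' % getVRSXORCode(ord(plain[n - 1 - m]), n - 1 - m)
                    for m in range(n))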
def getDispathKey(rid):
tp=")(*&^flash@#$%a" #magic from swf
time=json.loads(get_content("http://data.video.qiyi.com/t?tn="+str(random())))["t"]
t=str(int(floor(int(time)/(10*60.0))))
return hashlib.new("md5",bytes(t+tp+rid,"utf-8")).hexdigest()
class Iqiyi(VideoExtractor):
name = "爱奇艺 (Iqiyi)"
stream_types = [
{'id': '4k', 'container': 'f4v', 'video_profile': '4K'},
{'id': 'fullhd', 'container': 'f4v', 'video_profile': '全高清'},
{'id': 'suprt-high', 'container': 'f4v', 'video_profile': '超高清'},
{'id': 'super', 'container': 'f4v', 'video_profile': '超清'},
{'id': 'high', 'container': 'f4v', 'video_profile': '高清'},
{'id': 'standard', 'container': 'f4v', 'video_profile': '标清'},
{'id': 'topspeed', 'container': 'f4v', 'video_profile': '最差'},
]
stream_to_bid = { '4k': 10, 'fullhd' : 5, 'suprt-high' : 4, 'super' : 3, 'high' : 2, 'standard' :1, 'topspeed' :96}
stream_urls = { '4k': [] , 'fullhd' : [], 'suprt-high' : [], 'super' : [], 'high' : [], 'standard' :[], 'topspeed' :[]}
baseurl = ''
gen_uid = ''
def getVMS(self):
#tm ->the flash run time for md5 usage
#um -> vip 1 normal 0
#authkey -> for password protected video ,replace '' with your password
#puid user.passportid may empty?
#TODO: support password protected video
tvid, vid = self.vid
tm, sc, src = mix(tvid)
uid = self.gen_uid
vmsreq='http://cache.video.qiyi.com/vms?key=fvip&src=1702633101b340d8917a69cf8a4b8c7' +\
"&tvId="+tvid+"&vid="+vid+"&vinfo=1&tm="+tm+\
"&enc="+sc+\
"&qyid="+uid+"&tn="+str(random()) +"&um=1" +\
"&authkey="+hashlib.new('md5',bytes(hashlib.new('md5', b'').hexdigest()+str(tm)+tvid,'utf-8')).hexdigest()
return json.loads(get_content(vmsreq))
def download_playlist_by_url(self, url, **kwargs):
self.url = url
video_page = get_content(url)
videos = set(re.findall(r'<a href="(http://www\.iqiyi\.com/v_[^"]+)"', video_page))
for video in videos:
self.__class__().download_by_url(video, **kwargs)
def prepare(self, **kwargs):
assert self.url or self.vid
if self.url and not self.vid:
html = get_html(self.url)
tvid = r1(r'#curid=(.+)_', self.url) or \
r1(r'tvid=([^&]+)', self.url) or \
r1(r'data-player-tvid="([^"]+)"', html)
videoid = r1(r'#curid=.+_(.*)$', self.url) or \
r1(r'vid=([^&]+)', self.url) or \
r1(r'data-player-videoid="([^"]+)"', html)
self.vid = (tvid, videoid)
self.gen_uid = uuid4().hex
try:
info = self.getVMS()
except:
self.download_playlist_by_url(self.url, **kwargs)
exit(0)
if info["code"] != "A000000":
log.e("[error] outdated iQIYI key")
log.wtf("is your you-get up-to-date?")
self.title = info["data"]["vi"]["vn"]
self.title = self.title.replace('\u200b', '')
# data.vp = json.data.vp
# data.vi = json.data.vi
# data.f4v = json.data.f4v
# if movieIsMember data.vp = json.data.np
#for highest qualities
#for http://www.iqiyi.com/v_19rrmmz5yw.html not vp -> np
try:
if info["data"]['vp']["tkl"]=='' :
raise ValueError
except:
log.e("[Error] Do not support for iQIYI VIP video.")
exit(-1)
vs = info["data"]["vp"]["tkl"][0]["vs"]
self.baseurl=info["data"]["vp"]["du"].split("/")
for stream in self.stream_types:
for i in vs:
if self.stream_to_bid[stream['id']] == i['bid']:
video_links=i["fs"] #now in i["flvs"] not in i["fs"]
if not i["fs"][0]["l"].startswith("/"):
tmp = getVrsEncodeCode(i["fs"][0]["l"])
if tmp.endswith('mp4'):
video_links = i["flvs"]
self.stream_urls[stream['id']] = video_links
size = 0
for l in video_links:
size += l['b']
self.streams[stream['id']] = {'container': stream['container'], 'video_profile': stream['video_profile'], 'size' : size}
break
def extract(self, **kwargs):
if 'stream_id' in kwargs and kwargs['stream_id']:
# Extract the stream
stream_id = kwargs['stream_id']
if stream_id not in self.streams:
log.e('[Error] Invalid video format.')
log.e('Run \'-i\' command with no specific video format to view all available formats.')
exit(2)
else:
# Extract stream with the best quality
stream_id = self.streams_sorted[0]['id']
urls=[]
for i in self.stream_urls[stream_id]:
vlink=i["l"]
if not vlink.startswith("/"):
#vlink is encode
vlink=getVrsEncodeCode(vlink)
key=getDispathKey(vlink.split("/")[-1].split(".")[0])
baseurl = [x for x in self.baseurl]
baseurl.insert(-1,key)
url="/".join(baseurl)+vlink+'?su='+self.gen_uid+'&qyid='+uuid4().hex+'&client=&z=&bt=&ct=&tn='+str(randint(10000,20000))
urls.append(json.loads(get_content(url))["l"])
#download should be complete in 10 minutes
#because the url is generated before start downloading
#and the key may be expired after 10 minutes
self.streams[stream_id]['src'] = urls
site = Iqiyi()
download = site.download_by_url
iqiyi_download_by_vid = site.download_by_vid
download_playlist = site.download_playlist_by_url
| mit | 7,460,090,059,082,132,000 | 36.804651 | 400 | 0.55438 | false |
juju/juju-gui-charm | hooks/shelltoolbox.py | 1 | 20055 | # Copyright 2012 Canonical Ltd.
# This file is taken from the python-shelltoolbox package.
#
# IMPORTANT: Do not modify this file to add or change functionality. If you
# really feel the need to do so, first convert our code to the shelltoolbox
# library, and modify it instead (or modify the helpers or utils module here,
# as appropriate).
#
# python-shell-toolbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation, version 3 of the License.
#
# python-shell-toolbox is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License
# along with python-shell-toolbox. If not, see <http://www.gnu.org/licenses/>.
"""Helper functions for accessing shell commands in Python."""
__metaclass__ = type
__all__ = [
'apt_get_install',
'bzr_whois',
'cd',
'command',
'DictDiffer',
'environ',
'file_append',
'file_prepend',
'generate_ssh_keys',
'get_su_command',
'get_user_home',
'get_user_ids',
'install_extra_repositories',
'join_command',
'mkdirs',
'run',
'Serializer',
'script_name',
'search_file',
'ssh',
'su',
'user_exists',
'wait_for_page_contents',
]
from collections import namedtuple
from contextlib import contextmanager
from email.Utils import parseaddr
import errno
import json
import operator
import os
import pipes
import pwd
import re
import subprocess
import sys
from textwrap import dedent
import time
import urllib2
Env = namedtuple('Env', 'uid gid home')
def apt_get_install(*args, **kwargs):
"""Install given packages using apt.
It is possible to pass environment variables to be set during install
using keyword arguments.
:raises: subprocess.CalledProcessError
"""
caller = kwargs.pop('caller', run)
stderr = kwargs.pop('stderr', None)
debian_frontend = kwargs.pop('DEBIAN_FRONTEND', 'noninteractive')
with environ(DEBIAN_FRONTEND=debian_frontend, **kwargs):
cmd = ('apt-get', '-y', 'install') + args
return caller(*cmd, stderr=stderr)
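# Illustrative call (requires apt and root privileges; the package names are
# examples only):
#
#   apt_get_install('git', 'curl', DEBIAN_FRONTEND='noninteractive')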
def bzr_whois(user):
"""Return full name and email of bzr `user`.
Return None if the given `user` does not have a bzr user id.
"""
with su(user):
try:
whoami = run('bzr', 'whoami')
except (subprocess.CalledProcessError, OSError):
return None
return parseaddr(whoami)
@contextmanager
def cd(directory):
"""A context manager to temporarily change current working dir, e.g.::
>>> import os
>>> os.chdir('/tmp')
>>> with cd('/bin'): print os.getcwd()
/bin
>>> print os.getcwd()
/tmp
"""
cwd = os.getcwd()
os.chdir(directory)
try:
yield
finally:
os.chdir(cwd)
def command(*base_args):
"""Return a callable that will run the given command with any arguments.
The first argument is the path to the command to run, subsequent arguments
are command-line arguments to "bake into" the returned callable.
    The callable runs the given executable and also takes arguments that will
    be appended to the "baked in" arguments.
For example, this code will list a file named "foo" (if it exists):
ls_foo = command('/bin/ls', 'foo')
ls_foo()
While this invocation will list "foo" and "bar" (assuming they exist):
ls_foo('bar')
"""
def callable_command(*args):
all_args = base_args + args
return run(*all_args)
return callable_command
@contextmanager
def environ(**kwargs):
"""A context manager to temporarily change environment variables.
If an existing environment variable is changed, it is restored during
context cleanup::
>>> import os
>>> os.environ['MY_VARIABLE'] = 'foo'
>>> with environ(MY_VARIABLE='bar'): print os.getenv('MY_VARIABLE')
bar
>>> print os.getenv('MY_VARIABLE')
foo
>>> del os.environ['MY_VARIABLE']
If we are adding environment variables, they are removed during context
cleanup::
>>> import os
>>> with environ(MY_VAR1='foo', MY_VAR2='bar'):
... print os.getenv('MY_VAR1'), os.getenv('MY_VAR2')
foo bar
>>> os.getenv('MY_VAR1') == os.getenv('MY_VAR2') == None
True
"""
backup = {}
for key, value in kwargs.items():
backup[key] = os.getenv(key)
os.environ[key] = value
try:
yield
finally:
for key, value in backup.items():
if value is None:
del os.environ[key]
else:
os.environ[key] = value
def file_append(filename, line):
r"""Append given `line`, if not present, at the end of `filename`.
Usage example::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1\n')
>>> f.close()
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
Nothing happens if the file already contains the given `line`::
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
A new line is automatically added before the given `line` if it is not
present at the end of current file content::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1')
>>> f.close()
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
The file is created if it does not exist::
>>> import tempfile
>>> filename = tempfile.mktemp()
>>> file_append(filename, 'line1\n')
>>> open(filename).read()
'line1\n'
"""
if not line.endswith('\n'):
line += '\n'
with open(filename, 'a+') as f:
lines = f.readlines()
if line not in lines:
if not lines or lines[-1].endswith('\n'):
f.write(line)
else:
f.write('\n' + line)
def file_prepend(filename, line):
r"""Insert given `line`, if not present, at the beginning of `filename`.
Usage example::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1\n')
>>> f.close()
>>> file_prepend(f.name, 'line0\n')
>>> open(f.name).read()
'line0\nline1\n'
If the file starts with the given `line`, nothing happens::
>>> file_prepend(f.name, 'line0\n')
>>> open(f.name).read()
'line0\nline1\n'
If the file contains the given `line`, but not at the beginning,
the line is moved on top::
>>> file_prepend(f.name, 'line1\n')
>>> open(f.name).read()
'line1\nline0\n'
"""
if not line.endswith('\n'):
line += '\n'
with open(filename, 'r+') as f:
lines = f.readlines()
if lines[0] != line:
try:
lines.remove(line)
except ValueError:
pass
lines.insert(0, line)
f.seek(0)
f.writelines(lines)
def generate_ssh_keys(path, passphrase=''):
"""Generate ssh key pair, saving them inside the given `directory`.
>>> generate_ssh_keys('/tmp/id_rsa')
0
>>> open('/tmp/id_rsa').readlines()[0].strip()
'-----BEGIN RSA PRIVATE KEY-----'
>>> open('/tmp/id_rsa.pub').read().startswith('ssh-rsa')
True
>>> os.remove('/tmp/id_rsa')
>>> os.remove('/tmp/id_rsa.pub')
If either of the key files already exist, generate_ssh_keys() will
raise an Exception.
Note that ssh-keygen will prompt if the keyfiles already exist, but
when we're using it non-interactively it's better to pre-empt that
behaviour.
>>> with open('/tmp/id_rsa', 'w') as key_file:
... key_file.write("Don't overwrite me, bro!")
>>> generate_ssh_keys('/tmp/id_rsa') # doctest: +ELLIPSIS
Traceback (most recent call last):
Exception: File /tmp/id_rsa already exists...
>>> os.remove('/tmp/id_rsa')
>>> with open('/tmp/id_rsa.pub', 'w') as key_file:
... key_file.write("Don't overwrite me, bro!")
>>> generate_ssh_keys('/tmp/id_rsa') # doctest: +ELLIPSIS
Traceback (most recent call last):
Exception: File /tmp/id_rsa.pub already exists...
>>> os.remove('/tmp/id_rsa.pub')
"""
if os.path.exists(path):
raise Exception("File {} already exists.".format(path))
if os.path.exists(path + '.pub'):
raise Exception("File {}.pub already exists.".format(path))
return subprocess.call([
'ssh-keygen', '-q', '-t', 'rsa', '-N', passphrase, '-f', path])
def get_su_command(user, args):
"""Return a command line as a sequence, prepending "su" if necessary.
This can be used together with `run` when the `su` context manager is not
enough (e.g. an external program uses uid rather than euid).
run(*get_su_command(user, ['bzr', 'whoami']))
If the su is requested as current user, the arguments are returned as
given::
>>> import getpass
>>> current_user = getpass.getuser()
>>> get_su_command(current_user, ('ls', '-l'))
('ls', '-l')
Otherwise, "su" is prepended::
>>> get_su_command('nobody', ('ls', '-l', 'my file'))
('su', 'nobody', '-c', "ls -l 'my file'")
"""
if get_user_ids(user)[0] != os.getuid():
args = [i for i in args if i is not None]
return ('su', user, '-c', join_command(args))
return args
def get_user_home(user):
"""Return the home directory of the given `user`.
>>> get_user_home('root')
'/root'
If the user does not exist, return a default /home/[username] home::
>>> get_user_home('_this_user_does_not_exist_')
'/home/_this_user_does_not_exist_'
"""
try:
return pwd.getpwnam(user).pw_dir
except KeyError:
return os.path.join(os.path.sep, 'home', user)
def get_user_ids(user):
"""Return the uid and gid of given `user`, e.g.::
>>> get_user_ids('root')
(0, 0)
"""
userdata = pwd.getpwnam(user)
return userdata.pw_uid, userdata.pw_gid
def install_extra_repositories(*repositories):
"""Install all of the extra repositories and update apt.
Given repositories can contain a "{distribution}" placeholder, that will
be replaced by current distribution codename.
:raises: subprocess.CalledProcessError
"""
distribution = run('lsb_release', '-cs').strip()
# Starting from Oneiric, `apt-add-repository` is interactive by
# default, and requires a "-y" flag to be set.
assume_yes = None if distribution == 'lucid' else '-y'
for repo in repositories:
repository = repo.format(distribution=distribution)
run('apt-add-repository', assume_yes, repository)
run('apt-get', 'clean')
run('apt-get', 'update')
def join_command(args):
"""Return a valid Unix command line from `args`.
>>> join_command(['ls', '-l'])
'ls -l'
Arguments containing spaces and empty args are correctly quoted::
>>> join_command(['command', 'arg1', 'arg containing spaces', ''])
"command arg1 'arg containing spaces' ''"
"""
return ' '.join(pipes.quote(arg) for arg in args)
def mkdirs(*args):
"""Create leaf directories (given as `args`) and all intermediate ones.
>>> import tempfile
>>> base_dir = tempfile.mktemp(suffix='/')
>>> dir1 = tempfile.mktemp(prefix=base_dir)
>>> dir2 = tempfile.mktemp(prefix=base_dir)
>>> mkdirs(dir1, dir2)
>>> os.path.isdir(dir1)
True
>>> os.path.isdir(dir2)
True
If the leaf directory already exists the function returns without errors::
>>> mkdirs(dir1)
An `OSError` is raised if the leaf path exists and it is a file::
>>> f = tempfile.NamedTemporaryFile(
... 'w', delete=False, prefix=base_dir)
>>> f.close()
>>> mkdirs(f.name) # doctest: +ELLIPSIS
Traceback (most recent call last):
OSError: ...
"""
for directory in args:
try:
os.makedirs(directory)
except OSError as err:
if err.errno != errno.EEXIST or os.path.isfile(directory):
raise
def run(*args, **kwargs):
"""Run the command with the given arguments.
The first argument is the path to the command to run.
Subsequent arguments are command-line arguments to be passed.
This function accepts all optional keyword arguments accepted by
`subprocess.Popen`.
"""
args = [i for i in args if i is not None]
pipe = subprocess.PIPE
process = subprocess.Popen(
args, stdout=kwargs.pop('stdout', pipe),
stderr=kwargs.pop('stderr', pipe),
close_fds=kwargs.pop('close_fds', True), **kwargs)
stdout, stderr = process.communicate()
if process.returncode:
exception = subprocess.CalledProcessError(
process.returncode, repr(args))
# The output argument of `CalledProcessError` was introduced in Python
# 2.7. Monkey patch the output here to avoid TypeErrors in older
# versions of Python, still preserving the output in Python 2.7.
exception.output = ''.join(filter(None, [stdout, stderr]))
raise exception
return stdout
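# Illustrative usage (assumes /bin/ls exists): on success the command's
# combined stdout is returned; on a non-zero exit a CalledProcessError
# carrying the captured output is raised.
#
#   listing = run('ls', '-l', '/tmp')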
def script_name():
"""Return the name of this script."""
return os.path.basename(sys.argv[0])
def search_file(regexp, filename):
"""Return the first line in `filename` that matches `regexp`."""
with open(filename) as f:
for line in f:
if re.search(regexp, line):
return line
def ssh(location, user=None, key=None, caller=subprocess.call):
"""Return a callable that can be used to run ssh shell commands.
The ssh `location` and, optionally, `user` must be given.
If the user is None then the current user is used for the connection.
The callable internally uses the given `caller`::
>>> def caller(cmd):
... print tuple(cmd)
>>> sshcall = ssh('example.com', 'myuser', caller=caller)
>>> root_sshcall = ssh('example.com', caller=caller)
>>> sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., '[email protected]', '--', 'ls -l')
>>> root_sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., 'example.com', '--', 'ls -l')
The ssh key path can be optionally provided::
>>> root_sshcall = ssh('example.com', key='/tmp/foo', caller=caller)
>>> root_sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., '-i', '/tmp/foo', 'example.com', '--', 'ls -l')
If the ssh command exits with an error code,
a `subprocess.CalledProcessError` is raised::
>>> ssh('loc', caller=lambda cmd: 1)('ls -l') # doctest: +ELLIPSIS
Traceback (most recent call last):
CalledProcessError: ...
If ignore_errors is set to True when executing the command, no error
will be raised, even if the command itself returns an error code.
>>> sshcall = ssh('loc', caller=lambda cmd: 1)
>>> sshcall('ls -l', ignore_errors=True)
"""
sshcmd = [
'ssh',
'-t',
'-t', # Yes, this second -t is deliberate. See `man ssh`.
'-o', 'StrictHostKeyChecking=no',
'-o', 'UserKnownHostsFile=/dev/null',
]
if key is not None:
sshcmd.extend(['-i', key])
if user is not None:
location = '{}@{}'.format(user, location)
sshcmd.extend([location, '--'])
def _sshcall(cmd, ignore_errors=False):
command = sshcmd + [cmd]
retcode = caller(command)
if retcode and not ignore_errors:
raise subprocess.CalledProcessError(retcode, ' '.join(command))
return _sshcall
@contextmanager
def su(user):
"""A context manager to temporarily run the script as a different user."""
uid, gid = get_user_ids(user)
os.setegid(gid)
os.seteuid(uid)
home = get_user_home(user)
with environ(HOME=home):
try:
yield Env(uid, gid, home)
finally:
os.setegid(os.getgid())
os.seteuid(os.getuid())
def user_exists(username):
"""Return True if given `username` exists, e.g.::
>>> user_exists('root')
True
>>> user_exists('_this_user_does_not_exist_')
False
"""
try:
pwd.getpwnam(username)
except KeyError:
return False
return True
def wait_for_page_contents(url, contents, timeout=120, validate=None):
if validate is None:
validate = operator.contains
start_time = time.time()
while True:
try:
stream = urllib2.urlopen(url)
except (urllib2.HTTPError, urllib2.URLError):
pass
else:
page = stream.read()
if validate(page, contents):
return page
if time.time() - start_time >= timeout:
raise RuntimeError('timeout waiting for contents of ' + url)
time.sleep(0.1)
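# Illustrative call (hypothetical URL): poll until the page contains 'ready',
# giving up after 30 seconds.
#
#   wait_for_page_contents('http://localhost:8080/status', 'ready', timeout=30)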
class DictDiffer:
"""
Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
# Based on answer by hughdbrown at:
# http://stackoverflow.com/questions/1165352
def __init__(self, current_dict, past_dict):
self.current_dict = current_dict
self.past_dict = past_dict
self.set_current = set(current_dict)
self.set_past = set(past_dict)
self.intersect = self.set_current.intersection(self.set_past)
@property
def added(self):
return self.set_current - self.intersect
@property
def removed(self):
return self.set_past - self.intersect
@property
def changed(self):
return set(key for key in self.intersect
if self.past_dict[key] != self.current_dict[key])
@property
def unchanged(self):
return set(key for key in self.intersect
if self.past_dict[key] == self.current_dict[key])
@property
def modified(self):
return self.current_dict != self.past_dict
@property
def added_or_changed(self):
return self.added.union(self.changed)
def _changes(self, keys):
new = {}
old = {}
for k in keys:
new[k] = self.current_dict.get(k)
old[k] = self.past_dict.get(k)
return "%s -> %s" % (old, new)
def __str__(self):
if self.modified:
s = dedent("""\
added: %s
removed: %s
changed: %s
unchanged: %s""") % (
self._changes(self.added),
self._changes(self.removed),
self._changes(self.changed),
list(self.unchanged))
else:
s = "no changes"
return s
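# Illustrative DictDiffer behaviour (example values only):
#
#   differ = DictDiffer({'a': 1, 'b': 2}, {'b': 3, 'c': 4})
#   differ.added == set(['a']); differ.removed == set(['c'])
#   differ.changed == set(['b']); differ.modified is True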
class Serializer:
"""Handle JSON (de)serialization."""
def __init__(self, path, default=None, serialize=None, deserialize=None):
self.path = path
self.default = default or {}
self.serialize = serialize or json.dump
self.deserialize = deserialize or json.load
def exists(self):
return os.path.exists(self.path)
def get(self):
if self.exists():
with open(self.path) as f:
return self.deserialize(f)
return self.default
def set(self, data):
with open(self.path, 'w') as f:
self.serialize(data, f)
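# Illustrative round trip (hypothetical path):
#
#   state = Serializer('/tmp/state.json', default={})
#   state.set({'count': 1})
#   state.get()  # -> {'count': 1}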
| agpl-3.0 | -4,344,521,105,030,531,600 | 28.932836 | 78 | 0.588581 | false |
ebozag/CLOUDCAL | simulator-ondemand-vms.py | 1 | 4527 | """
"""
import random
import simpy
from math import trunc
import numpy
from configuration import *
ARRIVAL_RATE = 1/ARRIVAL_RATE  # configuration stores mean inter-arrival times; invert to rates
ARRIVAL_RATE *= 8              # scale the offered load (factor chosen by the author)
MAX_RATE = max(ARRIVAL_RATE)
SERVICE_TIME_SUM = 0.0
TIME_IN_THE_SYSTEM_SUM = 0.0
SERVICE_TIME_COUNT = 0
latency = []
latency_peak = []
REQUIRED_VMS = []
def source(env, interval, counter, avg_service_time,hour_slot):
CURRENT_ARRIVAL_SUM = 0.0
CURRENT_ARRIVAL_COUNT = 0
"""Source generates customers randomly"""
i=0
hourlyrate = ARRIVAL_RATE[hour_slot]
MAX_RATE = max(ARRIVAL_RATE[hour_slot:hour_slot+2])
pthinning = 1-hourlyrate/MAX_RATE
while env.now <= interval:
i+=1
c = customer(env, 'Request%02d' % i, counter, avg_service_time)
env.process(c)
        # Thinning (Lewis-Shedler): draw candidate arrivals at MAX_RATE and
        # reject each one with probability pthinning = 1 - hourlyrate/MAX_RATE,
        # so the accepted arrivals follow the current hourly Poisson rate.
        uthin = 0.0
        pthin = 1.0
        t = env.now
        t_old = t
        while uthin < pthin:
            deltat = random.expovariate(MAX_RATE)
            t = t + deltat
            pthin = pthinning
            uthin = random.random()
        CURRENT_ARRIVAL_SUM += t - t_old
        CURRENT_ARRIVAL_COUNT += 1
        yield env.timeout(t - t_old)
print('Average rate: %d, %f' % (hour_slot, CURRENT_ARRIVAL_COUNT/CURRENT_ARRIVAL_SUM))
print('SUM, COUNT: %f. %d' % (CURRENT_ARRIVAL_SUM, CURRENT_ARRIVAL_COUNT))
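# Standalone sketch of the thinning idea used in source() above, for
# reference only (not called by the simulation):
def thinned_interarrival(max_rate, rate):
    """Return one inter-arrival time of a Poisson process with intensity
    `rate`, generated by thinning candidates drawn at `max_rate`
    (requires rate <= max_rate, both floats)."""
    t = 0.0
    while True:
        t += random.expovariate(max_rate)
        if random.random() < rate / max_rate:
            return t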
def customer(env, name, counter, avg_service_time):
global SERVICE_TIME_SUM, SERVICE_TIME_COUNT, TIME_IN_THE_SYSTEM_SUM, latency
"""Customer arrives, is served and leaves."""
arrive = env.now
#print('%7.4f %s: Here I am' % (arrive, name))
with counter.request() as req:
# Wait for the counter or abort at the end of our tether
yield req
wait = env.now - arrive
# Customer request start being served
#print('%7.4f %s: Waiting Time: %7.4f' % (env.now, name, wait))
service_time = random.expovariate(1.0 / avg_service_time)
SERVICE_TIME_SUM += service_time
SERVICE_TIME_COUNT += 1
yield env.timeout(service_time)
#print('%7.4f %s: Serving Time: %7.4f' % (env.now, name, service_time))
#print('%7.4f %s: Finished - Time on the System: %7.4f' % (env.now, name, wait+service_time))
TIME_IN_THE_SYSTEM_SUM += wait+service_time
#latency = numpy.append(latency,wait+service_time)
latency.append(wait+service_time)
############ MAIN FUNCTION
print('Starting Simulations:')
print
hour_slot = 0
total_latency =[]
for hourly_rate in ARRIVAL_RATE:
average_latency = 2*MAX_AVERAGE_LATENCY
reserved_vms = 0
print('=================')
print('Hour Slot: %d' % hour_slot)
while MAX_AVERAGE_LATENCY < average_latency:
reserved_vms += 1
SERVICE_TIME_SUM = 0.0
SERVICE_TIME_COUNT = 0
latency = []
# Setup and start the simulation
print('=====================')
print('Reserved VMs: %d' % reserved_vms)
#random.seed(RANDOM_SEED)
env = simpy.Environment(initial_time=START_TIME)
# Start processes and run
total_capacity = reserved_vms * MAX_CONCURRENT_REQUESTS_PER_VM
counter = simpy.Resource(env, capacity=total_capacity)
env.process(source(env, SIMULATION_TIME / 24, counter, AVERAGE_SERVICE_TIME, hour_slot))
startTime = env.now
env.run()
print('Simulation Time: %7.4f' % (env.now-startTime))
print('Average Service Time: %7.4f' % (SERVICE_TIME_SUM/SERVICE_TIME_COUNT))
average_latency = numpy.average(latency)
print('Average Time in the System: %7.4f' % average_latency)
REQUIRED_VMS = numpy.append(REQUIRED_VMS,reserved_vms)
total_latency += latency
if hour_slot == 12 :
latency_peak = latency
hour_slot += 1
# Print results
print('=====================')
print('=====================')
print('=====================')
print('RESULTS:')
print
print('Max. Required Latency: %7.4f' % MAX_AVERAGE_LATENCY)
print('Average Latency: %7.4f' % numpy.average(total_latency))
print('90th Percentile Latency: %7.4f' % numpy.percentile(total_latency,90))
print('99th Percentile Latency: %7.4f' % numpy.percentile(total_latency,99))
print('Required Virtual Machines per hour slot:')
print(REQUIRED_VMS)
yearly_cost = 0
for required_vms_per_hour in REQUIRED_VMS:
yearly_cost += 365*required_vms_per_hour*VM_HOURLY_COST_ONDEMAND
print('Yearly cost: %7.4f' % (yearly_cost))
print('=====================')
## Print Latencies - ENABLE ONLY FOR DEBUG
#for v in latency_peak: print v
| bsd-3-clause | -5,801,104,839,550,042,000 | 31.811594 | 101 | 0.610559 | false |
gghandsfield/musclePLSR | ju_scripts/src/matlab_strain_2_cm.py | 1 | 2888 | """
Script to convert Matlab-generate strain files to cmiss files
To be used by data in strain_pred_20170712
In each row, each group of 15 data entries correspond to the following for a node
Principal_1
Principal_2
Principal_3
VonMises
Hydrostatic
Octahedral
PrincipalVector1 x
PrincipalVector1 y
PrincipalVector1 z
PrincipalVector2 x
PrincipalVector2 y
PrincipalVector2 z
PrincipalVector3 x
PrincipalVector3 y
PrincipalVector3 z
"""
import numpy as np
import cmissio
#=========================================================================#
# constants
ACTIVATIONS = (0.0, 0.2, 0.4, 0.6, 0.8, 1.0)
LENGTHS = (380, 384, 388, 392, 396, 400)
STRAIN_TEMPLATE_FILESTR = '../data/strain/strainL{}A{}.exdata'
STRAIN_FIELDS = [8,9,10,11,12,13,14,15,16]
STRAIN_FIELD_COMPONENTS = [1,1,1,1,1,1,3,3,3]
# parameters
skips = [(400, 1.0),(396, 1.0),(392, 1.0),(388, 1.0), (384, 1.0),(380, 1.0)] # observations to skip, outliers
plsrK = 2 # number of plsr modes (1 or 2)
responseName = 'geometry' #'geometry', 'stress', or 'strain'
xvalK = 36 - len(skips) # number of folds for k-fold cross validation. For leave 1 out, this
# should be the number of observations
#=========================================================================#
def _wrapExdata(X, fieldComponents):
# wrap X into list of fields
fields = []
nFields = len(fieldComponents)
fi = 0
xi = 0
    while xi < len(X):  # strictly less-than, otherwise a spurious empty field is appended
if fi==0:
fields.append([])
nComps = fieldComponents[fi]
fields[-1].append(X[xi:xi+nComps])
xi += nComps
fi += 1
if fi==nFields:
fi = 0
return fields
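# Illustrative: with fieldComponents=[1, 3], a flat row
# [a, b, c, d, e, f, g, h] wraps into
# [[[a], [b, c, d]], [[e], [f, g, h]]] -- one inner list per node holding
# that node's scalar field followed by its 3-component vector field.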
def writeStrain(X, fname, header):
    fields = _wrapExdata(X, STRAIN_FIELD_COMPONENTS)
    # NOTE: `l` and `a` are the module-level loop variables bound in the loop
    # at the bottom of this script; writeStrain relies on being called only
    # after they are set.
    cmissio.writeExdata(STRAIN_TEMPLATE_FILESTR.format(l, a),
                        fname,
                        header,
                        fields,
                        STRAIN_FIELDS)
#=========================================================================#
# input_fn = '../../strain_pred_20170712/pred_strain.txt'
# out_fn = '../../strain_pred_20170712/pred_exdata/pred_strainL{}A{}.exdata'
# out_header = 'predicted_strain_L{}A{}'
input_fn = '../../strain_pred_20170712/strain.txt'
out_fn = '../../strain_pred_20170712/actual_exdata/actual_strainL{}A{}.exdata'
out_header = 'actual_strain_L{}A{}'
file_data = np.loadtxt(input_fn, delimiter=',') # shape 30,15*nodes
# generate length and activations for each simulation
LA = []
for i, l in enumerate(LENGTHS):
for j, a in enumerate(ACTIVATIONS):
if (l, a) not in skips:
# LA.append([l, a])
LA.append([i+1, j+1])
# for each row (simulation)
for i, d in enumerate(file_data):
l, a = LA[i]
writeStrain(d, out_fn.format(l, a), out_header.format(l, a))
| apache-2.0 | -7,811,895,322,634,500,000 | 29.4 | 112 | 0.563019 | false |
hodger/cyclus | tests/tools.py | 2 | 4942 | from __future__ import print_function
import os
import re
import sys
import imp
import shutil
import unittest
import subprocess
import tempfile
from contextlib import contextmanager
from functools import wraps
from nose.tools import assert_true, assert_equal
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from cyclus import lib as libcyclus
if sys.version_info[0] >= 3:
basestring = str
unit = attr('unit')
integration = attr('integration')
INPUT = os.path.join(os.path.dirname(__file__), "input")
def cleanfs(paths):
"""Removes the paths from the file system."""
for p in paths:
p = os.path.join(*p)
if os.path.isfile(p):
os.remove(p)
elif os.path.isdir(p):
shutil.rmtree(p)
def check_cmd(args, cwd, holdsrtn):
"""Runs a command in a subprocess and verifies that it executed properly.
"""
if not isinstance(args, basestring):
args = " ".join(args)
print("TESTING: running command in {0}:\n\n{1}\n".format(cwd, args))
f = tempfile.NamedTemporaryFile()
env = dict(os.environ)
env['_'] = cp = subprocess.check_output(['which', 'cyclus'], cwd=cwd,
universal_newlines=True).strip()
rtn = subprocess.call(args, shell=True, cwd=cwd, stdout=f, stderr=f, env=env)
if rtn != 0:
f.seek(0)
print('CYCLUS COMMAND: ' + cp)
print("STDOUT + STDERR:\n\n" + f.read().decode())
f.close()
holdsrtn[0] = rtn
assert_equal(rtn, 0)
@contextmanager
def clean_import(name, paths=None):
"""Imports and returns a module context manager and then removes
all modules which didn't originally exist when exiting the block.
Be sure to delete any references to the returned module prior to
exiting the context.
"""
sys.path = paths + sys.path
origmods = set(sys.modules.keys())
mod = imp.load_module(name, *imp.find_module(name, paths))
yield mod
sys.path = sys.path[len(paths):]
del mod
newmods = set(sys.modules.keys()) - origmods
for newmod in newmods:
del sys.modules[newmod]
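# Illustrative use (hypothetical module name 'mymod' importable from INPUT):
#
#   with clean_import('mymod', paths=[INPUT]) as mod:
#       result = mod.something()
#       del mod  # drop the reference before leaving the context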
TESTNAME_RE = re.compile('(?:^|[\\b_\\.-])[Tt]est')
def modtests(mod):
"""Finds all of the tests in a module."""
tests = []
for name in dir(mod):
if TESTNAME_RE.match(name) is None:
continue
test = getattr(mod, name)
if test is unittest.TestCase:
continue
tests.append(test)
return tests
def dirtests(d):
"""Finds all of the test files in a directory."""
files = os.listdir(d)
filenames = []
for file in files:
if not file.endswith('.py'):
continue
if TESTNAME_RE.match(file) is None:
continue
filenames.append(file[:-3])
return filenames
def skip_then_continue(msg=""):
"""A simple function to yield such that a test is marked as skipped
and we may continue on our merry way. A message may be optionally passed
to this function.
"""
raise SkipTest(msg)
@contextmanager
def indir(d):
"""Context manager for switching directorties and then switching back."""
cwd = os.getcwd()
os.chdir(d)
yield
os.chdir(cwd)
#
# Some Database API test helpers
#
LIBCYCLUS_HAS_BEEN_RUN = False
DBS = [('libcyclus-test.h5', 'libcyclus-orig.h5', libcyclus.Hdf5Back),
#('libcyclus-test.sqlite', 'libcyclus-orig.sqlite', libcyclus.SqliteBack)
]
def safe_call(cmd, shell=False, *args, **kwargs):
"""Checks that a command successfully runs with/without shell=True.
Returns the process return code.
"""
try:
        rtn = subprocess.call(cmd, shell=shell, *args, **kwargs)
except (subprocess.CalledProcessError, OSError):
cmd = ' '.join(cmd)
rtn = subprocess.call(cmd, shell=True, *args, **kwargs)
return rtn
def libcyclus_setup():
global LIBCYCLUS_HAS_BEEN_RUN
if not LIBCYCLUS_HAS_BEEN_RUN:
LIBCYCLUS_HAS_BEEN_RUN = True
for fname, oname, _ in DBS:
if os.path.isfile(fname):
os.remove(fname)
if os.path.isfile(oname):
os.remove(oname)
for fname, oname, _ in DBS:
if os.path.isfile(oname):
continue
safe_call(['cyclus', '-o' + oname,
os.path.join(INPUT, 'inventory.xml')])
def dbtest(f):
@wraps(f)
def wrapper():
for fname, oname, backend in DBS:
if os.path.exists(fname):
os.remove(fname)
shutil.copy(oname, fname)
db = backend(fname)
yield f, db, fname, backend
return wrapper
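# Illustrative use of the dbtest decorator (hypothetical test body):
#
#   @dbtest
#   def test_db_file_exists(db, fname, backend):
#       assert_true(os.path.isfile(fname))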
#
# Here there be Hackons!
#
# hack to make sure that we are actually in the tests dir when we start running
# tests. This works because this file is imported by many of the other test
# files.
_fdir = os.path.dirname(__file__)
if os.getcwd() != _fdir:
os.chdir(_fdir)
del _fdir
| bsd-3-clause | -36,318,217,697,277,230 | 26.303867 | 81 | 0.619385 | false |
macosforge/ccs-calendarserver | txdav/caldav/datastore/test/test_index_file.py | 1 | 36284 | ##
# Copyright (c) 2010-2017 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from twisted.internet import reactor
from twisted.internet.defer import inlineCallbacks
from twisted.internet.task import deferLater
from txdav.caldav.datastore.index_file import Index, MemcachedUIDReserver
from txdav.caldav.datastore.query.filter import Filter
from txdav.common.icommondatastore import ReservationError, \
InternalDataStoreError
from twistedcaldav import caldavxml
from twistedcaldav.caldavxml import TimeRange
from twistedcaldav.ical import Component, InvalidICalendarDataError
from twistedcaldav.instance import InvalidOverriddenInstanceError
from twistedcaldav.test.util import InMemoryMemcacheProtocol
import twistedcaldav.test.util
from pycalendar.datetime import DateTime
import os
class MinimalCalendarObjectReplacement(object):
"""
Provide the minimal set of attributes and methods from CalDAVFile required
by L{Index}.
"""
def __init__(self, filePath):
self.fp = filePath
def iCalendar(self):
with self.fp.open() as f:
text = f.read()
try:
component = Component.fromString(text)
# Fix any bogus data we can
component.validCalendarData()
component.validCalendarForCalDAV(methodAllowed=False)
except InvalidICalendarDataError, e:
raise InternalDataStoreError(
"File corruption detected (%s) in file: %s"
                % (e, self.fp.path)
)
return component
class MinimalResourceReplacement(object):
"""
Provide the minimal set of attributes and methods from CalDAVFile required
by L{Index}.
"""
class MinimalTxn(object):
def postCommit(self, _ignore):
pass
def postAbort(self, _ignore):
pass
def __init__(self, filePath):
self.fp = filePath
self._txn = MinimalResourceReplacement.MinimalTxn()
def isCalendarCollection(self):
return True
def getChild(self, name):
# FIXME: this should really return something with a child method
return MinimalCalendarObjectReplacement(self.fp.child(name))
def initSyncToken(self):
pass
class SQLIndexTests (twistedcaldav.test.util.TestCase):
"""
Test abstract SQL DB class
"""
def setUp(self):
super(SQLIndexTests, self).setUp()
self.site.resource.isCalendarCollection = lambda: True
self.indexDirPath = self.site.resource.fp
# FIXME: since this resource lies about isCalendarCollection, it doesn't
# have all the associated backend machinery to actually get children.
self.db = Index(MinimalResourceReplacement(self.indexDirPath))
def tearDown(self):
self.db._db_close()
def test_reserve_uid_ok(self):
uid = "test-test-test"
d = self.db.isReservedUID(uid)
d.addCallback(self.assertFalse)
d.addCallback(lambda _: self.db.reserveUID(uid))
d.addCallback(lambda _: self.db.isReservedUID(uid))
d.addCallback(self.assertTrue)
d.addCallback(lambda _: self.db.unreserveUID(uid))
d.addCallback(lambda _: self.db.isReservedUID(uid))
d.addCallback(self.assertFalse)
return d
def test_reserve_uid_twice(self):
uid = "test-test-test"
d = self.db.reserveUID(uid)
d.addCallback(lambda _: self.db.isReservedUID(uid))
d.addCallback(self.assertTrue)
d.addCallback(lambda _:
self.assertFailure(self.db.reserveUID(uid),
ReservationError))
return d
def test_unreserve_unreserved(self):
uid = "test-test-test"
return self.assertFailure(self.db.unreserveUID(uid),
ReservationError)
def test_reserve_uid_timeout(self):
# WARNING: This test is fundamentally flawed and will fail
# intermittently because it uses the real clock.
uid = "test-test-test"
from twistedcaldav.config import config
old_timeout = config.UIDReservationTimeOut
config.UIDReservationTimeOut = 1
def _finally():
config.UIDReservationTimeOut = old_timeout
d = self.db.isReservedUID(uid)
d.addCallback(self.assertFalse)
d.addCallback(lambda _: self.db.reserveUID(uid))
d.addCallback(lambda _: self.db.isReservedUID(uid))
d.addCallback(self.assertTrue)
d.addCallback(lambda _: deferLater(reactor, 2, lambda: None))
d.addCallback(lambda _: self.db.isReservedUID(uid))
d.addCallback(self.assertFalse)
self.addCleanup(_finally)
return d
def test_index(self):
data = (
(
"#1.1 Simple component",
"1.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
False,
True,
),
(
"#2.1 Recurring component",
"2.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
END:VCALENDAR
""",
False,
True,
),
(
"#2.2 Recurring component with override",
"2.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.2
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-2.2
RECURRENCE-ID:20080608T120000Z
DTSTART:20080608T120000Z
DTEND:20080608T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
False,
True,
),
(
"#2.3 Recurring component with broken override - new",
"2.3",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.3
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-2.3
RECURRENCE-ID:20080609T120000Z
DTSTART:20080608T120000Z
DTEND:20080608T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
False,
False,
),
(
"#2.4 Recurring component with broken override - existing",
"2.4",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.4
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-2.4
RECURRENCE-ID:20080609T120000Z
DTSTART:20080608T120000Z
DTEND:20080608T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
True,
True,
),
)
for description, name, calendar_txt, reCreate, ok in data:
calendar = Component.fromString(calendar_txt)
if ok:
with open(os.path.join(self.indexDirPath.path, name), "w") as f:
f.write(calendar_txt)
self.db.addResource(name, calendar, reCreate=reCreate)
self.assertTrue(self.db.resourceExists(name), msg=description)
else:
self.assertRaises(InvalidOverriddenInstanceError, self.db.addResource, name, calendar)
self.assertFalse(self.db.resourceExists(name), msg=description)
self.db._db_recreate()
for description, name, calendar_txt, reCreate, ok in data:
if ok:
self.assertTrue(self.db.resourceExists(name), msg=description)
else:
self.assertFalse(self.db.resourceExists(name), msg=description)
self.db.testAndUpdateIndex(DateTime(2020, 1, 1))
for description, name, calendar_txt, reCreate, ok in data:
if ok:
self.assertTrue(self.db.resourceExists(name), msg=description)
else:
self.assertFalse(self.db.resourceExists(name), msg=description)
@inlineCallbacks
def test_index_timerange(self):
"""
A plain (not freebusy) time range test.
"""
data = (
(
"#1.1 Simple component - busy",
"1.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
"20080601T000000Z", "20080602T000000Z",
),
(
"#1.2 Simple component - transparent",
"1.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.2
DTSTART:20080602T120000Z
DTEND:20080602T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
""",
"20080602T000000Z", "20080603T000000Z",
),
(
"#1.3 Simple component - canceled",
"1.3",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.3
DTSTART:20080603T120000Z
DTEND:20080603T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
STATUS:CANCELLED
END:VEVENT
END:VCALENDAR
""",
"20080603T000000Z", "20080604T000000Z",
),
(
"#1.4 Simple component - tentative",
"1.4",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.4
DTSTART:20080604T120000Z
DTEND:20080604T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
STATUS:TENTATIVE
END:VEVENT
END:VCALENDAR
""",
"20080604T000000Z", "20080605T000000Z",
),
(
"#2.1 Recurring component - busy",
"2.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.1
DTSTART:20080605T120000Z
DTEND:20080605T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=2
END:VEVENT
END:VCALENDAR
""",
"20080605T000000Z", "20080607T000000Z",
),
(
"#2.2 Recurring component - busy",
"2.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.2
DTSTART:20080607T120000Z
DTEND:20080607T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=2
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-2.2
RECURRENCE-ID:20080608T120000Z
DTSTART:20080608T140000Z
DTEND:20080608T150000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
""",
"20080607T000000Z", "20080609T000000Z",
),
)
for description, name, calendar_txt, trstart, trend in data:
calendar = Component.fromString(calendar_txt)
with open(os.path.join(self.indexDirPath.path, name), "w") as f:
f.write(calendar_txt)
self.db.addResource(name, calendar)
self.assertTrue(self.db.resourceExists(name), msg=description)
# Create fake filter element to match time-range
filter = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
TimeRange(
start=trstart,
end=trend,
),
name=("VEVENT", "VFREEBUSY", "VAVAILABILITY"),
),
name="VCALENDAR",
)
)
filter = Filter(filter)
resources = yield self.db.indexedSearch(filter)
index_results = set()
for found_name, _ignore_uid, _ignore_type in resources:
index_results.add(found_name)
self.assertEqual(set((name,)), index_results, msg=description)
@inlineCallbacks
def test_index_timespan(self):
data = (
(
"#1.1 Simple component - busy",
"1.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
""",
"20080601T000000Z", "20080602T000000Z",
"mailto:[email protected]",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),),
),
(
"#1.2 Simple component - transparent",
"1.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.2
DTSTART:20080602T120000Z
DTEND:20080602T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
""",
"20080602T000000Z", "20080603T000000Z",
"mailto:[email protected]",
(('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'T'),),
),
(
"#1.3 Simple component - canceled",
"1.3",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.3
DTSTART:20080603T120000Z
DTEND:20080603T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
STATUS:CANCELLED
END:VEVENT
END:VCALENDAR
""",
"20080603T000000Z", "20080604T000000Z",
"mailto:[email protected]",
(('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'F', 'F'),),
),
(
"#1.4 Simple component - tentative",
"1.4",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.4
DTSTART:20080604T120000Z
DTEND:20080604T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
STATUS:TENTATIVE
END:VEVENT
END:VCALENDAR
""",
"20080604T000000Z", "20080605T000000Z",
"mailto:[email protected]",
(('N', "2008-06-04 12:00:00", "2008-06-04 13:00:00", 'T', 'F'),),
),
(
"#2.1 Recurring component - busy",
"2.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.1
DTSTART:20080605T120000Z
DTEND:20080605T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=2
END:VEVENT
END:VCALENDAR
""",
"20080605T000000Z", "20080607T000000Z",
"mailto:[email protected]",
(
('N', "2008-06-05 12:00:00", "2008-06-05 13:00:00", 'B', 'F'),
('N', "2008-06-06 12:00:00", "2008-06-06 13:00:00", 'B', 'F'),
),
),
(
"#2.2 Recurring component - busy",
"2.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.2
DTSTART:20080607T120000Z
DTEND:20080607T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=2
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-2.2
RECURRENCE-ID:20080608T120000Z
DTSTART:20080608T140000Z
DTEND:20080608T150000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
TRANSP:TRANSPARENT
END:VEVENT
END:VCALENDAR
""",
"20080607T000000Z", "20080609T000000Z",
"mailto:[email protected]",
(
('N', "2008-06-07 12:00:00", "2008-06-07 13:00:00", 'B', 'F'),
('N', "2008-06-08 14:00:00", "2008-06-08 15:00:00", 'B', 'T'),
),
),
)
for description, name, calendar_txt, trstart, trend, organizer, instances in data:
calendar = Component.fromString(calendar_txt)
with open(os.path.join(self.indexDirPath.path, name), "w") as f:
f.write(calendar_txt)
self.db.addResource(name, calendar)
self.assertTrue(self.db.resourceExists(name), msg=description)
# Create fake filter element to match time-range
filter = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
TimeRange(
start=trstart,
end=trend,
),
name=("VEVENT", "VFREEBUSY", "VAVAILABILITY"),
),
name="VCALENDAR",
)
)
filter = Filter(filter)
resources = yield self.db.indexedSearch(filter, fbtype=True)
index_results = set()
for _ignore_name, _ignore_uid, type, test_organizer, float, start, end, fbtype, transp in resources:
self.assertEqual(test_organizer, organizer, msg=description)
index_results.add((float, start, end, fbtype, transp,))
self.assertEqual(set(instances), index_results, msg=description)
@inlineCallbacks
def test_index_timespan_per_user(self):
data = (
(
"#1.1 Single per-user non-recurring component",
"1.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.1
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080602T000000Z",
"mailto:[email protected]",
(
(
"user01",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),),
),
(
"user02",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),),
),
),
),
(
"#1.2 Two per-user non-recurring component",
"1.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.2
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user02
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080602T000000Z",
"mailto:[email protected]",
(
(
"user01",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),),
),
(
"user02",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),),
),
(
"user03",
(('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),),
),
),
),
(
"#2.1 Single per-user simple recurring component",
"2.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=10
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.1
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080603T000000Z",
"mailto:[email protected]",
(
(
"user01",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),
('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'T'),
),
),
(
"user02",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'F'),
),
),
),
),
(
"#2.2 Two per-user simple recurring component",
"2.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.2
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=10
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user02
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080603T000000Z",
"mailto:[email protected]",
(
(
"user01",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),
('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'T'),
),
),
(
"user02",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'F'),
),
),
(
"user03",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 12:00:00", "2008-06-02 13:00:00", 'B', 'F'),
),
),
),
),
(
"#3.1 Single per-user complex recurring component",
"3.1",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=10
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-1.1
RECURRENCE-ID:20080602T120000Z
DTSTART:20080602T130000Z
DTEND:20080602T140000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.1
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
BEGIN:X-CALENDARSERVER-PERINSTANCE
RECURRENCE-ID:20080602T120000Z
TRANSP:OPAQUE
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080604T000000Z",
"mailto:[email protected]",
(
(
"user01",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),
('N', "2008-06-02 13:00:00", "2008-06-02 14:00:00", 'B', 'F'),
('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'B', 'T'),
),
),
(
"user02",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 13:00:00", "2008-06-02 14:00:00", 'B', 'F'),
('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'B', 'F'),
),
),
),
),
(
"#3.2 Two per-user complex recurring component",
"3.2",
"""BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.2
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=DAILY;COUNT=10
END:VEVENT
BEGIN:VEVENT
UID:12345-67890-1.2
RECURRENCE-ID:20080602T120000Z
DTSTART:20080602T130000Z
DTEND:20080602T140000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user01
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
BEGIN:X-CALENDARSERVER-PERINSTANCE
RECURRENCE-ID:20080602T120000Z
TRANSP:OPAQUE
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
BEGIN:X-CALENDARSERVER-PERUSER
UID:12345-67890-1.2
X-CALENDARSERVER-PERUSER-UID:user02
BEGIN:X-CALENDARSERVER-PERINSTANCE
BEGIN:VALARM
ACTION:DISPLAY
DESCRIPTION:Test
TRIGGER;RELATED=START:-PT10M
END:VALARM
END:X-CALENDARSERVER-PERINSTANCE
BEGIN:X-CALENDARSERVER-PERINSTANCE
RECURRENCE-ID:20080603T120000Z
TRANSP:TRANSPARENT
END:X-CALENDARSERVER-PERINSTANCE
END:X-CALENDARSERVER-PERUSER
END:VCALENDAR
""",
"20080601T000000Z", "20080604T000000Z",
"mailto:[email protected]",
(
(
"user01",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'T'),
('N', "2008-06-02 13:00:00", "2008-06-02 14:00:00", 'B', 'F'),
('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'B', 'T'),
),
),
(
"user02",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 13:00:00", "2008-06-02 14:00:00", 'B', 'F'),
('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'B', 'T'),
),
),
(
"user03",
(
('N', "2008-06-01 12:00:00", "2008-06-01 13:00:00", 'B', 'F'),
('N', "2008-06-02 13:00:00", "2008-06-02 14:00:00", 'B', 'F'),
('N', "2008-06-03 12:00:00", "2008-06-03 13:00:00", 'B', 'F'),
),
),
),
),
)
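        # Each case above: (description, resource name, calendar text,
        # time-range start, time-range end, expected ORGANIZER value, and a
        # sequence of (per-user uid, expected free-busy instance tuples)).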
for description, name, calendar_txt, trstart, trend, organizer, peruserinstances in data:
calendar = Component.fromString(calendar_txt)
with open(os.path.join(self.indexDirPath.path, name), "w") as f:
f.write(calendar_txt)
self.db.addResource(name, calendar)
self.assertTrue(self.db.resourceExists(name), msg=description)
# Create fake filter element to match time-range
filter = caldavxml.Filter(
caldavxml.ComponentFilter(
caldavxml.ComponentFilter(
TimeRange(
start=trstart,
end=trend,
),
name=("VEVENT", "VFREEBUSY", "VAVAILABILITY"),
),
name="VCALENDAR",
)
)
filter = Filter(filter)
for useruid, instances in peruserinstances:
resources = yield self.db.indexedSearch(filter, useruid=useruid, fbtype=True)
index_results = set()
                for _ignore_name, _ignore_uid, _ignore_type, test_organizer, floating, start, end, fbtype, transp in resources:
                    self.assertEqual(test_organizer, organizer, msg=description)
                    index_results.add((str(floating), str(start), str(end), str(fbtype), str(transp),))
self.assertEqual(set(instances), index_results, msg="%s, user:%s" % (description, useruid,))
self.db.deleteResource(name)
def test_index_revisions(self):
data1 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-1.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
END:VEVENT
END:VCALENDAR
"""
data2 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.1
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
END:VCALENDAR
"""
data3 = """BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//CALENDARSERVER.ORG//NONSGML Version 1//EN
BEGIN:VEVENT
UID:12345-67890-2.3
DTSTART:20080601T120000Z
DTEND:20080601T130000Z
DTSTAMP:20080601T120000Z
ORGANIZER;CN="User 01":mailto:[email protected]
ATTENDEE:mailto:[email protected]
ATTENDEE:mailto:[email protected]
RRULE:FREQ=WEEKLY;COUNT=2
END:VEVENT
END:VCALENDAR
"""
calendar = Component.fromString(data1)
self.db.addResource("data1.ics", calendar)
calendar = Component.fromString(data2)
self.db.addResource("data2.ics", calendar)
calendar = Component.fromString(data3)
self.db.addResource("data3.ics", calendar)
self.db.deleteResource("data3.ics")
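        # Each case pairs a starting revision with the three name lists
        # expected back from whatchanged(); the third list stays empty here.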
tests = (
(0, (["data1.ics", "data2.ics", ], [], [],)),
(1, (["data2.ics", ], ["data3.ics", ], [],)),
(2, ([], ["data3.ics", ], [],)),
(3, ([], ["data3.ics", ], [],)),
(4, ([], [], [],)),
(5, ([], [], [],)),
)
for revision, results in tests:
self.assertEquals(self.db.whatchanged(revision), results, "Mismatched results for whatchanged with revision %d" % (revision,))
class MemcacheTests(SQLIndexTests):
def setUp(self):
super(MemcacheTests, self).setUp()
self.memcache = InMemoryMemcacheProtocol()
self.db.reserver = MemcachedUIDReserver(self.db, self.memcache)
def tearDown(self):
super(MemcacheTests, self).tearDown()
for _ignore_k, v in self.memcache._timeouts.iteritems():
if v.active():
v.cancel()
| apache-2.0 | -2,447,090,324,838,265,300 | 30.198624 | 138 | 0.600182 | false |
spookylukey/django-autocomplete-light | autocomplete_light/autocomplete/generic.py | 1 | 3905 | from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from autocomplete_light.generic import GenericModelChoiceField
from .model import AutocompleteModel
__all__ = ['AutocompleteGeneric']
class AutocompleteGeneric(AutocompleteModel):
"""
Autocomplete which considers choices as a list of querysets. It inherits
from AutocompleteModel so make sure that you've read the docs and
docstrings for AutocompleteModel before using this class.
choices
A list of querysets.
search_fields
A list of lists of fields to search in, configurable like on
ModelAdmin.search_fields. The first list of fields will be used for the
first queryset in choices and so on.
AutocompleteGeneric inherits from AutocompleteModel and supports
`limit_choices` and `split_words` exactly like AutocompleteModel.
    However, `order_by` is not supported (yet) in AutocompleteGeneric.
"""
choices = None
search_fields = None
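    # Illustrative sketch (added; not from the original source), assuming
    # hypothetical Author and Book models. Each entry in search_fields pairs
    # positionally with the queryset at the same index in choices:
    #
    #     class AutocompleteAuthorOrBook(AutocompleteGeneric):
    #         choices = [Author.objects.all(), Book.objects.all()]
    #         search_fields = [('first_name', 'last_name'), ('title',)]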
def choice_value(self, choice):
"""
Rely on GenericModelChoiceField to return a string containing the
content type id and object id of the result.
Because this autocomplete is made for that field, and to avoid code
duplication.
"""
field = GenericModelChoiceField()
return field.prepare_value(choice)
def validate_values(self):
"""
Ensure that every choice is part of a queryset.
"""
assert self.choices, 'autocomplete.choices should be a queryset list'
for value in self.values:
if not isinstance(value, basestring):
return False
try:
content_type_id, object_id = value.split('-', 1)
except ValueError:
return False
try:
content_type = ContentType.objects.get_for_id(content_type_id)
except ContentType.DoesNotExist:
return False
model_class = content_type.model_class()
found = False
for queryset in self.choices:
if queryset.model != model_class:
continue
if queryset.filter(pk=object_id).count() == 1:
found = True
else:
return False
if not found:
                # maybe a user would cheat by using a forbidden ctype id!
return False
return True
def choices_for_request(self):
"""
        Return up to limit_choices results, splitting the budget across the
        querysets in choices.
"""
assert self.choices, 'autocomplete.choices should be a queryset list'
q = self.request.GET.get('q', '')
request_choices = []
querysets_left = len(self.choices)
i = 0
for queryset in self.choices:
conditions = self._choices_for_request_conditions(q,
self.search_fields[i])
limit = ((self.limit_choices - len(request_choices)) /
querysets_left)
for choice in queryset.filter(conditions)[:limit]:
request_choices.append(choice)
querysets_left -= 1
i += 1
return request_choices
def choices_for_values(self):
"""
Values which are not found in the querysets are ignored.
"""
values_choices = []
for queryset in self.choices:
ctype = ContentType.objects.get_for_model(queryset.model).pk
try:
ids = [x.split('-')[1] for x in self.values
if x is not None and int(x.split('-')[0]) == ctype]
except ValueError:
continue
for choice in queryset.filter(pk__in=ids):
values_choices.append(choice)
return values_choices
| mit | 6,551,789,736,588,345,000 | 29.992063 | 79 | 0.591549 | false |
noironetworks/group-based-policy | gbpservice/_i18n.py | 1 | 1052 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
DOMAIN = "gbpservice"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
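# Illustrative usage (added; not part of the original module): mark a string
# at the call site so oslo.i18n can translate it lazily, e.g.
#
#     from gbpservice._i18n import _
#     raise ValueError(_("invalid policy target"))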
def get_available_languages():
return oslo_i18n.get_available_languages(DOMAIN)
| apache-2.0 | 7,634,804,986,224,047,000 | 31.875 | 78 | 0.739544 | false |
fregaham/DISP | sqlobject/tests/test_indexes.py | 1 | 1088 | from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
## Indexes
########################################
class SOIndex1(SQLObject):
name = StringCol(length=100)
number = IntCol()
nameIndex = DatabaseIndex('name', unique=True)
nameIndex2 = DatabaseIndex(name, number)
nameIndex3 = DatabaseIndex({'column': name,
'length': 3})
class SOIndex2(SQLObject):
name = StringCol()
nameIndex = DatabaseIndex({'expression': 'lower(name)'})
def test_1():
setupClass(SOIndex1)
n = 0
for name in 'blah blech boring yep yort snort'.split():
n += 1
SOIndex1(name=name, number=n)
mod = SOIndex1._connection.module
try:
SOIndex1(name='blah', number=0)
except (mod.ProgrammingError, mod.IntegrityError, mod.OperationalError, mod.DatabaseError):
# expected
pass
else:
assert 0, "Exception expected."
def test_2():
if not supports('expressionIndex'):
return
setupClass(SOIndex2)
SOIndex2(name='')
| gpl-2.0 | 103,308,581,540,977,470 | 24.904762 | 95 | 0.581801 | false |
fuziontech/svb | svb/test/test_http_client.py | 1 | 13560 | import sys
import unittest2
from mock import MagicMock, Mock, patch
import svb
from svb.test.helper import SvbUnitTestCase
VALID_API_METHODS = ('get', 'post', 'delete')
class HttpClientTests(SvbUnitTestCase):
def setUp(self):
super(HttpClientTests, self).setUp()
self.original_filters = svb.http_client.warnings.filters[:]
svb.http_client.warnings.simplefilter('ignore')
def tearDown(self):
svb.http_client.warnings.filters = self.original_filters
super(HttpClientTests, self).tearDown()
def check_default(self, none_libs, expected):
for lib in none_libs:
setattr(svb.http_client, lib, None)
inst = svb.http_client.new_default_http_client()
self.assertTrue(isinstance(inst, expected))
def test_new_default_http_client_urlfetch(self):
self.check_default((),
svb.http_client.UrlFetchClient)
def test_new_default_http_client_requests(self):
self.check_default(('urlfetch',),
svb.http_client.RequestsClient)
def test_new_default_http_client_pycurl(self):
self.check_default(('urlfetch', 'requests',),
svb.http_client.PycurlClient)
def test_new_default_http_client_urllib2(self):
self.check_default(('urlfetch', 'requests', 'pycurl'),
svb.http_client.Urllib2Client)
class ClientTestBase():
@property
def request_mock(self):
return self.request_mocks[self.request_client.name]
@property
def valid_url(self, path='/foo'):
return 'https://api.svb.com%s' % (path,)
def make_request(self, method, url, headers, post_data):
client = self.request_client(verify_ssl_certs=True)
return client.request(method, url, headers, post_data)
    def mock_response(self, mock, body, code):
        raise NotImplementedError(
            'You must implement this in your test subclass')
    def mock_error(self, mock):
        raise NotImplementedError(
            'You must implement this in your test subclass')
    def check_call(self, mock, meth, url, post_data, headers):
        raise NotImplementedError(
            'You must implement this in your test subclass')
def test_request(self):
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
for meth in VALID_API_METHODS:
abs_url = self.valid_url
data = ''
if meth != 'post':
abs_url = '%s?%s' % (abs_url, data)
data = None
headers = {'my-header': 'header val'}
body, code, _ = self.make_request(
meth, abs_url, headers, data)
self.assertEqual(200, code)
self.assertEqual('{"foo": "baz"}', body)
self.check_call(self.request_mock, meth, abs_url,
data, headers)
def test_exception(self):
self.mock_error(self.request_mock)
self.assertRaises(svb.error.APIConnectionError,
self.make_request,
'get', self.valid_url, {}, None)
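# Equality helper for assert_called_with: it matches any string ending with
# the bundled CA bundle path, so tests don't hard-code an absolute path.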
class RequestsVerify(object):
def __eq__(self, other):
return other and other.endswith('svb/data/ca-certificates.crt')
class RequestsClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.RequestsClient
def setUp(self):
super(RequestsClientTests, self).setUp()
self.session = MagicMock()
def test_timeout(self):
headers = {'my-header': 'header val'}
data = ''
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
self.make_request('POST', self.valid_url,
headers, data, timeout=5)
self.check_call(None, 'POST', self.valid_url,
data, headers, timeout=5)
def make_request(self, method, url, headers, post_data, timeout=80):
client = self.request_client(verify_ssl_certs=True,
timeout=timeout,
proxy='http://slap/')
return client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
self.session.request = MagicMock(return_value=result)
mock.Session = MagicMock(return_value=self.session)
def mock_error(self, mock):
mock.exceptions.RequestException = Exception
self.session.request.side_effect = mock.exceptions.RequestException()
mock.Session = MagicMock(return_value=self.session)
# Note that unlike other modules, we don't use the "mock" argument here
# because we need to run the request call against the internal mock
# session.
def check_call(self, mock, meth, url, post_data, headers, timeout=80):
self.session.request. \
assert_called_with(meth, url,
headers=headers,
data=post_data,
verify=RequestsVerify(),
proxies={"http": "http://slap/",
"https": "http://slap/"},
timeout=timeout)
class UrlFetchClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.UrlFetchClient
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
mock.fetch = Mock(return_value=result)
def mock_error(self, mock):
mock.Error = mock.InvalidURLError = Exception
mock.fetch.side_effect = mock.InvalidURLError()
def check_call(self, mock, meth, url, post_data, headers):
mock.fetch.assert_called_with(
url=url,
method=meth,
headers=headers,
validate_certificate=True,
deadline=55,
payload=post_data
)
class Urllib2ClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.Urllib2Client
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
        response = Mock()
response.read = Mock(return_value=body)
response.code = code
response.info = Mock(return_value={})
self.request_object = Mock()
mock.Request = Mock(return_value=self.request_object)
mock.urlopen = Mock(return_value=response)
        opener = Mock()
opener.open = Mock(return_value=response)
mock.build_opener = Mock(return_value=opener)
mock.build_opener.open = opener.open
mock.ProxyHandler = Mock(return_value=opener)
mock.urlopen = Mock(return_value=response)
def mock_error(self, mock):
mock.urlopen.side_effect = ValueError
mock.build_opener().open.side_effect = ValueError
mock.build_opener.reset_mock()
def check_call(self, mock, meth, url, post_data, headers):
        if sys.version_info >= (3, 0) and isinstance(post_data, str):
post_data = post_data.encode('utf-8')
mock.Request.assert_called_with(url, post_data, headers)
if (self.client._proxy):
self.assertTrue(type(self.client._proxy) is dict)
mock.ProxyHandler.assert_called_with(self.client._proxy)
mock.build_opener.open.assert_called_with(self.request_object)
self.assertTrue(not mock.urlopen.called)
if (not self.client._proxy):
mock.urlopen.assert_called_with(self.request_object)
self.assertTrue(not mock.build_opener.called)
self.assertTrue(not mock.build_opener.open.called)
class Urllib2ClientHttpsProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap/",
"https": "http://slap/"})
class Urllib2ClientHttpProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://slap/")
class PycurlClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.PycurlClient
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
@property
def request_mock(self):
if not hasattr(self, 'curl_mock'):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock = Mock()
lib_mock.Curl = Mock(return_value=self.curl_mock)
return self.curl_mock
def setUp(self):
super(PycurlClientTests, self).setUp()
self.bio_patcher = patch('svb.util.io.BytesIO')
bio_mock = Mock()
self.bio_patcher.start().return_value = bio_mock
self.bio_getvalue = bio_mock.getvalue
def tearDown(self):
super(PycurlClientTests, self).tearDown()
self.bio_patcher.stop()
def mock_response(self, mock, body, code):
self.bio_getvalue.return_value = body.encode('utf-8')
mock.getinfo.return_value = code
def mock_error(self, mock):
class FakeException(BaseException):
@property
def args(self):
return ('foo', 'bar')
svb.http_client.pycurl.error = FakeException
mock.perform.side_effect = svb.http_client.pycurl.error
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
# A note on methodology here: we don't necessarily need to verify
# _every_ call to setopt, but check a few of them to make sure the
# right thing is happening. Keep an eye specifically on conditional
# statements where things are more likely to go wrong.
self.curl_mock.setopt.assert_any_call(lib_mock.NOSIGNAL, 1)
self.curl_mock.setopt.assert_any_call(lib_mock.URL,
svb.util.utf8(url))
if meth == 'get':
self.curl_mock.setopt.assert_any_call(lib_mock.HTTPGET, 1)
elif meth == 'post':
self.curl_mock.setopt.assert_any_call(lib_mock.POST, 1)
else:
self.curl_mock.setopt.assert_any_call(lib_mock.CUSTOMREQUEST,
meth.upper())
self.curl_mock.perform.assert_any_call()
class PycurlClientHttpProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://user:withPwd@slap:8888/")
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 8888)
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYUSERPWD,
"user:withPwd")
super(PycurlClientHttpProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class PycurlClientHttpsProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap:8888/",
"https": "http://slap2:444/"})
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap2")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 444)
super(PycurlClientHttpsProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class APIEncodeTest(SvbUnitTestCase):
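    # _api_encode flattens nested parameters into Rails-style form keys:
    # nested dicts become parent[child] and lists become parent[].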
def test_encode_dict(self):
body = {
'foo': {
'dob': {
'month': 1,
},
'name': 'bat'
},
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[dob][month]', 1) in values)
self.assertTrue(('foo[name]', 'bat') in values)
def test_encode_array(self):
body = {
'foo': [{
'dob': {
'month': 1,
},
'name': 'bat'
}],
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[][dob][month]', 1) in values)
self.assertTrue(('foo[][name]', 'bat') in values)
if __name__ == '__main__':
unittest2.main()
| mit | -1,805,596,033,668,120,800 | 33.416244 | 77 | 0.591667 | false |
beeftornado/sentry | tests/sentry/integrations/slack/test_integration.py | 1 | 6673 | from __future__ import absolute_import
import responses
import six
from six.moves.urllib.parse import parse_qs, urlencode, urlparse
from sentry.integrations.slack import SlackIntegrationProvider, SlackIntegration
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
Identity,
IdentityProvider,
IdentityStatus,
Integration,
OrganizationIntegration,
)
from sentry.testutils import IntegrationTestCase, TestCase
from sentry.testutils.helpers import override_options
class SlackIntegrationTest(IntegrationTestCase):
provider = SlackIntegrationProvider
def assert_setup_flow(
self,
team_id="TXXXXXXX1",
authorizing_user_id="UXXXXXXX1",
expected_client_id="slack-client-id",
expected_client_secret="slack-client-secret",
):
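        # Drive the full OAuth v2 install: fetch the initial redirect, check
        # the authorize URL, then hit the setup callback with a code while
        # the token exchange and team.info calls are mocked below.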
responses.reset()
resp = self.client.get(self.init_path)
assert resp.status_code == 302
redirect = urlparse(resp["Location"])
assert redirect.scheme == "https"
assert redirect.netloc == "slack.com"
assert redirect.path == "/oauth/v2/authorize"
params = parse_qs(redirect.query)
scopes = self.provider.identity_oauth_scopes
assert params["scope"] == [" ".join(scopes)]
assert params["state"]
assert params["redirect_uri"] == ["http://testserver/extensions/slack/setup/"]
assert params["response_type"] == ["code"]
assert params["client_id"] == [expected_client_id]
assert params.get("user_scope") == ["links:read"]
        # once we've asserted on the lists, collapse each one to a single
        # value to make life easier
authorize_params = {k: v[0] for k, v in six.iteritems(params)}
access_json = {
"ok": True,
"access_token": "xoxb-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
"team": {"id": team_id, "name": "Example"},
"authed_user": {"id": authorizing_user_id},
}
responses.add(responses.POST, "https://slack.com/api/oauth.v2.access", json=access_json)
responses.add(
responses.GET,
"https://slack.com/api/team.info",
json={
"ok": True,
"team": {
"domain": "test-slack-workspace",
"icon": {"image_132": "http://example.com/ws_icon.jpg"},
},
},
)
resp = self.client.get(
u"{}?{}".format(
self.setup_path,
urlencode({"code": "oauth-code", "state": authorize_params["state"]}),
)
)
mock_request = responses.calls[0].request
req_params = parse_qs(mock_request.body)
assert req_params["grant_type"] == ["authorization_code"]
assert req_params["code"] == ["oauth-code"]
assert req_params["redirect_uri"] == ["http://testserver/extensions/slack/setup/"]
assert req_params["client_id"] == [expected_client_id]
assert req_params["client_secret"] == [expected_client_secret]
assert resp.status_code == 200
self.assertDialogSuccess(resp)
@responses.activate
def test_bot_flow(self):
self.assert_setup_flow()
integration = Integration.objects.get(provider=self.provider.key)
assert integration.external_id == "TXXXXXXX1"
assert integration.name == "Example"
assert integration.metadata == {
"access_token": "xoxb-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
"scopes": sorted(self.provider.identity_oauth_scopes),
"icon": "http://example.com/ws_icon.jpg",
"domain_name": "test-slack-workspace.slack.com",
"installation_type": "born_as_bot",
}
oi = OrganizationIntegration.objects.get(
integration=integration, organization=self.organization
)
assert oi.config == {}
idp = IdentityProvider.objects.get(type="slack", external_id="TXXXXXXX1")
identity = Identity.objects.get(idp=idp, user=self.user, external_id="UXXXXXXX1")
assert identity.status == IdentityStatus.VALID
audit_entry = AuditLogEntry.objects.get(event=AuditLogEntryEvent.INTEGRATION_ADD)
assert audit_entry.get_note() == "installed Example for the slack integration"
@responses.activate
def test_multiple_integrations(self):
self.assert_setup_flow()
self.assert_setup_flow(team_id="TXXXXXXX2", authorizing_user_id="UXXXXXXX2")
integrations = Integration.objects.filter(provider=self.provider.key).order_by(
"external_id"
)
assert integrations.count() == 2
assert integrations[0].external_id == "TXXXXXXX1"
assert integrations[1].external_id == "TXXXXXXX2"
oi = OrganizationIntegration.objects.get(
integration=integrations[1], organization=self.organization
)
assert oi.config == {}
idps = IdentityProvider.objects.filter(type="slack")
assert idps.count() == 2
identities = Identity.objects.all()
assert identities.count() == 2
assert identities[0].external_id != identities[1].external_id
assert identities[0].idp != identities[1].idp
@responses.activate
def test_reassign_user(self):
self.assert_setup_flow()
identity = Identity.objects.get()
assert identity.external_id == "UXXXXXXX1"
self.assert_setup_flow(authorizing_user_id="UXXXXXXX2")
identity = Identity.objects.get()
assert identity.external_id == "UXXXXXXX2"
@responses.activate
def test_install_v2(self):
with override_options(
{"slack-v2.client-id": "other-id", "slack-v2.client-secret": "other-secret"}
):
self.assert_setup_flow(
expected_client_id="other-id", expected_client_secret="other-secret",
)
class SlackIntegrationConfigTest(TestCase):
def setUp(self):
self.integration = Integration.objects.create(provider="slack", name="Slack", metadata={})
self.installation = SlackIntegration(self.integration, self.organization.id)
def test_config_data_workspace_app(self):
        assert self.installation.get_config_data()["installationType"] == "workspace_app"
def test_config_data_user_token(self):
self.integration.metadata["user_access_token"] = "token"
        assert self.installation.get_config_data()["installationType"] == "classic_bot"
def test_config_data_born_as_bot(self):
self.integration.metadata["installation_type"] = "born_as_bot"
        assert self.installation.get_config_data()["installationType"] == "born_as_bot"
| bsd-3-clause | -5,141,144,303,626,453,000 | 36.27933 | 98 | 0.624157 | false |
redlink-gmbh/redlink-python-sdk | tests/test-clients.py | 1 | 1379 | # -*- coding: utf8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nose.tools import assert_true, raises
from .utils import setup_func, with_setup_args
import redlink
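# Note (added for clarity): setup_func from .utils is assumed to supply a
# valid Redlink API key, which with_setup_args passes to each test as `key`.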
@raises(ValueError)
def test_none_key_analysis_client():
redlink.create_analysis_client(None)
@raises(ValueError)
def test_none_key_data_client():
redlink.create_data_client(None)
@raises(ValueError)
def test_empty_key_analysis_client():
redlink.create_analysis_client("")
@raises(ValueError)
def test_empty_key_data_client():
redlink.create_data_client("")
@with_setup_args(setup_func)
def test_analysis_client_status(key):
analysis = redlink.create_analysis_client(key)
assert_true(analysis.status["accessible"])
@with_setup_args(setup_func)
def test_data_client_status(key):
data = redlink.create_data_client(key)
assert_true(data.status["accessible"])
| apache-2.0 | -4,130,120,495,207,902,700 | 26.58 | 74 | 0.743292 | false |