repo_name (string, len 5..92) | path (string, len 4..232) | copies (19 classes) | size (string, len 4..7) | content (string, len 721..1.04M) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000..9,223,102,107B) | line_mean (float64, 6.51..99.9) | line_max (int64, 15..997) | alpha_frac (float64, 0.25..0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
RPGOne/Skynet | 5230d93ccc9fa5329b0a02a351b02939-459eebff35e625675d2f6ff5633c7051c1d64a0e/gistfile1.py | 1 | 3974 | """
python speedup_kmeans.py --profile
python speedup_kmeans.py
git worktree add workdir_master master
rob sedr "\<sklearn\>" sklearn_master True
git mv sklearn sklearn_master
python setup.py develop
python -c "import sklearn_master; print(sklearn_master.__file__)"
python -c "import sklearn; print(sklearn.__file__)"
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import utool as ut
import sklearn # NOQA
from sklearn.datasets.samples_generator import make_blobs
from sklearn.utils.extmath import row_norms, squared_norm # NOQA
import sklearn.cluster
import numpy as np
from sklearn.metrics.pairwise import euclidean_distances # NOQA
import sklearn_master.cluster
(print, rrr, profile) = ut.inject2(__name__, '[tester]')
def test_kmeans_plus_plus_speed(n_clusters=2000, n_features=128, per_cluster=10, asint=False, fix=True):
"""
from speedup_kmeans import *
from sklearn.cluster.k_means_ import *
"""
rng = np.random.RandomState(42)
# Make random cluster centers on a ball
centers = rng.rand(n_clusters, n_features)
centers /= np.linalg.norm(centers, axis=0)[None, :]
centers = (centers * 512).astype(np.uint8) / 512
centers /= np.linalg.norm(centers, axis=0)[None, :]
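# note: axis=0 normalizes each *feature column* across centers; placing each
# center on the unit ball (as the comment above says) would instead use
# np.linalg.norm(centers, axis=1)[:, None]. Left as in the original benchmark.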
n_samples = int(n_clusters * per_cluster)
n_clusters, n_features = centers.shape
X, true_labels = make_blobs(n_samples=n_samples, centers=centers,
cluster_std=1., random_state=42)
if asint:
X = (X * 512).astype(np.int32)
x_squared_norms = row_norms(X, squared=True)
if fix:
_k_init = sklearn.cluster.k_means_._k_init
else:
_k_init = sklearn_master.cluster.k_means_._k_init
random_state = np.random.RandomState(42)
n_local_trials = None # NOQA
with ut.Timer('testing kmeans init') as t:
centers = _k_init(X, n_clusters, random_state=random_state, x_squared_norms=x_squared_norms)
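# 'ellapsed' (sic) is the attribute name that utool's Timer actually exposes.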
return centers, t.ellapsed
def main():
if True:
import pandas as pd
pd.options.display.max_rows = 1000
pd.options.display.width = 1000
basis = {
#'n_clusters': [10, 100, 1000, 2000][::-1],
#'n_features': [4, 32, 128, 512][::-1],
#'per_cluster': [1, 10, 100, 200][::-1],
'n_clusters': [10, 100, 500][::-1],
'n_features': [32, 128][::-1],
'per_cluster': [1, 10, 20][::-1],
'asint': [True, False],
}
vals = []
for kw in ut.ProgIter(ut.all_dict_combinations(basis), lbl='gridsearch',
bs=False, adjust=False, freq=1):
print('kw = ' + ut.repr2(kw))
exec(ut.execstr_dict(kw))
centers1, new_speed = test_kmeans_plus_plus_speed(fix=True, **kw)
centers2, old_speed = test_kmeans_plus_plus_speed(fix=False, **kw)
import utool
with utool.embed_on_exception_context:
assert np.all(centers1 == centers2), 'new code disagrees'
kw['new_speed'] = new_speed
kw['old_speed'] = old_speed
vals.append(kw)
print('---------')
df = pd.DataFrame.from_dict(vals)
df['percent_change'] = 100 * (df['old_speed'] - df['new_speed']) / df['old_speed']
df = df.reindex(columns=list(basis.keys()) + ['new_speed', 'old_speed', 'percent_change'])
df['absolute_change'] = (df['old_speed'] - df['new_speed'])
print(df.sort_values('absolute_change', ascending=False))
#print(df)
print(df['percent_change'][df['absolute_change'] > .1].mean())
#print(df.loc[df['percent_change'].argsort()[::-1]])
else:
new_speed = test_kmeans_plus_plus_speed()
try:
profile.dump_stats('out.lprof')
profile.print_stats(stripzeros=True)
except Exception:
pass
print('new_speed = %r' % (new_speed,))
if __name__ == '__main__':
main()
| bsd-3-clause | -8,699,514,571,054,754,000 | 35.127273 | 104 | 0.593357 | false |
ooici/marine-integrations | mi/dataset/driver/hypm/ctd/driver.py | 1 | 1597 | """
@package mi.dataset.driver.hypm.ctd.driver
@file marine-integrations/mi/dataset/driver/hypm/ctd/driver.py
@author Bill French
@brief Driver for the hypm/ctd
Release notes:
initial release
"""
__author__ = 'Bill French'
__license__ = 'Apache 2.0'
from mi.core.log import get_logger ; log = get_logger()
from mi.dataset.dataset_driver import SimpleDataSetDriver
from mi.dataset.parser.ctdpf import CtdpfParser
from mi.dataset.parser.ctdpf import CtdpfParserDataParticle
from mi.dataset.harvester import SingleDirectoryHarvester
class HypmCTDPFDataSetDriver(SimpleDataSetDriver):
@classmethod
def stream_config(cls):
return [CtdpfParserDataParticle.type()]
def _build_parser(self, parser_state, infile):
config = self._parser_config
config.update({
'particle_module': 'mi.dataset.parser.ctdpf',
'particle_class': 'CtdpfParserDataParticle'
})
log.debug("MYCONFIG: %s", config)
self._parser = CtdpfParser(
config,
parser_state,
infile,
self._save_parser_state,
self._data_callback,
self._sample_exception_callback
)
return self._parser
def _build_harvester(self, driver_state):
"""
Build and return the harvester
"""
self._harvester = SingleDirectoryHarvester(
self._harvester_config,
driver_state,
self._new_file_callback,
self._modified_file_callback,
self._exception_callback
)
return self._harvester
| bsd-2-clause | 1,017,401,072,070,529,700 | 27.017544 | 62 | 0.639324 | false |
kzys/buildbot | buildbot/test/unit/test_svnpoller.py | 1 | 16273 | # -*- test-case-name: buildbot.test.test_svnpoller -*-
import time
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.changes.svnpoller import SVNPoller
# this is the output of "svn info --xml
# svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk"
prefix_output = """\
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="trunk"
revision="18354">
<url>svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk</url>
<repository>
<root>svn+ssh://svn.twistedmatrix.com/svn/Twisted</root>
<uuid>bbbe8e31-12d6-0310-92fd-ac37d47ddeeb</uuid>
</repository>
<commit
revision="18352">
<author>jml</author>
<date>2006-10-01T02:37:34.063255Z</date>
</commit>
</entry>
</info>
"""
# and this is "svn info --xml svn://svn.twistedmatrix.com/svn/Twisted". I
# think this is kind of a degenerate case.. it might even be a form of error.
prefix_output_2 = """\
<?xml version="1.0"?>
<info>
</info>
"""
# this is the svn info output for a local repository, svn info --xml
# file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository
prefix_output_3 = """\
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="SVN-Repository"
revision="3">
<url>file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository</url>
<repository>
<root>file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository</root>
<uuid>c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f</uuid>
</repository>
<commit
revision="3">
<author>warner</author>
<date>2006-10-01T07:37:04.182499Z</date>
</commit>
</entry>
</info>
"""
# % svn info --xml file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk
prefix_output_4 = """\
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="trunk"
revision="3">
<url>file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk</url>
<repository>
<root>file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository</root>
<uuid>c0f47ff4-ba1e-0410-96b5-d44cc5c79e7f</uuid>
</repository>
<commit
revision="1">
<author>warner</author>
<date>2006-10-01T07:37:02.286440Z</date>
</commit>
</entry>
</info>
"""
class ComputePrefix(unittest.TestCase):
def test1(self):
base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk"
s = SVNPoller(base + "/")
self.failUnlessEqual(s.svnurl, base) # certify slash-stripping
prefix = s.determine_prefix(prefix_output)
self.failUnlessEqual(prefix, "trunk")
self.failUnlessEqual(s._prefix, prefix)
def test2(self):
base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted"
s = SVNPoller(base)
self.failUnlessEqual(s.svnurl, base)
prefix = s.determine_prefix(prefix_output_2)
self.failUnlessEqual(prefix, "")
def test3(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository"
s = SVNPoller(base)
self.failUnlessEqual(s.svnurl, base)
prefix = s.determine_prefix(prefix_output_3)
self.failUnlessEqual(prefix, "")
def test4(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample/trunk"
s = SVNPoller(base)
self.failUnlessEqual(s.svnurl, base)
prefix = s.determine_prefix(prefix_output_4)
self.failUnlessEqual(prefix, "sample/trunk")
# output from svn log on .../SVN-Repository/sample
# (so it includes trunk and branches)
sample_base = "file:///usr/home/warner/stuff/Projects/BuildBot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample"
sample_logentries = [None] * 6
sample_logentries[5] = """\
<logentry
revision="6">
<author>warner</author>
<date>2006-10-01T19:35:16.165664Z</date>
<paths>
<path
action="D">/sample/branch/version.c</path>
</paths>
<msg>revised_to_2</msg>
</logentry>
"""
sample_logentries[4] = """\
<logentry
revision="5">
<author>warner</author>
<date>2006-10-01T19:35:16.165664Z</date>
<paths>
<path
action="D">/sample/branch</path>
</paths>
<msg>revised_to_2</msg>
</logentry>
"""
sample_logentries[3] = """\
<logentry
revision="4">
<author>warner</author>
<date>2006-10-01T19:35:16.165664Z</date>
<paths>
<path
action="M">/sample/trunk/version.c</path>
</paths>
<msg>revised_to_2</msg>
</logentry>
"""
sample_logentries[2] = """\
<logentry
revision="3">
<author>warner</author>
<date>2006-10-01T19:35:10.215692Z</date>
<paths>
<path
action="M">/sample/branch/main.c</path>
</paths>
<msg>commit_on_branch</msg>
</logentry>
"""
sample_logentries[1] = """\
<logentry
revision="2">
<author>warner</author>
<date>2006-10-01T19:35:09.154973Z</date>
<paths>
<path
copyfrom-path="/sample/trunk"
copyfrom-rev="1"
action="A">/sample/branch</path>
</paths>
<msg>make_branch</msg>
</logentry>
"""
sample_logentries[0] = """\
<logentry
revision="1">
<author>warner</author>
<date>2006-10-01T19:35:08.642045Z</date>
<paths>
<path
action="A">/sample</path>
<path
action="A">/sample/trunk</path>
<path
action="A">/sample/trunk/subdir/subdir.c</path>
<path
action="A">/sample/trunk/main.c</path>
<path
action="A">/sample/trunk/version.c</path>
<path
action="A">/sample/trunk/subdir</path>
</paths>
<msg>sample_project_files</msg>
</logentry>
"""
sample_info_output = """\
<?xml version="1.0"?>
<info>
<entry
kind="dir"
path="sample"
revision="4">
<url>file:///usr/home/warner/stuff/Projects/BuildBot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository/sample</url>
<repository>
<root>file:///usr/home/warner/stuff/Projects/BuildBot/trees/misc/_trial_temp/test_vc/repositories/SVN-Repository</root>
<uuid>4f94adfc-c41e-0410-92d5-fbf86b7c7689</uuid>
</repository>
<commit
revision="4">
<author>warner</author>
<date>2006-10-01T19:35:16.165664Z</date>
</commit>
</entry>
</info>
"""
changes_output_template = """\
<?xml version="1.0"?>
<log>
%s</log>
"""
def make_changes_output(maxrevision):
# return what 'svn log' would have just after the given revision was
# committed
logs = sample_logentries[0:maxrevision]
assert len(logs) == maxrevision
logs.reverse()
output = changes_output_template % ("".join(logs))
return output
def split_file(path):
pieces = path.split("/")
if pieces[0] == "branch":
return "branch", "/".join(pieces[1:])
if pieces[0] == "trunk":
return None, "/".join(pieces[1:])
raise RuntimeError("there shouldn't be any files like %s" % path)
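# Example mapping (illustrative paths only):
#   split_file("trunk/main.c")     -> (None, "main.c")
#   split_file("branch/version.c") -> ("branch", "version.c")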
class MySVNPoller(SVNPoller):
def __init__(self, *args, **kwargs):
SVNPoller.__init__(self, *args, **kwargs)
self.pending_commands = []
self.finished_changes = []
def getProcessOutput(self, args):
d = defer.Deferred()
self.pending_commands.append((args, d))
return d
def submit_changes(self, changes):
self.finished_changes.extend(changes)
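# MySVNPoller is a test double: getProcessOutput() records each svn command and
# returns an unfired Deferred, so the tests below can feed canned XML through
# d.callback(...) instead of spawning a real 'svn' process.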
class ComputeChanges(unittest.TestCase):
def test1(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample"
s = SVNPoller(base)
s._prefix = "sample"
output = make_changes_output(4)
doc = s.parse_logs(output)
newlast, logentries = s._filter_new_logentries(doc, 4)
self.failUnlessEqual(newlast, 4)
self.failUnlessEqual(len(logentries), 0)
newlast, logentries = s._filter_new_logentries(doc, 3)
self.failUnlessEqual(newlast, 4)
self.failUnlessEqual(len(logentries), 1)
newlast, logentries = s._filter_new_logentries(doc, 1)
self.failUnlessEqual(newlast, 4)
self.failUnlessEqual(len(logentries), 3)
newlast, logentries = s._filter_new_logentries(doc, None)
self.failUnlessEqual(newlast, 4)
self.failUnlessEqual(len(logentries), 0)
def testChanges(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample"
s = SVNPoller(base, split_file=split_file)
s._prefix = "sample"
doc = s.parse_logs(make_changes_output(3))
newlast, logentries = s._filter_new_logentries(doc, 1)
# so we see revisions 2 and 3 as being new
self.failUnlessEqual(newlast, 3)
changes = s.create_changes(logentries)
self.failUnlessEqual(len(changes), 2)
self.failUnlessEqual(changes[0].branch, "branch")
self.failUnlessEqual(changes[0].revision, '2')
self.failUnlessEqual(changes[1].branch, "branch")
self.failUnlessEqual(changes[1].files, ["main.c"])
self.failUnlessEqual(changes[1].revision, '3')
# and now pull in r4
doc = s.parse_logs(make_changes_output(4))
newlast, logentries = s._filter_new_logentries(doc, newlast)
self.failUnlessEqual(newlast, 4)
# so we see revision 4 as being new
changes = s.create_changes(logentries)
self.failUnlessEqual(len(changes), 1)
self.failUnlessEqual(changes[0].branch, None)
self.failUnlessEqual(changes[0].revision, '4')
self.failUnlessEqual(changes[0].files, ["version.c"])
# and now pull in r5 (should *not* create a change as it's a
# branch deletion)
doc = s.parse_logs(make_changes_output(5))
newlast, logentries = s._filter_new_logentries(doc, newlast)
self.failUnlessEqual(newlast, 5)
# so we see revision 5 as being new
changes = s.create_changes(logentries)
self.failUnlessEqual(len(changes), 0)
# and now pull in r6 (should create a change as it's not
# deleting an entire branch)
doc = s.parse_logs(make_changes_output(6))
newlast, logentries = s._filter_new_logentries(doc, newlast)
self.failUnlessEqual(newlast, 6)
# so we see revision 6 as being new
changes = s.create_changes(logentries)
self.failUnlessEqual(len(changes), 1)
self.failUnlessEqual(changes[0].branch, 'branch')
self.failUnlessEqual(changes[0].revision, '6')
self.failUnlessEqual(changes[0].files, ["version.c"])
def testFirstTime(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample"
s = SVNPoller(base, split_file=split_file)
s._prefix = "sample"
doc = s.parse_logs(make_changes_output(4))
logentries = s.get_new_logentries(doc)
# SVNPoller ignores all changes that happened before it was started
self.failUnlessEqual(len(logentries), 0)
self.failUnlessEqual(s.last_change, 4)
class Misc(unittest.TestCase):
def testAlreadyWorking(self):
base = "file:///home/warner/stuff/Projects/BuildBot/trees/svnpoller/_trial_temp/test_vc/repositories/SVN-Repository/sample"
s = MySVNPoller(base)
d = s.checksvn()
# the SVNPoller is now waiting for its getProcessOutput to finish
self.failUnlessEqual(s.overrun_counter, 0)
d2 = s.checksvn()
self.failUnlessEqual(s.overrun_counter, 1)
self.failUnlessEqual(len(s.pending_commands), 1)
def testGetRoot(self):
base = "svn+ssh://svn.twistedmatrix.com/svn/Twisted/trunk"
s = MySVNPoller(base)
d = s.checksvn()
# the SVNPoller is now waiting for its getProcessOutput to finish
self.failUnlessEqual(len(s.pending_commands), 1)
self.failUnlessEqual(s.pending_commands[0][0],
["info", "--xml", "--non-interactive", base])
def makeTime(timestring):
datefmt = '%Y/%m/%d %H:%M:%S'
when = time.mktime(time.strptime(timestring, datefmt))
return when
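# e.g. makeTime("2006/10/01 19:35:08") -> epoch seconds (local timezone)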
class Everything(unittest.TestCase):
def test1(self):
s = MySVNPoller(sample_base, split_file=split_file)
d = s.checksvn()
# the SVNPoller is now waiting for its getProcessOutput to finish
self.failUnlessEqual(len(s.pending_commands), 1)
self.failUnlessEqual(s.pending_commands[0][0],
["info", "--xml", "--non-interactive",
sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(sample_info_output)
# now it should be waiting for the 'svn log' command
self.failUnlessEqual(len(s.pending_commands), 1)
self.failUnlessEqual(s.pending_commands[0][0],
["log", "--xml", "--verbose", "--non-interactive",
"--limit=100", sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(make_changes_output(1))
# the command ignores the first batch of changes
self.failUnlessEqual(len(s.finished_changes), 0)
self.failUnlessEqual(s.last_change, 1)
# now fire it again, nothing changing
d = s.checksvn()
self.failUnlessEqual(s.pending_commands[0][0],
["log", "--xml", "--verbose", "--non-interactive",
"--limit=100", sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(make_changes_output(1))
# nothing has changed
self.failUnlessEqual(len(s.finished_changes), 0)
self.failUnlessEqual(s.last_change, 1)
# and again, with r2 this time
d = s.checksvn()
self.failUnlessEqual(s.pending_commands[0][0],
["log", "--xml", "--verbose", "--non-interactive",
"--limit=100", sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(make_changes_output(2))
# r2 should appear
self.failUnlessEqual(len(s.finished_changes), 1)
self.failUnlessEqual(s.last_change, 2)
c = s.finished_changes[0]
self.failUnlessEqual(c.branch, "branch")
self.failUnlessEqual(c.revision, '2')
self.failUnlessEqual(c.files, [''])
# TODO: this is what creating the branch looks like: a Change with a
# zero-length file. We should decide if we want filenames like this
# in the Change (and make sure nobody else gets confused by it) or if
# we want to strip them out.
self.failUnlessEqual(c.comments, "make_branch")
# and again at r2, so nothing should change
d = s.checksvn()
self.failUnlessEqual(s.pending_commands[0][0],
["log", "--xml", "--verbose", "--non-interactive",
"--limit=100", sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(make_changes_output(2))
# nothing has changed
self.failUnlessEqual(len(s.finished_changes), 1)
self.failUnlessEqual(s.last_change, 2)
# and again with both r3 and r4 appearing together
d = s.checksvn()
self.failUnlessEqual(s.pending_commands[0][0],
["log", "--xml", "--verbose", "--non-interactive",
"--limit=100", sample_base])
d = s.pending_commands[0][1]
s.pending_commands.pop(0)
d.callback(make_changes_output(4))
self.failUnlessEqual(len(s.finished_changes), 3)
self.failUnlessEqual(s.last_change, 4)
c3 = s.finished_changes[1]
self.failUnlessEqual(c3.branch, "branch")
self.failUnlessEqual(c3.revision, '3')
self.failUnlessEqual(c3.files, ["main.c"])
self.failUnlessEqual(c3.comments, "commit_on_branch")
c4 = s.finished_changes[2]
self.failUnlessEqual(c4.branch, None)
self.failUnlessEqual(c4.revision, '4')
self.failUnlessEqual(c4.files, ["version.c"])
self.failUnlessEqual(c4.comments, "revised_to_2")
self.failUnless(abs(c4.when - time.time()) < 60)
# TODO: get coverage of split_file returning None
# point at a live SVN server for a little while
| gpl-2.0 | -176,282,127,084,430,800 | 33.258947 | 139 | 0.642291 | false |
cmunk/protwis | api/views.py | 1 | 32974 | from django.shortcuts import render
from rest_framework import views, generics, viewsets
from rest_framework.response import Response
from rest_framework.parsers import MultiPartParser, FormParser, FileUploadParser
from rest_framework.renderers import JSONRenderer
from django.template.loader import render_to_string
from django.db.models import Q
from django.conf import settings
from interaction.models import ResidueFragmentInteraction
from mutation.models import MutationRaw
from protein.models import Protein, ProteinConformation, ProteinFamily, Species, ProteinSegment
from residue.models import Residue, ResidueGenericNumber, ResidueNumberingScheme, ResidueGenericNumberEquivalent
from structure.models import Structure
from structure.assign_generic_numbers_gpcr import GenericNumbering
from api.serializers import (ProteinSerializer, ProteinFamilySerializer, SpeciesSerializer, ResidueSerializer,
ResidueExtendedSerializer, StructureSerializer,
StructureLigandInteractionSerializer,
MutationSerializer)
from api.renderers import PDBRenderer
from common.alignment import Alignment
from common.definitions import *
from drugs.models import Drugs
import json, os
from io import StringIO
from Bio.PDB import PDBIO
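# Needed by StructureSequenceParser below; module paths assumed from Biopython
# and the protwis source layout:
from Bio.PDB.parse_pdb_header import parse_pdb_header
from structure.sequence_parser import SequenceParser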
from collections import OrderedDict
# FIXME add
# getMutations
# numberPDBfile
import coreapi
from urllib.parse import urlparse
from urllib.parse import urljoin
from rest_framework import renderers, response, schemas
from rest_framework.decorators import api_view, renderer_classes
from rest_framework import response, schemas
from rest_framework_swagger.views import get_swagger_view
schema_view = get_swagger_view(title='GPCRdb API')
class ProteinDetail(generics.RetrieveAPIView):
"""
Get a single protein instance by entry name
\n/protein/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
queryset = Protein.objects.filter(sequence_type__slug="wt").prefetch_related('family', 'species', 'source', 'residue_numbering_scheme', 'genes')
serializer_class = ProteinSerializer
lookup_field = 'entry_name'
class ProteinByAccessionDetail(ProteinDetail):
"""
Get a single protein instance by accession
\n/protein/accession/{accession}/
\n{accession} is a protein identifier from Uniprot, e.g. P07550
"""
lookup_field = 'accession'
class ProteinFamilyList(generics.ListAPIView):
"""
Get a list of protein families
\n/proteinfamily/
"""
queryset = ProteinFamily.objects.all().prefetch_related('parent')
serializer_class = ProteinFamilySerializer
class ProteinFamilyDetail(generics.RetrieveAPIView):
"""
Get a single protein family instance
\n/proteinfamily/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
queryset = ProteinFamily.objects.all().prefetch_related("parent")
serializer_class = ProteinFamilySerializer
lookup_field = 'slug'
class ProteinFamilyChildrenList(generics.ListAPIView):
"""
Get a list of child families of a protein family
\n/proteinfamily/children/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinFamilySerializer
def get_queryset(self):
family = self.kwargs.get('slug')
queryset = ProteinFamily.objects.all().prefetch_related("parent")
return queryset.filter(parent__slug=family)
class ProteinFamilyDescendantList(generics.ListAPIView):
"""
Get a list of descendant families of a protein family
\n/proteinfamily/descendants/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinFamilySerializer
def get_queryset(self):
family = self.kwargs.get('slug')
queryset = ProteinFamily.objects.all().prefetch_related("parent")
return queryset.filter(Q(slug__startswith=family) & ~Q(slug=family))
class ProteinsInFamilyList(generics.ListAPIView):
"""
Get a list of proteins in a protein family
\n/proteinfamily/proteins/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
serializer_class = ProteinSerializer
def get_queryset(self):
queryset = Protein.objects.all()
family = self.kwargs.get('slug')
return queryset.filter(sequence_type__slug='wt', family__slug__startswith=family)\
.prefetch_related('family', 'species', 'source', 'residue_numbering_scheme', 'genes')
class ProteinsInFamilySpeciesList(generics.ListAPIView):
"""
Get a list of proteins in a protein family
\n/proteinfamily/proteins/{slug}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{latin_name} is a species identifier from Uniprot, e.g. Homo sapiens
"""
serializer_class = ProteinSerializer
def get_queryset(self):
queryset = Protein.objects.all()
family = self.kwargs.get('slug')
species = self.kwargs.get('latin_name')
return queryset.filter(sequence_type__slug='wt', family__slug__startswith=family,
species__latin_name=species).prefetch_related('family',
'species', 'source', 'residue_numbering_scheme', 'genes')
class ResiduesList(generics.ListAPIView):
"""
Get a list of residues of a protein
\n/residues/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = ResidueSerializer
def get_queryset(self):
queryset = Residue.objects.all()
#protein_conformation__protein__sequence_type__slug='wt',
return queryset.filter(
protein_conformation__protein__entry_name=self.kwargs.get('entry_name')).prefetch_related('display_generic_number','protein_segment','alternative_generic_numbers')
class ResiduesExtendedList(ResiduesList):
"""
Get a list of residues of a protein, including alternative generic numbers
\n/residues/extended/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = ResidueExtendedSerializer
class SpeciesList(generics.ListAPIView):
"""
Get a list of species
\n/species/
"""
queryset = Species.objects.all()
serializer_class = SpeciesSerializer
class SpeciesDetail(generics.RetrieveAPIView):
"""
Get a single species instance
\n/species/{latin_name}/
\n{latin_name} is a species identifier from Uniprot, e.g. Homo sapiens
"""
queryset = Species.objects.all()
serializer_class = SpeciesSerializer
lookup_field = 'latin_name'
class NumberPDBStructureView(views.APIView):
"""
WRITEME
"""
pass
class StructureList(views.APIView):
"""
Get a list of structures
\n/structure/
"""
def get(self, request, pdb_code=None, entry_name=None, representative=None):
if pdb_code:
structures = Structure.objects.filter(pdb_code__index=pdb_code)
elif entry_name and representative:
structures = Structure.objects.filter(protein_conformation__protein__parent__entry_name=entry_name,
representative=True)
elif entry_name:
structures = Structure.objects.filter(protein_conformation__protein__parent__entry_name=entry_name)
elif representative:
structures = Structure.objects.filter(representative=True)
else:
structures = Structure.objects.all()
structures = structures.exclude(refined=True).prefetch_related('protein_conformation__protein__parent__species', 'pdb_code',
'protein_conformation__protein__parent__family', 'protein_conformation__protein__parent__species',
'publication__web_link', 'structureligandinteraction_set__ligand__properities', 'structure_type',
'structureligandinteraction_set__ligand__properities__ligand_type',
'structureligandinteraction_set__ligand_role')
# structures = self.get_structures(pdb_code, entry_name, representative)
# convert objects to a list of dictionaries
# normal serializers can not be used because of abstraction of tables (e.g. protein_conformation)
s = []
for structure in structures:
# essential fields
structure_data = {
'pdb_code': structure.pdb_code.index,
'protein': structure.protein_conformation.protein.parent.entry_name,
'family': structure.protein_conformation.protein.parent.family.slug,
'species': structure.protein_conformation.protein.parent.species.latin_name,
'preferred_chain': structure.preferred_chain,
'resolution': structure.resolution,
'publication_date': structure.publication_date,
'type': structure.structure_type.name,
'state': structure.state.name,
'distance': structure.distance,
}
# publication
if structure.publication:
structure_data['publication'] = structure.publication.web_link.__str__()
else:
structure_data['publication'] = None
# ligand
ligands = []
for interaction in structure.structureligandinteraction_set.filter(annotated=True):
ligand = {}
if interaction.ligand.name:
ligand['name'] = interaction.ligand.name
if interaction.ligand.properities.ligand_type and interaction.ligand.properities.ligand_type.name:
ligand['type'] = interaction.ligand.properities.ligand_type.name
if interaction.ligand_role and interaction.ligand_role.name:
ligand['function'] = interaction.ligand_role.name
if ligand:
ligands.append(ligand)
structure_data['ligands'] = ligands
s.append(structure_data)
# if a structure is selected, return a single dict rather then a list of dicts
if len(s) == 1:
s = s[0]
return Response(s)
def get_structures(self, pdb_code=None, representative=None):
return Structure.objects.all()
class RepresentativeStructureList(StructureList):
"""
Get a list of representative structures (one for each protein and activation state)
\n/structure/representative/
"""
class StructureListProtein(StructureList):
"""
Get a list of structures of a protein
\n/structure/protein/{entry_name}
"""
class RepresentativeStructureListProtein(StructureList):
"""
Get a list of representative structures of a protein (one for each activation state)
\n/structure/protein/{entry_name}/representative/
"""
class StructureDetail(StructureList):
"""
Get a single structure instance
\n/structure/{pdb_code}/
\n{pdb_code} is a structure identifier from the Protein Data Bank, e.g. 2RH1
"""
def get_structures(self, pdb_code=None, representative=None):
return Structure.objects.filter(pdb_code__index=pdb_code)
class FamilyAlignment(views.APIView):
"""
Get a full sequence alignment of a protein family including a consensus sequence
\n/alignment/family/{slug}/
\n{slug} is a protein family identifier, e.g. 001_001_001
"""
def get(self, request, slug=None, segments=None, latin_name=None, statistics=False):
if slug is not None:
# Check for specific species
if latin_name is not None:
ps = Protein.objects.filter(sequence_type__slug='wt', source__id=1, family__slug__startswith=slug,
species__latin_name=latin_name)
else:
ps = Protein.objects.filter(sequence_type__slug='wt', source__id=1, family__slug__startswith=slug)
# take the numbering scheme from the first protein
#s_slug = Protein.objects.get(entry_name=ps[0]).residue_numbering_scheme_id
s_slug = ps[0].residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
# make sure the query works for all positions
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_proteins(ps)
# load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
a.calculate_statistics()
residue_list = []
for aa in a.full_consensus:
residue_list.append(aa.amino_acid)
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = OrderedDict({})
for row in response:
if row.startswith(">"):
k = row[1:]
else:
ali_dict[k] = row
k = False
ali_dict['CONSENSUS'] = ''.join(residue_list)
# render statistics for output
if statistics == True:
feat = {}
for i, feature in enumerate(AMINO_ACID_GROUPS):
feature_stats = a.feature_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[feature] = [item for sublist in feature_stats_clean for item in sublist]
for i, AA in enumerate(AMINO_ACIDS):
feature_stats = a.amino_acid_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[AA] = [item for sublist in feature_stats_clean for item in sublist]
ali_dict["statistics"] = feat
return Response(ali_dict)
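# Response sketch (illustrative): GET /alignment/family/001_001_003/TM6,7x35/
# returns an ordered dict like
#   {"<entry_name>": "<aligned segment sequence>", ..., "CONSENSUS": "..."}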
class FamilyAlignmentPartial(FamilyAlignment):
"""
Get a partial sequence alignment of a protein family
\n/alignment/family/{slug}/{segments}/
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class FamilyAlignmentSpecies(FamilyAlignment):
"""
Get a full sequence alignment of a protein family
\n/alignment/family/{slug}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{species} is a species identifier from Uniprot, e.g. Homo sapiens
"""
class FamilyAlignmentPartialSpecies(FamilyAlignment):
"""
Get a partial sequence alignment of a protein family
\n/alignment/family/{slug}/{segments}/{species}
\n{slug} is a protein family identifier, e.g. 001_001_001
\n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
\n{species} is a species identifier from Uniprot, e.g. Homo sapiens
"""
class ProteinSimilaritySearchAlignment(views.APIView):
"""
Get a segment sequence alignment of two or more proteins ranked by similarity
\n/alignment/similarity/{proteins}/{segments}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human,cxcr4_human,
where the first protein is the query protein and the following the proteins to compare it to
\n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
def get(self, request, proteins=None, segments=None):
if proteins is not None:
protein_list = proteins.split(",")
# first in API should be reference
ps = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=protein_list[1:])
reference = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=[protein_list[0]])
# take the numbering scheme from the first protein
s_slug = Protein.objects.get(entry_name=protein_list[0]).residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
# make sure the query works for all positions
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from API into the alignment
a.load_reference_protein(reference[0])
a.load_proteins(ps)
# load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity()
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = {}
k = False
num = 0
for i, row in enumerate(response):
if row.startswith(">"):
k = row[1:]
elif k:
# add the query as 100 identical/similar to the beginning (like on the website)
if num == 0:
a.proteins[num].identity = 100
a.proteins[num].similarity = 100
# order dict after custom list
keyorder = ["similarity","identity","AA"]
ali_dict[k] = {"AA": row, "identity": int(str(a.proteins[num].identity).replace(" ","")),
"similarity": int(str(a.proteins[num].similarity).replace(" ",""))}
ali_dict[k] = OrderedDict(sorted(ali_dict[k].items(), key=lambda t: keyorder.index(t[0])))
num+=1
k = False
ali_dict_ordered = OrderedDict(sorted(ali_dict.items(), key=lambda x: x[1]['similarity'], reverse=True))
return Response(ali_dict_ordered)
class ProteinAlignment(views.APIView):
"""
Get a full sequence alignment of two or more proteins
\n/alignment/protein/{proteins}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
"""
def get(self, request, proteins=None, segments=None, statistics=False):
if proteins is not None:
protein_list = proteins.split(",")
ps = Protein.objects.filter(sequence_type__slug='wt', entry_name__in=protein_list)
# take the numbering scheme from the first protein
#s_slug = Protein.objects.get(entry_name=protein_list[0]).residue_numbering_scheme_id
s_slug = ps[0].residue_numbering_scheme_id
protein_family = ps[0].family.slug[:3]
gen_list = []
segment_list = []
if segments is not None:
input_list = segments.split(",")
# fetch a list of all segments
protein_segments = ProteinSegment.objects.filter(partial=False).values_list('slug', flat=True)
for s in input_list:
# add to segment list
if s in protein_segments:
segment_list.append(s)
# get generic numbering object for generic positions
else:
gen_object = ResidueGenericNumberEquivalent.objects.get(label=s, scheme__id=s_slug)
gen_object.properties = {}
gen_list.append(gen_object)
# fetch all complete protein_segments
ss = ProteinSegment.objects.filter(slug__in=segment_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False)
if int(protein_family) < 100:
ss = [ s for s in ss if s.proteinfamily == 'GPCR']
elif protein_family == "100":
ss = [ s for s in ss if s.proteinfamily == 'Gprotein']
elif protein_family == "200":
ss = [ s for s in ss if s.proteinfamily == 'Arrestin']
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_proteins(ps)
# load generic numbers and TMs separately
if gen_list:
a.load_segments(gen_list)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate statistics
if statistics == True:
a.calculate_statistics()
# render the fasta template as string
response = render_to_string('alignment/alignment_fasta.html', {'a': a}).split("\n")
# convert the list to a dict
ali_dict = {}
k = False
for row in response:
if row.startswith(">"):
k = row[1:]
elif k:
ali_dict[k] = row
k = False
# render statistics for output
if statistics == True:
feat = {}
for i, feature in enumerate(AMINO_ACID_GROUPS):
feature_stats = a.feature_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[feature] = [item for sublist in feature_stats_clean for item in sublist]
for i, AA in enumerate(AMINO_ACIDS):
feature_stats = a.amino_acid_stats[i]
feature_stats_clean = []
for d in feature_stats:
sub_list = [x[0] for x in d]
feature_stats_clean.append(sub_list) # remove feature frequencies
# print(feature_stats_clean)
feat[AA] = [item for sublist in feature_stats_clean for item in sublist]
ali_dict["statistics"] = feat
return Response(ali_dict)
class ProteinAlignmentStatistics(ProteinAlignment):
"""
Add /statistics at the end of an alignment request in order to
receive an additional residue property statistics output e.g.:
\n/alignment/protein/{proteins}/{segments}/statistics
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
\n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class ProteinAlignmentPartial(ProteinAlignment):
"""
Get a partial sequence alignment of two or more proteins
\n/alignment/protein/{proteins}/{segments}/
\n{proteins} is a comma separated list of protein identifiers, e.g. adrb2_human,5ht2a_human
\n{segments} is a comma separated list of protein segment identifiers and/or
generic GPCRdb numbers, e.g. TM2,TM3,ECL2,4x50
"""
class StructureTemplate(views.APIView):
"""
Get the most similar structure template for a protein using a 7TM alignment
\n/structure/template/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
def get(self, request, entry_name=None, segments=None):
if entry_name is not None:
ref = Protein.objects.get(sequence_type__slug='wt', entry_name=entry_name)
structures = Structure.objects.order_by('protein_conformation__protein__parent', 'state',
'resolution').distinct('protein_conformation__protein__parent', 'state')
ps = []
for structure in structures:
ps.append(structure.protein_conformation.protein.parent)
if segments is not None:
input_list = segments.split(",")
ss = ProteinSegment.objects.filter(slug__in=input_list, partial=False)
else:
ss = ProteinSegment.objects.filter(partial=False, category='helix')
# create an alignment object
a = Alignment()
a.show_padding = False
# load data from selection into the alignment
a.load_reference_protein(ref)
a.load_proteins(ps)
a.load_segments(ss)
# build the alignment data matrix
a.build_alignment()
# calculate identity and similarity of each row compared to the reference
a.calculate_similarity()
# return the entry_name of the closest template
return Response(a.proteins[1].protein.entry_name)
class StructureTemplatePartial(StructureTemplate):
"""
Get the most similar structure template for a protein using a partial alignment
\n/structure/template/{entry_name}/{segments}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
\n{segments} is a comma separated list of protein segment identifiers, e.g. TM3,TM5,TM6
"""
class StructureAssignGenericNumbers(views.APIView):
"""
Assign generic residue numbers (Ballesteros-Weinstein and GPCRdb schemes) to an uploaded pdb file.
\n/structure/assign_generic_numbers\n
e.g.
curl -X POST -F "pdb_file=@my_file.pdb" http://gpcrdb.org/services/structure/assign_generic_numbers
"""
parser_classes = (FileUploadParser,)
renderer_classes = (PDBRenderer, )
def post(self, request):
root, ext = os.path.splitext(request.FILES['pdb_file'].name)
generic_numbering = GenericNumbering(StringIO(request.FILES['pdb_file'].file.read().decode('UTF-8',"ignore")))
out_struct = generic_numbering.assign_generic_numbers()
out_stream = StringIO()
io = PDBIO()
io.set_structure(out_struct)
io.save(out_stream)
print(len(out_stream.getvalue()))
# filename="{}_GPCRdb.pdb".format(root)
return Response(out_stream.getvalue())
class StructureSequenceParser(views.APIView):
"""
Analyze the uploaded pdb structure listing auxiliary proteins, mutations, deletions and insertions.
\n/structure/parse_pdb\n
e.g.
curl -X POST -F "pdb_file=@my_file.pdb" http://gpcrdb.org/services/structure/parse_pdb
"""
parser_classes = (FileUploadParser,)
renderer_classes = (JSONRenderer,)
def post(self, request):
root, ext = os.path.splitext(request.FILES['pdb_file'].name)
header = parse_pdb_header(request.FILES['pdb_file'])
parser = SequenceParser(request.FILES['pdb_file'])
json_data = OrderedDict()
json_data["header"] = header
json_data.update(parser.get_fusions())
json_data.update(parser.get_mutations())
json_data.update(parser.get_deletions())
return Response(json_data)
class StructureLigandInteractions(generics.ListAPIView):
"""
Get a list of interactions between structure and ligand
\n/structure/{pdb_code}/interaction/
\n{pdb_code} is a structure identifier from the Protein Data Bank, e.g. 2RH1
"""
serializer_class = StructureLigandInteractionSerializer
def get_queryset(self):
queryset = ResidueFragmentInteraction.objects.all()
queryset = queryset.prefetch_related('structure_ligand_pair__structure__pdb_code',
'interaction_type',
'fragment__residue__generic_number',
'fragment__residue__display_generic_number',
)
queryset = queryset.exclude(interaction_type__type='hidden').order_by('fragment__residue__sequence_number')
slug = self.kwargs.get('pdb_code')
return queryset.filter(structure_ligand_pair__structure__pdb_code__index=slug,
structure_ligand_pair__annotated=True)
class MutantList(generics.ListAPIView):
"""
Get a list of mutants of single protein instance by entry name
\n/mutant/{entry_name}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
serializer_class = MutationSerializer
def get_queryset(self):
queryset = MutationRaw.objects.all()
return queryset.filter(protein=self.kwargs.get('entry_name'))
class DrugList(views.APIView):
"""
Get a list of drugs for a single protein instance by entry name
\n/drugs/{proteins}/
\n{entry_name} is a protein identifier from Uniprot, e.g. adrb2_human
"""
def get(self, request, entry_name=None):
drugs = Drugs.objects.filter(target__entry_name=entry_name).distinct()
druglist = []
for drug in drugs:
drugname = drug.name
drugtype = drug.drugtype
clinical = drug.clinicalstatus
phasedate = drug.phasedate
if clinical != '-':
status = drug.status + ' (' + drug.clinicalstatus + ', ' + phasedate + ')'
else:
status = drug.status
approval = drug.approval
indication = drug.indication
moa = drug.moa
novelty = drug.novelty
druglist.append({'name':drugname, 'approval': approval, 'indication': indication, 'status':status, 'drugtype':drugtype, 'moa':moa, 'novelty': novelty})
return Response(druglist)
| apache-2.0 | -7,819,170,572,501,628,000 | 38.48982 | 175 | 0.611724 | false |
Djabx/mgd | mgdpck/writters/cbz.py | 1 | 1116 | #! /usr/bin/python
# -*- coding: utf-8 -*-
'''
A cbz writer
'''
from mgdpck import actions
import os
import mimetypes
import zipfile
class CbzWritter(actions.AbsWritter):
@classmethod
def get_name(cls):
return 'cbz'
def __init__(self, outdir):
self.outdir = outdir
self.out = None
def done(self):
if self.out:
self.out.close()
def export_book(self, lsb, chapter_min, chapter_max):
self.out_file = os.path.join(self.outdir, "{0.book.short_name}_{1.num:>03}_{2.num:>03}.cbz".format(lsb, chapter_min, chapter_max))
self.out = zipfile.ZipFile(self.out_file, "w", compression=zipfile.ZIP_DEFLATED)
def export_cover(self, lsb):
cv_path = "{0:>03}_{0:>03}_{1}{2}".format(0, 'cover',
mimetypes.guess_extension(lsb.image.mimetype))
self.out.writestr(cv_path, lsb.image.content)
def export_chapter(self, ch):
pass
def export_page(self, pa):
pa_path = "{0.chapter.num:>03}_{0.num:>03}{1}".format(pa,
mimetypes.guess_extension(pa.image.mimetype))
self.out.writestr(pa_path, pa.image.content)
actions.register_writter(CbzWritter)
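# Registration makes this writter selectable under the name 'cbz'; exported
# books are named like "<short_name>_001_010.cbz" (chapter range, zero-padded).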
| apache-2.0 | 603,414,583,288,958,700 | 21.32 | 134 | 0.654122 | false |
RicardoJohann/frappe | frappe/utils/goal.py | 1 | 4641 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from six.moves import xrange
def get_monthly_results(goal_doctype, goal_field, date_col, filter_str, aggregation = 'sum'):
'''Get monthly aggregation values for given field of doctype'''
# TODO: move to ORM?
if(frappe.conf.db_type == 'postgres'):
month_year_format_query = '''to_char("{}", 'MM-YYYY')'''.format(date_col)
else:
month_year_format_query = 'date_format(`{}`, "%m-%Y")'.format(date_col)
conditions = ('where ' + filter_str) if filter_str else ''
results = frappe.db.sql('''SELECT {aggregation}(`{goal_field}`) AS {goal_field},
{month_year_format_query} AS month_year
FROM `{table_name}` {conditions}
GROUP BY month_year'''
.format(
aggregation=aggregation,
goal_field=goal_field,
month_year_format_query=month_year_format_query,
table_name="tab" + goal_doctype,
conditions=conditions
), as_dict=True)
month_to_value_dict = {}
for d in results:
month_to_value_dict[d['month_year']] = d[goal_field]
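# e.g. {'01-2015': 1200.0, '02-2015': 800.0} (month-year -> aggregate; illustrative values)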
return month_to_value_dict
@frappe.whitelist()
def get_monthly_goal_graph_data(title, doctype, docname, goal_value_field, goal_total_field, goal_history_field,
goal_doctype, goal_doctype_link, goal_field, date_field, filter_str, aggregation="sum"):
'''
Get month-wise graph data for a doctype based on aggregation values of a field in the goal doctype
:param title: Graph title
:param doctype: doctype of graph doc
:param docname: of the doc to set the graph in
:param goal_value_field: goal field of doctype
:param goal_total_field: current month value field of doctype
:param goal_history_field: cached history field
:param goal_doctype: doctype the goal is based on
:param goal_doctype_link: doctype link field in goal_doctype
:param goal_field: field from which the goal is calculated
:param filter_str: where clause condition
:param aggregation: a value like 'count', 'sum', 'avg'
:return: dict of graph data
'''
from frappe.utils.formatters import format_value
import json
meta = frappe.get_meta(doctype)
doc = frappe.get_doc(doctype, docname)
goal = doc.get(goal_value_field)
formatted_goal = format_value(goal, meta.get_field(goal_value_field), doc)
current_month_value = doc.get(goal_total_field)
formatted_value = format_value(current_month_value, meta.get_field(goal_total_field), doc)
from frappe.utils import today, getdate, formatdate, add_months
current_month_year = formatdate(today(), "MM-yyyy")
history = doc.get(goal_history_field)
try:
month_to_value_dict = json.loads(history) if history and '{' in history else None
except ValueError:
month_to_value_dict = None
if month_to_value_dict is None:
doc_filter = (goal_doctype_link + " = '" + docname + "'") if doctype != goal_doctype else ''
if filter_str:
doc_filter += ' and ' + filter_str if doc_filter else filter_str
month_to_value_dict = get_monthly_results(goal_doctype, goal_field, date_field, doc_filter, aggregation)
frappe.db.set_value(doctype, docname, goal_history_field, json.dumps(month_to_value_dict))
month_to_value_dict[current_month_year] = current_month_value
months = []
months_formatted = []
values = []
values_formatted = []
for i in range(0, 12):
date_value = add_months(today(), -i)
month_value = formatdate(date_value, "MM-yyyy")
month_word = getdate(date_value).strftime('%b')
month_year = getdate(date_value).strftime('%B') + ', ' + getdate(date_value).strftime('%Y')
months.insert(0, month_word)
months_formatted.insert(0, month_year)
if month_value in month_to_value_dict:
val = month_to_value_dict[month_value]
else:
val = 0
values.insert(0, val)
values_formatted.insert(0, format_value(val, meta.get_field(goal_total_field), doc))
y_markers = []
summary_values = [
{
'title': _("This month"),
'color': '#ffa00a',
'value': formatted_value
}
]
if float(goal) > 0:
y_markers = [
{
'label': _("Goal"),
'lineType': "dashed",
'value': goal
},
]
summary_values += [
{
'title': _("Goal"),
'color': '#5e64ff',
'value': formatted_goal
},
{
'title': _("Completed"),
'color': '#28a745',
'value': str(int(round(float(current_month_value)/float(goal)*100))) + "%"
}
]
data = {
'title': title,
# 'subtitle':
'data': {
'datasets': [
{
'values': values,
'formatted': values_formatted
}
],
'labels': months,
'yMarkers': y_markers
},
'summary': summary_values,
}
return data
| mit | 6,556,498,908,767,743,000 | 29.333333 | 112 | 0.676578 | false |
nmarincic/numbasom | numbasom/numbasom.py | 1 | 8667 | from numba import jit
import numpy as np
import math
import collections
from timeit import default_timer as timer
@jit(nopython=True)
def normalize(data, min_val=0, max_val=1):
no_vectors, dim = data.shape
D = np.empty((no_vectors,dim), dtype=np.float64)
inf = 1.7976931348623157e+308
min_arr = np.empty(dim, dtype=np.float64)
min_arr[:] = inf
max_arr = np.empty(dim, dtype=np.float64)
max_arr[:] = -inf
diff = np.empty(dim, dtype=np.float64)
for vec in range(no_vectors):
for d in range(dim):
val = data[vec,d]
if val < min_arr[d]:
min_arr[d] = val
if val > max_arr[d]:
max_arr[d] = val
for d in range(dim):
diff[d] = max_arr[d] - min_arr[d]
for i in range(no_vectors):
for j in range(dim):
if diff[j] != 0:
D[i,j] = (data[i, j] - min_arr[j]) / diff[j]
else:
D[i,j] = 0
return D
@jit(nopython=True)
def normalize_with_mutate(data, min_val=0, max_val=1):
no_vectors, dim = data.shape
#D = np.empty((no_vectors,dim), dtype=np.float64)
inf = 1.7976931348623157e+308
min_arr = np.empty(dim, dtype=np.float64)
min_arr[:] = inf
max_arr = np.empty(dim, dtype=np.float64)
max_arr[:] = -inf
diff = np.empty(dim, dtype=np.float64)
for vec in range(no_vectors):
for d in range(dim):
val = data[vec,d]
if val < min_arr[d]:
min_arr[d] = val
if val > max_arr[d]:
max_arr[d] = val
for d in range(dim):
diff[d] = max_arr[d] - min_arr[d]
for i in range(no_vectors):
for j in range(dim):
data[i,j] = (data[i, j] - min_arr[j]) / diff[j]
def pairwise(X):
M = X.shape[0]
N = X.shape[1]
D = np.empty((M, M), dtype=np.float64)
for i in range(M):
for j in range(M):
d = 0.0
for k in range(N):
tmp = X[i, k] - X[j, k]
d += tmp * tmp
D[i, j] = np.sqrt(d)
return D
def pairwise_squared(X):
M = X.shape[0]
N = X.shape[1]
# type will depend on the size of the matrix
D = np.empty((M, M), dtype=np.uint32)
for i in range(M):
for j in range(M):
d = 0.0
for k in range(N):
tmp = X[i, k] - X[j, k]
d += tmp * tmp
D[i, j] = d
return D
@jit(nopython=True)
def random_lattice(som_size, dimensionality):
X, Y, Z = som_size[0], som_size[1], dimensionality
D = np.empty((X,Y,Z), dtype=np.float64)
for x in range(X):
for y in range(Y):
for z in range(Z):
D[x,y,z] = np.random.random()
return D
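# On a torus the BMU neighborhood wraps around the lattice edges: besides the
# BMU itself, the three wrapped index pairs computed below are its images
# across the X edge, the Y edge, and both at once, so adapt() is applied to
# each in turn when is_torus=True.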
@jit
def get_all_BMU_indexes(BMU, X, Y):
BMUx, BMUy = BMU[0], BMU[1]
BMU2x, BMU3x, BMU4x = BMU[0], BMU[0], BMU[0]
BMU2y, BMU3y, BMU4y = BMU[1], BMU[1], BMU[1]
if BMUx > X / 2:
BMU2x = BMUx - X
else:
BMU2x = BMUx + X
if BMUy > Y / 2:
BMU3y = BMUy - Y
else:
BMU3y = BMUy + Y
BMU4x = BMU2x
BMU4y = BMU3y
return BMU, (BMU2x, BMU2y), (BMU3x, BMU3y), (BMU4x, BMU4y)
@jit(nopython=True)
def som_calc(som_size, num_iterations, data_scaled, is_torus=False):
#data_scaled = normalize(data)
initial_radius = (max(som_size[0],som_size[1])/2)**2
time_constant = num_iterations/math.log(initial_radius)
start_lrate = 0.1
lattice = random_lattice(som_size, data_scaled.shape[1])
datalen = len(data_scaled)
X, Y, Z = lattice.shape
for current_iteration in range(num_iterations):
current_radius = initial_radius * math.exp(-current_iteration/time_constant)
current_lrate = start_lrate * math.exp(-current_iteration/num_iterations)
rand_input = np.random.randint(datalen)
rand_vector = data_scaled[rand_input]
BMU_dist = 1.7976931348623157e+308
BMU = (0,0)
for x in range(X):
for y in range(Y):
d = 0.0
for z in range(Z):
val = lattice[x,y,z]-rand_vector[z]
valsqr = val * val
d += valsqr
if d < BMU_dist:
BMU_dist = d
BMU = (x,y)
if is_torus:
BMUs = get_all_BMU_indexes(BMU, X, Y)
for BMU in BMUs:
adapt(lattice, rand_vector, BMU, current_radius, current_lrate)
else:
adapt(lattice, rand_vector, BMU, current_radius, current_lrate)
return lattice
@jit(nopython=True)
def adapt(lattice, rand_vector, BMU, current_radius, current_lrate):
X, Y, Z = lattice.shape
for x in range(X):
for y in range(Y):
a = x-BMU[0]
b = y-BMU[1]
d = a*a + b*b
if d < current_radius:
up = d * d
down = current_radius * current_radius
res = -up / (2 * down)
influence = math.exp(res)
for z in range(Z):
diff = (rand_vector[z] - lattice[x,y,z]) * influence * current_lrate
lattice[x,y,z] += diff
@jit(nopython=True)
def euclidean(vec1, vec2):
L = vec1.shape[0]
dist = 0
for l in range(L):
val = vec2[l] - vec1[l]
valsqr = val * val
dist += valsqr
return math.sqrt(dist)
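# Squared distance preserves the ordering of distances, so the BMU searches
# below use euclidean_squared() and skip the sqrt.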
@jit(nopython=True)
def euclidean_squared(vec1, vec2):
L = vec1.shape[0]
dist = 0
for l in range(L):
val = vec2[l] - vec1[l]
valsqr = val * val
dist += valsqr
return dist
@jit(nopython=True)
def u_matrix(lattice):
X, Y, Z = lattice.shape
u_values = np.empty((X,Y), dtype=np.float64)
for y in range(Y):
for x in range(X):
current = lattice[x,y]
dist = 0
num_neigh = 0
# left
if x-1 >= 0:
#middle
vec = lattice[x-1,y]
dist += euclidean(current, vec)
num_neigh += 1
if y - 1 >= 0:
#sup
vec = lattice[x-1, y-1]
dist += euclidean(current, vec)
num_neigh += 1
if y + 1 < Y:
# down
vec = lattice[x-1,y+1]
dist += euclidean(current, vec)
num_neigh += 1
# middle
if y - 1 >= 0:
# up
vec = lattice[x,y-1]
dist += euclidean(current, vec)
num_neigh += 1
# down
if y + 1 < Y:
vec = lattice[x,y+1]
dist += euclidean(current, vec)
num_neigh += 1
# right
if x + 1 < X:
# middle
vec = lattice[x+1,y]
dist += euclidean(current, vec)
num_neigh += 1
if y - 1 >= 0:
#up
vec = lattice[x+1,y-1]
dist += euclidean(current, vec)
num_neigh += 1
                if y + 1 < Y:
# down
vec = lattice[x+1,y+1]
dist += euclidean(current, vec)
num_neigh += 1
u_values[x,y] = dist / num_neigh
return u_values
def project_on_som(data, lattice, additional_list=None, data_scaled=False):
start = timer()
if data_scaled:
data_scaled = data
else:
data_scaled = normalize(data)
#create all keys
projected = collections.defaultdict(list)
X, Y, Z = lattice.shape
for x in range(X):
for y in range(Y):
projected[(x,y)]
# fill keys
for index, vec in enumerate(data_scaled):
winning_cell, wi = find_closest(index, vec, lattice)
projected[winning_cell].append(wi)
if additional_list:
final = {key: [additional_list[v] for v in value] for key, value in projected.items()}
else:
final = {key: [data[v] for v in value] for key, value in projected.items()}
end = timer()
print("Projecting on SOM took: %f seconds." %(end - start))
return final
@jit(nopython=True)
def find_closest_data_index(lattice_vec, data):
min_val = 1.7976931348623157e+308
winning_index = -1
data_len = len(data)
for i in range(data_len):
data_point = data[i]
dist = euclidean_squared(lattice_vec,data_point)
if dist < min_val:
min_val = dist
winning_index = i
return winning_index
def lattice_closest_vectors(data, lattice, additional_list=None, data_scaled=False):
start = timer()
if data_scaled:
data_scaled = data
else:
data_scaled = normalize(data)
X, Y, Z = lattice.shape
# create dictionary
projected = {}
# fill keys
for x in range(X):
for y in range(Y):
lattice_vec = lattice[x,y]
winning_index = find_closest_data_index(lattice_vec, data_scaled)
if additional_list:
projected[(x,y)] = [additional_list[winning_index]]
else:
projected[(x,y)] = data[winning_index]
end = timer()
print("Finding closest data points took: %f seconds." %(end - start))
return projected
@jit(nopython=True)
def find_closest(index, vec, lattice):
X, Y, Z = lattice.shape
min_val = 1.7976931348623157e+308
win_index = -1
win_cell = (-1,-1)
for x in range(X):
for y in range(Y):
dist = euclidean_squared(vec, lattice[x,y])
if dist < min_val:
min_val = dist
win_index = index
win_cell = (x,y)
return win_cell, win_index
def som(som_size, num_iterations, data, is_torus=False, is_scaled=False):
data_scaled = data
if not is_scaled:
start = timer()
data_scaled = normalize(data)
end = timer()
print("Data scaling took: %f seconds." %(end - start))
start = timer()
lattice = som_calc(som_size, num_iterations, data_scaled, is_torus)
end = timer()
print("SOM training took: %f seconds." %(end - start))
return lattice
def save_lattice(lattice, filename):
np.save(filename, lattice)
print ("SOM lattice saved at %s" %filename)
def load_lattice(filename):
lattice = np.load(filename)
print ("SOM lattice loaded from %s" %filename)
return lattice
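# Minimal usage sketch (assumes `data` is a 2-D float array and that the
# names used above -- numpy, math, numba's jit, normalize() and timer() --
# are imported at the top of this module):
#
#     lattice = som((20, 30), 10000, data, is_torus=True)
#     u_vals = u_matrix(lattice)
#     cells = project_on_som(data, lattice)
#     save_lattice(lattice, 'som_lattice.npy')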
| mit | 1,146,145,510,830,641,200 | 22.81044 | 88 | 0.621207 | false |
Buchhold/QLever | misc/move_language_into_relation.py | 1 | 1046 | import argparse
import sys
__author__ = 'buchholb'
parser = argparse.ArgumentParser()
parser.add_argument('--nt',
type=str,
help='n-triple file.',
required=True)
def writeNtFileToStdout(nt):
for line in open(nt):
cols = line.strip('\n').split('\t')
if len(cols) != 4 or cols[3] != '.':
print('Ignoring malformed line: ' + line, file=sys.stderr)
else:
            lang_start = cols[2].rfind('"@')
if lang_start > 0 and cols[2].rfind('"', lang_start + 1) == -1:
lang = cols[2][lang_start + 2:]
if cols[1][-1] == '>':
cols[1] = cols[1][:-1] + '.' + lang + '>'
else:
cols[1] += ('.' + lang)
cols[2] = cols[2][:lang_start + 1]
print('\t'.join([cols[0], cols[1], cols[2], '.']))
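# Example: the line
#     <s>\t<http://p>\t"text"@en\t.
# is rewritten to
#     <s>\t<http://p.en>\t"text"\t.
# i.e. the language tag moves out of the literal and into the relation name.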
def main():
args = vars(parser.parse_args())
nt = args['nt']
writeNtFileToStdout(nt)
if __name__ == '__main__':
main()
| apache-2.0 | 8,025,750,356,437,480,000 | 26.526316 | 75 | 0.448375 | false |
google-research/language | language/xsp/model/constants.py | 1 | 1574 | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains constants required for the model."""
# TODO(alanesuhr): These are used in convert_to_tf_examples.py.
# Use these constants instead of strings there.
# These constants define the keys used for the TFRecords.
COPIABLE_INPUT_KEY = 'copiable_input'
ALIGNED_KEY = 'utterance_schema_alignment'
SEGMENT_ID_KEY = 'segment_ids'
FOREIGN_KEY_KEY = 'indicates_foreign_key'
SOURCE_WORDPIECES_KEY = 'source_wordpieces'
SOURCE_LEN_KEY = 'source_len'
LANGUAGE_KEY = 'language'
REGION_KEY = 'region'
TAG_KEY = 'tag'
OUTPUT_TYPE_KEY = 'type'
WEIGHT_KEY = 'weight'
TARGET_ACTION_TYPES_KEY = 'target_action_types'
TARGET_ACTION_IDS_KEY = 'target_action_ids'
TARGET_LEN_KEY = 'target_len'
SCORES_KEY = 'scores'
# Symbol IDs.
TARGET_START_SYMBOL_ID = 2
TARGET_END_SYMBOL_ID = 1
PAD_SYMBOL_ID = 0
GENERATE_ACTION = 1
COPY_ACTION = 2
NUM_RESERVED_OUTPUT_SYMBOLS = 3
PREDICTED_ACTION_TYPES = 'predicted_action_types'
PREDICTED_ACTION_IDS = 'predicted_action_ids'
| apache-2.0 | -5,873,433,957,893,586,000 | 30.48 | 74 | 0.747776 | false |
ruleant/buildtime-trend | buildtimetrend/test/stages_test.py | 1 | 19853 | # vim: set expandtab sw=4 ts=4:
#
# Unit tests for Stages class
#
# Copyright (C) 2014 Dieter Adriaenssens <[email protected]>
#
# This file is part of buildtime-trend
# <https://github.com/ruleant/buildtime-trend/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from buildtimetrend.stages import Stages
from buildtimetrend.stages import Stage
import constants
from lxml import etree
import unittest
STAGES_RESULT = [{'duration': 17.0,
'finished_at': constants.SPLIT_TIMESTAMP4,
'name': 'stage1',
'started_at': constants.SPLIT_TIMESTAMP1}]
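# Expected result for the parse_timestamps tests below: a single 17-second
# 'stage1' entry. The terminating row ('end'/'done'/'finished'/'completed')
# only stops parsing and is not recorded as a stage itself.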
class TestStages(unittest.TestCase):
def setUp(self):
self.stages = Stages()
# show full diff in case of assert mismatch
self.maxDiff = None
def test_novalue(self):
# number of stages should be zero
self.assertEquals(0, len(self.stages.stages))
# test total duration
self.assertEqual(0, self.stages.total_duration())
# test started_at and finished_at
self.assertEqual(None, self.stages.started_at)
self.assertEqual(None, self.stages.finished_at)
# xml shouldn't contain items
self.assertEquals("<stages/>", etree.tostring(self.stages.to_xml()))
self.assertEquals("<stages/>\n", self.stages.to_xml_string())
def test_nofile(self):
# function should return false when file doesn't exist
self.assertFalse(self.stages.read_csv('nofile.csv'))
self.assertFalse(self.stages.read_csv(''))
# function should throw an error when no filename is set
self.assertRaises(TypeError, self.stages.read_csv)
def test_read_csv(self):
# read and parse sample file
self.assertTrue(self.stages.read_csv(constants.TEST_SAMPLE_TIMESTAMP_FILE))
# test number of stages
self.assertEquals(3, len(self.stages.stages))
# test started_at
self.assertEqual(constants.SPLIT_TIMESTAMP1, self.stages.started_at)
# test finished_at
self.assertEqual(constants.SPLIT_TIMESTAMP4, self.stages.finished_at)
# test stages (names + duration)
self.assertListEqual(
[{'duration': 2,
'finished_at': constants.SPLIT_TIMESTAMP2,
'name': 'stage1',
'started_at': constants.SPLIT_TIMESTAMP1},
{'duration': 5,
'finished_at': constants.SPLIT_TIMESTAMP3,
'name': 'stage2',
'started_at': constants.SPLIT_TIMESTAMP2},
{'duration': 10,
'finished_at': constants.SPLIT_TIMESTAMP4,
'name': 'stage3',
'started_at': constants.SPLIT_TIMESTAMP3}],
self.stages.stages)
def test_parse_timestamps_end(self):
# use 'end' to end timestamp parsing
self.stages.parse_timestamps([["stage1","1396378735"],["end","1396378752"], ["end","1396378755"]])
self.assertListEqual(STAGES_RESULT, self.stages.stages)
def test_parse_timestamps_caps(self):
# use 'End' to end timestamp parsing
self.stages.parse_timestamps([["stage1","1396378735"],["End","1396378752"], ["end","1396378755"]])
self.assertListEqual(STAGES_RESULT, self.stages.stages)
def test_parse_timestamps_end_no_match(self):
# use 'end_tag' as stage name, this shouldn't end time parsing
self.stages.parse_timestamps([["stage1","1396378735"],["end_tag","1396378752"], ["end","1396378755"]])
self.assertListEqual(
[{'duration': 17,
'finished_at': constants.SPLIT_TIMESTAMP4,
'name': 'stage1',
'started_at': constants.SPLIT_TIMESTAMP1},
{'duration': 3,
'finished_at': constants.SPLIT_TIMESTAMP_ENDTAG,
'name': 'end_tag',
'started_at': constants.SPLIT_TIMESTAMP4}],
self.stages.stages)
def test_parse_timestamps_done(self):
# use 'done' as end stage name
self.stages.parse_timestamps([["stage1","1396378735"],["done","1396378752"], ["end","1396378755"]])
self.assertListEqual(STAGES_RESULT, self.stages.stages)
def test_parse_timestamps_finished(self):
# use 'end' as end stage name
self.stages.parse_timestamps([["stage1","1396378735"],["finished","1396378752"], ["end","1396378755"]])
self.assertListEqual(STAGES_RESULT, self.stages.stages)
def test_parse_timestamps_completed(self):
# use 'completed' as end stage name
self.stages.parse_timestamps([["stage1","1396378735"],["completed","1396378752"], ["end","1396378755"]])
self.assertListEqual(STAGES_RESULT, self.stages.stages)
def test_total_duration(self):
# read and parse sample file
self.assertTrue(self.stages.read_csv(constants.TEST_SAMPLE_TIMESTAMP_FILE))
# test total duration
self.assertEqual(17, self.stages.total_duration())
def test_to_xml(self):
# read and parse sample file
self.stages.read_csv(constants.TEST_SAMPLE_TIMESTAMP_FILE)
# test xml output
self.assertEquals(
'<stages><stage duration="2.0" name="stage1"/>'
'<stage duration="5.0" name="stage2"/>'
'<stage duration="10.0" name="stage3"/></stages>',
etree.tostring(self.stages.to_xml()))
def test_to_xml_string(self):
# read and parse sample file
self.stages.read_csv(constants.TEST_SAMPLE_TIMESTAMP_FILE)
# test xml string output
self.assertEquals(
'<stages>\n'
' <stage duration="2.0" name="stage1"/>\n'
' <stage duration="5.0" name="stage2"/>\n'
' <stage duration="10.0" name="stage3"/>\n'
'</stages>\n',
self.stages.to_xml_string())
def test_add_stage(self):
# error is thrown when called without parameters
self.assertRaises(TypeError, self.stages.add_stage)
# error is thrown when called with an invalid parameter
self.assertRaises(TypeError, self.stages.add_stage, None)
self.assertRaises(TypeError, self.stages.add_stage, "string")
# add a stage
stage = Stage()
stage.set_name("stage1")
stage.set_started_at(constants.TIMESTAMP_STARTED)
stage.set_finished_at(constants.TIMESTAMP1)
stage.set_duration(235)
self.stages.add_stage(stage)
# test number of stages
self.assertEquals(1, len(self.stages.stages))
# test started_at
self.assertEqual(constants.SPLIT_TIMESTAMP_STARTED, self.stages.started_at)
# test finished_at
self.assertEqual(constants.SPLIT_TIMESTAMP1, self.stages.finished_at)
# test stages (names + duration)
self.assertListEqual(
[{'duration': 235,
'finished_at': constants.SPLIT_TIMESTAMP1,
'name': 'stage1',
'started_at': constants.SPLIT_TIMESTAMP_STARTED}],
self.stages.stages)
# add another stage
stage = Stage()
stage.set_name("stage2")
stage.set_started_at(constants.TIMESTAMP1)
stage.set_finished_at(constants.TIMESTAMP_FINISHED)
stage.set_duration(136.234)
self.stages.add_stage(stage)
# test number of stages
self.assertEquals(2, len(self.stages.stages))
# test started_at
self.assertEqual(constants.SPLIT_TIMESTAMP_STARTED, self.stages.started_at)
# test finished_at
self.assertEqual(constants.SPLIT_TIMESTAMP_FINISHED, self.stages.finished_at)
# test stages (names + duration)
self.assertListEqual(
[{'duration': 235,
'finished_at': constants.SPLIT_TIMESTAMP1,
'name': 'stage1',
'started_at': constants.SPLIT_TIMESTAMP_STARTED},
{'duration': 136.234,
'finished_at': constants.SPLIT_TIMESTAMP_FINISHED,
'name': 'stage2',
'started_at': constants.SPLIT_TIMESTAMP1}],
self.stages.stages)
def test_add_stage_incomplete(self):
# add a stage without started_at timestamp
stage = Stage()
stage.set_name("stage1")
stage.set_finished_at(constants.TIMESTAMP1)
stage.set_duration(235)
self.stages.add_stage(stage)
# test number of stages
self.assertEquals(1, len(self.stages.stages))
# test started_at
self.assertEqual(None, self.stages.started_at)
# test finished_at
self.assertEqual(constants.SPLIT_TIMESTAMP1, self.stages.finished_at)
# test stages (names + duration)
self.assertListEqual(
[{'duration': 235,
'finished_at': constants.SPLIT_TIMESTAMP1,
'name': 'stage1'}],
self.stages.stages)
# add another stage without finished_at timestamp
stage = Stage()
stage.set_name("stage2")
stage.set_started_at(constants.TIMESTAMP1)
stage.set_duration(136.234)
self.stages.add_stage(stage)
# test number of stages
self.assertEquals(2, len(self.stages.stages))
# test started_at
self.assertEqual(constants.SPLIT_TIMESTAMP1, self.stages.started_at)
# test finished_at
self.assertEqual(constants.SPLIT_TIMESTAMP1, self.stages.finished_at)
# test stages (names + duration)
self.assertListEqual(
[{'duration': 235,
'finished_at': constants.SPLIT_TIMESTAMP1,
'name': 'stage1'},
{'duration': 136.234,
'name': 'stage2',
'started_at': constants.SPLIT_TIMESTAMP1}],
self.stages.stages)
class TestStage(unittest.TestCase):
def setUp(self):
self.maxDiff = None
self.stage = Stage()
def test_novalue(self):
# number of stages should be zero
self.assertEquals(2, len(self.stage.data))
# test total duration
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
def test_set_name(self):
# name should be a string
self.assertFalse(self.stage.set_name(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# set name
self.assertTrue(self.stage.set_name("stage_name"))
self.assertDictEqual(
{"name": "stage_name", "duration": 0},
self.stage.to_dict())
# name can be an empty string
self.assertTrue(self.stage.set_name(""))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
def test_set_command(self):
# command should be a string
self.assertFalse(self.stage.set_command(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# set command
self.assertTrue(self.stage.set_command("command1.sh"))
self.assertDictEqual(
{"name": "", "duration": 0, "command": "command1.sh"},
self.stage.to_dict())
# command can be an empty string
self.assertTrue(self.stage.set_command(""))
self.assertDictEqual(
{"name": "", "duration": 0, "command": ""},
self.stage.to_dict())
def test_set_duration(self):
# duration should be a number
self.assertFalse(self.stage.set_duration(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
self.assertFalse(self.stage.set_duration("text"))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# duration can't be a negative value
self.assertFalse(self.stage.set_duration(-1))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# set duration
self.assertTrue(self.stage.set_duration(123))
self.assertDictEqual({"name": "", "duration": 123}, self.stage.to_dict())
self.assertTrue(self.stage.set_duration(123.456))
self.assertDictEqual(
{"name": "", "duration": 123.456},
self.stage.to_dict())
# duration can be zero
self.assertTrue(self.stage.set_duration(0))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
def test_set_duration_nano(self):
# duration should be a number
self.assertFalse(self.stage.set_duration_nano(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
self.assertFalse(self.stage.set_duration_nano("text"))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# duration can't be a negative value
self.assertFalse(self.stage.set_duration_nano(-1))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# set duration
self.assertTrue(self.stage.set_duration_nano(123456789))
self.assertDictEqual({"name": "", "duration": 0.123456789}, self.stage.to_dict())
self.assertTrue(self.stage.set_duration_nano(123456789.123))
self.assertDictEqual(
{"name": "", "duration": 0.123456789123},
self.stage.to_dict())
# duration can be zero
self.assertTrue(self.stage.set_duration_nano(0))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
def test_set_timestamp(self):
# timestamp should be valid
self.assertFalse(self.stage.set_timestamp("event1", None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
self.assertFalse(self.stage.set_timestamp("event1", "text"))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# test 0 timestamp (epoch)
self.assertTrue(self.stage.set_timestamp("event1", 0))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_EPOCH, self.stage.data["event1"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"event1": constants.TIMESTAMP_SPLIT_EPOCH},
self.stage.to_dict())
# test timestamp
self.assertTrue(self.stage.set_timestamp("event1", constants.TIMESTAMP_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["event1"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"event1": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_set_timestamp_nano(self):
# test 0 timestamp (epoch)
self.assertTrue(self.stage.set_timestamp_nano("event1", 0))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_EPOCH, self.stage.data["event1"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"event1": constants.TIMESTAMP_SPLIT_EPOCH},
self.stage.to_dict())
# test timestamp
self.assertTrue(self.stage.set_timestamp_nano("event1", constants.TIMESTAMP_NANO_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["event1"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"event1": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_set_started_at(self):
# timestamp should be valid
self.assertFalse(self.stage.set_started_at(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
self.assertFalse(self.stage.set_started_at("text"))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# test 0 timestamp (epoch)
self.assertTrue(self.stage.set_started_at(0))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_EPOCH, self.stage.data["started_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"started_at": constants.TIMESTAMP_SPLIT_EPOCH},
self.stage.to_dict())
# test timestamp
self.assertTrue(self.stage.set_started_at(constants.TIMESTAMP_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["started_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"started_at": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_set_started_at_nano(self):
# test timestamp
self.assertTrue(self.stage.set_started_at_nano(constants.TIMESTAMP_NANO_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["started_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"started_at": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_set_finished_at(self):
# timestamp should be valid
self.assertFalse(self.stage.set_finished_at(None))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
self.assertFalse(self.stage.set_finished_at("text"))
self.assertDictEqual({"name": "", "duration": 0}, self.stage.to_dict())
# test 0 timestamp (epoch)
self.assertTrue(self.stage.set_finished_at(0))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_EPOCH, self.stage.data["finished_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"finished_at": constants.TIMESTAMP_SPLIT_EPOCH},
self.stage.to_dict())
# test timestamp
self.assertTrue(self.stage.set_finished_at(constants.TIMESTAMP_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["finished_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"finished_at": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_set_finished_at_nano(self):
# test timestamp
self.assertTrue(self.stage.set_finished_at_nano(constants.TIMESTAMP_NANO_TESTDATE))
self.assertDictEqual(constants.TIMESTAMP_SPLIT_TESTDATE, self.stage.data["finished_at"])
self.assertDictEqual(
{
"name": "",
"duration": 0,
"finished_at": constants.TIMESTAMP_SPLIT_TESTDATE},
self.stage.to_dict())
def test_todict(self):
self.assertTrue(self.stage.set_name("stage.1"))
self.assertTrue(self.stage.set_duration(11.2345))
self.assertTrue(self.stage.set_command("command1.sh"))
self.assertTrue(self.stage.set_started_at(constants.TIMESTAMP_STARTED))
self.assertTrue(self.stage.set_finished_at(constants.TIMESTAMP_FINISHED))
# test dictionary
self.assertDictEqual(
{
"name": "stage.1",
"duration": 11.2345,
"command": "command1.sh",
"started_at": constants.SPLIT_TIMESTAMP_STARTED,
"finished_at": constants.SPLIT_TIMESTAMP_FINISHED
},
self.stage.to_dict()
)
| gpl-3.0 | 2,302,122,817,889,219,600 | 37.851272 | 112 | 0.605299 | false |
cangermueller/deepcpg | deepcpg/data/dna.py | 1 | 2502 | """Functions for representing DNA sequences."""
from __future__ import division
from __future__ import print_function
from collections import OrderedDict
import numpy as np
from six.moves import range
# Mapping of nucleotides to integers
CHAR_TO_INT = OrderedDict([('A', 0), ('T', 1), ('G', 2), ('C', 3), ('N', 4)])
# Mapping of integers to nucleotides
INT_TO_CHAR = {v: k for k, v in CHAR_TO_INT.items()}
def get_alphabet(special=False, reverse=False):
"""Return char->int alphabet.
Parameters
----------
special: bool
If `True`, remove special 'N' character.
reverse: bool
If `True`, return int->char instead of char->int alphabet.
Returns
-------
OrderedDict
DNA alphabet.
"""
alpha = OrderedDict(CHAR_TO_INT)
if not special:
del alpha['N']
if reverse:
alpha = {v: k for k, v in alpha.items()}
return alpha
def char_to_int(seq):
"""Translate chars of single sequence `seq` to ints.
Parameters
----------
seq: str
DNA sequence.
Returns
-------
list
Integer-encoded `seq`.
"""
return [CHAR_TO_INT[x] for x in seq.upper()]
def int_to_char(seq, join=True):
"""Translate ints of single sequence `seq` to chars.
Parameters
----------
seq: list
Integers of sequences
join: bool
If `True` joint characters to `str`.
Returns
-------
If `join=True`, `str`, otherwise list of chars.
"""
t = [INT_TO_CHAR[x] for x in seq]
if join:
t = ''.join(t)
return t
def int_to_onehot(seqs, dim=4):
"""One-hot encodes array of integer sequences.
    Takes an array [nb_seq, seq_len] of integer sequences and encodes them one-hot.
Special nucleotides (int > 4) will be encoded as [0, 0, 0, 0].
    Parameters
    ----------
seqs: :class:`numpy.ndarray`
[nb_seq, seq_len] :class:`numpy.ndarray` of integer sequences.
dim: int
Number of nucleotides
Returns
-------
:class:`numpy.ndarray`
[nb_seq, seq_len, dim] :class:`numpy.ndarray` of one-hot encoded
sequences.
"""
seqs = np.atleast_2d(np.asarray(seqs))
n = seqs.shape[0]
l = seqs.shape[1]
enc_seqs = np.zeros((n, l, dim), dtype='int8')
for i in range(dim):
t = seqs == i
enc_seqs[t, i] = 1
return enc_seqs
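# Example: int_to_onehot([[0, 3, 4]]) has shape (1, 3, 4) and holds the rows
# [1,0,0,0], [0,0,0,1], [0,0,0,0] -- the special 'N' nucleotide (int 4) maps
# to the all-zero vector.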
def onehot_to_int(seqs, axis=-1):
"""Translates one-hot sequences to integer sequences."""
return seqs.argmax(axis=axis)
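# Note: onehot_to_int(int_to_onehot(seqs)) recovers seqs only for values < 4;
# an all-zero 'N' column argmaxes to 0 and therefore decodes as 'A'.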
| mit | -3,484,228,904,838,836,700 | 22.383178 | 79 | 0.583933 | false |
pynamodb/PynamoDB | tests/test_discriminator.py | 1 | 6828 | import pytest
from pynamodb.attributes import DiscriminatorAttribute
from pynamodb.attributes import DynamicMapAttribute
from pynamodb.attributes import ListAttribute
from pynamodb.attributes import MapAttribute
from pynamodb.attributes import NumberAttribute
from pynamodb.attributes import UnicodeAttribute
from pynamodb.models import Model
class_name = lambda cls: cls.__name__
class TypedValue(MapAttribute):
_cls = DiscriminatorAttribute(attr_name = 'cls')
name = UnicodeAttribute()
class NumberValue(TypedValue, discriminator=class_name):
value = NumberAttribute()
class StringValue(TypedValue, discriminator=class_name):
value = UnicodeAttribute()
class RenamedValue(TypedValue, discriminator='custom_name'):
value = UnicodeAttribute()
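# The discriminator value is what serialize() stores in the 'cls' attribute:
# class_name yields 'NumberValue'/'StringValue', while RenamedValue registers
# the literal 'custom_name'. Deserialization looks that value up to pick the
# concrete TypedValue subclass, as the tests below demonstrate.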
class DiscriminatorTestModel(Model, discriminator='Parent'):
class Meta:
host = 'http://localhost:8000'
table_name = 'test'
hash_key = UnicodeAttribute(hash_key=True)
value = TypedValue()
values = ListAttribute(of=TypedValue)
type = DiscriminatorAttribute()
class ChildModel(DiscriminatorTestModel, discriminator='Child'):
value = UnicodeAttribute()
class DynamicSubclassedMapAttribute(DynamicMapAttribute):
string_attr = UnicodeAttribute()
class DynamicMapDiscriminatorTestModel(Model, discriminator='Parent'):
class Meta:
host = 'http://localhost:8000'
table_name = 'test'
hash_key = UnicodeAttribute(hash_key=True)
value = DynamicSubclassedMapAttribute(default=dict)
type = DiscriminatorAttribute()
class DynamicMapDiscriminatorChildTestModel(DynamicMapDiscriminatorTestModel, discriminator='Child'):
value = UnicodeAttribute()
class TestDiscriminatorAttribute:
def test_serialize(self):
dtm = DiscriminatorTestModel()
dtm.hash_key = 'foo'
dtm.value = StringValue(name='foo', value='Hello')
dtm.values = [NumberValue(name='bar', value=5), RenamedValue(name='baz', value='World')]
assert dtm.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
'values': {'L': [
{'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
{'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
]}
}
def test_deserialize(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'cls': {'S': 'StringValue'}, 'name': {'S': 'foo'}, 'value': {'S': 'Hello'}}},
'values': {'L': [
{'M': {'cls': {'S': 'NumberValue'}, 'name': {'S': 'bar'}, 'value': {'N': '5'}}},
{'M': {'cls': {'S': 'custom_name'}, 'name': {'S': 'baz'}, 'value': {'S': 'World'}}}
]}
}
dtm = DiscriminatorTestModel.from_raw_data(item)
assert dtm.hash_key == 'foo'
assert dtm.value.value == 'Hello'
assert dtm.values[0].value == 5
assert dtm.values[1].value == 'World'
def test_condition_expression(self):
condition = DiscriminatorTestModel.value._cls == RenamedValue
placeholder_names, expression_attribute_values = {}, {}
expression = condition.serialize(placeholder_names, expression_attribute_values)
assert expression == "#0.#1 = :0"
assert placeholder_names == {'value': '#0', 'cls': '#1'}
assert expression_attribute_values == {':0': {'S': 'custom_name'}}
def test_multiple_discriminator_values(self):
class TestAttribute(MapAttribute, discriminator='new_value'):
cls = DiscriminatorAttribute()
TestAttribute.cls.register_class(TestAttribute, 'old_value')
# ensure the first registered value is used during serialization
assert TestAttribute.cls.get_discriminator(TestAttribute) == 'new_value'
assert TestAttribute.cls.serialize(TestAttribute) == 'new_value'
# ensure the second registered value can be used to deserialize
assert TestAttribute.cls.deserialize('old_value') == TestAttribute
assert TestAttribute.cls.deserialize('new_value') == TestAttribute
def test_multiple_discriminator_classes(self):
with pytest.raises(ValueError):
# fail when attempting to register a class with an existing discriminator value
class RenamedValue2(TypedValue, discriminator='custom_name'):
pass
class TestDiscriminatorModel:
def test_serialize(self):
cm = ChildModel()
cm.hash_key = 'foo'
cm.value = 'bar'
cm.values = []
assert cm.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'bar'},
'values': {'L': []}
}
def test_deserialize(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'bar'},
'values': {'L': []}
}
cm = DiscriminatorTestModel.from_raw_data(item)
assert isinstance(cm, ChildModel)
assert cm.hash_key == 'foo'
assert cm.value == 'bar'
class TestDynamicDiscriminatorModel:
def test_serialize_parent(self):
m = DynamicMapDiscriminatorTestModel()
m.hash_key = 'foo'
m.value.string_attr = 'foostr'
m.value.bar_attribute = 3
assert m.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}},
}
def test_deserialize_parent(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Parent'},
'value': {
'M': {'string_attr': {'S': 'foostr'}, 'bar_attribute': {'N': '3'}}
}
}
m = DynamicMapDiscriminatorTestModel.from_raw_data(item)
assert m.hash_key == 'foo'
assert m.value
assert m.value.string_attr == 'foostr'
assert m.value.bar_attribute == 3
def test_serialize_child(self):
m = DynamicMapDiscriminatorChildTestModel()
m.hash_key = 'foo'
m.value = 'string val'
assert m.serialize() == {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'string val'}
}
def test_deserialize_child(self):
item = {
'hash_key': {'S': 'foo'},
'type': {'S': 'Child'},
'value': {'S': 'string val'}
}
m = DynamicMapDiscriminatorChildTestModel.from_raw_data(item)
assert m.hash_key == 'foo'
assert m.value == 'string val'
| mit | -3,047,368,666,517,998,000 | 33.836735 | 105 | 0.57645 | false |
samuelcolvin/django-importexport | views.py | 1 | 5063 | from django import forms
import Imex.models as m
import Imex.tasks as tasks
import HotDjango.views_base as viewb
from django.core.urlresolvers import reverse
from django.db import models
import settings
from django.shortcuts import redirect
import Imex
import_groups, export_groups = Imex.get_imex_groups()
actions = {'imex_import':import_groups, 'imex_export': export_groups}
class Export(viewb.TemplateBase):
template_name = 'export.html'
menu_active = 'imex_export'
side_menu = False
show_crums = False
def get_context_data(self, **kw):
self._context['title'] = 'Export'
self._context['page_menu'] = self.set_links()
return self._context
def set_links(self):
links= []
for group, label in actions['imex_export']:
links.append({'url': reverse('imex_process', kwargs={'command': 'imex_export', 'group': group}), 'name': label})
return links
class ExcelUploadForm(forms.Form):
xlfile = forms.FileField(
label='Select Excel (xlsx) File to Upload',
help_text='should be in standard format for this system'
)
import_group = forms.ChoiceField(widget=forms.RadioSelect, choices=import_groups, label='Import Type', initial=import_groups[0][0])
class Import(viewb.TemplateBase):
template_name = 'import.html'
menu_active = 'imex_import'
side_menu = False
show_crums = False
def get_context_data(self, **kw):
self._context['title'] = 'Import'
self._context['process_url'] = reverse('imex_process', kwargs={'command': 'imex_import'})
self._context['upload_form'] = ExcelUploadForm()
if 'errors' in self.request.session:
self._context['errors'] = self.request.session['errors']
return self._context
class Process(viewb.TemplateBase):
template_name = 'process.html'
side_menu = False
show_crums = False
_redirect = None
def get(self, request, *args, **kw):
if 'menu_active' in request.session:
self.menu_active = request.session['menu_active']
return super(Process, self).get(request, *args, **kw)
def post(self, request, *args, **kw):
page = self.get(request, *args, **kw)
if self._redirect:
return self._redirect
return page
_act_map = {'imex_export': 'EX', 'imex_import':'IM'}
def get_context_data(self, **kw):
self._context['expected_ms'] = 0
act = self._act_map[kw['command']]
self._context['act'] = act
prev_successful = m.Process.objects.filter(complete=True, successful=True, action=act)
if prev_successful.exists():
# print 'average_of %s' % ','.join([ '%0.3f' % p.time_taken for p in prev_successful])
expected_time = prev_successful.aggregate(expected_time = models.Avg('time_taken'))['expected_time']
self._context['expected_ms'] = '%0.0f' % (expected_time * 1000)
success = self.choose_func(kw)
if not success:
return self._context
self._context['media_url'] = settings.MEDIA_URL
self._context['json_url'] = '%s/%d.json' % (reverse('rest-Imex-Process-list'), self._pid)
return self._context
def choose_func(self, kw):
if 'command' in kw:
command = kw['command']
if command in [func_name for func_name, _ in self._act_map.items()]:
return getattr(self, command)(kw)
else:
self._context['errors'] = ['No function called %s' % command]
def imex_export(self, kw):
group = kw['group']
assert group in [g for g, _ in export_groups], \
'group %s not found in export_groups: %r' % (group, export_groups)
processor = m.Process.objects.create(action='EX', group=group)
self._pid = processor.id
tasks.perform_export(self._pid)
return True
def imex_import(self, kw):
error = None
if self.request.method != 'POST':
error = "No post data"
else:
form = ExcelUploadForm(self.request.POST, self.request.FILES)
import_group = form['import_group'].value()
if not form.is_valid():
error = "Form not valid"
elif not str(self.request.FILES['xlfile']).endswith('.xlsx'):
error = 'File must be xlsx, not xls or any other format.'
elif import_group not in [g for g, _ in import_groups]:
error = 'Group %s is not one of the import groups: %r' % (import_group, import_groups)
if error:
            print('refused')
self.request.session['errors'] = [error]
self._redirect = redirect(reverse('imex_import'))
return
p = m.Process.objects.create(action='IM', imex_file = self.request.FILES['xlfile'], group=import_group)
msg = tasks.perform_import(p.id)
if msg:
self._context['errors'].append(msg)
self._pid = p.id
return True
| gpl-2.0 | 4,235,616,666,778,978,300 | 37.648855 | 135 | 0.596089 | false |
ghostop14/sparrow-wifi | sparrowwifiagent.py | 1 | 114857 | #!/usr/bin/python3
#
# Copyright 2017 ghostop14
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import os
import sys
import datetime
import json
import re
import argparse
import configparser
# import subprocess
from socket import *
from time import sleep
from threading import Thread, Lock
from dateutil import parser
from http import server as HTTPServer
from socketserver import ThreadingMixIn
from wirelessengine import WirelessEngine
from sparrowgps import GPSEngine, GPSEngineStatic, GPSStatus, SparrowGPS
try:
from sparrowdrone import SparrowDroneMavlink
hasDroneKit = True
except:
hasDroneKit = False
from sparrowrpi import SparrowRPi
from sparrowbluetooth import SparrowBluetooth, BluetoothDevice
from sparrowhackrf import SparrowHackrf
from sparrowcommon import gzipCompress
try:
from manuf import manuf
hasOUILookup = True
except:
hasOUILookup = False
# ------ Global setup ------------
gpsEngine = None
curTime = datetime.datetime.now()
useMavlink = False
vehicle = None
mavlinkGPSThread = None
hasFalcon = False
hasBluetooth = False
hasUbertooth = False
falconWiFiRemoteAgent = None
bluetooth = None
hackrf = SparrowHackrf()
debugHTTP = False
allowCors = False
# Lock list is a dictionary of thread locks for scanning interfaces
lockList = {}
allowedIPs = []
useRPILeds = False
# runningcfg is created in main
runningcfg = None
recordThread = None
announceThread = None
# ------ Global functions ------------
def stringtobool(instr):
if (instr == 'True' or instr == 'true'):
return True
else:
return False
def TwoDigits(instr):
# Fill in a leading zero for single-digit numbers
while len(instr) < 2:
instr = '0' + instr
return instr
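# e.g. TwoDigits('5') -> '05'; used below to zero-pad the date/time parts of
# recording filenames.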
def deleteRecordingFiles(filelist):
dirname, filename = os.path.split(os.path.abspath(__file__))
recordingsDir = dirname + '/recordings'
retVal = ''
for curFilename in filelist:
# This split is simply a safety check to prevent path traversal attacks
dirname, filename = os.path.split(curFilename)
if len(filename) > 0:
fullpath = recordingsDir + '/' + filename
try:
os.remove(fullpath)
except:
if len(retVal) == 0:
retVal = filename
else:
retVal += ',' + filename
return retVal
def getRecordingFiles():
dirname, filename = os.path.split(os.path.abspath(__file__))
recordingsDir = dirname + '/recordings'
if not os.path.exists(recordingsDir):
os.makedirs(recordingsDir)
retVal = []
try:
for filename in os.listdir(recordingsDir):
fullPath = recordingsDir + '/' + filename
if not os.path.isdir(fullPath):
curFile = FileSystemFile()
curFile.filename = filename
curFile.size = os.path.getsize(fullPath)
try:
curFile.timestamp = datetime.datetime.fromtimestamp(os.path.getmtime(fullPath))
except:
curFile.timestamp = None
retVal.append(curFile.toJsondict())
except:
pass
return retVal
def restartAgent():
global bluetooth
if mavlinkGPSThread:
mavlinkGPSThread.signalStop = True
print('Waiting for mavlink GPS thread to terminate...')
while (mavlinkGPSThread.threadRunning):
sleep(0.2)
stopRecord()
stopAnnounceThread()
if bluetooth:
bluetooth.stopScanning()
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
if hasFalcon:
falconWiFiRemoteAgent.cleanup()
if os.path.isfile('/usr/local/bin/python3.5') or os.path.isfile('/usr/bin/python3.5'):
exefile = 'python3.5'
else:
exefile = 'python3'
# params = [exefile, __file__, '--delaystart=2']
newCommand = exefile + ' ' + __file__ + ' --delaystart=2 &'
os.system(newCommand)
# subprocess.Popen(params, stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# result = subprocess.run(params, stdout=subprocess.DEVNULL,stderr=subprocess.DEVNULL)
# restartResult = result.stdout.decode('UTF-8')
os.kill(os.getpid(), 9)
def updateRunningConfig(newCfg):
global runningcfg
if runningcfg.ipAllowedList != newCfg.ipAllowedList:
buildAllowedIPs(newCfg.ipAllowedList)
# port we ignore since we're already running
# useRPiLEDs will just update
# Announce
if runningcfg.announce != newCfg.announce:
if not newCfg.announce:
stopAnnounceThread()
else:
# start will check if it's already running
startAnnounceThread()
# mavlinkGPS
# Need to restart to update mavlinkGPS
# So just copy forward
newCfg.mavlinkGPS = runningcfg.mavlinkGPS
# recordInterface
if runningcfg.recordInterface != newCfg.recordInterface:
if len(newCfg.recordInterface) == 0:
stopRecord()
else:
# start will check if it's already running
startRecord(newCfg.recordInterface)
# Finally swap out the config
runningcfg = newCfg
def startRecord(interface):
global recordThread
if recordThread:
return
if len(interface) > 0:
interfaces = WirelessEngine.getInterfaces()
if interface in interfaces:
recordThread = AutoAgentScanThread(interface)
recordThread.start()
else:
print('ERROR: Record was requested on ' + interface + ' but that interface was not found.')
else:
recordThread = None
def stopRecord():
global recordThread
if recordThread:
recordThread.signalStop = True
print('Waiting for record thread to terminate...')
i=0
        maxCycles = 2 / 0.2  # wait at most ~2 seconds at 0.2 s per poll
while (recordThread.threadRunning) and (i<maxCycles):
sleep(0.2)
i += 1
def stopAnnounceThread():
global announceThread
if announceThread:
announceThread.signalStop = True
print('Waiting for announce thread to terminate...')
sleep(0.2)
# i=0
# maxCycles = 5 # int(2.0 /0.2)
# while (announceThread.threadRunning) and (i<maxCycles):
# sleep(0.2)
# i += 1
announceThread = None
def startAnnounceThread():
global runningcfg
global announceThread
# Start announce if needed
if announceThread:
# It's already running
return
print('Sending agent announcements on port ' + str(runningcfg.port) + '.')
announceThread = AnnounceThread(runningcfg.port)
announceThread.start()
def buildAllowedIPs(allowedIPstr):
global allowedIPs
allowedIPs = []
if len(allowedIPstr) > 0:
ippattern = re.compile('([0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})')
if ',' in allowedIPstr:
tmpList = allowedIPstr.split(',')
for curItem in tmpList:
ipStr = curItem.replace(' ', '')
try:
ipValue = ippattern.search(ipStr).group(1)
except:
ipValue = ""
print('ERROR: Unknown IP pattern: ' + ipStr)
exit(3)
if len(ipValue) > 0:
allowedIPs.append(ipValue)
else:
ipStr = allowedIPstr.replace(' ', '')
try:
ipValue = ippattern.search(ipStr).group(1)
except:
ipValue = ""
print('ERROR: Unknown IP pattern: ' + ipStr)
return False
if len(ipValue) > 0:
allowedIPs.append(ipValue)
return True
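# Accepted formats for the allowed-IP setting: a single dotted quad
# ("10.0.0.5") or a comma-separated list ("10.0.0.5, 10.0.0.6"). Spaces are
# stripped; a malformed entry aborts startup (exit(3)) in the list case or
# makes this function return False in the single-entry case.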
# ------ OUI lookup functions ------------
def getOUIDB():
ouidb = None
if hasOUILookup:
if os.path.isfile('manuf'):
# We have the file but let's not update it every time we run the app.
# every 90 days should be plenty
last_modified_date = datetime.datetime.fromtimestamp(os.path.getmtime('manuf'))
now = datetime.datetime.now()
age = now - last_modified_date
if age.days > 90:
updateflag = True
else:
updateflag = False
else:
# We don't have the file, let's get it
updateflag = True
try:
ouidb = manuf.MacParser(update=updateflag)
except:
ouidb = None
else:
ouidb = None
return ouidb
# ------------------ File ------------------------------
class FileSystemFile(object):
def __init__(self):
self.filename = ""
self.size = 0
self.timestamp = None
def __str__(self):
retVal = self.filename
return retVal
def toJsondict(self):
jsondict = {}
jsondict['filename'] = self.filename
jsondict['size'] = self.size
jsondict['timestamp'] = str(self.timestamp)
return jsondict
def fromJsondict(self, jsondict):
self.filename = jsondict['filename']
self.size = jsondict['size']
if jsondict['timestamp'] == 'None':
self.timestamp = None
else:
self.timestamp = parser.parse(jsondict['timestamp'])
# ------------------ Config Settings ------------------------------
class AgentConfigSettings(object):
def __init__(self):
self.cancelStart = False
self.port = 8020
self.announce = False
self.useRPiLEDs = False
self.recordInterface=""
self.recordRunning = False
self.mavlinkGPS = ""
self.ipAllowedList = ""
self.allowCors = False
def __str__(self):
retVal = "Cancel Start: " + str(self.cancelStart) + "\n"
retVal += "Port: " + str(self.port) + "\n"
retVal += "Announce Agent: " + str(self.announce) + "\n"
retVal += "Use RPi LEDs: " + str(self.useRPiLEDs) + "\n"
retVal += "Record Interface: " + self.recordInterface + "\n"
retVal += "Record Running (for running configs): " + str(self.recordRunning) + "\n"
retVal += "Mavlink GPS: " + self.mavlinkGPS + "\n"
retVal += "IP Allowed List: " + self.ipAllowedList + "\n"
retVal += "Allow CORS: " + str(self.allowCors) + "\n"
return retVal
def __eq__(self, obj):
        # This is equivalence.... ==
if not isinstance(obj, AgentConfigSettings):
return False
if self.cancelStart != obj.cancelStart:
return False
if self.port != obj.port:
return False
if self.announce != obj.announce:
return False
if self.useRPiLEDs != obj.useRPiLEDs:
return False
if self.recordInterface != obj.recordInterface:
return False
if self.mavlinkGPS != obj.mavlinkGPS:
return False
if self.ipAllowedList != obj.ipAllowedList:
return False
if self.allowCors != obj.allowCors:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def toJsondict(self):
dictjson = {}
dictjson['cancelstart'] = str(self.cancelStart)
dictjson['port'] = self.port
dictjson['announce'] = str(self.announce)
dictjson['recordrunning'] = str(self.recordRunning)
dictjson['userpileds'] = str(self.useRPiLEDs)
dictjson['recordinterface'] = self.recordInterface
dictjson['mavlinkgps'] = self.mavlinkGPS
dictjson['allowedips'] = self.ipAllowedList
dictjson['allowcors'] = str(self.allowCors)
return dictjson
def toJson(self):
dictjson = self.toJsondict()
return json.dumps(dictjson)
def fromJsondict(self, dictjson):
try:
self.cancelStart = stringtobool(dictjson['cancelstart'])
self.port = int(dictjson['port'])
self.announce = stringtobool(dictjson['announce'])
self.recordRunning = stringtobool(dictjson['recordrunning'])
self.useRPiLEDs = stringtobool(dictjson['userpileds'])
self.recordInterface = dictjson['recordinterface']
self.mavlinkGPS = dictjson['mavlinkgps']
self.ipAllowedList = dictjson['allowedips']
# if 'allowcors' in dictjson.keys():
self.allowCors = stringtobool(dictjson['allowcors'])
# else:
# print("allowCors not set in dictjson!")
except Exception as e:
print(e)
def fromJson(self, jsonstr):
dictjson = json.loads(jsonstr)
self.fromJsondict(dictjson)
def toConfigFile(self, cfgFile):
config = configparser.ConfigParser()
config['agent'] = self.toJsondict()
try:
with open(cfgFile, 'w') as configfile:
config.write(configfile)
return True
except:
return False
def fromConfigFile(self, cfgFile):
if os.path.isfile(cfgFile):
cfgParser = configparser.ConfigParser()
try:
cfgParser.read(cfgFile)
section="agent"
options = cfgParser.options(section)
for option in options:
try:
if option =='cancelstart':
self.cancelStart = stringtobool(cfgParser.get(section, option))
elif option == 'sendannounce':
self.announce = stringtobool(cfgParser.get(section, option))
elif option == 'userpileds':
self.useRPiLEDs = stringtobool(cfgParser.get(section, option))
elif option == 'port':
self.port=int(cfgParser.get(section, option))
elif option == 'recordinterface':
self.recordInterface=cfgParser.get(section, option)
elif option == 'mavlinkgps':
self.mavlinkGPS=cfgParser.get(section, option)
elif option == 'allowedips':
self.ipAllowedList = cfgParser.get(section, option)
elif option == 'allowcors':
self.allowCors = stringtobool(cfgParser.get(section, option))
except:
print("exception on %s!" % option)
except:
print("ERROR: Unable to read config file: ", cfgFile)
return False
else:
return False
return True
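# A sparrowwifiagent.cfg that fromConfigFile() understands looks like this
# (a sketch; values are examples -- note the parser reads the key
# 'sendannounce' even though toConfigFile() writes 'announce'):
#
# [agent]
# cancelstart=False
# port=8020
# sendannounce=True
# userpileds=False
# recordinterface=wlan0
# mavlinkgps=
# allowedips=192.168.1.10,192.168.1.11
# allowcors=False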
# ------------------ Agent auto scan thread ------------------------------
class AutoAgentScanThread(Thread):
def __init__(self, interface):
global lockList
global hasBluetooth
super(AutoAgentScanThread, self).__init__()
self.interface = interface
self.signalStop = False
self.scanDelay = 0.5 # seconds
self.threadRunning = False
self.discoveredNetworks = {}
self.discoveredBluetoothDevices = {}
self.daemon = True
try:
self.hostname = os.uname()[1]
except:
self.hostname = 'unknown'
if len(self.hostname) == 0:
self.hostname = 'unknown'
self.ouiLookupEngine = getOUIDB()
if interface not in lockList.keys():
lockList[interface] = Lock()
if not os.path.exists('./recordings'):
os.makedirs('./recordings')
now = datetime.datetime.now()
self.filename = './recordings/' + self.hostname + '_wifi_' + str(now.year) + "-" + TwoDigits(str(now.month)) + "-" + TwoDigits(str(now.day))
self.filename += "_" + TwoDigits(str(now.hour)) + "_" + TwoDigits(str(now.minute)) + "_" + TwoDigits(str(now.second)) + ".csv"
self.btfilename = './recordings/' + self.hostname + '_bt_' + str(now.year) + "-" + TwoDigits(str(now.month)) + "-" + TwoDigits(str(now.day))
self.btfilename += "_" + TwoDigits(str(now.hour)) + "_" + TwoDigits(str(now.minute)) + "_" + TwoDigits(str(now.second)) + ".csv"
        print('Capturing on ' + interface + ' and writing wifi to ' + self.filename)
        if hasBluetooth:
            print('and writing bluetooth to ' + self.btfilename)
def run(self):
global lockList
global hasBluetooth
self.threadRunning = True
if self.interface not in lockList.keys():
lockList[self.interface] = Lock()
curLock = lockList[self.interface]
if hasBluetooth:
# Start normal discovery
bluetooth.startDiscovery(False)
lastState = -1
while (not self.signalStop):
# Scan all / normal mode
if (curLock):
curLock.acquire()
retCode, errString, wirelessNetworks = WirelessEngine.scanForNetworks(self.interface)
if (curLock):
curLock.release()
if (retCode == 0):
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
elif gpsEngine.gpsValid():
gpsCoord = gpsEngine.lastCoord
if useRPILeds and (lastState !=SparrowRPi.LIGHT_STATE_ON):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
lastState = SparrowRPi.LIGHT_STATE_ON
else:
gpsCoord = GPSStatus()
if useRPILeds and (lastState !=SparrowRPi.LIGHT_STATE_HEARTBEAT) :
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT
# self.statusBar().showMessage('Scan complete. Found ' + str(len(wirelessNetworks)) + ' networks')
if wirelessNetworks and (len(wirelessNetworks) > 0) and (not self.signalStop):
for netKey in wirelessNetworks.keys():
curNet = wirelessNetworks[netKey]
curNet.gps.copy(gpsCoord)
curNet.strongestgps.copy(gpsCoord)
curKey = curNet.getKey()
if curKey not in self.discoveredNetworks.keys():
self.discoveredNetworks[curKey] = curNet
else:
# Network exists, need to update it.
pastNet = self.discoveredNetworks[curKey]
# Need to save strongest gps and first seen. Everything else can be updated.
# Carry forward firstSeen
curNet.firstSeen = pastNet.firstSeen # This is one field to carry forward
# Check strongest signal
if pastNet.strongestsignal > curNet.signal:
curNet.strongestsignal = pastNet.strongestsignal
curNet.strongestgps.latitude = pastNet.strongestgps.latitude
curNet.strongestgps.longitude = pastNet.strongestgps.longitude
curNet.strongestgps.altitude = pastNet.strongestgps.altitude
curNet.strongestgps.speed = pastNet.strongestgps.speed
curNet.strongestgps.isValid = pastNet.strongestgps.isValid
self.discoveredNetworks[curKey] = curNet
if not self.signalStop:
self.exportNetworks()
# Now if we have bluetooth running export these:
if hasBluetooth and bluetooth.discoveryRunning():
bluetooth.deviceLock.acquire()
# Update GPS
now = datetime.datetime.now()
for curKey in bluetooth.devices.keys():
curDevice = bluetooth.devices[curKey]
elapsedTime = now - curDevice.lastSeen
# This is a little bit of a hack for the BlueHydra side since it can take a while to see devices or have
# them show up in the db. For LE discovery scans this will always be pretty quick.
if elapsedTime.total_seconds() < 120:
curDevice.gps.copy(gpsCoord)
if curDevice.rssi >= curDevice.strongestRssi:
curDevice.strongestRssi = curDevice.rssi
curDevice.strongestgps.copy(gpsCoord)
# export
self.exportBluetoothDevices(bluetooth.devices)
bluetooth.deviceLock.release()
sleep(self.scanDelay)
if hasBluetooth:
# Start normal discovery
bluetooth.stopDiscovery()
self.threadRunning = False
def ouiLookup(self, macAddr):
clientVendor = ""
if hasOUILookup:
try:
if self.ouiLookupEngine:
clientVendor = self.ouiLookupEngine.get_manuf(macAddr)
except:
clientVendor = ""
return clientVendor
def exportBluetoothDevices(self, devices):
try:
btOutputFile = open(self.btfilename, 'w')
except:
print('ERROR: Unable to write to bluetooth file ' + self.filename)
return
btOutputFile.write('uuid,Address,Name,Company,Manufacturer,Type,RSSI,TX Power,Strongest RSSI,Est Range (m),Last Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')
for curKey in devices.keys():
curData = devices[curKey]
btType = ""
if curData.btType == BluetoothDevice.BT_LE:
btType = "BTLE"
else:
btType = "Classic"
if curData.txPowerValid:
txPower = str(curData.txPower)
else:
txPower = 'Unknown'
btOutputFile.write(curData.uuid + ',' + curData.macAddress + ',"' + curData.name + '","' + curData.company + '","' + curData.manufacturer)
btOutputFile.write('","' + btType + '",' + str(curData.rssi) + ',' + str(curData.strongestRssi) + ',' + txPower + ',' + str(curData.iBeaconRange) + ',' +
curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')
btOutputFile.close()
def exportNetworks(self):
try:
self.outputFile = open(self.filename, 'w')
except:
print('ERROR: Unable to write to wifi file ' + self.filename)
return
self.outputFile.write('macAddr,vendor,SSID,Security,Privacy,Channel,Frequency,Signal Strength,Strongest Signal Strength,Bandwidth,Last Seen,First Seen,GPS Valid,Latitude,Longitude,Altitude,Speed,Strongest GPS Valid,Strongest Latitude,Strongest Longitude,Strongest Altitude,Strongest Speed\n')
for netKey in self.discoveredNetworks.keys():
curData = self.discoveredNetworks[netKey]
vendor = self.ouiLookup(curData.macAddr)
if vendor is None:
vendor = ''
self.outputFile.write(curData.macAddr + ',' + vendor + ',"' + curData.ssid + '",' + curData.security + ',' + curData.privacy)
self.outputFile.write(',' + curData.getChannelString() + ',' + str(curData.frequency) + ',' + str(curData.signal) + ',' + str(curData.strongestsignal) + ',' + str(curData.bandwidth) + ',' +
curData.lastSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' + curData.firstSeen.strftime("%m/%d/%Y %H:%M:%S") + ',' +
str(curData.gps.isValid) + ',' + str(curData.gps.latitude) + ',' + str(curData.gps.longitude) + ',' + str(curData.gps.altitude) + ',' + str(curData.gps.speed) + ',' +
str(curData.strongestgps.isValid) + ',' + str(curData.strongestgps.latitude) + ',' + str(curData.strongestgps.longitude) + ',' + str(curData.strongestgps.altitude) + ',' + str(curData.strongestgps.speed) + '\n')
self.outputFile.close()
# ------------------ Announce thread ------------------------------
class AnnounceThread(Thread):
def __init__(self, port):
super(AnnounceThread, self).__init__()
self.signalStop = False
self.sendDelay = 4.0 # seconds
self.threadRunning = False
self.daemon = True
self.broadcastSocket = socket(AF_INET, SOCK_DGRAM)
self.broadcastSocket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
self.broadcastSocket.setsockopt(SOL_SOCKET, SO_BROADCAST, 1)
self.broadcastPort = port
self.broadcastAddr=('255.255.255.255', self.broadcastPort)
def sendAnnounce(self):
try:
self.broadcastSocket.sendto(bytes('sparrowwifiagent', "utf-8"),self.broadcastAddr)
except:
pass
def run(self):
self.threadRunning = True
while (not self.signalStop):
self.sendAnnounce()
# 4 second delay, but check every second for termination signal
i=0
while i<4 and not self.signalStop:
sleep(1.0)
i += 1
self.threadRunning = False
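# Discovery protocol: clients listening on the same UDP port receive the
# literal datagram b'sparrowwifiagent' roughly every 4 seconds and can use
# the packet's source address to locate the agent.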
# ------------------ Local network scan thread ------------------------------
class MavlinkGPSThread(Thread):
def __init__(self, vehicle):
super(MavlinkGPSThread, self).__init__()
self.signalStop = False
self.scanDelay = 0.5 # seconds
self.threadRunning = False
self.vehicle = vehicle
self.synchronized = False
self.latitude = 0.0
self.longitude = 0.0
self.altitude = 0.0
self.daemon = True
def run(self):
self.threadRunning = True
lastState = -1
while (not self.signalStop):
self.synchronized, self.latitude, self.longitude, self.altitude = self.vehicle.getGlobalGPS()
if self.synchronized:
# Solid on synchronized
if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_ON):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
lastState = SparrowRPi.LIGHT_STATE_ON
else:
# heartbeat on unsynchronized
if useRPILeds and (lastState != SparrowRPi.LIGHT_STATE_HEARTBEAT):
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
lastState = SparrowRPi.LIGHT_STATE_HEARTBEAT
sleep(self.scanDelay)
self.threadRunning = False
class SparrowWiFiAgent(object):
# See https://docs.python.org/3/library/http.server.html
# For HTTP Server info
def run(self, port):
global useRPILeds
global hackrf
global bluetooth
global falconWiFiRemoteAgent
server_address = ('', port)
        try:
            # httpd = HTTPServer.HTTPServer(server_address, SparrowWiFiAgentRequestHandler)
            httpd = MultithreadHTTPServer(server_address, SparrowWiFiAgentRequestHandler)
except OSError as e:
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Unable to bind to port " + str(port) + ". " + e.strerror)
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
exit(1)
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Starting Sparrow-wifi agent on port " + str(port))
if useRPILeds:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
try:
httpd.serve_forever()
except KeyboardInterrupt:
pass
httpd.server_close()
if useRPILeds:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
if hasFalcon:
falconWiFiRemoteAgent.cleanup()
if bluetooth:
bluetooth.stopScanning()
if hackrf.scanRunning():
hackrf.stopScanning()
curTime = datetime.datetime.now()
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Sparrow-wifi agent stopped.")
# --------------- Multithreaded HTTP Server ------------------------------------
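# ThreadingMixIn serves each request on its own thread, so a long-running
# scan GET does not block concurrent status polls.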
class MultithreadHTTPServer(ThreadingMixIn, HTTPServer.HTTPServer):
pass
# --------------- HTTP Request Handler --------------------
# Sample handler: https://wiki.python.org/moin/BaseHttpServer
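# Handler conventions: the request object is named 's' rather than 'self',
# and every JSON response carries at least 'errcode' (0 = success) and 'errmsg'.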
class SparrowWiFiAgentRequestHandler(HTTPServer.BaseHTTPRequestHandler):
def log_message(self, format, *args):
global debugHTTP
        if not debugHTTP:
            return
        # Delegate to the default logger when HTTP debugging is enabled
        HTTPServer.BaseHTTPRequestHandler.log_message(self, format, *args)
def do_HEAD(s):
global allowCors
s.send_response(200)
s.send_header("Content-type", "text/html")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
def do_POST(s):
global runningcfg
global falconWiFiRemoteAgent
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
if (not s.isValidPostURL()):
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Page not found.</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
return
# Get the size of the posted data
try:
length = int(s.headers['Content-Length'])
except:
length = 0
if length <= 0:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Agent received a zero-length request.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
# get the POSTed payload
jsonstr_data = s.rfile.read(length).decode('utf-8')
# Try to convert it to JSON
try:
jsondata = json.loads(jsonstr_data)
except:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'bad posted data.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if s.path == '/system/config':
# ------------- Update startup config ------------------
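            # Expected POST body (sketch; field names mirror AgentConfigSettings):
            #   { "startup": { ... }, "running": { ... }, "rebootagent": false }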
try:
scfg = jsondata['startup']
startupCfg = AgentConfigSettings()
startupCfg.fromJsondict(scfg)
dirname, filename = os.path.split(os.path.abspath(__file__))
cfgFile = dirname + '/sparrowwifiagent.cfg'
retVal = startupCfg.toConfigFile(cfgFile)
if not retVal:
# HTML 400 = Bad request
s.send_response(400)
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'An error occurred saving the startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
responsedict = {}
responsedict['errcode'] = 3
responsedict['errmsg'] = 'Bad startup config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Check if we should reboot ------------------
if 'rebootagent' in jsondata:
rebootFlag = jsondata['rebootagent']
if rebootFlag:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = 'Restarting agent.'
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
restartAgent()
# If we're restarting, we'll never get to running config.
# ------------- Update Running config ------------------
try:
rcfg = jsondata['running']
tmpcfg = AgentConfigSettings()
tmpcfg.fromJsondict(rcfg)
updateRunningConfig(tmpcfg)
try:
s.send_response(200)
s.send_header("Content-Length", 0)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
except Exception as e:
print(e)
responsedict = {}
responsedict['errcode'] = 4
responsedict['errmsg'] = 'Bad running config.'
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
# ------------- Done updating config ------------------
elif s.path == '/system/deleterecordings':
try:
filelist = jsondata['files']
problemfiles=deleteRecordingFiles(filelist)
responsedict = {}
if len(problemfiles) == 0:
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = problemfiles
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
except:
pass
elif s.path == '/falcon/stopdeauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
falconWiFiRemoteAgent.stopDeauth(apMacAddr, clientMacAddr, curInterface, channel)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/deauth':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Should get a FalconDeauth object
# This is in jsondata
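                # Expected keys (sketch): apmacaddr, stationmacaddr (an empty
                # string targets the AP alone), channel, interface, continuous.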
try:
apMacAddr = jsondata['apmacaddr']
clientMacAddr = jsondata['stationmacaddr']
channel = jsondata['channel']
curInterface = jsondata['interface']
continuous = jsondata['continuous']
if len(clientMacAddr) == 0:
newDeauth = falconWiFiRemoteAgent.deauthAccessPoint(apMacAddr, curInterface, channel, continuous)
else:
newDeauth = falconWiFiRemoteAgent.deauthAccessPointAndClient(apMacAddr, clientMacAddr, curInterface, channel, continuous)
if not continuous:
# There's nothing to check. Just return
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if newDeauth:
# Deauth was started
try:
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Something went wrong with the start
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = "An error occurred starting the deauth process."
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/startcrack':
if not hasFalcon:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Extract necessary info for cracking
try:
crackType = jsondata['cracktype'] # This will be wep or wpapsk
curInterface = jsondata['interface']
channel = jsondata['channel']
ssid = jsondata['ssid']
apMacAddr=jsondata['apmacaddr']
hasClient = jsondata['hasclient']
                    # For now you can only run 1 crack globally due to tmp file naming.
# At some point I'll scale it out
if crackType == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
# Stop one if it was already running
wepCrack.stopCrack()
else:
wepCrack = WEPCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface] = wepCrack
wepCrack.cleanupTempFiles()
retVal, errMsg = wepCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
# Stop one if it was already running
wpaPSKCrack.stopCrack()
else:
wpaPSKCrack = WPAPSKCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface] = wpaPSKCrack
wpaPSKCrack.cleanupTempFiles()
retVal, errMsg = wpaPSKCrack.startCrack(curInterface, channel, ssid, apMacAddr, hasClient)
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
responsedict = {}
# For start, retVal is True/False
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
try:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing json"
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
try:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = 'Bad request.'
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
def isValidPostURL(s):
allowedfullurls = ['/system/config',
'/falcon/startcrack',
'/falcon/deauth',
'/falcon/stopdeauth',
'/system/deleterecordings']
allowedstarturls=[]
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
def isValidGetURL(s):
# Full urls
allowedfullurls = ['/wireless/interfaces',
'/wireless/moninterfaces',
'/falcon/getscanresults',
'/falcon/getalldeauths',
'/system/getrecordings',
'/bluetooth/present',
'/bluetooth/scanstart',
'/bluetooth/scanstop',
'/bluetooth/scanstatus',
'/bluetooth/running',
'/bluetooth/beaconstart',
'/bluetooth/beaconstop',
'/bluetooth/discoverystartp',
'/bluetooth/discoverystarta',
'/bluetooth/discoverystop',
'/bluetooth/discoveryclear',
'/bluetooth/discoverystatus',
'/spectrum/scanstart24',
'/spectrum/scanstart5',
'/spectrum/scanstop',
'/spectrum/scanstatus',
'/spectrum/hackrfstatus',
'/gps/status']
# partials that have more in the URL
allowedstarturls=['/wireless/networks/',
'/falcon/startmonmode/',
'/falcon/stopmonmode/',
'/falcon/scanrunning/',
'/falcon/startscan/',
'/falcon/stopscan/',
'/falcon/stopalldeauths',
'/falcon/crackstatuswpapsk',
'/falcon/crackstatuswep',
'/falcon/stopcrack',
'/system/config',
'/system/startrecord',
'/system/stoprecord',
'/system/getrecording']
if s.path in allowedfullurls:
return True
else:
for curURL in allowedstarturls:
if s.path.startswith(curURL):
return True
return False
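    # Example client sketch (hypothetical host/port; the 'requests' library is
    # not used by the agent itself):
    #   import requests
    #   iface_list = requests.get('http://192.168.1.10:8020/wireless/interfaces').json()
    #   print(iface_list['interfaces'])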
def sendFile(s, passedfilename):
# Directory traversal safety check
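        # os.path.split() keeps only the basename of the client-supplied path, so
        # a request like '../../etc/passwd' collapses to 'passwd' under ./recordings.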
dirname, runfilename = os.path.split(os.path.abspath(__file__))
tmpdirname, filename = os.path.split(passedfilename)
recordingsDir = dirname + '/recordings'
fullPath = recordingsDir + '/' + filename
if not os.path.isfile(fullPath):
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'File not found.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
try:
f = open(fullPath, 'rb')
except:
s.send_response(400)
s.send_header("Content-type", "application/json")
s.end_headers()
responsedict = {}
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Unable to open file.'
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
fileExtension = filename.split(".")[-1]
if fileExtension in ['txt', 'csv', 'json', 'xml']:
contentType = 'text/plain'
elif fileExtension == 'html':
contentType = 'text/html'
else:
contentType = 'application/octet-stream'
s.send_response(200)
#s.send_header("Content-type", "text/html")
s.send_header("Content-type", contentType)
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
try:
s.wfile.write(f.read())
except:
pass
f.close()
return
def do_GET(s):
global gpsEngine
global useMavlink
global mavlinkGPSThread
global lockList
global allowedIPs
global runningcfg
global falconWiFiRemoteAgent
global hasBluetooth
global hasUbertooth
global bluetooth
global allowCors
# For RPi LED's, using it during each get request wasn't completely working. Short transactions like
# status and interface list were so quick the light would get "confused" and stay off. So
# the LED is only used for long calls like scan
if len(s.client_address) == 0:
# This should have the connecting client IP. If this isn't at least 1, something is wrong
return
try:
# If the pipe gets broken mid-stream it'll throw an exception
if len(allowedIPs) > 0:
if s.client_address[0] not in allowedIPs:
try:
s.send_response(403)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Connections not authorized from your IP address</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
if not s.isValidGetURL():
try:
s.send_response(404)
s.send_header("Content-type", "text/html")
s.end_headers()
s.wfile.write("<html><body><p>Bad Request</p>".encode("utf-8"))
s.wfile.write("</body></html>".encode("UTF-8"))
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
return
"""Respond to a GET request."""
if (not s.path.startswith('/system/getrecording/') and (not s.path == ('/bluetooth/scanstatus')) and
(not s.path == ('/spectrum/scanstatus'))):
# In getrecording we may adjust the content type header based on file extension
# Spectrum we'll gzip
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
if allowCors:
s.send_header("Access-Control-Allow-Origin", "*")
s.end_headers()
except:
pass
            # NOTE: Python 2 examples write strings to wfile directly; in Python 3
            # the string must first be encoded to UTF-8 bytes.
# s.wfile.write("<html><head><title>Sparrow-wifi agent</title></head><body>".encode("utf-8"))
if s.path == '/wireless/interfaces':
wirelessInterfaces = WirelessEngine.getInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/wireless/networks/' in s.path:
# THIS IS THE NORMAL SCAN
inputstr = s.path.replace('/wireless/networks/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                        SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
if '?' in inputstr:
splitlist = inputstr.split('?')
curInterface = splitlist[0]
else:
curInterface = inputstr
p = re.compile('.*Frequencies=(.*)', re.IGNORECASE)
try:
channelStr = p.search(inputstr).group(1)
except:
channelStr = ""
huntChannelList = []
if ',' in channelStr:
tmpList = channelStr.split(',')
else:
tmpList = []
if len(tmpList) > 0:
for curItem in tmpList:
try:
if len(curItem) > 0:
huntChannelList.append(int(curItem))
# Get results for the specified interface
# Need to iterate through the channels and aggregate the results
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
sleep(0.1)
if curInterface not in lockList.keys():
lockList[curInterface] = Lock()
curLock = lockList[curInterface]
if (curLock):
curLock.acquire()
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsCoord, huntChannelList)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, gpsEngine.lastCoord, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr=WirelessEngine.getNetworksAsJson(fieldValue, None, huntChannelList)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
if (curLock):
curLock.release()
s.wfile.write(jsonstr.encode("UTF-8"))
elif s.path == '/gps/status':
jsondict={}
if not useMavlink:
jsondict['gpsinstalled'] = str(GPSEngine.GPSDInstalled())
jsondict['gpsrunning'] = str(GPSEngine.GPSDRunning())
jsondict['gpssynch'] = str(gpsEngine.gpsValid())
if gpsEngine.gpsValid():
gpsPos = {}
gpsPos['latitude'] = gpsEngine.lastCoord.latitude
gpsPos['longitude'] = gpsEngine.lastCoord.longitude
gpsPos['altitude'] = gpsEngine.lastCoord.altitude
gpsPos['speed'] = gpsEngine.lastCoord.speed
jsondict['gpspos'] = gpsPos
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
else:
jsondict['gpsinstalled'] = 'True'
jsondict['gpsrunning'] = 'True'
jsondict['gpssynch'] = str(mavlinkGPSThread.synchronized)
gpsPos = {}
gpsPos['latitude'] = mavlinkGPSThread.latitude
gpsPos['longitude'] = mavlinkGPSThread.longitude
gpsPos['altitude'] = mavlinkGPSThread.altitude
gpsPos['speed'] = mavlinkGPSThread.vehicle.getAirSpeed()
jsondict['gpspos'] = gpsPos
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/wireless/moninterfaces':
wirelessInterfaces = WirelessEngine.getMonitoringModeInterfaces()
jsondict={}
jsondict['interfaces']=wirelessInterfaces
jsonstr = json.dumps(jsondict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/getrecordings':
filelist = getRecordingFiles()
responsedict = {}
responsedict['files'] = filelist
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/getrecording/'):
filename = s.path.replace('/system/getrecording/', '')
s.sendFile(filename)
elif s.path == '/bluetooth/present':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
if hasBluetooth:
                    responsedict['scanrunning'] = bluetooth.scanRunning()
else:
responsedict['scanrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/beacon'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/beacon', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
if bluetooth.discoveryRunning():
bluetooth.stopDiscovery()
retVal = bluetooth.startBeacon()
if not retVal:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unable to start beacon.'
elif function == 'stop':
bluetooth.stopBeacon()
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/scan'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start':
bluetooth.startScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'stop':
bluetooth.stopScanning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
channelData = bluetooth.spectrumToChannels()
responsedict['channeldata'] = channelData
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
except:
pass
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
try:
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/bluetooth/discovery'):
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/bluetooth/discovery', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='startp':
# Promiscuous with ubertooth
if hasUbertooth:
bluetooth.startDiscovery(True)
else:
responsedict['errcode'] = 2
responsedict['errmsg'] = 'Ubertooth not supported on this agent'
elif function == 'starta':
# Normal with Bluetooth
bluetooth.startDiscovery(False)
elif function == 'stop':
bluetooth.stopDiscovery()
elif function == 'clear':
# Device list accumulates in the bluetooth class over time
# If you want a fresh list every time, you need to clear the old list.
bluetooth.clearDeviceList()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif function == 'status':
# have to get the GPS:
gpsCoord = SparrowGPS()
if useMavlink:
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
elif gpsEngine.gpsValid():
gpsCoord.copy(gpsEngine.lastCoord)
# errcode, devices = bluetooth.getDiscoveredDevices()
bluetooth.updateDeviceList()
bluetooth.deviceLock.acquire()
devdict = []
now = datetime.datetime.now()
for curKey in bluetooth.devices.keys():
curDevice = bluetooth.devices[curKey]
elapsedTime = now - curDevice.lastSeen
# This is a little bit of a hack for the BlueHydra side since it can take a while to see devices or have
# them show up in the db. For LE discovery scans this will always be pretty quick.
if elapsedTime.total_seconds() < 120:
curDevice.gps.copy(gpsCoord)
if curDevice.rssi >= curDevice.strongestRssi:
curDevice.strongestRssi = curDevice.rssi
curDevice.strongestgps.copy(gpsCoord)
entryDict = curDevice.toJsondict()
devdict.append(entryDict)
bluetooth.deviceLock.release()
responsedict['devices'] = devdict
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/bluetooth/running':
if not hasBluetooth:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Bluetooth not supported on this agent'
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = False
responsedict['discoveryscanrunning'] = False
responsedict['beaconrunning'] = False
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hasbluetooth'] = hasBluetooth
responsedict['hasubertooth'] = hasUbertooth
responsedict['spectrumscanrunning'] = bluetooth.scanRunning()
responsedict['discoveryscanrunning'] = bluetooth.discoveryRunning()
responsedict['beaconrunning'] = bluetooth.beaconRunning()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/spectrum/hackrfstatus':
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
responsedict['hashackrf'] = hackrf.hasHackrf
responsedict['scan24running'] = hackrf.scanRunning24()
responsedict['scan5running'] = hackrf.scanRunning5()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/spectrum/scan'):
if not hackrf.hasHackrf:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'HackRF is not supported on this agent'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
function=s.path.replace('/spectrum/scan', '')
function = function.replace('/', '')
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
if function=='start24':
hackrf.startScanning24()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'start5':
hackrf.startScanning5()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'stop':
hackrf.stopScanning()
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
elif function == 'status':
if hackrf.scanRunning24():
channelData = hackrf.spectrum24ToChannels()
responsedict['scanrunning'] = hackrf.scanRunning24()
elif hackrf.scanRunning5():
channelData = hackrf.spectrum5ToChannels()
                            responsedict['scanrunning'] = hackrf.scanRunning5()
else:
channelData = {} # Shouldn't be here but just in case.
responsedict['scanrunning'] = False
responsedict['channeldata'] = channelData
try:
s.send_response(200)
s.send_header("Content-type", "application/json")
s.send_header("Content-Encoding", "gzip")
s.end_headers()
jsonstr = json.dumps(responsedict)
gzipBytes = gzipCompress(jsonstr)
# s.wfile.write(jsonstr.encode("UTF-8"))
s.wfile.write(gzipBytes)
except:
pass
else:
responsedict['errcode'] = 1
responsedict['errmsg'] = 'Unknown command'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/config':
cfgSettings = AgentConfigSettings()
cfgSettings.fromConfigFile('sparrowwifiagent.cfg')
responsedict = {}
responsedict['startup'] = cfgSettings.toJsondict()
if recordThread:
runningcfg.recordRunning = True
runningcfg.recordInterface = recordThread.interface
responsedict['running'] = runningcfg.toJsondict()
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path.startswith('/system/startrecord'):
recordinterface = s.path.replace('/system/startrecord/', '')
# Check that the specified interface is valid:
interfaces = WirelessEngine.getInterfaces()
if recordinterface in interfaces:
startRecord(recordinterface)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
else:
responsedict = {}
responsedict['errcode'] = 1
responsedict['errmsg'] = 'The requested interface was not found on the system.'
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/system/stoprecord':
stopRecord()
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ''
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.startMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopmonmode' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopmonmode/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal, errMsg = falconWiFiRemoteAgent.stopMonitoringInterface(fieldValue)
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/scanrunning' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/scanrunning/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanrunning = falconWiFiRemoteAgent.isScanRunning(fieldValue)
if scanrunning:
retVal = 0
errMsg = "scan for " + fieldValue + " is running"
else:
retVal = 1
errMsg = "scan for " + fieldValue + " is not running"
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/startscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/startscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
scanProc = falconWiFiRemoteAgent.startCapture(fieldValue)
if scanProc is not None:
retVal = 0
errMsg = ""
else:
retVal = -1
errMsg = "Unable to start scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopscan' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopscan/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
retVal = falconWiFiRemoteAgent.stopCapture(fieldValue)
if retVal == 0:
errMsg = ""
else:
errMsg = "Unable to stop scanning process."
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopcrack' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopcrack/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
try:
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
falconWiFiRemoteAgent.WEPCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WEPCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WEPCrackList[curInterface]
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].stopCrack()
falconWiFiRemoteAgent.WPAPSKCrackList[curInterface].cleanupTempFiles()
del falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
except:
pass
retVal = 0
errMsg = ""
responsedict = {}
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/crackstatus' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
                    if 'crackstatuswep' in s.path:
                        cracktype = 'wep'
                    else:
                        cracktype = 'wpapsk'
                    inputstr = s.path.replace('/falcon/crackstatus' + cracktype + '/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
curInterface = p.search(inputstr).group(1)
except:
curInterface = ""
if len(curInterface) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + curInterface
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
return
responsedict = {}
retVal = -1
errMsg = "Unable to find running crack."
try:
                        if cracktype == 'wep':
if curInterface in falconWiFiRemoteAgent.WEPCrackList:
wepCrack = falconWiFiRemoteAgent.WEPCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wepCrack.isRunning()
responsedict['ivcount'] = wepCrack.getIVCount()
responsedict['ssid'] = wepCrack.SSID
responsedict['crackedpasswords'] = wepCrack.getCrackedPasswords()
else:
if curInterface in falconWiFiRemoteAgent.WPAPSKCrackList:
wpaPSKCrack = falconWiFiRemoteAgent.WPAPSKCrackList[curInterface]
retVal = 0
errMsg = ""
responsedict['isrunning'] = wpaPSKCrack.isRunning()
hasHandshake = wpaPSKCrack.hasHandshake()
responsedict['hashandshake'] = hasHandshake
if hasHandshake:
# For WPAPSK, lets copy the capture file to our recording directory for recovery
dirname, filename = os.path.split(os.path.abspath(__file__))
fullpath, filename=wpaPSKCrack.copyCaptureFile(dirname + '/recordings')
responsedict['capturefile'] = filename
else:
responsedict['capturefile'] = ""
except:
pass
responsedict['errcode'] = retVal
responsedict['errmsg'] = errMsg
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif s.path == '/falcon/getscanresults':
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
if useMavlink:
gpsCoord = GPSStatus()
gpsCoord.gpsInstalled = True
gpsCoord.gpsRunning = True
gpsCoord.isValid = mavlinkGPSThread.synchronized
gpsCoord.latitude = mavlinkGPSThread.latitude
gpsCoord.longitude = mavlinkGPSThread.longitude
gpsCoord.altitude = mavlinkGPSThread.altitude
gpsCoord.speed = mavlinkGPSThread.vehicle.getAirSpeed()
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(gpsCoord)
elif gpsEngine.gpsValid():
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(gpsEngine.lastCoord)
if useRPILeds:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
retCode, errString, jsonstr=falconWiFiRemoteAgent.getNetworksAsJson(None)
if useRPILeds:
# This just signals that the GPS isn't synced
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/stopalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
inputstr = s.path.replace('/falcon/stopalldeauths/', '')
# Sanitize command-line input here:
p = re.compile('^([0-9a-zA-Z]+)')
try:
fieldValue = p.search(inputstr).group(1)
except:
fieldValue = ""
if len(fieldValue) == 0:
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
                            SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Error parsing interface. Identified interface: " + fieldValue
jsonstr = json.dumps(responsedict)
s.wfile.write(jsonstr.encode("UTF-8"))
return
falconWiFiRemoteAgent.stopAllDeauths(fieldValue)
responsedict = {}
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
elif '/falcon/getalldeauths' in s.path:
if not hasFalcon:
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
responsedict = falconWiFiRemoteAgent.getAllDeauthsAsJsonDict()
# Add in successful response
responsedict['errcode'] = 0
responsedict['errmsg'] = ""
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
else:
# Catch-all. Should never be here
responsedict = {}
responsedict['errcode'] = 5
responsedict['errmsg'] = "Unknown request: " + s.path
jsonstr = json.dumps(responsedict)
try:
s.wfile.write(jsonstr.encode("UTF-8"))
except:
pass
except:
pass
if useRPILeds:
# Green will heartbeat when servicing requests. Turn back solid here
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_ON)
# ----------------- Bluetooth check -----------------------------
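# Runs once at startup: detects Bluetooth/Ubertooth hardware and caches the
# result in the hasBluetooth/hasUbertooth/bluetooth globals used by the handlers above.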
def checkForBluetooth():
global hasBluetooth
global hasUbertooth
global bluetooth
numBtAdapters = len(SparrowBluetooth.getBluetoothInterfaces())
if numBtAdapters > 0:
hasBluetooth = True
if SparrowBluetooth.getNumUbertoothDevices() > 0:
#SparrowBluetooth.ubertoothStopSpecan()
errcode, errmsg = SparrowBluetooth.hasUbertoothTools()
# errcode, errmsg = SparrowBluetooth.ubertoothOnline()
if errcode == 0:
hasUbertooth = True
bluetooth = SparrowBluetooth()
if hasBluetooth:
print("Found bluetooth hardware. Bluetooth capabilities enabled.")
else:
print("Bluetooth hardware not found. Bluetooth capabilities disabled.")
if hasUbertooth:
print("Found ubertooth hardware and software. Ubertooth capabilities enabled.")
else:
print("Ubertooth hardware and/or software not found. Ubertooth capabilities disabled.")
# ----------------- Main -----------------------------
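# Startup sequence: parse CLI args, require root, add plugin paths, read the
# config file (file values take precedence over CLI flags), then bring up GPS,
# optional announce/record threads, and finally the HTTP server.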
if __name__ == '__main__':
argparser = argparse.ArgumentParser(description='Sparrow-wifi agent')
argparser.add_argument('--port', help='Port for HTTP server to listen on. Default is 8020.', default=8020, required=False)
argparser.add_argument('--allowedips', help="IP addresses allowed to connect to this agent. Default is any. This can be a comma-separated list for multiple IP addresses", default='', required=False)
argparser.add_argument('--staticcoord', help="Use user-defined lat,long,altitude(m) rather than GPS. Ex: 40.1,-75.3,150", default='', required=False)
argparser.add_argument('--mavlinkgps', help="Use Mavlink (drone) for GPS. Options are: '3dr' for a Solo, 'sitl' for local simulator, or full connection string ('udp/tcp:<ip>:<port>' such as: 'udp:10.1.1.10:14550')", default='', required=False)
argparser.add_argument('--sendannounce', help="Send a UDP broadcast packet on the specified port to announce presence", action='store_true', default=False, required=False)
argparser.add_argument('--userpileds', help="Use RPi LEDs to signal state. Red=GPS [off=None,blinking=Unsynchronized,solid=synchronized], Green=Agent Running [On=Running, blinking=servicing HTTP request]", action='store_true', default=False, required=False)
argparser.add_argument('--recordinterface', help="Automatically start recording locally with the given wireless interface (headless mode) in a recordings directory", default='', required=False)
argparser.add_argument('--ignorecfg', help="Don't load any config files (useful for overriding and/or testing)", action='store_true', default=False, required=False)
argparser.add_argument('--cfgfile', help="Use the specified config file rather than the default sparrowwifiagent.cfg file", default='', required=False)
argparser.add_argument('--allowcors', help="Allow Cross Domain Resource Sharing", action='store_true', default=False, required=False)
argparser.add_argument('--delaystart', help="Wait <delaystart> seconds before initializing", default=0, required=False)
argparser.add_argument('--debughttp', help="Print each URL request", action='store_true', default=False, required=False)
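    # Example invocation (hypothetical values):
    #   sudo python sparrowwifiagent.py --port 8020 --sendannounce --userpileds \
    #       --staticcoord 40.1,-75.3,150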
args = argparser.parse_args()
if len(args.staticcoord) > 0:
coord_array = args.staticcoord.split(",")
if len(coord_array) < 3:
print("ERROR: Provided static coordinates are not in the format latitude,longitude,altitude.")
exit(1)
usingStaticGPS = True
gpsEngine = GPSEngineStatic(float(coord_array[0]), float(coord_array[1]), float(coord_array[2]))
else:
usingStaticGPS = False
gpsEngine = GPSEngine()
debugHTTP = args.debughttp
if os.geteuid() != 0:
print("ERROR: You need to have root privileges to run this script. Please try again, this time using 'sudo'. Exiting.\n")
exit(2)
# Code to add paths
dirname, filename = os.path.split(os.path.abspath(__file__))
if dirname not in sys.path:
sys.path.insert(0, dirname)
# Check for Falcon offensive plugin
pluginsdir = dirname+'/plugins'
if os.path.exists(pluginsdir):
if pluginsdir not in sys.path:
sys.path.insert(0,pluginsdir)
if os.path.isfile(pluginsdir + '/falconwifi.py'):
from falconwifi import FalconWiFiRemoteAgent, WPAPSKCrack, WEPCrack
hasFalcon = True
falconWiFiRemoteAgent = FalconWiFiRemoteAgent()
if not falconWiFiRemoteAgent.toolsInstalled():
print("ERROR: aircrack suite of tools does not appear to be installed. Please install it.")
exit(4)
checkForBluetooth()
# See if we have a config file:
dirname, filename = os.path.split(os.path.abspath(__file__))
settings = {}
runningcfg=AgentConfigSettings()
if len(args.cfgfile) == 0:
cfgFile = dirname + '/sparrowwifiagent.cfg'
else:
cfgFile = args.cfgfile
# Since it's user-specified, let's see if it exists.
if not os.path.isfile(cfgFile):
print("ERROR: Unable to find the specified config file.")
exit(3)
if os.path.isfile(cfgFile) and (not args.ignorecfg):
cfgParser = configparser.ConfigParser()
try:
cfgParser.read(cfgFile)
section="agent"
options = cfgParser.options(section)
for option in options:
try:
if (option == 'sendannounce' or option == 'userpileds' or
option == 'cancelstart' or option == 'allowcors'):
settings[option] = stringtobool(cfgParser.get(section, option))
else:
settings[option] = cfgParser.get(section, option)
except:
print("exception on %s!" % option)
settings[option] = None
except:
print("ERROR: Unable to read config file: ", cfgFile)
exit(1)
# Set up parameters
if 'cancelstart' in settings.keys():
if settings['cancelstart']:
exit(0)
delayStart = int(args.delaystart)
if delayStart > 0:
sleep(delayStart)
runningcfg.cancelStart = False
if 'port' not in settings.keys():
        port = int(args.port)
else:
port = int(settings['port'])
runningcfg.port = port
if 'sendannounce' not in settings.keys():
sendannounce = args.sendannounce
else:
sendannounce = settings['sendannounce']
runningcfg.announce = sendannounce
if 'userpileds' not in settings.keys():
useRPILeds = args.userpileds
else:
useRPILeds = settings['userpileds']
runningcfg.useRPiLEDs = useRPILeds
if 'allowedips' not in settings.keys():
allowedIPstr = args.allowedips
else:
allowedIPstr = settings['allowedips']
runningcfg.ipAllowedList = allowedIPstr
if 'mavlinkgps' not in settings.keys():
mavlinksetting = args.mavlinkgps
else:
mavlinksetting = settings['mavlinkgps']
runningcfg.mavlinkGPS = mavlinksetting
if 'recordinterface' not in settings.keys():
recordinterface = args.recordinterface
else:
recordinterface = settings['recordinterface']
runningcfg.recordInterface = recordinterface
if 'allowcors' not in settings.keys():
allowCors = args.allowcors
else:
allowCors = settings['allowcors']
runningcfg.allowCors = allowCors
print("Allow CORS: " + str(runningcfg.allowCors))
# Now start logic
if runningcfg.useRPiLEDs:
# One extra check that the LED's are really present
runningcfg.useRPiLEDs = SparrowRPi.hasLights()
if not runningcfg.useRPiLEDs:
# we changed state. Print warning
print('WARNING: RPi LEDs were requested but were not found on this platform.')
# Now check again:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
buildAllowedIPs(allowedIPstr)
if len(runningcfg.mavlinkGPS) > 0 and hasDroneKit:
vehicle = SparrowDroneMavlink()
print('Connecting to ' + runningcfg.mavlinkGPS)
connected = False
synchronized = False
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_OFF)
# If we're in drone gps mode, wait for the drone to be up and gps synchronized before starting.
while (not connected) or (not synchronized):
if not connected:
if runningcfg.mavlinkGPS == '3dr':
retVal = vehicle.connectToSolo()
elif (runningcfg.mavlinkGPS == 'sitl'):
retVal = vehicle.connectToSimulator()
else:
retVal = vehicle.connect(runningcfg.mavlinkGPS)
connected = retVal
if connected:
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
print('Mavlink connected.')
print('Current GPS Info:')
# get synchronized flag and position
synchronized, latitude, longitude, altitude = vehicle.getGlobalGPS()
print('Synchronized: ' + str(synchronized))
print('Latitude: ' + str(latitude))
print('Longitude: ' + str(longitude))
print('Altitude (m): ' + str(altitude))
print('Heading: ' + str(vehicle.getHeading()))
if synchronized:
useMavlink = True
mavlinkGPSThread = MavlinkGPSThread(vehicle)
mavlinkGPSThread.start()
print('Mavlink GPS synchronized. Continuing.')
else:
print('Mavlink GPS not synchronized yet. Waiting...')
sleep(2)
else:
print("ERROR: Unable to connect to " + mavlinksetting + '. Retrying...')
sleep(2)
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
        # No mavlink specified. Check the local GPS.
        curTime = datetime.datetime.now()
        if GPSEngine.GPSDRunning():
if runningcfg.useRPiLEDs:
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_HEARTBEAT)
gpsEngine.start()
if usingStaticGPS:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Using static lat/long/altitude(m): " + args.staticcoord)
else:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] Local gpsd Found. Providing GPS coordinates when synchronized.")
if useRPILeds:
sleep(1)
if gpsEngine.gpsValid():
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
else:
print('[' +curTime.strftime("%m/%d/%Y %H:%M:%S") + "] No local gpsd running. No GPS data will be provided.")
if runningcfg.announce:
startAnnounceThread()
if len(runningcfg.recordInterface) > 0:
startRecord(runningcfg.recordInterface)
# -------------- Run HTTP Server / Main Loop--------------
server = SparrowWiFiAgent()
server.run(runningcfg.port)
# -------------- This is the shutdown process --------------
if mavlinkGPSThread:
mavlinkGPSThread.signalStop = True
print('Waiting for mavlink GPS thread to terminate...')
while (mavlinkGPSThread.threadRunning):
sleep(0.2)
stopRecord()
if hasDroneKit and useMavlink and vehicle:
vehicle.close()
stopAnnounceThread()
if runningcfg.useRPiLEDs:
SparrowRPi.greenLED(SparrowRPi.LIGHT_STATE_OFF)
SparrowRPi.redLED(SparrowRPi.LIGHT_STATE_ON)
#for curKey in lockList.keys():
# curLock = lockList[curKey]
# try:
# curLock.release()
# except:
# pass
# os._exit(0)
exit(0)
# ------------------ roxxup/PartialTuring :: WebcamSound.py (GPL-3.0) ------------------
import threading
from threading import Thread
import cv2
import sys
#import wikipedia
#from chatterbot import ChatBot
import shlex, subprocess
import speech_recognition as sr
import pyvona
from googlesearch import GoogleSearch
import xml.etree.ElementTree as ET
import requests
cascPath = sys.argv[1]
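# Usage sketch (hypothetical cascade path):
#   python WebcamSound.py haarcascade_frontalface_default.xml
# NOTE: wikileaks() below relies on the 'wikipedia' import that is commented out
# above; re-enable that import before calling it.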
def wikileaks(string):
string=wikipedia.summary(string,sentences=1)
chatvoice(string)
def speak():
# obtain audio from the microphone
r = sr.Recognizer()
with sr.Microphone() as source:
r.adjust_for_ambient_noise(source) # listen for 1 second to calibrate the energy threshold for ambient noise levels
print("Say something!")
audio = r.listen(source)
# recognize speech using Google Speech Recognition
try:
# for testing purposes, we're just using the default API key
# to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
# instead of `r.recognize_google(audio)`
string = r.recognize_google(audio)
print "you said "+string
return string
except sr.UnknownValueError:
print("Google Speech Recognition could not understand audio")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
def Google1(string):
gs = GoogleSearch(string)
for hit in gs.top_results():
#send(hit[u'content'])
chatvoice(hit[u'content'])
break
def chatvoice(string):
v = pyvona.create_voice('username','password')
#v.region('en-IN')
#print v.list_voices()
v.speak(string)
#v.speak(a)
def intelbot(string):
payload = {'input':string,'botid':'9fa364f2fe345a10'}
r = requests.get("http://fiddle.pandorabots.com/pandora/talk-xml", params=payload)
for child in ET.fromstring(r.text):
if child.tag == "that":
chatvoice(child.text)
def Camera():
faceCascade = cv2.CascadeClassifier(cascPath)
video_capture = cv2.VideoCapture(1)
while True:
# Capture frame-by-frame
ret, frame = video_capture.read()
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(
gray,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30, 30),
#flags=cv2.cv.CV_HAAR_SCALE_IMAGE
flags = 0
)
# Draw a rectangle around the faces
for (x, y, w, h) in faces:
cv2.rectangle(gray, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the resulting frame
cv2.imshow('Video', gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything is done, release the capture
video_capture.release()
cv2.destroyAllWindows()
def Sound():
    while True:
        takeString = speak()
        # speak() returns None when recognition fails; skip those iterations
        if takeString:
            intelbot(takeString)
if __name__ == '__main__':
Thread(target = Camera).start()
Thread(target = Sound).start()
# ------------------ yohanyee/simple-neural-net :: classes/example_pipelines.py (MIT) ------------------
import numpy as np
from data import *
from construct import *
from train import *
from hippocampi_to_patches import *
class DigitsPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
class DigitsConvolutionPipeline(object):
def __init__(self):
self.D = Data()
self.D.load_digits_data()
self.D.reshape([16,16],[1])
self.N = FeedForwardNetwork()
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.input_shape, neuron_activation='identity'))
self.N.auto_add_layer_convolution(NeuronLayer([4,8,8], neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(10, neuron_activation='logistic', neuron_bias=True))
self.N.auto_add_layer_fullyconnected(NeuronLayer(self.D.output_shape, neuron_activation='logistic', neuron_bias=True))
self.Trainer = BackpropagationTrainer(self.D, self.N)
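# Usage sketch (assumes BackpropagationTrainer exposes a training entry point;
# see train.py for the actual API):
#   pipeline = DigitsPipeline()
#   pipeline.Trainer.train()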
# ------------------ anurag03/integration_tests :: cfme/tests/services/test_provision_stack.py ------------------
import fauxfactory
import pytest
from widgetastic_patternfly import DropdownItemDisabled
from cfme import test_requirements
from cfme.cloud.provider import CloudProvider
from cfme.cloud.provider.azure import AzureProvider
from cfme.cloud.provider.ec2 import EC2Provider
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.services.myservice import MyService
from cfme.services.service_catalogs import ServiceCatalogs
from cfme.utils.blockers import BZ
from cfme.utils.conf import credentials
from cfme.utils.datafile import load_data_file
from cfme.utils.path import orchestration_path
pytestmark = [
pytest.mark.meta(server_roles='+automate'),
pytest.mark.ignore_stream('upstream'),
test_requirements.stack,
pytest.mark.tier(2),
pytest.mark.usefixtures("setup_provider_modscope"),
pytest.mark.provider([CloudProvider],
required_fields=[['provisioning', 'stack_provisioning']],
scope='module'),
]
@pytest.fixture
def stack_data(appliance, provider, provisioning):
random_base = fauxfactory.gen_alphanumeric()
stackname = 'test{}'.format(random_base)
vm_name = 'test-{}'.format(random_base)
stack_timeout = '20'
if provider.one_of(AzureProvider):
try:
template = provider.data.templates.small_template
vm_user = credentials[template.creds].username
vm_password = credentials[template.creds].password
except AttributeError:
pytest.skip('Could not find small_template or credentials for {}'.format(provider.name))
stack_data = {
'stack_name': stackname,
'resource_group': provisioning.get('resource_group'),
'deploy_mode': provisioning.get('mode'),
'location': provisioning.get('region_api'),
'vmname': vm_name,
'vmuser': vm_user,
'vmpassword': vm_password,
'vmsize': provisioning.get('vm_size'),
'cloudnetwork': provisioning.get('cloud_network').split()[0],
'cloudsubnet': provisioning.get('cloud_subnet').split()[0]
}
elif provider.one_of(OpenStackProvider):
stack_prov = provisioning['stack_provisioning']
stack_data = {
'stack_name': stackname,
'key': stack_prov['key_name'],
'flavor': stack_prov['instance_type'],
}
else:
stack_prov = provisioning['stack_provisioning']
if appliance.version < '5.9':
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'virtualMachineName': vm_name,
'KeyName': stack_prov['key_name'],
'InstanceType': stack_prov['instance_type'],
'SSHLocation': provisioning['ssh_location']
}
else:
stack_data = {
'stack_name': stackname,
'stack_timeout': stack_timeout,
'param_virtualMachineName': vm_name,
'param_KeyName': stack_prov['key_name']
}
return stack_data
@pytest.fixture
def dialog_name():
return 'dialog_{}'.format(fauxfactory.gen_alphanumeric())
@pytest.fixture
def template(appliance, provider, provisioning, dialog_name, stack):
template_group = provisioning['stack_provisioning']['template_type']
template_type = provisioning['stack_provisioning']['template_type_dd']
template_name = fauxfactory.gen_alphanumeric()
file = provisioning['stack_provisioning']['data_file']
data_file = load_data_file(str(orchestration_path.join(file)))
content = data_file.read().replace('CFMETemplateName', template_name)
collection = appliance.collections.orchestration_templates
template = collection.create(template_group=template_group, template_name=template_name,
template_type=template_type, description="my template",
content=content)
template.create_service_dialog_from_template(dialog_name)
yield template
if stack.exists:
stack.retire_stack()
if template.exists:
template.delete()
@pytest.fixture
def catalog(appliance):
cat_name = "cat_{}".format(fauxfactory.gen_alphanumeric())
catalog = appliance.collections.catalogs.create(name=cat_name, description="my catalog")
yield catalog
if catalog.exists:
catalog.delete()
@pytest.fixture
def catalog_item(appliance, dialog, catalog, template, provider, dialog_name):
item_name = fauxfactory.gen_alphanumeric()
catalog_item = appliance.collections.catalog_items.create(
appliance.collections.catalog_items.ORCHESTRATION,
name=item_name,
description="my catalog",
display_in=True,
catalog=catalog,
dialog=dialog_name,
orch_template=template,
provider_name=provider.name,
)
yield catalog_item
if catalog_item.exists:
catalog_item.delete()
@pytest.fixture
def service_catalogs(appliance, catalog_item, stack_data):
return ServiceCatalogs(appliance, catalog_item.catalog, catalog_item.name, stack_data)
@pytest.fixture
def stack(appliance, provider, stack_data):
return appliance.collections.cloud_stacks.instantiate(stack_data['stack_name'],
provider=provider)
@pytest.fixture
def order_stack(appliance, request, service_catalogs, stack):
"""Fixture which prepares provisioned stack"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
assert provision_request.is_succeeded()
stack.wait_for_exists()
return provision_request, stack
def _cleanup(appliance=None, provision_request=None, service=None):
if not service:
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
else:
myservice = service
if myservice.exists:
myservice.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_provision_stack(order_stack):
"""Tests stack provisioning
Metadata:
test_flag: provision
"""
provision_request, stack = order_stack
assert provision_request.is_succeeded()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_reconfigure_service(appliance, service_catalogs, request):
"""Tests service reconfiguring
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
provision_request.wait_for_request(method='ui')
last_message = provision_request.get_request_row_from_ui()['Last Message'].text
service_name = last_message.split()[2].strip('[]')
myservice = MyService(appliance, service_name)
request.addfinalizer(lambda: _cleanup(service=myservice))
assert provision_request.is_succeeded()
myservice.reconfigure_service()
@pytest.mark.uncollectif(lambda provider: provider.one_of(EC2Provider),
reason='EC2 locks template between Stack order and template removal')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_non_read_only_orch_template(appliance, provider, template, service_catalogs,
request):
"""
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
template.delete()
assert (provision_request.rest.message == 'Service_Template_Provisioning failed' or
provision_request.status == 'Error')
assert not template.exists
@pytest.mark.uncollectif(lambda provider: not provider.one_of(EC2Provider),
reason='Only EC2 locks orchestration template')
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_remove_read_only_orch_template_neg(appliance, provider, template, service_catalogs,
request):
"""
For RHOS/Azure the original template will remain stand-alone while the stack links
to a new template read from the RHOS/Azure provider. Hence we can delete used orchestration
template for RHOS/Azure.
Steps:
1. Order Service which uses Orchestration template
2. Try to remove this Orchestration template
Metadata:
test_flag: provision
"""
provision_request = service_catalogs.order()
request.addfinalizer(lambda: _cleanup(appliance, provision_request))
provision_request.wait_for_request(method='ui')
with pytest.raises(DropdownItemDisabled):
template.delete()
@pytest.mark.meta(blockers=[BZ(1628384, forced_streams=['5.10'])])
def test_retire_stack(order_stack):
"""Tests stack retirement.
Steps:
1. Retire Orchestration stack
2. Verify it doesn't exist in UI
Metadata:
test_flag: provision
"""
_, stack = order_stack
stack.retire_stack()
assert not stack.exists, "Stack still visible in UI"
| gpl-2.0 | -7,078,827,523,268,368,000 | 35.933071 | 100 | 0.663895 | false |
Telestream/telestream-cloud-python-sdk | telestream_cloud_flip_sdk/telestream_cloud_flip/models/extra_file.py | 1 | 4957 | # coding: utf-8
"""
Flip API
Flip # noqa: E501
The version of the OpenAPI document: 3.1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_flip.configuration import Configuration
class ExtraFile(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'tag': 'str',
'file_size': 'int',
'file_name': 'str'
}
attribute_map = {
'tag': 'tag',
'file_size': 'file_size',
'file_name': 'file_name'
}
def __init__(self, tag=None, file_size=None, file_name=None, local_vars_configuration=None): # noqa: E501
"""ExtraFile - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._tag = None
self._file_size = None
self._file_name = None
self.discriminator = None
self.tag = tag
self.file_size = file_size
self.file_name = file_name
@property
def tag(self):
"""Gets the tag of this ExtraFile. # noqa: E501
:return: The tag of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._tag
@tag.setter
def tag(self, tag):
"""Sets the tag of this ExtraFile.
:param tag: The tag of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and tag is None: # noqa: E501
raise ValueError("Invalid value for `tag`, must not be `None`") # noqa: E501
self._tag = tag
@property
def file_size(self):
"""Gets the file_size of this ExtraFile. # noqa: E501
:return: The file_size of this ExtraFile. # noqa: E501
:rtype: int
"""
return self._file_size
@file_size.setter
def file_size(self, file_size):
"""Sets the file_size of this ExtraFile.
:param file_size: The file_size of this ExtraFile. # noqa: E501
:type: int
"""
if self.local_vars_configuration.client_side_validation and file_size is None: # noqa: E501
raise ValueError("Invalid value for `file_size`, must not be `None`") # noqa: E501
self._file_size = file_size
@property
def file_name(self):
"""Gets the file_name of this ExtraFile. # noqa: E501
:return: The file_name of this ExtraFile. # noqa: E501
:rtype: str
"""
return self._file_name
@file_name.setter
def file_name(self, file_name):
"""Sets the file_name of this ExtraFile.
:param file_name: The file_name of this ExtraFile. # noqa: E501
:type: str
"""
if self.local_vars_configuration.client_side_validation and file_name is None: # noqa: E501
raise ValueError("Invalid value for `file_name`, must not be `None`") # noqa: E501
self._file_name = file_name
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ExtraFile):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ExtraFile):
return True
return self.to_dict() != other.to_dict()
| mit | 4,721,568,431,818,250,000 | 27.164773 | 110 | 0.556788 | false |
pombreda/https-gitorious.org-appstream-software-center | softwarecenter/ui/gtk3/widgets/buttons.py | 1 | 21538 | # Copyright (C) 2011 Canonical
#
# Authors:
# Matthew McGowan
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import cairo
from gi.repository import Gtk, Gdk, Pango, GObject, GdkPixbuf
from gettext import gettext as _
from softwarecenter.backend import get_install_backend
from softwarecenter.db.application import AppDetails
from softwarecenter.enums import Icons
from softwarecenter.ui.gtk3.em import StockEms, em
from softwarecenter.ui.gtk3.drawing import darken
from softwarecenter.ui.gtk3.widgets.stars import Star, StarSize
_HAND = Gdk.Cursor.new(Gdk.CursorType.HAND2)
def _update_icon(image, icon, icon_size):
if isinstance(icon, GdkPixbuf.Pixbuf):
image = image.set_from_pixbuf(icon)
elif isinstance(icon, Gtk.Image):
image = image.set_from_pixbuf(icon.get_pixbuf())
elif isinstance(icon, str):
image = image.set_from_icon_name(icon, icon_size)
else:
msg = "Acceptable icon values: None, GdkPixbuf, GtkImage or str"
raise TypeError(msg)
return image
class _Tile(object):
MIN_WIDTH = em(7)
def __init__(self):
self.set_focus_on_click(False)
self.set_relief(Gtk.ReliefStyle.NONE)
self.box = Gtk.Box.new(Gtk.Orientation.VERTICAL, 0)
self.box.set_size_request(self.MIN_WIDTH, -1)
self.add(self.box)
def build_default(self, label, icon, icon_size):
if icon is not None:
if isinstance(icon, Gtk.Image):
self.image = icon
else:
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.box.pack_start(self.image, True, True, 0)
self.label = Gtk.Label.new(label)
self.box.pack_start(self.label, True, True, 0)
class TileButton(Gtk.Button, _Tile):
def __init__(self):
Gtk.Button.__init__(self)
_Tile.__init__(self)
class TileToggleButton(Gtk.RadioButton, _Tile):
def __init__(self):
Gtk.RadioButton.__init__(self)
self.set_mode(False)
_Tile.__init__(self)
class LabelTile(TileButton):
MIN_WIDTH = -1
def __init__(self, label, icon, icon_size=Gtk.IconSize.MENU):
TileButton.__init__(self)
self.build_default(label, icon, icon_size)
self.label.set_line_wrap(True)
context = self.label.get_style_context()
context.add_class("label-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
class CategoryTile(TileButton):
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileButton.__init__(self)
self.set_size_request(em(8), -1)
self.build_default(label, icon, icon_size)
self.label.set_justify(Gtk.Justification.CENTER)
self.label.set_alignment(0.5, 0.0)
self.label.set_line_wrap(True)
self.box.set_border_width(StockEms.SMALL)
context = self.label.get_style_context()
context.add_class("category-tile")
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
cr.restore()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
_global_featured_tile_width = em(11)
class FeaturedTile(TileButton):
INSTALLED_OVERLAY_SIZE = 22
_MARKUP = '<b><small>%s</small></b>'
def __init__(self, helper, doc, icon_size=48):
TileButton.__init__(self)
self._pressed = False
label = helper.get_appname(doc)
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
stats = helper.get_review_stats(doc)
helper.update_availability(doc)
helper.connect("needs-refresh", self._on_needs_refresh, doc, icon_size)
self.is_installed = helper.is_installed(doc)
self._overlay = helper.icons.load_icon(Icons.INSTALLED_OVERLAY,
self.INSTALLED_OVERLAY_SIZE,
0) # flags
self.box.set_orientation(Gtk.Orientation.HORIZONTAL)
self.box.set_spacing(StockEms.SMALL)
self.content_left = Gtk.Box.new(Gtk.Orientation.VERTICAL,
StockEms.MEDIUM)
self.content_right = Gtk.Box.new(Gtk.Orientation.VERTICAL, 1)
self.box.pack_start(self.content_left, False, False, 0)
self.box.pack_start(self.content_right, False, False, 0)
self.image = Gtk.Image()
_update_icon(self.image, icon, icon_size)
self.content_left.pack_start(self.image, False, False, 0)
self.title = Gtk.Label.new(self._MARKUP %
GObject.markup_escape_text(label))
self.title.set_alignment(0.0, 0.5)
self.title.set_use_markup(True)
self.title.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.title, False, False, 0)
categories = helper.get_categories(doc)
if categories is not None:
self.category = Gtk.Label.new('<span font_desc="%i">%s</span>' %
(em(0.6), GObject.markup_escape_text(categories)))
self.category.set_use_markup(True)
self.category.set_alignment(0.0, 0.5)
self.category.set_ellipsize(Pango.EllipsizeMode.END)
self.content_right.pack_start(self.category, False, False, 4)
stats_a11y = None
if stats is not None:
self.stars = Star(size=StarSize.SMALL)
self.stars.render_outline = True
self.stars.set_rating(stats.ratings_average)
self.rating_box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL,
StockEms.SMALL)
self.rating_box.pack_start(self.stars, False, False, 0)
self.n_ratings = Gtk.Label.new(
'<span font_desc="%i"> (%i)</span>' % (
em(0.45), stats.ratings_total))
self.n_ratings.set_use_markup(True)
self.n_ratings.set_name("subtle-label")
self.n_ratings.set_alignment(0.0, 0.5)
self.rating_box.pack_start(self.n_ratings, False, False, 0)
self.content_right.pack_start(self.rating_box, False, False, 0)
# TRANSLATORS: this is an accessibility description for eg orca and
# is not visible in the ui
stats_a11y = _('%(stars)d stars - %(reviews)d reviews') % {
'stars': stats.ratings_average, 'reviews': stats.ratings_total}
# work out width tile needs to be to ensure ratings text is all
# visible
req_width = (self.stars.size_request().width +
self.image.size_request().width +
self.n_ratings.size_request().width +
StockEms.MEDIUM * 3
)
global _global_featured_tile_width
_global_featured_tile_width = max(_global_featured_tile_width,
req_width)
details = AppDetails(db=helper.db, doc=doc)
# TRANSLATORS: Free here means Gratis
price = details.price or _("Free")
if price == '0.00':
# TRANSLATORS: Free here means Gratis
price = _("Free")
# TRANSLATORS: Free here means Gratis
if price != _("Free"):
price = 'US$ ' + price
self.price = Gtk.Label.new(
'<span font_desc="%i">%s</span>' % (em(0.6), price))
self.price.set_use_markup(True)
self.price.set_name("subtle-label")
self.price.set_alignment(0.0, 0.5)
self.content_right.pack_start(self.price, False, False, 0)
self.set_name("featured-tile")
a11y_name = '. '.join([t
for t in [label, categories, stats_a11y, price] if t])
self.get_accessible().set_name(a11y_name)
backend = get_install_backend()
backend.connect("transaction-finished",
self.on_transaction_finished,
helper, doc)
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
self.connect("button-press-event", self.on_press)
self.connect("button-release-event", self.on_release)
def _on_needs_refresh(self, helper, pkgname, doc, icon_size):
icon = helper.get_icon_at_size(doc, icon_size, icon_size)
_update_icon(self.image, icon, icon_size)
def do_get_preferred_width(self):
w = _global_featured_tile_width
return w, w
def do_draw(self, cr):
cr.save()
A = self.get_allocation()
if self._pressed:
cr.translate(1, 1)
if self.has_focus():
Gtk.render_focus(self.get_style_context(),
cr,
3, 3,
A.width - 6, A.height - 6)
for child in self:
self.propagate_draw(child, cr)
if self.is_installed:
# paint installed tick overlay
if self.get_direction() != Gtk.TextDirection.RTL:
x = y = 36
else:
x = A.width - 56
y = 36
Gdk.cairo_set_source_pixbuf(cr, self._overlay, x, y)
cr.paint()
cr.restore()
def on_transaction_finished(self, backend, result, helper, doc):
trans_pkgname = str(result.pkgname)
pkgname = helper.get_pkgname(doc)
if trans_pkgname != pkgname:
return
# update installed state
helper.update_availability(doc)
self.is_installed = helper.is_installed(doc)
self.queue_draw()
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
return True
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
self._pressed = False
return True
def on_press(self, widget, event):
self._pressed = True
def on_release(self, widget, event):
if not self._pressed:
return
self.emit("clicked")
self._pressed = False
class ChannelSelector(Gtk.Button):
PADDING = 0
def __init__(self, section_button):
Gtk.Button.__init__(self)
alignment = Gtk.Alignment.new(0.5, 0.5, 0.0, 1.0)
alignment.set_padding(self.PADDING, self.PADDING,
self.PADDING, self.PADDING)
self.add(alignment)
self.arrow = Gtk.Arrow.new(Gtk.ArrowType.DOWN, Gtk.ShadowType.IN)
alignment.add(self.arrow)
# vars
self.parent_style_type = Gtk.Toolbar
self.section_button = section_button
self.popup = None
self.connect("button-press-event", self.on_button_press)
def do_draw(self, cr):
cr.save()
parent_style = self.get_ancestor(self.parent_style_type)
context = parent_style.get_style_context()
color = darken(context.get_border_color(Gtk.StateFlags.ACTIVE), 0.2)
cr.set_line_width(1)
a = self.get_allocation()
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.1,
color.red,
color.green,
color.blue,
0.0) # alpha
lin.add_color_stop_rgba(0.5,
color.red,
color.green,
color.blue,
1.0) # alpha
lin.add_color_stop_rgba(1.0,
color.red,
color.green,
color.blue,
0.1) # alpha
cr.set_source(lin)
cr.move_to(0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.move_to(a.width - 0.5, 0.5)
cr.rel_line_to(0, a.height)
cr.stroke()
cr.restore()
for child in self:
self.propagate_draw(child, cr)
def on_button_press(self, button, event):
if self.popup is None:
self.build_channel_selector()
self.show_channel_sel_popup(self, event)
#~
#~ def on_style_updated(self, widget):
#~ context = widget.get_style_context()
#~ context.save()
#~ context.add_class("menu")
#~ bgcolor = context.get_background_color(Gtk.StateFlags.NORMAL)
#~ context.restore()
#~
#~ self._dark_color = darken(bgcolor, 0.5)
def show_channel_sel_popup(self, widget, event):
def position_func(menu, (window, a)):
if self.get_direction() != Gtk.TextDirection.RTL:
tmpx = a.x
else:
tmpx = a.x + a.width - self.popup.get_allocation().width
x, y = window.get_root_coords(tmpx,
a.y + a.height)
return (x, y, False)
a = self.section_button.get_allocation()
window = self.section_button.get_window()
self.popup.popup(None, None, position_func, (window, a),
event.button, event.time)
def set_build_func(self, build_func):
self.build_func = build_func
def build_channel_selector(self):
self.popup = Gtk.Menu()
self.popup.set_name('toolbar-popup') # to set 'padding: 0;'
self.popup.get_style_context().add_class('primary-toolbar')
self.build_func(self.popup)
class SectionSelector(TileToggleButton):
MIN_WIDTH = em(5)
_MARKUP = '<small>%s</small>'
def __init__(self, label, icon, icon_size=Gtk.IconSize.DIALOG):
TileToggleButton.__init__(self)
markup = self._MARKUP % label
self.build_default(markup, icon, icon_size)
self.label.set_use_markup(True)
self.label.set_justify(Gtk.Justification.CENTER)
context = self.get_style_context()
context.add_class("section-sel-bg")
context = self.label.get_style_context()
context.add_class("section-sel")
self.draw_hint_has_channel_selector = False
self._alloc = None
self._bg_cache = {}
self.connect('size-allocate', self.on_size_allocate)
self.connect('style-updated', self.on_style_updated)
def on_size_allocate(self, *args):
alloc = self.get_allocation()
if (self._alloc is None or
self._alloc.width != alloc.width or
self._alloc.height != alloc.height):
self._alloc = alloc
# reset the bg cache
self._bg_cache = {}
def on_style_updated(self, *args):
# also reset the bg cache
self._bg_cache = {}
def _cache_bg_for_state(self, state):
a = self.get_allocation()
# tmp surface on which we render the button bg as per the gtk
# theme engine
_surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(_surf)
context = self.get_style_context()
context.save()
context.set_state(state)
Gtk.render_background(context, cr,
-5, -5, a.width + 10, a.height + 10)
Gtk.render_frame(context, cr,
-5, -5, a.width + 10, a.height + 10)
del cr
# new surface which will be cached which
surf = cairo.ImageSurface(cairo.FORMAT_ARGB32,
a.width, a.height)
cr = cairo.Context(surf)
# gradient for masking
lin = cairo.LinearGradient(0, 0, 0, a.height)
lin.add_color_stop_rgba(0.0, 1, 1, 1, 0.1)
lin.add_color_stop_rgba(0.25, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(0.5, 1, 1, 1, 1.0)
lin.add_color_stop_rgba(0.75, 1, 1, 1, 0.7)
lin.add_color_stop_rgba(1.0, 1, 1, 1, 0.1)
cr.set_source_surface(_surf, 0, 0)
cr.mask(lin)
del cr
# cache the resulting surf...
self._bg_cache[state] = surf
def do_draw(self, cr):
state = self.get_state_flags()
if self.get_active():
if state not in self._bg_cache:
self._cache_bg_for_state(state)
cr.set_source_surface(self._bg_cache[state], 0, 0)
cr.paint()
for child in self:
self.propagate_draw(child, cr)
class Link(Gtk.Label):
__gsignals__ = {
"clicked": (GObject.SignalFlags.RUN_LAST,
None,
(),)
}
def __init__(self, markup="", uri="none"):
Gtk.Label.__init__(self)
self._handler = 0
self.set_markup(markup, uri)
def set_markup(self, markup="", uri="none"):
markup = '<a href="%s">%s</a>' % (uri, markup)
Gtk.Label.set_markup(self, markup)
if self._handler == 0:
self._handler = self.connect("activate-link",
self.on_activate_link)
# synonyms for set_markup
def set_label(self, label):
return self.set_markup(label)
def set_text(self, text):
return self.set_markup(text)
def on_activate_link(self, uri, data):
self.emit("clicked")
def disable(self):
self.set_sensitive(False)
self.set_name("subtle-label")
def enable(self):
self.set_sensitive(True)
self.set_name("label")
class MoreLink(Gtk.Button):
_MARKUP = '<b>%s</b>'
_MORE = _("More")
def __init__(self):
Gtk.Button.__init__(self)
self.label = Gtk.Label()
self.label.set_padding(StockEms.SMALL, 0)
self.label.set_markup(self._MARKUP % _(self._MORE))
self.add(self.label)
self._init_event_handling()
context = self.get_style_context()
context.add_class("more-link")
def _init_event_handling(self):
self.connect("enter-notify-event", self.on_enter)
self.connect("leave-notify-event", self.on_leave)
def do_draw(self, cr):
if self.has_focus():
layout = self.label.get_layout()
a = self.get_allocation()
e = layout.get_pixel_extents()[1]
xo, yo = self.label.get_layout_offsets()
Gtk.render_focus(self.get_style_context(), cr,
xo - a.x - 3, yo - a.y - 1,
e.width + 6, e.height + 2)
for child in self:
self.propagate_draw(child, cr)
def on_enter(self, widget, event):
window = self.get_window()
window.set_cursor(_HAND)
def on_leave(self, widget, event):
window = self.get_window()
window.set_cursor(None)
def _build_channels_list(popup):
for i in range(3):
item = Gtk.MenuItem.new()
label = Gtk.Label.new("channel_name %s" % i)
box = Gtk.Box.new(Gtk.Orientation.HORIZONTAL, StockEms.MEDIUM)
box.pack_start(label, False, False, 0)
item.add(box)
item.show_all()
popup.attach(item, 0, 1, i, i + 1)
def get_test_buttons_window():
win = Gtk.Window()
win.set_size_request(200, 200)
vb = Gtk.VBox(spacing=12)
win.add(vb)
link = Link("<small>test link</small>", uri="www.google.co.nz")
vb.pack_start(link, False, False, 0)
button = Gtk.Button()
button.set_label("channels")
channels_button = ChannelSelector(button)
channels_button.parent_style_type = Gtk.Window
channels_button.set_build_func(_build_channels_list)
hb = Gtk.HBox()
hb.pack_start(button, False, False, 0)
hb.pack_start(channels_button, False, False, 0)
vb.pack_start(hb, False, False, 0)
win.show_all()
win.connect("destroy", Gtk.main_quit)
return win
if __name__ == "__main__":
win = get_test_buttons_window()
Gtk.main()
| gpl-3.0 | -1,711,772,411,216,868,400 | 31.782344 | 79 | 0.562308 | false |
lucperkins/heron | integration_test/src/python/integration_test/topology/one_spout_bolt_multi_tasks/one_spout_bolt_multi_tasks.py | 1 | 1368 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# copyright 2016 twitter. all rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
from heronpy.api.stream import Grouping
from integration_test.src.python.integration_test.core import TestTopologyBuilder
from integration_test.src.python.integration_test.common.bolt import IdentityBolt
from integration_test.src.python.integration_test.common.spout import ABSpout
def one_spout_bolt_multi_tasks_builder(topology_name, http_server_url):
builder = TestTopologyBuilder(topology_name, http_server_url)
ab_spout = builder.add_spout("ab-spout", ABSpout, 3)
builder.add_bolt("identity-bolt", IdentityBolt,
inputs={ab_spout: Grouping.SHUFFLE},
par=3,
optional_outputs=['word'])
return builder.create_topology()
| apache-2.0 | 9,151,532,406,057,182,000 | 39.235294 | 81 | 0.741228 | false |
Azure/azure-sdk-for-python | sdk/consumption/azure-mgmt-consumption/azure/mgmt/consumption/_configuration.py | 1 | 3274 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy
from ._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
class ConsumptionManagementClientConfiguration(Configuration):
"""Configuration for ConsumptionManagementClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Azure Subscription ID.
:type subscription_id: str
"""
def __init__(
self,
credential, # type: "TokenCredential"
subscription_id, # type: str
**kwargs # type: Any
):
# type: (...) -> None
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if subscription_id is None:
raise ValueError("Parameter 'subscription_id' must not be None.")
super(ConsumptionManagementClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.subscription_id = subscription_id
self.api_version = "2019-10-01"
self.credential_scopes = kwargs.pop('credential_scopes', ['https://management.azure.com/.default'])
kwargs.setdefault('sdk_moniker', 'mgmt-consumption/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs # type: Any
):
# type: (...) -> None
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or ARMHttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.RetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.RedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.BearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
| mit | -7,833,224,672,260,966,000 | 45.112676 | 129 | 0.67135 | false |
burtgulash/PaTrie | patrie.py | 1 | 2824 | #!/usr/bin/python3
class TNode:
def __init__(self, children):
self.children = children
def is_leaf(self):
return self.children is None
def __repr__(self):
return repr(self.children)
class PaTrie:
def __init__(self):
self.root = None
def __contains__(self, word):
cur = self.root
if cur is None:
return False
i = 0
while cur is not None and not cur.is_leaf():
for label, child in cur.children.items():
if len(label) == 0 and i < len(word):
continue
if word[i:i + len(label)] == label:
cur = child
i += len(label)
break
else:
return False
return i == len(word)
def insert(self, word):
cur = self.root
if cur is None:
self.root = TNode({ word: None })
return
i = 0
while not cur.is_leaf():
for label, child in cur.children.items():
cl = self.common_prefix_len(word[i:], label)
if cl:
if cl == len(label):
cur = child
i += len(label)
break
del cur.children[label]
cur.children[label[:cl]] = TNode({
label[cl:]: child,
word[i + cl:]: TNode(None),
})
return
else:
cur.children[word[i:]] = TNode(None)
return
cur.children = {
"": TNode(None),
word[i:]: TNode(None)
}
def __str__(self):
s = []
def _str(tnode, sofar, label, prepend):
if tnode is None:
return
if tnode.is_leaf():
if label:
s.append(prepend + "+ " + label)
s.append(prepend + " {"+sofar+"}")
else:
s.append(prepend + "+ " + label)
for label, child in tnode.children.items():
_str(child, sofar + label, label, prepend + " ")
if self.root is not None:
_str(self.root, "", "", "")
return "\n".join(s)
def common_prefix_len(self, a, b):
i = 0
for x, y in zip(a, b):
if x == y:
i += 1
else:
break
return i
if __name__ == "__main__":
t = PaTrie()
words = "autobus", "auto", "abraka", "dabra", "abrakadabra", "honza", "honirna", "honicka", "hony", "ho", "h"
for w in words:
t.insert(w)
print("AFTER INSERTING", w)
print(t.root)
print(t)
print()
| mit | -5,929,119,131,326,029,000 | 24.672727 | 113 | 0.410765 | false |
stringertheory/names | update_meter.py | 1 | 4118 | import sys
import string
import termcolor
import pymongo
import pronouncing
import unidecode
import distance
# a decorator that caches functions but stores results with a db backend would be nice.
def mongo_collection():
collection = pymongo.MongoClient().poetry.poems
return collection
def word_tokenize(sentence):
ascii_version = unidecode.unidecode(sentence.lower())
word_list = []
for word in ascii_version.split():
stripped = word.strip(string.punctuation).strip()
if stripped:
word_list.append(stripped)
return word_list
def phones_for_sentence(word_list):
approximate_words = []
phones_list = []
for word in word_list:
replacement, phones = distance.phones_for_word(word)
approximate_words.append(replacement)
# for now, just pick first alternative from list
phones_list.append(phones[0])
return approximate_words, phones_list
def stress_pattern(phones):
return pronouncing.stresses(''.join(p for p in phones))
collection = mongo_collection()
for index, document in enumerate(collection.find(no_cursor_timeout=True).sort("_id", pymongo.DESCENDING).batch_size(5), 1):
if 'analyzed' in document or not 'lines' in document:
print('skipping %s' % document['_id'], file=sys.stderr)
continue
else:
print('analyzing %s' % document['_id'], file=sys.stderr)
normalized = [word_tokenize(sentence) for sentence in document['lines']]
approximate = []
phones = []
for sentence in normalized:
a, p = phones_for_sentence(sentence)
approximate.append(a)
phones.append(p)
stresses = [stress_pattern(sentence) for sentence in phones]
# zip up for easier storage
analyzed = []
for n, a, p in zip(normalized, approximate, phones):
sentence = []
for n_, a_, p_ in zip(n, a, p):
word = {
'ascii': n_,
'closest': a_,
'phones': p_,
}
sentence.append(word)
analyzed.append(sentence)
document['analyzed'] = analyzed
document['stresses'] = stresses
collection.update_one(
{'_id': document.get('_id')},
{'$set': {'analyzed': analyzed, 'stresses': stresses}},
)
print(index, 'inserted', document['_id'])
row_list = []
for signal, line in zip(stresses, document['lines']):
terminal = []
block_list = []
for i in signal:
if int(i):
block_list.append('<div class="diagram stressed"></div>')
terminal.append(termcolor.colored(' ', 'green', 'on_blue'))
else:
terminal.append(termcolor.colored(' ', 'green', 'on_yellow'))
block_list.append('<div class="diagram unstressed"></div>')
row = '<div class="diagram sentence">%s</div>' % ''.join(block_list)
row_list.append(row)
print(''.join(terminal), file=sys.stderr)
diagram = '<div class="diagram container">%s</div>' % ''.join(row_list)
with open('formatted/%s.html' % document['_id'], 'w') as outfile:
outfile.write('<html>')
outfile.write('<head>')
outfile.write('<link rel="stylesheet" type="text/css" href="diagram.css">')
outfile.write('</head>')
outfile.write('<body>')
outfile.write(document['html'])
outfile.write('\n')
outfile.write(diagram)
outfile.write('\n')
outfile.write('</body>')
outfile.write('</html>')
# c_a, c_d = pywt.dwt(signal, 'haar')
# for i, j in enumerate(signal):
# print i, j
# print ''
# # for i in c_a:
# # print i
# # print ''
# # for i in c_d:
# # print i
# # print ''
# ps = np.abs(np.fft.fft(signal))**2
# # for i, j in enumerate(ps):
# # print i, j
# time_step = 1
# freqs = np.fft.fftfreq(len(signal), time_step)
# print >> sys.stderr, freqs
# idx = np.argsort(freqs)
# for x, y in zip(freqs[idx], ps[idx]):
# print x, y
| mit | -8,512,225,107,606,674,000 | 29.503704 | 123 | 0.575765 | false |
scality/manila | manila/tests/share/test_manager.py | 1 | 131893 | # Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test of Share Manager for Manila."""
import datetime
import ddt
import mock
from oslo_serialization import jsonutils
from oslo_utils import importutils
from oslo_utils import timeutils
import six
from manila.common import constants
from manila import context
from manila import db
from manila.db.sqlalchemy import models
from manila import exception
from manila import quota
from manila.share import drivers_private_data
from manila.share import manager
from manila.share import migration
from manila.share import rpcapi
from manila.share import share_types
from manila import test
from manila.tests import db_utils
from manila.tests import utils as test_utils
from manila import utils
@ddt.ddt
class ShareManagerTestCase(test.TestCase):
def setUp(self):
super(ShareManagerTestCase, self).setUp()
self.flags(share_driver='manila.tests.fake_driver.FakeShareDriver')
# Define class directly, because this test suite dedicated
# to specific manager.
self.share_manager = importutils.import_object(
"manila.share.manager.ShareManager")
self.mock_object(self.share_manager.driver, 'do_setup')
self.mock_object(self.share_manager.driver, 'check_for_setup_error')
self.context = context.get_admin_context()
self.share_manager.driver.initialized = True
def test_share_manager_instance(self):
fake_service_name = "fake_service"
import_mock = mock.Mock()
self.mock_object(importutils, "import_object", import_mock)
private_data_mock = mock.Mock()
self.mock_object(drivers_private_data, "DriverPrivateData",
private_data_mock)
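        # stub hook-driver loading so the constructor does not import real hooks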
self.mock_object(manager.ShareManager, '_init_hook_drivers')
share_manager = manager.ShareManager(service_name=fake_service_name)
private_data_mock.assert_called_once_with(
context=mock.ANY,
backend_host=share_manager.host,
config_group=fake_service_name
)
self.assertTrue(import_mock.called)
self.assertTrue(manager.ShareManager._init_hook_drivers.called)
def test__init_hook_drivers(self):
fake_service_name = "fake_service"
import_mock = mock.Mock()
self.mock_object(importutils, "import_object", import_mock)
self.mock_object(drivers_private_data, "DriverPrivateData")
share_manager = manager.ShareManager(service_name=fake_service_name)
share_manager.configuration.safe_get = mock.Mock(
return_value=["Foo", "Bar"])
self.assertEqual(0, len(share_manager.hooks))
        # clear the calls recorded during construction before re-running
        import_mock.reset_mock()
share_manager._init_hook_drivers()
self.assertEqual(
len(share_manager.configuration.safe_get.return_value),
len(share_manager.hooks))
import_mock.assert_has_calls([
mock.call(
hook,
configuration=share_manager.configuration,
host=share_manager.host
) for hook in share_manager.configuration.safe_get.return_value
], any_order=True)
def test__execute_periodic_hook(self):
share_instances_mock = mock.Mock()
hook_data_mock = mock.Mock()
self.mock_object(
self.share_manager.db,
"share_instances_get_all_by_host",
share_instances_mock)
self.mock_object(
self.share_manager.driver,
"get_periodic_hook_data",
hook_data_mock)
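        # install two fake hooks; each must run exactly once with the shared data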
self.share_manager.hooks = [mock.Mock(return_value=i) for i in (0, 1)]
self.share_manager._execute_periodic_hook(self.context)
share_instances_mock.assert_called_once_with(
context=self.context, host=self.share_manager.host)
hook_data_mock.assert_called_once_with(
context=self.context,
share_instances=share_instances_mock.return_value)
for mock_hook in self.share_manager.hooks:
mock_hook.execute_periodic_hook.assert_called_once_with(
context=self.context,
periodic_hook_data=hook_data_mock.return_value)
def test_init_host_with_no_shares(self):
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_host',
mock.Mock(return_value=[]))
self.share_manager.init_host()
self.assertTrue(self.share_manager.driver.initialized)
self.share_manager.db.share_instances_get_all_by_host.\
assert_called_once_with(utils.IsAMatcher(context.RequestContext),
self.share_manager.host)
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.\
assert_called_once_with()
@ddt.data(
"migrate_share",
"create_share_instance",
"manage_share",
"unmanage_share",
"delete_share_instance",
"delete_free_share_servers",
"create_snapshot",
"delete_snapshot",
"allow_access",
"deny_access",
"_report_driver_status",
"_execute_periodic_hook",
"publish_service_capabilities",
"delete_share_server",
"extend_share",
"shrink_share",
"create_consistency_group",
"delete_consistency_group",
"create_cgsnapshot",
"delete_cgsnapshot",
)
def test_call_driver_when_its_init_failed(self, method_name):
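        """Every driver-facing method must raise once driver setup has failed."""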
self.mock_object(self.share_manager.driver, 'do_setup',
mock.Mock(side_effect=Exception()))
self.share_manager.init_host()
self.assertRaises(
exception.DriverNotInitialized,
getattr(self.share_manager, method_name),
'foo', 'bar', 'quuz'
)
@ddt.data("do_setup", "check_for_setup_error")
def test_init_host_with_driver_failure(self, method_name):
self.mock_object(self.share_manager.driver, method_name,
mock.Mock(side_effect=Exception()))
self.mock_object(manager.LOG, 'exception')
self.share_manager.driver.initialized = False
self.share_manager.init_host()
manager.LOG.exception.assert_called_once_with(
mock.ANY, {'name': self.share_manager.driver.__class__.__name__,
'host': self.share_manager.host,
'exc': mock.ANY})
self.assertFalse(self.share_manager.driver.initialized)
def _setup_init_mocks(self, setup_access_rules=True):
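        # four share instances: two healthy, one in 'error' status and one
        # mid-migration; init_host is expected to re-export only the healthy pair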
instances = [
db_utils.create_share(id='fake_id_1',
status=constants.STATUS_AVAILABLE,
display_name='fake_name_1').instance,
db_utils.create_share(id='fake_id_2',
status=constants.STATUS_ERROR,
display_name='fake_name_2').instance,
db_utils.create_share(id='fake_id_3',
status=constants.STATUS_AVAILABLE,
display_name='fake_name_3').instance,
db_utils.create_share(
id='fake_id_4',
status=constants.STATUS_AVAILABLE,
task_state=constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
display_name='fake_name_4').instance,
]
if not setup_access_rules:
return instances
rules = [
db_utils.create_access(state=constants.STATUS_ACTIVE,
share_id='fake_id_1'),
db_utils.create_access(state=constants.STATUS_ERROR,
share_id='fake_id_3'),
]
return instances, rules
def test_init_host_with_shares_and_rules(self):
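        """init_host must re-export shares and re-apply their active rules."""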
        # set up test data
def raise_share_access_exists(*args, **kwargs):
raise exception.ShareAccessExists(
access_type='fake_access_type', access='fake_access')
instances, rules = self._setup_init_mocks()
fake_export_locations = ['fake/path/1', 'fake/path']
share_server = 'fake_share_server_type_does_not_matter'
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.db,
'share_export_locations_update')
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(return_value=fake_export_locations))
self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities',
mock.Mock())
self.mock_object(self.share_manager.db,
'share_access_get_all_for_share',
mock.Mock(return_value=rules))
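        # a driver raising ShareAccessExists while re-applying an existing rule
        # must be treated as success, not as an error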
self.mock_object(self.share_manager.driver, 'allow_access',
mock.Mock(side_effect=raise_share_access_exists))
        # call the 'init_host' method
self.share_manager.init_host()
        # verify the expected calls
self.share_manager.db.share_instances_get_all_by_host.\
assert_called_once_with(utils.IsAMatcher(context.RequestContext),
self.share_manager.host)
exports_update = self.share_manager.db.share_export_locations_update
exports_update.assert_has_calls([
mock.call(mock.ANY, instances[0]['id'], fake_export_locations),
mock.call(mock.ANY, instances[2]['id'], fake_export_locations)
])
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.\
assert_called_once_with()
self.share_manager._ensure_share_instance_has_pool.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager._get_share_server.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager.driver.ensure_share.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
share_server=share_server),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
share_server=share_server),
])
self.share_manager.db.share_access_get_all_for_share.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext),
instances[0]['share_id']),
mock.call(utils.IsAMatcher(context.RequestContext),
instances[2]['share_id']),
])
self.share_manager.publish_service_capabilities.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.allow_access.assert_has_calls([
mock.call(mock.ANY, instances[0], rules[0],
share_server=share_server),
mock.call(mock.ANY, instances[2], rules[0],
share_server=share_server),
])
def test_init_host_with_exception_on_ensure_share(self):
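        """A driver failure in ensure_share must not abort init_host."""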
def raise_exception(*args, **kwargs):
raise exception.ManilaException(message="Fake raise")
instances = self._setup_init_mocks(setup_access_rules=False)
share_server = 'fake_share_server_type_does_not_matter'
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(side_effect=raise_exception))
self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities')
self.mock_object(manager.LOG, 'error')
self.mock_object(manager.LOG, 'info')
        # call the 'init_host' method
self.share_manager.init_host()
        # verify the expected calls
self.share_manager.db.share_instances_get_all_by_host.\
assert_called_once_with(utils.IsAMatcher(context.RequestContext),
self.share_manager.host)
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.assert_called_with()
self.share_manager._ensure_share_instance_has_pool.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager._get_share_server.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager.driver.ensure_share.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
share_server=share_server),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
share_server=share_server),
])
self.share_manager.publish_service_capabilities.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
manager.LOG.info.assert_any_call(
mock.ANY,
{'task': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
'id': instances[3]['id']},
)
manager.LOG.info.assert_any_call(
mock.ANY,
{'id': instances[1]['id'], 'status': instances[1]['status']},
)
def test_init_host_with_exception_on_rule_access_allow(self):
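        """A driver failure while re-applying a rule is logged, not fatal."""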
def raise_exception(*args, **kwargs):
raise exception.ManilaException(message="Fake raise")
instances, rules = self._setup_init_mocks()
share_server = 'fake_share_server_type_does_not_matter'
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_host',
mock.Mock(return_value=instances))
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(side_effect=[instances[0], instances[2],
instances[3]]))
self.mock_object(self.share_manager.driver, 'ensure_share',
mock.Mock(return_value=None))
self.mock_object(self.share_manager, '_ensure_share_instance_has_pool')
self.mock_object(self.share_manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager, 'publish_service_capabilities')
self.mock_object(manager.LOG, 'error')
self.mock_object(manager.LOG, 'info')
self.mock_object(self.share_manager.db,
'share_access_get_all_for_share',
mock.Mock(return_value=rules))
self.mock_object(self.share_manager.driver, 'allow_access',
mock.Mock(side_effect=raise_exception))
        # call the 'init_host' method
self.share_manager.init_host()
        # verify the expected calls
self.share_manager.db.share_instances_get_all_by_host.\
assert_called_once_with(utils.IsAMatcher(context.RequestContext),
self.share_manager.host)
self.share_manager.driver.do_setup.assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
self.share_manager.driver.check_for_setup_error.assert_called_with()
self.share_manager._ensure_share_instance_has_pool.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager._get_share_server.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0]),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2]),
])
self.share_manager.driver.ensure_share.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
share_server=share_server),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
share_server=share_server),
])
self.share_manager.publish_service_capabilities.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext))
manager.LOG.info.assert_any_call(
mock.ANY,
{'task': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS,
'id': instances[3]['id']},
)
manager.LOG.info.assert_any_call(
mock.ANY,
{'id': instances[1]['id'], 'status': instances[1]['status']},
)
self.share_manager.driver.allow_access.assert_has_calls([
mock.call(utils.IsAMatcher(context.RequestContext), instances[0],
rules[0], share_server=share_server),
mock.call(utils.IsAMatcher(context.RequestContext), instances[2],
rules[0], share_server=share_server),
])
manager.LOG.error.assert_has_calls([
mock.call(mock.ANY, mock.ANY),
mock.call(mock.ANY, mock.ANY),
])
def test_create_share_instance_from_snapshot_with_server(self):
"""Test share can be created from snapshot if server exists."""
network = db_utils.create_share_network()
server = db_utils.create_share_server(
share_network_id=network['id'], host='fake_host',
backend_details=dict(fake='fake'))
parent_share = db_utils.create_share(share_network_id='net-id',
share_server_id=server['id'])
share = db_utils.create_share()
share_id = share['id']
snapshot = db_utils.create_snapshot(share_id=parent_share['id'])
snapshot_id = snapshot['id']
self.share_manager.create_share_instance(
self.context, share.instance['id'], snapshot_id=snapshot_id)
self.assertEqual(share_id, db.share_get(context.get_admin_context(),
share_id).id)
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_AVAILABLE, shr['status'])
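        # the new instance must be placed on the parent share's existing server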
self.assertEqual(server['id'], shr['share_server_id'])
def test_create_share_instance_from_snapshot_with_server_not_found(self):
"""Test creation from snapshot fails if server not found."""
parent_share = db_utils.create_share(share_network_id='net-id',
share_server_id='fake-id')
share = db_utils.create_share()
share_id = share['id']
snapshot = db_utils.create_snapshot(share_id=parent_share['id'])
snapshot_id = snapshot['id']
self.assertRaises(exception.ShareServerNotFound,
self.share_manager.create_share_instance,
self.context,
share.instance['id'],
snapshot_id=snapshot_id
)
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_ERROR, shr['status'])
def test_create_share_instance_from_snapshot(self):
"""Test share can be created from snapshot."""
share = db_utils.create_share()
share_id = share['id']
snapshot = db_utils.create_snapshot(share_id=share_id)
snapshot_id = snapshot['id']
self.share_manager.create_share_instance(
self.context, share.instance['id'], snapshot_id=snapshot_id)
self.assertEqual(share_id, db.share_get(context.get_admin_context(),
share_id).id)
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_AVAILABLE, shr['status'])
        self.assertGreater(len(shr['export_location']), 0)
self.assertEqual(2, len(shr['export_locations']))
def test_create_delete_share_snapshot(self):
"""Test share's snapshot can be created and deleted."""
def _fake_create_snapshot(self, snapshot, **kwargs):
snapshot['progress'] = '99%'
return snapshot
self.mock_object(self.share_manager.driver, "create_snapshot",
_fake_create_snapshot)
share = db_utils.create_share()
share_id = share['id']
snapshot = db_utils.create_snapshot(share_id=share_id)
snapshot_id = snapshot['id']
self.share_manager.create_snapshot(self.context, share_id,
snapshot_id)
self.assertEqual(share_id,
db.share_snapshot_get(context.get_admin_context(),
snapshot_id).share_id)
snap = db.share_snapshot_get(self.context, snapshot_id)
self.assertEqual(constants.STATUS_AVAILABLE, snap['status'])
self.share_manager.delete_snapshot(self.context, snapshot_id)
self.assertRaises(exception.NotFound,
db.share_snapshot_get,
self.context,
snapshot_id)
def test_create_delete_share_snapshot_error(self):
"""Test snapshot can be created and deleted with error."""
def _raise_not_found(self, *args, **kwargs):
raise exception.NotFound()
self.mock_object(self.share_manager.driver, "create_snapshot",
mock.Mock(side_effect=_raise_not_found))
self.mock_object(self.share_manager.driver, "delete_snapshot",
mock.Mock(side_effect=_raise_not_found))
share = db_utils.create_share()
share_id = share['id']
snapshot = db_utils.create_snapshot(share_id=share_id)
snapshot_id = snapshot['id']
self.assertRaises(exception.NotFound,
self.share_manager.create_snapshot,
self.context, share_id, snapshot_id)
snap = db.share_snapshot_get(self.context, snapshot_id)
self.assertEqual(constants.STATUS_ERROR, snap['status'])
self.assertRaises(exception.NotFound,
self.share_manager.delete_snapshot,
self.context, snapshot_id)
self.assertEqual(
constants.STATUS_ERROR_DELETING,
db.share_snapshot_get(self.context, snapshot_id).status)
self.share_manager.driver.create_snapshot.assert_called_once_with(
self.context, utils.IsAMatcher(models.ShareSnapshotInstance),
share_server=None)
self.share_manager.driver.delete_snapshot.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
utils.IsAMatcher(models.ShareSnapshotInstance),
share_server=None)

    def test_delete_share_snapshot_if_busy(self):
        """Test snapshot could not be deleted if busy."""

        def _raise_share_snapshot_is_busy(self, *args, **kwargs):
            raise exception.ShareSnapshotIsBusy(snapshot_name='fakename')

        self.mock_object(self.share_manager.driver, "delete_snapshot",
                         mock.Mock(side_effect=_raise_share_snapshot_is_busy))
share = db_utils.create_share(status=constants.STATUS_ACTIVE)
snapshot = db_utils.create_snapshot(share_id=share['id'])
snapshot_id = snapshot['id']
self.share_manager.delete_snapshot(self.context, snapshot_id)
snap = db.share_snapshot_get(self.context, snapshot_id)
self.assertEqual(constants.STATUS_AVAILABLE, snap['status'])
self.share_manager.driver.delete_snapshot.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
utils.IsAMatcher(models.ShareSnapshotInstance),
share_server=None)

    def test_create_share_instance_with_share_network_dhss_false(self):
manager.CONF.set_default('driver_handles_share_servers', False)
self.mock_object(
self.share_manager.driver.configuration, 'safe_get',
mock.Mock(return_value=False))
share_network_id = 'fake_sn'
share_instance = db_utils.create_share(
share_network_id=share_network_id).instance
self.mock_object(
self.share_manager.db, 'share_instance_get',
mock.Mock(return_value=share_instance))
self.mock_object(self.share_manager.db, 'share_instance_update')
self.assertRaisesRegex(
exception.ManilaException,
'.*%s.*' % share_instance['id'],
self.share_manager.create_share_instance, self.context,
share_instance['id'])
self.share_manager.db.share_instance_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
share_instance['id'],
with_share_data=True
)
self.share_manager.db.share_instance_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), share_instance['id'],
{'status': constants.STATUS_ERROR})

    def test_create_share_instance_with_share_network_server_not_exists(self):
"""Test share can be created without share server."""
share_net = db_utils.create_share_network()
share = db_utils.create_share(share_network_id=share_net['id'])
share_id = share['id']

        def fake_setup_server(context, share_network, *args, **kwargs):
            return db_utils.create_share_server(
                share_network_id=share_network['id'],
                host='fake_host')

        self.mock_object(manager.LOG, 'info')
self.share_manager.driver.create_share = mock.Mock(
return_value='fake_location')
self.share_manager._setup_server = fake_setup_server
self.share_manager.create_share_instance(self.context,
share.instance['id'])
self.assertEqual(share_id, db.share_get(context.get_admin_context(),
share_id).id)
manager.LOG.info.assert_called_with(mock.ANY, share.instance['id'])

    def test_create_share_instance_with_share_network_server_fail(self):
fake_share = db_utils.create_share(share_network_id='fake_sn_id',
size=1)
fake_server = {
'id': 'fake_srv_id',
'status': constants.STATUS_CREATING,
}
self.mock_object(db, 'share_server_create',
mock.Mock(return_value=fake_server))
self.mock_object(db, 'share_instance_update',
mock.Mock(return_value=fake_share.instance))
self.mock_object(db, 'share_instance_get',
mock.Mock(return_value=fake_share.instance))
self.mock_object(manager.LOG, 'error')

        def raise_share_server_not_found(*args, **kwargs):
            raise exception.ShareServerNotFound(
                share_server_id=fake_server['id'])

        def raise_manila_exception(*args, **kwargs):
            raise exception.ManilaException()

        self.mock_object(db,
'share_server_get_all_by_host_and_share_net_valid',
mock.Mock(side_effect=raise_share_server_not_found))
self.mock_object(self.share_manager, '_setup_server',
mock.Mock(side_effect=raise_manila_exception))
self.assertRaises(
exception.ManilaException,
self.share_manager.create_share_instance,
self.context,
fake_share.instance['id'],
)
db.share_server_get_all_by_host_and_share_net_valid.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
self.share_manager.host,
fake_share['share_network_id'],
)
db.share_server_create.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), mock.ANY)
db.share_instance_update.assert_has_calls([
mock.call(
utils.IsAMatcher(context.RequestContext),
fake_share.instance['id'],
{'status': constants.STATUS_ERROR},
)
])
self.share_manager._setup_server.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), fake_server)
manager.LOG.error.assert_called_with(mock.ANY,
fake_share.instance['id'])

    def test_create_share_instance_with_share_network_not_found(self):
"""Test creation fails if share network not found."""
self.mock_object(manager.LOG, 'error')
share = db_utils.create_share(share_network_id='fake-net-id')
share_id = share['id']
self.assertRaises(
exception.ShareNetworkNotFound,
self.share_manager.create_share_instance,
self.context,
share.instance['id']
)
manager.LOG.error.assert_called_with(mock.ANY, share.instance['id'])
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_ERROR, shr['status'])

    def test_create_share_instance_with_share_network_server_exists(self):
"""Test share can be created with existing share server."""
share_net = db_utils.create_share_network()
share = db_utils.create_share(share_network_id=share_net['id'])
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'], host=self.share_manager.host)
share_id = share['id']
self.mock_object(manager.LOG, 'info')
driver_mock = mock.Mock()
driver_mock.create_share.return_value = "fake_location"
driver_mock.choose_share_server_compatible_with_share.return_value = (
share_srv
)
self.share_manager.driver = driver_mock
self.share_manager.create_share_instance(self.context,
share.instance['id'])
self.assertFalse(self.share_manager.driver.setup_network.called)
self.assertEqual(share_id, db.share_get(context.get_admin_context(),
share_id).id)
shr = db.share_get(self.context, share_id)
        self.assertEqual(constants.STATUS_AVAILABLE, shr['status'])
        self.assertEqual(share_srv['id'], shr['share_server_id'])
self.assertTrue(len(shr['export_location']) > 0)
self.assertEqual(1, len(shr['export_locations']))
manager.LOG.info.assert_called_with(mock.ANY, share.instance['id'])

    @ddt.data('export_location', 'export_locations')
    def test_create_share_instance_with_error_in_driver(self, details_key):
"""Test db updates if share creation fails in driver."""
share = db_utils.create_share()
share_id = share['id']
some_data = 'fake_location'
self.share_manager.driver = mock.Mock()
e = exception.ManilaException(detail_data={details_key: some_data})
self.share_manager.driver.create_share.side_effect = e
self.assertRaises(
exception.ManilaException,
self.share_manager.create_share_instance,
self.context,
share.instance['id']
)
self.assertTrue(self.share_manager.driver.create_share.called)
shr = db.share_get(self.context, share_id)
self.assertEqual(some_data, shr['export_location'])

    def test_create_share_instance_with_server_created(self):
"""Test share can be created and share server is created."""
share_net = db_utils.create_share_network()
share = db_utils.create_share(share_network_id=share_net['id'])
db_utils.create_share_server(
share_network_id=share_net['id'], host=self.share_manager.host,
status=constants.STATUS_ERROR)
share_id = share['id']
fake_server = {
'id': 'fake_srv_id',
'status': constants.STATUS_CREATING,
}
self.mock_object(db, 'share_server_create',
mock.Mock(return_value=fake_server))
self.mock_object(self.share_manager, '_setup_server',
mock.Mock(return_value=fake_server))
self.share_manager.create_share_instance(self.context,
share.instance['id'])
self.assertEqual(share_id, db.share_get(context.get_admin_context(),
share_id).id)
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_AVAILABLE, shr['status'])
self.assertEqual('fake_srv_id', shr['share_server_id'])
db.share_server_create.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), mock.ANY)
self.share_manager._setup_server.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), fake_server)

    def test_create_delete_share_instance_error(self):
"""Test share can be created and deleted with error."""

        def _raise_not_found(self, *args, **kwargs):
            raise exception.NotFound()

        self.mock_object(self.share_manager.driver, "create_share",
mock.Mock(side_effect=_raise_not_found))
self.mock_object(self.share_manager.driver, "delete_share",
mock.Mock(side_effect=_raise_not_found))
share = db_utils.create_share()
share_id = share['id']
self.assertRaises(exception.NotFound,
self.share_manager.create_share_instance,
self.context,
share.instance['id'])
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_ERROR, shr['status'])
self.assertRaises(exception.NotFound,
self.share_manager.delete_share_instance,
self.context,
share.instance['id'])
shr = db.share_get(self.context, share_id)
self.assertEqual(constants.STATUS_ERROR_DELETING, shr['status'])
self.share_manager.driver.create_share.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
utils.IsAMatcher(models.ShareInstance),
share_server=None)
self.share_manager.driver.delete_share.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
utils.IsAMatcher(models.ShareInstance),
share_server=None)

    def test_create_share_instance_update_availability_zone(self):
share = db_utils.create_share(availability_zone=None)
share_id = share['id']
self.share_manager.create_share_instance(
self.context, share.instance['id'])
actual_share = db.share_get(context.get_admin_context(), share_id)
self.assertIsNotNone(actual_share.availability_zone)
self.assertEqual(manager.CONF.storage_availability_zone,
actual_share.availability_zone)

    def test_provide_share_server_for_share_incompatible_servers(self):
fake_exception = exception.ManilaException("fake")
fake_share_server = {'id': 'fake'}
share = db_utils.create_share()
self.mock_object(db,
'share_server_get_all_by_host_and_share_net_valid',
mock.Mock(return_value=[fake_share_server]))
self.mock_object(
self.share_manager.driver,
"choose_share_server_compatible_with_share",
mock.Mock(side_effect=fake_exception)
)
self.assertRaises(exception.ManilaException,
self.share_manager._provide_share_server_for_share,
self.context, "fake_id", share.instance)
driver_mock = self.share_manager.driver
driver_method_mock = (
driver_mock.choose_share_server_compatible_with_share
)
driver_method_mock.assert_called_once_with(
self.context, [fake_share_server], share.instance, snapshot=None,
consistency_group=None)

    def test_provide_share_server_for_share_invalid_arguments(self):
self.assertRaises(ValueError,
self.share_manager._provide_share_server_for_share,
self.context, None, None)

    def test_provide_share_server_for_share_parent_ss_not_found(self):
fake_parent_id = "fake_server_id"
fake_exception = exception.ShareServerNotFound("fake")
share = db_utils.create_share()
fake_snapshot = {'share': {'share_server_id': fake_parent_id}}
self.mock_object(db, 'share_server_get',
mock.Mock(side_effect=fake_exception))
self.assertRaises(exception.ShareServerNotFound,
self.share_manager._provide_share_server_for_share,
self.context, "fake_id", share.instance,
snapshot=fake_snapshot)
db.share_server_get.assert_called_once_with(
self.context, fake_parent_id)

    def test_provide_share_server_for_share_parent_ss_invalid(self):
fake_parent_id = "fake_server_id"
share = db_utils.create_share()
fake_snapshot = {'share': {'share_server_id': fake_parent_id}}
fake_parent_share_server = {'status': 'fake'}
self.mock_object(db, 'share_server_get',
mock.Mock(return_value=fake_parent_share_server))
self.assertRaises(exception.InvalidShareServer,
self.share_manager._provide_share_server_for_share,
self.context, "fake_id", share.instance,
snapshot=fake_snapshot)
db.share_server_get.assert_called_once_with(
self.context, fake_parent_id)

    def test_provide_share_server_for_cg_incompatible_servers(self):
fake_exception = exception.ManilaException("fake")
fake_share_server = {'id': 'fake'}
cg = db_utils.create_consistency_group()
self.mock_object(db,
'share_server_get_all_by_host_and_share_net_valid',
mock.Mock(return_value=[fake_share_server]))
self.mock_object(
self.share_manager.driver,
"choose_share_server_compatible_with_cg",
mock.Mock(side_effect=fake_exception)
)
self.assertRaises(exception.ManilaException,
self.share_manager._provide_share_server_for_cg,
self.context, "fake_id", cg)
driver_mock = self.share_manager.driver
driver_method_mock = (
driver_mock.choose_share_server_compatible_with_cg
)
driver_method_mock.assert_called_once_with(
self.context, [fake_share_server], cg, cgsnapshot=None)

    def test_provide_share_server_for_cg_invalid_arguments(self):
self.assertRaises(exception.InvalidInput,
self.share_manager._provide_share_server_for_cg,
self.context, None, None)

    def test_manage_share_invalid_driver(self):
self.mock_object(self.share_manager, 'driver', mock.Mock())
self.share_manager.driver.driver_handles_share_servers = True
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='False'))
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
share = db_utils.create_share()
share_id = share['id']
self.assertRaises(
exception.InvalidDriverMode,
self.share_manager.manage_share, self.context, share_id, {})
self.share_manager.db.share_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), share_id,
{'status': constants.STATUS_MANAGE_ERROR, 'size': 1})

    def test_manage_share_invalid_share_type(self):
self.mock_object(self.share_manager, 'driver', mock.Mock())
self.share_manager.driver.driver_handles_share_servers = False
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='True'))
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
share = db_utils.create_share()
share_id = share['id']
self.assertRaises(
exception.ManageExistingShareTypeMismatch,
self.share_manager.manage_share, self.context, share_id, {})
self.share_manager.db.share_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), share_id,
{'status': constants.STATUS_MANAGE_ERROR, 'size': 1})

    def test_manage_share_driver_exception(self):
CustomException = type('CustomException', (Exception,), dict())
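        # A throwaway exception type built on the fly: manage_share should
        # let genuinely unexpected driver errors propagate unchanged.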
self.mock_object(self.share_manager, 'driver', mock.Mock())
self.share_manager.driver.driver_handles_share_servers = False
self.mock_object(self.share_manager.driver,
'manage_existing',
mock.Mock(side_effect=CustomException))
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='False'))
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
share = db_utils.create_share()
share_id = share['id']
driver_options = {'fake': 'fake'}
self.assertRaises(
CustomException,
self.share_manager.manage_share,
self.context, share_id, driver_options)
self.share_manager.driver.manage_existing.\
assert_called_once_with(mock.ANY, driver_options)
self.share_manager.db.share_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), share_id,
{'status': constants.STATUS_MANAGE_ERROR, 'size': 1})

    def test_manage_share_invalid_size(self):
self.mock_object(self.share_manager, 'driver')
self.share_manager.driver.driver_handles_share_servers = False
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='False'))
self.mock_object(self.share_manager.driver,
"manage_existing",
mock.Mock(return_value=None))
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
share = db_utils.create_share()
share_id = share['id']
driver_options = {'fake': 'fake'}
self.assertRaises(
exception.InvalidShare,
self.share_manager.manage_share,
self.context, share_id, driver_options)
self.share_manager.driver.manage_existing.\
assert_called_once_with(mock.ANY, driver_options)
self.share_manager.db.share_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), share_id,
{'status': constants.STATUS_MANAGE_ERROR, 'size': 1})

    def test_manage_share_quota_error(self):
self.mock_object(self.share_manager, 'driver')
self.share_manager.driver.driver_handles_share_servers = False
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='False'))
self.mock_object(self.share_manager.driver,
"manage_existing",
mock.Mock(return_value={'size': 3}))
self.mock_object(self.share_manager, '_update_quota_usages',
mock.Mock(side_effect=exception.QuotaError))
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
share = db_utils.create_share()
share_id = share['id']
driver_options = {'fake': 'fake'}
self.assertRaises(
exception.QuotaError,
self.share_manager.manage_share,
self.context, share_id, driver_options)
self.share_manager.driver.manage_existing.\
assert_called_once_with(mock.ANY, driver_options)
self.share_manager.db.share_update.assert_called_once_with(
mock.ANY, share_id,
{'status': constants.STATUS_MANAGE_ERROR, 'size': 1})
self.share_manager._update_quota_usages.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
share['project_id'], {'shares': 1, 'gigabytes': 3})

    @ddt.data(
        {'size': 1},
        {'size': 2, 'name': 'fake'},
        {'size': 3, 'export_locations': ['foo', 'bar', 'quuz']})
    def test_manage_share_valid_share(self, driver_data):
export_locations = driver_data.get('export_locations')
self.mock_object(self.share_manager.db, 'share_update', mock.Mock())
self.mock_object(self.share_manager, 'driver', mock.Mock())
self.mock_object(self.share_manager, '_update_quota_usages',
mock.Mock())
self.mock_object(
self.share_manager.db,
'share_export_locations_update',
mock.Mock(side_effect=(
self.share_manager.db.share_export_locations_update)))
self.share_manager.driver.driver_handles_share_servers = False
self.mock_object(share_types,
'get_share_type_extra_specs',
mock.Mock(return_value='False'))
self.mock_object(self.share_manager.driver,
"manage_existing",
mock.Mock(return_value=driver_data))
share = db_utils.create_share()
share_id = share['id']
driver_options = {'fake': 'fake'}
        self.share_manager.manage_share(
            self.context, share_id, driver_options)
self.share_manager.driver.manage_existing.\
assert_called_once_with(mock.ANY, driver_options)
if export_locations:
self.share_manager.db.share_export_locations_update.\
assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
share.instance['id'], export_locations, delete=True)
else:
self.assertFalse(
self.share_manager.db.share_export_locations_update.called)
valid_share_data = {
'status': constants.STATUS_AVAILABLE, 'launched_at': mock.ANY}
valid_share_data.update(driver_data)
self.share_manager.db.share_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext),
share_id, valid_share_data)

    def test_update_quota_usages_new(self):
self.mock_object(self.share_manager.db, 'quota_usage_get',
mock.Mock(return_value={'in_use': 1}))
self.mock_object(self.share_manager.db, 'quota_usage_update')
project_id = 'fake_project_id'
resource_name = 'fake'
usage = 1
self.share_manager._update_quota_usages(
self.context, project_id, {resource_name: usage})
self.share_manager.db.quota_usage_get.assert_called_once_with(
mock.ANY, project_id, resource_name, mock.ANY)
self.share_manager.db.quota_usage_update.assert_called_once_with(
mock.ANY, project_id, mock.ANY, resource_name, in_use=2)

    def test_update_quota_usages_update(self):
project_id = 'fake_project_id'
resource_name = 'fake'
usage = 1
side_effect = exception.QuotaUsageNotFound(project_id=project_id)
self.mock_object(
self.share_manager.db,
'quota_usage_get',
mock.Mock(side_effect=side_effect))
self.mock_object(self.share_manager.db, 'quota_usage_create')
self.share_manager._update_quota_usages(
self.context, project_id, {resource_name: usage})
self.share_manager.db.quota_usage_get.assert_called_once_with(
mock.ANY, project_id, resource_name, mock.ANY)
self.share_manager.db.quota_usage_create.assert_called_once_with(
mock.ANY, project_id, mock.ANY, resource_name, usage)

    def _setup_unmanage_mocks(self, mock_driver=True, mock_unmanage=None):
        """Mock out the driver/db plumbing shared by the unmanage tests."""
if mock_driver:
self.mock_object(self.share_manager, 'driver')
if mock_unmanage:
self.mock_object(self.share_manager.driver, "unmanage",
mock_unmanage)
self.mock_object(self.share_manager.db, 'share_update')
self.mock_object(self.share_manager.db, 'share_instance_delete')

    @ddt.data(True, False)
    def test_unmanage_share_invalid_driver(self,
                                           driver_handles_share_servers):
self._setup_unmanage_mocks()
self.share_manager.driver.driver_handles_share_servers = (
driver_handles_share_servers
)
share_net = db_utils.create_share_network()
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'], host=self.share_manager.host)
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
self.share_manager.unmanage_share(self.context, share['id'])
self.share_manager.db.share_update.assert_called_once_with(
mock.ANY, share['id'], {'status': constants.STATUS_UNMANAGE_ERROR})

    def test_unmanage_share_invalid_share(self):
unmanage = mock.Mock(side_effect=exception.InvalidShare(reason="fake"))
self._setup_unmanage_mocks(mock_driver=False, mock_unmanage=unmanage)
share = db_utils.create_share()
self.share_manager.unmanage_share(self.context, share['id'])
self.share_manager.db.share_update.assert_called_once_with(
mock.ANY, share['id'], {'status': constants.STATUS_UNMANAGE_ERROR})

    def test_unmanage_share_valid_share(self):
manager.CONF.set_default('driver_handles_share_servers', False)
self._setup_unmanage_mocks(mock_driver=False,
mock_unmanage=mock.Mock())
share = db_utils.create_share()
share_id = share['id']
share_instance_id = share.instance['id']
self.share_manager.unmanage_share(self.context, share_id)
self.share_manager.driver.unmanage.\
assert_called_once_with(mock.ANY)
self.share_manager.db.share_instance_delete.assert_called_once_with(
mock.ANY, share_instance_id)

    def test_unmanage_share_valid_share_with_quota_error(self):
manager.CONF.set_default('driver_handles_share_servers', False)
self._setup_unmanage_mocks(mock_driver=False,
mock_unmanage=mock.Mock())
self.mock_object(quota.QUOTAS, 'reserve',
mock.Mock(side_effect=Exception()))
share = db_utils.create_share()
share_instance_id = share.instance['id']
self.share_manager.unmanage_share(self.context, share['id'])
self.share_manager.driver.unmanage.\
assert_called_once_with(mock.ANY)
self.share_manager.db.share_instance_delete.assert_called_once_with(
mock.ANY, share_instance_id)

    def test_unmanage_share_remove_access_rules_error(self):
manager.CONF.set_default('driver_handles_share_servers', False)
manager.CONF.unmanage_remove_access_rules = True
self._setup_unmanage_mocks(mock_driver=False,
mock_unmanage=mock.Mock())
self.mock_object(self.share_manager, '_remove_share_access_rules',
mock.Mock(side_effect=Exception()))
self.mock_object(quota.QUOTAS, 'reserve', mock.Mock(return_value=[]))
share = db_utils.create_share()
self.share_manager.unmanage_share(self.context, share['id'])
self.share_manager.db.share_update.assert_called_once_with(
mock.ANY, share['id'], {'status': constants.STATUS_UNMANAGE_ERROR})

    def test_unmanage_share_valid_share_remove_access_rules(self):
manager.CONF.set_default('driver_handles_share_servers', False)
manager.CONF.unmanage_remove_access_rules = True
self._setup_unmanage_mocks(mock_driver=False,
mock_unmanage=mock.Mock())
self.mock_object(self.share_manager, '_remove_share_access_rules')
self.mock_object(quota.QUOTAS, 'reserve', mock.Mock(return_value=[]))
share = db_utils.create_share()
share_id = share['id']
share_instance_id = share.instance['id']
self.share_manager.unmanage_share(self.context, share_id)
self.share_manager.driver.unmanage.\
assert_called_once_with(mock.ANY)
self.share_manager._remove_share_access_rules.assert_called_once_with(
mock.ANY, mock.ANY, mock.ANY, mock.ANY
)
self.share_manager.db.share_instance_delete.assert_called_once_with(
mock.ANY, share_instance_id)

    def test_remove_share_access_rules(self):
self.mock_object(self.share_manager.db,
'share_access_get_all_for_share',
mock.Mock(return_value=['fake_ref', 'fake_ref2']))
self.mock_object(self.share_manager, '_deny_access')
share_ref = db_utils.create_share()
share_server = 'fake'
self.share_manager._remove_share_access_rules(
self.context, share_ref, share_ref.instance, share_server)
self.share_manager.db.share_access_get_all_for_share.\
assert_called_once_with(mock.ANY, share_ref['id'])
self.assertEqual(2, self.share_manager._deny_access.call_count)

    def test_delete_share_instance_share_server_not_found(self):
share_net = db_utils.create_share_network()
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id='fake-id')
self.assertRaises(
exception.ShareServerNotFound,
self.share_manager.delete_share_instance,
self.context,
share.instance['id']
)

    @ddt.data(True, False)
    def test_delete_share_instance_last_on_srv_with_sec_service(
self, with_details):
share_net = db_utils.create_share_network()
sec_service = db_utils.create_security_service(
share_network_id=share_net['id'])
backend_details = dict(
security_service_ldap=jsonutils.dumps(sec_service))
if with_details:
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'],
host=self.share_manager.host,
backend_details=backend_details)
else:
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'],
host=self.share_manager.host)
db.share_server_backend_details_set(
context.get_admin_context(), share_srv['id'], backend_details)
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
self.share_manager.driver = mock.Mock()
manager.CONF.delete_share_server_with_last_share = True
self.share_manager.delete_share_instance(self.context,
share.instance['id'])
self.share_manager.driver.teardown_server.assert_called_once_with(
server_details=backend_details,
security_services=[jsonutils.loads(
backend_details['security_service_ldap'])])

    def test_delete_share_instance_last_on_server(self):
share_net = db_utils.create_share_network()
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'],
host=self.share_manager.host
)
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
self.share_manager.driver = mock.Mock()
manager.CONF.delete_share_server_with_last_share = True
self.share_manager.delete_share_instance(self.context,
share.instance['id'])
self.share_manager.driver.teardown_server.assert_called_once_with(
server_details=share_srv.get('backend_details'),
security_services=[])

    def test_delete_share_instance_last_on_server_deletion_disabled(self):
share_net = db_utils.create_share_network()
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'],
host=self.share_manager.host
)
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
manager.CONF.delete_share_server_with_last_share = False
self.share_manager.driver = mock.Mock()
self.share_manager.delete_share_instance(self.context,
share.instance['id'])
self.assertFalse(self.share_manager.driver.teardown_network.called)

    def test_delete_share_instance_not_last_on_server(self):
share_net = db_utils.create_share_network()
share_srv = db_utils.create_share_server(
share_network_id=share_net['id'],
host=self.share_manager.host
)
share = db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
db_utils.create_share(share_network_id=share_net['id'],
share_server_id=share_srv['id'])
manager.CONF.delete_share_server_with_last_share = True
self.share_manager.driver = mock.Mock()
self.share_manager.delete_share_instance(self.context,
share.instance['id'])
self.assertFalse(self.share_manager.driver.teardown_network.called)

    def test_allow_deny_access(self):
"""Test access rules to share can be created and deleted."""
self.mock_object(manager.LOG, 'info')
share = db_utils.create_share()
share_id = share['id']
access = db_utils.create_access(share_id=share_id)
access_id = access['id']
self.share_manager.allow_access(self.context, share.instance['id'],
access_id)
self.assertEqual('active', db.share_access_get(self.context,
access_id).state)
exp_args = {'access_level': access['access_level'],
'share_instance_id': share.instance['id'],
'access_to': access['access_to']}
manager.LOG.info.assert_called_with(mock.ANY, exp_args)
manager.LOG.info.reset_mock()
self.share_manager.deny_access(self.context, share.instance['id'],
access_id)
exp_args = {'share_instance_id': share.instance['id'],
'access_to': access['access_to']}
manager.LOG.info.assert_called_with(mock.ANY, exp_args)

    def test_allow_deny_access_error(self):
"""Test access rules to share can be created and deleted with error."""

        def _fake_allow_access(self, *args, **kwargs):
            raise exception.NotFound()

        def _fake_deny_access(self, *args, **kwargs):
            raise exception.NotFound()

        self.mock_object(self.share_manager.driver, "allow_access",
_fake_allow_access)
self.mock_object(self.share_manager.driver, "deny_access",
_fake_deny_access)
share = db_utils.create_share()
share_id = share['id']
access = db_utils.create_access(share_id=share_id)
access_id = access['id']
self.assertRaises(exception.NotFound,
self.share_manager.allow_access,
self.context,
share.instance['id'],
access_id)
acs = db.share_access_get(self.context, access_id)
self.assertEqual(constants.STATUS_ERROR, acs['state'])
self.assertRaises(exception.NotFound,
self.share_manager.deny_access,
self.context,
share.instance['id'],
access_id)
acs = db.share_access_get(self.context, access_id)
self.assertEqual(constants.STATUS_ERROR, acs['state'])

    def test_setup_server(self):
# Setup required test data
share_server = {
'id': 'fake_id',
'share_network_id': 'fake_sn_id',
}
metadata = {'fake_metadata_key': 'fake_metadata_value'}
share_network = {'id': 'fake_sn_id'}
network_info = {'security_services': []}
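        # Build one fake security service per allowed type; the constant is
        # assumed to cover the usual LDAP/Kerberos/Active Directory trio,
        # and each entry must later be persisted as its own backend detail.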
for ss_type in constants.SECURITY_SERVICES_ALLOWED_TYPES:
network_info['security_services'].append({
'name': 'fake_name' + ss_type,
'domain': 'fake_domain' + ss_type,
'server': 'fake_server' + ss_type,
'dns_ip': 'fake_dns_ip' + ss_type,
'user': 'fake_user' + ss_type,
'type': ss_type,
'password': 'fake_password' + ss_type,
})
sec_services = network_info['security_services']
server_info = {'fake_server_info_key': 'fake_server_info_value'}
network_info['network_type'] = 'fake_network_type'
# mock required stuff
self.mock_object(self.share_manager.db, 'share_network_get',
mock.Mock(return_value=share_network))
self.mock_object(self.share_manager.driver, 'allocate_network')
self.mock_object(self.share_manager, '_form_server_setup_info',
mock.Mock(return_value=network_info))
self.mock_object(self.share_manager, '_validate_segmentation_id')
self.mock_object(self.share_manager.driver, 'setup_server',
mock.Mock(return_value=server_info))
self.mock_object(self.share_manager.db,
'share_server_backend_details_set')
self.mock_object(self.share_manager.db, 'share_server_update',
mock.Mock(return_value=share_server))
# execute method _setup_server
result = self.share_manager._setup_server(
self.context, share_server, metadata=metadata)
# verify results
self.assertEqual(share_server, result)
self.share_manager.db.share_network_get.assert_has_calls([
mock.call(self.context, share_server['share_network_id']),
mock.call(self.context, share_server['share_network_id']),
])
self.share_manager.driver.allocate_network.assert_called_once_with(
self.context, share_server, share_network)
self.share_manager._form_server_setup_info.assert_called_once_with(
self.context, share_server, share_network)
self.share_manager._validate_segmentation_id.assert_called_once_with(
network_info)
self.share_manager.driver.setup_server.assert_called_once_with(
network_info, metadata=metadata)
self.share_manager.db.share_server_backend_details_set.\
assert_has_calls([
mock.call(self.context, share_server['id'],
{'security_service_' + sec_services[0]['type']:
jsonutils.dumps(sec_services[0])}),
mock.call(self.context, share_server['id'],
{'security_service_' + sec_services[1]['type']:
jsonutils.dumps(sec_services[1])}),
mock.call(self.context, share_server['id'],
{'security_service_' + sec_services[2]['type']:
jsonutils.dumps(sec_services[2])}),
mock.call(self.context, share_server['id'], server_info),
])
self.share_manager.db.share_server_update.assert_called_once_with(
self.context, share_server['id'],
{'status': constants.STATUS_ACTIVE})

    def test_setup_server_server_info_not_present(self):
# Setup required test data
share_server = {
'id': 'fake_id',
'share_network_id': 'fake_sn_id',
}
metadata = {'fake_metadata_key': 'fake_metadata_value'}
share_network = {'id': 'fake_sn_id'}
network_info = {
'fake_network_info_key': 'fake_network_info_value',
'security_services': [],
'network_type': 'fake_network_type',
}
server_info = {}
# mock required stuff
self.mock_object(self.share_manager.db, 'share_network_get',
mock.Mock(return_value=share_network))
self.mock_object(self.share_manager, '_form_server_setup_info',
mock.Mock(return_value=network_info))
self.mock_object(self.share_manager.driver, 'setup_server',
mock.Mock(return_value=server_info))
self.mock_object(self.share_manager.db, 'share_server_update',
mock.Mock(return_value=share_server))
self.mock_object(self.share_manager.driver, 'allocate_network')
# execute method _setup_server
result = self.share_manager._setup_server(
self.context, share_server, metadata=metadata)
# verify results
self.assertEqual(share_server, result)
self.share_manager.db.share_network_get.assert_has_calls([
mock.call(self.context, share_server['share_network_id']),
mock.call(self.context, share_server['share_network_id'])])
self.share_manager._form_server_setup_info.assert_called_once_with(
self.context, share_server, share_network)
self.share_manager.driver.setup_server.assert_called_once_with(
network_info, metadata=metadata)
self.share_manager.db.share_server_update.assert_called_once_with(
self.context, share_server['id'],
{'status': constants.STATUS_ACTIVE})
self.share_manager.driver.allocate_network.assert_called_once_with(
self.context, share_server, share_network)

    def setup_server_raise_exception(self, detail_data_proper):
        """Exercise _setup_server when the driver's setup_server raises."""
# Setup required test data
share_server = {
'id': 'fake_id',
'share_network_id': 'fake_sn_id',
}
server_info = {'details_key': 'value'}
share_network = {'id': 'fake_sn_id'}
network_info = {
'fake_network_info_key': 'fake_network_info_value',
'security_services': [],
'network_type': 'fake_network_type',
}
if detail_data_proper:
detail_data = {'server_details': server_info}
self.mock_object(self.share_manager.db,
'share_server_backend_details_set')
else:
detail_data = 'not dictionary detail data'
# Mock required parameters
self.mock_object(self.share_manager.db, 'share_network_get',
mock.Mock(return_value=share_network))
self.mock_object(self.share_manager.db, 'share_server_update')
for m in ['deallocate_network', 'allocate_network']:
self.mock_object(self.share_manager.driver, m)
self.mock_object(self.share_manager, '_form_server_setup_info',
mock.Mock(return_value=network_info))
self.mock_object(self.share_manager.db,
'share_server_backend_details_set')
self.mock_object(self.share_manager.driver, 'setup_server',
mock.Mock(side_effect=exception.ManilaException(
detail_data=detail_data)))
# execute method _setup_server
self.assertRaises(
exception.ManilaException,
self.share_manager._setup_server,
self.context,
share_server,
)
# verify results
if detail_data_proper:
self.share_manager.db.share_server_backend_details_set.\
assert_called_once_with(
self.context, share_server['id'], server_info)
self.share_manager._form_server_setup_info.assert_called_once_with(
self.context, share_server, share_network)
self.share_manager.db.share_server_update.assert_called_once_with(
self.context, share_server['id'],
{'status': constants.STATUS_ERROR})
self.share_manager.db.share_network_get.assert_has_calls([
mock.call(self.context, share_server['share_network_id']),
mock.call(self.context, share_server['share_network_id'])])
self.share_manager.driver.allocate_network.assert_has_calls([
mock.call(self.context, share_server, share_network)])
self.share_manager.driver.deallocate_network.assert_has_calls([
mock.call(self.context, share_server['id'])])

    def test_setup_server_incorrect_detail_data(self):
self.setup_server_raise_exception(detail_data_proper=False)

    def test_setup_server_exception_in_driver(self):
self.setup_server_raise_exception(detail_data_proper=True)

    @ddt.data({},
              {'detail_data': 'fake'},
              {'detail_data': {'server_details': 'fake'}},
              {'detail_data': {'server_details': {'fake': 'fake'}}},
              {'detail_data': {
                  'server_details': {'fake': 'fake', 'fake2': 'fake2'}}},)
    def test_setup_server_exception_in_cleanup_after_error(self, data):

        def get_server_details_from_data(data):
            d = data.get('detail_data')
            if not isinstance(d, dict):
                return {}
            d = d.get('server_details')
            if not isinstance(d, dict):
                return {}
            return d

        share_server = {'id': 'fake', 'share_network_id': 'fake'}
details = get_server_details_from_data(data)
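        # Each surviving server_details key should get its own
        # backend-details call (checked via details_mock below), even
        # though that db call itself raises.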
exc_mock = mock.Mock(side_effect=exception.ManilaException(**data))
details_mock = mock.Mock(side_effect=exception.ManilaException())
self.mock_object(self.share_manager.db, 'share_network_get', exc_mock)
self.mock_object(self.share_manager.db,
'share_server_backend_details_set', details_mock)
self.mock_object(self.share_manager.db, 'share_server_update')
self.mock_object(self.share_manager.driver, 'deallocate_network')
self.assertRaises(
exception.ManilaException,
self.share_manager._setup_server,
self.context,
share_server,
)
self.assertTrue(self.share_manager.db.share_network_get.called)
if details:
self.assertEqual(len(details), details_mock.call_count)
expected = [mock.call(mock.ANY, share_server['id'], {k: v})
for k, v in details.items()]
self.assertEqual(expected, details_mock.call_args_list)
self.share_manager.db.share_server_update.assert_called_once_with(
self.context,
share_server['id'],
{'status': constants.STATUS_ERROR})
self.share_manager.driver.deallocate_network.assert_called_once_with(
self.context, share_server['id']
)

    def test_ensure_share_instance_has_pool_with_only_host(self):
fake_share = {
'status': constants.STATUS_AVAILABLE, 'host': 'host1', 'id': 1}
host = self.share_manager._ensure_share_instance_has_pool(
context.get_admin_context(), fake_share)
self.assertIsNone(host)

    def test_ensure_share_instance_has_pool_with_full_pool_name(self):
fake_share = {'host': 'host1#pool0', 'id': 1,
'status': constants.STATUS_AVAILABLE}
fake_share_expected_value = 'pool0'
host = self.share_manager._ensure_share_instance_has_pool(
context.get_admin_context(), fake_share)
self.assertEqual(fake_share_expected_value, host)

    def test_ensure_share_instance_has_pool_unable_to_fetch_share(self):
fake_share = {'host': 'host@backend', 'id': 1,
'status': constants.STATUS_AVAILABLE}
with mock.patch.object(self.share_manager.driver, 'get_pool',
side_effect=Exception):
with mock.patch.object(manager, 'LOG') as mock_LOG:
self.share_manager._ensure_share_instance_has_pool(
context.get_admin_context(), fake_share)
self.assertEqual(1, mock_LOG.error.call_count)

    def test__form_server_setup_info(self):
fake_network_allocations = ['foo', 'bar']
self.mock_object(
self.share_manager.db, 'network_allocations_get_for_share_server',
mock.Mock(return_value=fake_network_allocations))
fake_share_server = dict(
id='fake_share_server_id', backend_details=dict(foo='bar'))
fake_share_network = dict(
segmentation_id='fake_segmentation_id',
cidr='fake_cidr',
neutron_net_id='fake_neutron_net_id',
neutron_subnet_id='fake_neutron_subnet_id',
nova_net_id='fake_nova_net_id',
security_services='fake_security_services',
network_type='fake_network_type')
expected = dict(
server_id=fake_share_server['id'],
segmentation_id=fake_share_network['segmentation_id'],
cidr=fake_share_network['cidr'],
neutron_net_id=fake_share_network['neutron_net_id'],
neutron_subnet_id=fake_share_network['neutron_subnet_id'],
nova_net_id=fake_share_network['nova_net_id'],
security_services=fake_share_network['security_services'],
network_allocations=fake_network_allocations,
backend_details=fake_share_server['backend_details'],
network_type=fake_share_network['network_type'])
network_info = self.share_manager._form_server_setup_info(
self.context, fake_share_server, fake_share_network)
self.assertEqual(expected, network_info)
self.share_manager.db.network_allocations_get_for_share_server.\
assert_called_once_with(self.context, fake_share_server['id'])
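
    # The boundary values below reflect the standard segmentation ID ranges:
    # VLAN 1-4094, VXLAN 1-16777215 (2^24 - 1), GRE 1-4294967295 (2^32 - 1);
    # flat and untyped networks carry no segmentation ID at all.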
    @ddt.data(
        {'network_info': {'network_type': 'vlan', 'segmentation_id': '100'}},
        {'network_info': {'network_type': 'vlan', 'segmentation_id': '1'}},
        {'network_info': {'network_type': 'vlan', 'segmentation_id': '4094'}},
        {'network_info': {'network_type': 'vxlan', 'segmentation_id': '100'}},
        {'network_info': {'network_type': 'vxlan', 'segmentation_id': '1'}},
        {'network_info': {'network_type': 'vxlan',
                          'segmentation_id': '16777215'}},
        {'network_info': {'network_type': 'gre', 'segmentation_id': '100'}},
        {'network_info': {'network_type': 'gre', 'segmentation_id': '1'}},
        {'network_info': {'network_type': 'gre',
                          'segmentation_id': '4294967295'}},
        {'network_info': {'network_type': 'flat', 'segmentation_id': None}},
        {'network_info': {'network_type': 'flat', 'segmentation_id': 0}},
        {'network_info': {'network_type': None, 'segmentation_id': None}},
        {'network_info': {'network_type': None, 'segmentation_id': 0}})
    @ddt.unpack
    def test_validate_segmentation_id_with_valid_values(self, network_info):
self.share_manager._validate_segmentation_id(network_info)

    @ddt.data(
        {'network_info': {'network_type': 'vlan', 'segmentation_id': None}},
        {'network_info': {'network_type': 'vlan', 'segmentation_id': -1}},
        {'network_info': {'network_type': 'vlan', 'segmentation_id': 0}},
        {'network_info': {'network_type': 'vlan', 'segmentation_id': '4095'}},
        {'network_info': {'network_type': 'vxlan', 'segmentation_id': None}},
        {'network_info': {'network_type': 'vxlan', 'segmentation_id': 0}},
        {'network_info': {'network_type': 'vxlan',
                          'segmentation_id': '16777216'}},
        {'network_info': {'network_type': 'gre', 'segmentation_id': None}},
        {'network_info': {'network_type': 'gre', 'segmentation_id': 0}},
        {'network_info': {'network_type': 'gre',
                          'segmentation_id': '4294967296'}},
        {'network_info': {'network_type': 'flat', 'segmentation_id': '1000'}},
        {'network_info': {'network_type': None, 'segmentation_id': '1000'}})
    @ddt.unpack
    def test_validate_segmentation_id_with_invalid_values(self, network_info):
self.assertRaises(exception.NetworkBadConfigurationException,
self.share_manager._validate_segmentation_id,
network_info)

    @ddt.data(5, 70)
    def test_verify_server_cleanup_interval_invalid_cases(self, val):
data = dict(DEFAULT=dict(unused_share_server_cleanup_interval=val))
with test_utils.create_temp_config_with_opts(data):
self.assertRaises(exception.InvalidParameterValue,
manager.ShareManager)

    @ddt.data(10, 36, 60)
    def test_verify_server_cleanup_interval_valid_cases(self, val):
data = dict(DEFAULT=dict(unused_share_server_cleanup_interval=val))
with test_utils.create_temp_config_with_opts(data):
manager.ShareManager()

    @mock.patch.object(db, 'share_server_get_all_unused_deletable',
                       mock.Mock())
    @mock.patch.object(manager.ShareManager, 'delete_share_server',
                       mock.Mock())
    def test_delete_free_share_servers_cleanup_disabled(self):
data = dict(DEFAULT=dict(automatic_share_server_cleanup=False))
with test_utils.create_temp_config_with_opts(data):
share_manager = manager.ShareManager()
share_manager.driver.initialized = True
share_manager.delete_free_share_servers(self.context)
self.assertFalse(db.share_server_get_all_unused_deletable.called)

    @mock.patch.object(db, 'share_server_get_all_unused_deletable',
                       mock.Mock())
    @mock.patch.object(manager.ShareManager, 'delete_share_server',
                       mock.Mock())
    def test_delete_free_share_servers_driver_handles_ss_disabled(self):
data = dict(DEFAULT=dict(driver_handles_share_servers=False))
with test_utils.create_temp_config_with_opts(data):
share_manager = manager.ShareManager()
share_manager.driver.initialized = True
share_manager.delete_free_share_servers(self.context)
self.assertFalse(db.share_server_get_all_unused_deletable.called)
self.assertFalse(share_manager.delete_share_server.called)

    @mock.patch.object(db, 'share_server_get_all_unused_deletable',
                       mock.Mock(return_value=['server1', ]))
    @mock.patch.object(manager.ShareManager, 'delete_share_server',
                       mock.Mock())
    @mock.patch.object(timeutils, 'utcnow', mock.Mock(
        return_value=datetime.timedelta(minutes=20)))
    def test_delete_free_share_servers(self):
self.share_manager.delete_free_share_servers(self.context)
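        # utcnow() is mocked to timedelta(minutes=20); assuming the default
        # unused-server cleanup interval of 10 minutes, the cutoff handed to
        # the db API works out to 20 - 10 = 10 minutes.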
db.share_server_get_all_unused_deletable.assert_called_once_with(
self.context,
self.share_manager.host,
datetime.timedelta(minutes=10))
self.share_manager.delete_share_server.assert_called_once_with(
self.context,
'server1')
timeutils.utcnow.assert_called_once_with()

    def test_extend_share_invalid(self):
share = db_utils.create_share()
share_id = share['id']
self.mock_object(self.share_manager, 'driver')
self.mock_object(self.share_manager.db, 'share_update')
self.mock_object(quota.QUOTAS, 'rollback')
self.mock_object(self.share_manager.driver, 'extend_share',
mock.Mock(side_effect=Exception('fake')))
self.assertRaises(
exception.ShareExtendingError,
self.share_manager.extend_share, self.context, share_id, 123, {})

    def test_extend_share(self):
share = db_utils.create_share()
share_id = share['id']
new_size = 123
shr_update = {
'size': int(new_size),
'status': constants.STATUS_AVAILABLE.lower()
}
reservations = {}
fake_share_server = 'fake'
        # Alias that avoids shadowing the imported ``manager`` module.
        share_manager = self.share_manager
        self.mock_object(share_manager, 'driver')
        self.mock_object(share_manager.db, 'share_get',
                         mock.Mock(return_value=share))
        self.mock_object(share_manager.db, 'share_update',
                         mock.Mock(return_value=share))
        self.mock_object(quota.QUOTAS, 'commit')
        self.mock_object(share_manager.driver, 'extend_share')
        self.mock_object(share_manager, '_get_share_server',
                         mock.Mock(return_value=fake_share_server))
        self.share_manager.extend_share(self.context, share_id,
                                        new_size, reservations)
        self.assertTrue(share_manager._get_share_server.called)
        share_manager.driver.extend_share.assert_called_once_with(
            utils.IsAMatcher(models.ShareInstance),
            new_size, share_server=fake_share_server
        )
        quota.QUOTAS.commit.assert_called_once_with(
            mock.ANY, reservations, project_id=share['project_id'])
        share_manager.db.share_update.assert_called_once_with(
            mock.ANY, share_id, shr_update
        )

    def test_shrink_share_quota_error(self):
size = 5
new_size = 1
share = db_utils.create_share(size=size)
share_id = share['id']
self.mock_object(self.share_manager.db, 'share_update')
self.mock_object(quota.QUOTAS, 'reserve',
mock.Mock(side_effect=Exception('fake')))
self.assertRaises(
exception.ShareShrinkingError,
self.share_manager.shrink_share, self.context, share_id, new_size)
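        # Shrinking frees capacity, so the reservation is checked below with
        # a negative gigabytes delta (new_size - size = 1 - 5 = -4).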
quota.QUOTAS.reserve.assert_called_with(
mock.ANY,
project_id=six.text_type(share['project_id']),
gigabytes=new_size - size
)
self.assertTrue(self.share_manager.db.share_update.called)

    @ddt.data({'exc': exception.InvalidShare('fake'),
               'status': constants.STATUS_SHRINKING_ERROR},
              {'exc': exception.ShareShrinkingPossibleDataLoss("fake"),
               'status': constants.STATUS_SHRINKING_POSSIBLE_DATA_LOSS_ERROR})
    @ddt.unpack
    def test_shrink_share_invalid(self, exc, status):
share = db_utils.create_share()
new_size = 1
share_id = share['id']
self.mock_object(self.share_manager, 'driver')
self.mock_object(self.share_manager.db, 'share_update')
self.mock_object(self.share_manager.db, 'share_get',
mock.Mock(return_value=share))
self.mock_object(quota.QUOTAS, 'reserve')
self.mock_object(quota.QUOTAS, 'rollback')
self.mock_object(self.share_manager.driver, 'shrink_share',
mock.Mock(side_effect=exc))
self.assertRaises(
exception.ShareShrinkingError,
self.share_manager.shrink_share, self.context, share_id, new_size)
self.share_manager.driver.shrink_share.assert_called_once_with(
utils.IsAMatcher(models.ShareInstance),
new_size, share_server=None
)
self.share_manager.db.share_update.assert_called_once_with(
mock.ANY, share_id, {'status': status}
)
self.assertTrue(quota.QUOTAS.reserve.called)
self.assertTrue(quota.QUOTAS.rollback.called)
self.assertTrue(self.share_manager.db.share_get.called)

    def test_shrink_share(self):
share = db_utils.create_share()
share_id = share['id']
new_size = 123
shr_update = {
'size': int(new_size),
'status': constants.STATUS_AVAILABLE
}
fake_share_server = 'fake'
        # Alias that avoids shadowing the imported ``manager`` module.
        share_manager = self.share_manager
        self.mock_object(share_manager, 'driver')
        self.mock_object(share_manager.db, 'share_get',
                         mock.Mock(return_value=share))
        self.mock_object(share_manager.db, 'share_update',
                         mock.Mock(return_value=share))
        self.mock_object(quota.QUOTAS, 'commit')
        self.mock_object(share_manager.driver, 'shrink_share')
        self.mock_object(share_manager, '_get_share_server',
                         mock.Mock(return_value=fake_share_server))
        self.share_manager.shrink_share(self.context, share_id, new_size)
        self.assertTrue(share_manager._get_share_server.called)
        share_manager.driver.shrink_share.assert_called_once_with(
            utils.IsAMatcher(models.ShareInstance),
            new_size, share_server=fake_share_server
        )
        quota.QUOTAS.commit.assert_called_once_with(
            mock.ANY, mock.ANY, project_id=share['project_id'])
        share_manager.db.share_update.assert_called_once_with(
            mock.ANY, share_id, shr_update
        )

    def test_report_driver_status_driver_handles_ss_false(self):
fake_stats = {'field': 'val'}
fake_pool = {'name': 'pool1'}
self.share_manager.last_capabilities = {'field': 'old_val'}
self.mock_object(self.share_manager, 'driver', mock.Mock())
driver = self.share_manager.driver
driver.get_share_stats = mock.Mock(return_value=fake_stats)
self.mock_object(db, 'share_server_get_all_by_host', mock.Mock())
driver.driver_handles_share_servers = False
driver.get_share_server_pools = mock.Mock(return_value=fake_pool)
self.share_manager._report_driver_status(self.context)
driver.get_share_stats.assert_called_once_with(
refresh=True)
self.assertFalse(db.share_server_get_all_by_host.called)
self.assertFalse(driver.get_share_server_pools.called)
self.assertEqual(fake_stats, self.share_manager.last_capabilities)

    def test_report_driver_status_driver_handles_ss(self):
fake_stats = {'field': 'val'}
fake_ss = {'id': '1234'}
fake_pool = {'name': 'pool1'}
self.mock_object(self.share_manager, 'driver', mock.Mock())
driver = self.share_manager.driver
driver.get_share_stats = mock.Mock(return_value=fake_stats)
self.mock_object(db, 'share_server_get_all_by_host', mock.Mock(
return_value=[fake_ss]))
driver.driver_handles_share_servers = True
driver.get_share_server_pools = mock.Mock(return_value=fake_pool)
self.share_manager._report_driver_status(self.context)
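        # With driver_handles_share_servers=True, the reported stats are
        # expected to grow a server_pools_mapping keyed by share server id.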
driver.get_share_stats.assert_called_once_with(refresh=True)
db.share_server_get_all_by_host.assert_called_once_with(
self.context,
self.share_manager.host)
driver.get_share_server_pools.assert_called_once_with(fake_ss)
expected_stats = {
'field': 'val',
'server_pools_mapping': {
'1234': fake_pool},
}
self.assertEqual(expected_stats, self.share_manager.last_capabilities)

    def test_report_driver_status_empty_share_stats(self):
old_capabilities = {'field': 'old_val'}
fake_pool = {'name': 'pool1'}
self.share_manager.last_capabilities = old_capabilities
self.mock_object(self.share_manager, 'driver', mock.Mock())
driver = self.share_manager.driver
driver.get_share_stats = mock.Mock(return_value={})
self.mock_object(db, 'share_server_get_all_by_host', mock.Mock())
driver.driver_handles_share_servers = True
driver.get_share_server_pools = mock.Mock(return_value=fake_pool)
self.share_manager._report_driver_status(self.context)
driver.get_share_stats.assert_called_once_with(refresh=True)
self.assertFalse(db.share_server_get_all_by_host.called)
self.assertFalse(driver.get_share_server_pools.called)
self.assertEqual(old_capabilities,
self.share_manager.last_capabilities)

    def test_create_consistency_group(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group',
mock.Mock(return_value=None))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})

    def test_create_cg_with_share_network_driver_not_handles_servers(self):
manager.CONF.set_default('driver_handles_share_servers', False)
self.mock_object(
self.share_manager.driver.configuration, 'safe_get',
mock.Mock(return_value=False))
cg_id = 'fake_cg_id'
share_network_id = 'fake_sn'
fake_cg = {'id': 'fake_id', 'share_network_id': share_network_id}
self.mock_object(
self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update')
self.assertRaises(
exception.ManilaException,
self.share_manager.create_consistency_group, self.context, cg_id)
self.share_manager.db.consistency_group_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), cg_id)
self.share_manager.db.consistency_group_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), cg_id,
{'status': constants.STATUS_ERROR})

    def test_create_cg_with_share_network_driver_handles_servers(self):
manager.CONF.set_default('driver_handles_share_servers', True)
self.mock_object(
self.share_manager.driver.configuration, 'safe_get',
mock.Mock(return_value=True))
share_network_id = 'fake_sn'
fake_cg = {'id': 'fake_id', 'share_network_id': share_network_id,
'host': "fake_host"}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager, '_provide_share_server_for_cg',
mock.Mock(return_value=({}, fake_cg)))
self.mock_object(self.share_manager.driver,
'create_consistency_group',
mock.Mock(return_value=None))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})

    def test_create_consistency_group_with_update(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group',
mock.Mock(return_value={'foo': 'bar'}))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_any_call(mock.ANY, 'fake_id', {'foo': 'bar'})
self.share_manager.db.consistency_group_update.\
assert_any_call(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})

    def test_create_consistency_group_with_error(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group',
mock.Mock(side_effect=exception.Error))
self.assertRaises(exception.Error,
self.share_manager.create_consistency_group,
self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_ERROR})

    def test_create_consistency_group_from_cgsnapshot(self):
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': [], 'share_server_id': 'fake_ss_id'}
fake_ss = {'id': 'fake_ss_id', 'share_network_id': 'fake_sn'}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': [],
'consistency_group': {'share_server_id': fake_ss['id']}}
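        # The new CG should land on the same share server as the source
        # cgsnapshot's consistency group, hence the 'fake_ss_id' wiring.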
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'share_server_get',
mock.Mock(
return_value=fake_ss))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(return_value=(None, None)))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
self.share_manager.db.share_server_get(mock.ANY, 'fake_ss_id')
self.share_manager.driver.create_consistency_group_from_cgsnapshot.\
assert_called_once_with(
mock.ANY, fake_cg, fake_snap, share_server=fake_ss)
def test_create_cg_cgsnapshot_share_network_driver_not_handles_servers(
self):
manager.CONF.set_default('driver_handles_share_servers', False)
self.mock_object(
self.share_manager.driver.configuration, 'safe_get',
mock.Mock(return_value=False))
cg_id = 'fake_cg_id'
share_network_id = 'fake_sn'
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': [], 'share_network_id': share_network_id,
'host': "fake_host"}
self.mock_object(
self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'consistency_group_update')
self.assertRaises(exception.ManilaException,
self.share_manager.create_consistency_group,
self.context, cg_id)
self.share_manager.db.consistency_group_get.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), cg_id)
self.share_manager.db.consistency_group_update.assert_called_once_with(
utils.IsAMatcher(context.RequestContext), cg_id,
{'status': constants.STATUS_ERROR})
def test_create_cg_from_cgsnapshot_share_network_driver_handles_servers(
self):
manager.CONF.set_default('driver_handles_share_servers', True)
self.mock_object(self.share_manager.driver.configuration, 'safe_get',
mock.Mock(return_value=True))
share_network_id = 'fake_sn'
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': [], 'share_network_id': share_network_id}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager, '_provide_share_server_for_cg',
mock.Mock(return_value=({}, fake_cg)))
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(return_value=(None, None)))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
def test_create_consistency_group_from_cgsnapshot_with_update(self):
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': []}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(return_value=({'foo': 'bar'}, None)))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_any_call(mock.ANY, 'fake_id', {'foo': 'bar'})
self.share_manager.db.consistency_group_update.\
assert_any_call(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
def test_create_consistency_group_from_cgsnapshot_with_share_update(self):
fake_share = {'id': 'fake_share_id'}
fake_export_locations = ['my_export_location']
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': [fake_share]}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'consistency_group_update')
self.mock_object(self.share_manager.db, 'share_instance_update')
self.mock_object(self.share_manager.db,
'share_export_locations_update')
fake_share_update_list = [{'id': fake_share['id'],
'foo': 'bar',
'export_locations': fake_export_locations}]
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(
return_value=(None, fake_share_update_list)))
self.share_manager.create_consistency_group(self.context, "fake_id")
self.share_manager.db.share_instance_update.\
assert_any_call(mock.ANY, 'fake_share_id', {'foo': 'bar'})
self.share_manager.db.share_export_locations_update.\
assert_any_call(mock.ANY, 'fake_share_id', fake_export_locations)
self.share_manager.db.consistency_group_update.\
assert_any_call(mock.ANY, 'fake_id',
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
def test_create_consistency_group_from_cgsnapshot_with_error(self):
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': []}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_consistency_group_id',
mock.Mock(return_value=[]))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(side_effect=exception.Error))
self.assertRaises(exception.Error,
self.share_manager.create_consistency_group,
self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_ERROR})
def test_create_consistency_group_from_cgsnapshot_with_share_error(self):
fake_share = {'id': 'fake_share_id'}
fake_cg = {'id': 'fake_id', 'source_cgsnapshot_id': 'fake_snap_id',
'shares': [fake_share]}
fake_snap = {'id': 'fake_snap_id', 'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db,
'share_instances_get_all_by_consistency_group_id',
mock.Mock(return_value=[fake_share]))
self.mock_object(self.share_manager.db, 'consistency_group_update')
self.mock_object(self.share_manager.db, 'share_instance_update')
self.mock_object(self.share_manager.driver,
'create_consistency_group_from_cgsnapshot',
mock.Mock(side_effect=exception.Error))
self.assertRaises(exception.Error,
self.share_manager.create_consistency_group,
self.context, "fake_id")
self.share_manager.db.share_instance_update.\
assert_any_call(mock.ANY, 'fake_share_id',
{'status': constants.STATUS_ERROR})
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_ERROR})
def test_delete_consistency_group(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_destroy',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'delete_consistency_group',
mock.Mock(return_value=None))
self.share_manager.delete_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_destroy.\
assert_called_once_with(mock.ANY, 'fake_id')
def test_delete_consistency_group_with_update(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_destroy',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'delete_consistency_group',
mock.Mock(return_value={'foo': 'bar'}))
self.share_manager.delete_consistency_group(self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id', {'foo': 'bar'})
self.share_manager.db.consistency_group_destroy.\
assert_called_once_with(mock.ANY, 'fake_id')
def test_delete_consistency_group_with_error(self):
fake_cg = {'id': 'fake_id'}
self.mock_object(self.share_manager.db, 'consistency_group_get',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.db, 'consistency_group_update',
mock.Mock(return_value=fake_cg))
self.mock_object(self.share_manager.driver,
'delete_consistency_group',
mock.Mock(side_effect=exception.Error))
self.assertRaises(exception.Error,
self.share_manager.delete_consistency_group,
self.context, "fake_id")
self.share_manager.db.consistency_group_update.\
assert_called_once_with(mock.ANY, 'fake_id',
{'status': constants.STATUS_ERROR})
def test_create_cgsnapshot(self):
fake_snap = {'id': 'fake_snap_id', 'consistency_group': {},
'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'cgsnapshot_update',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.driver,
'create_cgsnapshot',
mock.Mock(return_value=(None, None)))
self.share_manager.create_cgsnapshot(self.context, fake_snap['id'])
self.share_manager.db.cgsnapshot_update.\
assert_called_once_with(mock.ANY, fake_snap['id'],
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
def test_create_cgsnapshot_with_update(self):
fake_snap = {'id': 'fake_snap_id', 'consistency_group': {},
'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'cgsnapshot_update',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.driver,
'create_cgsnapshot',
mock.Mock(return_value=({'foo': 'bar'}, None)))
self.share_manager.create_cgsnapshot(self.context, fake_snap['id'])
self.share_manager.db.cgsnapshot_update.\
assert_any_call(mock.ANY, 'fake_snap_id', {'foo': 'bar'})
self.share_manager.db.cgsnapshot_update.assert_any_call(
mock.ANY, fake_snap['id'],
{'status': constants.STATUS_AVAILABLE, 'created_at': mock.ANY})
def test_create_cgsnapshot_with_member_update(self):
fake_member = {
'id': 'fake_member_id',
'share_instance_id': 'blah',
}
fake_member_update = {
'id': 'fake_member_id',
'foo': 'bar'
}
fake_snap = {'id': 'fake_snap_id', 'consistency_group': {},
'cgsnapshot_members': [fake_member]}
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'cgsnapshot_update',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'cgsnapshot_member_update')
self.mock_object(self.share_manager.db, 'share_instance_get',
mock.Mock(return_value={'id': 'blah'}))
self.mock_object(self.share_manager.driver, 'create_cgsnapshot',
mock.Mock(return_value=(None, [fake_member_update])))
self.share_manager.create_cgsnapshot(self.context, fake_snap['id'])
self.share_manager.db.cgsnapshot_update.assert_any_call(
mock.ANY, fake_snap['id'],
{'cgsnapshot_members': [fake_member_update]})
self.share_manager.db.cgsnapshot_update.\
assert_any_call(mock.ANY, fake_snap['id'],
{'status': constants.STATUS_AVAILABLE,
'created_at': mock.ANY})
self.assertTrue(self.share_manager.db.cgsnapshot_member_update.called)
def test_create_cgsnapshot_with_error(self):
fake_snap = {'id': 'fake_snap_id', 'consistency_group': {},
'cgsnapshot_members': []}
self.mock_object(self.share_manager.db, 'cgsnapshot_get',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.db, 'cgsnapshot_update',
mock.Mock(return_value=fake_snap))
self.mock_object(self.share_manager.driver,
'create_cgsnapshot',
mock.Mock(side_effect=exception.Error))
self.assertRaises(exception.Error,
self.share_manager.create_cgsnapshot,
self.context, fake_snap['id'])
self.share_manager.db.cgsnapshot_update.\
assert_called_once_with(mock.ANY, fake_snap['id'],
{'status': constants.STATUS_ERROR})
def test_get_migration_info(self):
share_instance = 'fake-share-instance'
share_instance_id = 'fake-id'
share_server = 'fake-share-server'
manager = self.share_manager
self.mock_object(manager.db, 'share_instance_get',
mock.Mock(return_value=share_instance))
self.mock_object(manager.driver, 'get_migration_info')
manager.get_migration_info(self.context,
share_instance_id, share_server)
manager.db.share_instance_get.assert_called_once_with(
self.context, share_instance_id, with_share_data=True
)
manager.driver.get_migration_info.assert_called_once_with(
self.context, share_instance, share_server
)
def test_get_driver_migration_info(self):
share_instance = 'fake-share-instance'
share_instance_id = 'fake-id'
share_server = 'fake-share-server'
manager = self.share_manager
self.mock_object(manager.db, 'share_instance_get',
mock.Mock(return_value=share_instance))
self.mock_object(manager.driver, 'get_driver_migration_info')
manager.get_driver_migration_info(self.context, share_instance_id,
share_server)
manager.db.share_instance_get.assert_called_once_with(
self.context, share_instance_id, with_share_data=True
)
manager.driver.get_driver_migration_info.assert_called_once_with(
self.context, share_instance, share_server
)
def test_migrate_share_not_moved_by_driver(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
}
share_server = {
'id': 'fake_share_server_id',
'share_network_id': 'fake_share_network_id',
'host': 'fake_host',
'status': 'fake_status',
'backend_details': {'foo': 'bar'},
}
migration_info = 'fake-info'
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(rpcapi.ShareAPI, 'get_driver_migration_info',
mock.Mock(return_value=migration_info))
self.mock_object(manager.driver,
'migrate_share',
mock.Mock(return_value=[False, None]))
self.mock_object(manager, '_migrate_share_generic',
mock.Mock(return_value=True))
manager.migrate_share(self.context, share_id, host)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager.driver.migrate_share.assert_called_once_with(
self.context, utils.IsAMatcher(models.ShareInstance),
host, migration_info
)
manager._migrate_share_generic.assert_called_once_with(
self.context, utils.IsAMatcher(models.Share), host
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_success
)
def test_migrate_share_driver_migration(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
}
share_server = {
'id': 'fake_share_server_id',
'share_network_id': 'fake_share_network_id',
'host': 'fake_host',
'status': 'fake_status',
'backend_details': {'foo': 'bar'},
}
migration_info = 'fake-info'
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(rpcapi.ShareAPI, 'get_driver_migration_info',
mock.Mock(return_value=migration_info))
self.mock_object(manager.driver,
'migrate_share',
mock.Mock(return_value=[True, None]))
self.mock_object(manager.db, 'share_instance_update')
manager.migrate_share(self.context, share_id, host)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager.driver.migrate_share.assert_called_once_with(
self.context, utils.IsAMatcher(models.ShareInstance),
host, migration_info
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_success
)
def test_migrate_share_driver_migration_instance_update(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
}
share_server = {
'id': 'fake_share_server_id',
'share_network_id': 'fake_share_network_id',
'host': 'fake_host',
'status': 'fake_status',
'backend_details': {'foo': 'bar'},
}
migration_info = 'fake-info'
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(rpcapi.ShareAPI, 'get_driver_migration_info',
mock.Mock(return_value=migration_info))
self.mock_object(manager.driver,
'migrate_share',
mock.Mock(return_value=[True, mock.ANY]))
self.mock_object(manager.db, 'share_instance_update')
manager.migrate_share(self.context, share_id, host)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager.driver.migrate_share.assert_called_once_with(
self.context, utils.IsAMatcher(models.ShareInstance),
host, migration_info
)
manager.db.share_instance_update.assert_called_once_with(
self.context, mock.ANY, mock.ANY
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_success
)
def test_migrate_share_exception_driver(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_error = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_ERROR
}
share_server = {
'id': 'fake_share_server_id',
'share_network_id': 'fake_share_network_id',
'host': 'fake_host',
'status': 'fake_status',
'backend_details': {'foo': 'bar'},
}
migration_info = 'fake-info'
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(rpcapi.ShareAPI, 'get_driver_migration_info',
mock.Mock(return_value=migration_info))
self.mock_object(manager.driver,
'migrate_share',
mock.Mock(side_effect=exception.ManilaException))
self.mock_object(manager, '_migrate_share_generic',
mock.Mock(return_value=False))
self.assertRaises(exception.ShareMigrationFailed,
manager.migrate_share,
self.context, share_id, host)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager.driver.migrate_share.assert_called_once_with(
self.context, utils.IsAMatcher(models.ShareInstance),
host, migration_info
)
manager._migrate_share_generic.assert_called_once_with(
self.context, utils.IsAMatcher(models.Share), host
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_error
)
def test_migrate_share_exception_generic(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_error = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_ERROR
}
share_server = 'fake-share-server'
migration_info = 'fake-info'
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_get_share_server',
mock.Mock(return_value=share_server))
self.mock_object(rpcapi.ShareAPI, 'get_driver_migration_info',
mock.Mock(return_value=migration_info))
self.mock_object(manager.driver,
'migrate_share',
mock.Mock(return_value=[False, None]))
self.mock_object(manager,
'_migrate_share_generic',
mock.Mock(side_effect=Exception))
self.assertRaises(exception.ShareMigrationFailed,
manager.migrate_share,
self.context, share_id, host, migration_info)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_error
)
def test_migrate_share_force_host_copy(self):
share = db_utils.create_share()
share_id = share['id']
host = 'fake-host'
status_migrating = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_IN_PROGRESS
}
status_success = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_SUCCESS
}
manager = self.share_manager
self.mock_object(manager, 'driver')
self.mock_object(manager.db, 'share_update')
self.mock_object(manager, '_migrate_share_generic',
mock.Mock(return_value=True))
manager.migrate_share(self.context, share_id, host, True)
manager.db.share_update.assert_any_call(
self.context, share_id, status_migrating
)
manager._migrate_share_generic.assert_called_once_with(
self.context, utils.IsAMatcher(models.Share), host
)
manager.db.share_update.assert_any_call(
self.context, share_id, status_success
)
def test_migrate_share_generic(self):
share = db_utils.create_share()
share_id = share['id']
host = {'host': 'fake-host'}
status_completing = {
'task_state': constants.STATUS_TASK_STATE_MIGRATION_COMPLETING
}
status_inactive = {'status': constants.STATUS_INACTIVE}
status_available = {'status': constants.STATUS_AVAILABLE}
share_server = {
'id': 'fake_share_server_id',
'share_network_id': 'fake_share_network_id',
'host': 'fake_host',
'status': 'fake_status',
'backend_details': {'foo': 'bar'},
}
new_share_server = {
'id': 'fake_share_server_id2',
'share_network_id': 'fake_share_network_id2',
'host': 'fake_host2',
'status': 'fake_status2',
'backend_details': {'foo2': 'bar2'},
}
src_migration_info = 'fake-src-migration-info'
dest_migration_info = 'fake-dest-migration-info'
manager = self.share_manager
manager.create_share_instance(self.context, share.instance['id'])
share_instance = manager._get_share_instance(self.context, share)
new_share_instance = {'id': 'fake-id',
'status': constants.STATUS_CREATING}
self.mock_object(manager, '_get_share_instance',
mock.Mock(return_value=share_instance))
self.mock_object(migration.ShareMigrationHelper,
'change_to_read_only')
self.mock_object(migration.ShareMigrationHelper,
'create_instance_and_wait',
mock.Mock(return_value=new_share_instance))
self.mock_object(manager.db, 'share_instance_update')
self.mock_object(
manager,
'_get_share_server',
mock.Mock(side_effect=[share_server, new_share_server])
)
self.mock_object(manager.driver, 'get_migration_info',
mock.Mock(return_value=src_migration_info))
self.mock_object(rpcapi.ShareAPI, 'get_migration_info',
mock.Mock(return_value=dest_migration_info))
self.mock_object(manager.driver, 'copy_share_data')
self.mock_object(manager.db, 'share_update')
self.mock_object(migration.ShareMigrationHelper,
'revert_access_rules')
self.mock_object(migration.ShareMigrationHelper,
'delete_instance_and_wait')
manager._migrate_share_generic(self.context, share, host)
manager._get_share_instance.assert_called_once_with(
self.context, share
)
manager.db.share_instance_update.assert_any_call(
self.context, new_share_instance['id'], status_inactive
)
manager._get_share_server.assert_any_call(
mock.ANY, share_instance
)
manager._get_share_server.assert_any_call(
mock.ANY, new_share_instance
)
manager.driver.get_migration_info.assert_called_once_with(
self.context, share_instance, share_server
)
manager.driver.copy_share_data.assert_called_once_with(
self.context, mock.ANY, share, share_instance,
share_server, new_share_instance, new_share_server,
src_migration_info, dest_migration_info
)
manager.db.share_update.assert_called_once_with(
self.context, share_id, status_completing
)
manager.db.share_instance_update.assert_any_call(
self.context, new_share_instance['id'], status_available
)
def test_migrate_share_generic_exception(self):
share = db_utils.create_share()
host = {'host': 'fake-host'}
manager = self.share_manager
manager.create_share_instance(self.context, share.instance['id'])
share_instance = manager._get_share_instance(self.context, share)
self.mock_object(manager, '_get_share_instance',
mock.Mock(return_value=share_instance))
self.mock_object(migration.ShareMigrationHelper,
'change_to_read_only')
self.mock_object(migration.ShareMigrationHelper,
'create_instance_and_wait',
mock.Mock(side_effect=exception.ShareMigrationFailed(
reason='fake')))
self.mock_object(migration.ShareMigrationHelper,
'revert_access_rules')
self.assertRaises(exception.ShareMigrationFailed,
manager._migrate_share_generic,
self.context, share, host)
@ddt.ddt
class HookWrapperTestCase(test.TestCase):
def setUp(self):
super(HookWrapperTestCase, self).setUp()
self.configuration = mock.Mock()
self.configuration.safe_get.return_value = True
@manager.add_hooks
def _fake_wrapped_method(self, some_arg, some_kwarg):
return "foo"
def test_hooks_enabled(self):
self.hooks = [mock.Mock(return_value=i) for i in range(2)]
result = self._fake_wrapped_method(
"some_arg", some_kwarg="some_kwarg_value")
self.assertEqual("foo", result)
for i, mock_hook in enumerate(self.hooks):
mock_hook.execute_pre_hook.assert_called_once_with(
"some_arg",
func_name="_fake_wrapped_method",
some_kwarg="some_kwarg_value")
mock_hook.execute_post_hook.assert_called_once_with(
"some_arg",
func_name="_fake_wrapped_method",
driver_action_results="foo",
pre_hook_data=self.hooks[i].execute_pre_hook.return_value,
some_kwarg="some_kwarg_value")
def test_hooks_disabled(self):
self.hooks = []
result = self._fake_wrapped_method(
"some_arg", some_kwarg="some_kwarg_value")
self.assertEqual("foo", result)
for mock_hook in self.hooks:
self.assertFalse(mock_hook.execute_pre_hook.called)
self.assertFalse(mock_hook.execute_post_hook.called)
| apache-2.0 | 7,332,564,317,221,818,000 | 44.449001 | 79 | 0.588356 | false |
Hilary93/doomai | learning/learning2.py | 1 | 10131 | #!/usr/bin/python
import itertools as it
import pickle
from random import sample, randint, random
from time import time
from vizdoom import *
import cv2
import numpy as np
import theano
from lasagne.init import GlorotUniform, Constant
from lasagne.layers import Conv2DLayer, InputLayer, DenseLayer, MaxPool2DLayer, get_output, get_all_params, \
get_all_param_values, set_all_param_values
from lasagne.nonlinearities import rectify
from lasagne.objectives import squared_error
from lasagne.updates import rmsprop
from theano import tensor
from tqdm import *
from time import sleep
# Q-learning settings:
replay_memory_size = 10000
discount_factor = 0.99
start_epsilon = float(1.0)
end_epsilon = float(0.1)
epsilon = start_epsilon
static_epsilon_steps = 5000
epsilon_decay_steps = 20000
epsilon_decay_stride = (start_epsilon - end_epsilon) / epsilon_decay_steps
# Max reward is about 100 (for killing) so it'll be normalized
reward_scale = 1
# Some of the network's and learning settings:
learning_rate = 0.00001
batch_size = 32
epochs = 0
training_steps_per_epoch = 5000
test_episodes_per_epoch = 100
# Other parameters
skiprate = 7
downsampled_x = 60
downsampled_y = int(2/3.0*downsampled_x)
episodes_to_watch = 10
# Where to save and load network's weights.
params_savefile = "basic_params"
params_loadfile = "basic_params"
# Function for converting images
def convert(img):
img = img[0].astype(np.float32) / 255.0
img = cv2.resize(img, (downsampled_x, downsampled_y))
return img
# Replay memory:
class ReplayMemory:
def __init__(self, capacity):
state_shape = (capacity, 1, downsampled_y, downsampled_x)
self.s1 = np.zeros(state_shape, dtype=np.float32)
self.s2 = np.zeros(state_shape, dtype=np.float32)
self.a = np.zeros(capacity, dtype=np.int32)
self.r = np.zeros(capacity, dtype=np.float32)
self.nonterminal = np.zeros(capacity, dtype=np.bool_)
self.size = 0
self.capacity = capacity
self.oldest_index = 0
def add_transition(self, s1, action, s2, reward):
self.s1[self.oldest_index, 0] = s1
if s2 is None:
self.nonterminal[self.oldest_index] = False
else:
self.s2[self.oldest_index, 0] = s2
self.nonterminal[self.oldest_index] = True
self.a[self.oldest_index] = action
self.r[self.oldest_index] = reward
self.oldest_index = (self.oldest_index + 1) % self.capacity
self.size = min(self.size + 1, self.capacity)
def get_sample(self, sample_size):
i = sample(range(0, self.size), sample_size)
return self.s1[i], self.s2[i], self.a[i], self.r[i], self.nonterminal[i]
# Creates the network:
def create_network(available_actions_num):
# Creates the input variables
s1 = tensor.tensor4("States")
a = tensor.vector("Actions", dtype="int32")
q2 = tensor.vector("Next State best Q-Value")
r = tensor.vector("Rewards")
nonterminal = tensor.vector("Nonterminal", dtype="int8")
# Creates the input layer of the network.
dqn = InputLayer(shape=[None, 1, downsampled_y, downsampled_x], input_var=s1)
# Adds 3 convolutional layers, each followed by a max pooling layer.
dqn = Conv2DLayer(dqn, num_filters=32, filter_size=[8, 8],
nonlinearity=rectify, W=GlorotUniform("relu"),
b=Constant(.1))
dqn = MaxPool2DLayer(dqn, pool_size=[2, 2])
dqn = Conv2DLayer(dqn, num_filters=64, filter_size=[4, 4],
nonlinearity=rectify, W=GlorotUniform("relu"),
b=Constant(.1))
dqn = MaxPool2DLayer(dqn, pool_size=[2, 2])
dqn = Conv2DLayer(dqn, num_filters=64, filter_size=[3, 3],
nonlinearity=rectify, W=GlorotUniform("relu"),
b=Constant(.1))
dqn = MaxPool2DLayer(dqn, pool_size=[2, 2])
# Adds a single fully connected layer.
dqn = DenseLayer(dqn, num_units=512, nonlinearity=rectify, W=GlorotUniform("relu"),
b=Constant(.1))
# Adds a single fully connected layer which is the output layer.
# (no nonlinearity as it is for approximating an arbitrary real function)
dqn = DenseLayer(dqn, num_units=available_actions_num, nonlinearity=None)
# Theano stuff
q = get_output(dqn)
# Only q for the chosen actions is updated more or less according to following formula:
# target Q(s,a,t) = r + gamma * max Q(s2,_,t+1)
target_q = tensor.set_subtensor(q[tensor.arange(q.shape[0]), a], r + discount_factor * nonterminal * q2)
loss = squared_error(q, target_q).mean()
# Updates the parameters according to the computed gradient using rmsprop.
params = get_all_params(dqn, trainable=True)
updates = rmsprop(loss, params, learning_rate)
# Compiles theano functions
print "Compiling the network ..."
function_learn = theano.function([s1, q2, a, r, nonterminal], loss, updates=updates, name="learn_fn")
function_get_q_values = theano.function([s1], q, name="eval_fn")
function_get_best_action = theano.function([s1], tensor.argmax(q), name="test_fn")
print "Network compiled."
# Returns Theano objects for the net and functions.
# We wouldn't need the net anymore but it is nice to save your model.
return dqn, function_learn, function_get_q_values, function_get_best_action
# Creates and initializes the environment.
print "Initializing doom..."
game = DoomGame()
game.load_config("../../examples/config/learning2.cfg")
game.init()
print "Doom initialized."
# Creates all possible actions.
n = game.get_available_buttons_size()
actions = []
for perm in it.product([0, 1], repeat=n):
actions.append(list(perm))
# Creates replay memory which will store the transitions
memory = ReplayMemory(capacity=replay_memory_size)
net, learn, get_q_values, get_best_action = create_network(len(actions))
# Loads the network's parameters if the loadfile was specified
if params_loadfile is not None:
params = pickle.load(open(params_loadfile, "r"))
set_all_param_values(net, params)
# Makes an action according to epsilon greedy policy and performs a single backpropagation on the network.
def perform_learning_step():
# Checks the state and downsamples it.
s1 = convert(game.get_state().image_buffer)
# With probability epsilon makes a random action.
if random() <= epsilon:
a = randint(0, len(actions) - 1)
else:
# Chooses the best action according to the network.
a = get_best_action(s1.reshape([1, 1, downsampled_y, downsampled_x]))
reward = game.make_action(actions[a], skiprate + 1)
reward *= reward_scale
if game.is_episode_finished():
s2 = None
else:
s2 = convert(game.get_state().image_buffer)
# Remember the transition that was just experienced.
memory.add_transition(s1, a, s2, reward)
# Gets a single, random minibatch from the replay memory and learns from it.
if memory.size > batch_size:
s1, s2, a, reward, nonterminal = memory.get_sample(batch_size)
q2 = np.max(get_q_values(s2), axis=1)
loss = learn(s1, q2, a, reward, nonterminal)
else:
loss = 0
return loss
print "Starting the training!"
steps = 0
for epoch in range(epochs):
print "\nEpoch", epoch
train_time = 0
train_episodes_finished = 0
train_loss = []
train_rewards = []
train_start = time()
print "\nTraining ..."
game.new_episode()
for learning_step in tqdm(range(training_steps_per_epoch)):
# Learning and action is here.
train_loss.append(perform_learning_step())
# I
if game.is_episode_finished():
r = game.get_total_reward()
train_rewards.append(r)
game.new_episode()
train_episodes_finished += 1
steps += 1
if steps > static_epsilon_steps:
epsilon = max(end_epsilon, epsilon - epsilon_decay_stride)
train_end = time()
train_time = train_end - train_start
mean_loss = np.mean(train_loss)
print train_episodes_finished, "training episodes played."
print "Training results:"
train_rewards = np.array(train_rewards)
print "mean:", train_rewards.mean(), "std:", train_rewards.std(), "max:", train_rewards.max(), "min:", train_rewards.min(), "mean_loss:", mean_loss, "epsilon:", epsilon
print "t:", str(round(train_time, 2)) + "s"
# Testing
test_episode = []
test_rewards = []
test_start = time()
print "Testing..."
for test_episode in tqdm(range(test_episodes_per_epoch)):
game.new_episode()
while not game.is_episode_finished():
state = convert(game.get_state().image_buffer).reshape([1, 1, downsampled_y, downsampled_x])
best_action_index = get_best_action(state)
game.make_action(actions[best_action_index], skiprate + 1)
r = game.get_total_reward()
test_rewards.append(r)
test_end = time()
test_time = test_end - test_start
print "Test results:"
test_rewards = np.array(test_rewards)
print "mean:", test_rewards.mean(), "std:", test_rewards.std(), "max:", test_rewards.max(), "min:", test_rewards.min()
print "t:", str(round(test_time, 2)) + "s"
if params_savefile:
print "Saving network weigths to:", params_savefile
pickle.dump(get_all_param_values(net), open(params_savefile, "w"))
print "========================="
print "Training finished! Time to watch!"
game.close()
game.set_window_visible(True)
game.set_mode(Mode.ASYNC_PLAYER)
game.init()
# Sleeping time between episodes, for convenience.
episode_sleep = 0.5
for i in range(episodes_to_watch):
game.new_episode()
while not game.is_episode_finished():
state = convert(game.get_state().image_buffer).reshape([1, 1, downsampled_y, downsampled_x])
best_action_index = get_best_action(state)
game.set_action(actions[best_action_index])
for i in range(skiprate+1):
game.advance_action()
sleep(episode_sleep)
r = game.get_total_reward()
print "Total reward: ", r
| mit | 2,790,718,033,808,579,000 | 33.814433 | 172 | 0.65798 | false |
chromatic-universe/imap2017 | src/imap-python-gadget/cci_imap_gadget/core_on_login.py | 1 | 4756 | # core_on_login.py chromatic universe william k. johnson 2018
from time import sleep
#cci
from cci_imap_gadget.imap_gadget_base import cci_chilkat , \
cci_ecosys , \
cci_mini_imap_mail
from cci_imap_gadget.core_on_logout import on_logout
import cci_utils.cci_io_tools as io
# ----------------------------------------------------------------------------------------
class on_login( object ) :
"""
on_login
"""
def __init__( self ,
cci_chilkat = None ,
cci_mail = None ,
cci_ico = None ) :
"""
:param cci_chilkat:
:param cci_mail:
:param cci_ico:
"""
# logging
self._logger = io.init_logging( self.__class__.__name__ )
self._logger.info( self.__class__.__name__ + '...' )
self._cci = cci_chilkat
self._mail = cci_mail
self._ecosys = cci_ico
self._imap_states = set()
self._imap_states.add( 'non-authenticated' )
@property
def cci( self ) :
return self._cci
@cci.setter
def cci( self , cc ) :
self._cci = cc
@property
def mail( self ) :
return self._mail
@mail.setter
def mail( self , m ) :
self._mail = m
@property
def eco( self ) :
return self._ecosys
@eco.setter
def eco( self , ec ) :
self._eco = ec
@property
def logger( self ) :
return self._logger
@logger.setter
def logger( self , log ) :
self._logger = log
@property
def imap_states( self ) :
return self._imap_states
@imap_states.setter
def imap_states( self , states ) :
self._imap_states = states
def perform( self ) :
"""
:return:
"""
# xoonnect to an imap server.
self.cci.imap.put_KeepSessionLog( True )
self.cci.imap.put_VerboseLogging( True )
print( self.mail.imap_addr )
b_ret = self.cci.imap.Connect( self.mail.imap_addr )
if b_ret :
#login
b_ret = self.cci.imap.Login( self.mail.imap_mail_account ,
self.mail.imap_mail_auth )
if not b_ret :
self.logger.error( '...login failed....')
#self.logger.info( self.cci.imap.lastErrorText() )
self.logger.info( self.cci.imap.sessionLog() )
self.imap_states.remove( 'non-authenticated' )
self.imap_states.add( 'authenticated' )
sleep( 2 )
# ----------------------------------------------------------------------------------------
if __name__ == '__main__' :
try :
#imap default params
mini_mail = cci_mini_imap_mail( mail_account="wiljoh" , mail_auth="Argentina1" )
mini_mail.logger.info( repr( mini_mail) )
#cci microkernel
mini_ecosys = cci_ecosys( mta_addr='127.0.0.1' ,
mda_addr='127.0.0.1' ,
imap_mail=mini_mail )
mini_ecosys.logger.info( repr( mini_ecosys ) )
#chilkat instances
cci = cci_chilkat()
for idx in range( 0 , 9 ) :
login = on_login( cci , mini_mail , mini_ecosys )
login.perform()
#logout
logout = on_logout( cci , mini_mail , mini_ecosys )
logout.perform()
sleep( 8 )
except Exception as e :
print( str( e ) )
| mit | 2,335,134,393,039,799,300 | 34.22963 | 108 | 0.353869 | false |
wataash/Instr | instr/ke2636a.py | 1 | 3794 | import numpy as np
import unittest2
from instr.base import SourceMeter
class Keithley2636A(SourceMeter):
def __init__(self, rsrc=None, timeout_sec=600, reset=True):
self._smu = 'a'
idn = 'Keithley Instruments Inc., Model 2636A'
super().__init__(rsrc, idn, timeout_sec, reset)
@property
def smu(self):
return self._smu
@smu.setter
def smu(self, value):
if value not in ['a', 'b']:
raise ValueError
self._smu = value
def check_error(self):
if self._debug_mode:
super().check_error()
tmp = self.q('print(errorqueue.next())')
if tmp != '0.00000e+00\tQueue Is Empty\t0.00000e+00\n':
raise RuntimeError('Error on Keithley 2636A.')
def reset(self):
self.w('reset()', True)
# self.w('smua.reset(); smub.reset()', True)
def iv_sweep(self, v_start=0.0, v_end=10e-3, v_step=1e-3,
v_points=None, i_limit=1e-6, settle_time=0.0, reset=True):
"""
Reference manual 3-31
TODO: when aborted?
:return: vis, is_aborted
"""
if reset:
self.reset()
if v_points is None:
v_points = self._v_step_to_points(v_start, v_end, v_step)
lim = 'smu{}.source.limiti = {}'.format(self.smu, i_limit)
self.w(lim, True)
meas = 'SweepVLinMeasureI(smu{}, {}, {}, {}, {})'. \
format(self.smu, v_start, v_end, settle_time, v_points)
self.w(meas, True)
prnt = 'printbuffer(1, {}, smu{}.nvbuffer1.readings)'. \
format(v_points, self.smu)
resp = self.q(prnt, True)
Is = resp.split(', ')
Is = np.asarray(Is, np.float64)
if len(Is) != v_points:
aborted = True
v_points = len(Is)
else:
aborted = False
vs = np.linspace(v_start, v_end, v_points)
vis = np.array([vs, Is]).transpose()
return vis, aborted
def iv_sweep_double(self, v_max, v_step=1e-3, v_points=None,
i_limit=1e-3, settle_time=0.0, reset=True):
vis1, aborted = self.iv_sweep(0, v_max, v_step,
v_points, i_limit, settle_time, reset)
if aborted:
return vis1, aborted
vis2, aborted = self.iv_sweep(v_max, 0, v_step,
v_points, i_limit, settle_time, reset)
ret = np.concatenate((vis1, vis2))
return ret, aborted
class TestKeithley2636A(unittest2.TestCase):
def test_iv_sweep(self):
import matplotlib.pyplot as plt
ke2636a.reset()
v_start = 0.0
v_end = 1e-3
self.smu = 'a'
vis, aborted = \
ke2636a.iv_sweep(v_start, v_end, v_step=v_end / 10, i_limit=1e-9)
plt.plot(*vis.transpose(), 'o-')
plt.show()
vis, aborted = \
ke2636a.iv_sweep(v_start, v_end, v_points=101, i_limit=1e-6)
plt.plot(*vis.transpose(), 'o-')
plt.show()
# v_step ignored
vis, aborted = \
ke2636a.iv_sweep(v_start, v_end, v_step=1, v_points=11)
plt.plot(*vis.transpose(), 'o-')
plt.show()
vis, aborted = ke2636a.iv_sweep_double(10e-3)
plt.plot(*vis.transpose(), 'o-')
plt.show()
self.smu = 'b'
vis, aborted = ke2636a.iv_sweep(v_start, v_end, v_points=11)
plt.plot(*vis.transpose(), 'o-')
plt.show()
if __name__ == '__main__':
import visa
rm = visa.ResourceManager()
# ke2636a_rsrc = rm.open_resource('visa://169.254.136.196/GPIB0::20::INSTR')
ke2636a_rsrc = rm.open_resource('TCPIP::169.254.000.001::INSTR')
ke2636a = Keithley2636A(ke2636a_rsrc)
unittest2.main()
pass
| mit | -8,482,144,900,644,208,000 | 28.169231 | 80 | 0.530063 | false |
uclmr/inferbeddings | tests/inferbeddings/adversarial/closedform/test_lifted_simple_distmult_unit_cube.py | 1 | 3989 | # -*- coding: utf-8 -*-
import numpy as np
import tensorflow as tf
from inferbeddings.models import base as models
from inferbeddings.models import similarities
from inferbeddings.knowledgebase import Fact, KnowledgeBaseParser
from inferbeddings.parse import parse_clause
from inferbeddings.models.training import constraints
from inferbeddings.adversarial import Adversarial
from inferbeddings.adversarial.closedform import ClosedForm
import logging
import pytest
logger = logging.getLogger(__name__)
triples = [
('a', 'p', 'b'),
('c', 'p', 'd'),
('a', 'q', 'b')
]
facts = [Fact(predicate_name=p, argument_names=[s, o]) for s, p, o in triples]
parser = KnowledgeBaseParser(facts)
nb_entities = len(parser.entity_to_index)
nb_predicates = len(parser.predicate_to_index)
# Clauses
clause_str = 'q(X, Y) :- p(X, Y)'
clauses = [parse_clause(clause_str)]
# Instantiating the model parameters
model_class = models.get_function('DistMult')
similarity_function = similarities.get_function('dot')
model_parameters = dict(similarity_function=similarity_function)
@pytest.mark.closedform
def test_distmult_unit_cube():
for seed in range(32):
tf.reset_default_graph()
np.random.seed(seed)
tf.set_random_seed(seed)
entity_embedding_size = np.random.randint(low=1, high=5)
predicate_embedding_size = entity_embedding_size
# Instantiating entity and predicate embedding layers
entity_embedding_layer = tf.get_variable('entities',
shape=[nb_entities + 1, entity_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
predicate_embedding_layer = tf.get_variable('predicates',
shape=[nb_predicates + 1, predicate_embedding_size],
initializer=tf.contrib.layers.xavier_initializer())
# Adversary - used for computing the adversarial loss
adversarial = Adversarial(clauses=clauses, parser=parser,
entity_embedding_layer=entity_embedding_layer,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class,
model_parameters=model_parameters,
batch_size=1)
adv_projection_steps = [constraints.unit_cube(adv_emb_layer) for adv_emb_layer in adversarial.parameters]
adversarial_loss = adversarial.loss
v_optimizer = tf.train.AdagradOptimizer(learning_rate=1e-1)
v_training_step = v_optimizer.minimize(- adversarial_loss, var_list=adversarial.parameters)
init_op = tf.global_variables_initializer()
closed_form_lifted = ClosedForm(parser=parser,
predicate_embedding_layer=predicate_embedding_layer,
model_class=model_class, model_parameters=model_parameters,
is_unit_cube=True)
opt_adversarial_loss = closed_form_lifted(clauses[0])
with tf.Session() as session:
session.run(init_op)
for finding_epoch in range(1, 100 + 1):
_ = session.run([v_training_step])
for projection_step in adv_projection_steps:
session.run([projection_step])
violation_loss_val, opt_adversarial_loss_val = session.run([adversarial_loss, opt_adversarial_loss])
if violation_loss_val + 1e-1 > opt_adversarial_loss_val:
print('{} <= {}'.format(violation_loss_val, opt_adversarial_loss_val))
assert violation_loss_val <= (opt_adversarial_loss_val + 1e-4)
tf.reset_default_graph()
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
pytest.main([__file__])
| mit | -1,489,150,325,355,351,800 | 36.632075 | 116 | 0.610679 | false |
uskudnik/ggrc-core | src/ggrc_workflows/migrations/versions/20150514130212_1431e7094e26_add_new_notification_type.py | 1 | 1859 |
"""add new notification type
Revision ID: 1431e7094e26
Revises: 2b89912f95f1
Create Date: 2015-05-14 13:02:12.165612
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
from datetime import timedelta, date
from sqlalchemy import and_
from ggrc import db
from ggrc_workflows.models import Workflow
from ggrc_workflows.notification.notification_handler import (
get_notification_type,
add_notif,
)
# revision identifiers, used by Alembic.
revision = '1431e7094e26'
down_revision = '2b89912f95f1'
def upgrade():
notification_types_table = table(
'notification_types',
column('id', sa.Integer),
column('name', sa.String),
column('description', sa.Text),
column('template', sa.String),
column('instant', sa.Boolean),
column('advance_notice', sa.Integer),
column('advance_notice_end', sa.Integer),
column('created_at', sa.DateTime),
column('modified_by_id', sa.Integer),
column('updated_at', sa.DateTime),
column('context_id', sa.Integer),
)
notification_types = [
# cycle created notifictions
{"name": "cycle_start_failed",
"description": ("Notify workflow owners that a cycle has failed to"
"start for a recurring workflow"),
"template": "cycle_start_failed",
"advance_notice": 0,
"instant": False,
},
]
op.bulk_insert(notification_types_table, notification_types)
existing_wfs = Workflow.query.filter(and_(
Workflow.frequency.in_(["weekly", "monthly", "quarterly", "annually"]),
Workflow.next_cycle_start_date >= date.today()
))
for wf in existing_wfs:
notif_type = get_notification_type("cycle_start_failed")
add_notif(wf, notif_type, wf.next_cycle_start_date + timedelta(1))
db.session.commit()
def downgrade():
pass
| apache-2.0 | 2,134,548,577,250,535,400 | 26.338235 | 77 | 0.669715 | false |
shimpe/frescobaldi | frescobaldi_app/matcher.py | 1 | 7388 | # This file is part of the Frescobaldi project, http://www.frescobaldi.org/
#
# Copyright (c) 2008 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Highlights matching tokens such as { and }, << and >> etc.
"""
from __future__ import unicode_literals
import weakref
from PyQt4.QtGui import QAction
import app
import plugin
import ly.lex
import lydocument
import viewhighlighter
import actioncollection
import actioncollectionmanager
class AbstractMatcher(object):
def __init__(self, view=None):
"""Initialize with an optional View. (Does not keep a reference.)"""
self._view = lambda: None
if view:
self.setView(view)
app.settingsChanged.connect(self.updateSettings)
self.updateSettings()
def updateSettings(self):
from PyQt4.QtCore import QSettings
s = QSettings()
s.beginGroup("editor_highlighting")
self._match_duration = s.value("match", 1, int) * 1000
def setView(self, view):
"""Set the current View (to monitor for cursor position changes)."""
old = self._view()
if old:
old.cursorPositionChanged.disconnect(self.showMatches)
if view:
self._view = weakref.ref(view)
view.cursorPositionChanged.connect(self.showMatches)
else:
self._view = lambda: None
def view(self):
"""Return the current View."""
return self._view()
def highlighter(self):
"""Implement to return an ArbitraryHighlighter for the current View."""
pass
def showMatches(self):
"""Highlights matching tokens if the view's cursor is at such a token."""
cursors = matches(self.view().textCursor(), self.view())
if cursors:
self.highlighter().highlight("match", cursors, 2, self._match_duration)
else:
self.highlighter().clear("match")
class Matcher(AbstractMatcher, plugin.MainWindowPlugin):
"""One Matcher automatically handling the current View."""
def __init__(self, mainwindow):
super(Matcher, self).__init__()
ac = self.actionCollection = Actions()
actioncollectionmanager.manager(mainwindow).addActionCollection(ac)
ac.view_matching_pair.triggered.connect(self.moveto_match)
ac.view_matching_pair_select.triggered.connect(self.select_match)
mainwindow.currentViewChanged.connect(self.setView)
view = mainwindow.currentView()
if view:
self.setView(view)
def highlighter(self):
return viewhighlighter.highlighter(self.view())
def moveto_match(self):
"""Jump to the matching token."""
self.goto_match(False)
def select_match(self):
"""Select from the current to the matching token."""
self.goto_match(True)
def goto_match(self, select=False):
"""Jump to the matching token, selecting the text if select is True."""
cursor = self.view().textCursor()
cursors = matches(cursor)
if len(cursors) < 2:
return
if select:
if cursors[0] < cursors[1]:
anchor, pos = cursors[0].selectionStart(), cursors[1].selectionEnd()
else:
anchor, pos = cursors[0].selectionEnd(), cursors[1].selectionStart()
cursor.setPosition(anchor)
cursor.setPosition(pos, cursor.KeepAnchor)
else:
cursor.setPosition(cursors[1].selectionStart())
self.view().setTextCursor(cursor)
class Actions(actioncollection.ActionCollection):
name = "matchingpair"
def createActions(self, parent):
self.view_matching_pair = QAction(parent)
self.view_matching_pair_select = QAction(parent)
def translateUI(self):
self.view_matching_pair.setText(_("Matching Pai&r"))
self.view_matching_pair_select.setText(_("&Select Matching Pair"))
def matches(cursor, view=None):
"""Return a list of zero to two cursors specifying matching tokens.
If the list is empty, the cursor was not at a MatchStart/MatchEnd token,
if the list only contains one cursor the matching token could not be found,
if the list contains two cursors, the first is the token the cursor was at,
and the second is the matching token.
If view is given, only the visible part of the document is searched.
"""
block = cursor.block()
column = cursor.position() - block.position()
tokens = lydocument.Runner(lydocument.Document(cursor.document()))
tokens.move_to_block(block)
if view is not None:
first_block = view.firstVisibleBlock()
bottom = view.contentOffset().y() + view.viewport().height()
pred_forward = lambda: view.blockBoundingGeometry(tokens.block).top() <= bottom
pred_backward = lambda: tokens.block >= first_block
else:
pred_forward = lambda: True
pred_backward = lambda: True
source = None
for token in tokens.forward_line():
if token.pos <= column <= token.end:
if isinstance(token, ly.lex.MatchStart):
match, other = ly.lex.MatchStart, ly.lex.MatchEnd
def source_gen():
while pred_forward():
for t in tokens.forward_line():
yield t
if not tokens.next_block():
break
source = source_gen()
break
elif isinstance(token, ly.lex.MatchEnd):
match, other = ly.lex.MatchEnd, ly.lex.MatchStart
def source_gen():
while pred_backward():
for t in tokens.backward_line():
yield t
if not tokens.previous_block():
break
source = source_gen()
break
elif token.pos > column:
break
cursors = []
if source:
# we've found a matcher item
cursors.append(tokens.cursor())
nest = 0
for token2 in source:
if isinstance(token2, other) and token2.matchname == token.matchname:
if nest == 0:
# we've found the matching item!
cursors.append(tokens.cursor())
break
else:
nest -= 1
elif isinstance(token2, match) and token2.matchname == token.matchname:
nest += 1
return cursors
app.mainwindowCreated.connect(Matcher.instance)
| gpl-2.0 | -4,223,175,629,480,719,000 | 34.864078 | 87 | 0.611668 | false |
turbokongen/home-assistant | tests/helpers/test_device_registry.py | 1 | 38502 | """Tests for the Device Registry."""
import time
from unittest.mock import patch
import pytest
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED
from homeassistant.core import CoreState, callback
from homeassistant.helpers import device_registry, entity_registry
from tests.common import MockConfigEntry, flush_store, mock_device_registry
@pytest.fixture
def registry(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def update_events(hass):
"""Capture update events."""
events = []
@callback
def async_capture(event):
events.append(event.data)
hass.bus.async_listen(device_registry.EVENT_DEVICE_REGISTRY_UPDATED, async_capture)
return events
async def test_get_or_create_returns_same_entry(hass, registry, update_events):
"""Make sure we do not duplicate entries."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
sw_version="sw-version",
name="name",
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "11:22:33:66:77:88")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
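    # All three calls resolve to the same device entry; attributes supplied by
    # later calls are merged into the existing entry.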
assert len(registry.devices) == 1
assert entry.id == entry2.id
assert entry.id == entry3.id
assert entry.identifiers == {("bridgeid", "0123")}
assert entry3.manufacturer == "manufacturer"
assert entry3.model == "model"
assert entry3.name == "name"
assert entry3.sw_version == "sw-version"
await hass.async_block_till_done()
# Only 2 update events. The third entry did not generate any changes.
assert len(update_events) == 2
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry.id
async def test_requirement_for_identifier_or_connection(registry):
"""Make sure we do require some descriptor of device."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers=set(),
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="1234",
connections=set(),
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="1234",
connections=set(),
identifiers=set(),
manufacturer="manufacturer",
model="model",
)
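    # With neither connections nor identifiers, the registry refuses to create an entry.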
assert len(registry.devices) == 2
assert entry
assert entry2
assert entry3 is None
async def test_multiple_config_entries(registry):
"""Make sure we do not get duplicate entries."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="456",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
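    # One physical device registered from two config entries is stored once,
    # with both config entries tracked on the single entry.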
assert len(registry.devices) == 1
assert entry.id == entry2.id
assert entry.id == entry3.id
assert entry2.config_entries == {"123", "456"}
@pytest.mark.parametrize("load_registries", [False])
async def test_loading_from_storage(hass, hass_storage):
"""Test loading stored devices on start."""
hass_storage[device_registry.STORAGE_KEY] = {
"version": device_registry.STORAGE_VERSION,
"data": {
"devices": [
{
"config_entries": ["1234"],
"connections": [["Zigbee", "01.23.45.67.89"]],
"id": "abcdefghijklm",
"identifiers": [["serial", "12:34:56:AB:CD:EF"]],
"manufacturer": "manufacturer",
"model": "model",
"name": "name",
"sw_version": "version",
"entry_type": "service",
"area_id": "12345A",
"name_by_user": "Test Friendly Name",
"disabled_by": "user",
}
],
"deleted_devices": [
{
"config_entries": ["1234"],
"connections": [["Zigbee", "23.45.67.89.01"]],
"id": "bcdefghijklmn",
"identifiers": [["serial", "34:56:AB:CD:EF:12"]],
}
],
},
}
await device_registry.async_load(hass)
registry = device_registry.async_get(hass)
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 1
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={("Zigbee", "01.23.45.67.89")},
identifiers={("serial", "12:34:56:AB:CD:EF")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == "abcdefghijklm"
assert entry.area_id == "12345A"
assert entry.name_by_user == "Test Friendly Name"
assert entry.entry_type == "service"
assert entry.disabled_by == "user"
assert isinstance(entry.config_entries, set)
assert isinstance(entry.connections, set)
assert isinstance(entry.identifiers, set)
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={("Zigbee", "23.45.67.89.01")},
identifiers={("serial", "34:56:AB:CD:EF:12")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == "bcdefghijklmn"
assert isinstance(entry.config_entries, set)
assert isinstance(entry.connections, set)
assert isinstance(entry.identifiers, set)
async def test_removing_config_entries(hass, registry, update_events):
"""Make sure we do not get duplicate entries."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="456",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")},
identifiers={("bridgeid", "4567")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 2
assert entry.id == entry2.id
assert entry.id != entry3.id
assert entry2.config_entries == {"123", "456"}
registry.async_clear_config_entry("123")
entry = registry.async_get_device({("bridgeid", "0123")})
entry3_removed = registry.async_get_device({("bridgeid", "4567")})
assert entry.config_entries == {"456"}
assert entry3_removed is None
await hass.async_block_till_done()
assert len(update_events) == 5
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry2.id
assert update_events[2]["action"] == "create"
assert update_events[2]["device_id"] == entry3.id
assert update_events[3]["action"] == "update"
assert update_events[3]["device_id"] == entry.id
assert update_events[4]["action"] == "remove"
assert update_events[4]["device_id"] == entry3.id
async def test_deleted_device_removing_config_entries(hass, registry, update_events):
"""Make sure we do not get duplicate entries."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="456",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")},
identifiers={("bridgeid", "4567")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 2
assert len(registry.deleted_devices) == 0
assert entry.id == entry2.id
assert entry.id != entry3.id
assert entry2.config_entries == {"123", "456"}
registry.async_remove_device(entry.id)
registry.async_remove_device(entry3.id)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 2
await hass.async_block_till_done()
assert len(update_events) == 5
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry2.id
assert update_events[2]["action"] == "create"
assert update_events[2]["device_id"] == entry3.id
assert update_events[3]["action"] == "remove"
assert update_events[3]["device_id"] == entry.id
assert update_events[4]["action"] == "remove"
assert update_events[4]["device_id"] == entry3.id
registry.async_clear_config_entry("123")
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 2
registry.async_clear_config_entry("456")
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 2
# No event when a deleted device is purged
await hass.async_block_till_done()
assert len(update_events) == 5
# Re-add, expect to keep the device id
entry2 = registry.async_get_or_create(
config_entry_id="456",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry2.id
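    # Deleted devices are kept for ORPHANED_DEVICE_KEEP_SECONDS; advancing
    # time.time past that window lets the purge drop their stored ids.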
future_time = time.time() + device_registry.ORPHANED_DEVICE_KEEP_SECONDS + 1
with patch("time.time", return_value=future_time):
registry.async_purge_expired_orphaned_devices()
# Re-add, expect to get a new device id after the purge
entry4 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry3.id != entry4.id
async def test_removing_area_id(registry):
"""Make sure we can clear area id."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry_w_area = registry.async_update_device(entry.id, area_id="12345A")
registry.async_clear_area_id("12345A")
entry_wo_area = registry.async_get_device({("bridgeid", "0123")})
assert not entry_wo_area.area_id
assert entry_w_area != entry_wo_area
async def test_deleted_device_removing_area_id(registry):
"""Make sure we can clear area id of deleted device."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry_w_area = registry.async_update_device(entry.id, area_id="12345A")
registry.async_remove_device(entry.id)
registry.async_clear_area_id("12345A")
entry2 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry2.id
entry_wo_area = registry.async_get_device({("bridgeid", "0123")})
assert not entry_wo_area.area_id
assert entry_w_area != entry_wo_area
async def test_specifying_via_device_create(registry):
"""Test specifying a via_device and updating."""
via = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("hue", "0123")},
manufacturer="manufacturer",
model="via",
)
light = registry.async_get_or_create(
config_entry_id="456",
connections=set(),
identifiers={("hue", "456")},
manufacturer="manufacturer",
model="light",
via_device=("hue", "0123"),
)
assert light.via_device_id == via.id
async def test_specifying_via_device_update(registry):
"""Test specifying a via_device and updating."""
light = registry.async_get_or_create(
config_entry_id="456",
connections=set(),
identifiers={("hue", "456")},
manufacturer="manufacturer",
model="light",
via_device=("hue", "0123"),
)
assert light.via_device_id is None
via = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("hue", "0123")},
manufacturer="manufacturer",
model="via",
)
light = registry.async_get_or_create(
config_entry_id="456",
connections=set(),
identifiers={("hue", "456")},
manufacturer="manufacturer",
model="light",
via_device=("hue", "0123"),
)
assert light.via_device_id == via.id
async def test_loading_saving_data(hass, registry):
"""Test that we load/save data correctly."""
orig_via = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("hue", "0123")},
manufacturer="manufacturer",
model="via",
name="Original Name",
sw_version="Orig SW 1",
entry_type="device",
)
orig_light = registry.async_get_or_create(
config_entry_id="456",
connections=set(),
identifiers={("hue", "456")},
manufacturer="manufacturer",
model="light",
via_device=("hue", "0123"),
disabled_by="user",
)
orig_light2 = registry.async_get_or_create(
config_entry_id="456",
connections=set(),
identifiers={("hue", "789")},
manufacturer="manufacturer",
model="light",
via_device=("hue", "0123"),
)
registry.async_remove_device(orig_light2.id)
orig_light3 = registry.async_get_or_create(
config_entry_id="789",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:AB:CD:EF:12")},
identifiers={("hue", "abc")},
manufacturer="manufacturer",
model="light",
)
registry.async_get_or_create(
config_entry_id="abc",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:AB:CD:EF:12")},
identifiers={("abc", "123")},
manufacturer="manufacturer",
model="light",
)
registry.async_remove_device(orig_light3.id)
orig_light4 = registry.async_get_or_create(
config_entry_id="789",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:AB:CD:EF:12")},
identifiers={("hue", "abc")},
manufacturer="manufacturer",
model="light",
)
assert orig_light4.id == orig_light3.id
assert len(registry.devices) == 3
assert len(registry.deleted_devices) == 1
orig_via = registry.async_update_device(
orig_via.id, area_id="mock-area-id", name_by_user="mock-name-by-user"
)
# Now load written data in new registry
registry2 = device_registry.DeviceRegistry(hass)
await flush_store(registry._store)
await registry2.async_load()
# Ensure same order
assert list(registry.devices) == list(registry2.devices)
assert list(registry.deleted_devices) == list(registry2.deleted_devices)
new_via = registry2.async_get_device({("hue", "0123")})
new_light = registry2.async_get_device({("hue", "456")})
new_light4 = registry2.async_get_device({("hue", "abc")})
assert orig_via == new_via
assert orig_light == new_light
assert orig_light4 == new_light4
async def test_no_unnecessary_changes(registry):
"""Make sure we do not consider devices changes."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={("ethernet", "12:34:56:78:90:AB:CD:EF")},
identifiers={("hue", "456"), ("bla", "123")},
)
with patch(
"homeassistant.helpers.device_registry.DeviceRegistry.async_schedule_save"
) as mock_save:
entry2 = registry.async_get_or_create(
config_entry_id="1234", identifiers={("hue", "456")}
)
assert entry.id == entry2.id
assert len(mock_save.mock_calls) == 0
async def test_format_mac(registry):
"""Make sure we normalize mac addresses."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for mac in ["123456ABCDEF", "123456abcdef", "12:34:56:ab:cd:ef", "1234.56ab.cdef"]:
test_entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, mac)},
)
assert test_entry.id == entry.id, mac
assert test_entry.connections == {
(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:ab:cd:ef")
}
# This should not raise
for invalid in [
"invalid_mac",
"123456ABCDEFG", # 1 extra char
"12:34:56:ab:cdef", # not enough :
"12:34:56:ab:cd:e:f", # too many :
"1234.56abcdef", # not enough .
"123.456.abc.def", # too many .
]:
invalid_mac_entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, invalid)},
)
assert list(invalid_mac_entry.connections)[0][1] == invalid
async def test_update(registry):
"""Verify that we can update some attributes of a device."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("hue", "456"), ("bla", "123")},
)
new_identifiers = {("hue", "654"), ("bla", "321")}
assert not entry.area_id
assert not entry.name_by_user
with patch.object(registry, "async_schedule_save") as mock_save:
updated_entry = registry.async_update_device(
entry.id,
area_id="12345A",
manufacturer="Test Producer",
model="Test Model",
name_by_user="Test Friendly Name",
new_identifiers=new_identifiers,
via_device_id="98765B",
disabled_by="user",
)
assert mock_save.call_count == 1
assert updated_entry != entry
assert updated_entry.area_id == "12345A"
assert updated_entry.manufacturer == "Test Producer"
assert updated_entry.model == "Test Model"
assert updated_entry.name_by_user == "Test Friendly Name"
assert updated_entry.identifiers == new_identifiers
assert updated_entry.via_device_id == "98765B"
assert updated_entry.disabled_by == "user"
assert registry.async_get_device({("hue", "456")}) is None
assert registry.async_get_device({("bla", "123")}) is None
assert registry.async_get_device({("hue", "654")}) == updated_entry
assert registry.async_get_device({("bla", "321")}) == updated_entry
assert (
registry.async_get_device(
{}, {(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}
)
== updated_entry
)
assert registry.async_get(updated_entry.id) is not None
async def test_update_remove_config_entries(hass, registry, update_events):
"""Make sure we do not get duplicate entries."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry2 = registry.async_get_or_create(
config_entry_id="456",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")},
identifiers={("bridgeid", "4567")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 2
assert entry.id == entry2.id
assert entry.id != entry3.id
assert entry2.config_entries == {"123", "456"}
updated_entry = registry.async_update_device(
entry2.id, remove_config_entry_id="123"
)
removed_entry = registry.async_update_device(
entry3.id, remove_config_entry_id="123"
)
assert updated_entry.config_entries == {"456"}
assert removed_entry is None
removed_entry = registry.async_get_device({("bridgeid", "4567")})
assert removed_entry is None
await hass.async_block_till_done()
assert len(update_events) == 5
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry2.id
assert update_events[2]["action"] == "create"
assert update_events[2]["device_id"] == entry3.id
assert update_events[3]["action"] == "update"
assert update_events[3]["device_id"] == entry.id
assert update_events[4]["action"] == "remove"
assert update_events[4]["device_id"] == entry3.id
async def test_update_sw_version(registry):
"""Verify that we can update software version of a device."""
entry = registry.async_get_or_create(
config_entry_id="1234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bla", "123")},
)
assert not entry.sw_version
sw_version = "0x20020263"
with patch.object(registry, "async_schedule_save") as mock_save:
updated_entry = registry.async_update_device(entry.id, sw_version=sw_version)
assert mock_save.call_count == 1
assert updated_entry != entry
assert updated_entry.sw_version == sw_version
async def test_cleanup_device_registry(hass, registry):
"""Test cleanup works."""
config_entry = MockConfigEntry(domain="hue")
config_entry.add_to_hass(hass)
d1 = registry.async_get_or_create(
identifiers={("hue", "d1")}, config_entry_id=config_entry.entry_id
)
registry.async_get_or_create(
identifiers={("hue", "d2")}, config_entry_id=config_entry.entry_id
)
d3 = registry.async_get_or_create(
identifiers={("hue", "d3")}, config_entry_id=config_entry.entry_id
)
registry.async_get_or_create(
identifiers={("something", "d4")}, config_entry_id="non_existing"
)
ent_reg = await entity_registry.async_get_registry(hass)
ent_reg.async_get_or_create("light", "hue", "e1", device_id=d1.id)
ent_reg.async_get_or_create("light", "hue", "e2", device_id=d1.id)
ent_reg.async_get_or_create("light", "hue", "e3", device_id=d3.id)
device_registry.async_cleanup(hass, registry, ent_reg)
assert registry.async_get_device({("hue", "d1")}) is not None
assert registry.async_get_device({("hue", "d2")}) is not None
assert registry.async_get_device({("hue", "d3")}) is not None
assert registry.async_get_device({("something", "d4")}) is None
async def test_cleanup_device_registry_removes_expired_orphaned_devices(hass, registry):
"""Test cleanup removes expired orphaned devices."""
config_entry = MockConfigEntry(domain="hue")
config_entry.add_to_hass(hass)
registry.async_get_or_create(
identifiers={("hue", "d1")}, config_entry_id=config_entry.entry_id
)
registry.async_get_or_create(
identifiers={("hue", "d2")}, config_entry_id=config_entry.entry_id
)
registry.async_get_or_create(
identifiers={("hue", "d3")}, config_entry_id=config_entry.entry_id
)
registry.async_clear_config_entry(config_entry.entry_id)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 3
ent_reg = await entity_registry.async_get_registry(hass)
device_registry.async_cleanup(hass, registry, ent_reg)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 3
future_time = time.time() + device_registry.ORPHANED_DEVICE_KEEP_SECONDS + 1
with patch("time.time", return_value=future_time):
device_registry.async_cleanup(hass, registry, ent_reg)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 0
async def test_cleanup_startup(hass):
"""Test we run a cleanup on startup."""
hass.state = CoreState.not_running
await device_registry.async_get_registry(hass)
with patch(
"homeassistant.helpers.device_registry.Debouncer.async_call"
) as mock_call:
hass.bus.async_fire(EVENT_HOMEASSISTANT_STARTED)
await hass.async_block_till_done()
assert len(mock_call.mock_calls) == 1
@pytest.mark.parametrize("load_registries", [False])
async def test_cleanup_entity_registry_change(hass):
"""Test we run a cleanup when entity registry changes.
Don't pre-load the registries as the debouncer will then not be waiting for
EVENT_ENTITY_REGISTRY_UPDATED events.
"""
await device_registry.async_load(hass)
await entity_registry.async_load(hass)
ent_reg = entity_registry.async_get(hass)
with patch(
"homeassistant.helpers.device_registry.Debouncer.async_call"
) as mock_call:
entity = ent_reg.async_get_or_create("light", "hue", "e1")
await hass.async_block_till_done()
assert len(mock_call.mock_calls) == 0
# Normal update does not trigger
ent_reg.async_update_entity(entity.entity_id, name="updated")
await hass.async_block_till_done()
assert len(mock_call.mock_calls) == 0
# Device ID update triggers
ent_reg.async_get_or_create("light", "hue", "e1", device_id="bla")
await hass.async_block_till_done()
assert len(mock_call.mock_calls) == 1
# Removal also triggers
ent_reg.async_remove(entity.entity_id)
await hass.async_block_till_done()
assert len(mock_call.mock_calls) == 2
async def test_restore_device(hass, registry, update_events):
"""Make sure device id is stable."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
registry.async_remove_device(entry.id)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 1
entry2 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")},
identifiers={("bridgeid", "4567")},
manufacturer="manufacturer",
model="model",
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry3.id
assert entry.id != entry2.id
assert len(registry.devices) == 2
assert len(registry.deleted_devices) == 0
assert isinstance(entry3.config_entries, set)
assert isinstance(entry3.connections, set)
assert isinstance(entry3.identifiers, set)
await hass.async_block_till_done()
assert len(update_events) == 4
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "remove"
assert update_events[1]["device_id"] == entry.id
assert update_events[2]["action"] == "create"
assert update_events[2]["device_id"] == entry2.id
assert update_events[3]["action"] == "create"
assert update_events[3]["device_id"] == entry3.id
async def test_restore_simple_device(hass, registry, update_events):
"""Make sure device id is stable."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
)
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
registry.async_remove_device(entry.id)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 1
entry2 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "34:56:78:CD:EF:12")},
identifiers={("bridgeid", "4567")},
)
entry3 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("bridgeid", "0123")},
)
assert entry.id == entry3.id
assert entry.id != entry2.id
assert len(registry.devices) == 2
assert len(registry.deleted_devices) == 0
await hass.async_block_till_done()
assert len(update_events) == 4
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "remove"
assert update_events[1]["device_id"] == entry.id
assert update_events[2]["action"] == "create"
assert update_events[2]["device_id"] == entry2.id
assert update_events[3]["action"] == "create"
assert update_events[3]["device_id"] == entry3.id
async def test_restore_shared_device(hass, registry, update_events):
"""Make sure device id is stable for shared devices."""
entry = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("entry_123", "0123")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
registry.async_get_or_create(
config_entry_id="234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("entry_234", "2345")},
manufacturer="manufacturer",
model="model",
)
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
registry.async_remove_device(entry.id)
assert len(registry.devices) == 0
assert len(registry.deleted_devices) == 1
entry2 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("entry_123", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry2.id
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
assert isinstance(entry2.config_entries, set)
assert isinstance(entry2.connections, set)
assert isinstance(entry2.identifiers, set)
registry.async_remove_device(entry.id)
entry3 = registry.async_get_or_create(
config_entry_id="234",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("entry_234", "2345")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry3.id
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
assert isinstance(entry3.config_entries, set)
assert isinstance(entry3.connections, set)
assert isinstance(entry3.identifiers, set)
entry4 = registry.async_get_or_create(
config_entry_id="123",
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
identifiers={("entry_123", "0123")},
manufacturer="manufacturer",
model="model",
)
assert entry.id == entry4.id
assert len(registry.devices) == 1
assert len(registry.deleted_devices) == 0
assert isinstance(entry4.config_entries, set)
assert isinstance(entry4.connections, set)
assert isinstance(entry4.identifiers, set)
await hass.async_block_till_done()
assert len(update_events) == 7
assert update_events[0]["action"] == "create"
assert update_events[0]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry.id
assert update_events[2]["action"] == "remove"
assert update_events[2]["device_id"] == entry.id
assert update_events[3]["action"] == "create"
assert update_events[3]["device_id"] == entry.id
assert update_events[4]["action"] == "remove"
assert update_events[4]["device_id"] == entry.id
assert update_events[5]["action"] == "create"
assert update_events[5]["device_id"] == entry.id
assert update_events[1]["action"] == "update"
assert update_events[1]["device_id"] == entry.id
async def test_get_or_create_empty_then_set_default_values(hass, registry):
"""Test creating an entry, then setting default name, model, manufacturer."""
entry = registry.async_get_or_create(
identifiers={("bridgeid", "0123")}, config_entry_id="1234"
)
assert entry.name is None
assert entry.model is None
assert entry.manufacturer is None
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
default_name="default name 1",
default_model="default model 1",
default_manufacturer="default manufacturer 1",
)
assert entry.name == "default name 1"
assert entry.model == "default model 1"
assert entry.manufacturer == "default manufacturer 1"
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
default_name="default name 2",
default_model="default model 2",
default_manufacturer="default manufacturer 2",
)
assert entry.name == "default name 1"
assert entry.model == "default model 1"
assert entry.manufacturer == "default manufacturer 1"
async def test_get_or_create_empty_then_update(hass, registry):
"""Test creating an entry, then setting name, model, manufacturer."""
entry = registry.async_get_or_create(
identifiers={("bridgeid", "0123")}, config_entry_id="1234"
)
assert entry.name is None
assert entry.model is None
assert entry.manufacturer is None
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
name="name 1",
model="model 1",
manufacturer="manufacturer 1",
)
assert entry.name == "name 1"
assert entry.model == "model 1"
assert entry.manufacturer == "manufacturer 1"
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
default_name="default name 1",
default_model="default model 1",
default_manufacturer="default manufacturer 1",
)
assert entry.name == "name 1"
assert entry.model == "model 1"
assert entry.manufacturer == "manufacturer 1"
async def test_get_or_create_sets_default_values(hass, registry):
"""Test creating an entry, then setting default name, model, manufacturer."""
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
default_name="default name 1",
default_model="default model 1",
default_manufacturer="default manufacturer 1",
)
assert entry.name == "default name 1"
assert entry.model == "default model 1"
assert entry.manufacturer == "default manufacturer 1"
entry = registry.async_get_or_create(
config_entry_id="1234",
identifiers={("bridgeid", "0123")},
default_name="default name 2",
default_model="default model 2",
default_manufacturer="default manufacturer 2",
)
assert entry.name == "default name 1"
assert entry.model == "default model 1"
assert entry.manufacturer == "default manufacturer 1"
| apache-2.0 | -2,436,744,861,999,438,000 | 33.811935 | 88 | 0.625552 | false |
TriggeredMessaging/pydotmailer | pydotmailer.py | 1 | 26995 | # pydotmailer - A lightweight wrapper for the dotMailer API, written in Python.
# Copyright (c) 2012 Triggered Messaging Ltd, released under the MIT license
# Home page:
# https://github.com/TriggeredMessaging/pydotmailer/
# See README and LICENSE files.
#
# dotMailer API docs are at http://www.dotmailer.co.uk/api/
# This class was influenced by earlier work: https://github.com/JeremyJones/dotmailer-client/blob/master/dotmailer.py
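#
# Illustrative usage (a minimal sketch; the username, password, address book id
# and email address below are placeholders, not working credentials):
#   mailer = PyDotMailer(api_username='apiuser-example', api_password='example-password')
#   result = mailer.add_contact_to_address_book(
#       address_book_id=123456,
#       email_address='[email protected]',
#       d_fields={'FIRSTNAME': 'Jane', 'LASTNAME': 'Doe'})
#   if not result.get('ok'):
#       print result.get('error_code')  # Python 2 print, matching the rest of this module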
import base64
import time
from datetime import datetime, timedelta
from suds.client import Client as SOAPClient
__version__ = '0.1.2'
try:
import simplejson as json
except ImportError:
import json # fall back to traditional json module.
import logging
logger = logging.getLogger(__name__)
from dotmailersudsplugin import DotMailerSudsPlugin
class PyDotMailer(object):
version = '0.1'
class RESULT_FIELDS_ERROR_CODE:
"""
        Defines for RESULT_FIELDS_ERROR_CODE error codes, which we derive from the string that the ESP
        (e.g. dotMailer) returns.
"""
ERROR_CAMPAIGN_NOT_FOUND = 'ERROR_CAMPAIGN_NOT_FOUND' # no email template
ERROR_CAMPAIGN_SENDNOTPERMITTED = 'ERROR_CAMPAIGN_SENDNOTPERMITTED'
# not paid enough? dotMailer tends to return this if you've run out of campaign credits or a similar issue.
        ERROR_CAMPAIGN_APINOTPERMITTED = 'ERROR_CAMPAIGN_APINOTPERMITTED'  # e.g. exceeded dotMailer API limits (API_USAGE_EXCEEDED)
ERROR_GENERIC = 'ERROR_UNKNOWN' # code which couldn't be parsed.
ERROR_CONTACT_NOT_FOUND = 'ERROR_CONTACT_NOT_FOUND' # no email address?
ERROR_CONTACT_UNSUBSCRIBED = 'ERROR_CONTACT_UNSUBSCRIBED' # no send permission
ERROR_CONTACT_BLACKHOLED = 'ERROR_CONTACT_BLACKHOLED' # address blackholed
ERROR_OTHER = 'ERROR_OTHER' # Etc
TIMEOUT_ERROR = 'Timeout Error' # Timeout from ESP
ERROR_UNFINISHED = "ERROR_UNFINISHED" # Load had not finished
ERROR_ESP_LOAD_FAIL = 'ERROR_ESP_LOAD_FAIL' # Data not loaded
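    # Illustrative sketch of branching on these codes (dict_result stands for the
    # dict returned by any PyDotMailer call; the handling shown is an assumption):
    #   if not dict_result.get('ok'):
    #       if dict_result.get('error_code') == \
    #               PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_UNSUBSCRIBED:
    #           pass  # e.g. suppress further sends to this contact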
# Cache the information on the API location on the server
api_url = ''
def __init__(self, api_username='', api_password='', secure=True):
"""
Connect to the dotMailer API at apiconnector.com, using SUDS.
        Note: there is no API key parameter, because the dotMailer API doesn't use an API key.
@param api_username Your dotMailer user name
@param api_password Your dotMailer password
@param secure Whether or not this should use a secure connection (HTTPS).
Always True if the ESP doesn't support an insecure API.
"""
# Remember the HTTPS flag
        self.secure = bool(secure)  # Ensure a boolean value
# Choose the dotMailer API URL
if secure:
self.api_url = 'https://apiconnector.com/API.asmx?WSDL'
else:
self.api_url = 'http://apiconnector.com/API.asmx?WSDL'
# Connect to the API, using SUDS. Log before and after to track the time taken.
logger.debug("Connecting to web service")
self.client = SOAPClient(self.api_url,
plugins=[DotMailerSudsPlugin()]) # Plugin makes a tiny XML patch for dotMailer
logger.debug("Connected to web service")
# Change the logging level to CRITICAL to avoid logging errors for every API call which fails via suds
logging.getLogger('suds.client').setLevel(logging.CRITICAL)
# Remember the username and password. There's no API key to remember with dotMailer
self.api_username = api_username
self.api_password = api_password
if (not api_username) or (not api_password):
raise Exception('Bad username or password')
self.last_exception = None
def unpack_exception(self, e):
""" unpack the exception thrown by suds. This contains a string code in e.fault.faultstring containing text e.g.
Server was unable to process request. ---> Campaign not found ERROR_CAMPAIGN_NOT_FOUND
Use this to set a suitable value for dict_result
@param e exception
@return dict_result, e.g. {'ok':False,
'errors':[e.message],
                                    'error_code': PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND }
"""
self.last_exception = e # in case caller cares
fault_string = ''
# http://stackoverflow.com/questions/610883/how-to-know-if-an-object-has-an-attribute-in-python
if e and hasattr(e, 'fault') and hasattr(e.fault, 'faultstring'):
fault_string = e.fault.faultstring
# todo clearly a more generic way of doing this would be good.
if 'ERROR_CAMPAIGN_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_NOT_FOUND
elif 'ERROR_CAMPAIGN_SENDNOTPERMITTED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_SENDNOTPERMITTED
elif 'ERROR_APIUSAGE_EXCEEDED' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED
elif 'ERROR_CONTACT_NOT_FOUND' in fault_string:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND
elif 'ERROR_CONTACT_SUPPRESSED' in fault_string:
# Server was unable to process request. ---> Contact is suppressed. ERROR_CONTACT_SUPPRESSED
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_UNSUBSCRIBED
else:
error_code = PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_OTHER
dict_result = {'ok': False, 'errors': [e.message], 'error_code': error_code}
return dict_result
def add_contacts_to_address_book(self, address_book_id, s_contacts, wait_to_complete_seconds=False):
"""
Add a list of contacts to the address book
@param address_book_id the id of the address book
@param s_contacts containing the contacts to be added. You may upload either a .csv or .xls file.
It must contain one column with the heading "Email".
                        Other columns will attempt to map to your custom data fields.
        @param wait_to_complete_seconds maximum number of seconds to wait for the import to finish;
                        pass False to return immediately.
@return dict e.g. {'progress_id': 15edf1c4-ce5f-42e3-b182-3b20c880bcf8, 'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/address_books/add_contacts_to_address_book_with_progress.aspx
"""
dict_result = {'ok': True}
return_code = None
base64_data = base64.b64encode(s_contacts)
try:
progress_id = self.client.service.AddContactsToAddressBookWithProgress(username=self.api_username,
password=self.api_password,
addressbookID=address_book_id,
data=base64_data,
dataType='CSV')
dict_result = {'ok': True}
if wait_to_complete_seconds:
# retry loop...
dt_wait_until = datetime.utcnow() + timedelta(seconds=wait_to_complete_seconds) # wait for max
sleep_time = 0.2 # start with short sleep between retries
while (not return_code or return_code.get('result') == 'NotFinished') and \
datetime.utcnow() < dt_wait_until:
time.sleep(sleep_time)
return_code = self.get_contact_import_progress(progress_id) # E.g: {'error_code': 'ERROR_UNFINISHED', 'ok': False, 'result': NotFinished}
# gradually backoff with longer sleep intervals up to a max of 5 seconds
sleep_time = min(sleep_time * 2, 5.0)
if return_code:
dict_result = return_code
dict_result.update({'progress_id': progress_id})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
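    # Illustrative bulk-load sketch (the address book id and CSV content are
    # placeholder values): pass the raw CSV text plus a maximum wait, then
    # inspect the returned dict.
    #   csv_data = 'Email,FIRSTNAME\[email protected],Jane\n'
    #   result = mailer.add_contacts_to_address_book(123456, csv_data,
    #                                                wait_to_complete_seconds=60)
    #   # on success, result resembles {'ok': True, 'result': 'Finished', 'progress_id': '...'}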
def add_contact_to_address_book(self, address_book_id, email_address, d_fields, email_type="Html",
audience_type="Unknown",
opt_in_type="Unknown"):
"""
        Add a single contact to an address book - uses AddContactToAddressBook.
        @param address_book_id the id of the address book
        @param email_address the email address to add
        @param d_fields dict containing the data to be added, e.g. {'firstname': 'mike', 'lastname': 'austin'}.
                        Keys must map to standard dotMailer fields, or will attempt to map to your custom data fields.
@param email_type = "Html" - the new contact will be set to receive this format by default.
@return dict e.g. {'contact_id': 123532543, 'ok': True, 'contact': APIContact object }
"""
# Initialise the result dictionary
dict_result = {'ok': False}
# Create an APIContact object with the details of the record to load. For example:
# APIContact: (APIContact){
# ID = None, Email = None,
# AudienceType = (ContactAudienceTypes){ value = None, }
# DataFields = (ContactDataFields){ Keys = (ArrayOfString){ string[] = <empty> }
# Values = (ArrayOfAnyType){ anyType[] = <empty> }
# OptInType = (ContactOptInTypes){ value = None }
# EmailType = (ContactEmailTypes){ value = None }
# Notes = None }
contact = self.client.factory.create('APIContact')
del contact.ID
contact.Email = email_address
# Copy field data into the call
for field_name in d_fields:
if field_name != 'email' and d_fields.get(field_name):
contact.DataFields.Keys[0].append(field_name)
contact.DataFields.Values[0].append(d_fields.get(field_name))
# remove some empty values that will upset suds/dotMailer
####del contact.AudienceType
####del contact.OptInType
contact.AudienceType = audience_type
contact.OptInType = opt_in_type
contact.EmailType = email_type
#### logging.getLogger('suds.client').setLevel(logging.DEBUG)
try:
created_contact = self.client.service.AddContactToAddressBook(username=self.api_username,
password=self.api_password,
contact=contact,
addressbookId=address_book_id)
# Example dict_result contents:
# { 'contact': (APIContact){ ID = 417373614, Email = "[email protected]",
# AudienceType = "Unknown",
# DataFields = (ContactDataFields){
# Keys = (ArrayOfString){ string[] = "Postcode", }
# Values = (ArrayOfAnyType){ anyType[] = "SW1A 0AA", } }
# OptInType = "Unknown", EmailType = "Html" },
# 'ok': True, 'contact_id': 417373614}
dict_result = ({'ok': True, 'contact_id': created_contact.ID, 'contact': created_contact})
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
def get_contact_import_progress(self, progress_id):
"""
@param progress_id the progress_id from add_contacts_to_address_book
@return dict e.g. {'ok': False, 'result': NotFinished} or dict: {'ok': True, 'result': Finished}
http://www.dotmailer.co.uk/api/contacts/get_contact_import_progress.aspx
"""
dict_result = {'ok': True}
try:
return_code = self.client.service.GetContactImportProgress(username=self.api_username,
password=self.api_password,
progressID=progress_id)
if return_code == 'Finished':
dict_result = {'ok': True, 'result': return_code, 'errors': [' Load OK. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id] }
elif return_code == 'RejectedByWatchdog':
# API call AddContactsToAddressBookWithProgress has triggered "RejectedByWatchdog" for one client and (we believe) dotMailer blocked the whole upload.
# https://support.dotmailer.com/entries/44346548-Data-Watchdog-FAQs
# https://support.dotmailer.com/entries/21449156-Better-API-feedback-for-Reject...
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_ESP_LOAD_FAIL,
'errors': [' Load Fail. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
else:
dict_result = {'ok': False, 'result': return_code, 'error_code':PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_UNFINISHED,
'errors': [' Load Unfinished. See report at https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=%s ' % progress_id]}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result # E.g: {'ok': True, 'result': Finished, 'errors': [u'<a href="https://r1-app.dotmailer.com/Contacts/Import/WatchdogReport.aspx?g=d82602bb-adfb-4e2d-aabc-5fb77af2ae3d">Load OK Report</a>']}
def send_campaign_to_contact(self, campaign_id, contact_id, send_date=None):
"""
@param campaign_id
@param contact_id
@param send_date date/time in server time when the campaign should be sent.
@return dict e.g. {'ok': True} or {'ok': False,
'result': <return code if there is one>,
'errors':['sample error']}
http://www.dotmailer.co.uk/api/campaigns/send_campaign_to_contact.aspx
"""
# format the date in ISO format, e.g. "2012-03-28T19:51:00" for sending via SOAP call.
if not send_date:
send_date = datetime.utcnow()
dict_result = {'ok': True}
iso_send_date = self.dt_to_iso_date(send_date)
return_code = None
try:
return_code = self.client.service.SendCampaignToContact(username=self.api_username,
password=self.api_password,
campaignId=campaign_id,
contactid=contact_id,
sendDate=iso_send_date) # note inconsistent case
# in DM API
if return_code:
# return code, which means an error
dict_result = {'ok': False, 'result': return_code}
except Exception as e:
dict_result = self.unpack_exception(e)
return dict_result
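    # Illustrative scheduling sketch (campaign and contact ids are placeholder
    # values); omitting send_date sends immediately (server time is assumed UTC here).
    #   send_at = datetime.utcnow() + timedelta(minutes=10)
    #   result = mailer.send_campaign_to_contact(campaign_id=1111, contact_id=2222,
    #                                            send_date=send_at)
    #   # result is {'ok': True} on success, otherwise includes 'errors' and 'error_code'.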
def get_contact_by_email(self, email):
"""
@param email email address to search for.
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "[email protected]"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_email.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactByEmail(username=self.api_username,
password=self.api_password,
email=email)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
data_fields = dict_result.get('result').DataFields
d_fields = self._clean_returned_data_fields(data_fields=data_fields)
dict_result.update({'d_fields': d_fields})
except:
logger.exception("Exception unpacking fields in GetContactByEmail for email=%s" % email)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get("error_code")
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # ignore these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
else:
logger.exception("Exception in GetContactByEmail")
return dict_result
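    # Illustrative sketch of consuming the lookup result (the email address is a
    # placeholder):
    #   result = mailer.get_contact_by_email('[email protected]')
    #   if result.get('ok'):
    #       contact_id = result.get('contact_id')
    #       first_name = result.get('d_fields', {}).get('FIRSTNAME')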
def dt_to_iso_date(self, dt):
""" convert a python datetime to an iso date, e.g. "2012-03-28T19:51:00"
ready to send via SOAP
http://www.iso.org/iso/date_and_time_format
"""
try:
iso_dt = dt.strftime('%Y-%m-%dT%H:%M:%S')
except:
logger.exception('Exception converting dt to iso')
iso_dt = None
return iso_dt
def _clean_returned_data_fields(self, data_fields):
"""
Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
so the lengths don't match.
        If this happens, scan through the keys and insert an extra value of None just before the dodgy key(s),
        then return a dict mapping each field name to its value.
        @param data_fields the DataFields member of an APIContact as returned by dotMailer.
        @return dict with structure { field_name: field_value }
"""
d_fields = {}
data_fields_keys = data_fields.Keys[0]
data_fields_values = data_fields.Values[0]
# Case 1886: If there's an empty first name/last name key, then dotMailer fails to return a value,
# so the lengths don't match
# If this happens, scan through the keys and add an extra value of None just before the dodgy key(s)
len_data_fields_names = len(data_fields_keys)
len_data_fields_values = len(data_fields_values)
if len_data_fields_names > len_data_fields_values:
# Different number of keys and values, so do a copy but insert None when necessary
name_index = 0
value_index = 0
while name_index < len_data_fields_names:
field_name = data_fields_keys[name_index]
if name_index+1 < len_data_fields_names:
next_field_name = data_fields_keys[name_index+1]
else:
next_field_name = ""
if ((len_data_fields_names > len_data_fields_values)
and (next_field_name == "FIRSTNAME"
or next_field_name == "LASTNAME"
or next_field_name == "FULLNAME")):
d_fields.update({field_name: None}) # Insert new value Null
len_data_fields_values += 1 # Count one more value, but don't step on to next value
else:
d_fields.update({field_name: data_fields_values[value_index]}) # Copy the real value
value_index += 1 # Step on to next value
name_index += 1 # Next key
else:
# Same number of keys and values, so just do a straightforward copy
for idx, field_name in enumerate(data_fields_keys):
                logger.debug("%s %s %s", idx, field_name, data_fields_values[idx])
d_fields.update({field_name: data_fields_values[idx]})
return d_fields
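    # Worked example of the repair above (hypothetical data): suppose dotMailer
    # returns Keys = ['FIRSTNAME', 'FULLNAME', 'GENDER', 'LASTNAME', 'POSTCODE']
    # but only four Values, because the empty FIRSTNAME value was dropped. The
    # length mismatch is detected while the next key is FULLNAME, so FIRSTNAME
    # receives None and the remaining values stay aligned, yielding:
    #   {'FIRSTNAME': None, 'FULLNAME': 'Jane Doe', 'GENDER': 'F',
    #    'LASTNAME': 'Doe', 'POSTCODE': 'SW1A 0AA'}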
def get_contact_by_id(self, contact_id):
"""
@param contact_id - id to search for
@return dict e.g. {'ok': True,
contact_id: 32323232, # the dotMailer contact ID
email: # the email address of the returned record
d_fields: { field_name: field_value }, # dictionary with multiple fields, keyed by field name
# The result member is the raw return from dotMailer.
'result': (APIContact){
ID = 367568124
Email = "[email protected]"
AudienceType = "Unknown"
DataFields =
(ContactDataFields){
Keys =
(ArrayOfString){
string[] =
"FIRSTNAME",
"FULLNAME",
"GENDER",
"LASTNAME",
"POSTCODE",
}
Values =
(ArrayOfAnyType){
anyType[] =
None,
None,
None,
None,
}
}
OptInType = "Unknown"
EmailType = "Html"
Notes = None
}}
http://www.dotmailer.co.uk/api/contacts/get_contact_by_id.aspx
"""
dict_result = {'ok': True}
data_fields = None
try:
return_code = self.client.service.GetContactById(username=self.api_username, password=self.api_password,
id=contact_id)
dict_result = {'ok': True, 'result': return_code}
if dict_result.get('ok'):
# create a dictionary with structure { field_name: field_value }
try:
                data_fields = dict_result.get('result').DataFields
                d_fields = self._clean_returned_data_fields(data_fields=data_fields)
                dict_result.update({'d_fields': d_fields})
except:
logger.exception("Exception unpacking fields in GetContactById for id=%s" % contact_id)
# log additional info separately in case something bad has happened
# which'll cause this logging line to raise.
logger.error("Further info: data_fields=%s" % data_fields)
contact_id = return_code.ID
dict_result.update({'contact_id': contact_id})
returned_email_address = return_code.Email
dict_result.update({'email': returned_email_address})
except Exception as e:
dict_result = self.unpack_exception(e)
error_code = dict_result.get('error_code')
if error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CONTACT_NOT_FOUND:
pass # Don't log these expected errors
elif error_code == PyDotMailer.RESULT_FIELDS_ERROR_CODE.ERROR_CAMPAIGN_APINOTPERMITTED:
pass
return dict_result
"""
might implement a command line at some point.
def main():
try:
addressbookid = sys.argv[2] #should use argparse or similar.
contactsfilename = sys.argv[3]
except IndexError:
print "Usage: dotmailer addcontactstoaddressbook addressbookid contactsfilename\n"
sys.exit(1)
initial_data = open(contactsfilename, 'r').read()
"""
| mit | 7,280,185,620,400,163,000 | 54.204499 | 215 | 0.544619 | false |
Runscope/pysaml2 | tests/test_30_mdstore.py | 1 | 7860 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
import re
from saml2.httpbase import HTTPBase
from saml2.mdstore import MetadataStore, MetaDataMDX
from saml2.mdstore import destinations
from saml2.mdstore import name
from saml2 import md
from saml2 import sigver
from saml2 import BINDING_SOAP
from saml2 import BINDING_HTTP_REDIRECT
from saml2 import BINDING_HTTP_POST
from saml2 import BINDING_HTTP_ARTIFACT
from saml2 import saml
from saml2 import config
from saml2.attribute_converter import ac_factory
from saml2.attribute_converter import d_to_local_name
from saml2.extension import mdui
from saml2.extension import idpdisc
from saml2.extension import dri
from saml2.extension import mdattr
from saml2.extension import ui
from saml2.s_utils import UnknownPrincipal
import xmldsig
import xmlenc
from pathutils import full_path
sec_config = config.Config()
#sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
ONTS = {
saml.NAMESPACE: saml,
mdui.NAMESPACE: mdui,
mdattr.NAMESPACE: mdattr,
dri.NAMESPACE: dri,
ui.NAMESPACE: ui,
idpdisc.NAMESPACE: idpdisc,
md.NAMESPACE: md,
xmldsig.NAMESPACE: xmldsig,
xmlenc.NAMESPACE: xmlenc
}
ATTRCONV = ac_factory(full_path("attributemaps"))
METADATACONF = {
"1": {
"local": [full_path("swamid-1.0.xml")]
},
"2": {
"local": [full_path("InCommon-metadata.xml")]
},
"3": {
"local": [full_path("extended.xml")]
},
"7": {
"local": [full_path("metadata_sp_1.xml"),
full_path("InCommon-metadata.xml")],
"remote": [
{"url": "https://kalmar2.org/simplesaml/module.php/aggregator/?id=kalmarcentral2&set=saml2",
"cert": full_path("kalmar2.pem")}]
},
"4": {
"local": [full_path("metadata_example.xml")]
},
"5": {
"local": [full_path("metadata.aaitest.xml")]
},
"8": {
"mdfile": [full_path("swamid.md")]
}
}
def _eq(l1, l2):
return set(l1) == set(l2)
def _fix_valid_until(xmlstring):
new_date = datetime.datetime.now() + datetime.timedelta(days=1)
new_date = new_date.strftime("%Y-%m-%dT%H:%M:%SZ")
return re.sub(r' validUntil=".*?"', ' validUntil="%s"' % new_date,
xmlstring)
def test_swami_1():
UMU_IDP = 'https://idp.umu.se/saml2/idp/metadata.php'
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["1"])
assert len(mds) == 1 # One source
idps = mds.with_descriptor("idpsso")
assert idps.keys()
idpsso = mds.single_sign_on_service(UMU_IDP)
assert len(idpsso) == 1
assert destinations(idpsso) == [
'https://idp.umu.se/saml2/idp/SSOService.php']
_name = name(mds[UMU_IDP])
assert _name == u'Umeå University (SAML2)'
certs = mds.certs(UMU_IDP, "idpsso", "signing")
assert len(certs) == 1
sps = mds.with_descriptor("spsso")
assert len(sps) == 108
wants = mds.attribute_requirement('https://connect8.sunet.se/shibboleth')
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation'])
wants = mds.attribute_requirement('https://beta.lobber.se/shibboleth')
assert wants["required"] == []
lnamn = [d_to_local_name(mds.attrc, attr) for attr in wants["optional"]]
assert _eq(lnamn, ['eduPersonPrincipalName', 'mail', 'givenName', 'sn',
'eduPersonScopedAffiliation', 'eduPersonEntitlement'])
def test_incommon_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["2"])
print mds.entities()
assert mds.entities() > 1700
idps = mds.with_descriptor("idpsso")
print idps.keys()
assert len(idps) > 300 # ~ 18%
try:
_ = mds.single_sign_on_service('urn:mace:incommon:uiuc.edu')
except UnknownPrincipal:
pass
idpsso = mds.single_sign_on_service('urn:mace:incommon:alaska.edu')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://idp.alaska.edu/idp/profile/SAML2/Redirect/SSO']
sps = mds.with_descriptor("spsso")
acs_sp = []
for nam, desc in sps.items():
if "attribute_consuming_service" in desc:
acs_sp.append(nam)
assert len(acs_sp) == 0
# Look for attribute authorities
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
assert len(aas) == 180
def test_ext_2():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["3"])
# No specific binding defined
ents = mds.with_descriptor("spsso")
for binding in [BINDING_SOAP, BINDING_HTTP_POST, BINDING_HTTP_ARTIFACT,
BINDING_HTTP_REDIRECT]:
assert mds.single_logout_service(ents.keys()[0], binding, "spsso")
def test_example():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["4"])
assert len(mds.keys()) == 1
idps = mds.with_descriptor("idpsso")
assert idps.keys() == [
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php']
certs = mds.certs(
'http://xenosmilus.umdc.umu.se/simplesaml/saml2/idp/metadata.php',
"idpsso", "signing")
assert len(certs) == 1
def test_switch_1():
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["5"])
assert len(mds.keys()) > 160
idps = mds.with_descriptor("idpsso")
print idps.keys()
idpsso = mds.single_sign_on_service(
'https://aai-demo-idp.switch.ch/idp/shibboleth')
assert len(idpsso) == 1
print idpsso
assert destinations(idpsso) == [
'https://aai-demo-idp.switch.ch/idp/profile/SAML2/Redirect/SSO']
assert len(idps) > 30
aas = mds.with_descriptor("attribute_authority")
print aas.keys()
aad = aas['https://aai-demo-idp.switch.ch/idp/shibboleth']
print aad.keys()
assert len(aad["attribute_authority_descriptor"]) == 1
assert len(aad["idpsso_descriptor"]) == 1
sps = mds.with_descriptor("spsso")
dual = [eid for eid, ent in idps.items() if eid in sps]
print len(dual)
assert len(dual) == 0
def test_metadata_file():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
mds = MetadataStore(ONTS.values(), ATTRCONV, sec_config,
disable_ssl_certificate_validation=True)
mds.imp(METADATACONF["8"])
print len(mds.keys())
assert len(mds.keys()) == 560
def test_mdx_service():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.service("https://idp.umu.se/saml2/idp/metadata.php",
"idpsso_descriptor", "single_sign_on_service")
assert len(foo) == 1
assert foo.keys()[0] == BINDING_HTTP_REDIRECT
def test_mdx_certs():
sec_config.xmlsec_binary = sigver.get_xmlsec_binary(["/opt/local/bin"])
http = HTTPBase(verify=False, ca_bundle=None)
mdx = MetaDataMDX(ONTS.values(), ATTRCONV, "http://pyff-test.nordu.net",
sec_config, None, http)
foo = mdx.certs("https://idp.umu.se/saml2/idp/metadata.php", "idpsso")
assert len(foo) == 1
if __name__ == "__main__":
test_mdx_certs()
| bsd-2-clause | 1,031,400,579,156,348,700 | 30.063241 | 104 | 0.630233 | false |
landscapeio/astroid | test/unittest_manager.py | 1 | 4552 | # copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of astroid.
#
# astroid is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# astroid is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with astroid. If not, see <http://www.gnu.org/licenses/>.
from logilab.common.testlib import TestCase, unittest_main
import sys
from os.path import join, abspath, dirname
from astroid.manager import AstroidManager, _silent_no_wrap
from astroid.bases import BUILTINS
DATA = join(dirname(abspath(__file__)), 'data')
class AstroidManagerTC(TestCase):
def setUp(self):
self.manager = AstroidManager()
self.manager.astroid_cache.clear()
def test_ast_from_module(self):
import unittest
astroid = self.manager.ast_from_module(unittest)
self.assertEqual(astroid.pure_python, True)
import time
astroid = self.manager.ast_from_module(time)
self.assertEqual(astroid.pure_python, False)
def test_ast_from_class(self):
astroid = self.manager.ast_from_class(int)
self.assertEqual(astroid.name, 'int')
self.assertEqual(astroid.parent.frame().name, BUILTINS)
astroid = self.manager.ast_from_class(object)
self.assertEqual(astroid.name, 'object')
self.assertEqual(astroid.parent.frame().name, BUILTINS)
self.assertIn('__setattr__', astroid)
def _test_ast_from_zip(self, archive):
origpath = sys.path[:]
sys.modules.pop('mypypa', None)
archive_path = join(DATA, archive)
sys.path.insert(0, archive_path)
try:
module = self.manager.ast_from_module_name('mypypa')
self.assertEqual(module.name, 'mypypa')
self.assertTrue(module.file.endswith('%s/mypypa' % archive),
module.file)
finally:
# remove the module, else after importing egg, we don't get the zip
if 'mypypa' in self.manager.astroid_cache:
del self.manager.astroid_cache['mypypa']
del self.manager._mod_file_cache[('mypypa', None)]
if archive_path in sys.path_importer_cache:
del sys.path_importer_cache[archive_path]
sys.path = origpath
def test_ast_from_module_name_egg(self):
self._test_ast_from_zip('MyPyPa-0.1.0-py2.5.egg')
def test_ast_from_module_name_zip(self):
self._test_ast_from_zip('MyPyPa-0.1.0-py2.5.zip')
def test_from_directory(self):
obj = self.manager.project_from_files([DATA], _silent_no_wrap, 'data')
self.assertEqual(obj.name, 'data')
self.assertEqual(obj.path, join(DATA, '__init__.py'))
def test_project_node(self):
obj = self.manager.project_from_files([DATA], _silent_no_wrap, 'data')
expected = ['data', 'data.SSL1', 'data.SSL1.Connection1',
'data.absimport', 'data.all',
'data.appl', 'data.appl.myConnection', 'data.email', 'data.format',
'data.module', 'data.module2', 'data.noendingnewline',
'data.nonregr', 'data.notall']
self.assertListEqual(sorted(k for k in obj.keys()), expected)
def test_do_not_expose_main(self):
obj = self.manager.ast_from_module_name('__main__')
self.assertEqual(obj.name, '__main__')
self.assertEqual(obj.items(), [])
class BorgAstroidManagerTC(TestCase):
def test_borg(self):
"""test that the AstroidManager is really a borg, i.e. that two different
instances has same cache"""
first_manager = AstroidManager()
built = first_manager.ast_from_module_name(BUILTINS)
second_manager = AstroidManager()
        second_built = second_manager.ast_from_module_name(BUILTINS)
self.assertIs(built, second_built)
if __name__ == '__main__':
unittest_main()
| gpl-2.0 | -5,291,246,639,128,607,000 | 39.283186 | 87 | 0.641257 | false |
kubeflow/pipelines | test/sample-test/check_notebook_results.py | 1 | 4241 | # Copyright 2018-2021 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import subprocess
import utils
import yaml
from constants import RUN_LIST_PAGE_SIZE, DEFAULT_CONFIG
from kfp import Client
class NoteBookChecker(object):
def __init__(self, testname, result, run_pipeline, experiment_name, host, namespace='kubeflow'):
""" Util class for checking notebook sample test running results.
:param testname: test name in the json xml.
:param result: name of the file that stores the test result
:param run_pipeline: whether to submit for a pipeline run.
        :param experiment_name: Name of the experiment to monitor.
        :param host: The hostname of the KFP API endpoint.
        :param namespace: where the pipeline system is deployed.
"""
self._testname = testname
self._result = result
self._exit_code = None
self._run_pipeline = run_pipeline
self._host = host
self._namespace = namespace
self._experiment_name = experiment_name
def run(self):
""" Run the notebook sample as a python script. """
self._exit_code = str(
subprocess.call(['ipython', '%s.py' % self._testname]))
def check(self):
""" Check the pipeline running results of the notebook sample. """
test_cases = []
test_name = self._testname + ' Sample Test'
###### Write the script exit code log ######
utils.add_junit_test(test_cases, 'test script execution',
(self._exit_code == '0'),
'test script failure with exit code: '
+ self._exit_code)
try:
with open(DEFAULT_CONFIG, 'r') as f:
raw_args = yaml.safe_load(f)
except yaml.YAMLError as yamlerr:
raise RuntimeError('Illegal default config:{}'.format(yamlerr))
except OSError as ose:
raise FileExistsError('Default config not found:{}'.format(ose))
else:
test_timeout = raw_args['test_timeout']
if self._run_pipeline:
experiment = self._experiment_name
###### Initialization ######
client = Client(host=self._host)
###### Get experiments ######
experiment_id = client.get_experiment(experiment_name=experiment).id
###### Get runs ######
list_runs_response = client.list_runs(page_size=RUN_LIST_PAGE_SIZE,
experiment_id=experiment_id)
###### Check all runs ######
for run in list_runs_response.runs:
run_id = run.id
response = client.wait_for_run_completion(run_id, test_timeout)
succ = (response.run.status.lower()=='succeeded')
utils.add_junit_test(test_cases, 'job completion',
succ, 'waiting for job completion failure')
###### Output Argo Log for Debugging ######
workflow_json = client._get_workflow_json(run_id)
workflow_id = workflow_json['metadata']['name']
print("Argo Workflow Name: ", workflow_id)
argo_log, _ = utils.run_bash_command(
'argo logs {} -n {}'.format(workflow_id, self._namespace))
print("=========Argo Workflow Log=========")
print(argo_log)
if not succ:
utils.write_junit_xml(test_name, self._result, test_cases)
exit(1)
###### Write out the test result in junit xml ######
utils.write_junit_xml(test_name, self._result, test_cases)
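# Illustrative usage sketch (not part of the original harness): the test name,
# result file, experiment name and host below are hypothetical placeholders.
if __name__ == '__main__':
    checker = NoteBookChecker(testname='lightweight_component',
                              result='junit_lightweight_component.xml',
                              run_pipeline=True,
                              experiment_name='lightweight-component-sample',
                              host='http://localhost:8888')
    checker.run()    # executes lightweight_component.py via ipython
    checker.check()  # polls the KFP runs and writes the junit xml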
| apache-2.0 | -2,007,456,211,296,987,100 | 40.578431 | 100 | 0.578637 | false |
python-xlib/python-xlib | examples/xdamage.py | 1 | 4638 | #!/usr/bin/python
#
# examples/xdamage.py -- demonstrate damage extension
#
# Copyright (C) 2019 Mohit Garg <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
# Python 2/3 compatibility.
from __future__ import print_function
import sys
import os
# Change path so we find Xlib
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from Xlib import display, X, threaded,Xutil
import time
try:
import thread
except ModuleNotFoundError:
import _thread as thread
from Xlib.ext import damage
from PIL import Image, ImageTk
import traceback
def redraw(win, gc):
# win.clear_area()
win.fill_rectangle(gc, 0, 0, 60, 60)
def blink(display, win, gc, cols):
while 1:
time.sleep(2)
print('Changing color', cols[0])
gc.change(foreground = cols[0])
cols = (cols[1], cols[0])
redraw(win, gc)
display.flush()
def get_image_from_win(win, pt_w, pt_h, pt_x=0, pt_y=0):
try:
raw = win.get_image(pt_x, pt_y, pt_w, pt_h, X.ZPixmap, 0xffffffff)
image = Image.frombytes("RGB", (pt_w, pt_h), raw.data, "raw", "BGRX")
return image
except Exception:
traceback.print_exc()
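# For illustration (an assumption, not upstream example code): the PIL image
# returned above can be saved straight to disk, e.g.
#   img = get_image_from_win(win, 100, 100)
#   if img is not None:
#       img.save('snapshot.png')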
def check_ext(disp):
# Check for extension
if not disp.has_extension('DAMAGE'):
sys.stderr.write('server does not have the DAMAGE extension\n')
sys.stderr.write("\n".join(disp.list_extensions()))
if disp.query_extension('DAMAGE') is None:
sys.exit(1)
else:
r = disp.damage_query_version()
print('DAMAGE version {}.{}'.format(r.major_version, r.minor_version))
def main():
d = display.Display()
root = d.screen().root
check_ext(d)
colormap = d.screen().default_colormap
red = colormap.alloc_named_color("red").pixel
blue = colormap.alloc_named_color("blue").pixel
background = colormap.alloc_named_color("white").pixel
window1 = root.create_window(100, 100, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window1.set_wm_name('Changing Window')
window1.map()
gc = window1.create_gc(foreground = red)
thread.start_new_thread(blink, (d, window1, gc, (blue, red)))
window1.damage_create(damage.DamageReportRawRectangles)
window1.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2 = root.create_window(100, 250, 250, 100, 1,
X.CopyFromParent, X.InputOutput,
X.CopyFromParent,
background_pixel = background,
event_mask = X.StructureNotifyMask | X.ExposureMask)
window2.set_wm_normal_hints(
flags=(Xutil.PPosition | Xutil.PSize | Xutil.PMinSize),
min_width=50,
min_height=50
)
window2.set_wm_name('Tracking Window')
window2.map()
while 1:
event = d.next_event()
if event.type == X.Expose:
if event.count == 0:
redraw(window1, gc)
elif event.type == d.extension_event.DamageNotify:
image = get_image_from_win(window1, event.area.width, event.area.height, event.area.x, event.area.y)
bgpm = window2.create_pixmap(image.width, image.height, d.screen().root_depth)
bggc = window2.create_gc(foreground=0, background=0)
bgpm.put_pil_image(bggc, 0, 0, image)
window2.copy_area(bggc, bgpm, 0, 0, image.width, image.height, 0, 0)
# bggc.free()
elif event.type == X.DestroyNotify:
sys.exit(0)
if __name__ == "__main__":
main()
| lgpl-2.1 | -16,081,992,391,398,216 | 32.366906 | 112 | 0.609314 | false |
ergs/rever | tests/test_tools.py | 1 | 1374 | """Rever tools tests"""
import os
import tempfile
import pytest
from rever.tools import indir, render_authors, hash_url, replace_in_file
@pytest.mark.parametrize('inp, pattern, new, leading_whitespace, exp', [
('__version__ = "wow.mom"', r'__version__\s*=.*', '__version__ = "WAKKA"',
True, '__version__ = "WAKKA"\n'),
(' __version__ = "wow.mom"', r' __version__\s*=.*',
' __version__ = "WAKKA"', False, ' __version__ = "WAKKA"\n'),
(' __version__ = "wow.mom"', r'__version__\s*=.*', '__version__ = "WAKKA"',
True, ' __version__ = "WAKKA"\n'),
])
def test_replace_in_file(inp, pattern, new, leading_whitespace, exp):
with tempfile.NamedTemporaryFile('w+t') as f:
f.write(inp)
f.seek(0)
replace_in_file(pattern, new, f.name, leading_whitespace)
f.seek(0)
obs = f.read()
assert exp == obs
def test_indir():
cur = os.getcwd()
new = os.path.dirname(cur)
with indir(new):
assert os.getcwd() == new
assert os.getcwd() == cur
def test_render_authors():
    for a, b in zip([(), ('Jane Doe',), ('Jane Doe', 'John Smith')], ['', 'Jane Doe', 'Jane Doe and John Smith']):
assert render_authors(a) == b
def test_hash_url_http():
hash_url('http://python.org')
def test_hash_url_ftp():
hash_url('ftp://ftp.astron.com/pub/file/file-5.33.tar.gz')
| bsd-3-clause | -2,316,216,988,696,159,700 | 29.533333 | 113 | 0.562591 | false |
lindegroup/lookback | config/models.py | 1 | 1764 | # This Python file uses the following encoding: utf-8
# Part of the Lookback project (https://github.com/lindegroup/lookback)
# Copyright 2015 The Linde Group Computer Support, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
from django.db import models
from config.crypt import CryptoHelper
class Configuration(models.Model):
"""This model stores all configuration options for accessing the external
system."""
url = models.URLField('URL')
user = models.CharField('Username', max_length=255)
password = models.CharField('Password', max_length=512)
# System type choices
JSS = 'JSS'
SYSTEM_TYPE_CHOICES = (
(JSS, 'JSS'),
)
system_type = models.CharField('System Type',
choices=SYSTEM_TYPE_CHOICES,
default=JSS,
max_length=32)
def save(self, *args, **kwargs):
"""automatically encrypt during save"""
helper = CryptoHelper()
self.password = helper.encrypt(self.password)
super(Configuration, self).save(*args, **kwargs)
def decrypt_password(self):
helper = CryptoHelper()
return helper.decrypt(self.password)
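# Illustrative round-trip sketch (not part of the original app; the values are
# placeholders, and note that save() re-encrypts on every call):
#
#   cfg = Configuration(url='https://jss.example.com', user='admin',
#                       password='s3cret', system_type=Configuration.JSS)
#   cfg.save()              # password is stored encrypted
#   cfg.decrypt_password()  # -> 's3cret'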
| apache-2.0 | 6,610,376,122,505,525,000 | 35.75 | 77 | 0.670635 | false |
uclouvain/OSIS-Louvain | base/models/enums/person_source_type.py | 1 | 1455 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.utils.translation import gettext_lazy as _
BASE = 'BASE'
DISSERTATION = 'DISSERTATION'
CHOICES = (
(BASE, _("Base")),
(DISSERTATION, _("Dissertation"))
)
| agpl-3.0 | -1,855,065,493,383,536,000 | 41.764706 | 87 | 0.645117 | false |
louiejtaylor/pyViKO | examples/batch.py | 1 | 1173 | if __name__ == '__main__':
#####
##temporary dev hack
import os,time
os.chdir('..')
#####
from pyviko import core, mutation, restriction
#ovr = core.readFasta('test/dem/over/2000.fasta')
#toKO = core.readFasta('test/dem/ko/2000.fasta')
'''#True batch script
t1 = time.time()
for i in range(len(toKO)):
m = mutation.Mutant(toKO[i][1],numMutations=1,regEx=True)
m.setOverGene(overSeq = ovr[i][1])
#print toKO[i][1]
print m.findMutants(ignoreRxSites = False, rSiteLength='all')[:5]
print
print m.findMutants()[:5]
print "done "+str(i)
print
print time.time()-t1
'''
#testing RC search
ovr=['ATGATTACCCGGGTTTCCCAAAGGGTTTCATCCTAA']
z=''' TTACCCGGGTTTCCCAAAGGGTTTCAT'''
toKO = ['ATGAAACCCTTTGGGAAACCCGGGTAA']
t1 = time.time()
	for i in range(len(toKO))[:1]:
		# here toKO/ovr hold plain sequence strings, not the (header, seq)
		# fasta tuples used in the batch script above, so no [1] indexing
		m = mutation.Mutant(toKO[i], numMutations=1, regEx=True)
		m.setOverGene(overSeq=ovr[i])
		#print(toKO[i])
		print(m.findMutants(ignoreRxSites=False, rSiteLength='all')[:5])
		print()
		print(m.findMutants()[:5])
		print("done " + str(i))
		print()
	print(time.time() - t1)
#overlaps = [core.findOverlap(toKO[i][1],ovr[i][1]) for i in range(len(toKO))]
#print overlaps
| mit | 7,485,453,507,291,470,000 | 26.302326 | 79 | 0.658994 | false |
tangledhelix/dp_pp_utils | make_project.py | 1 | 11438 | #!/usr/bin/env python3
import json
import requests
import os
import sys
import re
import shutil
from os.path import basename
from jinja2 import Template
from subprocess import call
from trello import TrelloClient
from zipfile import ZipFile
AUTH_CONFIG = "auth-config.json"
TRELLO_TEMPLATE = "TEMPLATE: PPgen workflow"
PGDP_URL = "https://www.pgdp.net"
GITHUB_REMOTE = "origin"
GITHUB_BRANCH = "main"
# Set true to assume we'll use ppgen; false otherwise
PPGEN = True
class MakeProject():
def __init__(self):
self.dp_base = f"{os.environ['HOME']}/dp"
self.projects_base = f"{self.dp_base}/pp"
self.template_dir = f"{self.dp_base}/util/templates"
self.params = {}
self.trello_template = TRELLO_TEMPLATE
with open(f"{self.dp_base}/util/{AUTH_CONFIG}") as file:
self.auth = json.loads(file.read())
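    # Illustrative sketch of the expected AUTH_CONFIG layout, inferred from
    # the lookups in the methods below (an assumption; every value is a
    # hypothetical placeholder):
    #
    # {
    #     "git_site": "github",
    #     "pgdp":   {"username": "...", "password": "..."},
    #     "github": {"username": "...", "password": "..."},
    #     "gitlab": "<private token>",
    #     "trello": {"api_key": "...", "api_secret": "...",
    #                "token": "...", "token_secret": "..."}
    # }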
def get_param(self, param_name, prompt_text):
param_answer = input(f"{prompt_text}: ")
if param_name == "project_id":
param_answer = param_answer.replace("projectID", "")
self.params[param_name] = param_answer
def get_params(self):
self.get_param("project_name", 'Project name, e.g. "missfairfax"')
self.get_param("project_id", 'Project ID, e.g. "projectID5351bd1e5eca9"')
self.project_dir = f"{self.projects_base}/{self.params['project_name']}"
#self.params["kindlegen_dir"] = self.dp_base + "/kindlegen"
def pgdp_login(self):
payload = {
"destination": "/c/",
"userNM": self.auth["pgdp"]["username"],
"userPW": self.auth["pgdp"]["password"],
}
r = requests.post(f"{PGDP_URL}/c/accounts/login.php", data=payload)
if r.status_code != 200:
print("Error: unable to log into DP site")
sys.exit(1)
self.dp_cookie = r.headers["Set-Cookie"].split(";")[0]
def scrape_project_info(self):
r = requests.post(
f"{PGDP_URL}/c/project.php?id=projectID{self.params['project_id']}",
headers={"Cookie": self.dp_cookie}
)
if r.status_code != 200:
print("Error: unable to retrieve DP project info")
sys.exit(1)
html_doc = re.sub(r"\n", "", r.text)
self.params["title"] = re.sub(
#
# This version broke on cavalry. Changing " to <, see if it works
# r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^"]+)</td>.*',
#
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Title</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Title</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
self.params["author"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r'.*<td[^>]+><b>Author</b></td><td[^>]+>([^<]+)</td>.*',
#
r'.*<th\s+class=["\']label["\']>Author</th>\s*<td[^>]+>([^<]+)</td>.*',
r"\1",
html_doc
)
#<tr><th class='label'>Forum</th><td colspan='4'><a href='https://www.pgdp.net/phpBB3/viewtopic.php?t=63502'>Discuss this project</a> (19 replies)</td></tr>
self.params["forum_link"] = re.sub(
# This version broke on irishjournal, the site updated
# to use th instead of tr... updating to match site.
#r".*<td[^>]+><b>Forum</b></td><td[^>]+><a href='([^']+)'>.*",
#<a href='([^']+)'>
#
r".*<th\s+class=.label.>Forum</th>\s*<td[^>]+>\s*<a href='([^']+)'.*",
r"\1",
html_doc
)
def create_directories(self):
os.mkdir(self.project_dir, mode=0o755)
os.chdir(self.project_dir)
os.mkdir("images", mode=0o755)
os.mkdir("illustrations", mode=0o755)
os.mkdir("pngs", mode=0o755)
def create_git_repository(self):
call(["git", "init"])
call(["git", "add", "."])
call(["git", "commit", "-m", "Initial import from DP"])
call(["git", "remote", "add", GITHUB_REMOTE, self.git_remote_url])
call(["git", "push", "-u", GITHUB_REMOTE, GITHUB_BRANCH])
def process_template(self, src_filename, dst_filename=None):
if not dst_filename:
dst_filename = src_filename
with open(f"{self.template_dir}/{src_filename}") as file:
template = Template(file.read())
with open(f"{self.project_dir}/{dst_filename}", "w") as file:
file.write(template.render(self.params))
def copy_text_file(self):
project_id = self.params["project_id"]
project_name = self.params["project_name"]
project_dir = self.project_dir
input_file = f"{project_dir}/projectID{project_id}.txt"
if PPGEN:
output_file = f"{project_dir}/{project_name}-src.txt"
else:
output_file = f"{project_dir}/{project_name}-utf8.txt"
shutil.copyfile(input_file, output_file)
def make_github_repo(self):
headers = {
"Accept": "application/vnd.github.v3+json",
"Content-Type": "application/json",
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": 'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"private": False,
"has_issues": False,
"has_wiki": False,
"has_downloads": False,
"auto_init": False,
}
auth_data = (
self.auth["github"]["username"],
self.auth["github"]["password"],
)
r = requests.post("https://api.github.com/user/repos",
auth=auth_data, headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created GitHub repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["clone_url"].replace(
"github.com",
self.auth["github"]["username"] + "@github.com"
)
else:
print(f"ERROR: GitHub response code {r.status_code} unexpected.")
def make_gitlab_repo(self):
headers = {
"Content-Type": "application/json",
"PRIVATE-TOKEN": self.auth["gitlab"],
}
payload = {
"name": f"DP_{self.params['project_name']}",
"description": f'DP PP project "{self.params["title"]}" ID {self.params["project_id"]}',
"visibility": "private",
"issues_enabled": False,
"merge_requests_enabled": False,
"jobs_enabled": False,
"wiki_enabled": False,
"snippets_enabled": False,
"container_registry_enabled": False,
"shared_runners_enabled": False,
"lfs_enabled": False,
"request_access_enabled": False,
}
r = requests.post("https://gitlab.com/api/v4/projects",
headers=headers,
data=json.dumps(payload))
if r.status_code == 201:
print("Created Gitlab repository")
json_response = json.loads(r.text)
self.git_remote_url = json_response["ssh_url_to_repo"]
else:
print(f"ERROR: Gitlab response code {r.status_code} unexpected.")
print(r.text)
def make_online_repo(self):
if self.auth["git_site"] == "github":
project.make_github_repo()
elif self.auth["git_site"] == "gitlab":
project.make_gitlab_repo()
def make_trello_board(self):
client = TrelloClient(
api_key=self.auth["trello"]["api_key"],
api_secret=self.auth["trello"]["api_secret"],
token=self.auth["trello"]["token"],
token_secret=self.auth["trello"]["token_secret"],
)
template = None
for board in client.list_boards():
if board.name == self.trello_template:
template = board
break
new_board = client.add_board(
f"DP: {self.params['title']}",
source_board=template,
permission_level="public"
)
for _list in new_board.list_lists():
if _list.name == "Notes":
for _card in _list.list_cards():
if _card.name == "Project info":
info_card = _card
break
break
new_description = info_card.desc.replace(
"{{PROJECT_NAME}}", self.params["project_name"]
).replace(
"{{PROJECT_ID}}", self.params["project_id"]
)
info_card.set_description(new_description)
self.params["trello_url"] = new_board.url
print(f"Created Trello board - {new_board.url}")
def download_text(self):
print("Downloading text from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}.zip"
url = f"{PGDP_URL}/projects/projectID{self.params['project_id']}/projectID{self.params['project_id']}.zip"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, self.project_dir)
print(" done.")
def download_images(self):
print("Downloading images from DP ...", end="", flush=True)
zipfile = f"projectID{self.params['project_id']}images.zip"
url = f"{PGDP_URL}/c/tools/download_images.php?projectid=projectID{self.params['project_id']}"
r = requests.get(url, headers={"Cookie": self.dp_cookie})
with open(zipfile, "wb") as file:
file.write(r.content)
self.unzip_file(zipfile, f"{self.project_dir}/pngs")
print(" done.")
def unzip_file(self, filename, path):
with ZipFile(filename, "r") as zip_ref:
zip_ref.extractall(path)
os.remove(filename)
if __name__ == "__main__":
# By default, create remote resources like Trello & GitHub.
CREATE_REMOTE = True
# Process arguments, if any
if len(sys.argv) >= 2:
if sys.argv[1] == "-h" or sys.argv[1] == "--help":
print(f"Usage: {sys.argv[0]} [<option(s)>]")
print(" -h, --help: print this help")
print(" -l, --local: only create local resources (for debug)")
sys.exit(1)
elif sys.argv[1] == "-l" or sys.argv[1] == "--local":
CREATE_REMOTE = False
project = MakeProject()
project.get_params()
project.pgdp_login()
project.scrape_project_info()
project.create_directories()
project.download_text()
project.download_images()
# Make a copy of the text to work on
project.copy_text_file()
if CREATE_REMOTE:
project.make_online_repo()
project.make_trello_board()
project.process_template("Makefile")
project.process_template("README.md")
project.process_template("pp-gitignore", ".gitignore")
if CREATE_REMOTE:
# This is only done if remote, because it will try to push.
project.create_git_repository()
| mit | -6,135,142,467,199,165,000 | 34.302469 | 164 | 0.539255 | false |
pydanny/django-easy-profiles | setup.py | 1 | 1088 | from setuptools import setup, find_packages
version = '0.1.0'
LONG_DESCRIPTION = open("README.rst").read()
setup(
name='django-easy-profiles',
version=version,
description="django-easy-profiles",
long_description=LONG_DESCRIPTION,
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: JavaScript",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='forms,django,profiles,middleware',
author='Daniel Greenfeld',
author_email='[email protected]',
url='http://github.com/pydanny/django-easy-profiles',
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
) | mit | 5,192,354,460,655,273,000 | 32 | 71 | 0.628676 | false |
sahlinet/fastapp | fastapp/api_serializers.py | 1 | 3087 | from rest_framework import serializers
from rest_framework.reverse import reverse
from fastapp.models import Base, Apy, Setting, Counter, TransportEndpoint, Transaction, LogEntry
import logging
logger = logging.getLogger(__name__)
class CounterSerializer(serializers.ModelSerializer):
class Meta:
model = Counter
fields = ('executed', 'failed')
class LogSerializer(serializers.ModelSerializer):
class Meta:
model = LogEntry
fields = ('level', 'msg', 'created', )
class TransactionSerializer(serializers.ModelSerializer):
logs = LogSerializer(many=True, read_only=True)
class Meta:
model = Transaction
fields = ('rid', 'tin', 'tout', 'status', 'created', 'modified', 'async', 'logs', )
class ApySerializer(serializers.ModelSerializer):
counter = CounterSerializer(many=False, read_only=True)
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'counter', 'description', 'public', 'schedule', 'everyone')
def save_object(self, obj, **kwargs):
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
class PublicApySerializer(serializers.ModelSerializer):
"""
Return all Apy objects which are made public. Enrich
"""
first_lastname = serializers.SerializerMethodField(method_name="creator")
base = serializers.SerializerMethodField(method_name="base_name")
url = serializers.SerializerMethodField(method_name="detail_view")
class Meta:
model = Apy
fields = ('id', 'name', 'module', 'description',
'first_lastname', 'url', 'base')
def creator(self, obj):
try:
user = obj.base.user
return user.first_name + " " + user.last_name
        except Base.DoesNotExist as e:
            logger.warning(e)
def base_name(self, obj):
return obj.base.name
def detail_view(self, obj):
return reverse('public-apy-detail', args=[obj.pk],
request=self.context['request'])
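# Illustrative usage (standard DRF pattern, not project-specific code): the
# serializer must receive the request in its context so detail_view() can
# build an absolute URL:
#
#   data = PublicApySerializer(apy, context={'request': request}).data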
class SettingSerializer(serializers.ModelSerializer):
class Meta:
model = Setting
fields = ('id', 'key', 'value', 'public')
class TransportEndpointSerializer(serializers.ModelSerializer):
class Meta:
model = TransportEndpoint
fields = ('id', 'url', 'override_settings_priv',
'override_settings_pub', 'token')
class BaseSerializer(serializers.ModelSerializer):
apy = serializers.PrimaryKeyRelatedField(many=True, read_only=True)
state = serializers.Field()
executors = serializers.Field()
foreign_apys = serializers.HyperlinkedRelatedField(
many=True,
read_only=False,
view_name='public-apy-detail'
)
class Meta:
model = Base
fields = ('id', 'name', 'state', 'uuid',
'executors', 'content', 'foreign_apys', 'public', 'static_public',)
def save_object(self, obj, **kwargs):
super(BaseSerializer, self).save_object(obj, **kwargs)
logger.debug("save_and_sync")
obj.save_and_sync(**kwargs)
| mit | -5,149,677,560,269,219,000 | 29.564356 | 101 | 0.63816 | false |
kern3020/opportunity | opportunity/tracker/migrations/0002_auto__add_mentorship.py | 1 | 13807 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Mentorship'
db.create_table('tracker_mentorship', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('jobseeker', self.gf('django.db.models.fields.related.ForeignKey')(related_name='jobseeker', to=orm['tracker.UserProfile'])),
('mentor', self.gf('django.db.models.fields.related.ForeignKey')(related_name='mentor', to=orm['tracker.UserProfile'])),
('coach', self.gf('django.db.models.fields.related.ForeignKey')(related_name='coach', to=orm['tracker.UserProfile'])),
('startDate', self.gf('django.db.models.fields.DateField')()),
))
db.send_create_signal('tracker', ['Mentorship'])
def backwards(self, orm):
# Deleting model 'Mentorship'
db.delete_table('tracker_mentorship')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'tracker.apply': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Apply'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.company': {
'Meta': {'ordering': "['name']", 'object_name': 'Company'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'division': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'state_province': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'}),
'zipCode': ('django.db.models.fields.CharField', [], {'max_length': '16', 'null': 'True', 'blank': 'True'})
},
'tracker.conversation': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Conversation'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'via': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.gratitude': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Gratitude'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.interview': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Interview'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'position': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Position']", 'unique': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.lunch': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Lunch'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'when': ('django.db.models.fields.DateField', [], {}),
'withWhom': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Person']"})
},
'tracker.mentorship': {
'Meta': {'object_name': 'Mentorship'},
'coach': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'coach'", 'to': "orm['tracker.UserProfile']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'jobseeker': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'jobseeker'", 'to': "orm['tracker.UserProfile']"}),
'mentor': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'mentor'", 'to': "orm['tracker.UserProfile']"}),
'startDate': ('django.db.models.fields.DateField', [], {})
},
'tracker.networking': {
'Meta': {'ordering': "['-when', 'time']", 'object_name': 'Networking'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'time': ('django.db.models.fields.TimeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'venue': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'when': ('django.db.models.fields.DateField', [], {})
},
'tracker.onlinepresence': {
'Meta': {'ordering': "['name']", 'object_name': 'OnlinePresence'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.par': {
'Meta': {'ordering': "['question']", 'object_name': 'PAR'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'par_response': ('django.db.models.fields.TextField', [], {}),
'question': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.person': {
'Meta': {'object_name': 'Person'},
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.pitch': {
'Meta': {'object_name': 'Pitch'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'role': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'thePitch': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"})
},
'tracker.position': {
'Meta': {'ordering': "['title']", 'object_name': 'Position'},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'company': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.Company']", 'unique': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['tracker.UserProfile']"}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200'})
},
'tracker.userprofile': {
'Meta': {'object_name': 'UserProfile'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['tracker'] | mit | -6,980,998,740,839,843,000 | 71.673684 | 182 | 0.539581 | false |
iafan/zing | tests/settings.py | 1 | 3639 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
# Copyright (C) Zing contributors.
#
# This file is a part of the Zing project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
"""Test settings."""
import os
SECRET_KEY = "test_secret_key"
# Ideally this setting would be set on a per-test basis. Unfortunately some
# code, such as `django.utils.timezone.get_default_timezone`, reads from this
# setting while also being behind a `lru_cache` decorator, which makes it
# impossible to alter the value at runtime because decorators are applied at
# function definition time.
TIME_ZONE = 'Europe/Amsterdam'
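# For illustration (an assumption, not project code): most other settings can
# still be overridden per test with django.test.override_settings, e.g.:
#
#   from django.test import override_settings
#
#   @override_settings(POOTLE_CAPTCHA_ENABLED=True)
#   def test_captcha_flow():
#       ...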
ROOT_DIR = os.path.dirname(os.path.abspath(os.path.dirname(__file__)))
POOTLE_TRANSLATION_DIRECTORY = os.path.join(ROOT_DIR, 'pytest_pootle', 'data', 'po')
POOTLE_MT_BACKENDS = [
['fake_test_backend', 'api_key']
]
POOTLE_CAPTCHA_ENABLED = False
MIDDLEWARE = [
#: Must be as high as possible (see above)
'django.middleware.cache.UpdateCacheMiddleware',
#: Avoids caching for authenticated users
'pootle.middleware.cache.CacheAnonymousOnly',
#: Protect against clickjacking and numerous xss attack techniques
'django.middleware.clickjacking.XFrameOptionsMiddleware',
#: Support for e-tag
'django.middleware.http.ConditionalGetMiddleware',
#: Protection against cross-site request forgery
'django.middleware.csrf.CsrfViewMiddleware',
#: Must be before authentication
'django.contrib.sessions.middleware.SessionMiddleware',
#: Must be before anything user-related
'pootle.middleware.auth.AuthenticationMiddleware',
#: User-related
'django.middleware.locale.LocaleMiddleware',
#: Sets Python's locale based on request's locale for sorting, etc.
'pootle.middleware.setlocale.SetLocale',
#: Nice 500 and 403 pages (must be after locale to have translated versions)
'pootle.middleware.errorpages.ErrorPagesMiddleware',
'django.middleware.common.CommonMiddleware',
#: Must be early in the response cycle (close to bottom)
'pootle.middleware.captcha.CaptchaMiddleware',
]
# Using the only Redis DB for testing
CACHES = {
# Must set up entries for persistent stores here because we have a check in
# place that will abort everything otherwise
'default': {
'BACKEND': 'pootle.core.cache.DummyCache',
},
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://127.0.0.1:6379/15',
'TIMEOUT': None,
# FIXME: can't use `fakeredis` here as django-redis' `incr` uses Redis'
# `eval()` command, which is unsupported in `fakeredis`.
},
'stats': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': 'redis://127.0.0.1:6379/15',
'TIMEOUT': None,
'OPTIONS': {
'REDIS_CLIENT_CLASS': 'fakeredis.FakeStrictRedis',
},
},
}
# Using synchronous mode for testing
RQ_QUEUES = {
'default': {
'USE_REDIS_CACHE': 'redis',
'DEFAULT_TIMEOUT': 360,
'ASYNC': False,
},
}
# Mail server settings
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Faster password hasher
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
SILENCED_SYSTEM_CHECKS = [
'pootle.C005', # Silence the RedisCache check as we use a dummy cache
'pootle.C017', # Distinct redis DB numbers for default, redis, stats
'pootle.W005', # DEBUG = True
'pootle.W011', # POOTLE_CONTACT_EMAIL has default setting
]
| gpl-3.0 | -5,269,662,005,326,668,000 | 32.694444 | 84 | 0.692223 | false |
adhish20/TwitterWithCassandra | users/forms.py | 1 | 1902 | import uuid
from django import forms
import cass
class LoginForm(forms.Form):
username = forms.CharField(max_length=30)
password = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password']
try:
user = cass.get_user_by_username(username)
except cass.DatabaseError:
raise forms.ValidationError(u'Invalid username and/or password')
if user.get('password') != password:
raise forms.ValidationError(u'Invalid username and/or password')
return self.cleaned_data
def get_username(self):
return self.cleaned_data['username']
class RegistrationForm(forms.Form):
username = forms.RegexField(regex=r'^\w+$', max_length=30)
password1 = forms.CharField(widget=forms.PasswordInput(render_value=False))
password2 = forms.CharField(widget=forms.PasswordInput(render_value=False))
def clean_username(self):
username = self.cleaned_data['username']
try:
cass.get_user_by_username(username)
raise forms.ValidationError(u'Username is already taken')
except cass.DatabaseError:
pass
return username
def clean(self):
if ('password1' in self.cleaned_data and 'password2' in
self.cleaned_data):
password1 = self.cleaned_data['password1']
password2 = self.cleaned_data['password2']
if password1 != password2:
raise forms.ValidationError(
u'You must type the same password each time')
return self.cleaned_data
def save(self):
username = self.cleaned_data['username']
password = self.cleaned_data['password1']
cass.save_user(username, password)
return username
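# Illustrative usage sketch (an assumption, standard Django form handling;
# the credentials are placeholders):
#
#   form = RegistrationForm({'username': 'jack',
#                            'password1': 'pw', 'password2': 'pw'})
#   if form.is_valid():
#       username = form.save()   # persists the user via cass.save_user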
| mit | 3,853,147,340,635,723,000 | 34.222222 | 79 | 0.638275 | false |
tobiasgehring/qudi | logic/sequence_generator_logic.py | 1 | 52322 | # -*- coding: utf-8 -*-
"""
This file contains the Qudi sequence generator logic for general sequence structure.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import numpy as np
import pickle
import os
import time
from qtpy import QtCore
from collections import OrderedDict
import inspect
import importlib
import sys
from logic.pulse_objects import PulseBlockElement
from logic.pulse_objects import PulseBlock
from logic.pulse_objects import PulseBlockEnsemble
from logic.pulse_objects import PulseSequence
from logic.generic_logic import GenericLogic
from logic.sampling_functions import SamplingFunctions
from logic.samples_write_methods import SamplesWriteMethods
class SequenceGeneratorLogic(GenericLogic, SamplingFunctions, SamplesWriteMethods):
"""unstable: Nikolas Tomek
This is the Logic class for the pulse (sequence) generation.
The basis communication with the GUI should be done as follows:
The logic holds all the created objects in its internal lists. The GUI is
able to view this list and get the element of this list.
    The logic constructs its objects according to configuration dicts. These
    dicts essentially state which parameters of either the PulseBlockElement
    objects or the PulseBlock objects can be changed and set via the GUI.
    In the end the information transfer happens through lists (read by the
    GUI) and dicts (set by the GUI). The logic sets (creates) the objects in
    the lists and reads the dicts, which tell it which parameters to expect
    from the GUI.
"""
_modclass = 'sequencegeneratorlogic'
_modtype = 'logic'
# define signals
sigBlockDictUpdated = QtCore.Signal(dict)
sigEnsembleDictUpdated = QtCore.Signal(dict)
sigSequenceDictUpdated = QtCore.Signal(dict)
sigSampleEnsembleComplete = QtCore.Signal(str, np.ndarray, np.ndarray)
sigSampleSequenceComplete = QtCore.Signal(str, list)
sigCurrentBlockUpdated = QtCore.Signal(object)
sigCurrentEnsembleUpdated = QtCore.Signal(object)
sigCurrentSequenceUpdated = QtCore.Signal(object)
sigSettingsUpdated = QtCore.Signal(list, str, float, dict, str)
sigPredefinedSequencesUpdated = QtCore.Signal(dict)
sigPredefinedSequenceGenerated = QtCore.Signal(str)
def __init__(self, config, **kwargs):
super().__init__(config=config, **kwargs)
self.log.info('The following configuration was found.')
# checking for the right configuration
for key in config.keys():
self.log.info('{0}: {1}'.format(key, config[key]))
# Get all the attributes from the SamplingFunctions module:
SamplingFunctions.__init__(self)
# Get all the attributes from the SamplesWriteMethods module:
SamplesWriteMethods.__init__(self)
# here the currently shown data objects of the editors should be stored
self.current_block = None
self.current_ensemble = None
self.current_sequence = None
# The created PulseBlock objects are saved in this dictionary. The keys are the names.
self.saved_pulse_blocks = OrderedDict()
# The created PulseBlockEnsemble objects are saved in this dictionary.
# The keys are the names.
self.saved_pulse_block_ensembles = OrderedDict()
# The created Sequence objects are saved in this dictionary. The keys are the names.
self.saved_pulse_sequences = OrderedDict()
if 'pulsed_file_dir' in config.keys():
self.pulsed_file_dir = config['pulsed_file_dir']
if not os.path.exists(self.pulsed_file_dir):
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
                self.log.warning('The directory defined in "pulsed_file_dir" in the config for '
'SequenceGeneratorLogic class does not exist! The default home '
'directory\n{0}'
'\nwill be taken instead.'.format(self.pulsed_file_dir))
else:
homedir = self.get_home_dir()
self.pulsed_file_dir = os.path.join(homedir, 'pulsed_files')
self.log.warning('No directory with the attribute "pulsed_file_dir" is defined for the '
'SequenceGeneratorLogic! The default home directory\n{0}\nwill be '
'taken instead.'.format(self.pulsed_file_dir))
# Byte size of the max. memory usage during sampling/write-to-file process
if 'overhead_bytes' in config.keys():
self.sampling_overhead_bytes = config['overhead_bytes']
else:
self.sampling_overhead_bytes = None
self.log.warning('No max. memory overhead specified in config.\nIn order to avoid '
'memory overflow during sampling/writing of Pulse objects you must '
'set "overhead_bytes".')
# directory for additional generate methods to import
# (other than qudi/logic/predefined_methods)
if 'additional_methods_dir' in config.keys():
if os.path.exists(config['additional_methods_dir']):
self.additional_methods_dir = config['additional_methods_dir']
else:
self.additional_methods_dir = None
self.log.error('Specified path "{0}" for import of additional generate methods '
'does not exist.'.format(config['additional_methods_dir']))
else:
self.additional_methods_dir = None
self.block_dir = self._get_dir_for_name('pulse_block_objects')
self.ensemble_dir = self._get_dir_for_name('pulse_ensemble_objects')
self.sequence_dir = self._get_dir_for_name('sequence_objects')
self.waveform_dir = self._get_dir_for_name('sampled_hardware_files')
self.temp_dir = self._get_dir_for_name('temporary_files')
# Information on used channel configuration for sequence generation
# IMPORTANT: THIS CONFIG DOES NOT REPRESENT THE ACTUAL SETTINGS ON THE HARDWARE
self.analog_channels = 2
self.digital_channels = 4
self.activation_config = ['a_ch1', 'd_ch1', 'd_ch2', 'a_ch2', 'd_ch3', 'd_ch4']
self.laser_channel = 'd_ch1'
self.amplitude_dict = OrderedDict({'a_ch1': 0.5, 'a_ch2': 0.5, 'a_ch3': 0.5, 'a_ch4': 0.5})
self.sample_rate = 25e9
# The file format for the sampled hardware-compatible waveforms and sequences
self.waveform_format = 'wfmx' # can be 'wfmx', 'wfm' or 'fpga'
self.sequence_format = 'seq' # only .seq file format
# a dictionary with all predefined generator methods and measurement sequence names
self.generate_methods = None
def on_activate(self):
""" Initialisation performed during activation of the module.
"""
self._get_blocks_from_file()
self._get_ensembles_from_file()
self._get_sequences_from_file()
self._attach_predefined_methods()
if 'activation_config' in self._statusVariables:
self.activation_config = self._statusVariables['activation_config']
if 'laser_channel' in self._statusVariables:
self.laser_channel = self._statusVariables['laser_channel']
if 'amplitude_dict' in self._statusVariables:
self.amplitude_dict = self._statusVariables['amplitude_dict']
if 'sample_rate' in self._statusVariables:
self.sample_rate = self._statusVariables['sample_rate']
if 'waveform_format' in self._statusVariables:
self.waveform_format = self._statusVariables['waveform_format']
self.analog_channels = len([chnl for chnl in self.activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in self.activation_config if 'd_ch' in chnl])
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
def on_deactivate(self):
""" Deinitialisation performed during deactivation of the module.
"""
self._statusVariables['activation_config'] = self.activation_config
self._statusVariables['laser_channel'] = self.laser_channel
self._statusVariables['amplitude_dict'] = self.amplitude_dict
self._statusVariables['sample_rate'] = self.sample_rate
self._statusVariables['waveform_format'] = self.waveform_format
def _attach_predefined_methods(self):
"""
        Retrieve all predefined-method files from the folder and attach their
        generator methods to this class.
@return:
"""
self.generate_methods = OrderedDict()
filenames_list = []
additional_filenames_list = []
# The assumption is that in the directory predefined_methods, there are
# *.py files, which contain only methods!
path = os.path.join(self.get_main_dir(), 'logic', 'predefined_methods')
for entry in os.listdir(path):
filepath = os.path.join(path, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
filenames_list.append(entry[:-3])
# Also attach methods from the non-default additional methods directory if defined in config
if self.additional_methods_dir is not None:
# attach to path
sys.path.append(self.additional_methods_dir)
for entry in os.listdir(self.additional_methods_dir):
filepath = os.path.join(self.additional_methods_dir, entry)
if os.path.isfile(filepath) and entry.endswith('.py'):
additional_filenames_list.append(entry[:-3])
for filename in filenames_list:
mod = importlib.import_module('logic.predefined_methods.{0}'.format(filename))
# To allow changes in predefined methods during runtime by simply reloading
# sequence_generator_logic.
importlib.reload(mod)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
                                   'SequenceGeneratorLogic.'.format(method, filename))
for filename in additional_filenames_list:
mod = importlib.import_module(filename)
for method in dir(mod):
try:
# Check for callable function or method:
ref = getattr(mod, method)
if callable(ref) and (inspect.ismethod(ref) or inspect.isfunction(ref)):
# Bind the method as an attribute to the Class
setattr(SequenceGeneratorLogic, method, getattr(mod, method))
# Add method to dictionary if it is a generator method
if method.startswith('generate_'):
self.generate_methods[method[9:]] = eval('self.'+method)
except:
self.log.error('It was not possible to import element {0} from {1} into '
                                   'SequenceGeneratorLogic.'.format(method, filename))
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
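    # Illustrative sketch (an assumption about file layout): a module in
    # logic/predefined_methods only needs module-level callables; anything
    # whose name starts with 'generate_' is bound to this class and listed
    # in self.generate_methods, e.g. a hypothetical rabi.py containing:
    #
    #   def generate_rabi(self, name='rabi', tau_start=10e-9, tau_step=10e-9,
    #                     number_of_taus=50):
    #       ...
    #       return rabi_ensemble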
def _get_dir_for_name(self, name):
""" Get the path to the pulsed sub-directory 'name'.
@param str name: name of the folder
@return: str, absolute path to the directory with folder 'name'.
"""
path = os.path.join(self.pulsed_file_dir, name)
if not os.path.exists(path):
os.makedirs(os.path.abspath(path))
return os.path.abspath(path)
def request_init_values(self):
"""
@return:
"""
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentBlockUpdated.emit(self.current_block)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.sigPredefinedSequencesUpdated.emit(self.generate_methods)
return
def set_settings(self, activation_config, laser_channel, sample_rate, amplitude_dict, waveform_format):
"""
Sets all settings for the generator logic.
        @param activation_config: list of active channel names (e.g. 'a_ch1', 'd_ch2')
        @param laser_channel: name of the digital channel driving the laser
        @param sample_rate: sample rate of the pulse generator in Hz
        @param amplitude_dict: dict of amplitudes for the analog channels
        @param waveform_format: file format for the sampled waveforms
        @return: the actually applied settings
"""
# check if the currently chosen laser channel is part of the config and adjust if this
# is not the case. Choose first digital channel in that case.
if laser_channel not in activation_config:
laser_channel = None
for channel in activation_config:
if 'd_ch' in channel:
laser_channel = channel
break
if laser_channel is None:
self.log.warning('No digital channel present in sequence generator activation '
'config.')
self.laser_channel = laser_channel
self.activation_config = activation_config
self.analog_channels = len([chnl for chnl in activation_config if 'a_ch' in chnl])
self.digital_channels = len([chnl for chnl in activation_config if 'd_ch' in chnl])
self.amplitude_dict = amplitude_dict
self.sample_rate = sample_rate
self.waveform_format = waveform_format
self.sigSettingsUpdated.emit(activation_config, laser_channel, sample_rate, amplitude_dict,
waveform_format)
return self.activation_config, self.laser_channel, self.sample_rate, self.amplitude_dict, \
waveform_format
# -----------------------------------------------------------------------------
# BEGIN sequence/block generation
# -----------------------------------------------------------------------------
def get_saved_asset(self, name):
"""
Returns the data object for a saved Ensemble/Sequence with name "name". Searches in the
        saved assets for a Sequence object first. If no Sequence by that name could be found,
        search for Ensembles instead. If neither could be found, return None.
@param name: Name of the Sequence/Ensemble
@return: PulseSequence | PulseBlockEnsemble | None
"""
if name == '':
asset_obj = None
elif name in list(self.saved_pulse_sequences):
asset_obj = self.saved_pulse_sequences[name]
elif name in list(self.saved_pulse_block_ensembles):
asset_obj = self.saved_pulse_block_ensembles[name]
else:
asset_obj = None
self.log.warning('No PulseSequence or PulseBlockEnsemble by the name "{0}" could be '
'found in saved assets. Returning None.'.format(name))
return asset_obj
def save_block(self, name, block):
""" Serialize a PulseBlock object to a *.blk file.
@param name: string, name of the block to save
@param block: PulseBlock object which will be serialized
"""
# TODO: Overwrite handling
block.name = name
self.current_block = block
self.saved_pulse_blocks[name] = block
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def load_block(self, name):
"""
        @param str name: name of the PulseBlock to load as current block
        @return:
"""
if name not in self.saved_pulse_blocks:
self.log.error('PulseBlock "{0}" could not be found in saved pulse blocks. Load failed.'
''.format(name))
return
block = self.saved_pulse_blocks[name]
self.current_block = block
self.sigCurrentBlockUpdated.emit(self.current_block)
return
def delete_block(self, name):
""" Remove the serialized object "name" from the block list and HDD.
@param name: string, name of the PulseBlock object to be removed.
"""
if name in list(self.saved_pulse_blocks):
del(self.saved_pulse_blocks[name])
if hasattr(self.current_block, 'name'):
if self.current_block.name == name:
self.current_block = None
self.sigCurrentBlockUpdated.emit(self.current_block)
self._save_blocks_to_file()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
else:
self.log.warning('PulseBlock object with name "{0}" not found in saved '
'blocks.\nTherefore nothing is removed.'.format(name))
return
def _get_blocks_from_file(self):
""" Update the saved_pulse_block dict from file """
block_files = [f for f in os.listdir(self.block_dir) if 'block_dict.blk' in f]
if len(block_files) == 0:
self.log.info('No serialized block dict was found in {0}.'.format(self.block_dir))
self.saved_pulse_blocks = OrderedDict()
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
        # log an error if more than one file is present and use the last one found
if len(block_files) > 1:
self.log.error('More than one serialized block dict was found in {0}.\n'
'Using {1}.'.format(self.block_dir, block_files[-1]))
block_files = block_files[-1]
try:
with open(os.path.join(self.block_dir, block_files), 'rb') as infile:
self.saved_pulse_blocks = pickle.load(infile)
except:
self.saved_pulse_blocks = OrderedDict()
            self.log.error('Failed to deserialize block dict "{0}" from "{1}".'
                           ''.format(block_files, self.block_dir))
self.sigBlockDictUpdated.emit(self.saved_pulse_blocks)
return
def _save_blocks_to_file(self):
""" Saves the saved_pulse_block dict to file """
try:
with open(os.path.join(self.block_dir, 'block_dict.blk.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_blocks, outfile)
except:
            self.log.error('Failed to serialize block dict in "{0}".'
                           ''.format(os.path.join(self.block_dir, 'block_dict.blk.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
except WindowsError:
os.remove(os.path.join(self.block_dir, 'block_dict.blk'))
os.rename(os.path.join(self.block_dir, 'block_dict.blk.tmp'),
os.path.join(self.block_dir, 'block_dict.blk'))
return
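    # The write-to-temp-then-rename steps above sketch an atomic file update:
    # if pickling fails halfway, the previous 'block_dict.blk' stays intact.
    # On POSIX, os.rename() overwrites atomically; the WindowsError fallback
    # (remove, then rename) is only needed on Windows, where renaming onto an
    # existing file raises. A generic helper could look like this
    # (illustrative only, not used by this module):
    #
    #     def _atomic_pickle_dump(obj, path):
    #         tmp = path + '.tmp'
    #         with open(tmp, 'wb') as outfile:
    #             pickle.dump(obj, outfile)
    #         try:
    #             os.rename(tmp, path)
    #         except OSError:  # covers WindowsError on Windows
    #             os.remove(path)
    #             os.rename(tmp, path)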
def save_ensemble(self, name, ensemble):
""" Saves a PulseBlockEnsemble with name name to file.
@param str name: name of the ensemble, which will be serialized.
@param obj ensemble: a PulseBlockEnsemble object
"""
# TODO: Overwrite handling
ensemble.name = name
self.current_ensemble = ensemble
self.saved_pulse_block_ensembles[name] = ensemble
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
return
def load_ensemble(self, name):
"""
        @param str name: name of the PulseBlockEnsemble to load as current ensemble
        @return:
"""
if name not in self.saved_pulse_block_ensembles:
self.log.error('PulseBlockEnsemble "{0}" could not be found in saved pulse block '
'ensembles. Load failed.'.format(name))
return
ensemble = self.saved_pulse_block_ensembles[name]
# set generator settings if found in ensemble metadata
if ensemble.sample_rate is not None:
self.sample_rate = ensemble.sample_rate
if ensemble.amplitude_dict is not None:
self.amplitude_dict = ensemble.amplitude_dict
if ensemble.activation_config is not None:
self.activation_config = ensemble.activation_config
if ensemble.laser_channel is not None:
self.laser_channel = ensemble.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_ensemble = ensemble
self.sigCurrentEnsembleUpdated.emit(ensemble)
return
def delete_ensemble(self, name):
""" Remove the ensemble with 'name' from the ensemble list and HDD. """
if name in list(self.saved_pulse_block_ensembles):
del(self.saved_pulse_block_ensembles[name])
if hasattr(self.current_ensemble, 'name'):
if self.current_ensemble.name == name:
self.current_ensemble = None
self.sigCurrentEnsembleUpdated.emit(self.current_ensemble)
self._save_ensembles_to_file()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
else:
self.log.warning('PulseBlockEnsemble object with name "{0}" not found in saved '
'ensembles.\nTherefore nothing is removed.'.format(name))
return
def _get_ensembles_from_file(self):
""" Update the saved_pulse_block_ensembles dict from file """
ensemble_files = [f for f in os.listdir(self.ensemble_dir) if 'ensemble_dict.ens' in f]
if len(ensemble_files) == 0:
self.log.info('No serialized ensembles dict was found in {0}.'
''.format(self.ensemble_dir))
self.saved_pulse_block_ensembles = OrderedDict()
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
        # log an error if more than one file is present and use the last one found
if len(ensemble_files) > 1:
self.log.error('More than one serialized ensemble dict was found in {0}.\n'
'Using {1}.'.format(self.ensemble_dir, ensemble_files[-1]))
ensemble_files = ensemble_files[-1]
try:
with open(os.path.join(self.ensemble_dir, ensemble_files), 'rb') as infile:
self.saved_pulse_block_ensembles = pickle.load(infile)
except:
self.saved_pulse_block_ensembles = OrderedDict()
self.log.error('Failed to deserialize ensemble dict "{0}" from "{1}".'
''.format(ensemble_files, self.ensemble_dir))
self.sigEnsembleDictUpdated.emit(self.saved_pulse_block_ensembles)
return
def _save_ensembles_to_file(self):
""" Saves the saved_pulse_block_ensembles dict to file """
try:
with open(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_block_ensembles, outfile)
except:
self.log.error('Failed to serialize ensemble dict in "{0}".'
''.format(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
except WindowsError:
os.remove(os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
os.rename(os.path.join(self.ensemble_dir, 'ensemble_dict.ens.tmp'),
os.path.join(self.ensemble_dir, 'ensemble_dict.ens'))
return
def save_sequence(self, name, sequence):
""" Serialize the PulseSequence object with name 'name' to file.
@param str name: name of the sequence object.
@param object sequence: a PulseSequence object, which is going to be
serialized to file.
        @return:
"""
# TODO: Overwrite handling
sequence.name = name
self.current_sequence = sequence
self.saved_pulse_sequences[name] = sequence
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
def load_sequence(self, name):
"""
        @param str name: name of the PulseSequence to load as current sequence
        @return:
"""
if name not in self.saved_pulse_sequences:
self.log.error('PulseSequence "{0}" could not be found in saved pulse sequences. '
'Load failed.'.format(name))
return
sequence = self.saved_pulse_sequences[name]
        # set generator settings if found in sequence metadata
if sequence.sample_rate is not None:
self.sample_rate = sequence.sample_rate
if sequence.amplitude_dict is not None:
self.amplitude_dict = sequence.amplitude_dict
if sequence.activation_config is not None:
self.activation_config = sequence.activation_config
if sequence.laser_channel is not None:
self.laser_channel = sequence.laser_channel
self.sigSettingsUpdated.emit(self.activation_config, self.laser_channel, self.sample_rate,
self.amplitude_dict, self.waveform_format)
self.current_sequence = sequence
self.sigCurrentSequenceUpdated.emit(sequence)
return
def delete_sequence(self, name):
""" Remove the sequence "name" from the sequence list and HDD.
@param str name: name of the sequence object, which should be deleted.
"""
if name in list(self.saved_pulse_sequences):
del(self.saved_pulse_sequences[name])
if hasattr(self.current_sequence, 'name'):
if self.current_sequence.name == name:
self.current_sequence = None
self.sigCurrentSequenceUpdated.emit(self.current_sequence)
self._save_sequences_to_file()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
else:
            self.log.warning('PulseSequence object with name "{0}" not found in saved '
                             'sequences.\nTherefore nothing is removed.'.format(name))
return
def generate_predefined_sequence(self, predefined_sequence_name, args):
"""
        @param str predefined_sequence_name: name key of the generator method to call
        @param list args: positional arguments passed on to the generator method
        @return:
"""
gen_method = self.generate_methods[predefined_sequence_name]
try:
gen_method(*args)
except:
self.log.error('Generation of predefined sequence "{0}" failed.'
''.format(predefined_sequence_name))
return
self.sigPredefinedSequenceGenerated.emit(predefined_sequence_name)
return
def _get_sequences_from_file(self):
""" Update the saved_pulse_sequences dict from file """
sequence_files = [f for f in os.listdir(self.sequence_dir) if 'sequence_dict.sequ' in f]
if len(sequence_files) == 0:
self.log.info('No serialized sequence dict was found in {0}.'.format(self.sequence_dir))
self.saved_pulse_sequences = OrderedDict()
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
        # log an error if more than one file is present and use the last one found
if len(sequence_files) > 1:
self.log.error('More than one serialized sequence dict was found in {0}.\n'
'Using {1}.'.format(self.sequence_dir, sequence_files[-1]))
sequence_files = sequence_files[-1]
try:
with open(os.path.join(self.sequence_dir, sequence_files), 'rb') as infile:
self.saved_pulse_sequences = pickle.load(infile)
except:
self.saved_pulse_sequences = OrderedDict()
self.log.error('Failed to deserialize sequence dict "{0}" from "{1}".'
''.format(sequence_files, self.sequence_dir))
self.sigSequenceDictUpdated.emit(self.saved_pulse_sequences)
return
def _save_sequences_to_file(self):
""" Saves the saved_pulse_sequences dict to file """
try:
with open(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'), 'wb') as outfile:
pickle.dump(self.saved_pulse_sequences, outfile)
except:
            self.log.error('Failed to serialize sequence dict in "{0}".'
''.format(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp')))
return
# remove old file and rename temp file
try:
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
except WindowsError:
os.remove(os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
os.rename(os.path.join(self.sequence_dir, 'sequence_dict.sequ.tmp'),
os.path.join(self.sequence_dir, 'sequence_dict.sequ'))
return
#---------------------------------------------------------------------------
# END sequence/block generation
#---------------------------------------------------------------------------
#---------------------------------------------------------------------------
# BEGIN sequence/block sampling
#---------------------------------------------------------------------------
def _analyze_block_ensemble(self, ensemble):
"""
        @param ensemble: PulseBlockEnsemble object to be analyzed
        @return: (number_of_samples, number_of_elements, number_of_states,
                  state_length_bins_arr)
"""
state_length_bins_arr = np.array([], dtype=int)
number_of_elements = 0
for block, reps in ensemble.block_list:
number_of_elements += (reps+1)*len(block.element_list)
num_state_changes = (reps+1) * len(block.element_list)
tmp_length_bins = np.zeros(num_state_changes, dtype=int)
        # Iterate over all repetitions of the current block
state_index = 0
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_index, block_element in enumerate(block.element_list):
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
element_length_s = init_length_s + (rep_no * increment_s)
tmp_length_bins[state_index] = int(np.rint(element_length_s * self.sample_rate))
state_index += 1
state_length_bins_arr = np.append(state_length_bins_arr, tmp_length_bins)
number_of_samples = np.sum(state_length_bins_arr)
number_of_states = len(state_length_bins_arr)
return number_of_samples, number_of_elements, number_of_states, state_length_bins_arr
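    # Worked example (numbers assumed for illustration): with a sample_rate of
    # 1 GHz, a block holding two elements with init_length_s=100e-9 and
    # increment_s=10e-9, repeated once (reps=1), yields state lengths of
    # 100, 100 bins for repetition 0 and 110, 110 bins for repetition 1, i.e.
    # number_of_samples = 420 and number_of_elements = number_of_states = 4.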
def sample_pulse_block_ensemble(self, ensemble_name, write_to_file=True, offset_bin=0,
name_tag=None):
""" General sampling of a PulseBlockEnsemble object, which serves as the construction plan.
        @param str ensemble_name: Name, which should correlate with the name of one of the
                                  displayed ensembles.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
        @param int offset_bin: If many pulse ensembles are sampled sequentially, then the
                               offset_bin of the previous sampling can be passed to maintain
                               rotating frame across pulse_block_ensembles
        @param str name_tag: a name tag, which is used to keep the sampled files together, which
                             were sampled from the same PulseBlockEnsemble object but with
                             different offset_bins.
        @return tuple: of length 3 with
                       (analog_samples, digital_samples, offset_bin).
                        analog_samples:
                            numpy arrays containing the sampled voltages
                        digital_samples:
                            numpy arrays containing the sampled logic levels
                        offset_bin:
                            integer, which is used for maintaining the rotating frame.
This method is creating the actual samples (voltages and logic states) for each time step
of the analog and digital channels specified in the PulseBlockEnsemble.
Therefore it iterates through all blocks, repetitions and elements of the ensemble and
calculates the exact voltages (float64) according to the specified math_function. The
samples are later on stored inside a float32 array.
So each element is calculated with high precision (float64) and then down-converted to
float32 to be stored.
To preserve the rotating frame, an offset counter is used to indicate the absolute time
within the ensemble. All calculations are done with time bins (dtype=int) to avoid rounding
errors. Only in the last step when a single PulseBlockElement object is sampled these
integer bin values are translated into a floating point time.
The chunkwise write mode is used to save memory usage at the expense of time. Here for each
PulseBlockElement the write_to_file method in the HW module is called to avoid large
arrays inside the memory. In other words: The whole sample arrays are never created at any
time. This results in more function calls and general overhead causing the much longer time
to complete.
"""
# lock module if it's not already locked (sequence sampling in progress)
if self.getState() == 'idle':
self.lock()
sequence_sampling_in_progress = False
else:
sequence_sampling_in_progress = True
# determine if chunkwise writing is enabled (the overhead byte size is set)
chunkwise = self.sampling_overhead_bytes is not None
# Set the filename (excluding the channel naming suffix, i.e. '_ch1')
if name_tag is None:
filename = ensemble_name
else:
filename = name_tag
# check for old files associated with the new ensemble and delete them from host PC
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
            # be careful: in contrast to Linux, Windows is in general case-
            # insensitive! Therefore one needs to check for and remove all files
            # matching the name case-insensitively on Windows.
            if sys.platform.startswith('win'):
# make it simple and make everything lowercase.
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.lower().startswith(filename.lower() + '_ch')]
else:
filename_list = [f for f in os.listdir(self.waveform_dir) if
f.startswith(filename + '_ch')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.waveform_dir, file))
if len(filename_list) != 0:
self.log.info('Found old sampled ensembles for name "{0}". Files deleted before '
'sampling: {1}'.format(filename, filename_list))
start_time = time.time()
# get ensemble
ensemble = self.saved_pulse_block_ensembles[ensemble_name]
# Ensemble parameters to determine the shape of sample arrays
ana_channels = ensemble.analog_channels
dig_channels = ensemble.digital_channels
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
dig_chnl_names = [chnl for chnl in self.activation_config if 'd_ch' in chnl]
if self.digital_channels != dig_channels or self.analog_channels != ana_channels:
self.log.error('Sampling of PulseBlockEnsemble "{0}" failed!\nMismatch in number of '
'analog and digital channels between logic ({1}, {2}) and '
'PulseBlockEnsemble ({3}, {4}).'
''.format(ensemble_name, self.analog_channels, self.digital_channels,
ana_channels, dig_channels))
return np.array([]), np.array([]), -1
number_of_samples, number_of_elements, number_of_states, state_length_bins_arr = self._analyze_block_ensemble(ensemble)
# The time bin offset for each element to be sampled to preserve rotating frame.
if chunkwise and write_to_file:
# Flags and counter for chunkwise writing
is_first_chunk = True
is_last_chunk = False
element_count = 0
else:
# Allocate huge sample arrays if chunkwise writing is disabled.
analog_samples = np.empty([ana_channels, number_of_samples], dtype = 'float32')
digital_samples = np.empty([dig_channels, number_of_samples], dtype = bool)
            # Starting index for the sample array entries
entry_ind = 0
# Iterate over all blocks within the PulseBlockEnsemble object
for block, reps in ensemble.block_list:
            # Iterate over all repetitions of the current block
for rep_no in range(reps+1):
# Iterate over the Block_Elements inside the current block
for elem_ind, block_element in enumerate(block.element_list):
parameters = block_element.parameters
init_length_s = block_element.init_length_s
increment_s = block_element.increment_s
digital_high = block_element.digital_high
pulse_function = block_element.pulse_function
element_length_s = init_length_s + (rep_no*increment_s)
element_length_bins = int(np.rint(element_length_s * self.sample_rate))
# create floating point time array for the current element inside rotating frame
time_arr = (offset_bin + np.arange(element_length_bins, dtype='float64')) / self.sample_rate
if chunkwise and write_to_file:
                        # determine if the current element is the last one to be sampled.
# Toggle the is_last_chunk flag accordingly.
element_count += 1
if element_count == number_of_elements:
is_last_chunk = True
# allocate temporary sample arrays to contain the current element
analog_samples = np.empty([ana_channels, element_length_bins], dtype='float32')
digital_samples = np.empty([dig_channels, element_length_bins], dtype=bool)
# actually fill the allocated sample arrays with values.
for i, state in enumerate(digital_high):
digital_samples[i] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# write temporary sample array to file
self._write_to_file[self.waveform_format](filename, analog_samples,
digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# set flag to FALSE after first write
is_first_chunk = False
else:
# if the ensemble should be sampled as a whole (chunkwise = False) fill the
# entries in the huge sample arrays
for i, state in enumerate(digital_high):
digital_samples[i, entry_ind:entry_ind+element_length_bins] = np.full(element_length_bins, state, dtype=bool)
for i, func_name in enumerate(pulse_function):
analog_samples[i, entry_ind:entry_ind+element_length_bins] = np.float32(self._math_func[func_name](time_arr, parameters[i])/self.amplitude_dict[ana_chnl_names[i]])
# increment the index offset of the overall sample array for the next
# element
entry_ind += element_length_bins
# if the rotating frame should be preserved (default) increment the offset
# counter for the time array.
if ensemble.rotating_frame:
offset_bin += element_length_bins
if not write_to_file:
# return a status message with the time needed for sampling the entire ensemble as a
# whole without writing to file.
            self.log.info('Time needed for sampling PulseBlockEnsemble as a whole without '
                          'writing to file: {0} sec.'.format(int(np.rint(time.time() - start_time))))
            # return the sample arrays since write_to_file was set to FALSE
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, analog_samples, digital_samples)
return analog_samples, digital_samples, offset_bin
elif chunkwise:
# return a status message with the time needed for sampling and writing the ensemble
# chunkwise.
self.log.info('Time needed for sampling and writing to file chunkwise: {0} sec'
''.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
else:
# If the sampling should not be chunkwise and write to file is enabled call the
# write_to_file method only once with both flags set to TRUE
is_first_chunk = True
is_last_chunk = True
self._write_to_file[self.waveform_format](filename, analog_samples, digital_samples,
number_of_samples, is_first_chunk,
is_last_chunk)
# return a status message with the time needed for sampling and writing the ensemble as
# a whole.
self.log.info('Time needed for sampling and writing PulseBlockEnsemble to file as a '
'whole: {0} sec'.format(int(np.rint(time.time()-start_time))))
if not sequence_sampling_in_progress:
self.unlock()
self.sigSampleEnsembleComplete.emit(filename, np.array([]), np.array([]))
return np.array([]), np.array([]), offset_bin
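    # Usage sketch (ensemble names assumed): to sample two ensembles back to
    # back while preserving the rotating frame, feed the returned offset_bin of
    # the first call into the second:
    #
    #     a1, d1, offset = self.sample_pulse_block_ensemble('ens_A',
    #                                                       write_to_file=False)
    #     a2, d2, offset = self.sample_pulse_block_ensemble('ens_B',
    #                                                       write_to_file=False,
    #                                                       offset_bin=offset)
    #
    # sample_pulse_sequence() below does exactly this for sequences with
    # rotating_frame enabled.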
def sample_pulse_sequence(self, sequence_name, write_to_file=True):
""" Samples the PulseSequence object, which serves as the construction plan.
        @param str sequence_name: Name, which should correlate with the name of one of the
                                  displayed sequences.
@param bool write_to_file: Write either to RAM or to File (depends on the available space
in RAM). If set to FALSE, this method will return the samples
(digital and analog) as numpy arrays
        The sequence object is sampled by subsequently calling the sampling routine for the
        PulseBlockEnsemble objects and, if needed, passing the rotating frame option.
        Only those PulseBlockEnsemble objects are sampled that are different! These can be
        directly obtained from the internal attribute different_ensembles_dict of a PulseSequence.
        Right now two 'simple' methods of sampling were implemented, which reuse the sample
        function for the Pulse_Block_Ensembles: one samples while preserving the phase (i.e.
        staying in the rotating frame), the other samples without keeping a phase
        relationship between the different entries of the PulseSequence object.
        More sophisticated sequence sampling methods can be implemented here.
"""
# lock module
if self.getState() == 'idle':
self.lock()
else:
self.log.error('Cannot sample sequence "{0}" because the sequence generator logic is '
'still busy (locked).\nFunction call ignored.'.format(sequence_name))
return
if write_to_file:
# get sampled filenames on host PC referring to the same ensemble
filename_list = [f for f in os.listdir(self.sequence_dir) if
f.startswith(sequence_name + '.seq')]
# delete all filenames in the list
for file in filename_list:
os.remove(os.path.join(self.sequence_dir, file))
if len(filename_list) != 0:
self.log.warning('Found old sequence for name "{0}". Files deleted before '
'sampling: {1}'.format(sequence_name, filename_list))
start_time = time.time()
ana_chnl_names = [chnl for chnl in self.activation_config if 'a_ch' in chnl]
ana_chnl_num = [int(chnl.split('ch')[-1]) for chnl in ana_chnl_names]
# get ensemble
sequence_obj = self.saved_pulse_sequences[sequence_name]
sequence_param_dict_list = []
# if all the Pulse_Block_Ensembles should be in the rotating frame, then each ensemble
# will be created in general with a different offset_bin. Therefore, in order to keep track
# of the sampled Pulse_Block_Ensembles one has to introduce a running number as an
        # additional name tag, to keep the sampled files separate.
if sequence_obj.rotating_frame:
ensemble_index = 0 # that will indicate the ensemble index
offset_bin = 0 # that will be used for phase preserving
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
                # build a zero-padded running index tag, e.g. '000', '001', ...
name_tag = sequence_name + '_' + str(ensemble_index).zfill(3)
dummy1, \
dummy2, \
offset_bin_return = self.sample_pulse_block_ensemble(ensemble_obj.name,
write_to_file=write_to_file,
offset_bin=offset_bin,
name_tag=name_tag)
# the temp_dict is a format how the sequence parameter will be saved
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(name_tag + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
# add the whole dict to the list of dicts, containing information about how to
# write the sequence properly in the hardware file:
sequence_param_dict_list.append(temp_dict)
# for the next run, the returned offset_bin will serve as starting point for
# phase preserving.
offset_bin = offset_bin_return
ensemble_index += 1
else:
            # if phase preservation between the sequence entries is not needed, then only the
# different ensembles will be sampled, since the offset_bin does not matter for them:
for ensemble_name in sequence_obj.different_ensembles_dict:
self.sample_pulse_block_ensemble(ensemble_name, write_to_file=write_to_file,
offset_bin=0, name_tag=None)
# go now through the sequence list and replace all the entries with the output of the
# sampled ensemble file:
for ensemble_obj, seq_param in sequence_obj.ensemble_param_list:
temp_dict = dict()
name_list = []
for ch_num in ana_chnl_num:
name_list.append(ensemble_obj.name + '_ch' + str(ch_num) + '.' + self.waveform_format)
temp_dict['name'] = name_list
# update the sequence parameter to the temp dict:
temp_dict.update(seq_param)
sequence_param_dict_list.append(temp_dict)
if write_to_file:
# pass the whole information to the sequence creation method:
self._write_to_file[self.sequence_format](sequence_name, sequence_param_dict_list)
self.log.info('Time needed for sampling and writing Pulse Sequence to file: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
else:
self.log.info('Time needed for sampling Pulse Sequence: {0} sec.'
''.format(int(np.rint(time.time() - start_time))))
# unlock module
self.unlock()
self.sigSampleSequenceComplete.emit(sequence_name, sequence_param_dict_list)
return
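    # File-naming sketch (waveform_format assumed to be 'wfm'): a rotating-frame
    # sequence 'seq' with three ensemble entries and analog channels 1 and 2
    # yields per-entry waveform files
    #     seq_000_ch1.wfm, seq_000_ch2.wfm,
    #     seq_001_ch1.wfm, seq_001_ch2.wfm,
    #     seq_002_ch1.wfm, seq_002_ch2.wfm
    # and sequence_param_dict_list references these names for the hardware
    # sequence file writer.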
#---------------------------------------------------------------------------
# END sequence/block sampling
#---------------------------------------------------------------------------
| gpl-3.0 | -7,307,263,554,439,006,000 | 50.447394 | 191 | 0.595218 | false |
rcgee/oq-hazardlib | openquake/hazardlib/site.py | 1 | 18827 | # -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2012-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module :mod:`openquake.hazardlib.site` defines :class:`Site`, :class:`SiteCollection`
and :class:`FilteredSiteCollection`.
"""
import numpy
from openquake.baselib.python3compat import range
from openquake.baselib.slots import with_slots
from openquake.baselib.general import split_in_blocks
from openquake.hazardlib.geo.mesh import Mesh
from openquake.hazardlib.geo.utils import cross_idl
@with_slots
class Site(object):
"""
Site object represents a geographical location defined by its position
as well as its soil characteristics.
:param location:
Instance of :class:`~openquake.hazardlib.geo.point.Point` representing
where the site is located.
:param vs30:
Average shear wave velocity in the top 30 m, in m/s.
:param vs30measured:
Boolean value, ``True`` if ``vs30`` was measured on that location
and ``False`` if it was inferred.
:param z1pt0:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 1.0 km/sec, in meters.
:param z2pt5:
Vertical distance from earth surface to the layer where seismic waves
start to propagate with a speed above 2.5 km/sec, in km.
:param backarc":
Boolean value, ``True`` if the site is in the subduction backarc and
``False`` if it is in the subduction forearc or is unknown
:raises ValueError:
If any of ``vs30``, ``z1pt0`` or ``z2pt5`` is zero or negative.
.. note::
:class:`Sites <Site>` are pickleable
"""
_slots_ = 'location vs30 vs30measured z1pt0 z2pt5 backarc'.split()
def __init__(self, location, vs30, vs30measured, z1pt0, z2pt5,
backarc=False):
if not vs30 > 0:
raise ValueError('vs30 must be positive')
if not z1pt0 > 0:
raise ValueError('z1pt0 must be positive')
if not z2pt5 > 0:
raise ValueError('z2pt5 must be positive')
self.location = location
self.vs30 = vs30
self.vs30measured = vs30measured
self.z1pt0 = z1pt0
self.z2pt5 = z2pt5
self.backarc = backarc
def __str__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> str(Site(loc, 760.0, True, 100.0, 5.0))
'<Location=<Latitude=2.000000, Longitude=1.000000, Depth=3.0000>, \
Vs30=760.0000, Vs30Measured=True, Depth1.0km=100.0000, Depth2.5km=5.0000, \
Backarc=False>'
"""
return (
"<Location=%s, Vs30=%.4f, Vs30Measured=%r, Depth1.0km=%.4f, "
"Depth2.5km=%.4f, Backarc=%r>") % (
self.location, self.vs30, self.vs30measured, self.z1pt0,
self.z2pt5, self.backarc)
def __hash__(self):
return hash((self.location.x, self.location.y))
def __eq__(self, other):
return (self.location.x, self.location.y) == (
other.location.x, other.location.y)
def __repr__(self):
"""
>>> import openquake.hazardlib
>>> loc = openquake.hazardlib.geo.point.Point(1, 2, 3)
>>> site = Site(loc, 760.0, True, 100.0, 5.0)
>>> str(site) == repr(site)
True
"""
return self.__str__()
def _extract(array_or_float, indices):
try: # if array
return array_or_float[indices]
except TypeError: # if float
return array_or_float
@with_slots
class SiteCollection(object):
"""
A collection of :class:`sites <Site>`.
Instances of this class are intended to represent a large collection
of sites in a most efficient way in terms of memory usage.
.. note::
Because calculations assume that :class:`Sites <Site>` are on the
Earth's surface, all `depth` information in a :class:`SiteCollection`
is discarded. The collection `mesh` will only contain lon and lat. So
even if a :class:`SiteCollection` is created from sites containing
`depth` in their geometry, iterating over the collection will yield
:class:`Sites <Site>` with a reference depth of 0.0.
:param sites:
A list of instances of :class:`Site` class.
"""
dtype = numpy.dtype([
('sids', numpy.uint32),
('lons', numpy.float64),
('lats', numpy.float64),
('_vs30', numpy.float64),
('_vs30measured', numpy.bool),
('_z1pt0', numpy.float64),
('_z2pt5', numpy.float64),
('_backarc', numpy.bool),
])
_slots_ = dtype.names
@classmethod
def from_points(cls, lons, lats, sitemodel):
"""
        Build the site collection from the given longitudes, latitudes and site
        model parameters.
:param lons:
a sequence of longitudes
:param lats:
a sequence of latitudes
:param sitemodel:
an object containing the attributes
reference_vs30_value,
reference_vs30_type,
reference_depth_to_1pt0km_per_sec,
reference_depth_to_2pt5km_per_sec,
reference_backarc
"""
assert len(lons) == len(lats), (len(lons), len(lats))
self = cls.__new__(cls)
self.complete = self
self.total_sites = len(lons)
self.sids = numpy.arange(len(lons), dtype=numpy.uint32)
self.lons = numpy.array(lons)
self.lats = numpy.array(lats)
self._vs30 = sitemodel.reference_vs30_value
self._vs30measured = sitemodel.reference_vs30_type == 'measured'
self._z1pt0 = sitemodel.reference_depth_to_1pt0km_per_sec
self._z2pt5 = sitemodel.reference_depth_to_2pt5km_per_sec
self._backarc = sitemodel.reference_backarc
return self
def __init__(self, sites):
self.complete = self
self.total_sites = n = len(sites)
self.sids = numpy.zeros(n, dtype=int)
self.lons = numpy.zeros(n, dtype=float)
self.lats = numpy.zeros(n, dtype=float)
self._vs30 = numpy.zeros(n, dtype=float)
self._vs30measured = numpy.zeros(n, dtype=bool)
self._z1pt0 = numpy.zeros(n, dtype=float)
self._z2pt5 = numpy.zeros(n, dtype=float)
self._backarc = numpy.zeros(n, dtype=bool)
for i in range(n):
self.sids[i] = i
self.lons[i] = sites[i].location.longitude
self.lats[i] = sites[i].location.latitude
self._vs30[i] = sites[i].vs30
self._vs30measured[i] = sites[i].vs30measured
self._z1pt0[i] = sites[i].z1pt0
self._z2pt5[i] = sites[i].z2pt5
self._backarc[i] = sites[i].backarc
# protect arrays from being accidentally changed. it is useful
# because we pass these arrays directly to a GMPE through
# a SiteContext object and if a GMPE is implemented poorly it could
# modify the site values, thereby corrupting site and all the
# subsequent calculation. note that this doesn't protect arrays from
# being changed by calling itemset()
for arr in (self._vs30, self._vs30measured, self._z1pt0, self._z2pt5,
self.lons, self.lats, self._backarc, self.sids):
arr.flags.writeable = False
def __toh5__(self):
array = numpy.zeros(self.total_sites, self.dtype)
for slot in self._slots_:
array[slot] = getattr(self, slot)
attrs = dict(total_sites=self.total_sites)
return array, attrs
def __fromh5__(self, array, attrs):
for slot in self._slots_:
setattr(self, slot, array[slot])
vars(self).update(attrs)
self.complete = self
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
@property
def indices(self):
"""The full set of indices from 0 to total_sites - 1"""
return numpy.arange(0, self.total_sites)
def split_in_tiles(self, hint):
"""
Split a SiteCollection into a set of tiles (SiteCollection instances).
:param hint: hint for how many tiles to generate
"""
tiles = []
for seq in split_in_blocks(range(len(self)), hint or 1):
indices = numpy.array(seq, int)
sc = SiteCollection.__new__(SiteCollection)
sc.complete = sc
sc.total_sites = len(indices)
sc.sids = self.sids[indices]
sc.lons = self.lons[indices]
sc.lats = self.lats[indices]
sc._vs30 = _extract(self._vs30, indices)
sc._vs30measured = _extract(self._vs30measured, indices)
sc._z1pt0 = _extract(self._z1pt0, indices)
sc._z2pt5 = _extract(self._z2pt5, indices)
sc._backarc = _extract(self._backarc, indices)
tiles.append(sc)
return tiles
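    # Usage sketch (tile sizes depend on split_in_blocks): splitting e.g. 10
    # sites with hint=3 gives roughly equal tiles such as [4, 3, 3]; each tile
    # is its own complete SiteCollection, so tiles can be processed by workers
    # independently:
    #
    #     for tile in sitecol.split_in_tiles(3):
    #         compute_hazard(tile)  # hypothetical worker function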
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
        if isinstance(self._vs30, float):  # from points
for i, location in enumerate(self.mesh):
yield Site(location, self._vs30, self._vs30measured,
self._z1pt0, self._z2pt5, self._backarc)
else: # from sites
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this sites
collection. ``True`` values should indicate that site with that
index should be included into the filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
# all sites satisfy the filter, return
# this collection unchanged
return self
if not mask.any():
# no sites pass the filter, return None
return None
# extract indices of Trues from the mask
[indices] = mask.nonzero()
return FilteredSiteCollection(indices, self)
def expand(self, data, placeholder):
"""
For non-filtered site collections just checks that data
has the right number of elements and returns it. It is
here just for API compatibility with filtered site collections.
"""
assert len(data) == len(self), (len(data), len(self))
return data
def __len__(self):
"""
Return the number of sites in the collection.
"""
return self.total_sites
def __repr__(self):
return '<SiteCollection with %d sites>' % self.total_sites
# adding a number of properties for the site model data
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc'.split():
def getarray(sc, name=name): # sc is a SiteCollection
value = getattr(sc, '_' + name)
if isinstance(value, (float, bool)):
arr = numpy.array([value] * len(sc), dtype=type(value))
arr.flags.writeable = False
return arr
else:
return value
setattr(SiteCollection, name, property(getarray, doc='%s array' % name))
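# Behaviour sketch for the generated properties (values assumed): for a
# collection built via from_points() the private slot may hold a scalar, but
# the public property always yields one read-only entry per site:
#
#     sc = SiteCollection.from_points([0., 1.], [10., 11.], sitemodel)
#     sc._vs30   # e.g. 760.0 -- scalar shared by all sites
#     sc.vs30    # numpy.array([760., 760.]) -- expanded, non-writeable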
@with_slots
class FilteredSiteCollection(object):
"""
A class meant to store proper subsets of a complete collection of sites
in a memory-efficient way.
:param indices:
an array of indices referring to the complete site collection
:param complete:
the complete site collection the filtered collection was
derived from
Notice that if you filter a FilteredSiteCollection `fsc`, you will
get a different FilteredSiteCollection referring to the complete
SiteCollection `fsc.complete`, not to the filtered collection `fsc`.
"""
_slots_ = 'indices complete'.split()
def __init__(self, indices, complete):
if complete is not complete.complete:
raise ValueError(
'You should pass a full site collection, not %s' % complete)
self.indices = indices
self.complete = complete
@property
def total_sites(self):
"""The total number of the original sites, without filtering"""
return self.complete.total_sites
@property
def mesh(self):
"""Return a mesh with the given lons and lats"""
return Mesh(self.lons, self.lats, depths=None)
def filter(self, mask):
"""
Create a FilteredSiteCollection with only a subset of sites
from this one.
:param mask:
Numpy array of boolean values of the same length as this
filtered sites collection. ``True`` values should indicate
that site with that index should be included into the
filtered collection.
:returns:
A new :class:`FilteredSiteCollection` instance, unless all the
values in ``mask`` are ``True``, in which case this site collection
is returned, or if all the values in ``mask`` are ``False``,
in which case method returns ``None``. New collection has data
of only those sites that were marked for inclusion in mask.
See also :meth:`expand`.
"""
assert len(mask) == len(self), (len(mask), len(self))
if mask.all():
return self
elif not mask.any():
return None
indices = self.indices.take(mask.nonzero()[0])
return FilteredSiteCollection(indices, self.complete)
def expand(self, data, placeholder):
"""
Expand a short array `data` over a filtered site collection of the
same length and return a long array of size `total_sites` filled
with the placeholder.
The typical workflow is the following: there is a whole site
collection, the one that has an information about all the sites.
Then it gets filtered for performing some calculation on a limited
set of sites (like for instance filtering sites by their proximity
to a rupture). That filtering process can be repeated arbitrary
number of times, i.e. a collection that is already filtered can
be filtered for further limiting the set of sites to compute on.
Then the (supposedly expensive) computation is done on a limited
set of sites which still appears as just a :class:`SiteCollection`
instance, so that computation code doesn't need to worry about
filtering, it just needs to handle site collection objects. The
calculation result comes in a form of 1d or 2d numpy array (that
is, either one value per site or one 1d array per site) with length
equal to number of sites in a filtered collection. That result
needs to be expanded to an array of similar structure but the one
that holds values for all the sites in the original (unfiltered)
collection. This is what :meth:`expand` is for. It creates a result
array of ``total_sites`` length and puts values from ``data`` into
appropriate places in it remembering indices of sites that were
chosen for actual calculation and leaving ``placeholder`` value
everywhere else.
:param data:
1d or 2d numpy array with first dimension representing values
computed for site from this collection.
:param placeholder:
A scalar value to be put in result array for those sites that
were filtered out and no real calculation was performed for them.
:returns:
Array of length ``total_sites`` with values from ``data``
distributed in the appropriate places.
"""
len_data = data.shape[0]
assert len_data == len(self), (len_data, len(self))
assert len_data <= self.total_sites
assert self.indices[-1] < self.total_sites, (
self.indices[-1], self.total_sites)
if data.ndim == 1:
# single-dimensional array
result = numpy.empty(self.total_sites)
result.fill(placeholder)
result.put(self.indices, data)
return result
assert data.ndim == 2
# two-dimensional array
num_values = data.shape[1]
result = numpy.empty((self.total_sites, num_values))
result.fill(placeholder)
for i in range(num_values):
result[:, i].put(self.indices, data[:, i])
return result
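    # Worked example (data assumed): with a complete collection of 5 sites and
    # indices = [1, 3], expanding data = numpy.array([7., 9.]) with placeholder
    # 0.0 yields array([0., 7., 0., 9., 0.]) -- computed values land at the
    # filtered sites' positions, the placeholder everywhere else.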
def __iter__(self):
"""
Iterate through all :class:`sites <Site>` in the collection, yielding
one at a time.
"""
for i, location in enumerate(self.mesh):
yield Site(location, self.vs30[i], self.vs30measured[i],
self.z1pt0[i], self.z2pt5[i], self.backarc[i])
def __len__(self):
"""Return the number of filtered sites"""
return len(self.indices)
def __repr__(self):
return '<FilteredSiteCollection with %d of %d sites>' % (
len(self.indices), self.total_sites)
def _extract_site_param(fsc, name):
# extract the site parameter 'name' from the filtered site collection
return getattr(fsc.complete, name).take(fsc.indices)
# attach a number of properties filtering the arrays
for name in 'vs30 vs30measured z1pt0 z2pt5 backarc lons lats sids'.split():
prop = property(
lambda fsc, name=name: _extract_site_param(fsc, name),
doc='Extract %s array from FilteredSiteCollection' % name)
setattr(FilteredSiteCollection, name, prop)
| agpl-3.0 | 877,749,385,576,351,600 | 37.738683 | 79 | 0.616296 | false |
chandrikas/sm | drivers/ISOSR.py | 1 | 27571 | #!/usr/bin/python
#
# Copyright (C) Citrix Systems Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# ISOSR: remote iso storage repository
import SR, VDI, SRCommand, util
import nfs
import os, re
import xs_errors
CAPABILITIES = ["VDI_CREATE", "VDI_DELETE", "VDI_ATTACH", "VDI_DETACH",
"SR_SCAN", "SR_ATTACH", "SR_DETACH"]
CONFIGURATION = \
[ [ 'location', 'path to mount (required) (e.g. server:/path)' ],
[ 'options',
'extra options to pass to mount (deprecated) (e.g. \'-o ro\')' ],
[ 'type','cifs or nfs'],
nfs.NFS_VERSION]
DRIVER_INFO = {
'name': 'ISO',
'description': 'Handles CD images stored as files in iso format',
'vendor': 'Citrix Systems Inc',
'copyright': '(C) 2008 Citrix Systems Inc',
'driver_version': '1.0',
'required_api_version': '1.0',
'capabilities': CAPABILITIES,
'configuration': CONFIGURATION
}
TYPE = "iso"
SMB_VERSION_1 = '1.0'
SMB_VERSION_3 = '3.0'
NFSPORT = 2049
def is_image_utf8_compatible(s):
regex = re.compile("\.iso$|\.img$", re.I)
    if regex.search(s) is None:
return False
# Check for extended characters
if type(s) == str:
try:
s.decode('utf-8')
except UnicodeDecodeError, e:
util.SMlog("WARNING: This string is not UTF-8 compatible.")
return False
return True
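# Behaviour sketch: only *.iso / *.img names (case-insensitive) pass, and only
# if they decode as UTF-8, e.g.:
#
#     is_image_utf8_compatible('winxp.ISO')     # -> True
#     is_image_utf8_compatible('notes.txt')     # -> False (wrong extension)
#     is_image_utf8_compatible('caf\xe9.iso')   # -> False (latin-1, not UTF-8)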
def tools_iso_name(filename):
# The tools ISO used have a "xs-" prefix in its name.
# We recognise both and set the name_label accordingly.
if filename[:3] == "xs-":
return "xs-tools.iso"
else:
return "guest-tools.iso"
class ISOSR(SR.SR):
"""Local file storage repository"""
# Some helper functions:
def _checkmount(self):
"""Checks that the mountpoint exists and is mounted"""
if not util.pathexists(self.mountpoint):
return False
try:
ismount = util.ismount(self.mountpoint)
except util.CommandException, inst:
return False
return ismount
def _checkTargetStr(self, location):
if not self.dconf.has_key('type'):
return
if self.dconf['type'] == 'cifs':
tgt = ''
if re.search('^//',location):
tgt = location.split('/')[2]
elif re.search(r'^\\',location):
l = location.split('\\')
for i in location.split('\\'):
if i:
tgt = i
break
if not tgt:
raise xs_errors.XenError('ISOLocationStringError')
else:
if location.find(':') == -1:
raise xs_errors.XenError('ISOLocationStringError')
tgt = location.split(':')[0]
try:
util._convertDNS(tgt)
except:
raise xs_errors.XenError('DNSError')
uuid_file_regex = re.compile(
"([0-9a-f]{8}-(([0-9a-f]{4})-){3}[0-9a-f]{12})\.(iso|img)", re.I)
def _loadvdis(self):
"""Scan the directory and get uuids either from the VDI filename, \
or by creating a new one."""
if self.vdis:
return
for name in filter(is_image_utf8_compatible,
util.listdir(self.path, quiet = True)):
fileName = self.path + "/" + name
if os.path.isdir(fileName):
util.SMlog("_loadvdis : %s is a directory. Ignore" % fileName)
continue
# CA-80254: Check for iso/img files whose name consists of extended
# characters.
try:
name.decode('ascii')
except UnicodeDecodeError:
raise xs_errors.XenError('CIFSExtendedCharsNotSupported', \
opterr = 'The repository contains at least one file whose name consists of extended characters.')
self.vdis[name] = ISOVDI(self, name)
# Set the VDI UUID if the filename is of the correct form.
# Otherwise, one will be generated later in VDI._db_introduce.
m = self.uuid_file_regex.match(name)
if m:
self.vdis[name].uuid = m.group(1)
# Synchronise the read-only status with existing VDI records
__xenapi_records = util.list_VDI_records_in_sr(self)
__xenapi_locations = {}
for vdi in __xenapi_records.keys():
__xenapi_locations[__xenapi_records[vdi]['location']] = vdi
for vdi in self.vdis.values():
if vdi.location in __xenapi_locations:
v = __xenapi_records[__xenapi_locations[vdi.location]]
sm_config = v['sm_config']
if sm_config.has_key('created'):
vdi.sm_config['created'] = sm_config['created']
vdi.read_only = False
# Now for the main functions:
def handles(type):
"""Do we handle this type?"""
if type == TYPE:
return True
return False
handles = staticmethod(handles)
def content_type(self, sr_uuid):
"""Returns the content_type XML"""
return super(ISOSR, self).content_type(sr_uuid)
vdi_path_regex = re.compile("[a-z0-9.-]+\.(iso|img)", re.I)
def vdi(self, uuid):
"""Create a VDI class. If the VDI does not exist, we determine
here what its filename should be."""
filename = util.to_plain_string(self.srcmd.params.get('vdi_location'))
if filename is None:
smconfig = self.srcmd.params.get('vdi_sm_config')
if smconfig is None:
# uh, oh, a VDI.from_uuid()
import XenAPI
_VDI = self.session.xenapi.VDI
try:
vdi_ref = _VDI.get_by_uuid(uuid)
except XenAPI.Failure, e:
if e.details[0] != 'UUID_INVALID': raise
else:
filename = _VDI.get_location(vdi_ref)
if filename is None:
# Get the filename from sm-config['path'], or use the UUID
# if the path param doesn't exist.
if smconfig and smconfig.has_key('path'):
filename = smconfig['path']
if not self.vdi_path_regex.match(filename):
raise xs_errors.XenError('VDICreate', \
opterr='Invalid path "%s"' % filename)
else:
filename = '%s.img' % uuid
return ISOVDI(self, filename)
def load(self, sr_uuid):
"""Initialises the SR"""
# First of all, check we've got the correct keys in dconf
if not self.dconf.has_key('location'):
raise xs_errors.XenError('ConfigLocationMissing')
# Construct the path we're going to mount under:
if self.dconf.has_key("legacy_mode"):
self.mountpoint = util.to_plain_string(self.dconf['location'])
else:
# Verify the target address
self._checkTargetStr(self.dconf['location'])
self.mountpoint = os.path.join(SR.MOUNT_BASE, sr_uuid)
# Add on the iso_path value if there is one
if self.dconf.has_key("iso_path"):
iso_path = util.to_plain_string(self.dconf['iso_path'])
if iso_path.startswith("/"):
iso_path=iso_path[1:]
self.path = os.path.join(self.mountpoint, iso_path)
else:
self.path = self.mountpoint
# Handle optional dconf attributes
self.nfsversion = nfs.validate_nfsversion(self.dconf.get('nfsversion'))
# Fill the required SMB version
self.smbversion = SMB_VERSION_3
# Check if smb version is specified from client
self.is_smbversion_specified = False
# Some info we need:
self.sr_vditype = 'phy'
self.credentials = None
def delete(self, sr_uuid):
pass
def attach(self, sr_uuid):
"""Std. attach"""
# Very-Legacy mode means the ISOs are in the local fs - so no need to attach.
if self.dconf.has_key('legacy_mode'):
# Verify path exists
if not os.path.exists(self.mountpoint):
raise xs_errors.XenError('ISOLocalPath')
return
# Check whether we're already mounted
if self._checkmount():
return
# Create the mountpoint if it's not already there
if not util.isdir(self.mountpoint):
util.makedirs(self.mountpoint)
mountcmd=[]
location = util.to_plain_string(self.dconf['location'])
self.credentials = os.path.join("/tmp", util.gen_uuid())
# TODO: Have XC standardise iso type string
protocol = 'nfs_iso'
options = ''
if self.dconf.has_key('type'):
protocol = self.dconf['type']
elif ":/" not in location:
protocol = 'cifs'
if 'options' in self.dconf:
options = self.dconf['options'].split(' ')
if protocol == 'cifs':
options = filter(lambda x: x != "", options)
else:
options = self.getNFSOptions(options)
# SMB options are passed differently for create via
# XC/xe sr-create and create via xe-mount-iso-sr
# In both cases check if SMB version is passed are not.
# If not use self.smbversion.
if protocol == 'cifs':
if self.dconf.has_key('type'):
# Create via XC or sr-create
# Check for username and password
mountcmd=["mount.cifs", location, self.mountpoint]
if 'vers' in self.dconf:
self.is_smbversion_specified = True
self.smbversion = self.dconf['vers']
util.SMlog("self.dconf['vers'] = %s" % self.dconf['vers'])
self.appendCIFSMountOptions(mountcmd)
else:
# Creation via xe-mount-iso-sr
try:
mountcmd = ["mount", location, self.mountpoint]
if options and options[0] == '-o':
pos = options[1].find('vers=')
if pos == -1:
options[1] += ',' + self.getSMBVersion()
else:
self.smbversion = self.getSMBVersionFromOptions(
options[1])
self.is_smbversion_specified = True
else:
raise ValueError
mountcmd.extend(options)
except ValueError:
raise xs_errors.XenError('ISOInvalidXeMountOptions')
# Check the validity of 'smbversion'.
# Raise an exception for any invalid version.
if self.smbversion not in [SMB_VERSION_1, SMB_VERSION_3]:
self._cleanupcredentials()
raise xs_errors.XenError('ISOInvalidSMBversion')
# Attempt mounting
try:
if protocol == 'nfs_iso':
                # For NFS, do a soft mount with tcp as protocol. Since the ISO SR
                # is going to be read-only, a failure in the nfs link can be
                # reported back to the waiting process.
serv_path = location.split(':')
util._testHost(serv_path[0], NFSPORT, 'NFSTarget')
nfs.soft_mount(self.mountpoint, serv_path[0], serv_path[1],
'tcp', useroptions=options,
nfsversion=self.nfsversion)
else:
smb3_fail_reason = None
if self.smbversion in SMB_VERSION_3:
util.SMlog('ISOSR mount over smb 3.0')
try:
self.mountOverSMB(mountcmd)
except util.CommandException, inst:
if not self.is_smbversion_specified:
util.SMlog('Retrying ISOSR mount over smb 1.0')
smb3_fail_reason = inst.reason
# mountcmd is constructed such that the last two
# items will contain -o argument and its value.
del mountcmd[-2:]
self.smbversion = SMB_VERSION_1
if not options:
self.appendCIFSMountOptions(mountcmd)
else:
if options[0] == '-o':
# regex can be used here since we have
# already validated version entry
options[1] = re.sub('vers=3.0', 'vers=1.0',
options[1])
mountcmd.extend(options)
self.mountOverSMB(mountcmd)
else:
self._cleanupcredentials()
raise xs_errors.XenError(
'ISOMountFailure', opterr=inst.reason)
else:
util.SMlog('ISOSR mount over smb 1.0')
self.mountOverSMB(mountcmd)
except util.CommandException, inst:
self._cleanupcredentials()
if not self.is_smbversion_specified:
raise xs_errors.XenError(
'ISOMountFailure', opterr=smb3_fail_reason)
else:
raise xs_errors.XenError(
'ISOMountFailure', opterr=inst.reason)
self._cleanupcredentials()
# Check the iso_path is accessible
if not self._checkmount():
self.detach(sr_uuid)
raise xs_errors.XenError('ISOSharenameFailure')
def getSMBVersionFromOptions(self, options):
"""Extract SMB version from options """
smb_ver = None
options_list = options.split(',')
for option in options_list:
if option.startswith('vers='):
version = option.split('=')
if len(version) == 2:
smb_ver = version[1]
break
return smb_ver
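    # Parsing sketch: for an option string such as 'cache=none,guest,vers=3.0'
    # this returns '3.0'; if no 'vers=' token is present it returns None.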
def getSMBVersion(self):
"""Pass smb version option to mount.cifs"""
smbversion = "vers=%s" % self.smbversion
return smbversion
def mountOverSMB(self, mountcmd):
"""This function raises util.CommandException"""
util.pread(mountcmd, True)
try:
if not self.is_smbversion_specified:
# Store the successful smb version in PBD config
self.updateSMBVersInPBDConfig()
except Exception as exc:
util.SMlog("Exception: %s" % str(exc))
if self._checkmount():
util.pread(["umount", self.mountpoint])
raise util.CommandException
def updateSMBVersInPBDConfig(self):
"""Store smb version in PBD config"""
pbd = util.find_my_pbd(self.session, self.host_ref, self.sr_ref)
if pbd is not None:
util.SMlog('Updating SMB version in PBD device config')
dconf = self.session.xenapi.PBD.get_device_config(pbd)
dconf['vers'] = self.smbversion
self.session.xenapi.PBD.set_device_config(pbd, dconf)
else:
raise Exception('Could not find PBD for corresponding SR')
def getNFSOptions(self, options):
"""Append options to mount.nfs"""
#Only return any options specified with -o
nfsOptions = ''
for index, opt in enumerate(options):
if opt == "-o":
nfsOptions = options[index + 1]
break
return nfsOptions
def appendCIFSMountOptions(self, mountcmd):
"""Append options to mount.cifs"""
options = []
try:
options.append(self.getCIFSPasswordOptions())
options.append(self.getCacheOptions())
options.append('guest')
options.append(self.getSMBVersion())
except:
util.SMlog("Exception while attempting to append mount options")
raise
# Extend mountcmd appropriately
if options:
options = ",".join(str(x) for x in options if x)
mountcmd.extend(["-o", options])
def getCacheOptions(self):
"""Pass cache options to mount.cifs"""
return "cache=none"
def getCIFSPasswordOptions(self):
if self.dconf.has_key('username') \
and (self.dconf.has_key('cifspassword') or self.dconf.has_key('cifspassword_secret')):
dom_username = self.dconf['username'].split('\\')
if len(dom_username) == 1:
domain = None
username = dom_username[0]
elif len(dom_username) == 2:
domain = dom_username[0]
username = dom_username[1]
else:
err_str = ("A maximum of 2 tokens are expected "
"(<domain>\<username>). {} were given."
.format(len(dom_username)))
util.SMlog('CIFS ISO SR mount error: ' + err_str)
raise xs_errors.XenError('ISOMountFailure', opterr=err_str)
if self.dconf.has_key('cifspassword_secret'):
password = util.get_secret(self.session, self.dconf['cifspassword_secret'])
else:
password = self.dconf['cifspassword']
domain = util.to_plain_string(domain)
username = util.to_plain_string(username)
password = util.to_plain_string(password)
cred_str = 'username={}\npassword={}\n'.format(username, password)
if domain:
cred_str += 'domain={}\n'.format(domain)
# Open credentials file and truncate
f = open(self.credentials, 'w')
f.write(cred_str)
f.close()
credentials = "credentials=%s" % self.credentials
return credentials
def _cleanupcredentials(self):
if self.credentials and os.path.exists(self.credentials):
os.unlink(self.credentials)
def detach(self, sr_uuid):
"""Std. detach"""
# This handles legacy mode too, so no need to check
if not self._checkmount():
return
try:
util.pread(["umount", self.mountpoint]);
except util.CommandException, inst:
raise xs_errors.XenError('NFSUnMount', \
opterr = 'error is %d' % inst.code)
def scan(self, sr_uuid):
"""Scan: see _loadvdis"""
if not util.isdir(self.path):
raise xs_errors.XenError('SRUnavailable', \
opterr = 'no such directory %s' % self.path)
if (not self.dconf.has_key('legacy_mode')) and (not self._checkmount()):
raise xs_errors.XenError('SRUnavailable', \
opterr = 'directory not mounted: %s' % self.path)
#try:
if not self.vdis:
self._loadvdis()
self.physical_size = util.get_fs_size(self.path)
self.physical_utilisation = util.get_fs_utilisation(self.path)
self.virtual_allocation = self.physical_size
other_config = self.session.xenapi.SR.get_other_config(self.sr_ref)
if other_config.has_key('xenserver_tools_sr') and \
other_config['xenserver_tools_sr'] == "true":
# Out of all the xs-tools ISOs which exist in this dom0, we mark
# only one as the official one.
# Pass 1: find the latest version
latest_build_vdi = None
latest_build_number = "0"
for vdi_name in self.vdis:
vdi = self.vdis[vdi_name]
if latest_build_vdi == None:
latest_build_vdi = vdi.location
latest_build_number = "0"
if vdi.sm_config.has_key('xs-tools-build'):
bld = vdi.sm_config['xs-tools-build']
if bld >= latest_build_number:
latest_build_vdi = vdi.location
latest_build_number = bld
# Pass 2: mark all VDIs accordingly
for vdi_name in self.vdis:
vdi = self.vdis[vdi_name]
if vdi.location == latest_build_vdi:
vdi.sm_config['xs-tools'] = "true"
else:
if vdi.sm_config.has_key("xs-tools"):
del vdi.sm_config['xs-tools']
# Synchronise the VDIs: this will update the sm_config maps of current records
scanrecord = SR.ScanRecord(self)
scanrecord.synchronise_new()
scanrecord.synchronise_existing()
# Everything that looks like an xs-tools ISO but which isn't the
# primary one will also be renamed "Old version of ..."
sr = self.session.xenapi.SR.get_by_uuid(sr_uuid)
all_vdis = self.session.xenapi.VDI.get_all_records_where("field \"SR\" = \"%s\"" % sr)
for vdi_ref in all_vdis.keys():
vdi = all_vdis[vdi_ref]
if vdi['sm_config'].has_key('xs-tools-version'):
name = tools_iso_name(vdi['location'])
if vdi['sm_config'].has_key('xs-tools'):
self.session.xenapi.VDI.set_name_label(vdi_ref, name)
else:
self.session.xenapi.VDI.set_name_label(vdi_ref, "Old version of " + name)
# never forget old VDI records to cope with rolling upgrade
for location in scanrecord.gone:
vdi = scanrecord.get_xenapi_vdi(location)
util.SMlog("Marking previous version of tools ISO: location=%s uuid=%s" % (vdi['location'], vdi['uuid']))
vdi = self.session.xenapi.VDI.get_by_uuid(vdi['uuid'])
name_label = self.session.xenapi.VDI.get_name_label(vdi)
if not(name_label.startswith("Old version of ")):
self.session.xenapi.VDI.set_name_label(vdi, "Old version of " + name_label)
# Mark it as missing for informational purposes only
self.session.xenapi.VDI.set_missing(vdi, True)
self.session.xenapi.VDI.remove_from_sm_config(vdi, 'xs-tools' )
else:
return super(ISOSR, self).scan(sr_uuid)
def create(self, sr_uuid, size):
self.attach(sr_uuid)
if self.dconf.has_key('type'):
smconfig = self.session.xenapi.SR.get_sm_config(self.sr_ref)
smconfig['iso_type'] = self.dconf['type']
self.session.xenapi.SR.set_sm_config(self.sr_ref, smconfig)
# CA-80254: Check for iso/img files whose name consists of extended
# characters.
for f in util.listdir(self.path, quiet = True):
if is_image_utf8_compatible(f):
try:
f.decode('ascii')
except UnicodeDecodeError:
raise xs_errors.XenError('CIFSExtendedCharsNotSupported',
opterr = 'The repository contains at least one file whose name consists of extended characters.')
self.detach(sr_uuid)
class ISOVDI(VDI.VDI):
def load(self, vdi_uuid):
# Nb, in the vdi_create call, the filename is unset, so the following
# will fail.
self.vdi_type = "iso"
try:
stat = os.stat(self.path)
self.utilisation = long(stat.st_size)
self.size = long(stat.st_size)
self.label = self.filename
except:
pass
def __init__(self, mysr, filename):
self.path = os.path.join(mysr.path, filename)
VDI.VDI.__init__(self, mysr, None)
self.location = filename
self.filename = filename
self.read_only = True
self.label = filename
self.sm_config = {}
if mysr.dconf.has_key("legacy_mode"):
if filename.startswith("xs-tools") or filename.startswith("guest-tools"):
self.label = tools_iso_name(filename)
# Mark this as a Tools CD
# self.sm_config['xs-tools'] = 'true'
# Extract a version string, if present
vsn = filename[filename.find("tools")+len("tools"):][:-len(".iso")].strip("-").split("-",1)
# "4.1.0"
if len(vsn) == 1:
build_number="0" # string
product_version=vsn[0]
# "4.1.0-1234"
elif len(vsn) > 1:
build_number=vsn[1]
product_version=vsn[0]
else:
build_number=0
product_version="unknown"
util.SMlog("version=%s build=%s" % (product_version, build_number))
self.sm_config['xs-tools-version'] = product_version
self.sm_config['xs-tools-build'] = build_number
def detach(self, sr_uuid, vdi_uuid):
pass
def attach(self, sr_uuid, vdi_uuid):
try:
os.stat(self.path)
return super(ISOVDI, self).attach(sr_uuid, vdi_uuid)
except:
raise xs_errors.XenError('VDIMissing')
def create(self, sr_uuid, vdi_uuid, size):
self.uuid = vdi_uuid
self.path = os.path.join(self.sr.path, self.filename)
self.size = size
self.utilisation = 0L
self.read_only = False
self.sm_config = self.sr.srcmd.params['vdi_sm_config']
self.sm_config['created'] = util._getDateString()
if util.pathexists(self.path):
raise xs_errors.XenError('VDIExists')
try:
handle = open(self.path,"w")
handle.truncate(size)
handle.close()
self._db_introduce()
return super(ISOVDI, self).get_params()
except Exception, exn:
util.SMlog("Exception when creating VDI: %s" % exn)
raise xs_errors.XenError('VDICreate', \
opterr='could not create file: "%s"' % self.path)
def delete(self, sr_uuid, vdi_uuid):
util.SMlog("Deleting...")
self.uuid = vdi_uuid
self._db_forget()
if not util.pathexists(self.path):
return
try:
util.SMlog("Unlinking...")
os.unlink(self.path)
util.SMlog("Done...")
except:
raise xs_errors.XenError('VDIDelete')
# delete, update, introduce unimplemented. super class will raise
# exceptions
if __name__ == '__main__':
SRCommand.run(ISOSR, DRIVER_INFO)
else:
SR.registerSR(ISOSR)
| lgpl-2.1 | -2,616,487,094,124,012,500 | 38.556671 | 125 | 0.534583 | false |
Outernet-Project/librarian | tests/routes/test_auth.py | 1 | 7612 | try:
import __builtin__ as builtins
except ImportError:
import builtins
import mock
import librarian.routes.auth as mod
# Test login route handler
@mock.patch.object(mod.Login, 'perform_redirect')
@mock.patch.object(mod.Login, 'request')
def test_login_form_valid(request, perform_redirect):
route = mod.Login()
assert route.form_valid() is None
request.user.options.process.assert_called_once_with('language')
perform_redirect.assert_called_once_with()
# Test logout route handler
@mock.patch.object(mod.Logout, 'perform_redirect')
@mock.patch.object(mod.Logout, 'request')
def test_logout_get(request, perform_redirect):
route = mod.Logout()
assert route.get() == ''
request.user.logout.assert_called_once_with()
perform_redirect.assert_called_once_with()
# Test reset password route handler
@mock.patch.object(mod, '_')
@mock.patch.object(mod, 'i18n_url')
@mock.patch.object(mod, 'template')
@mock.patch.object(mod.User, 'set_password')
@mock.patch.object(mod.PasswordReset, 'request')
def test_reset_password_form_valid(request, set_password, template, i18n_url,
lazy_gettext):
request.user.is_authenticated = False
route = mod.PasswordReset()
route.form = mock.Mock()
route.form.processed_data = {'username': 'usr', 'password1': 'pwd'}
resp = route.form_valid()
assert isinstance(resp, mod.PasswordReset.HTTPResponse)
set_password.assert_called_once_with('usr', 'pwd')
@mock.patch.object(mod, '_')
@mock.patch.object(mod, 'i18n_url')
@mock.patch.object(mod, 'template')
@mock.patch.object(mod.User, 'set_password')
@mock.patch.object(mod.PasswordReset, 'request')
def test_reset_password_form_valid_authenticated(request, set_password,
template, i18n_url, _):
request.user.is_authenticated = True
route = mod.PasswordReset()
route.form = mock.Mock()
route.form.processed_data = {'username': 'usr', 'password1': 'pwd'}
route.form_valid()
request.user.logout.assert_called_once_with()
# Test emergency reset route handler
@mock.patch.object(mod.EmergencyReset, 'request')
@mock.patch.object(mod.EmergencyReset, 'abort')
@mock.patch.object(mod.os.path, 'isfile')
def test_emergency_reset_read_token_file_not_found(isfile, abort, request):
isfile.return_value = False
route = mod.EmergencyReset()
route.read_token_file()
abort.assert_called_once_with(404)
@mock.patch.object(mod.EmergencyReset, 'request')
@mock.patch.object(mod.EmergencyReset, 'abort')
@mock.patch.object(mod.os.path, 'isfile')
@mock.patch.object(builtins, 'open')
def test_emergency_reset_read_token_file_empty(open_fn, isfile, abort,
request):
isfile.return_value = True
# set up mocked empty file object
mocked_file = mock.Mock()
mocked_file.read.return_value = ''
ctx_manager = mock.MagicMock()
ctx_manager.__enter__.return_value = mocked_file
open_fn.return_value = ctx_manager
# perform test
route = mod.EmergencyReset()
with mock.patch.object(route, 'config') as config:
route.read_token_file()
abort.assert_called_once_with(404)
open_fn.assert_called_once_with(config.get.return_value, 'r')
@mock.patch.object(mod.EmergencyReset, 'request')
@mock.patch.object(mod.EmergencyReset, 'abort')
@mock.patch.object(mod.os.path, 'isfile')
@mock.patch.object(builtins, 'open')
def test_emergency_reset_read_token_file(open_fn, isfile, abort, request):
isfile.return_value = True
    # set up mocked file object containing a token
mocked_file = mock.Mock()
mocked_file.read.return_value = 'token'
ctx_manager = mock.MagicMock()
ctx_manager.__enter__.return_value = mocked_file
open_fn.return_value = ctx_manager
# perform test
route = mod.EmergencyReset()
assert route.read_token_file() == 'token'
assert not abort.called
@mock.patch.object(mod.User, 'generate_reset_token')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_get_reset_token(request, generate_reset_token):
route = mod.EmergencyReset()
# test case of GET request
request.method = 'GET'
assert route.get_reset_token() == generate_reset_token.return_value
# test case of POST request
request.method = 'POST'
assert route.get_reset_token() == request.params.get.return_value
@mock.patch.object(mod.EmergencyReset, 'request')
@mock.patch.object(mod, 'exts')
def test_emergency_reset_clear_auth_databases(exts, request):
route = mod.EmergencyReset()
route.clear_auth_databases()
db = exts.databases.librarian
db.Delete.assert_any_call('users')
db.execute.assert_any_call(db.Delete.return_value)
db.Delete.assert_any_call('sessions')
db.execute.assert_any_call(db.Delete.return_value)
@mock.patch.object(mod, 'exts')
@mock.patch.object(mod.User, 'create')
@mock.patch.object(mod.EmergencyReset, 'get_reset_token')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_recreate_user(request, get_reset_token, create, exts):
route = mod.EmergencyReset()
route.recreate_user('usr', 'pwd')
create.assert_called_once_with('usr',
'pwd',
is_superuser=True,
db=exts.databases.librarian,
reset_token=get_reset_token.return_value)
@mock.patch.object(mod.EmergencyReset, 'get_reset_token')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_get_context(request, get_reset_token):
route = mod.EmergencyReset()
ctx = route.get_context()
assert ctx['reset_token'] == get_reset_token.return_value
@mock.patch.object(builtins, 'super')
@mock.patch.object(mod, 'i18n_url')
@mock.patch.object(mod.EmergencyReset, 'read_token_file')
@mock.patch.object(mod.EmergencyReset, 'redirect')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_get_authenticated(request, redirect, read_token_file,
i18n_url, super_fn):
request.user.is_authenticated = True
route = mod.EmergencyReset()
route.get()
read_token_file.assert_called_once_with()
redirect.assert_called_once_with(i18n_url.return_value)
assert not super_fn.called
@mock.patch.object(builtins, 'super')
@mock.patch.object(mod.EmergencyReset, 'read_token_file')
@mock.patch.object(mod.EmergencyReset, 'redirect')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_get_not_authenticated(request, redirect,
read_token_file, super_fn):
request.user.is_authenticated = False
route = mod.EmergencyReset()
route.get()
read_token_file.assert_called_once_with()
assert not redirect.called
assert super_fn.called
@mock.patch.object(mod, '_')
@mock.patch.object(mod, 'template')
@mock.patch.object(mod.EmergencyReset, 'recreate_user')
@mock.patch.object(mod.EmergencyReset, 'clear_auth_databases')
@mock.patch.object(mod.EmergencyReset, 'request')
def test_emergency_reset_form_valid(request, clear_auth_databases,
recreate_user, template, lazy_gettext):
route = mod.EmergencyReset()
route.form = mock.Mock()
route.form.processed_data = {'username': 'usr', 'password1': 'pwd'}
resp = route.form_valid()
clear_auth_databases.assert_called_once_with()
recreate_user.assert_called_once_with('usr', 'pwd')
assert isinstance(resp, mod.EmergencyReset.HTTPResponse)
| gpl-3.0 | 7,878,728,443,754,681,000 | 35.421053 | 79 | 0.681818 | false |
gsnbng/erpnext | erpnext/loan_management/doctype/loan_disbursement/test_loan_disbursement.py | 1 | 3110 | # -*- coding: utf-8 -*-
# Copyright (c) 2019, Frappe Technologies Pvt. Ltd. and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.utils import (nowdate, add_days, get_datetime, get_first_day, get_last_day, date_diff, flt, add_to_date)
from erpnext.loan_management.doctype.loan.test_loan import (create_loan_type, create_loan_security_pledge, create_repayment_entry,
make_loan_disbursement_entry, create_loan_accounts, create_loan_security_type, create_loan_security, create_demand_loan, create_loan_security_price)
from erpnext.loan_management.doctype.process_loan_interest_accrual.process_loan_interest_accrual import process_loan_interest_accrual_for_demand_loans
from erpnext.loan_management.doctype.loan_interest_accrual.loan_interest_accrual import days_in_year
from erpnext.selling.doctype.customer.test_customer import get_customer_dict
class TestLoanDisbursement(unittest.TestCase):
def setUp(self):
create_loan_accounts()
create_loan_type("Demand Loan", 2000000, 13.5, 25, 0, 5, 'Cash', 'Payment Account - _TC', 'Loan Account - _TC',
'Interest Income Account - _TC', 'Penalty Income Account - _TC')
create_loan_security_type()
create_loan_security()
create_loan_security_price("Test Security 1", 500, "Nos", get_datetime() , get_datetime(add_to_date(nowdate(), hours=24)))
create_loan_security_price("Test Security 2", 250, "Nos", get_datetime() , get_datetime(add_to_date(nowdate(), hours=24)))
if not frappe.db.exists("Customer", "_Test Loan Customer"):
frappe.get_doc(get_customer_dict('_Test Loan Customer')).insert(ignore_permissions=True)
self.applicant = frappe.db.get_value("Customer", {'name': '_Test Loan Customer'}, 'name')
def test_loan_topup(self):
pledges = []
pledges.append({
"loan_security": "Test Security 1",
"qty": 4000.00,
"haircut": 50,
"loan_security_price": 500.00
})
loan_security_pledge = create_loan_security_pledge(self.applicant, pledges)
loan = create_demand_loan(self.applicant, "Demand Loan", loan_security_pledge.name,
posting_date=get_first_day(nowdate()))
loan.submit()
first_date = get_first_day(nowdate())
last_date = get_last_day(nowdate())
no_of_days = date_diff(last_date, first_date) + 1
accrued_interest_amount = (loan.loan_amount * loan.rate_of_interest * no_of_days) \
/ (days_in_year(get_datetime().year) * 100)
make_loan_disbursement_entry(loan.name, loan.loan_amount, disbursement_date=first_date)
process_loan_interest_accrual_for_demand_loans(posting_date=add_days(last_date, 1))
# Should not be able to create loan disbursement entry before repayment
self.assertRaises(frappe.ValidationError, make_loan_disbursement_entry, loan.name,
500000, first_date)
repayment_entry = create_repayment_entry(loan.name, self.applicant, add_days(get_last_day(nowdate()), 5),
"Regular Payment", 611095.89)
repayment_entry.submit()
loan.reload()
# After repayment loan disbursement entry should go through
make_loan_disbursement_entry(loan.name, 500000, disbursement_date=add_days(last_date, 16))
| agpl-3.0 | 3,905,159,435,813,598,700 | 41.60274 | 150 | 0.736334 | false |
jmhal/CCAPython | framework/manage/services.py | 1 | 9027 | from CCAPython.gov.cca import Services
from CCAPython.gov.cca.ports import ConnectionEventService
from CCAPython.gov.cca.ports import EventType
from CCAPython.framework.info.connectioninfo import ConnectionEvent
from CCAPython.framework.common.typemap import TypeMapDict
from CCAPython.framework.common.exceptions import PortNotFoundException
class ServicesHandle(Services, ConnectionEventService):
def __init__(self):
# Maps strings portName to a list (CCAPython.gov.cca.Ports, CCAPython.gov.cca.TypeMap).
# (portName) -> [Port, TypeMap]
self.d_usesPort = {}
self.d_providesPorts = {}
# Maps string ports names to string ports types
# (portName) -> (portType)
self.d_portType = {}
# Maps a CCAPython.gov.cca.ports.EventType value to a list of CCAPython.gov.cca.ports.EventListener
# (EventType) -> (ConnectionEventListener [])
self.d_listeners = {}
# A CCAPython.gov.cca.Type containing the properties of the component instance
self.d_instanceProperties = TypeMapDict()
# New methods
def initialize(self, fwk, componentID, properties, is_alias):
"""
input: a CCAPython.gov.cca.AbstractFramework fwk, a CCAPython.gov.cca.ComponentID componentID and a CCAPython.gov.cca.TypeMap properties
      output: void
"""
self.framework = fwk
self.componentID = componentID
self.properties = properties
self.d_is_alias = is_alias
   def getInstanceProperties(self):
"""
input: none
output: a CCAPython.gov.cca.TypeMap object
"""
return self.d_instanceProperties
def setInstanceProperties(self, properties):
"""
input: a CCAPython.gov.cca.TypeMap properties
output: none
"""
self.d_instanceProperties = properties
return
def setPortProperties(self, portName, properties):
"""
input: a string portName, a CCAPython.gov.cca.TypeMap properties
output: none
"""
if portName in self.d_providesPorts:
         self.d_providesPorts[portName][1] = properties
elif portName in self.d_usesPort:
self.d_usesPort[portName][1] = properties
else:
raise PortNotFoundException(portName)
def getProvidedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_providesPorts.keys()
def getUsedPortNames(self):
"""
input: none
output: a list of strings
"""
return self.d_usesPort.keys()
def bindPort(self, portName, port):
"""
input: a string portName, a CCAPython.gov.cca.Port object
output: void
"""
if portName not in self.d_usesPort.keys():
raise PortNotFoundException(portName)
self.d_usesPort[portName] = [port, TypeMapDict()]
return
def getProvidesPort(self, name):
"""
input: string name
      output: a CCAPython.gov.cca.Port object
"""
if name not in self.d_providesPorts.keys():
raise PortNotFoundException(name)
return self.d_providesPorts[name][0]
def notifyConnectionEvent(self, portName, event):
"""
      This method notifies the component associated with this Services object of an event
input: string portName, a CCAPython.gov.cca.ports.EventType value event
output: void
"""
listenerList = []
for ev in self.d_listeners:
if ev == event:
listenerList += self.d_listeners[event]
tm = TypeMapDict()
tm.putString("cca.PortName", portName)
tm.putString("cca.PortType", self.d_portType[portName])
ce = ConnectionEvent(event, tm)
for listener in listenerList:
listener.connectionActivity(ce)
return
# Methods from CCAPython.gov.cca.Services
def getComponentID(self):
"""
input: none
output: a ComponentID object
"""
return self.componentID
def createTypeMap(self):
"""
input: none
output: a TypeMap object
throws CCAException
"""
return TypeMapDict()
def registerUsesPort(self, portName, _type, properties):
"""
input: string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
else:
self.d_usesPort[portName] = [None, properties]
self.d_portType[portName] = _type
         if self.framework is not None:
if self.framework.isProvidedService(_type):
               self.framework.provideRequestedServices(self.componentID, portName, _type)
def unregisterUsesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_usesPort.pop(portName, None)
self.d_portType.pop(portName, None)
return
def addProvidesPort(self, inPort, portName, _type, properties):
"""
input: Port inPort, string portName, string type, and TypeMap properties
output: void
throws CCAException
"""
if portName in self.d_providesPorts or portName in self.d_usesPort:
print portName + " is not unique. Not doing anything."
return
if not self.d_is_alias and not inPort.isType(_type):
print "Port instance is not an instance of specified type"
return
self.d_providesPorts[portName] = [inPort, properties]
self.d_portType[portName] = _type
return
def removeProvidesPort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
self.d_providesPorts.pop(portName, None)
self.d_portType.pop(portName, None)
return
def getPortProperties(self, portName):
"""
input: string portName
output: a TypeMap object
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][1]
elif portName in self.d_providesPorts:
return self.d_providesPorts[portName][1]
else :
return None
def getPort(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
if portName in self.d_usesPort:
return self.d_usesPort[portName][0]
def getPortNonblocking(self, portName):
"""
input: string portName
output: a Port object
throws CCAException
"""
return self.getPort(portName)
def releasePort(self, portName):
"""
input: string portName
output: void
throws CCAException
"""
if portName in self.d_usesPort:
self.d_usesPort[portName] = None
def registerForRelease(self, callback):
"""
input: a CCAPython.gov.cca.ComponentRelease object callback
output: void
"""
self.framework.setInstanceRelease(self.componentID, callback)
# Methods from CCAPython.gov.cca.ports.ServiceRegistry
def addService(self, serviceType, portProvider):
"""
input: a string serviceType, a CCAPython.gov.cca.ports.ServiceProvider object portProvider
output: a boolean
throws CCAException
"""
self.framework.addServiceProvider(serviceType, self.componentID, portProvider)
return True
def addSingletonService(self, serviceType, server):
"""
input: a string serviceType, a CCAPython.gov.cca.Port object server
output: a boolean
throws CCAException
"""
self.framework.addServicePort(serviceType, server)
      return True
def removeService(self, serviceType):
"""
input: a string serviceType
output: none
throws CCAException
"""
self.framework.removeFromRegistry(serviceType)
return None
# Methods from CCAPython.gov.cca.ports.ConnectionEventService
def addConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
         self.addConnectionEventListener(EventType.ConnectPending, cel)
         self.addConnectionEventListener(EventType.Connected, cel)
         self.addConnectionEventListener(EventType.DisconnectPending, cel)
         self.addConnectionEventListener(EventType.Disconnected, cel)
      elif cel not in self.d_listeners.setdefault(et, []):
         self.d_listeners[et].append(cel)
return
def removeConnectionEventListener(self, et, cel):
"""
input: a CCAPython.gov.cca.ports.EventType et, a CCAPython.gov.cca.ports.ConnectionEventListener cel
output: void
"""
if et == EventType.Error:
return
if et == EventType.ALL:
for event in self.d_listeners:
self.removeConnectionEventListener(event, cel)
return
else:
self.d_listeners[et].remove(cel)
return
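# Minimal wiring sketch (illustrative only; 'fwk', 'cid' and 'my_port' are
# assumed to be provided by a concrete framework, as in initialize() above):
#
#    services = ServicesHandle()
#    services.initialize(fwk, cid, TypeMapDict(), False)
#    services.addProvidesPort(my_port, "hello", "examples.HelloPort",
#                             services.createTypeMap())
#    services.registerUsesPort("greeting", "examples.HelloPort",
#                              services.createTypeMap())
#    services.bindPort("greeting", services.getProvidesPort("hello"))
#    port = services.getPort("greeting")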
| apache-2.0 | 4,384,464,468,484,002,300 | 30.127586 | 142 | 0.643625 | false |
kris71990/China_info | chinadicts.py | 1 | 1299 | prov_cap = {
"Hebei": "Shijiazhuang",
"Shanxi": "Taiyuan",
"Liaoning": "Shenyang",
"Jilin": "Changchun",
"Heilongjiang": "Harbin",
"Jiangsu": "Nanjing",
"Zhejiang": "Hangzhou",
"Anhui": "Hefei",
"Fujian": "Fuzhou",
"Jiangxi": "Nanchang",
"Shandong": "Jinan",
"Henan": "Zhengzhou",
"Hubei": "Wuhan",
"Hunan": "Changsha",
"Guangdong": "Guangzhou",
"Hainan": "Haikou",
"Sichuan": "Chengdu",
"Guizhou": "Guiyang",
"Yunnan": "Kunming",
"Shaanxi": "Xi'an",
"Gansu": "Lanzhou",
"Qinghai": "Xining",
"Taiwan": "Taipei"
}
autregion = {
"Inner Mongolia": "NM",
"Guangxi Zhuang": "GX",
"Tibet": "XZ",
"Ningxia Hui": "NX",
"Xinjiang Uyghur": "XJ"
}
autregion_capitals = {
"Inner Mongolia": "Hohhot",
"Guangxi Zhuang": "Nanning",
"Tibet": "Lhasa",
"Ningxia Hui": "Yinchuan",
"Xinjiang Uyghur": "Urumqi"
}
admregion = {
"Hong Kong": "HK",
"Macau": "MC"
}
admregion_capitals = {
"Hong Kong": "Hong Kong",
"Macau": "Macau"
}
municipality = {
"Beijing": "BJ",
"Tianjin": "TJ",
"Shanghai": "SH",
"Chongqing": "CQ"
}
mun_capitals = {
"Beijing": "Beijing",
"Tianjin": "Tianjin",
"Shanghai": "Shanghai",
"Chongqing": "Chongqing"
}
| mit | -3,503,002,077,816,666,000 | 18.984615 | 32 | 0.532717 | false |
JensTimmerman/radical.pilot | src/radical/pilot/utils/analysis.py | 1 | 12671 |
import os
# ------------------------------------------------------------------------------
#
def get_experiment_frames(experiments, datadir=None):
"""
read profiles for all sessions in the given 'experiments' dict. That dict
is expected to be like this:
{ 'test 1' : [ [ 'rp.session.thinkie.merzky.016609.0007', 'stampede popen sleep 1/1/1/1 (?)'] ],
'test 2' : [ [ 'rp.session.ip-10-184-31-85.merzky.016610.0112', 'stampede shell sleep 16/8/8/4' ] ],
'test 3' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede shell mdrun 16/8/8/4' ] ],
'test 4' : [ [ 'rp.session.titan-ext4.marksant1.016607.0005', 'titan shell sleep 1/1/1/1 a' ] ],
'test 5' : [ [ 'rp.session.titan-ext4.marksant1.016607.0006', 'titan shell sleep 1/1/1/1 b' ] ],
'test 6' : [ [ 'rp.session.ip-10-184-31-85.merzky.016611.0013', 'stampede - isolated', ],
[ 'rp.session.ip-10-184-31-85.merzky.016612.0012', 'stampede - integrated', ],
[ 'rp.session.titan-ext4.marksant1.016607.0006', 'blue waters - integrated' ] ]
    }
    i.e. each key is an experiment name, and each experiment holds a list of
    session/label pairs, where the label will be later used to label (duh) plots.
we return a similar dict where the session IDs are data frames
"""
import pandas as pd
exp_frames = dict()
if not datadir:
datadir = os.getcwd()
print 'reading profiles in %s' % datadir
for exp in experiments:
print " - %s" % exp
exp_frames[exp] = list()
for sid, label in experiments[exp]:
print " - %s" % sid
import glob
for prof in glob.glob ("%s/%s-pilot.*.prof" % (datadir, sid)):
print " - %s" % prof
frame = get_profile_frame (prof)
exp_frames[exp].append ([frame, label])
return exp_frames
# ------------------------------------------------------------------------------
#
def get_profile_frame (prof):
import pandas as pd
return pd.read_csv(prof)
# ------------------------------------------------------------------------------
#
tmp = None
def add_concurrency (frame, tgt, spec):
"""
    add a column 'tgt' which is a cumulative sum of conditionals of another row.
    The purpose is the following: if a unit enters a component, the tgt row counter is
    increased by 1; if the unit leaves the component, the counter is decreased by 1.
    At any point in time, the resulting row thus contains the number of units which
    are in the component. Or state. Or whatever.
The arguments are:
'tgt' : name of the new column
'spec' : a set of filters to determine if a unit enters or leaves
'spec' is expected to be a dict of the following format:
spec = { 'in' : [{'col1' : 'pat1',
'col2' : 'pat2'},
...],
'out' : [{'col3' : 'pat3',
'col4' : 'pat4'},
...]
}
where:
'in' : filter set to determine the unit entering
'out' : filter set to determine the unit leaving
'col' : name of column for which filter is defined
'event' : event which correlates to entering/leaving
'msg' : qualifier on the event, if event is not unique
Example:
spec = {'in' : [{'state' :'Executing'}],
'out' : [{'state' :'Done'},
{'state' :'Failed'},
{'state' :'Cancelled'}]
}
    add_concurrency (df, 'concurrently_running', spec)
"""
import numpy
# create a temporary row over which we can do the commulative sum
# --------------------------------------------------------------------------
def _conc (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
# for each in filter
for f in spec['in']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " + : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 1
# for each out filter
for f in spec['out']:
match = 1
# for each col/val in that filter
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
# print " - : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return -1
# no filter matched
# print " : %-20s : %.2f : %-20s : %s " % (row['uid'], row['time'], row['event'], row['message'])
return 0
# --------------------------------------------------------------------------
# we only want to later look at changes of the concurrency -- leading or trailing
# idle times are to be ignored. We thus set repeating values of the cumsum to NaN,
    # so that they can be filtered out when plotting: df.dropna().plot(...).
    # That specifically will limit the plotted time range to the area of activity.
    # The full time range can still be plotted when omitting the dropna() call.
# --------------------------------------------------------------------------
def _time (x):
global tmp
if x != tmp: tmp = x
else : x = numpy.NaN
return x
# --------------------------------------------------------------------------
# sanitize concurrency: negative values indicate incorrect event ordering,
    # so we set the respective values to NaN
# --------------------------------------------------------------------------
def _abs (x):
if x < 0:
return numpy.NaN
return x
# --------------------------------------------------------------------------
frame[tgt] = frame.apply(lambda row: _conc(row, spec), axis=1).cumsum()
frame[tgt] = frame.apply(lambda row: _abs (row[tgt]), axis=1)
frame[tgt] = frame.apply(lambda row: _time(row[tgt]), axis=1)
# print frame[[tgt, 'time']]
# ------------------------------------------------------------------------------
#
t0 = None
def calibrate_frame(frame, spec):
"""
move the time axis of a profiling frame so that t_0 is at the first event
matching the given 'spec'. 'spec' has the same format as described in
'add_concurrency' (list of dicts with col:pat filters)
"""
# --------------------------------------------------------------------------
def _find_t0 (row, spec):
# row must match any filter dict in 'spec[in/out]'
# for any filter dict it must match all col/pat pairs
global t0
if t0 is not None:
# already found t0
return
# for each col/val in that filter
for f in spec:
match = 1
for col, pat in f.iteritems():
if row[col] != pat:
match = 0
break
if match:
# one filter matched!
t0 = row['time']
return
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
def _calibrate (row, t0):
if t0 is None:
# no t0...
return
return row['time'] - t0
# --------------------------------------------------------------------------
# we need to iterate twice over the frame: first to find t0, then to
# calibrate the time axis
global t0
t0 = None # no t0
frame.apply(lambda row: _find_t0 (row, spec), axis=1)
    if t0 is None:
print "Can't recalibrate, no matching timestamp found"
return
frame['time'] = frame.apply(lambda row: _calibrate(row, t0 ), axis=1)
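# Illustrative use (a sketch; the 'state' column and 'Executing' value are
# assumptions about the profile layout, mirroring the add_concurrency docs):
#
#   calibrate_frame(frame, spec=[{'state': 'Executing'}])
#
# after which frame['time'] is 0.0 at the first matching event.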
# ------------------------------------------------------------------------------
#
def create_plot():
"""
create a plot object and tune its layout to our liking.
"""
import matplotlib.pyplot as plt
fig, plot = plt.subplots(figsize=(12,6))
plot.xaxis.set_tick_params(width=1, length=7)
plot.yaxis.set_tick_params(width=1, length=7)
plot.spines['right' ].set_position(('outward', 10))
plot.spines['top' ].set_position(('outward', 10))
plot.spines['bottom'].set_position(('outward', 10))
plot.spines['left' ].set_position(('outward', 10))
plt.xticks(fontsize=14)
plt.yticks(fontsize=14)
fig.tight_layout()
return fig, plot
# ------------------------------------------------------------------------------
#
def frame_plot (frames, axis, title=None, logx=False, logy=False,
legend=True, figdir=None):
"""
plot the given axis from the give data frame. We create a plot, and plot
all frames given in the list. The list is expected to contain [frame,label]
pairs
frames: list of tuples of dataframes and labels
frames = [[stampede_df_1, 'stampede - popen'],
[stampede_df_2, 'stampede - shell'],
[stampede_df_3, 'stampede - ORTE' ]]
axis: tuple of data frame column index and axis label
axis = ['time', 'time (s)']
"""
# create figure and layout
fig, plot = create_plot()
# set plot title
if title:
plot.set_title(title, y=1.05, fontsize=18)
# plot the data frames
# NOTE: we need to set labels separately, because of
# https://github.com/pydata/pandas/issues/9542
labels = list()
    for frame, label in frames:
        try:
            frame.dropna().plot(ax=plot, logx=logx, logy=logy,
                                x=axis[0][0], y=axis[1][0],
                                drawstyle='steps',
                                label=label, legend=False)
            labels.append(label)
        except Exception as e:
            print "skipping frame '%s': '%s'" % (label, e)
if legend:
plot.legend(labels=labels, loc='upper right', fontsize=14, frameon=True)
# set axis labels
plot.set_xlabel(axis[0][1], fontsize=14)
plot.set_ylabel(axis[1][1], fontsize=14)
plot.set_frame_on(True)
# save as png and pdf. Use the title as base for names
if title: base = title
else : base = "%s_%s" % (axis[0][1], axis[1][1])
# clean up base name -- only keep alphanum and such
import re
base = re.sub('[^a-zA-Z0-9\.\-]', '_', base)
base = re.sub('_+', '_', base)
if not figdir:
figdir = os.getcwd()
print 'saving %s/%s.png' % (figdir, base)
fig.savefig('%s/%s.png' % (figdir, base), bbox_inches='tight')
print 'saving %s/%s.pdf' % (figdir, base)
fig.savefig('%s/%s.pdf' % (figdir, base), bbox_inches='tight')
return fig, plot
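# End-to-end sketch (hypothetical paths and session IDs; assumes
# add_concurrency() was applied to each frame so the y-column exists):
#
#   exp_frames = get_experiment_frames(experiments, datadir='./profiles')
#   for exp, flist in exp_frames.items():
#       frame_plot(flist,
#                  axis=[['time', 'time (s)'],
#                        ['concurrently_running', '# concurrent units']],
#                  title=exp, figdir='./figures')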
# ------------------------------------------------------------------------------
#
def create_analytical_frame (idx, kind, args, limits, step):
"""
create an artificial data frame, ie. a data frame which does not contain
data gathered from an experiment, but data representing an analytical
construct of some 'kind'.
idx: data frame column index to fill (a time column is always created)
kind: construct to use (only 'rate' is supporte right now)
args: construct specific parameters
limits: time range for which data are to be created
step: time steps for which data are to be created
"""
import pandas as pd
# --------------------------------------------------------------------------
def _frange(start, stop, step):
while start <= stop:
yield start
start += step
# --------------------------------------------------------------------------
if kind == 'rate' :
t_0 = args.get ('t_0', 0.0)
rate = args.get ('rate', 1.0)
data = list()
for t in _frange(limits[0], limits[1], step):
data.append ({'time': t+t_0, idx: t*rate})
return pd.DataFrame (data)
else:
raise ValueError ("No such frame kind '%s'" % kind)
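# Example (sketch): a 10-units-per-second reference line over the first minute,
# e.g. to overlay an ideal rate onto measured data in frame_plot():
#
#   ideal = create_analytical_frame('unit_count', 'rate',
#                                   {'t_0': 0.0, 'rate': 10.0},
#                                   limits=[0.0, 60.0], step=1.0)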
# ------------------------------------------------------------------------------
| mit | -4,651,960,791,394,131,000 | 34.793785 | 113 | 0.475495 | false |
ludojmj/treelud | server/paramiko/dsskey.py | 1 | 6975 | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
DSS keys.
"""
import os
from hashlib import sha1
from Crypto.PublicKey import DSA
from paramiko import util
from paramiko.common import zero_byte
from paramiko.py3compat import long
from paramiko.ssh_exception import SSHException
from paramiko.message import Message
from paramiko.ber import BER, BERException
from paramiko.pkey import PKey
class DSSKey (PKey):
"""
    Representation of a DSS key which can be used to sign and verify SSH2
data.
"""
def __init__(self, msg=None, data=None, filename=None, password=None, vals=None, file_obj=None):
self.p = None
self.q = None
self.g = None
self.y = None
self.x = None
if file_obj is not None:
self._from_private_key(file_obj, password)
return
if filename is not None:
self._from_private_key_file(filename, password)
return
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
self.p, self.q, self.g, self.y = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
if msg.get_text() != 'ssh-dss':
raise SSHException('Invalid key')
self.p = msg.get_mpint()
self.q = msg.get_mpint()
self.g = msg.get_mpint()
self.y = msg.get_mpint()
self.size = util.bit_length(self.p)
def asbytes(self):
m = Message()
m.add_string('ssh-dss')
m.add_mpint(self.p)
m.add_mpint(self.q)
m.add_mpint(self.g)
m.add_mpint(self.y)
return m.asbytes()
def __str__(self):
return self.asbytes()
def __hash__(self):
h = hash(self.get_name())
h = h * 37 + hash(self.p)
h = h * 37 + hash(self.q)
h = h * 37 + hash(self.g)
h = h * 37 + hash(self.y)
# h might be a long by now...
return hash(h)
def get_name(self):
return 'ssh-dss'
def get_bits(self):
return self.size
def can_sign(self):
return self.x is not None
def sign_ssh_data(self, data):
digest = sha1(data).digest()
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q), long(self.x)))
# generate a suitable k
qsize = len(util.deflate_long(self.q, 0))
while True:
k = util.inflate_long(os.urandom(qsize), 1)
if (k > 2) and (k < self.q):
break
r, s = dss.sign(util.inflate_long(digest, 1), k)
m = Message()
m.add_string('ssh-dss')
# apparently, in rare cases, r or s may be shorter than 20 bytes!
rstr = util.deflate_long(r, 0)
sstr = util.deflate_long(s, 0)
if len(rstr) < 20:
rstr = zero_byte * (20 - len(rstr)) + rstr
if len(sstr) < 20:
sstr = zero_byte * (20 - len(sstr)) + sstr
m.add_string(rstr + sstr)
return m
def verify_ssh_sig(self, data, msg):
if len(msg.asbytes()) == 40:
# spies.com bug: signature has no header
sig = msg.asbytes()
else:
kind = msg.get_text()
if kind != 'ssh-dss':
return 0
sig = msg.get_binary()
# pull out (r, s) which are NOT encoded as mpints
sigR = util.inflate_long(sig[:20], 1)
sigS = util.inflate_long(sig[20:], 1)
sigM = util.inflate_long(sha1(data).digest(), 1)
dss = DSA.construct((long(self.y), long(self.g), long(self.p), long(self.q)))
return dss.verify(sigM, (sigR, sigS))
def _encode_key(self):
if self.x is None:
raise SSHException('Not enough key information')
keylist = [0, self.p, self.q, self.g, self.y, self.x]
try:
b = BER()
b.encode(keylist)
except BERException:
raise SSHException('Unable to create ber encoding of key')
return b.asbytes()
def write_private_key_file(self, filename, password=None):
self._write_private_key_file('DSA', filename, self._encode_key(), password)
def write_private_key(self, file_obj, password=None):
self._write_private_key('DSA', file_obj, self._encode_key(), password)
def generate(bits=1024, progress_func=None):
"""
Generate a new private DSS key. This factory function can be used to
generate a new host key or authentication key.
:param int bits: number of bits the generated key should be.
:param function progress_func:
an optional function to call at key points in key generation (used
by ``pyCrypto.PublicKey``).
:return: new `.DSSKey` private key
"""
dsa = DSA.generate(bits, os.urandom, progress_func)
key = DSSKey(vals=(dsa.p, dsa.q, dsa.g, dsa.y))
key.x = dsa.x
return key
generate = staticmethod(generate)
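    # Usage sketch (illustrative, not part of the paramiko API docs): generate
    # a fresh key, persist it, then sign and verify a payload.
    #
    #   key = DSSKey.generate(bits=1024)
    #   key.write_private_key_file('/tmp/test_dss_key', password='secret')
    #   sig = key.sign_ssh_data(b'payload')
    #   assert key.verify_ssh_sig(b'payload', Message(sig.asbytes()))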
### internals...
def _from_private_key_file(self, filename, password):
data = self._read_private_key_file('DSA', filename, password)
self._decode_key(data)
def _from_private_key(self, file_obj, password):
data = self._read_private_key('DSA', file_obj, password)
self._decode_key(data)
def _decode_key(self, data):
# private key file contains:
# DSAPrivateKey = { version = 0, p, q, g, y, x }
try:
keylist = BER(data).decode()
except BERException as e:
raise SSHException('Unable to parse key file: ' + str(e))
if (type(keylist) is not list) or (len(keylist) < 6) or (keylist[0] != 0):
raise SSHException('not a valid DSA private key file (bad ber encoding)')
self.p = keylist[1]
self.q = keylist[2]
self.g = keylist[3]
self.y = keylist[4]
self.x = keylist[5]
self.size = util.bit_length(self.p)
| mit | -2,605,472,682,343,415,000 | 33.227273 | 100 | 0.567455 | false |
alexandrebarachant/mne-python | mne/decoding/tests/test_ems.py | 1 | 3384 | # Author: Denis A. Engemann <[email protected]>
#
# License: BSD (3-clause)
import os.path as op
import numpy as np
from numpy.testing import assert_array_almost_equal
from nose.tools import assert_equal, assert_raises
from mne import io, Epochs, read_events, pick_types
from mne.utils import requires_sklearn, check_version
from mne.decoding import compute_ems, EMS
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
curdir = op.join(op.dirname(__file__))
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
@requires_sklearn
def test_ems():
"""Test event-matched spatial filters"""
raw = io.read_raw_fif(raw_fname, preload=False)
# create unequal number of events
events = read_events(event_name)
events[-2, 2] = 3
picks = pick_types(raw.info, meg=True, stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
assert_raises(ValueError, compute_ems, epochs, ['aud_l', 'vis_l'])
epochs = epochs.equalize_event_counts(epochs.event_id, copy=False)[0]
assert_raises(KeyError, compute_ems, epochs, ['blah', 'hahah'])
surrogates, filters, conditions = compute_ems(epochs)
assert_equal(list(set(conditions)), [1, 3])
events = read_events(event_name)
event_id2 = dict(aud_l=1, aud_r=2, vis_l=3)
epochs = Epochs(raw, events, event_id2, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True)
epochs = epochs.equalize_event_counts(epochs.event_id, copy=False)[0]
n_expected = sum([len(epochs[k]) for k in ['aud_l', 'vis_l']])
assert_raises(ValueError, compute_ems, epochs)
surrogates, filters, conditions = compute_ems(epochs, ['aud_r', 'vis_l'])
assert_equal(n_expected, len(surrogates))
assert_equal(n_expected, len(conditions))
assert_equal(list(set(conditions)), [2, 3])
# test compute_ems cv
epochs = epochs['aud_r', 'vis_l']
epochs.equalize_event_counts(epochs.event_id)
if check_version('sklearn', '0.18'):
from sklearn.model_selection import StratifiedKFold
cv = StratifiedKFold()
else:
from sklearn.cross_validation import StratifiedKFold
cv = StratifiedKFold(epochs.events[:, 2])
compute_ems(epochs, cv=cv)
compute_ems(epochs, cv=2)
assert_raises(ValueError, compute_ems, epochs, cv='foo')
assert_raises(ValueError, compute_ems, epochs, cv=len(epochs) + 1)
raw.close()
# EMS transformer, check that identical to compute_ems
X = epochs.get_data()
y = epochs.events[:, 2]
X = X / np.std(X) # X scaled outside cv in compute_ems
Xt, coefs = list(), list()
ems = EMS()
assert_equal(ems.__repr__(), '<EMS: not fitted.>')
# manual leave-one-out to avoid sklearn version problem
for test in range(len(y)):
train = np.setdiff1d(range(len(y)), test)
ems.fit(X[train], y[train])
coefs.append(ems.filters_)
Xt.append(ems.transform(X[[test]]))
assert_equal(ems.__repr__(), '<EMS: fitted with 4 filters on 2 classes.>')
assert_array_almost_equal(filters, np.mean(coefs, axis=0))
assert_array_almost_equal(surrogates, np.vstack(Xt))
| bsd-3-clause | 6,914,058,630,183,954,000 | 37.454545 | 78 | 0.648936 | false |
snipsco/snipsskills | snipsmanager/utils/microphone_setup.py | 1 | 1997 | # -*- coding: utf-8 -*-
""" Downloader for Snips assistants. """
import os
import shutil
from .os_helpers import cmd_exists, is_raspi_os, execute_command, pipe_commands
from .. import ASOUNDCONF_DEST_PATH
# pylint: disable=too-few-public-methods
class MicrophoneSetup:
""" Downloader for Snips assistants. """
ASOUNDCONF_PATH = "../config/asound.conf"
@staticmethod
def setup_asoundconf(microphone_id):
if not is_raspi_os():
return
if microphone_id == 'respeaker':
MicrophoneSetup._copy_asoundconf("asound.conf.respeaker")
elif microphone_id == 'jabra':
MicrophoneSetup._copy_asoundconf("asound.conf.jabra")
else:
MicrophoneSetup._copy_asoundconf("asound.conf.default")
@staticmethod
def _copy_asoundconf(asoundconf_file):
""" Copy asound.conf configuration to local path.
:param asoundconf_file: the name of the asound.conf configuration, as
present in the config folder.
"""
this_dir, this_filename = os.path.split(__file__)
asoundconf_path = os.path.join(this_dir, MicrophoneSetup.ASOUNDCONF_PATH, asoundconf_file)
shutil.copy2(asoundconf_path, ASOUNDCONF_DEST_PATH)
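    # Example call (sketch): select the ReSpeaker profile; on non-Raspbian
    # hosts setup_asoundconf() returns without touching the system.
    #
    #   MicrophoneSetup.setup_asoundconf('respeaker')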
class RespeakerMicrophoneSetup:
@staticmethod
def setup(vendor_id, product_id):
if not is_raspi_os():
return
execute_command("sudo rm -f /lib/udev/rules.d/50-rspk.rules")
echo_command = ("echo ACTION==\"add\", SUBSYSTEMS==\"usb\", ATTRS{{idVendor}}==\"{}\", " +
"ATTRS{{idProduct}}==\"{}\", MODE=\"660\", GROUP=\"plugdev\"") \
.format(vendor_id, product_id)
tee_command = "sudo tee --append /lib/udev/rules.d/50-rspk.rules"
pipe_commands(echo_command, tee_command, silent=True)
execute_command("sudo adduser pi plugdev")
execute_command("sudo udevadm control --reload")
execute_command("sudo udevadm trigger")
| mit | -8,214,272,238,034,088,000 | 33.431034 | 98 | 0.625939 | false |
edickie/ciftify | ciftify/bin/ciftify_seed_corr.py | 1 | 8983 | #!/usr/bin/env python3
"""
Produces a correlation map by correlating the mean time series within the seed
with every voxel in the functional file.
Usage:
ciftify_seed_corr [options] <func> <seed>
Arguments:
<func> functional data (nifti or cifti)
<seed> seed mask (nifti, cifti or gifti)
Options:
--outputname STR Specify the output filename
    --output-ts        Also write out the mean timeseries from the seed to a text file
--roi-label INT Specify the numeric label of the ROI you want a seedmap for
--hemi HEMI If the seed is a gifti file, specify the hemisphere (R or L) here
--mask FILE brainmask
--fisher-z Apply the fisher-z transform (arctanh) to the correlation map
--weighted compute weighted average timeseries from the seed map
--use-TRs FILE Only use the TRs listed in the file provided (TR's in file starts with 1)
-v,--verbose Verbose logging
--debug Debug logging
-h, --help Prints this message
DETAILS:
The default output filename is created from the <func> and <seed> filenames,
(i.e. func.dscalar.nii + seed.dscalar.nii --> func_seed.dscalar.nii)
and written to the same folder as the <func> input. Use the '--outputname'
argument to specify a different outputname. The output datatype matches the <func>
input.
The mean timeseries is calculated using ciftify_meants, '--roi-label', '--hemi',
'--mask', and '--weighted' arguments are passed to it. See ciftify_meants '--help' for
more info on their usage. The timeseries output (*_meants.csv) of this step can be
saved to disk using the '--output-ts' option.
If a mask is provided with the '--mask' option (such as a brainmask), it will be
applied to both the seed and functional file.
The '--use-TRs' argument allows you to calculate the correlation maps from specific
timepoints (TRs) in the timeseries. This option can be used to exclude outlier
timepoints or to limit the calculation to a subsample of the timecourse
(i.e. only the beginning or end). It expects a text file containing the integer
numbers of the TRs to keep (where the first TR=1).
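For example, a TR file that keeps only the first five timepoints (an
illustrative sample, one integer per line) would contain:

    1
    2
    3
    4
    5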
Written by Erin W Dickie
"""
import os
import sys
import subprocess
import tempfile
import shutil
import logging
import logging.config
import numpy as np
import scipy as sp
import nibabel as nib
from docopt import docopt
import ciftify
from ciftify.utils import run
from ciftify.meants import MeantsSettings
# Read logging.conf
logger = logging.getLogger('ciftify')
logger.setLevel(logging.DEBUG)
class UserSettings(MeantsSettings):
def __init__(self, arguments):
MeantsSettings.__init__(self, arguments)
self.fisher_z = arguments['--fisher-z']
self.output_prefix = self.get_output_prefix(arguments['--outputname'])
self.outputcsv = self.get_outputcsv(arguments['--output-ts'])
self.TR_file = self.get_TRfile(arguments['--use-TRs'])
def get_output_prefix(self, outputname):
'''
output_prefix is outputname if it was specified
if not, it is created from the func and seed input paths
'''
## determine outbase if it has not been specified
if outputname:
output_prefix = outputname.replace('.nii.gz','').replace('.dscalar.nii','')
else:
outbase = '{}_{}'.format(self.func.base, self.seed.base)
output_prefix = os.path.join(os.path.dirname(self.func.path), outbase)
        ## uses utils function to make sure the output is writable, will sys.exit with error if not the case
ciftify.utils.check_output_writable(output_prefix)
return(output_prefix)
def get_outputcsv(self, output_ts):
'''set outputcsv name if this is asked for'''
if output_ts:
outputcsv = '{}_meants.csv'.format(self.output_prefix)
else:
outputcsv = None
return(outputcsv)
def get_TRfile(self, TRfile):
if TRfile:
ciftify.utils.check_input_readable(TRfile)
return(TRfile)
def main():
arguments = docopt(__doc__)
debug = arguments['--debug']
verbose = arguments['--verbose']
ch = logging.StreamHandler()
ch.setLevel(logging.WARNING)
if verbose:
ch.setLevel(logging.INFO)
if debug:
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
## set up the top of the log
logger.info('{}{}'.format(ciftify.utils.ciftify_logo(),
ciftify.utils.section_header('Starting ciftify_seed_corr')))
ciftify.utils.log_arguments(arguments)
settings = UserSettings(arguments)
with ciftify.utils.TempDir() as tmpdir:
logger.info('Creating tempdir:{} on host:{}'.format(tmpdir,
os.uname()[1]))
ret = run_ciftify_seed_corr(settings, tmpdir)
logger.info(ciftify.utils.section_header('Done ciftify_seed_corr'))
sys.exit(ret)
def run_ciftify_seed_corr(settings, tempdir):
logger.debug('func: type: {}, base: {}'.format(settings.func.type, settings.func.base))
logger.debug('seed: type: {}, base: {}'.format(settings.seed.type, settings.seed.base))
if ".dlabel.nii" in settings.seed.path:
logger.error("Sorry this function can't handle .dlabel.nii seeds")
sys.exit(1)
seed_ts = ciftify.meants.calc_meants_with_numpy(settings)
logger.debug('seed_ts shape before reshaping {}'.format(seed_ts.shape))
if ((len(seed_ts.shape) != 2) or (seed_ts.shape[0] != 1 and seed_ts.shape[1] !=1)):
logger.error("Incorrect shape dimensions. May have forgotten to indicate the '--weighted' or '-roi-label' file")
sys.exit(1)
seed_ts = seed_ts.reshape(seed_ts.shape[0]*seed_ts.shape[1])
logger.debug('seed_ts shape after reshaping {}'.format(seed_ts.shape))
logger.debug('Writing output with prefix: {}'.format(settings.output_prefix))
logger.debug('Writing meants: {}'.format(settings.outputcsv))
logger.info('Using numpy to calculate seed-correlation')
## convert to nifti
if settings.func.type == "cifti":
func_fnifti = os.path.join(tempdir,'func.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti',settings.func.path, func_fnifti])
func_data, outA, header, dims = ciftify.niio.load_nifti(func_fnifti)
    # import template, store the output parameters
if settings.func.type == "nifti":
func_data, outA, header, dims = ciftify.niio.load_nifti(settings.func.path)
if settings.mask:
if settings.mask.type == "cifti":
mask_fnifti = os.path.join(tempdir,'mask.nii.gz')
run(['wb_command','-cifti-convert','-to-nifti', settings.mask.path, mask_fnifti])
mask_data, _, _, _ = ciftify.niio.load_nifti(mask_fnifti)
if settings.mask.type == "nifti":
mask_data, _, _, _ = ciftify.niio.load_nifti(settings.mask.path)
# decide which TRs go into the correlation
if settings.TR_file:
TR_file = np.loadtxt(settings.TR_file, int)
TRs = TR_file - 1 # shift TR-list to be zero-indexed
else:
TRs = np.arange(dims[3])
# get mean seed timeseries
## even if no mask given, mask out all zero elements..
std_array = np.std(func_data, axis=1)
std_nonzero = np.where(std_array > 0)[0]
idx_mask = std_nonzero
if settings.mask:
idx_of_mask = np.where(mask_data > 0)[0]
idx_mask = np.intersect1d(idx_mask, idx_of_mask)
# create output array
out = np.zeros([dims[0]*dims[1]*dims[2], 1])
# look through each time series, calculating r
for i in np.arange(len(idx_mask)):
out[idx_mask[i]] = np.corrcoef(seed_ts[TRs], func_data[idx_mask[i], TRs])[0][1]
# create the 3D volume and export
out = out.reshape([dims[0], dims[1], dims[2], 1])
out = nib.nifti1.Nifti1Image(out, outA)
## determine nifti filenames for the next two steps
if settings.func.type == "nifti":
if settings.fisher_z:
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
nifti_Zcorr_output = '{}.nii.gz'.format(settings.output_prefix)
else:
nifti_corr_output = '{}.nii.gz'.format(settings.output_prefix)
if settings.func.type == "cifti":
nifti_corr_output = os.path.join(tempdir, 'corr_out.nii.gz')
if settings.fisher_z:
nifti_Zcorr_output = os.path.join(tempdir, 'corrZ_out.nii.gz')
else:
nifti_Zcorr_output = nifti_corr_output
# write out nifti
out.to_filename(nifti_corr_output)
# do fisher-z transform on values
if settings.fisher_z:
run(['wb_command', "-volume-math 'atanh(x)'", nifti_Zcorr_output,
'-var', 'x', nifti_corr_output])
if settings.func.type == "cifti":
## convert back
run(['wb_command','-cifti-convert','-from-nifti',
nifti_Zcorr_output,
settings.func.path,
'{}.dscalar.nii'.format(settings.output_prefix),
'-reset-scalars'])
if __name__ == '__main__':
main()
| mit | -9,101,947,211,567,938,000 | 36.902954 | 120 | 0.649338 | false |
Linutronix/elbe | elbepack/initvmaction.py | 1 | 23779 | # ELBE - Debian Based Embedded Rootfilesystem Builder
# Copyright (c) 2015-2017 Manuel Traut <[email protected]>
# Copyright (c) 2015-2018 Torben Hohn <[email protected]>
# Copyright (c) 2015 Silvio Fricke <[email protected]>
# Copyright (c) 2017 Philipp Arras <[email protected]>
# Copyright (c) 2017 Benedikt Spranger <[email protected]>
# Copyright (c) 2017 John Ogness <[email protected]>
#
# SPDX-License-Identifier: GPL-3.0-or-later
import sys
import time
import os
import datetime
import libvirt
import elbepack
from elbepack.treeutils import etree
from elbepack.directories import elbe_exe
from elbepack.shellhelper import CommandError, system, command_out_stderr, \
command_out
from elbepack.filesystem import TmpdirFilesystem
from elbepack.elbexml import ElbeXML, ValidationError, ValidationMode
from elbepack.config import cfg
from elbepack.xmlpreprocess import PreprocessWrapper
def is_soap_local():
return cfg["soaphost"] in ("localhost", "127.0.0.1")
def cmd_exists(x):
return any(os.access(os.path.join(path, x), os.X_OK)
for path in os.environ["PATH"].split(os.pathsep))
# Create download directory with timestamp,
# if necessary
def ensure_outdir(opt):
if opt.outdir is None:
opt.outdir = "elbe-build-%s" % (
datetime.datetime.now().strftime("%Y%m%d-%H%M%S"))
print("Saving generated Files to %s" % opt.outdir)
class InitVMError(Exception):
def __init__(self, msg):
Exception.__init__(self, msg)
class InitVMAction:
actiondict = {}
@classmethod
def register(cls, tag):
def _register(action):
action.tag = tag
cls.actiondict[action.tag] = action
return action
return _register
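    # Illustrative use of the registration decorator (a sketch, not part of
    # the original code): action classes register themselves under their
    # subcommand tag, and InitVMAction('<tag>') instantiates the matching
    # subclass via __new__ below.
    #
    #   @InitVMAction.register('mytag')
    #   class MyAction(InitVMAction):
    #       def execute(self, initvmdir, opt, args):
    #           ...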
@classmethod
def print_actions(cls):
print("available subcommands are:", file=sys.stderr)
for a in cls.actiondict:
print(" %s" % a, file=sys.stderr)
def __new__(cls, node):
action = cls.actiondict[node]
return object.__new__(action)
def __init__(self, node, initvmNeeded=True):
self.initvm = None
self.conn = None
self.node = node
# initvm might be running on a different host. Thus there's
# no need to talk with libvirt
if not is_soap_local():
return
# The tag initvmNeeded is required in order to be able to run `elbe
# initvm create`
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
retries = 18
while retries > 0:
retries -= 1
time.sleep(10)
try:
self.conn = libvirt.open("qemu:///system")
except libvirt.libvirtError as verr:
if not isinstance(verr.args[0], str):
raise
if verr.args[0].startswith('Failed to connect socket to'):
pass
if self.conn:
break
if not self.conn:
print("", file=sys.stderr)
print("Accessing libvirt provider system not possible.", file=sys.stderr)
print("Even after waiting 180 seconds.", file=sys.stderr)
print("Make sure that package 'libvirt-daemon-system' is", file=sys.stderr)
print("installed, and the service is running properly", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('authentication unavailable'):
print("", file=sys.stderr)
print("Accessing libvirt provider system not allowed.", file=sys.stderr)
print("Users which want to use elbe need to be members of the 'libvirt' group.", file=sys.stderr)
print("'gpasswd -a <user> libvirt' and logging in again,", file=sys.stderr)
print("should fix the problem.", file=sys.stderr)
sys.exit(20)
elif verr.args[0].startswith('error from service: CheckAuthorization'):
print("", file=sys.stderr)
print("Accessing libvirt failed.", file=sys.stderr)
print("Probably entering the password for accssing libvirt", file=sys.stderr)
print("timed out. If this occured after 'elbe initvm create'", file=sys.stderr)
print("it should be safe to use 'elbe initvm start' to", file=sys.stderr)
print("continue.", file=sys.stderr)
sys.exit(20)
else:
# In case we get here, the exception is unknown, and we want to see it
raise
doms = self.conn.listAllDomains()
for d in doms:
if d.name() == cfg['initvm_domain']:
self.initvm = d
if not self.initvm and initvmNeeded:
sys.exit(20)
def execute(self, _initvmdir, _opt, _args):
raise NotImplementedError('execute() not implemented')
def initvm_state(self):
return self.initvm.info()[0]
@InitVMAction.register('start')
class StartAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
print('Initvm already running.')
sys.exit(20)
elif self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
# Domain is shut off. Let's start it!
self.initvm.create()
# Wait five seconds for the initvm to boot
# TODO: Instead of waiting for five seconds
# check whether SOAP server is reachable.
            for _ in range(5):
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
print("*")
@InitVMAction.register('ensure')
class EnsureAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
# initvm might be running on a different host, thus skipping
# the check
if not is_soap_local():
return
if self.initvm_state() == libvirt.VIR_DOMAIN_SHUTOFF:
system('%s initvm start' % elbe_exe)
elif self.initvm_state() == libvirt.VIR_DOMAIN_RUNNING:
stop = time.time() + 300
while time.time() < stop:
if command_out('%s control list_projects' % elbe_exe)[0] == 0:
break
time.sleep(10)
if time.time() > stop:
print("Waited for 5 minutes and the daemon is still not active."
" Exit.")
sys.exit(20)
else:
print("Elbe initvm in bad state.")
sys.exit(20)
@InitVMAction.register('stop')
class StopAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Initvm is not running.')
sys.exit(20)
while True:
sys.stdout.write("*")
sys.stdout.flush()
time.sleep(1)
state = self.initvm_state()
if state == libvirt.VIR_DOMAIN_SHUTDOWN:
continue
if state == libvirt.VIR_DOMAIN_SHUTOFF:
break
try:
self.initvm.shutdown()
except libvirt.libvirtError as e:
raise e
print("\nInitvm shutoff")
@InitVMAction.register('attach')
class AttachAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, _opt, _args):
if self.initvm_state() != libvirt.VIR_DOMAIN_RUNNING:
print('Error: Initvm not running properly.')
sys.exit(20)
print('Attaching to initvm console.')
system('virsh --connect qemu:///system console %s' % cfg['initvm_domain'])
def submit_and_dl_result(xmlfile, cdrom, opt):
# pylint: disable=too-many-statements
# pylint: disable=too-many-branches
try:
with PreprocessWrapper(xmlfile, opt) as ppw:
xmlfile = ppw.preproc
ret, prjdir, err = command_out_stderr(
'%s control create_project' % (elbe_exe))
if ret != 0:
print("elbe control create_project failed.", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
prjdir = prjdir.strip()
cmd = '%s control set_xml %s %s' % (elbe_exe, prjdir, xmlfile)
ret, _, err = command_out_stderr(cmd)
if ret != 0:
print("elbe control set_xml failed2", file=sys.stderr)
print(err, file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
except CommandError:
# this is the failure from PreprocessWrapper
# it already printed the error message from
# elbe preprocess
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.writeproject:
with open(opt.writeproject, "w") as wpf:
wpf.write(prjdir)
if cdrom is not None:
print("Uploading CDROM. This might take a while")
try:
system(
'%s control set_cdrom "%s" "%s"' %
(elbe_exe, prjdir, cdrom))
except CommandError:
print("elbe control set_cdrom Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Upload finished")
build_opts = ''
if opt.build_bin:
build_opts += '--build-bin '
if opt.build_sources:
build_opts += '--build-sources '
if cdrom:
build_opts += '--skip-pbuilder '
try:
system(
'%s control build "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
print('elbe control wait_busy Failed', file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
print('The files, that have been built, can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("Build finished !")
print("")
if opt.build_sdk:
try:
system(
'%s control build_sdk "%s" %s' %
(elbe_exe, prjdir, build_opts))
except CommandError:
print("elbe control build_sdk Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("SDK Build started, waiting till it finishes")
try:
system('%s control wait_busy "%s"' % (elbe_exe, prjdir))
except CommandError:
print('elbe control wait_busy Failed, while waiting for the SDK',
file=sys.stderr)
print('', file=sys.stderr)
print('The project will not be deleted from the initvm.',
file=sys.stderr)
print('The files, that have been built, can be downloaded using:',
file=sys.stderr)
print('%s control get_files --output "%s" "%s"' % (elbe_exe,
opt.outdir,
prjdir),
file=sys.stderr)
print("", file=sys.stderr)
print('The project can then be removed using:',
file=sys.stderr)
print('%s control del_project "%s"' % (elbe_exe, prjdir),
file=sys.stderr)
print("", file=sys.stderr)
sys.exit(10)
print("")
print("SDK Build finished !")
print("")
try:
system(
'%s control dump_file "%s" validation.txt' %
(elbe_exe, prjdir))
except CommandError:
print(
"Project failed to generate validation.txt",
file=sys.stderr)
print("Getting log.txt", file=sys.stderr)
try:
system(
'%s control dump_file "%s" log.txt' %
(elbe_exe, prjdir))
except CommandError:
print("Failed to dump log.txt", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if opt.skip_download:
print("")
print("Listing available files:")
print("")
try:
system('%s control get_files "%s"' % (elbe_exe, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
print("")
print(
'Get Files with: elbe control get_file "%s" <filename>' %
prjdir)
else:
print("")
print("Getting generated Files")
print("")
ensure_outdir(opt)
try:
system('%s control get_files --output "%s" "%s"' % (
elbe_exe, opt.outdir, prjdir))
except CommandError:
print("elbe control get_files Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if not opt.keep_files:
try:
system('%s control del_project "%s"' % (
elbe_exe, prjdir))
except CommandError:
print("remove project from initvm failed",
file=sys.stderr)
sys.exit(20)
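# Minimal call sketch (illustrative; assumes `opt` is the option object built
# by the elbe CLI parser, carrying outdir/build_bin/build_sources/... flags):
#
#   submit_and_dl_result('project.xml', None, opt)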
def extract_cdrom(cdrom):
""" Extract cdrom iso image
returns a TmpdirFilesystem() object containing
the source.xml, which is also validated.
"""
tmp = TmpdirFilesystem()
system('7z x -o%s "%s" source.xml' % (tmp.path, cdrom))
print("", file=sys.stderr)
if not tmp.isfile('source.xml'):
print(
"Iso image does not contain a source.xml file",
file=sys.stderr)
print(
"This is not supported by 'elbe initvm'",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
sys.exit(20)
try:
exml = ElbeXML(
tmp.fname('source.xml'),
url_validation=ValidationMode.NO_CHECK)
except ValidationError as e:
print(
"Iso image does contain a source.xml file.",
file=sys.stderr)
print(
"But that xml does not validate correctly",
file=sys.stderr)
print("", file=sys.stderr)
print("Exiting !!!", file=sys.stderr)
print(e)
sys.exit(20)
print("Iso Image with valid source.xml detected !")
print(
"Image was generated using Elbe Version %s" %
exml.get_elbe_version())
return tmp
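# Usage sketch (illustrative, mirroring how the create/submit actions below
# call it):
#
#   tmp = extract_cdrom('build.iso')
#   xmlfile = tmp.fname('source.xml')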
@InitVMAction.register('create')
class CreateAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node, initvmNeeded=False)
def execute(self, initvmdir, opt, args):
# pylint: disable=too-many-branches
# pylint: disable=too-many-statements
if self.initvm is not None:
print("Initvm is already defined for the libvirt domain '%s'.\n" % cfg['initvm_domain'])
print("If you want to build in your old initvm, "
"use `elbe initvm submit <xml>`.")
print("If you want to remove your old initvm from libvirt "
"run `virsh --connect qemu:///system undefine %s`.\n" % cfg['initvm_domain'])
print("You can specify another libvirt domain by setting the "
"ELBE_INITVM_DOMAIN environment variable to an unused domain name.\n")
print("Note:")
print("\t1) You can reimport your old initvm via "
"`virsh --connect qemu:///system define <file>`")
print("\t where <file> is the corresponding libvirt.xml")
print("\t2) virsh --connect qemu:///system undefine does not delete the image "
"of your old initvm.")
sys.exit(20)
# Upgrade from older versions which used tmux
try:
system("tmux has-session -t ElbeInitVMSession 2>/dev/null")
print ("ElbeInitVMSession exists in tmux. "
"It may belong to an old elbe version. "
"Please stop it to prevent interfering with this version.", file=sys.stderr)
sys.exit(20)
except CommandError:
pass
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
try:
xml = etree(xmlfile)
                except ValidationError as e:
                    print("XML file is invalid: %s" % str(e))
                    sys.exit(20)
# Use default XML if no initvm was specified
if not xml.has("initvm"):
xmlfile = os.path.join(
elbepack.__path__[0], "init/default-init.xml")
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
else:
# No xml File was specified, build the default elbe-init-with-ssh
xmlfile = os.path.join(
elbepack.__path__[0],
"init/default-init.xml")
try:
init_opts = ''
if opt.devel:
init_opts += ' --devel'
if opt.nesting:
init_opts += ' --nesting'
if not opt.build_bin:
init_opts += ' --skip-build-bin'
if not opt.build_sources:
init_opts += ' --skip-build-source'
with PreprocessWrapper(xmlfile, opt) as ppw:
if cdrom:
system('%s init %s --directory "%s" --cdrom "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, cdrom, ppw.preproc))
else:
system(
'%s init %s --directory "%s" "%s"' %
(elbe_exe, init_opts, initvmdir, ppw.preproc))
except CommandError:
print("'elbe init' Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Read xml file for libvirt
with open(os.path.join(initvmdir, 'libvirt.xml')) as f:
xml = f.read()
# Register initvm in libvirt
try:
self.conn.defineXML(xml)
        except libvirt.libvirtError:
print('Registering initvm in libvirt failed', file=sys.stderr)
print('Try `virsh --connect qemu:///system undefine %s` to delete existing initvm' % cfg['initvm_domain'],
file=sys.stderr)
sys.exit(20)
# Build initvm
try:
system('cd "%s"; make' % (initvmdir))
except CommandError:
print("Building the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
try:
system('%s initvm start' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
if len(args) == 1:
# if provided xml file has no initvm section xmlfile is set to a
# default initvm XML file. But we need the original file here
if args[0].endswith('.xml'):
# stop here if no project node was specified
try:
x = etree(args[0])
except ValidationError as e:
print("XML file is invalid: %s" % str(e))
sys.exit(20)
if not x.has('project'):
print("elbe initvm ready: use 'elbe initvm submit "
"myproject.xml' to build a project")
sys.exit(0)
xmlfile = args[0]
elif cdrom is not None:
xmlfile = tmp.fname('source.xml')
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('submit')
class SubmitAction(InitVMAction):
def __init__(self, node):
InitVMAction.__init__(self, node)
def execute(self, _initvmdir, opt, args):
try:
system('%s initvm ensure' % elbe_exe)
except CommandError:
print("Starting the initvm Failed", file=sys.stderr)
print("Giving up", file=sys.stderr)
sys.exit(20)
# Init cdrom to None, if we detect it, we set it
cdrom = None
if len(args) == 1:
if args[0].endswith('.xml'):
# We have an xml file, use that for elbe init
xmlfile = args[0]
elif args[0].endswith('.iso'):
# We have an iso image, extract xml from there.
tmp = extract_cdrom(args[0])
xmlfile = tmp.fname('source.xml')
cdrom = args[0]
else:
print(
"Unknown file ending (use either xml or iso)",
file=sys.stderr)
sys.exit(20)
submit_and_dl_result(xmlfile, cdrom, opt)
@InitVMAction.register('sync')
class SyncAction(InitVMAction):
def __init__(self, node):
super(SyncAction, self).__init__(node)
def execute(self, _initvmdir, opt, args):
top_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
try:
system("rsync --info=name1,stats1 --archive --times "
"--exclude='.git*' --exclude='*.pyc' --exclude='elbe-build*' "
"--exclude='initvm' --exclude='__pycache__' --exclude='docs' "
"--exclude='examples' "
"--rsh='ssh -p %s' --chown=root:root "
"%s/ root@localhost:/var/cache/elbe/devel" %
(cfg["sshport"], top_dir))
except CommandError as E:
print(E)
| gpl-3.0 | 8,525,162,397,998,769,000 | 33.263689 | 118 | 0.529711 | false |
Capitains/Nautilus | capitains_nautilus/flask_ext.py | 1 | 7289 | from pkg_resources import resource_filename
import logging
from copy import deepcopy
from collections import defaultdict
from flask import Blueprint, Response
from capitains_nautilus.apis.cts import CTSApi
from capitains_nautilus.apis.dts import DTSApi
def _all_origins():
return "*"
class FlaskNautilus(object):
""" HTTP API Interfaces for MyCapytains resolvers
:param prefix: Prefix on which to install the extension
:param app: Application on which to register
:param name: Name to use for the blueprint
:param resolver: Resolver
:type resolver: Resolver
:param flask_caching: HTTP Cache should be a FlaskCaching Cache object
:type flask_caching: Cache
:param logger: Logging handler.
:type logger: logging
:param apis: Set of APIs to connect to Nautilus
:type apis: set of classes
:cvar ROUTES: List of triple length tuples
:cvar Access_Control_Allow_Methods: Dictionary with route name and allowed methods over CORS
:cvar Access_Control_Allow_Origin: Dictionary with route name and allowed host over CORS or "*"
:cvar LoggingHandler: Logging handler to be set for the blueprint
:ivar logger: Logging handler
:type logger: logging.Logger
:ivar resolver: CapiTainS resolver
"""
Access_Control_Allow_Origin = "*"
LoggingHandler = logging.StreamHandler
def __init__(self, prefix="", app=None, name=None,
resolver=None,
flask_caching=None,
access_Control_Allow_Origin=None,
access_Control_Allow_Methods=None,
logger=None, apis=None
):
self._extensions = {}
self.logger = None
self.retriever = None
self.resolver = resolver
self.setLogger(logger)
self.name = name
self.prefix = prefix
self.blueprint = None
self.ROUTES = []
self.CACHED = []
self.routes = []
if apis is None:
from warnings import warn
warn(
"The parameter `apis` will need to be set-up explicitly starting 2.0.0",
DeprecationWarning
)
apis = {CTSApi(), DTSApi()}
self.Access_Control_Allow_Methods = access_Control_Allow_Methods
if not self.Access_Control_Allow_Methods:
self.Access_Control_Allow_Methods = {}
if access_Control_Allow_Origin:
self.Access_Control_Allow_Origin = defaultdict(_all_origins)
self.Access_Control_Allow_Origin.update(access_Control_Allow_Origin)
else:
self.Access_Control_Allow_Origin = FlaskNautilus.Access_Control_Allow_Origin
for api in apis:
api.init_extension(self)
self.__flask_caching__ = flask_caching
if self.name is None:
self.name = __name__
if app:
self.init_app(app=app)
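    # Illustrative wiring (a sketch; `app` and `resolver` are placeholders
    # for an existing Flask application and a CapiTainS resolver):
    #
    #   app = Flask(__name__)
    #   nautilus = FlaskNautilus(app=app, resolver=resolver,
    #                            apis={CTSApi(), DTSApi()})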
def register(self, extension, extension_name):
""" Register an extension into the Nautilus Router
:param extension: Extension
:param extension_name: Name of the Extension
:return:
"""
self._extensions[extension_name] = extension
self.ROUTES.extend([
tuple(list(t) + [extension_name])
for t in extension.ROUTES
])
self.CACHED.extend([
(f_name, extension_name)
for f_name in extension.CACHED
])
# This order allows for user defaults to overwrite extension ones
self.Access_Control_Allow_Methods.update({
k: v
for k, v in extension.Access_Control_Allow_Methods.items()
if k not in self.Access_Control_Allow_Methods
})
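    # Note on the expected shape of an "extension" (inferred from the lookups
    # above, as CTSApi/DTSApi provide): an object exposing ROUTES
    # ((url, name, methods) triples), CACHED (names of memoizable methods)
    # and Access_Control_Allow_Methods.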
@property
def flaskcache(self):
return self.__flask_caching__
def setLogger(self, logger):
""" Set up the Logger for the application
:param logger: logging.Logger object
:return: Logger instance
"""
self.logger = logger
if logger is None:
self.logger = logging.getLogger("capitains_nautilus")
formatter = logging.Formatter("[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
stream = FlaskNautilus.LoggingHandler()
stream.setLevel(logging.INFO)
stream.setFormatter(formatter)
self.logger.addHandler(stream)
if self.resolver:
self.resolver.logger = self.logger
return self.logger
def init_app(self, app):
""" Initiate the extension on the application
:param app: Flask Application
:return: Blueprint for Flask Nautilus registered in app
:rtype: Blueprint
"""
self.init_blueprint(app)
if self.flaskcache is not None:
for func, extension_name in self.CACHED:
func = getattr(self._extensions[extension_name], func)
setattr(
self._extensions[extension_name],
func.__name__,
self.flaskcache.memoize()(func)
)
return self.blueprint
def init_blueprint(self, app):
""" Properly generates the blueprint, registering routes and filters and connecting the app and the blueprint
:return: Blueprint of the extension
:rtype: Blueprint
"""
self.blueprint = Blueprint(
self.name,
self.name,
template_folder=resource_filename("capitains_nautilus", "data/templates"),
url_prefix=self.prefix
)
# Register routes
for url, name, methods, extension_name in self.ROUTES:
self.blueprint.add_url_rule(
url,
view_func=self.view(name, extension_name),
endpoint=name[2:],
methods=methods
)
app.register_blueprint(self.blueprint)
return self.blueprint
def view(self, function_name, extension_name):
""" Builds response according to a function name
:param function_name: Route name / function name
:param extension_name: Name of the extension holding the function
:return: Function
"""
if isinstance(self.Access_Control_Allow_Origin, dict):
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin[function_name],
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
else:
d = {
"Access-Control-Allow-Origin": self.Access_Control_Allow_Origin,
"Access-Control-Allow-Methods": self.Access_Control_Allow_Methods[function_name]
}
def r(*x, **y):
val = getattr(self._extensions[extension_name], function_name)(*x, **y)
if isinstance(val, Response):
val.headers.extend(d)
return val
else:
val = list(val)
val[2].update(d)
return tuple(val)
return r
| mpl-2.0 | -642,994,500,773,671,200 | 32.131818 | 117 | 0.600494 | false |
sbg2133/miscellaneous_projects | carina/ItoNH.py | 1 | 1115 | import numpy as np
import matplotlib.pyplot as plt
from astropy.io import fits
import aplpy
from astropy.wcs import WCS
import sys, os
from getIQU import IQU
from astropy import coordinates as coord
from astropy.coordinates import SkyCoord
from astropy import units as u
from scipy.interpolate import griddata
plt.ion()
root_dir = '/home/wizwit/miscellaneous_projects/carina/carinaData'
blast250_file = os.path.join(root_dir, 'smooth/3.0_arcmin/carinaneb_250_smoothed_3.0_rl.fits')
beta = 1.27
def getPsi(path_to_file):
I, Q, U, __, wcs = IQU(path_to_file)
Pvals = np.sqrt(Q**2 + U**2)
pvals = Pvals/I
# pvals /= pol_eff[band_idx]
psi = 0.5*np.arctan2(U,Q)
return I, Q, U, wcs, psi
I, __, __, wcs_250, __ = getPsi(blast250_file)
#tau_d = (nu/nu0)**beta
# See Walker pg. 71
# nu0 = frequency at which dust emission becomes optically thin
#nu0 = 0.103 * Td # 0.103 (THz/K) * Td
#Inu_dust = Bnu(Td)*(1.0 - np.exp(-1.0*tau_d))
# See Walker pg. 69
# Av = 1.086*tau_d
# N_H = 1.79e21 * Av # (atoms/cm**2 mag)
# 1) Solve tau_d for temperature
# 2) Plug into Inu_dust equation
| gpl-3.0 | -8,403,188,222,198,603,000 | 24.340909 | 94 | 0.673543 | false |
trasa/sprout | sprout/sprout/servicehosts.py | 1 | 1382 | import os
from fabric.api import *
def create_objects(cfg, service_hosts):
""" Turn a list of service host info into objects that can do
starting, stopping of services, or other things that
we think up.
"""
return [ServiceHost(
s['hostname'],
s['services'],
cfg.get_remote_user())
for s in service_hosts]
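# Shape of the expected input (inferred from the lookups above; values are
# placeholders):
#
#   service_hosts = [{'hostname': 'web1.example.org',
#                     'services': ['httpd', 'crond']}]
#   hosts = create_objects(cfg, service_hosts)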
class ServiceHost(object):
def __init__(self, hostname, services, remote_user):
self.hostname = hostname
self.services = services
self.remote_user = remote_user
self.connected = False
def _connect(self):
if not self.connected:
# connect to self.hostname
env.user = self.remote_user
env.host_string = self.hostname
self.connected = True
def _run_service(self, service_name, state):
sudo('/sbin/service %s %s' % (service_name, state))
def _run_all_services(self, state):
for service_name in self.services:
self._run_service(service_name, state)
def start(self):
self._connect()
        self._run_all_services('start')
def stop(self):
self._connect()
self._run_all_services('stop')
def restart(self):
""" Restart the services on this host."""
self._connect()
self._run_all_services('restart')
| apache-2.0 | -7,280,064,624,537,456,000 | 26.64 | 65 | 0.575977 | false |
hqcckes/python-scheduler | Server/message.py | 1 | 2650 | # coding=utf-8
import os
import sys
import codecs
import json
import logging
import logging.config
from rpyc import Service
from rpyc.utils.server import ThreadedServer
from ConfigParser import SafeConfigParser
class Message(Service):
@staticmethod
def exposed_send(message):
import urllib2
logger.info(u"短信内容:" + message)
lx = u"0"
dlzh = cf.get(u"message", u"username")
dlmm = cf.get(u"message", u"password")
sjhm = cf.get(u"message", u"phone")
url = cf.get(u"message", u"url")
dxnr = urllib2.quote(message.encode(u"GB18030"))
fhls = u"0"
data = u"LX=" + lx + u"&DLZH=" + dlzh + u"&DLMM=" + dlmm + u"&SJHM=" + sjhm + u"&DXNR=" + dxnr + u"&FHLS=" + fhls
url = url + data
request = urllib2.Request(url)
response = urllib2.urlopen(request).read()
response = response.decode(u"GB18030")
if response == u"0":
logger.info(u"警报短信发送成功!")
else:
logger.warning(u"警报短信发送失败,返回码:" + response)
return response
def setup_logging(path=u"message.json", level=logging.INFO, env_key=u"LOG_CFG"):
"""
    Load the logging configuration
    :param path: default config path
    :param level: default log level
    :param env_key: environment variable overriding the path
:return:
"""
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with codecs.open(filename=path, mode=u"rb", encoding=u"utf8") as f:
config = json.load(f)
logging.config.dictConfig(config)
else:
logging.basicConfig(level=level)
def read_conf(path=u"message.conf"):
"""
    Load the configuration
    :param path: path to the config file
:return: ConfigParser
"""
if not os.path.exists(path):
logger.error(u"没有找到配置文件:\"message.conf\" !")
sys.exit(2)
config = SafeConfigParser()
with codecs.open(path, u"rb", encoding=u"utf8") as c_file:
config.readfp(c_file)
return config
if __name__ == u'__main__':
    # OS path separator
    sep = os.sep
    # directory of this script, decoded as GB18030 to handle Chinese paths
    c_path = os.path.split(os.path.realpath(__file__))[0].decode(u"GB18030")
    # load the logging configuration
    setup_logging(path=os.path.join(c_path, u"config/message.json"))
    logger = logging.getLogger(__name__)
    # load the configuration file
config_file = os.path.join(c_path, u"config/message.conf")
cf = read_conf(path=config_file)
service = ThreadedServer(Message, port=9999, auto_register=False)
service.start()
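    # Client-side sketch (an assumption -- any rpyc client on port 9999 used
    # above would do):
    #
    #   import rpyc
    #   conn = rpyc.connect("localhost", 9999)
    #   conn.root.send(u"test message")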
| gpl-3.0 | -404,603,665,324,014,900 | 26.066667 | 121 | 0.605911 | false |
ContinuumIO/ashiba | enaml/enaml/qt/qt_stack_item.py | 1 | 3902 | #------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from atom.api import Typed
from enaml.widgets.stack_item import ProxyStackItem
from .QtGui import QFrame
from .q_single_widget_layout import QSingleWidgetLayout
from .qt_container import QtContainer
from .qt_widget import QtWidget
class QStackItem(QFrame):
""" A QFrame subclass which acts as an item QStack.
"""
def __init__(self, *args, **kwargs):
""" Initialize a QStackItem.
Parameters
----------
*args, **kwargs
            The positional and keyword arguments required to initialize
a QWidget.
"""
super(QStackItem, self).__init__(*args, **kwargs)
self._stack_widget = None
self.setLayout(QSingleWidgetLayout())
def stackWidget(self):
""" Get the stack widget for this stack item.
Returns
-------
result : QWidget or None
The stack widget being managed by this item.
"""
return self._stack_widget
def setStackWidget(self, widget):
""" Set the stack widget for this stack item.
Parameters
----------
widget : QWidget
The QWidget to use as the stack widget in this item.
"""
self._stack_widget = widget
self.layout().setWidget(widget)
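    # Usage sketch (illustrative only -- QtStackItem below does exactly this
    # in init_layout/child_added):
    #
    #   item = QStackItem()
    #   item.setStackWidget(some_container_widget)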
class QtStackItem(QtWidget, ProxyStackItem):
""" A Qt implementation of an Enaml ProxyStackItem.
"""
#: A reference to the widget created by the proxy.
widget = Typed(QStackItem)
#--------------------------------------------------------------------------
# Initialization API
#--------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying QStackItem widget.
"""
self.widget = QStackItem(self.parent_widget())
def init_layout(self):
""" Initialize the layout for the underyling widget.
"""
super(QtStackItem, self).init_layout()
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Utility Methods
#--------------------------------------------------------------------------
def stack_widget(self):
""" Find and return the stack widget child for this widget.
"""
d = self.declaration.stack_widget()
if d is not None:
return d.proxy.widget
#--------------------------------------------------------------------------
# Child Events
#--------------------------------------------------------------------------
def child_added(self, child):
""" Handle the child added event for a QtStackItem.
"""
super(QtStackItem, self).child_added(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
def child_removed(self, child):
""" Handle the child added event for a QtStackItem.
"""
super(QtStackItem, self).child_removed(child)
if isinstance(child, QtContainer):
self.widget.setStackWidget(self.stack_widget())
#--------------------------------------------------------------------------
# Widget Update Methods
#--------------------------------------------------------------------------
def set_visible(self, visible):
""" An overridden visibility setter.
This setter disables changing visibility on the widget since
the visibility is controlled entirely by the parent stack.
"""
pass
| bsd-3-clause | -7,036,452,119,728,805,000 | 30.467742 | 79 | 0.494106 | false |
forgeservicelab/ansible-roles.django_saml_app | templates/settings.py | 1 | 5840 | """
Django settings for samldemo project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
import logging
logging.basicConfig(level=logging.DEBUG)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@!poms#fy-w!ad&i945blb)arnx!(zj$37x1b$n9l_8*$2=m-0'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djangosaml2',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
#'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'samldemo.urls'
WSGI_APPLICATION = 'samldemo.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'djangosaml2.backends.Saml2Backend',
)
LOGIN_URL = '/saml2/login/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'
import saml2
SAML_CONFIG = {
# full path to the xmlsec1 binary programm
'xmlsec_binary': '/usr/bin/xmlsec1',
# your entity id, usually your subdomain plus the url to the metadata view
'entityid': '{{ ansible_fqdn }}/saml2/metadata/',
# directory with attribute mapping
'attribute_map_dir': '/usr/local/lib/python2.7/dist-packages/saml2/attributemaps',
# this block states what services we provide
'service': {
# we are just a lonely SP
'sp' : {
'allow_unsolicited': True,
'name': 'Federated Django sample SP',
'endpoints': {
# url and binding to the assetion consumer service view
# do not change the binding or service name
'assertion_consumer_service': [
('http://{{ ansible_fqdn }}/saml2/acs/',
saml2.BINDING_HTTP_POST),
],
# url and binding to the single logout service view
# do not change the binding or service name
'single_logout_service': [
('http://{{ ansible_fqdn }}/saml2/ls/',
saml2.BINDING_HTTP_REDIRECT),
],
},
# attributes that this project need to identify a user
'required_attributes': ['cn'],
# attributes that may be useful to have but not required
'optional_attributes': ['eduPersonAffiliation'],
# in this section the list of IdPs we talk to are defined
'idp': {
# we do not need a WAYF service since there is
# only an IdP defined here. This IdP should be
# present in our metadata
# the keys of this dictionary are entity ids
'{{ django_saml_app_idp }}/saml2/idp/metadata.php': {
'single_sign_on_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SSOService.php',
},
'single_logout_service': {
saml2.BINDING_HTTP_REDIRECT: '{{ django_saml_app_idp }}/saml2/idp/SingleLogoutService.php',
},
},
},
},
},
# where the remote metadata is stored
'metadata': {
'local': [os.path.join(BASE_DIR, 'remote_metadata.xml')],
},
# set to 1 to output debugging information
'debug': 1,
# certificate
'key_file': os.path.join(BASE_DIR, 'key'),
'cert_file': os.path.join(BASE_DIR, 'cert.crt'),
# own metadata settings
'contact_person': [
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': '[email protected]',
'contact_type': 'technical'},
{'given_name': 'Tomas',
'sur_name': 'Karasek',
'company': 'Digile',
'email_address': '[email protected]',
'contact_type': 'administrative'},
],
# you can set multilanguage information here
'organization': {
'name': [('Digile', 'en')],
'display_name': [('Digile', 'en')],
'url': [('http://forgeservicelab.fi', 'en')],
},
'valid_for': 24, # how long is our metadata valid
}
SAML_ATTRIBUTE_MAPPING = {
# cn is in the OID notation urn:oid:2.5.4.3
'cn': ('username', ),
'mail': ('email', ),
'givenName': ('first_name', ),
'sn': ('last_name', )
} | mit | 8,473,894,465,641,903,000 | 28.5 | 113 | 0.62089 | false |
wil/pyroman | examples/example1/03_standard_chains.py | 1 | 1452 | """
Pyroman uses some standard chains, set in its config.
These chains are used by the "allow()", "reject()" and "drop()" commands
for nicer rule writing, and probably should do exactly that.
If you want maximal performance, you'll want to change these to ACCEPT and DROP
directly by calling 'Firewall.accept = "ACCEPT"' and removing the lines below.
The (small) benefits of using this approach is that you can easily disable
the rules (by modifying 'drop' and 'reject') without reloading your firewall
and that you get complete traffic counters in these chains.
The variables "Firewall.accept", "Firewall.drop" and "Firewall.reject" are
used here, so you can change them in one place only.
"""
Firewall.accept="accept"
add_chain(Firewall.accept)
# Kernel and iptables can do new string matches?
if Firewall.iptables_version(min="1.3.4") and \
Firewall.kernel_version(min="2.6.12"):
# Drop bittorrent traffic
iptables(Firewall.accept, '-m string --string "BitTorrent protocol" ' + \
'--algo bm --from 0 --to 100 -j DROP')
# add accept default rule to the chain
iptables(Firewall.accept, "-j ACCEPT")
# this is a silent drop
Firewall.drop="drop"
add_chain(Firewall.drop)
iptables(Firewall.drop, "-j DROP")
# .. these are clean "reject" rules (i.e. send 'connection refused' back)
Firewall.reject="reject"
add_chain(Firewall.reject)
iptables(Firewall.reject, "-p tcp -j REJECT --reject-with tcp-reset")
iptables(Firewall.reject, "-j REJECT")
| mit | 4,925,883,790,830,189,000 | 39.333333 | 79 | 0.74449 | false |
ViderumGlobal/ckanext-requestdata | ckanext/requestdata/controllers/package.py | 1 | 3021 | from ckan.lib import base
from ckan.common import c, _
from ckan import logic
import ckan.model as model
import ckan.lib.helpers as h
from ckan.plugins import toolkit
from ckan.controllers.package import PackageController as _PackageController
import ckan.lib.navl.dictization_functions as dict_fns
from ckanext.requestdata.helpers import has_query_param
get_action = logic.get_action
NotAuthorized = logic.NotAuthorized
ValidationError = logic.ValidationError
clean_dict = logic.clean_dict
try:
# Support CKAN 2.6
redirect = base.redirect
except ImportError:
# Redirect is not redirect_to in CKAN 2.7
redirect = h.redirect_to
abort = base.abort
tuplize_dict = logic.tuplize_dict
parse_params = logic.parse_params
class PackageController(_PackageController):
def create_metadata_package(self):
# Handle metadata-only datasets
if has_query_param('metadata'):
package_type = 'requestdata-metadata-only'
form_vars = {
'errors': {},
'dataset_type': package_type,
'action': 'new',
'error_summary': {},
'data': {
'tag_string': '',
'group_id': None,
'type': package_type
},
'stage': ['active']
}
if toolkit.request.method == 'POST':
context = {'model': model, 'session': model.Session,
'user': c.user, 'auth_user_obj': c.userobj}
data_dict = clean_dict(dict_fns.unflatten(
tuplize_dict(parse_params(toolkit.request.POST))))
data_dict['type'] = package_type
try:
package = get_action('package_create')(context, data_dict)
url = h.url_for(controller='package', action='read',
id=package['name'])
redirect(url)
except NotAuthorized:
abort(403, _('Unauthorized to create a dataset.'))
except ValidationError, e:
errors = e.error_dict
error_summary = e.error_summary
form_vars = {
'errors': errors,
'dataset_type': package_type,
'action': 'new',
'error_summary': error_summary,
'stage': ['active']
}
form_vars['data'] = data_dict
extra_vars = {
'form_vars': form_vars,
'form_snippet': 'package/new_package_form.html',
'dataset_type': package_type
}
return toolkit.render('package/new.html',
extra_vars=extra_vars)
else:
return self.new()
else:
return self.new()
| agpl-3.0 | -8,142,442,439,098,392,000 | 33.329545 | 78 | 0.503807 | false |
Motolea/pentagram | aplicatiepentagram/Pentagram/views.py | 1 | 3440 | from django.shortcuts import render
from django.contrib.auth.models import User
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework import status
from rest_framework.authtoken.views import ObtainAuthToken
from rest_framework.authtoken.models import Token
from Pentagram.models import Photo
from Pentagram.models import Comment
from Pentagram.models import Like
from Pentagram.serializers import UserSerializer
from Pentagram.serializers import PhotoSerializer
from Pentagram.serializers import CommentSerializer
from rest_framework.permissions import AllowAny
from rest_framework.decorators import permission_classes
@api_view(['GET', 'POST'])
def photos(request):
if request.method == "GET":
photos = Photo.objects.all()
serializer = PhotoSerializer(photos, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
photo_serializer = PhotoSerializer(data=request.data)
if photo_serializer.is_valid():
photo_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=photo_serializer.errors)
@api_view(['GET','POST'])
@permission_classes((AllowAny,))
def users(request):
if request.method == "GET":
users = User.objects.all()
serializer = UserSerializer(users, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
user_serializer = UserSerializer(data=request.data)
if user_serializer.is_valid():
user_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=user_serializer.errors)
@api_view(['GET','POST'])
def comments(request, id_photo):
if request.method == "GET":
comments = Comment.objects.filter(photo_id=id_photo)
serializer = CommentSerializer(comments, many=True)
return Response(status=status.HTTP_200_OK, data=serializer.data)
if request.method == "POST":
        data = request.data.copy()
        data['photo'] = id_photo
        comment_serializer = CommentSerializer(data=data)
if comment_serializer.is_valid():
comment_serializer.save()
return Response(status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST, data=comment_serializer.errors)
@api_view(['GET', 'POST'])
def like(request, id_photo):
if request.method == 'GET':
counter = Like.objects.filter(photo_id=id_photo).count()
return Response(status=status.HTTP_200_OK, data=counter)
if request.method == 'POST':
if Like.objects.filter(photo=id_photo, user=request.user.id).count() == 0:
Like.objects.create(photo_id=id_photo, user=request.user).save()
return Response(status=status.HTTP_201_CREATED)
else:
Like.objects.filter(photo=id_photo, user=request.user.id).delete()
return Response(status=status.HTTP_205_RESET_CONTENT)
class CustomObtainAuthToken(ObtainAuthToken):
def post(self, request, *args, **kwargs):
response = super(CustomObtainAuthToken, self).post(request, *args, **kwargs)
token = Token.objects.get(key=response.data['token'])
return Response({'token': token.key, 'id': token.user_id})
| gpl-3.0 | 409,339,148,438,177,900 | 39.952381 | 91 | 0.701744 | false |
hlange/LogSoCR | .waf/waflib/extras/ocaml.py | 1 | 9469 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2010 (ita)
"ocaml support"
import os, re
from waflib import Utils, Task
from waflib.Logs import error
from waflib.TaskGen import feature, before_method, after_method, extension
EXT_MLL = ['.mll']
EXT_MLY = ['.mly']
EXT_MLI = ['.mli']
EXT_MLC = ['.c']
EXT_ML = ['.ml']
open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
def filter_comments(txt):
meh = [0]
def repl(m):
if m.group(1): meh[0] += 1
elif m.group(2): meh[0] -= 1
elif not meh[0]: return m.group()
return ''
return foo.sub(repl, txt)
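# Illustrative behaviour (an assumption, not taken from the sources):
#
#   filter_comments("let x = 1 (* note (* nested *) *)")  # -> "let x = 1 "
#
# i.e. (* ... *) comment spans, including nested ones, are stripped while
# string literals are preserved.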
def scan(self):
node = self.inputs[0]
code = filter_comments(node.read())
global open_re
names = []
import_iterator = open_re.finditer(code)
if import_iterator:
for import_match in import_iterator:
names.append(import_match.group(1))
found_lst = []
raw_lst = []
for name in names:
nd = None
for x in self.incpaths:
nd = x.find_resource(name.lower()+'.ml')
if not nd: nd = x.find_resource(name+'.ml')
if nd:
found_lst.append(nd)
break
else:
raw_lst.append(name)
return (found_lst, raw_lst)
native_lst=['native', 'all', 'c_object']
bytecode_lst=['bytecode', 'all']
@feature('ocaml')
def init_ml(self):
Utils.def_attrs(self,
type = 'all',
incpaths_lst = [],
bld_incpaths_lst = [],
mlltasks = [],
mlytasks = [],
mlitasks = [],
native_tasks = [],
bytecode_tasks = [],
linktasks = [],
bytecode_env = None,
native_env = None,
compiled_tasks = [],
includes = '',
uselib = '',
are_deps_set = 0)
@feature('ocaml')
@after_method('init_ml')
def init_envs_ml(self):
self.islibrary = getattr(self, 'islibrary', False)
global native_lst, bytecode_lst
self.native_env = None
if self.type in native_lst:
self.native_env = self.env.derive()
if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
self.bytecode_env = None
if self.type in bytecode_lst:
self.bytecode_env = self.env.derive()
if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
if self.type == 'c_object':
self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@feature('ocaml')
@before_method('apply_vars_ml')
@after_method('init_envs_ml')
def apply_incpaths_ml(self):
inc_lst = self.includes.split()
lst = self.incpaths_lst
for dir in inc_lst:
node = self.path.find_dir(dir)
if not node:
error("node not found: " + str(dir))
continue
if not node in lst:
lst.append(node)
self.bld_incpaths_lst.append(node)
# now the nodes are added to self.incpaths_lst
@feature('ocaml')
@before_method('process_source')
def apply_vars_ml(self):
for i in self.incpaths_lst:
if self.bytecode_env:
app = self.bytecode_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
if self.native_env:
app = self.native_env.append_value
app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
for name in self.uselib.split():
for vname in varnames:
cnt = self.env[vname+'_'+name]
if cnt:
if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
if self.native_env: self.native_env.append_value(vname, cnt)
@feature('ocaml')
@after_method('process_source')
def apply_link_ml(self):
if self.bytecode_env:
ext = self.islibrary and '.cma' or '.run'
linktask = self.create_task('ocalink')
linktask.bytecode = 1
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.bytecode_env
self.linktasks.append(linktask)
if self.native_env:
if self.type == 'c_object': ext = '.o'
elif self.islibrary: ext = '.cmxa'
else: ext = ''
linktask = self.create_task('ocalinkx')
linktask.set_outputs(self.path.find_or_declare(self.target + ext))
linktask.env = self.native_env
self.linktasks.append(linktask)
# we produce a .o file to be used by gcc
self.compiled_tasks.append(linktask)
@extension(*EXT_MLL)
def mll_hook(self, node):
mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
mll_task.env = self.native_env.derive()
self.mlltasks.append(mll_task)
self.source.append(mll_task.outputs[0])
@extension(*EXT_MLY)
def mly_hook(self, node):
mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
mly_task.env = self.native_env.derive()
self.mlytasks.append(mly_task)
self.source.append(mly_task.outputs[0])
task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
task.env = self.native_env.derive()
@extension(*EXT_MLI)
def mli_hook(self, node):
task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
task.env = self.native_env.derive()
self.mlitasks.append(task)
@extension(*EXT_MLC)
def mlc_hook(self, node):
task = self.create_task('ocamlcc', node, node.change_ext('.o'))
task.env = self.native_env.derive()
self.compiled_tasks.append(task)
@extension(*EXT_ML)
def ml_hook(self, node):
if self.native_env:
task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
task.env = self.native_env.derive()
task.incpaths = self.bld_incpaths_lst
self.native_tasks.append(task)
if self.bytecode_env:
task = self.create_task('ocaml', node, node.change_ext('.cmo'))
task.env = self.bytecode_env.derive()
task.bytecode = 1
task.incpaths = self.bld_incpaths_lst
self.bytecode_tasks.append(task)
def compile_may_start(self):
if not getattr(self, 'flag_deps', ''):
self.flag_deps = 1
# the evil part is that we can only compute the dependencies after the
# source files can be read (this means actually producing the source files)
if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
self.signature() # ensure that files are scanned - unfortunately
tree = self.generator.bld
for node in self.inputs:
lst = tree.node_deps[self.uid()]
for depnode in lst:
for t in alltasks:
if t == self: continue
if depnode in t.inputs:
self.set_run_after(t)
# TODO necessary to get the signature right - for now
delattr(self, 'cache_sig')
self.signature()
return Task.Task.runnable_status(self)
class ocamlx(Task.Task):
"""native caml compilation"""
color = 'GREEN'
run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocaml(Task.Task):
"""bytecode caml compilation"""
color = 'GREEN'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
scan = scan
runnable_status = compile_may_start
class ocamlcmi(Task.Task):
"""interface generator (the .i files?)"""
color = 'BLUE'
run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
before = ['ocamlcc', 'ocaml', 'ocamlcc']
class ocamlcc(Task.Task):
"""ocaml to c interfaces"""
color = 'GREEN'
run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
class ocamllex(Task.Task):
"""lexical generator"""
color = 'BLUE'
run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
class ocamlyacc(Task.Task):
"""parser generator"""
color = 'BLUE'
run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
before = ['ocamlcmi', 'ocaml', 'ocamlcc']
def base(self):
node = self.outputs[0]
s = os.path.splitext(node.name)[0]
return node.bld_dir() + os.sep + s
def link_may_start(self):
if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
else: alltasks = self.generator.native_tasks
for x in alltasks:
if not x.hasrun:
return Task.ASK_LATER
if not getattr(self, 'order', ''):
# now reorder the inputs given the task dependencies
# this part is difficult, we do not have a total order on the tasks
# if the dependencies are wrong, this may not stop
seen = []
pendant = []+alltasks
while pendant:
task = pendant.pop(0)
if task in seen: continue
for x in task.run_after:
if not x in seen:
pendant.append(task)
break
else:
seen.append(task)
self.inputs = [x.outputs[0] for x in seen]
self.order = 1
return Task.Task.runnable_status(self)
class ocalink(Task.Task):
"""bytecode caml link"""
color = 'YELLOW'
run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
runnable_status = link_may_start
after = ['ocaml', 'ocamlcc']
class ocalinkx(Task.Task):
"""native caml link"""
color = 'YELLOW'
run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
runnable_status = link_may_start
after = ['ocamlx', 'ocamlcc']
def configure(conf):
opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
if (not opt) or (not occ):
conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
v = conf.env
v['OCAMLC'] = occ
v['OCAMLOPT'] = opt
v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
v['OCAMLFLAGS'] = ''
where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
v['OCAMLLIB'] = where
v['LIBPATH_OCAML'] = where
v['INCLUDES_OCAML'] = where
v['LIB_OCAML'] = 'camlrun'
| agpl-3.0 | 1,413,643,713,363,514,400 | 27.607251 | 117 | 0.659521 | false |
vlegoff/tsunami | src/primaires/format/editeurs/floatedit/__init__.py | 1 | 4072 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant l'éditeur 'floatedit'.
Si des redéfinitions de contexte-éditeur standard doivent être faites, elles
seront placées dans ce package
Note importante : ce package contient la définition d'un éditeur, mais
celui-ci peut très bien être étendu par d'autres modules. Au quel cas,
les extensions n'apparaîtront pas ici.
"""
from primaires.interpreteur.editeur.presentation import Presentation
from primaires.interpreteur.editeur.description import Description
from primaires.salle.editeurs.redit.edt_details import EdtDetails
class EdtFloatedit(Presentation):
"""Classe définissant l'éditeur de description flottante 'floatedit'."""
nom = "floatedit"
def __init__(self, personnage, flottante):
"""Constructeur de l'éditeur"""
if personnage:
instance_connexion = personnage.instance_connexion
else:
instance_connexion = None
Presentation.__init__(self, instance_connexion, flottante)
if personnage and flottante:
self.construire(flottante)
def __getnewargs__(self):
return (None, None)
def construire(self, flottante):
"""Construction de l'éditeur"""
# Description
description = self.ajouter_choix("description", "d", Description,
flottante)
description.parent = self
description.apercu = "{objet.description.paragraphes_indentes}"
description.aide_courte = \
"| |tit|" + "Description flottante '{}'".format(
flottante.cle).ljust(76) + "|ff||\n" + self.opts.separateur
        # Details
details = self.ajouter_choix("details", "e", EdtDetails, flottante,
"details")
details.parent = self
details.aide_courte = \
"Entrez le nom d'un |cmd|détail existant|ff| pour l'éditer ou " \
"un |cmd|nouveau détail|ff|\n" \
"pour le créer ; |ent|/|ff| pour revenir à la fenêtre parente.\n" \
"Options :\n" \
" - |ent|/s <détail existant> / <synonyme 1> (/ <synonyme 2> / " \
"...)|ff| : permet\n" \
" de modifier les synonymes du détail passée en paramètre. " \
"Pour chaque\n" \
" synonyme donné à l'option, s'il existe, il sera supprimé ; " \
"sinon, il sera\n" \
" ajouté à la liste.\n" \
" - |ent|/d <détail existant>|ff| : supprime le détail " \
"indiqué\n\n"
| bsd-3-clause | -354,768,347,464,347,840 | 41.505263 | 79 | 0.679297 | false |
ilstreltsov/django-db-mailer | dbmail/south_migrations/0039_auto__add_unique_mailsubscription_address.py | 1 | 20732 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding unique constraint on 'MailSubscription', fields ['address']
db.create_unique('dbmail_mailsubscription', ['address'])
def backwards(self, orm):
# Removing unique constraint on 'MailSubscription', fields ['address']
db.delete_unique('dbmail_mailsubscription', ['address'])
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'dbmail.apikey': {
'Meta': {'object_name': 'ApiKey'},
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '25'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailbasetemplate': {
'Meta': {'object_name': 'MailBaseTemplate'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailbcc': {
'Meta': {'object_name': 'MailBcc'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailcategory': {
'Meta': {'object_name': 'MailCategory'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '25'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailfile': {
'Meta': {'object_name': 'MailFile'},
'filename': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'to': "orm['dbmail.MailTemplate']"})
},
'dbmail.mailfromemail': {
'Meta': {'object_name': 'MailFromEmail'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'credential': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailFromEmailCredential']", 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailfromemailcredential': {
'Meta': {'object_name': 'MailFromEmailCredential'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'fail_silently': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'host': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'port': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'use_tls': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'dbmail.mailgroup': {
'Meta': {'object_name': 'MailGroup'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailgroupemail': {
'Meta': {'unique_together': "(('email', 'group'),)", 'object_name': 'MailGroupEmail'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emails'", 'to': "orm['dbmail.MailGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'dbmail.maillog': {
'Meta': {'object_name': 'MailLog'},
'backend': ('django.db.models.fields.CharField', [], {'default': "'mail'", 'max_length': '25', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'error_exception': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLogException']", 'null': 'True', 'blank': 'True'}),
'error_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_sent': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'log_id': ('django.db.models.fields.CharField', [], {'max_length': '60', 'db_index': 'True'}),
'num_of_retries': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'provider': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '250', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailTemplate']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'dbmail.maillogemail': {
'Meta': {'object_name': 'MailLogEmail'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'log': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLog']"}),
'mail_type': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
'dbmail.maillogexception': {
'Meta': {'object_name': 'MailLogException'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '150'})
},
'dbmail.maillogtrack': {
'Meta': {'object_name': 'MailLogTrack'},
'counter': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39'}),
'ip_area_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_code3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_country_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_dma_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_latitude': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_longitude': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_postal_code': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'ip_region': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'mail_log': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailLog']"}),
'ua': ('django.db.models.fields.CharField', [], {'max_length': '350', 'null': 'True', 'blank': 'True'}),
'ua_browser': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_browser_version': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'ua_dist': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'ua_dist_version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_os': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'ua_os_version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.mailsubscription': {
'Meta': {'object_name': 'MailSubscription'},
'address': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '60'}),
'backend': ('django.db.models.fields.CharField', [], {'default': "'dbmail.backends.mail'", 'max_length': '50'}),
'defer_at_allowed_hours': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'end_hour': ('django.db.models.fields.CharField', [], {'default': "'23:59'", 'max_length': '5'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_checked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'start_hour': ('django.db.models.fields.CharField', [], {'default': "'00:00'", 'max_length': '5'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'dbmail.mailtemplate': {
'Meta': {'object_name': 'MailTemplate'},
'base': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailBaseTemplate']", 'null': 'True', 'blank': 'True'}),
'bcc_email': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['dbmail.MailBcc']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailCategory']", 'null': 'True', 'blank': 'True'}),
'context_note': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'enable_log': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'from_email': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['dbmail.MailFromEmail']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_html': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'num_of_retries': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'priority': ('django.db.models.fields.SmallIntegerField', [], {'default': '6'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'subject': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.signal': {
'Meta': {'object_name': 'Signal'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailGroup']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'receive_once': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'rules': ('django.db.models.fields.TextField', [], {'default': "'{{ instance.email }}'", 'null': 'True', 'blank': 'True'}),
'signal': ('django.db.models.fields.CharField', [], {'default': "'post_save'", 'max_length': '15'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.MailTemplate']"}),
'update_model': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'dbmail.signaldeferreddispatch': {
'Meta': {'object_name': 'SignalDeferredDispatch'},
'args': ('django.db.models.fields.TextField', [], {}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'done': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'eta': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kwargs': ('django.db.models.fields.TextField', [], {}),
'params': ('django.db.models.fields.TextField', [], {})
},
'dbmail.signallog': {
'Meta': {'object_name': 'SignalLog'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'model_pk': ('django.db.models.fields.BigIntegerField', [], {}),
'signal': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['dbmail.Signal']"})
}
}
complete_apps = ['dbmail'] | gpl-2.0 | 5,494,623,558,211,153,000 | 80.305882 | 200 | 0.544472 | false |
catapult-project/catapult | telemetry/telemetry/internal/platform/tracing_agent/atrace_tracing_agent_unittest.py | 3 | 2049 | # Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
import unittest
import mock
from systrace.tracing_agents import atrace_agent as systrace_atrace_agent
from telemetry.core import exceptions
from telemetry.internal.platform.tracing_agent import atrace_tracing_agent
from tracing.trace_data import trace_data
class AtraceTracingAgentTest(unittest.TestCase):
def setUp(self):
self._mock_platform_backend = mock.NonCallableMagicMock()
self._mock_config = mock.NonCallableMagicMock()
@mock.patch.object(systrace_atrace_agent.AtraceAgent, 'GetResults')
def testCollectAgentTraceDataEmptyTrace(self, mock_get_results):
# Make GetResults() in the mock systrace atrace agent return an empty
# trace.
empty_atrace_output = '# tracer: nop'
mock_get_results.return_value.raw_data = empty_atrace_output
atrace_agent = atrace_tracing_agent.AtraceTracingAgent(
self._mock_platform_backend, self._mock_config)
mock_trace_builder = mock.NonCallableMagicMock(spec=['AddTraceFor'])
atrace_agent.CollectAgentTraceData(mock_trace_builder)
mock_trace_builder.AddTraceFor.assert_called_once_with(
trace_data.ATRACE_PART, empty_atrace_output, allow_unstructured=True)
@mock.patch.object(systrace_atrace_agent.AtraceAgent, 'GetResults')
def testCollectAgentTraceDataTimeout(self, mock_get_results):
# Make GetResults() in the mock systrace atrace agent return false to
# simulate a timeout happening inside the Systrace Atrace agent.
mock_get_results.return_value = False
atrace_agent = atrace_tracing_agent.AtraceTracingAgent(
self._mock_platform_backend, self._mock_config)
mock_trace_builder = mock.NonCallableMagicMock(spec=['AddTraceFor'])
with self.assertRaises(exceptions.AtraceTracingError):
atrace_agent.CollectAgentTraceData(mock_trace_builder)
mock_trace_builder.AddTraceFor.assert_not_called()
| bsd-3-clause | -2,279,390,763,361,511,400 | 43.543478 | 77 | 0.768668 | false |
BFH-BTI7301-project1/ClockAlarm | _clockalarm/_tests/SimpleAlertEditWidget_test.py | 1 | 1384 | from os.path import dirname, abspath, join
import pytest
from PyQt5.QtWidgets import QApplication, QGroupBox, QTimeEdit, \
QDateTimeEdit
from _clockalarm.UI.SimpleAlertEditWidget import SimpleAlertEditWidget
from _clockalarm.utils import importExportUtils
@pytest.fixture(scope="module")
def init_paths():
importExportUtils.DEFAULT_CONFIG_PATH = join(dirname(abspath(__file__)),
"config_test.cfg")
importExportUtils.ALERT_DB_PATH = join(dirname(abspath(__file__)),
"alertsDB_test.json")
app = QApplication([])
@pytest.mark.test
def test_constructor_without_alert():
"""Test the :class:`~_clockalarm.UI.SimpleAlertEditWidget` constructor
without an alert.
"""
saew = SimpleAlertEditWidget()
group_box = saew.findChildren(QGroupBox)[0]
periodicity_edit = saew.findChildren(QTimeEdit)[0]
assert group_box.title() == 'Set up a new Simple Alert'
assert periodicity_edit.displayFormat() == 'HH:mm:ss'
assert periodicity_edit.time().toString() == '00:00:00'
print(periodicity_edit.displayFormat())
assert isinstance(saew.date_time_edit, QDateTimeEdit)
@pytest.mark.test
def test_constructor_with_alert_list(init_paths):
"""Test the :class:`~_clockalarm.UI.AlertListWidget` constructor with an
alert list.
"""
pass
| gpl-3.0 | -86,595,258,066,842,460 | 28.446809 | 76 | 0.677746 | false |
saullocastro/pyNastran | pyNastran/bdf/mesh_utils/test/test_convert.py | 1 | 7968 | from __future__ import print_function
import os
import unittest
import numpy as np
from numpy import allclose
#import pyNastran
#from pyNastran.bdf.bdf import BDF
#root_path = pyNastran.__path__[0]
#test_path = os.path.join(root_path, 'bdf', 'test', 'unit')
from pyNastran.bdf.cards.elements.mass import CONM2
import pyNastran
from pyNastran.bdf.bdf import BDF, read_bdf, CaseControlDeck, PARAM
from pyNastran.bdf.mesh_utils.convert import convert, get_scale_factors
from pyNastran.utils.log import SimpleLogger
pkg_path = pyNastran.__path__[0]
np.set_printoptions(edgeitems=3, infstr='inf',
linewidth=75, nanstr='nan', precision=3,
suppress=True, threshold=1000, formatter=None)
log = SimpleLogger(level='error')
class TestConvert(unittest.TestCase):
"""various BDF conversion tests"""
def test_convert_bar(self):
"""converts a bar model"""
model_path = os.path.join(pkg_path, '..', 'models', 'beam_modes')
bdf_filename = os.path.join(model_path, 'beam_modes.dat')
bdf_filename_out = os.path.join(model_path, 'beam_modes_temp.bdf')
bdf_filename_out2 = os.path.join(model_path, 'beam_modes_converted.bdf')
model = read_bdf(bdf_filename, log=log, validate=False)
#card_lines = ['EIGRL', 42, None, None, 20]
#model.add_card(card_lines, 'EIGRL')
#model.case_control_deck = CaseControlDeck(lines)
model.write_bdf(bdf_filename_out)
#units_from = ['in', 'lbm', 's']
units_from = ['mm', 'Mg', 's']
units_to = ['m', 'kg', 's']
convert(model, units_to, units=units_from)
del model.params['WTMASS']
model.write_bdf(bdf_filename_out2)
os.remove(bdf_filename_out)
os.remove(bdf_filename_out2)
def _test_convert_isat(self):
"""converts a isat model"""
model_path = os.path.join(pkg_path, '..', 'models', 'isat')
bdf_filename = os.path.join(model_path, 'ISat_Dploy_Sm.dat')
bdf_filename_out = os.path.join(model_path, 'isat.bdf')
bdf_filename_out2 = os.path.join(model_path, 'isat_converted.bdf')
model = read_bdf(bdf_filename, log=log, validate=False)
#card_lines = ['EIGRL', 42, None, None, 20]
#model.add_card(card_lines, 'EIGRL')
#model.case_control_deck = CaseControlDeck(lines)
model.write_bdf(bdf_filename_out)
#units_from = ['in', 'lbm', 's']
units_from = ['mm', 'Mg', 's']
units_to = ['m', 'kg', 's']
convert(model, units_to, units=units_from)
del model.params['WTMASS']
model.write_bdf(bdf_filename_out2)
os.remove(bdf_filename_out)
os.remove(bdf_filename_out2)
def _test_convert_bwb(self):
"""converts a bwb model"""
bdf_filename = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_saero.bdf')
bdf_filename_out = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_modes.bdf')
bdf_filename_out2 = os.path.join(pkg_path, '..', 'models', 'bwb', 'bwb_modes_converted.bdf')
model = read_bdf(bdf_filename, log=log, validate=False)
model.sol = 103
lines = [
'ECHO = NONE',
'SUBCASE 1',
' DISPLACEMENT(PLOT) = ALL',
' MPC = 1',
' SPC = 100',
' SUPORT1 = 1',
' METHOD = 42',
]
card_lines = ['EIGRL', 42, None, None, 20]
model.add_card(card_lines, 'EIGRL')
model.case_control_deck = CaseControlDeck(lines, log=log)
model.write_bdf(bdf_filename_out)
units_from = ['in', 'lbm', 's']
#units_from = ['mm', 'Mg', 's']
units_to = ['m', 'kg', 's']
convert(model, units_to, units=units_from)
model.write_bdf(bdf_filename_out2)
os.remove(bdf_filename_out)
os.remove(bdf_filename_out2)
def test_convert_sine(self):
"""converts a sine model"""
model_path = os.path.join(pkg_path, '..', 'models', 'freq_sine')
bdf_filename = os.path.join(model_path, 'good_sine.dat')
bdf_filename_out = os.path.join(model_path, 'sine_modes.bdf')
bdf_filename_out2 = os.path.join(model_path, 'sine_converted.bdf')
model = read_bdf(bdf_filename, log=log, validate=False)
model.sol = 103
lines = [
'ECHO = NONE',
'SUBCASE 1',
' DISPLACEMENT(PLOT) = ALL',
#'$ SPC = 100',
' METHOD = 42',
]
card_lines = ['EIGRL', 42, None, None, 20]
model.add_card(card_lines, 'EIGRL')
model.case_control_deck = CaseControlDeck(lines, log=log)
model.params['GRDPNT'] = PARAM('GRDPNT', 0)
#del model.params['WTMASS']
model.write_bdf(bdf_filename_out)
#units_from = ['in', 'lbm', 's']
units_from = ['mm', 'Mg', 's']
units_to = ['m', 'kg', 's']
convert(model, units_to, units=units_from)
model.write_bdf(bdf_filename_out2)
os.remove(bdf_filename_out)
os.remove(bdf_filename_out2)
def test_convert_units(self):
"""tests various conversions"""
# from -> to
xyz_scale, mass_scale, time_scale, weight_scale, gravity_scale = get_scale_factors(
['in', 'lbm', 's'], ['ft', 'lbm', 's'])
assert xyz_scale == 1./12.
assert mass_scale == 1.
assert time_scale == 1.
assert weight_scale == 1., weight_scale
assert gravity_scale == 1./12., gravity_scale
wtmass = 1. / (32.2 * 12.)
wtmass_expected = 1. / (32.2)
assert allclose(wtmass/gravity_scale, wtmass_expected), 'wtmass=%s wtmass_expected=%s' % (wtmass, wtmass_expected)
xyz_scale, mass_scale, time_scale, weight_scale, gravity_scale = get_scale_factors(
['mm', 'Mg', 's'], ['m', 'kg', 's'])
assert xyz_scale == 1./1000.
assert mass_scale == 1000.
assert time_scale == 1.
assert weight_scale == 1., weight_scale
assert gravity_scale == 1.
xyz_scale, mass_scale, time_scale, weight_scale, gravity_scale = get_scale_factors(
['ft', 'lbm', 's'], ['m', 'kg', 's'])
assert xyz_scale == 0.3048
assert mass_scale == 0.45359237, mass_scale
assert time_scale == 1.
assert allclose(weight_scale, 4.4482216526), weight_scale
assert allclose(gravity_scale, 1/32.2), 'gravity_scale=%s 1/expected=%s' % (gravity_scale, 1/(32.2))
wtmass = 1. / (32.2)
wtmass_expected = 1.
assert allclose(wtmass/gravity_scale, wtmass_expected), 'wtmass=%s wtmass_expected=%s' % (wtmass/gravity_scale, wtmass_expected)
def test_convert_01(self):
"""converts the CONM2s units"""
model = BDF(log=log)
eid = 1000
nid = 100
cid = 0
mass = 247200. # kg
X = [30.16, 0., 3.55] # m
I11 = 1.39e7 # kg-m^2
I22 = 3.66e7
I33 = 4.99e7
I13 = I12 = I23 = 0.
I = I11, I12, I22, I13, I23, I33
elem = CONM2(eid, nid, mass, cid=cid, X=X, I=I, comment='')
model.masses[eid] = elem
units_to = ['in', 'lbm', 's']
units_from = ['m', 'kg', 's']
convert(model, units_to, units=units_from)
#print(model.masses[eid].write_card_16())
def test_convert_02(self):
"""converts a full model units"""
bdf_filename = os.path.abspath(
os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero.bdf'))
bdf_filename_out = os.path.abspath(
os.path.join(pkg_path, '..', 'models', 'bwb', 'BWB_saero.out'))
model = read_bdf(bdf_filename, log=log)
units_to = ['m', 'kg', 's']
units_from = ['in', 'lbm', 's']
#units_to = units_from
convert(model, units_to, units_from)
model.write_bdf(bdf_filename_out)
os.remove(bdf_filename_out)
if __name__ == '__main__': # pragma: no cover
unittest.main()
| lgpl-3.0 | 7,829,366,165,569,614,000 | 38.251232 | 136 | 0.563755 | false |
brunoabud/ic | plugins/ICGRAY2BGR/plugin_object.py | 1 | 1150 | # coding: utf-8
# Copyright (C) 2016 Bruno Abude Cardoso
#
# Imagem Cinemática is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Imagem Cinemática is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import cv2
class ICGRAY2BGR(object):
def __init__(self, plugin_path):
self.plugin_path = plugin_path
self.parameters = []
def parameter_changed(self, param_name, value):
return None
def apply_filter(self, frame):
colorspace, data, pos, timestamp = frame
data = cv2.cvtColor(data, cv2.COLOR_GRAY2BGR)
return ("BGR", data)
def release_plugin(self, error_level=0):
pass
| gpl-3.0 | 2,798,068,830,518,351,400 | 32.764706 | 75 | 0.705575 | false |
meppe/ros-ort | src/frcnn/src/lib/pycocotools/mask.py | 1 | 4062 | __author__ = 'tsungyi'
import lib.pycocotools._mask as _mask
# Interface for manipulating masks stored in RLE format.
#
# RLE is a simple yet efficient format for storing binary masks. RLE
# first divides a vector (or vectorized image) into a series of piecewise
# constant regions and then for each piece simply stores the length of
# that piece. For example, given M=[0 0 1 1 1 0 1] the RLE counts would
# be [2 3 1 1], or for M=[1 1 1 1 1 1 0] the counts would be [0 6 1]
# (note that the odd counts are always the numbers of zeros). Instead of
# storing the counts directly, additional compression is achieved with a
# variable bitrate representation based on a common scheme called LEB128.
#
# Compression is greatest given large piecewise constant regions.
# Specifically, the size of the RLE is proportional to the number of
# *boundaries* in M (or for an image the number of boundaries in the y
# direction). Assuming fairly simple shapes, the RLE representation is
# O(sqrt(n)) where n is number of pixels in the object. Hence space usage
# is substantially lower, especially for large simple objects (large n).
#
# Many common operations on masks can be computed directly using the RLE
# (without need for decoding). This includes computations such as area,
# union, intersection, etc. All of these operations are linear in the
# size of the RLE, in other words they are O(sqrt(n)) where n is the area
# of the object. Computing these operations on the original mask is O(n).
# Thus, using the RLE can result in substantial computational savings.
#
# The following API functions are defined:
# encode - Encode binary masks using RLE.
# decode - Decode binary masks encoded via RLE.
# merge - Compute union or intersection of encoded masks.
# iou - Compute intersection over union between masks.
# area - Compute area of encoded masks.
# toBbox - Get bounding boxes surrounding encoded masks.
# frPyObjects - Convert polygon, bbox, and uncompressed RLE to encoded RLE mask.
#
# Usage:
# Rs = encode( masks )
# masks = decode( Rs )
# R = merge( Rs, intersect=false )
# o = iou( dt, gt, iscrowd )
# a = area( Rs )
# bbs = toBbox( Rs )
# Rs = frPyObjects( [pyObjects], h, w )
#
# In the API the following formats are used:
# Rs - [dict] Run-length encoding of binary masks
# R - dict Run-length encoding of binary mask
# masks - [hxwxn] Binary mask(s) (must have type np.ndarray(dtype=uint8) in column-major order)
# iscrowd - [nx1] list of np.ndarray. 1 indicates corresponding gt image has crowd region to ignore
# bbs - [nx4] Bounding box(es) stored as [x y w h]
# poly - Polygon stored as [[x1 y1 x2 y2...],[x1 y1 ...],...] (2D list)
# dt,gt - May be either bounding boxes or encoded masks
# Both poly and bbs are 0-indexed (bbox=[0 0 1 1] encloses first pixel).
#
# Finally, a note about the intersection over union (iou) computation.
# The standard iou of a ground truth (gt) and detected (dt) object is
# iou(gt,dt) = area(intersect(gt,dt)) / area(union(gt,dt))
# For "crowd" regions, we use a modified criteria. If a gt object is
# marked as "iscrowd", we allow a dt to match any subregion of the gt.
# Choosing gt' in the crowd gt that best matches the dt can be done using
# gt'=intersect(dt,gt). Since by definition union(gt',dt)=dt, computing
# iou(gt,dt,iscrowd) = iou(gt',dt) = area(intersect(gt,dt)) / area(dt)
# For crowd gt regions we use this modified criteria above for the iou.
#
# To compile run "python setup.py build_ext --inplace"
# Please do not contact us for help with compiling.
#
# Microsoft COCO Toolbox. version 2.0
# Data, paper, and tutorials available at: http://mscoco.org/
# Code written by Piotr Dollar and Tsung-Yi Lin, 2015.
# Licensed under the Simplified BSD License [see coco/license.txt]
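#
# Worked example (not part of the original notes above; a minimal sketch
# assuming numpy is available and the aliases bound at the bottom of this
# module are used):
#
#   import numpy as np
#   masks = np.asfortranarray(np.zeros((4, 4, 1), dtype=np.uint8))
#   masks[1:3, 1:3, 0] = 1               # one 2x2 square object
#   Rs = encode(masks)                   # [{'size': [4, 4], 'counts': ...}]
#   assert area(Rs)[0] == 4              # area computed straight from the RLE
#   assert (decode(Rs) == masks).all()   # round-trip back to the binary mask
#   print(toBbox(Rs))                    # [[1. 1. 2. 2.]], stored as [x y w h]
#   print(iou(Rs, Rs, [0]))              # [[1.]]; identical masks, iscrowd=0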
encode = _mask.encode
decode = _mask.decode
iou = _mask.iou
merge = _mask.merge
area = _mask.area
toBbox = _mask.toBbox
frPyObjects = _mask.frPyObjects | gpl-3.0 | -2,878,177,237,587,652,600 | 48.54878 | 100 | 0.699655 | false |
joshcai/utdcs | processing/views.py | 1 | 3030 | # Create your views here.
from django.http import HttpResponse, HttpResponseRedirect, Http404
from django.template import RequestContext, loader
from django.core.urlresolvers import reverse
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from website import secrets
import datetime
from processing.models import Post
#debugging
#import pdb
def render_post(current_post):
newpost = """var sketchProc=function(processingInstance){ with (processingInstance){
var xWidth=400;
var yHeight=400;
frameRate(45);
size(xWidth, yHeight);"""
newpost += current_post
newpost +="}};"
return newpost
def index(request, page_num=1):
post_entries = Post.objects.order_by('-date').exclude(deleted=True)
    context = {
'post_entries': post_entries[(float(page_num)-1)*5:float(page_num)*5],
'page_num': page_num,
'request': request,
}
if float(page_num) > 1:
context['prev'] = True
if float(page_num)*5 < len(post_entries): # this can be optimized later - (code is already hitting database once)
context['next'] = True
return render(request, 'processing/index.html', context)
def submit(request):
if request.method == 'POST':
if request.POST['title'] and request.POST['content']:
d = datetime.datetime.now()
if request.POST['author']:
auth = request.POST['author']
else:
auth = "Anonymous"
p = Post(title=request.POST['title'],
content=request.POST['content'],
content_rendered=render_post(request.POST['content']),
author=auth,
date=d,
date_str=d.strftime('%B %d, %Y %I:%M%p'))
p.save()
return HttpResponseRedirect(reverse('processing:index'))
else:
context={
'title': request.POST['title'],
'content': request.POST['content'],
'error_message': "Title and content required<br />",
'url': reverse('processing:submit'),
'request': request,
}
return render(request, 'processing/newpost.html', context)
return render(request, 'processing/newpost.html', {'url': reverse('processing:submit'), 'request': request})
def login(request):
context={'request': request}
if request.method == 'POST':
if request.POST['password'] == secrets.login_password:
request.session['logged_in'] = True
return HttpResponseRedirect(reverse('blog:index'))
else:
context['error_message'] = "Invalid password<br />"
return render(request, 'blog/login.html', context)
def delete(request, post_id):
if 'logged_in' in request.session and request.session['logged_in']:
post = get_object_or_404(Post, pk=post_id)
post.deleted = True
post.save()
return HttpResponseRedirect(reverse('blog:index'))
def post(request, post_id):
post = get_object_or_404(Post, pk=post_id)
context={
'post': post,
'request': request,
}
query = Post.objects.all().exclude(deleted=True)
next = query.filter(pk__gt=post_id)
if next:
context['next'] = next[0]
prev = query.filter(pk__lt=post_id).order_by('id').reverse()
if prev:
context['prev'] = prev[0]
return render(request,'processing/post.html', context)
| mit | 2,868,276,417,460,019,000 | 30.894737 | 114 | 0.69604 | false |
daniaki/pyPPI | pyppi/tests/test_ontology.py | 1 | 6976 | import os
from unittest import TestCase
from ..data_mining.ontology import get_active_instance
from ..data_mining.ontology import (
get_up_to_lca,
get_lca_of_terms,
group_terms_by_ontology_type,
filter_obsolete_terms,
parse_obo12_file
)
base_path = os.path.dirname(__file__)
test_obo_file = '{}/{}'.format(base_path, "test_data/test_go.obo.gz")
dag = get_active_instance(filename=test_obo_file)
class TestULCAInducer(TestCase):
def test_ulca_inducer_case_1(self):
expected = [
"GO:0008150", "GO:0008150",
"GO:0009987", "GO:0009987",
"GO:0065007", "GO:0065007",
"GO:0050896",
"GO:0050789", "GO:0050789",
"GO:0007154",
"GO:0050794", "GO:0050794",
"GO:0051716"
]
p1 = ["GO:0007154", "GO:0050794"]
p2 = ["GO:0050794", "GO:0051716"]
induced = get_up_to_lca(p1, p2)
self.assertEqual(list(sorted(expected)), list(sorted(induced)))
def test_ulca_inducer_case_2(self):
expected = [
"GO:0008150", "GO:0008150",
"GO:0009987",
"GO:0065007",
"GO:0050789",
"GO:0050794",
"GO:0007154"
]
p1 = ["GO:0007154"]
p2 = ["GO:0050794"]
induced = get_up_to_lca(p1, p2)
self.assertEqual(list(sorted(expected)), list(sorted(induced)))
def test_lca_returns_original_input_if_no_common_ancestor(self):
expected = ["GO:0008150", "GO:0008150"]
p1 = ["GO:0008150"]
p2 = ["GO:0008150"]
induced = get_up_to_lca(p1, p2)
self.assertEqual(list(sorted(expected)), list(sorted(induced)))
def test_lca_returns_original_input_if_empty_input(self):
expected = ["GO:0008150"]
p1 = ["GO:0008150"]
p2 = []
induced = get_up_to_lca(p1, p2)
self.assertEqual(list(sorted(expected)), list(sorted(induced)))
expected = ["GO:0008150"]
p1 = []
p2 = ["GO:0008150"]
induced = get_up_to_lca(p1, p2)
self.assertEqual(list(sorted(expected)), list(sorted(induced)))
def test_can_parse_obo_file(self):
self.assertEqual(len(dag), 49209)
expected = str({
"id": "GO:0007165",
"name": 'signal transduction',
"namespace": 'biological_process',
"is_a": sorted(['GO:0009987', 'GO:0050794']),
"part_of": sorted(['GO:0051716', 'GO:0023052', 'GO:0007154']),
"is_obsolete": False
})
self.assertEqual(str(dag["GO:0007165"]), expected)
def test_can_parse_all_is_a_entries(self):
self.assertEqual(len(dag), 49209)
expected = sorted(['GO:0009987', 'GO:0050794'])
result = sorted([t.id for t in dag["GO:0007165"].is_a])
self.assertEqual(result, expected)
def test_can_parse_all_part_of_entries(self):
self.assertEqual(len(dag), 49209)
expected = sorted(['GO:0051716', 'GO:0023052', 'GO:0007154'])
result = sorted([t.id for t in dag["GO:0007165"].part_of])
self.assertEqual(result, expected)
def test_parser_correctly_sets_obsolete_status(self):
self.assertTrue(dag["GO:0030939"].is_obsolete)
def test_goterm_hash_is_has_of_id(self):
term = dag["GO:0030939"]
self.assertEqual(hash(term), hash("GO:0030939"))
def test_has_parent(self):
term = dag["GO:0007165"]
self.assertTrue(term.has_parent(dag["GO:0007154"]))
def test_has_ancestor(self):
term = dag["GO:0007165"]
self.assertTrue(term.has_ancestor(dag["GO:0008150"]))
def test_can_get_all_parents_of_a_term(self):
term = dag["GO:0007165"]
result = sorted([t.id for t in term.all_parents])
expected = sorted([
"GO:0007154",
"GO:0050789",
"GO:0008150",
"GO:0009987",
"GO:0051716",
"GO:0023052",
"GO:0050794",
"GO:0050896",
"GO:0065007",
])
self.assertEqual(result, expected)
def test_correctly_computes_term_depth(self):
term = dag["GO:0007165"]
self.assertEqual(term.depth, 4)
term = dag["GO:0023052"]
self.assertEqual(term.depth, 1)
term = dag["GO:0008150"]
self.assertEqual(term.depth, 0)
def test_can_get_lca_of_terms(self):
terms = [
dag["GO:0007154"],
dag["GO:0050794"],
dag["GO:0051716"]
]
result = get_lca_of_terms(terms)
expected = [dag["GO:0008150"]]
self.assertEqual(result, expected)
terms = [
dag["GO:0007154"],
dag["GO:0051716"]
]
result = get_lca_of_terms(terms)
expected = [dag["GO:0009987"]]
self.assertEqual(result, expected)
terms = [
dag["GO:0007165"]
]
result = get_lca_of_terms(terms)
expected = list(set([
dag["GO:0050794"]
]))
self.assertEqual(result, expected)
def test_get_lca_of_terms_returns_None_if_no_common_ancestors(self):
terms = [
dag["GO:0008150"],
dag["GO:0008150"],
]
result = get_lca_of_terms(terms)
self.assertIsNone(result)
def test_can_group_by_ontology(self):
grouped = group_terms_by_ontology_type(
term_ids=["GO:0008150", "GO:0104005", "GO:0016459"],
max_count=None
)
expected = {
'cc': ["GO:0016459"],
'bp': ["GO:0008150"],
'mf': ["GO:0104005"],
}
self.assertEqual(grouped, expected)
def test_can_filter_max_count(self):
grouped = group_terms_by_ontology_type(
term_ids=[
"GO:0008150", "GO:0008150", "GO:0008150",
"GO:0104005", "GO:0016459", "GO:0016459"
],
max_count=2
)
expected = {
'cc': ["GO:0016459", "GO:0016459"],
'bp': ["GO:0008150", "GO:0008150"],
'mf': ["GO:0104005"],
}
self.assertEqual(grouped, expected)
def test_max_count_filter_ignored_when_none(self):
grouped = group_terms_by_ontology_type(
term_ids=[
"GO:0008150", "GO:0008150", "GO:0008150",
"GO:0104005", "GO:0016459", "GO:0016459"
],
max_count=None
)
expected = {
'cc': ["GO:0016459", "GO:0016459"],
'bp': ["GO:0008150", "GO:0008150", "GO:0008150"],
'mf': ["GO:0104005"],
}
self.assertEqual(grouped, expected)
def test_can_filter_obsolete_terms(self):
result = filter_obsolete_terms(["GO:0000005", "GO:0000006"])
expected = ["GO:0000006"]
self.assertEqual(result, expected)
def test_can_parse_alt_ids(self):
self.assertEqual(dag['GO:0000975'].id, 'GO:0044212')
| mit | 8,340,560,331,163,820,000 | 30.282511 | 74 | 0.535837 | false |
azumimuo/family-xbmc-addon | plugin.video.specto/resources/lib/sources/kissanime_tv.py | 1 | 4887 | # -*- coding: utf-8 -*-
'''
Specto Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,json
from resources.lib.libraries import cleantitle
from resources.lib.libraries import client
from resources.lib.libraries import control
from resources.lib.libraries import cache
from resources.lib import resolvers
class source:
def __init__(self):
self.base_link = 'http://kissanime.io/'
#self.base_link = client.source(self.base_link, output='geturl')
self.search_link = '/wp-admin/admin-ajax.php'
self.movie_list = '/720p-1080p-bluray-movies-list/'
def get_movie(self, imdb, title, year):
try:
leter = title[0]
result = cache.get(self.filmxy_cache,9000,leter)
print "r1",result
years = ['(%s)' % str(year), '(%s)' % str(int(year)+1), '(%s)' % str(int(year)-1)]
result = [i for i in result if cleantitle.movie(title) == cleantitle.movie(i[2])]
print "r2",result
result = [i[0] for i in result if any(x in i[1] for x in years)][0]
print "r3",result
url = client.replaceHTMLCodes(result)
url = url.encode('utf-8')
return url
except Exception as e:
control.log('Filmxy ERROR %s' % e)
return
def filmxy_cache(self, leter=''):
try:
url = urlparse.urljoin(self.base_link, self.search_link)
#control.log('>>>>>>>>>>>>---------- CACHE %s' % url)
headers = {'X-Requested-With':"XMLHttpRequest"}
params = {"action":"ajax_process2", "query":leter.upper()}
params = urllib.urlencode(params)
result = client.request(url, post=params, headers=headers)
result = client.parseDOM(result, 'p')
result = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'a')[0], client.parseDOM(i, 'a')[0]) for i in result]
result = [(re.sub('http.+?//.+?/','/', i[0]), re.findall("\(\d+\)", i[1]), i[2].split('(')[0]) for i in result]
#control.log('>>>>>>>>>>>>---------- CACHE-4 %s' % result)
result = [(i[0], i[1][0], i[2].strip()) for i in result if len(i[1]) > 0]
return result
except Exception as e:
control.log('Filmxy Cache ERROR %s' % e)
return
def get_sources(self, url, hosthdDict, hostDict, locDict):
try:
print "URL",url
sources = []
if url == None: return sources
url1 = urlparse.urljoin(self.base_link, url)
result = client.request(url1)
url1 = client.parseDOM(result, 'a', attrs = {'id': 'main-down'}, ret='href')[0]
print "LINKS1",url1
result = client.request(url1)
print "LINKS2", result
for quality in ['720p', '1080p']:
links = client.parseDOM(result, 'div', attrs = {'class': '.+?'+quality})[0]
links = client.parseDOM(links, 'li')
links = [(client.parseDOM(i, 'a', ret='href')[0]) for i in links]
if '1080p' in quality: q = '1080p'
elif '720p' in quality or 'hd' in quality: q = 'HD'
else: q = 'SD'
for j in links:
print "j",j
host = j.split('/')[2]
host = host.strip().lower()
host = client.replaceHTMLCodes(host)
if not host in hostDict: raise Exception()
host = host.encode('utf-8')
print "HOST",host, j
sources.append({'source': host, 'quality': q, 'provider': 'Filmxy', 'url': j})
print "LINKS3", links
return sources
except Exception as e:
control.log('Filmxy Source ERROR %s' % e)
return sources
def resolve(self, url):
try:
#url = client.request(url, output='geturl')
#if 'requiressl=yes' in url: url = url.replace('http://', 'https://')
#else: url = url.replace('https://', 'http://')
url = resolvers.request(url)
return url
except:
return
| gpl-2.0 | 5,392,704,476,561,538,000 | 37.179688 | 135 | 0.544506 | false |
openmips/stbgui | lib/python/Components/Renderer/NextEpgInfo.py | 2 | 3268 | from Components.VariableText import VariableText
from Renderer import Renderer
from enigma import eLabel, eEPGCache, eServiceReference
from time import localtime, strftime
from skin import parseColor
class NextEpgInfo(Renderer, VariableText):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
self.epgcache = eEPGCache.getInstance()
self.numberOfItems = 1
self.hideLabel = 0
self.timecolor = ""
self.labelcolor = ""
self.foregroundColor = "00?0?0?0"
self.numOfSpaces = 1
GUI_WIDGET = eLabel
def changed(self, what):
self.text = ""
reference = self.source.service
info = reference and self.source.info
if info:
currentEvent = self.source.getCurrentEvent()
if currentEvent:
if not self.epgcache.startTimeQuery(eServiceReference(reference.toString()), currentEvent.getBeginTime() + currentEvent.getDuration()):
spaces = " "*self.numOfSpaces
if self.numberOfItems == 1:
event = self.epgcache.getNextTimeEntry()
if event:
if self.hideLabel:
self.text = "%s%s%s%s%s" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
else:
self.text = "%s%s:%s%s%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), spaces, self.foregroundColor, event.getEventName())
else:
for x in range(self.numberOfItems):
event = self.epgcache.getNextTimeEntry()
if event:
self.text += "%s%s%s%s%s\n" % (self.timecolor, strftime("%H:%M", localtime(event.getBeginTime())), spaces, self.foregroundColor, event.getEventName())
if not self.hideLabel:
self.text = self.text and "%s%s\n%s" % (self.labelcolor, pgettext("now/next: 'next' event label", "Next"), self.text) or ""
def applySkin(self, desktop, parent):
attribs = []
for (attrib, value) in self.skinAttributes:
if attrib == "NumberOfItems":
self.numberOfItems = int(value)
attribs.append((attrib, value))
if attrib == "noLabel":
self.hideLabel = int(value)
attribs.append((attrib, value))
if attrib == "numOfSpaces":
self.numOfSpaces = int(value)
attribs.append((attrib, value))
if attrib == "timeColor":
self.timecolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "labelColor":
self.labelcolor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
if attrib == "foregroundColor":
self.foregroundColor = self.hex2strColor(parseColor(value).argb())
attribs.append((attrib, value))
for (attrib, value) in attribs:
self.skinAttributes.remove((attrib, value))
self.timecolor = self.formatColorString(self.timecolor)
self.labelcolor = self.formatColorString(self.labelcolor)
self.foregroundColor = self.formatColorString(self.foregroundColor)
return Renderer.applySkin(self, desktop, parent)
# hex:
# 0 1 2 3 4 5 6 7 8 9 a b c d e f
# converts to:
# 0 1 2 3 4 5 6 7 8 9 : ; < = > ?
def hex2strColor(self, rgb):
out = ""
for i in range(28,-1,-4):
out += "%s" % chr(0x30 + (rgb>>i & 0xf))
return out
def formatColorString(self, color):
if color:
return "%s%s" % ('\c', color)
return "%s%s" % ('\c', self.foregroundColor) | gpl-2.0 | 6,980,544,276,022,878,000 | 37.011628 | 158 | 0.678703 | false |
WilliamMayor/pinscher | pinscher/Keyfile.py | 1 | 1219 | import string
import pickle
import os
import utilities
class Keyfile:
LENGTH = 32
CHARACTERS = string.digits + string.letters + string.punctuation + ' '
@staticmethod
def create(path, database_path, **kwargs):
k = Keyfile()
k.path = path
k.database_path = os.path.abspath(database_path)
k.key = kwargs.get('key', utilities.generate_key())
k.iv = kwargs.get('iv', utilities.generate_iv())
k.length = kwargs.get('length', Keyfile.LENGTH)
k.characters = kwargs.get('characters', Keyfile.CHARACTERS)
k.save()
return Keyfile.load(path)
@staticmethod
def load(path):
k = pickle.load(open(path, 'rb'))
k.path = path
return k
def __getstate__(self):
_dict = self.__dict__.copy()
del _dict['path']
return _dict
def __setstate__(self, _dict):
self.__dict__.update(_dict)
def __hash__(self):
return self.path.__hash__()
def __eq__(self, other):
return self.path == other.path
def save(self):
pickle.dump(self, open(self.path, 'wb'))
def delete(self):
os.remove(self.path)
os.remove(self.database_path)
| gpl-3.0 | -6,969,443,440,557,586,000 | 23.38 | 74 | 0.575062 | false |
PyCQA/astroid | astroid/brain/brain_type.py | 1 | 2187 | """
Astroid hooks for type support.
Starting from python3.9, type object behaves as it had __class_getitem__ method.
However it was not possible to simply add this method inside type's body, otherwise
all types would also have this method. In this case it would have been possible
to write str[int].
Guido Van Rossum proposed a hack to handle this in the interpreter:
https://github.com/python/cpython/blob/67e394562d67cbcd0ac8114e5439494e7645b8f5/Objects/abstract.c#L181-L184
This brain follows the same logic. It is no wise to add permanently the __class_getitem__ method
to the type object. Instead we choose to add it only in the case of a subscript node
which inside name node is type.
Doing this type[int] is allowed whereas str[int] is not.
Thanks to Lukasz Langa for fruitful discussion.
"""
from astroid import extract_node, inference_tip, nodes
from astroid.const import PY39_PLUS
from astroid.exceptions import UseInferenceDefault
from astroid.manager import AstroidManager
def _looks_like_type_subscript(node):
"""
Try to figure out if a Name node is used inside a type related subscript
:param node: node to check
:type node: astroid.node_classes.NodeNG
:return: true if the node is a Name node inside a type related subscript
:rtype: bool
"""
if isinstance(node, nodes.Name) and isinstance(node.parent, nodes.Subscript):
return node.name == "type"
return False
def infer_type_sub(node, context=None):
"""
Infer a type[...] subscript
:param node: node to infer
:type node: astroid.node_classes.NodeNG
:param context: inference context
:type context: astroid.context.InferenceContext
:return: the inferred node
:rtype: nodes.NodeNG
"""
node_scope, _ = node.scope().lookup("type")
if node_scope.qname() != "builtins":
raise UseInferenceDefault()
class_src = """
class type:
def __class_getitem__(cls, key):
return cls
"""
node = extract_node(class_src)
return node.infer(context=context)
if PY39_PLUS:
AstroidManager().register_transform(
nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript
)
| lgpl-2.1 | -6,311,474,582,028,243,000 | 32.646154 | 108 | 0.715592 | false |
gwwfps/boxrps | admin.py | 1 | 15675 | #!/usr/bin/env python
import logging
import yaml
import cgi
from xml.dom import minidom as md
from datetime import datetime, timedelta
from collections import defaultdict
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.ext import db
from django.utils import simplejson
from models import *
from utils import render_to, parse_item
class AdminHandler(webapp.RequestHandler):
def get(self):
self.response.out.write('Hello world!')
class ParseHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/parse.html')
def post(self):
all_members = Member.all()
parsed = md.parseString(self.request.get('log').encode('utf-8'))
members = []
for member in parsed.getElementsByTagName('member'):
try:
name = member.firstChild.firstChild
except AttributeError:
continue
if name is None:
continue
name = name.toxml().strip().capitalize()
class_ = member.childNodes[1].firstChild.toxml().upper()
m = Member.gql('WHERE name = :1', name).get()
if not m:
new_member = Member(name=name, class_=class_)
new_member.put()
else:
m.class_=class_
m.put()
members.append(name)
items = []
for item in parsed.getElementsByTagName('item'):
try:
name = item.firstChild.firstChild.toxml()
except AttributeError:
continue
time = item.childNodes[1].firstChild.toxml()
looter = item.childNodes[2].firstChild.toxml()
pt = item.childNodes[3].firstChild.toxml()
items.append(parse_item(name) + (time, looter, pt))
render_to(self.response, 'admin/parseadd.html',
members=set(members), all_members=all_members, events=Event.all().order('name'),
datetime=parsed.getElementsByTagName('start')[0].firstChild.toxml(),
items=items)
class RaidHanlder(webapp.RequestHandler):
def get(self):
pass
def post(self):
pass
class EventHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/events.html', events=Event.all())
def post(self):
batch = self.request.get('batch')
batch = batch.split('\n')
for line in batch:
event, pt = line.split('\t')
Event(name=cgi.escape(event), default_pt = int(float(pt.strip()))).put()
self.get()
class AjaxHandler(webapp.RequestHandler):
def post(self):
action = self.request.get('action')
if action == 'addevent':
event = Event(name=self.request.get('name'),
default_pt=int(self.request.get('pt')))
event.put()
elif action == 'geteventpt':
event = Event.get(db.Key(self.request.get('key')))
if event:
self.response.out.write(simplejson.dumps({'pt':event.default_pt}))
elif action == 'addraid':
date = datetime.strptime(self.request.get('date'), '%Y.%m.%d %H:%M')
pt = int(self.request.get('pt'))
note = self.request.get('note')
members = self.request.get('members').split('|')[0:-1]
loot = self.request.get('loot').split('|')[0:-1]
memcache = {}
for m in Member.all():
memcache[m.name] = m
key = self.request.get('key')
if key:
encounter = Encounter.get(db.Key(key))
else:
encounter = None
if encounter:
delta = 0
oldpt = encounter.pt
if not encounter.pt == pt:
delta = pt - encounter.pt
encounter.pt = pt
encounter.note = note
encounter.datetime = date
old_members = set([m.name for m in encounter.attending_members()])
members = set([member.strip().capitalize() for member in members])
remaining = old_members & members
newly_added = members - old_members
removed = old_members - members
for m in remaining:
member = memcache[m]
member.earned += delta
member.balance += delta
member.put()
for m in newly_added:
nm = memcache.get(m.strip().capitalize())
if not nm:
nm = Member(name=m)
memcache[m] = nm
nm.earned += pt
nm.balance += pt
nm.put()
encounter.attendees.append(nm.key())
for m in removed:
dm = memcache[m]
dm.earned -= oldpt
dm.balance -= oldpt
dm.put()
encounter.attendees.remove(dm.key())
encounter.put()
Member.recalculate_attendance()
lset = {}
for l in encounter.loots:
lset[str(l.key())] = l
plset = set(lset.keys())
for piece in loot:
_, name, time, looter, cost, lkey = piece.split(';')
looter = looter.strip().capitalize()
cost = int(cost)*(-1)
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
if lkey in lset:
plset.remove(lkey)
l = lset[lkey]
if not l.looter.name == looter or not l.cost == cost:
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
ltr = memcache[looter]
ltr.spent += cost
ltr.balance += cost
ltr.put()
l.looter = ltr
l.cost = cost
l.put()
else:
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, gid=0, default_cost=cost)
item.put()
looter = memcache[looter]
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
for rkey in plset:
l = lset[rkey]
m = l.looter
m.spent -= l.cost
m.balance -= l.cost
m.put()
l.delete()
else:
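                # New raid: create the encounter, credit every attendee with the
                # raid points, then record each loot row (cost is sign-flipped)
                # and apply it to the looter's spent/balance totals.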
event = Event.get(db.Key(self.request.get('event')))
attendees = []
for member in members:
m = memcache.get(member.strip().capitalize())
if not m:
m = Member(name=member)
memcache[member.strip().capitalize()] = m
m.earned += pt
m.balance += pt
m.put()
attendees.append(m.key())
encounter = Encounter(event=event, note=note, pt=pt, datetime=date,
attendees=attendees)
encounter.put()
Member.recalculate_attendance()
for piece in loot:
logging.info(piece.encode('utf-8'))
id, name, time, looter, cost, _ = piece.split(';')
looter = looter.strip().capitalize()
try:
id = int(id)
except ValueError:
id = 0
time = datetime.strptime(time, '%Y.%m.%d %H:%M')
looter = memcache[looter]
cost = int(cost)*(-1)
item = Item.gql('WHERE name = :1', name).get()
if item:
if id:
item.gid = id
item.put()
else:
item = Item(name=name, gid=id, default_cost=cost)
item.put()
looter.spent += cost
looter.balance += cost
looter.put()
loot = Loot(encounter=encounter, cost=cost, looter=looter,
datetime=time, item=item)
loot.put()
self.response.out.write(simplejson.dumps({'key': str(encounter.key())}))
elif action == "deladjustment":
aid = self.request.get('aid')
adj = Adjustment.get(db.Key(aid))
m = adj.member
m.balance -= adj.pt
m.adjusted -= adj.pt
m.put()
adj.delete()
self.response.out.write(simplejson.dumps({}))
class ImportHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/import.html')
def post(self):
text = self.request.get('import')
for line in text.split('\n'):
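            # Expected input: one tab-separated row per member, with the name in
            # column 2 and earned/spent/adjusted/balance in columns 6-9 (spent is
            # stored with its sign flipped).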
line = line.split('\t')
name = line[2].capitalize()
earned = int(float(line[6]))
spent = (-1)*int(float(line[7]))
adjusted = int(float(line[8]))
balance = int(float(line[9]))
m = Member.gql('WHERE name = :1', name).get()
if m:
m.earned = earned
m.spent = spent
m.balance = balance
m.adjusted = adjusted
else:
m = Member(name=name, spent=spent, earned=earned,
balance=balance, adjusted=adjusted)
m.put()
self.get()
class AdjustmentHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'admin/adjust.html', members=Member.all(),
adjustments=Adjustment.all())
def post(self):
member = Member.gql('WHERE name = :1', self.request.get('member').capitalize()).get()
if member:
pt = int(self.request.get('pt'))
reason = self.request.get('reason')
dt = datetime.now()
Adjustment(pt=pt, member=member, reason=reason, datetime=dt).put()
member.adjusted += pt
member.balance += pt
member.usable = min(member.balance, member.attendance * member.balance / 100)
member.put()
self.get()
class YamlHandler(webapp.RequestHandler):
def get(self):
render_to(self.response, 'dump.html',
dump='<form action="/o/yaml" method="POST"><input type="submit" /> </form>')
def post(self):
        stream = open('rps.yaml', 'r')
data = yaml.load(stream)
id_to_key = {}
items = defaultdict(list)
att = defaultdict(list)
memcache = {}
for m in Member.all():
memcache[m.name] = m.key()
for entry in data:
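            # Dispatch on which keys the YAML entry carries: adjustment entries
            # are skipped, item entries are collected per raid id, raid entries
            # create Encounters, and the remaining rows are attendance records.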
if 'adjustment_id' in entry:
continue
# member = Member.gql('WHERE name = :1', entry['member_name']).get()
# adj = Adjustment(pt=int(entry['adjustment_value']),
# reason=entry['adjustment_reason'],
# datetime=datetime.fromtimestamp(entry['adjustment_date']),
# member=member)
# adj.put()
# elif 'item_id' in entry:
if 'item_id' in entry:
items[entry['raid_id']].append((entry['item_name'],
entry['item_buyer'],
entry['item_value'],
entry['item_date']))
elif 'raid_added_by' in entry:
event = Event.gql('WHERE name = :1', entry['raid_name']).get()
if event:
if not entry['raid_note']:
entry['raid_note'] = ''
raid = Encounter(event=event, note=entry['raid_note'],
pt=int(entry['raid_value']),
datetime=datetime.fromtimestamp(entry['raid_date']))
raid.put()
id_to_key[entry['raid_id']] = raid.key()
else:
logging.error(entry)
elif 'member_lastraid' in entry:
continue
else:
try:
att[entry['raid_id']].append(entry['member_name'])
except KeyError:
logging.error(entry)
for rid, key in id_to_key.items():
r = Encounter.get(key)
for member in att[rid]:
m = memcache[member.capitalize()]
if m:
r.attendees.append(m)
else:
logging.error(member)
r.put()
for name, buyer, value, date in items[rid]:
try:
value = int(float(value))*(-1)
except UnicodeEncodeError:
logging.error(name)
item = Item.gql('WHERE name = :1', name).get()
if not item:
item = Item(name=name, default_cost=value)
item.put()
loot = Loot(item=item, encounter=key, cost=value,
looter=memcache[buyer],
datetime=datetime.fromtimestamp(date))
loot.put()
render_to(self.response, 'dump.html', dump=data)
class EditRaidHandler(webapp.RequestHandler):
def get(self, key):
raid = Encounter.get(db.Key(key))
if raid:
render_to(self.response, 'admin/parseadd.html', key=key,
members=set([m.name for m in raid.attending_members()]),
all_members=Member.all(), events=Event.all(),
datetime=raid.datetime.strftime('%Y.%m.%d %H:%M'),
items=[(i.item.gid, i.item.name, i.datetime.strftime('%Y.%m.%d %H:%M'), i.looter.name, (-1)*i.cost, str(i.key())) for i in raid.loots],
raid=raid)
def main():
application = webapp.WSGIApplication([('/o/', AdminHandler),
('/o/parse', ParseHandler),
('/o/events', EventHandler),
('/o/ajax', AjaxHandler),
('/o/import', ImportHandler),
('/o/adjust', AdjustmentHandler),
('/o/yaml', YamlHandler),
('/o/editraid/(.+)', EditRaidHandler)],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main()
| mit | -3,077,891,950,705,445,000 | 38.987245 | 157 | 0.45008 | false |
googleads/google-ads-python | google/ads/googleads/v7/services/types/campaign_budget_service.py | 1 | 6329 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v7.enums.types import (
response_content_type as gage_response_content_type,
)
from google.ads.googleads.v7.resources.types import (
campaign_budget as gagr_campaign_budget,
)
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
from google.rpc import status_pb2 as status # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.services",
marshal="google.ads.googleads.v7",
manifest={
"GetCampaignBudgetRequest",
"MutateCampaignBudgetsRequest",
"CampaignBudgetOperation",
"MutateCampaignBudgetsResponse",
"MutateCampaignBudgetResult",
},
)
class GetCampaignBudgetRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.GetCampaignBudget][google.ads.googleads.v7.services.CampaignBudgetService.GetCampaignBudget].
Attributes:
resource_name (str):
Required. The resource name of the campaign
budget to fetch.
"""
resource_name = proto.Field(proto.STRING, number=1,)
class MutateCampaignBudgetsRequest(proto.Message):
r"""Request message for
[CampaignBudgetService.MutateCampaignBudgets][google.ads.googleads.v7.services.CampaignBudgetService.MutateCampaignBudgets].
Attributes:
customer_id (str):
Required. The ID of the customer whose
campaign budgets are being modified.
operations (Sequence[google.ads.googleads.v7.services.types.CampaignBudgetOperation]):
Required. The list of operations to perform
on individual campaign budgets.
partial_failure (bool):
If true, successful operations will be
carried out and invalid operations will return
errors. If false, all operations will be carried
out in one transaction if and only if they are
all valid. Default is false.
validate_only (bool):
If true, the request is validated but not
executed. Only errors are returned, not results.
response_content_type (google.ads.googleads.v7.enums.types.ResponseContentTypeEnum.ResponseContentType):
The response content type setting. Determines
whether the mutable resource or just the
resource name should be returned post mutation.
"""
customer_id = proto.Field(proto.STRING, number=1,)
operations = proto.RepeatedField(
proto.MESSAGE, number=2, message="CampaignBudgetOperation",
)
partial_failure = proto.Field(proto.BOOL, number=3,)
validate_only = proto.Field(proto.BOOL, number=4,)
response_content_type = proto.Field(
proto.ENUM,
number=5,
enum=gage_response_content_type.ResponseContentTypeEnum.ResponseContentType,
)
class CampaignBudgetOperation(proto.Message):
r"""A single operation (create, update, remove) on a campaign
budget.
Attributes:
update_mask (google.protobuf.field_mask_pb2.FieldMask):
FieldMask that determines which resource
fields are modified in an update.
create (google.ads.googleads.v7.resources.types.CampaignBudget):
Create operation: No resource name is
expected for the new budget.
update (google.ads.googleads.v7.resources.types.CampaignBudget):
Update operation: The campaign budget is
expected to have a valid resource name.
remove (str):
Remove operation: A resource name for the removed budget is
expected, in this format:
``customers/{customer_id}/campaignBudgets/{budget_id}``
"""
update_mask = proto.Field(
proto.MESSAGE, number=4, message=field_mask.FieldMask,
)
create = proto.Field(
proto.MESSAGE,
number=1,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
update = proto.Field(
proto.MESSAGE,
number=2,
oneof="operation",
message=gagr_campaign_budget.CampaignBudget,
)
remove = proto.Field(proto.STRING, number=3, oneof="operation",)
class MutateCampaignBudgetsResponse(proto.Message):
r"""Response message for campaign budget mutate.
Attributes:
partial_failure_error (google.rpc.status_pb2.Status):
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results (Sequence[google.ads.googleads.v7.services.types.MutateCampaignBudgetResult]):
All results for the mutate.
"""
partial_failure_error = proto.Field(
proto.MESSAGE, number=3, message=status.Status,
)
results = proto.RepeatedField(
proto.MESSAGE, number=2, message="MutateCampaignBudgetResult",
)
class MutateCampaignBudgetResult(proto.Message):
r"""The result for the campaign budget mutate.
Attributes:
resource_name (str):
Returned for successful operations.
campaign_budget (google.ads.googleads.v7.resources.types.CampaignBudget):
The mutated campaign budget with only mutable fields after
mutate. The field will only be returned when
response_content_type is set to "MUTABLE_RESOURCE".
"""
resource_name = proto.Field(proto.STRING, number=1,)
campaign_budget = proto.Field(
proto.MESSAGE, number=2, message=gagr_campaign_budget.CampaignBudget,
)
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | -8,298,692,412,461,880,000 | 36.229412 | 128 | 0.681624 | false |
desec-io/desec-stack | api/desecapi/tests/test_replication.py | 1 | 5066 | import json
import os
import random
import string
import time
from datetime import datetime
from tempfile import TemporaryDirectory
from django.test import testcases
from rest_framework import status
from desecapi.replication import Repository
from desecapi.tests.base import DesecTestCase
class ReplicationTest(DesecTestCase):
def test_serials(self):
url = self.reverse('v1:serial')
zones = [
{'name': 'test.example.', 'edited_serial': 12345},
{'name': 'example.org.', 'edited_serial': 54321},
]
serials = {zone['name']: zone['edited_serial'] for zone in zones}
pdns_requests = [{
'method': 'GET',
'uri': self.get_full_pdns_url(r'/zones', ns='MASTER'),
'status': 200,
'body': json.dumps(zones),
}]
# Run twice to make sure cache output varies on remote address
for i in range(2):
response = self.client.get(path=url, REMOTE_ADDR='123.8.0.2')
self.assertStatus(response, status.HTTP_401_UNAUTHORIZED)
with self.assertPdnsRequests(pdns_requests):
response = self.client.get(path=url, REMOTE_ADDR='10.8.0.2')
self.assertStatus(response, status.HTTP_200_OK)
self.assertEqual(response.data, serials)
# Do not expect pdns request in next iteration (result will be cached)
pdns_requests = []
class RepositoryTest(testcases.TestCase):
def assertGit(self, path):
self.assertTrue(
os.path.exists(os.path.join(path, '.git')),
f'Expected a git repository at {path} but did not find .git subdirectory.'
)
def assertHead(self, repo, message=None, sha=None):
actual_sha, actual_message = repo.get_head()
if actual_sha is None:
self.fail(f'Expected HEAD to have commit message "{message}" and hash "{sha}", but repository has no '
f'commits.')
if sha:
self.assertEqual(actual_sha, sha, f'Expected HEAD to have hash "{sha}" but had "{actual_sha}".')
if message:
self.assertIn(
message, actual_message,
f'Expected "{message}" to appear in the last commit message, but only found "{actual_message}".',
)
def assertHasCommit(self, repo: Repository, commit_id):
self.assertIsNotNone(
repo.get_commit(commit_id)[0], f'Expected repository to have commit {commit_id}, but it had not.'
)
def assertHasCommits(self, repo: Repository, commit_id_list):
for commit in commit_id_list:
self.assertHasCommit(repo, commit)
def assertHasNotCommit(self, repo: Repository, commit_id):
self.assertIsNone(
repo.get_commit(commit_id)[0], f'Expected repository to not have commit {commit_id}, but it had.'
)
def assertHasNotCommits(self, repo: Repository, commit_id_list):
for commit in commit_id_list:
self.assertHasNotCommit(repo, commit)
def assertNoCommits(self, repo: Repository):
head = repo.get_head()
self.assertEqual(head, (None, None), f'Expected that repository has no commits, but HEAD was {head}.')
@staticmethod
def _random_string(length):
return ''.join(random.choices(string.ascii_lowercase, k=length))
def _random_commit(self, repo: Repository, message=''):
with open(os.path.join(repo.path, self._random_string(16)), 'w') as f:
f.write(self._random_string(500))
repo.commit_all(message)
return repo.get_head()[0]
def _random_commits(self, num, repo: Repository, message=''):
return [self._random_commit(repo, message) for _ in range(num)]
def test_init(self):
with TemporaryDirectory() as path:
repo = Repository(path)
repo.init()
self.assertGit(path)
def test_commit(self):
with TemporaryDirectory() as path:
repo = Repository(path)
repo.init()
repo.commit_all('commit1')
self.assertNoCommits(repo)
with open(os.path.join(path, 'test_commit'), 'w') as f:
f.write('foo')
repo.commit_all('commit2')
self.assertHead(repo, message='commit2')
def test_remove_history(self):
with TemporaryDirectory() as path:
repo = Repository(path)
repo.init()
remove = self._random_commits(5, repo, 'to be removed') # we're going to remove these 'old' commits
keep = self._random_commits(1, repo, 'anchor to be kept') # as sync anchor, the last 'old' commit is kept
cutoff = datetime.now()
time.sleep(1)
keep += self._random_commits(5, repo, 'to be kept') # we're going to keep these 'new' commits
self.assertHasCommits(repo, remove + keep)
repo.remove_history(before=cutoff)
self.assertHasCommits(repo, keep)
self.assertHasNotCommits(repo, remove)
| mit | 7,817,141,733,850,955,000 | 36.25 | 118 | 0.606593 | false |
amonapp/amon | amon/apps/_account/forms.py | 1 | 6520 | from django import forms
from django.contrib.auth import authenticate
from django.conf import settings
from django.contrib.auth import get_user_model
# from amon.apps.notifications.models import notifications_model
# from amon.apps.alerts.models import alerts_model
from amon.apps.account.models import user_preferences_model, forgotten_pass_tokens_model
# from amon.apps.api.models import api_key_model
from timezone_field import TimeZoneFormField
from amon.apps.account.mailer import send_email_forgotten_password
User = get_user_model()
class LoginForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
remember_me = forms.BooleanField(widget=forms.CheckboxInput(), label='Remember Me', required=False)
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = authenticate(email=email, password=password)
if user:
return self.cleaned_data
raise forms.ValidationError("Invalid login details")
def clean_remember_me(self):
remember_me = self.cleaned_data.get('remember_me')
if not remember_me:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = True
else:
settings.SESSION_EXPIRE_AT_BROWSER_CLOSE = False
return remember_me
class AdminUserForm(forms.Form):
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
password = forms.CharField(required=True, widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'}))
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
user = User.objects.filter(email=email).count()
if user:
raise forms.ValidationError("User already exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
user = User.objects.create_user(email, password)
user.is_admin = True
user.is_staff = True
user.is_superuser = True
user.save()
# notifications_model.save(data={"email": email}, provider_id='email')
# api_key_model.add_initial_data()
class ProfileForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
user_preferences = user_preferences_model.get_preferences(user_id=self.user.id)
user_timezone = user_preferences.get('timezone', 'UTC')
super(ProfileForm, self).__init__(*args, **kwargs)
self.fields['timezone'].widget.attrs.update({'select2-dropdown': '', 'data-size': 360})
self.fields['timezone'].initial = user_timezone
self.fields['email'].initial = self.user.email
email = forms.EmailField(required=True, widget=forms.TextInput(attrs={'placeholder': 'Email'}))
timezone = TimeZoneFormField()
# Check email uniqueness
def clean_email(self):
email = self.cleaned_data.get('email')
if email:
if self.user.email != email:
unique = User.objects.filter(email__iexact=email).count()
if unique > 0:
raise forms.ValidationError(u'An user with this email address already exists.')
return email
def save(self):
data = {'timezone': str(self.cleaned_data['timezone'])}
        user_preferences_model.save_preferences(user_id=self.user.id, data=data)
self.user.email = self.cleaned_data['email']
self.user.save()
class ChangePasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user', None)
super(ChangePasswordForm, self).__init__(*args, **kwargs)
current_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
new_password = forms.CharField(required=True, widget=(forms.PasswordInput(attrs={'placeholder': 'Password'})))
def clean_current_password(self):
password = self.cleaned_data.get('current_password')
if self.user.check_password(password):
return self.cleaned_data
raise forms.ValidationError("Your current password is not correct")
def save(self):
password = self.cleaned_data.get('new_password')
self.user.set_password(password)
self.user.save()
return True
class ForgottenPasswordForm(forms.Form):
def __init__(self, *args, **kwargs):
super(ForgottenPasswordForm, self).__init__(*args, **kwargs)
email = forms.EmailField(required=True, widget=(forms.TextInput(attrs={'placeholder': 'Your Login Email'})))
def clean(self):
email = self.cleaned_data.get('email')
if email:
user = User.objects.filter(email=email).count()
if user == 0:
raise forms.ValidationError("User does not exists")
return self.cleaned_data
def save(self):
email = self.cleaned_data.get('email')
token = forgotten_pass_tokens_model.set_token(email=email)
send_email_forgotten_password(token=token, recipients=[email])
return True
class ResetPasswordForm(forms.Form):
password = forms.CharField(
required=True,
label='Your new password',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Password'})
)
repeat_password = forms.CharField(
required=True,
label='Confirm it',
widget=forms.PasswordInput(render_value=False, attrs={'placeholder': 'Repeat Password'})
)
def clean(self):
repeat_password = self.cleaned_data.get('repeat_password')
password = self.cleaned_data.get('password')
if repeat_password and password:
if repeat_password != password:
raise forms.ValidationError("Passwords does not match")
return self.cleaned_data
def save(self, user=None):
password = self.cleaned_data.get('password')
user.set_password(password)
user.save() | agpl-3.0 | 8,258,274,880,384,959,000 | 29.905213 | 128 | 0.639724 | false |
TheChymera/LabbookDB | labbookdb/tests/test_report.py | 1 | 3726 | import pytest
from os import path
DB_PATH = '~/.demolog/meta.db'
DATA_DIR = path.join(path.dirname(path.realpath(__file__)),'../../example_data/')
def test_implant_angle_filter():
from labbookdb.report.selection import animal_id, animal_treatments, animal_operations
import numpy as np
db_path=DB_PATH
df = animal_operations(db_path)
#validate target by code
df = df[~df['OrthogonalStereotacticTarget_code'].isnull()]
df = df[df['OrthogonalStereotacticTarget_code'].str.contains('dr')]
#check pitch
df = df[~df['OrthogonalStereotacticTarget_pitch'].isin([0,np.NaN])]
animals = df['Animal_id'].tolist()
animals_eth = [animal_id(db_path,'ETH/AIC',i,reverse=True) for i in animals]
assert animals_eth == ['5684']
def test_animal_cage_treatment_control_in_report():
"""Check if animal which died before the cagetreatment was applied to its last home cage is indeed not showing a cage treatment, but still showing the animal treatment."""
from labbookdb.report.tracking import animals_info
df = animals_info(DB_PATH,
save_as=None,
functional_scan_responders=True,
treatments=True,
)
assert df[df['ETH/AIC']=='6255']['cage_treatment'].values[0] == ""
assert df[df['ETH/AIC']=='6255']['animal_treatment'].values[0] == 'aFluIV_'
def test_animal_id():
"""Check if LabbookDB animal ID is correctly reported based on external database identifier."""
from labbookdb.report.selection import animal_id
my_id = animal_id(DB_PATH,
database='ETH/AIC',
identifier='6255'
)
assert my_id == 41
def test_bids_eventsfile():
"""Check if correct BIDS events file can be sourced."""
from labbookdb.report.tracking import bids_eventsfile
import pandas as pd
df = bids_eventsfile(DB_PATH,'chr_longSOA')
bids_eventsfile = path.join(DATA_DIR,'bids_eventsfile.csv')
df_ = pd.read_csv(bids_eventsfile, index_col=0)
assert df[['onset','duration']].equals(df_[['onset','duration']])
def test_drinking_by_cage_treatment(
treatment_relative_date=True,
rounding='D',
):
from labbookdb.report.tracking import treatment_group, append_external_identifiers, qualitative_dates, cage_consumption
from labbookdb.report.selection import cage_periods, cage_drinking_measurements
known_cage_ids = [25, 38, 41]
known_consumption_values = [2.35, 2.51, 2.94, 2.95, 3.16, 3.17, 3.22, 3.23, 3.24, 3.25, 3.49, 3.63, 3.72, 4.04, 4.09, 4.58, 4.98, 5.15, 5.31, 5.39, 5.54, 5.97, 6.73, 6.78]
df = cage_drinking_measurements(DB_PATH,['cFluDW'])
df = cage_consumption(DB_PATH,df)
fuzzy_matching = {
"ofM":[-14,-15,-13,-7,-8,-6],
"ofMaF":[0,-1],
"ofMcF1":[14,13,15],
"ofMcF2":[28,27,29],
"ofMpF":[45,44,46,43,47],
}
df = qualitative_dates(df,
iterator_column='Cage_id',
date_column='relative_end_date',
label='qualitative_date',
fuzzy_matching=fuzzy_matching,
)
cage_ids = sorted(df['Cage_id'].unique())
assert cage_ids == known_cage_ids
consumption_values = df['day_animal_consumption'].values
consumption_values = [round(i, 2) for i in consumption_values]
consumption_values = sorted(list(set(consumption_values)))
assert consumption_values == known_consumption_values
def test_groups():
"""Create a `pandas.DataFrame` containing treatment and genotype group assignments"""
from labbookdb.report.tracking import treatment_group, append_external_identifiers
known_sorted_ids = [
'5667',
'5668',
'5673',
'5674',
'5675',
'5689',
'5690',
'5691',
'5692',
'5694',
'5699',
'5700',
'5704',
'5705',
'5706',
'6254',
'6256',
'6262',
]
df = treatment_group(DB_PATH, ['cFluDW','cFluDW_'], level='cage')
df = append_external_identifiers(DB_PATH, df, ['Genotype_code'])
sorted_ids = sorted(df['ETH/AIC'].tolist())
assert sorted_ids == known_sorted_ids
| bsd-3-clause | -7,529,482,802,946,345,000 | 30.05 | 172 | 0.69431 | false |
Micronaet/micronaet-production | production_accounting_external_closed/report/production_parser.py | 1 | 2846 | # -*- coding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
import os
import sys
import logging
import openerp
import openerp.netsvc as netsvc
import openerp.addons.decimal_precision as dp
from openerp.report import report_sxw
from openerp.osv import fields, osv, expression, orm
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp import SUPERUSER_ID, api
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.float_utils import float_round as round
from openerp.tools import (DEFAULT_SERVER_DATE_FORMAT,
DEFAULT_SERVER_DATETIME_FORMAT,
DATETIME_FORMATS_MAP,
float_compare)
_logger = logging.getLogger(__name__)
class Parser(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(Parser, self).__init__(cr, uid, name, context)
self.localcontext.update({
'get_closed_object': self.get_closed_object,
'get_date': self.get_date,
})
    def get_date(self):
        ''' Current date and time, formatted for the report
        '''
        return datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT)
    def get_closed_object(self):
        ''' Sale order lines still in open production but already closed
        '''
sol_pool = self.pool.get('sale.order.line')
sol_ids = sol_pool.search(self.cr, self.uid, [
('mrp_id.state', 'not in', ('cancel', 'done')),
('mrp_id', '!=', False),
('go_in_production', '=', True),
('mx_closed', '=', True),
])
items = []
for item in sorted(sol_pool.browse(
self.cr, self.uid, sol_ids),
            key=lambda x: (x.mrp_id.name, x.mrp_sequence)):
if item.product_uom_qty > item.product_uom_maked_sync_qty:
items.append(item)
return items
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -6,230,255,985,330,158,000 | 35.961039 | 79 | 0.608925 | false |
PU-Crypto/AES | Rijndael/KeySchedule.py | 1 | 1376 | # -*- coding: utf-8 -*-
#KeySchedule
from Rijndael.SubBytes import *
from Rijndael.Tables import RijndaelRcon
def RotWord(Spalte):
    # Rotate the byte positions in the column one step to the left
output = list()
output.append(Spalte[1])
output.append(Spalte[2])
output.append(Spalte[3])
output.append(Spalte[0])
return output
def XorRcon(Spalte, SpalteVor4, RconCount):
    # XOR the special-case columns (always the first column of a round key) step by step, including the Rcon table
output = list()
Rcon = RijndaelRcon.Rcon[RconCount]
for i in range(0,4):
output.append(format(int(Spalte[i],16)^int(SpalteVor4[i], 16)^int(format(Rcon[i], '#04x'),16), '#04x'))
return output
def Xor(Spalte, SpalteVor4):
    # XOR the two columns value by value
output = list()
for i in range(0,4):
        output.append(format(int(Spalte[i], 16)^int(SpalteVor4[i], 16), '#04x'))  # hexadecimal
return output
def KeySchedule(Key):
    # Expand the key into a total of 10 further, mutually dependent round keys
roundCounter = 0
for i in range(4,41,4):
Key.append(RotWord(Key[i-1]))
Key[i] = TranslateToSBox(Key[i])
Key[i] = XorRcon(Key[i],Key[i-4],roundCounter)
roundCounter += 1
for j in range(i+1,i+4):
Key.append(Xor(Key[j-1],Key[j-4]))
return Key
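
# Minimal usage sketch (illustrative addition, not part of the original
# module): expands the AES-128 example key from FIPS-197 Appendix A. It
# assumes the key is supplied as 4 columns of four '0x..' hex strings,
# matching the format the functions above produce, and that the sibling
# Rijndael modules (SubBytes, Tables) are importable.
if __name__ == '__main__':
    example_key = [
        ['0x2b', '0x7e', '0x15', '0x16'],
        ['0x28', '0xae', '0xd2', '0xa6'],
        ['0xab', '0xf7', '0x15', '0x88'],
        ['0x09', '0xcf', '0x4f', '0x3c'],
    ]
    expanded = KeySchedule(example_key)
    print(len(expanded))   # 44 columns: the original key plus 10 round keys
    print(expanded[43])    # last column of the final round key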
| lgpl-3.0 | 7,545,850,936,786,939,000 | 28.913043 | 124 | 0.653343 | false |
sbt9uc/osf.io | tests/api_tests/users/test_views.py | 1 | 32030 | # -*- coding: utf-8 -*-
import urlparse
from nose.tools import * # flake8: noqa
from website.models import Node
from website.util.sanitize import strip_html
from tests.base import ApiTestCase
from tests.factories import AuthUserFactory, DashboardFactory, FolderFactory, ProjectFactory
from api.base.settings.defaults import API_BASE
class TestUsers(ApiTestCase):
def setUp(self):
super(TestUsers, self).setUp()
self.user_one = AuthUserFactory()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUsers, self).tearDown()
def test_returns_200(self):
res = self.app.get('/{}users/'.format(API_BASE))
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_find_user_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_two._id, ids)
def test_all_users_in_users(self):
url = "/{}users/".format(API_BASE)
res = self.app.get(url)
user_son = res.json['data']
ids = [each['id'] for each in user_son]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_multiple_in_users(self):
url = "/{}users/?filter[fullname]=fred".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_in(self.user_two._id, ids)
def test_find_single_user_in_users(self):
url = "/{}users/?filter[fullname]=my".format(API_BASE)
self.user_one.fullname = 'My Mom'
self.user_one.save()
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_find_no_user_in_users(self):
url = "/{}users/?filter[fullname]=NotMyMom".format(API_BASE)
res = self.app.get(url)
user_json = res.json['data']
ids = [each['id'] for each in user_json]
assert_not_in(self.user_one._id, ids)
assert_not_in(self.user_two._id, ids)
def test_users_list_takes_profile_image_size_param(self):
size = 42
url = "/{}users/?profile_image_size={}".format(API_BASE, size)
res = self.app.get(url)
user_json = res.json['data']
for user in user_json:
profile_image_url = user['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserDetail(ApiTestCase):
def setUp(self):
super(TestUserDetail, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
def tearDown(self):
super(TestUserDetail, self).tearDown()
def test_gets_200(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_correct_pk_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
user_json = res.json['data']
assert_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['twitter'], 'howtopizza')
def test_get_incorrect_pk_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
def test_get_incorrect_pk_user_not_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
user_json = res.json['data']
assert_not_equal(user_json['attributes']['fullname'], self.user_one.fullname)
assert_equal(user_json['attributes']['fullname'], self.user_two.fullname)
def test_user_detail_takes_profile_image_size_param(self):
size = 42
url = "/{}users/{}/?profile_image_size={}".format(API_BASE, self.user_one._id, size)
res = self.app.get(url)
user_json = res.json['data']
profile_image_url = user_json['attributes']['profile_image_url']
query_dict = urlparse.parse_qs(urlparse.urlparse(profile_image_url).query)
assert_equal(int(query_dict.get('size')[0]), size)
class TestUserNodes(ApiTestCase):
def setUp(self):
super(TestUserNodes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_one.save()
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One",
is_public=True,
creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One",
is_public=False,
creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two",
is_public=True,
creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two",
is_public=False,
creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One",
is_public=False,
creator=self.user_one,
is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserNodes, self).tearDown()
def test_authorized_in_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_anonymous_gets_200(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(res.content_type, 'application/vnd.api+json')
def test_get_projects_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_not_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_projects_logged_in_as_different_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
node_json = res.json['data']
ids = [each['id'] for each in node_json]
assert_in(self.public_project_user_two._id, ids)
assert_not_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
class TestUserRoutesNodeRoutes(ApiTestCase):
def setUp(self):
super(TestUserRoutesNodeRoutes, self).setUp()
self.user_one = AuthUserFactory()
self.user_one.social['twitter'] = 'howtopizza'
self.user_two = AuthUserFactory()
self.public_project_user_one = ProjectFactory(title="Public Project User One", is_public=True, creator=self.user_one)
self.private_project_user_one = ProjectFactory(title="Private Project User One", is_public=False, creator=self.user_one)
self.public_project_user_two = ProjectFactory(title="Public Project User Two", is_public=True, creator=self.user_two)
self.private_project_user_two = ProjectFactory(title="Private Project User Two", is_public=False, creator=self.user_two)
self.deleted_project_user_one = FolderFactory(title="Deleted Project User One", is_public=False, creator=self.user_one, is_deleted=True)
self.folder = FolderFactory()
self.deleted_folder = FolderFactory(title="Deleted Folder User One", is_public=False, creator=self.user_one, is_deleted=True)
self.dashboard = DashboardFactory()
def tearDown(self):
super(TestUserRoutesNodeRoutes, self).tearDown()
Node.remove()
def test_get_200_path_users_me_userone_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_me_usertwo_logged_in(self):
url = "/{}users/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
    def test_get_401_path_users_me_no_user(self):
        url = "/{}users/me/".format(API_BASE)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)
def test_get_404_path_users_user_id_me_user_logged_in(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_no_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_me_unauthorized_user(self):
url = "/{}users/{}/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_200_path_users_user_id_user_logged_in(self):
url = "/{}users/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_no_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_get_200_path_users_user_id_unauthorized_user(self):
url = "/{}users/{}/".format(API_BASE, self.user_two._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['id'], self.user_two._id)
def test_get_200_path_users_me_nodes_user_logged_in(self):
url = "/{}users/me/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
    def test_get_401_path_users_me_nodes_no_user(self):
        url = "/{}users/me/nodes/".format(API_BASE)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 401)
def test_get_200_path_users_user_id_nodes_user_logged_in(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_no_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_200_path_users_user_id_nodes_unauthorized_user(self):
url = "/{}users/{}/nodes/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth)
assert_equal(res.status_code, 200)
# an anonymous/unauthorized user can only see the public projects user_one contributes to.
ids = {each['id'] for each in res.json['data']}
assert_in(self.public_project_user_one._id, ids)
assert_not_in(self.private_project_user_one._id, ids)
assert_not_in(self.public_project_user_two._id, ids)
assert_not_in(self.private_project_user_two._id, ids)
assert_not_in(self.folder._id, ids)
assert_not_in(self.deleted_folder._id, ids)
assert_not_in(self.deleted_project_user_one._id, ids)
def test_get_404_path_users_user_id_nodes_me_user_logged_in(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_unauthorized_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_users_user_id_nodes_me_no_user(self):
url = "/{}users/{}/nodes/me/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_user_logged_in(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_me_no_user(self):
url = "/{}nodes/me/".format(API_BASE)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_user_logged_in(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_unauthorized_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_get_404_path_nodes_user_id_no_user(self):
url = "/{}nodes/{}/".format(API_BASE, self.user_one._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 404)
class TestUserUpdate(ApiTestCase):
def setUp(self):
super(TestUserUpdate, self).setUp()
self.user_one = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.user_one.save()
self.user_one_url = "/v2/users/{}/".format(self.user_one._id)
self.user_two = AuthUserFactory()
self.user_two.save()
self.new_user_one_data = {
'id': self.user_one._id,
'fullname': 'el-Hajj Malik el-Shabazz',
'given_name': 'Malcolm',
'middle_names': 'Malik el-Shabazz',
'family_name': 'X',
'suffix': 'Sr.',
'gitHub': 'newGitHub',
'scholar': 'newScholar',
'personal_website': 'http://www.newpersonalwebsite.com',
'twitter': 'http://www.newpersonalwebsite.com',
'linkedIn': 'newLinkedIn',
'impactStory': 'newImpactStory',
'orcid': 'newOrcid',
'researcherId': 'newResearcherId',
}
def tearDown(self):
super(TestUserUpdate, self).tearDown()
def test_patch_user_logged_out(self):
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, expect_errors=True)
assert_equal(res.status_code, 401)
def test_patch_user_without_required_field(self):
# PATCH does not require required fields
res = self.app.patch_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
self.user_one.reload()
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
def test_put_user_without_required_field(self):
# PUT requires all required fields
res = self.app.put_json_api(self.user_one_url, {
'family_name': self.new_user_one_data['family_name'],
}, auth=self.user_one.auth, expect_errors=True)
assert_equal(res.status_code, 400)
    def test_partial_patch_user_logged_in_with_github(self):
        # Test to make sure new fields are patched and old fields stay the same
        res = self.app.patch_json_api(self.user_one_url, {
            'id': self.user_one._id,
            'fullname': 'new_fullname',
            'gitHub': 'even_newer_github',
            'suffix': 'The Millionth'
        }, auth=self.user_one.auth)
        self.user_one.reload()
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
        assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
        assert_equal(res.json['data']['attributes']['gitHub'], 'even_newer_github')
        assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
        assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
        assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
        assert_equal(res.json['data']['attributes']['personal_website'], self.user_one.social['personal'])
        assert_equal(res.json['data']['attributes']['twitter'], self.user_one.social['twitter'])
        assert_equal(res.json['data']['attributes']['linkedIn'], self.user_one.social['linkedIn'])
        assert_equal(res.json['data']['attributes']['impactStory'], self.user_one.social['impactStory'])
        assert_equal(res.json['data']['attributes']['orcid'], self.user_one.social['orcid'])
        assert_equal(res.json['data']['attributes']['researcherId'], self.user_one.social['researcherId'])
        assert_equal(self.user_one.fullname, 'new_fullname')
        assert_equal(self.user_one.suffix, 'The Millionth')
        assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_partial_patch_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.patch_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], self.user_one.social['github'])
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(res.json['data']['attributes']['personal_website'], self.user_one.social['personal'])
assert_equal(res.json['data']['attributes']['twitter'], self.user_one.social['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.user_one.social['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.user_one.social['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.user_one.social['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.user_one.social['researcherId'])
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], self.user_one.social['github'])
def test_partial_put_user_logged_in(self):
# Test to make sure new fields are patched and old fields stay the same
res = self.app.put_json_api(self.user_one_url, {
'id': self.user_one._id,
'fullname': 'new_fullname',
'gitHub': 'even_newer_github',
'suffix': 'The Millionth'
}, auth=self.user_one.auth)
self.user_one.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], 'new_fullname')
assert_equal(res.json['data']['attributes']['suffix'], 'The Millionth')
assert_equal(res.json['data']['attributes']['gitHub'], 'even_newer_github')
assert_equal(res.json['data']['attributes']['given_name'], self.user_one.given_name)
assert_equal(res.json['data']['attributes']['middle_names'], self.user_one.middle_names)
assert_equal(res.json['data']['attributes']['family_name'], self.user_one.family_name)
assert_equal(self.user_one.fullname, 'new_fullname')
assert_equal(self.user_one.suffix, 'The Millionth')
assert_equal(self.user_one.social['github'], 'even_newer_github')
def test_put_user_logged_in(self):
# Logged in user updates their user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], self.new_user_one_data['fullname'])
assert_equal(res.json['data']['attributes']['given_name'], self.new_user_one_data['given_name'])
assert_equal(res.json['data']['attributes']['middle_names'], self.new_user_one_data['middle_names'])
assert_equal(res.json['data']['attributes']['family_name'], self.new_user_one_data['family_name'])
assert_equal(res.json['data']['attributes']['suffix'], self.new_user_one_data['suffix'])
assert_equal(res.json['data']['attributes']['gitHub'], self.new_user_one_data['gitHub'])
assert_equal(res.json['data']['attributes']['personal_website'], self.new_user_one_data['personal_website'])
assert_equal(res.json['data']['attributes']['twitter'], self.new_user_one_data['twitter'])
assert_equal(res.json['data']['attributes']['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(res.json['data']['attributes']['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(res.json['data']['attributes']['orcid'], self.new_user_one_data['orcid'])
assert_equal(res.json['data']['attributes']['researcherId'], self.new_user_one_data['researcherId'])
self.user_one.reload()
assert_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
assert_equal(self.user_one.given_name, self.new_user_one_data['given_name'])
assert_equal(self.user_one.middle_names, self.new_user_one_data['middle_names'])
assert_equal(self.user_one.family_name, self.new_user_one_data['family_name'])
assert_equal(self.user_one.suffix, self.new_user_one_data['suffix'])
assert_equal(self.user_one.social['github'], self.new_user_one_data['gitHub'])
assert_equal(self.user_one.social['personal'], self.new_user_one_data['personal_website'])
assert_equal(self.user_one.social['twitter'], self.new_user_one_data['twitter'])
assert_equal(self.user_one.social['linkedIn'], self.new_user_one_data['linkedIn'])
assert_equal(self.user_one.social['impactStory'], self.new_user_one_data['impactStory'])
assert_equal(self.user_one.social['orcid'], self.new_user_one_data['orcid'])
assert_equal(self.user_one.social['researcherId'], self.new_user_one_data['researcherId'])
def test_put_user_logged_out(self):
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, expect_errors=True)
assert_equal(res.status_code, 401)
def test_put_wrong_user(self):
# User tries to update someone else's user information via put
res = self.app.put_json_api(self.user_one_url, self.new_user_one_data, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_patch_wrong_user(self):
# User tries to update someone else's user information via patch
res = self.app.patch_json_api(self.user_one_url, {
'fullname': self.new_user_one_data['fullname'],
}, auth=self.user_two.auth, expect_errors=True)
assert_equal(res.status_code, 403)
self.user_one.reload()
assert_not_equal(self.user_one.fullname, self.new_user_one_data['fullname'])
def test_update_user_sanitizes_html_properly(self):
"""Post request should update resource, and any HTML in fields should be stripped"""
bad_fullname = 'Malcolm <strong>X</strong>'
bad_family_name = 'X <script>alert("is")</script> a cool name'
res = self.app.patch_json_api(self.user_one_url, {
'fullname': bad_fullname,
'family_name': bad_family_name,
}, auth=self.user_one.auth)
assert_equal(res.status_code, 200)
assert_equal(res.json['data']['attributes']['fullname'], strip_html(bad_fullname))
assert_equal(res.json['data']['attributes']['family_name'], strip_html(bad_family_name))
class TestDeactivatedUser(ApiTestCase):
def setUp(self):
super(TestDeactivatedUser, self).setUp()
self.user = AuthUserFactory()
def test_deactivated_user_returns_410_response(self):
url = '/{}users/{}/'.format(API_BASE, self.user._id)
        res = self.app.get(url, auth=self.user.auth, expect_errors=False)
assert_equal(res.status_code, 200)
self.user.is_disabled = True
self.user.save()
        res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 410)
class TestExceptionFormatting(ApiTestCase):
def setUp(self):
super(TestExceptionFormatting, self).setUp()
self.user = AuthUserFactory.build(
fullname='Martin Luther King Jr.',
given_name='Martin',
family_name='King',
suffix='Jr.',
social=dict(
github='userOneGithub',
scholar='userOneScholar',
personal='http://www.useronepersonalwebsite.com',
twitter='userOneTwitter',
linkedIn='userOneLinkedIn',
impactStory='userOneImpactStory',
orcid='userOneOrcid',
researcherId='userOneResearcherId'
)
)
self.url = '/{}users/{}/'.format(API_BASE, self.user._id)
self.user_two = AuthUserFactory()
def test_updates_user_with_no_fullname(self):
res = self.app.put_json_api(self.url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert('fullname' in res.json['errors'][0]['meta']['field'])
assert('This field is required.' in res.json['errors'][0]['detail'])
def test_updates_user_unauthorized(self):
res = self.app.put_json_api(self.url, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': "Authentication credentials were not provided."})
def test_updates_user_forbidden(self):
res = self.app.put_json_api(self.url, auth=self.user_two.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'You do not have permission to perform this action.'})
def test_user_does_not_exist_formatting(self):
url = '/{}users/{}/'.format(API_BASE, '12345')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
errors = res.json['errors']
assert(isinstance(errors, list))
assert_equal(errors[0], {'detail': 'Not found.'})
def test_basic_auth_me_wrong_password(self):
url = '/{}users/{}/'.format(API_BASE, 'me')
res = self.app.get(url, auth=(self.user.username, 'nottherightone'), expect_errors=True)
assert_equal(res.status_code, 401)
| apache-2.0 | 1,204,231,969,254,728,200 | 46.949102 | 144 | 0.613862 | false |
LazoCoder/Pokemon-Terminal | tests/test_main.py | 1 | 3018 | #!/usr/bin/env python3
# To run the tests, use: python3 -m pytest --capture=sys
from pokemonterminal.database import Database
from pokemonterminal.filters import Filter, RegionFilter, NonExtrasFilter
from pokemonterminal.main import main
from tests.test_utils import region_dict
import random
db = Database()
def broken_test_no_args(capsys):
""" FIXME: Now the the main file accepts zero arguments """
main([__file__])
out, err = capsys.readouterr()
assert out.startswith("No command line arguments specified.")
def broken_test_three_args(capsys):
""" FIXME: Now the main file accepts way more then 3 arguments """
main([__file__, 1, 2, 3])
out, err = capsys.readouterr()
assert out.startswith("Invalid number of arguments.")
def broken_test_two_letters(capsys):
""" FIXME: The search argorhytm is now bultin the name filter """
main([__file__, 'bu'])
out, err = capsys.readouterr()
assert 'Butterfree' in out
# prefix search only
main([__file__, 'ut'])
out, err = capsys.readouterr()
assert 'butterfree' not in out.lower()
def test_extra(capsys):
main(['-e', '-dr'])
# TODO: Assertion based on number of files on ./Extras
assert str(random.choice(Filter.filtered_list)).startswith('---')
def test_region_names(capsys):
try:
main(['-r', 'wrong_region', '-dr'])
except SystemExit:
pass # It's supposed to crash.
err: str = capsys.readouterr()[1].strip()
assert err.endswith(
"(choose from 'kanto', 'johto', 'hoenn', 'sinnoh', 'unova', 'kalos')")
def test_all(capsys):
main(['-dr', '-ne'])
out = capsys.readouterr()[0]
for region_info in region_dict.values():
assert (region_info.first or '') in out # convert None --> ''
assert (region_info.last or '') in out # convert None --> ''
def test_region(capsys):
regFilter = RegionFilter(None, None)
noExtras = NonExtrasFilter(None, None)
# matrix test of first pokemon name and last pokemon name from all regions
for name, region_info in region_dict.items():
filtered = [p for p in Filter.POKEMON_LIST
if regFilter.matches(p, name)
and noExtras.matches(p, None)]
assert len(filtered) == region_info.size
assert random.choice(filtered).get_region() == name
assert filtered[0].get_id() == ('%03d' % (region_info.start))
assert filtered[-1].get_id() == ('%03d' % (region_info.end))
assert filtered[0].get_name() == region_info.first.lower()
assert filtered[-1].get_name() == region_info.last.lower()
if __name__ == '__main__':
# Test runner: Runs all functions whose name begins with `test_`
# locals() changes when trying to do this without the list comprehension!!!
name_funcs = [(n, f) for n, f in locals().items() if n.startswith('test_')]
for name, func in name_funcs:
if callable(func):
func()
else:
print(name + ' is not callable()!')
| gpl-3.0 | -2,077,544,168,449,750,000 | 33.689655 | 79 | 0.628893 | false |
griffy/Pyap | pyap/library/db.py | 1 | 2688 | # Pyap - The Python Audio Player Library
#
# Copyright (c) 2012 Joel Griffith
# Copyright (c) 2005 Joe Wreschnig
# Copyright (c) 2002 David I. Lehn
# Copyright (c) 2005-2011 the SQLAlchemy authors and contributors
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, Integer, Unicode, MetaData
from sqlalchemy.schema import ForeignKey
from sqlalchemy.orm import mapper, relationship, sessionmaker
from pyap.audio import Audio
from pyap.playlist import Playlist
def setup(uri):
# TODO: echo should be false
if uri is None:
engine = create_engine('sqlite:///:memory:', echo=True)
else:
engine = create_engine('sqlite:///' + uri, echo=True)
metadata = MetaData()
#audio_types_table = Table('audio_types', metadata,
# Column('id', Integer, primary_key=True),
# Column('type', Unicode, unique=True)
#)
audio_table = Table('audio', metadata,
Column('id', Integer, primary_key=True),
Column('uri', Unicode, unique=True, index=True),
Column('type', Integer, nullable=False),
Column('artist', Unicode),
Column('title', Unicode),
Column('album', Unicode),
Column('track', Integer),
Column('length', Integer)
)
playlist_table = Table('playlists', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode, unique=True, index=True)
)
# many-to-many junction table for audio and playlists
audio_playlist_table = Table('audio_playlists', metadata,
Column('audio_id', Integer, ForeignKey('audio.id')),
Column('playlist_id', Integer, ForeignKey('playlists.id'))
)
metadata.create_all(engine)
mapper(Audio, audio_table)
mapper(Playlist, playlist_table, properties={
'audio': relationship(Audio, secondary=audio_playlist_table,
backref='playlists')}
)
return sessionmaker(bind=engine)
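# Minimal usage sketch (an illustration added here, not part of the original
# module): setup() returns a sessionmaker bound to the engine, so querying
# the classically-mapped Playlist class works as usual.
def _example_query_playlists():
    Session = setup(None)  # in-memory SQLite database
    session = Session()
    return session.query(Playlist).all()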
| gpl-2.0 | 4,940,832,737,651,426,000 | 34.84 | 68 | 0.682292 | false |
jkakavas/creepy | creepy/plugins/googleplus/googleplus.py | 1 | 10302 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from models.InputPlugin import InputPlugin
from oauth2client.client import OAuth2WebServerFlow, AccessTokenCredentials
from googleapiclient.discovery import build
import logging
import os
import urllib
import httplib2
import dateutil.parser
from PyQt4.QtGui import QWizard, QWizardPage, QLabel, QLineEdit, QVBoxLayout, QHBoxLayout, QMessageBox
from PyQt4.QtCore import QUrl
from PyQt4.QtWebKit import QWebView
from utilities import GeneralUtilities, QtHandler
# set up logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
fh = logging.FileHandler(os.path.join(GeneralUtilities.getLogDir(), 'creepy_main.log'))
fh.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(levelname)s:%(asctime)s In %(filename)s:%(lineno)d: %(message)s')
fh.setFormatter(formatter)
guiLoggingHandler = QtHandler.QtHandler()
guiLoggingHandler.setFormatter(formatter)
logger.addHandler(fh)
logger.addHandler(guiLoggingHandler)
class Googleplus(InputPlugin):
name = 'googleplus'
hasWizard = True
hasRateLimitInfo = False
hasLocationBasedMode = False
def __init__(self):
# Try and read the labels file
self.http = httplib2.Http()
labels_config = self.getConfigObj(self.name + '.labels')
try:
self.labels = labels_config['labels']
except Exception, err:
self.labels = None
            logger.error('Could not load the labels file for the {0} plugin.'.format(self.name))
logger.error(err)
self.config, self.options_string = self.readConfiguration('string_options')
self.options_boolean = self.readConfiguration('boolean_options')[1]
self.service = None
def searchForTargets(self, search_term):
possibleTargets = []
logger.debug('Searching for Targets from Google+ Plugin. Search term is : {0}'.format(search_term))
try:
if self.service is None:
self.service = self.getAuthenticatedService()
peopleResource = self.service.people()
peopleDocument = peopleResource.search(query=search_term).execute()
if 'items' in peopleDocument:
logger.debug('Google+ returned ' + str(len(peopleDocument['items'])) + ' results')
for person in peopleDocument['items']:
target = {'pluginName': 'GooglePlus Plugin',
'targetUserid': person['id'],
'targetUsername': person['displayName'],
'targetPicture': 'profile_pic_%s' % person['id'],
'targetFullname': person['displayName']}
# save the pic in the temp folder to show it later
filename = 'profile_pic_%s' % person['id']
temp_file = os.path.join(GeneralUtilities.getTempDir(), filename)
                    # Retrieve and save the profile photo only if it does not exist
if not os.path.exists(temp_file):
urllib.urlretrieve(person['image']['url'], temp_file)
possibleTargets.append(target)
except Exception, err:
logger.error(err)
logger.error('Error searching for targets in Google+ plugin.')
return possibleTargets
def getAuthenticatedService(self):
try:
credentials = AccessTokenCredentials.new_from_json(self.options_string['hidden_credentials'])
if credentials.invalid:
self.http = credentials.refresh(self.http)
else:
self.http = credentials.authorize(self.http)
service = build('plus', 'v1', http=self.http)
return service
except Exception, e:
logger.error(e)
logger.error('Error getting an authentication context')
return None
def runConfigWizard(self):
try:
flow = OAuth2WebServerFlow(self.options_string['hidden_application_clientid'],
self.options_string['hidden_application_secret'],
scope='https://www.googleapis.com/auth/plus.login',
redirect_uri='urn:ietf:wg:oauth:2.0:oob')
authorizationURL = flow.step1_get_authorize_url()
self.wizard = QWizard()
self.wizard.setWindowTitle('Google+ plugin configuration wizard')
page1 = QWizardPage()
page2 = QWizardPage()
layout1 = QVBoxLayout()
layout2 = QVBoxLayout()
layoutInputPin = QHBoxLayout()
label1a = QLabel(
'Click next to connect to Google. Please login with your account and follow the instructions in order \
to authorize creepy')
label2a = QLabel(
'Copy the code that you will receive once you authorize cree.py in the field below and click finish')
codeLabel = QLabel('Code')
inputCode = QLineEdit()
inputCode.setObjectName('inputCode')
html = QWebView()
# Url decode the authorization url so that scope and redirect url gets decoded.
# QWebView will fail to load the url correctly otherwise
html.load(QUrl(urllib.unquote_plus(authorizationURL)))
layout1.addWidget(label1a)
layout2.addWidget(html)
layout2.addWidget(label2a)
layoutInputPin.addWidget(codeLabel)
layoutInputPin.addWidget(inputCode)
layout2.addLayout(layoutInputPin)
page1.setLayout(layout1)
page2.setLayout(layout2)
page2.registerField('inputCode*', inputCode)
self.wizard.addPage(page1)
self.wizard.addPage(page2)
self.wizard.resize(800, 600)
if self.wizard.exec_():
try:
credentials = flow.step2_exchange(str(self.wizard.field('inputCode').toString()).strip(), self.http)
self.options_string['hidden_credentials'] = credentials.to_json()
self.saveConfiguration(self.config)
except Exception, err:
logger.error(err)
self.showWarning('Error completing the wizard',
'We were unable to obtain the credentials for your account, please try to run the\
wizard again.')
except Exception, err:
logger.error(err)
def showWarning(self, title, text):
try:
QMessageBox.warning(self.wizard, title, text)
except Exception, err:
            logger.error(err)
def isConfigured(self):
if self.service is None:
self.service = self.getAuthenticatedService()
try:
peopleResource = self.service.people()
personDocument = peopleResource.get(userId='me').execute()
return True, ''
except Exception, err:
logger.error(err)
return False, err
def returnAnalysis(self, target, search_params):
if self.service is None:
self.service = self.getAuthenticatedService()
locations_list = []
try:
logger.debug('Attempting to retrieve the activities from Google Plus for user ' + target['targetUserid'])
activitiesResource = self.service.activities()
request = activitiesResource.list(userId=target['targetUserid'], collection='public')
while request:
activitiesDocument = request.execute()
logger.debug('{0} activities were retrieved from GooglePlus Plugin'.format(
str(len(activitiesDocument['items']))))
for activity in activitiesDocument['items']:
                    # activity is a dict parsed from JSON, so test key
                    # membership rather than object attributes
                    if 'location' in activity:
loc = {}
loc['plugin'] = 'googleplus'
loc['context'] = activity['object']['content']
loc['infowindow'] = self.constructContextInfoWindow(activity, target['targetUsername'])
loc['date'] = dateutil.parser.parse(activity['published'])
loc['lat'] = activity['location']['position']['latitude']
loc['lon'] = activity['location']['position']['longitude']
loc['shortName'] = activity['location']['displayName']
loc['accuracy'] = 'high'
locations_list.append(loc)
                    elif 'geocode' in activity:
loc = {}
loc['plugin'] = 'googleplus'
loc['context'] = activity['object']['content']
loc['infowindow'] = self.constructContextInfoWindow(activity, target['targetUsername'])
loc['date'] = dateutil.parser.parse(activity['published'])
loc['lat'], loc['lon'] = activity['geocode'].split(' ')
loc['shortName'] = activity['placeName']
loc['accuracy'] = 'high'
locations_list.append(loc)
request = self.service.activities().list_next(request, activitiesDocument)
logger.debug('{0} locations were retrieved from GooglePlus Plugin'.format(str(len(locations_list))))
except Exception, e:
logger.error(e)
logger.error('Error getting locations from GooglePlus plugin')
return locations_list, None
def constructContextInfoWindow(self, activity, username):
html = unicode(self.options_string['infowindow_html'], 'utf-8')
return html.replace('@TEXT@', activity['object']['content']).replace('@DATE@', activity['published']).replace(
'@PLUGIN@', u'googleplus').replace('@USERNAME@', username)
def getLabelForKey(self, key):
"""
read the plugin_name.labels
file and try to get label text for the key that was asked
"""
if not self.labels:
return key
if key not in self.labels.keys():
return key
return self.labels[key]
| gpl-3.0 | -3,139,426,532,070,948,000 | 46.041096 | 120 | 0.583964 | false |
magchips/labalyzer | setup.py | 1 | 3470 | #!/usr/bin/env python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2010 <Atreju Tauschinsky> <[email protected]>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
###################### DO NOT TOUCH THIS (HEAD TO THE SECOND PART) ######################
import os
import sys
try:
import DistUtilsExtra.auto
except ImportError:
print >> sys.stderr, 'To build labalyzer you need https://launchpad.net/python-distutils-extra'
sys.exit(1)
assert DistUtilsExtra.auto.__version__ >= '2.18', 'needs DistUtilsExtra.auto >= 2.18'
def update_config(values={}):
oldvalues = {}
try:
fin = file('labalyzer_lib/labalyzerconfig.py', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
fields = line.split(' = ') # Separate variable from value
if fields[0] in values:
oldvalues[fields[0]] = fields[1].strip()
line = "%s = %s\n" % (fields[0], values[fields[0]])
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer_lib/labalyzerconfig.py")
sys.exit(1)
return oldvalues
def update_desktop_file(datadir):
try:
fin = file('labalyzer.desktop.in', 'r')
fout = file(fin.name + '.new', 'w')
for line in fin:
if 'Icon=' in line:
line = "Icon=%s\n" % (datadir + 'media/labalyzer.svg')
fout.write(line)
fout.flush()
fout.close()
fin.close()
os.rename(fout.name, fin.name)
except (OSError, IOError), e:
print ("ERROR: Can't find labalyzer.desktop.in")
sys.exit(1)
class InstallAndUpdateDataDirectory(DistUtilsExtra.auto.install_auto):
def run(self):
values = {'__labalyzer_data_directory__': "'%s'" % (self.prefix + '/share/labalyzer/'),
'__version__': "'%s'" % self.distribution.get_version()}
previous_values = update_config(values)
update_desktop_file(self.prefix + '/share/labalyzer/')
DistUtilsExtra.auto.install_auto.run(self)
update_config(previous_values)
##################################################################################
###################### YOU SHOULD MODIFY ONLY WHAT IS BELOW ######################
##################################################################################
DistUtilsExtra.auto.setup(
name='labalyzer',
version='0.1',
license='GPL-3',
#author='Your Name',
#author_email='[email protected]',
#description='UI for managing …',
#long_description='Here a longer description',
#url='https://launchpad.net/labalyzer',
cmdclass={'install': InstallAndUpdateDataDirectory}
)
| gpl-3.0 | 3,448,645,533,441,506,300 | 34.030303 | 99 | 0.580161 | false |
olga-perederieieva/pyDEA | pyDEA/main.py | 1 | 3347 | ''' This module contains methods for running pyDEA from terminal.
'''
import sys
from pyDEA.core.data_processing.parameters import parse_parameters_from_file
from pyDEA.core.utils.run_routine import RunMethodTerminal
from pyDEA.core.utils.dea_utils import clean_up_pickled_files, get_logger
def main(filename, output_format='xlsx', output_dir='', sheet_name_usr=''):
''' Main function to run DEA models from terminal.
Args:
filename (str): path to file with parameters.
output_format (str, optional): file format of solution file.
This value is used
only if OUTPUT_FILE in parameters is empty or set to auto.
Defaults to xlsx.
output_dir (str, optional): directory where solution must
be written.
If it is not given, solution will be written to current folder.
This value is used
only if OUTPUT_FILE in parameters is empty or set to auto.
sheet_name_usr (str, optional): name of the sheet in xls- or
xlsx-file with
input data from which data will be read. If input data file is
in csv format,
this value is ignored.
'''
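    # Illustrative invocation from a terminal (file names and paths below
    # are hypothetical):
    #   python main.py params.txt xlsx /tmp/solutions Sheet1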
print('Params file', filename, 'output_format', output_format,
'output_dir', output_dir, 'sheet_name_usr', sheet_name_usr)
logger = get_logger()
logger.info('Params file "%s", output format "%s", output directory "%s", sheet name "%s".',
filename, output_format, output_dir, sheet_name_usr)
params = parse_parameters_from_file(filename)
params.print_all_parameters()
run_method = RunMethodTerminal(params, sheet_name_usr, output_format,
output_dir)
run_method.run(params)
clean_up_pickled_files()
logger.info('pyDEA exited.')
if __name__ == '__main__':
args = sys.argv[1:]
logger = get_logger()
logger.info('pyDEA started as a console application.')
print('args = {0}'.format(args))
if len(args) < 1 or len(args) > 4:
logger.error('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments, but %d were given.',
len(args))
raise ValueError('Invalid number of input arguments. At least one '
'argument must be given, no more than 4 arguments'
' are expected. Input arguments are:\n (1) path to'
' file with parameters (compulsory)\n'
'(2) output file format, possible values: xls, xlsx'
' and csv, default value is xlsx (optional), this'
' value is used only if auto or empty string was set'
' for OUTPUT_FILE in parameters file \n'
'(3) output directory (optional, if not specified,'
' output is written to current directory)\n'
'(4) sheet name from which data should be read '
'(optional, if not specified, data is read from'
' the first sheet)')
try:
main(*args)
except Exception as excinfo:
logger.error(excinfo)
raise
| mit | 2,119,575,361,617,346,300 | 45.486111 | 96 | 0.574544 | false |
wireservice/csvkit | setup.py | 1 | 2897 | #!/usr/bin/env python
import sys
from setuptools import setup
install_requires = [
'agate>=1.6.1',
'agate-excel>=0.2.2',
'agate-dbf>=0.2.0',
'agate-sql>=0.5.3',
'six>=1.6.1',
'setuptools',
]
if sys.version_info < (2, 7):
install_requires.append('argparse>=1.2.1')
install_requires.append('ordereddict>=1.1')
install_requires.append('simplejson>=3.6.3')
setup(
name='csvkit',
version='1.0.6',
description='A suite of command-line tools for working with CSV, the king of tabular file formats.',
long_description=open('README.rst').read(),
author='Christopher Groskopf',
author_email='[email protected]',
url='https://github.com/wireservice/csvkit',
project_urls={
'Documentation': 'https://csvkit.readthedocs.io/en/latest/',
},
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
],
packages=[
'csvkit',
'csvkit.convert',
'csvkit.utilities'
],
entry_points={
'console_scripts': [
'csvclean = csvkit.utilities.csvclean:launch_new_instance',
'csvcut = csvkit.utilities.csvcut:launch_new_instance',
'csvformat = csvkit.utilities.csvformat:launch_new_instance',
'csvgrep = csvkit.utilities.csvgrep:launch_new_instance',
'csvjoin = csvkit.utilities.csvjoin:launch_new_instance',
'csvjson = csvkit.utilities.csvjson:launch_new_instance',
'csvlook = csvkit.utilities.csvlook:launch_new_instance',
'csvpy = csvkit.utilities.csvpy:launch_new_instance',
'csvsort = csvkit.utilities.csvsort:launch_new_instance',
'csvsql = csvkit.utilities.csvsql:launch_new_instance',
'csvstack = csvkit.utilities.csvstack:launch_new_instance',
'csvstat = csvkit.utilities.csvstat:launch_new_instance',
'in2csv = csvkit.utilities.in2csv:launch_new_instance',
'sql2csv = csvkit.utilities.sql2csv:launch_new_instance'
]
},
install_requires=install_requires
)
| mit | 7,940,495,066,647,459,000 | 37.626667 | 104 | 0.622023 | false |
lizardsystem/lizard-kpi | lizard_kpi/testsettings.py | 1 | 3003 | import os
from lizard_ui.settingshelper import setup_logging
from lizard_ui.settingshelper import STATICFILES_FINDERS
DEBUG = True
TEMPLATE_DEBUG = True
# SETTINGS_DIR allows media paths and so to be relative to this settings file
# instead of hardcoded to c:\only\on\my\computer.
SETTINGS_DIR = os.path.dirname(os.path.realpath(__file__))
# BUILDOUT_DIR is for access to the "surrounding" buildout, for instance for
# BUILDOUT_DIR/var/static files to give django-staticfiles a proper place
# to place all collected static files.
BUILDOUT_DIR = os.path.abspath(os.path.join(SETTINGS_DIR, '..'))
LOGGING = setup_logging(BUILDOUT_DIR)
# ENGINE: 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
# In case of geodatabase, prepend with:
# django.contrib.gis.db.backends.(postgis)
DATABASES = {
# If you want to use another database, consider putting the database
# settings in localsettings.py. Otherwise, if you change the settings in
# the current file and commit them to the repository, other developers will
# also use these settings whether they have that database or not.
# One of those other developers is Jenkins, our continuous integration
# solution. Jenkins can only run the tests of the current application when
# the specified database exists. When the tests cannot run, Jenkins sees
# that as an error.
'default': {
'NAME': os.path.join(BUILDOUT_DIR, 'var', 'sqlite', 'test.db'),
'ENGINE': 'django.db.backends.sqlite3',
'USER': '',
'PASSWORD': '',
'HOST': '', # empty string for localhost.
'PORT': '', # empty string for default.
}
}
SITE_ID = 1
INSTALLED_APPS = [
'lizard_kpi',
'lizard_ui',
'staticfiles',
'compressor',
'south',
'django_nose',
'django_extensions',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.gis',
'django.contrib.sites',
]
ROOT_URLCONF = 'lizard_kpi.urls'
TEMPLATE_CONTEXT_PROCESSORS = (
# Uncomment this one if you use lizard-map.
# 'lizard_map.context_processors.processor.processor',
# Default django 1.3 processors.
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.contrib.messages.context_processors.messages"
)
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# Used for django-staticfiles (and for media files)
STATIC_URL = '/static_media/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BUILDOUT_DIR, 'var', 'static')
MEDIA_ROOT = os.path.join(BUILDOUT_DIR, 'var', 'media')
STATICFILES_FINDERS = STATICFILES_FINDERS
try:
# Import local settings that aren't stored in svn/git.
from lizard_kpi.local_testsettings import *
except ImportError:
pass
| gpl-3.0 | 259,069,673,091,839,230 | 34.329412 | 79 | 0.699301 | false |
ZachMassia/platformio | platformio/builder/scripts/nordicnrf51.py | 1 | 2079 | # Copyright 2014-2016 Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Builder for Nordic nRF51 series ARM microcontrollers.
"""
from os.path import join
from SCons.Script import (COMMAND_LINE_TARGETS, AlwaysBuild, Default,
DefaultEnvironment, SConscript)
env = DefaultEnvironment()
SConscript(env.subst(join("$PIOBUILDER_DIR", "scripts", "basearm.py")))
if env.subst("$BOARD") == "rfduino":
env.Append(
CPPFLAGS=["-fno-builtin"],
LINKFLAGS=["--specs=nano.specs"]
)
env.Replace(
UPLOADER=join("$PIOPACKAGES_DIR", "tool-rfdloader", "rfdloader"),
UPLOADERFLAGS=["-q", "$UPLOAD_PORT"],
UPLOADCMD='"$UPLOADER" $UPLOADERFLAGS $SOURCES'
)
#
# Target: Build executable and linkable firmware
#
target_elf = env.BuildProgram()
#
# Target: Build the .hex file
#
if "uploadlazy" in COMMAND_LINE_TARGETS:
target_firm = join("$BUILD_DIR", "firmware.hex")
else:
target_firm = env.ElfToHex(join("$BUILD_DIR", "firmware"), target_elf)
#
# Target: Print binary size
#
target_size = env.Alias("size", target_elf, "$SIZEPRINTCMD")
AlwaysBuild(target_size)
#
# Target: Upload by default .hex file
#
if env.subst("$BOARD") == "rfduino":
upload = env.Alias(
["upload", "uploadlazy"], target_firm,
[lambda target, source, env: env.AutodetectUploadPort(), "$UPLOADCMD"])
else:
upload = env.Alias(["upload", "uploadlazy"], target_firm, env.UploadToDisk)
AlwaysBuild(upload)
#
# Target: Define targets
#
Default([target_firm, target_size])
| apache-2.0 | 3,195,199,141,947,326,000 | 26 | 79 | 0.685426 | false |
metglobal/openexchangerates | openexchangerates/tests.py | 1 | 3483 | import unittest
from decimal import Decimal
from httpretty import HTTPretty, httprettified
import openexchangerates
from datetime import date as Date
class TestOpenExchangeRates(unittest.TestCase):
_FIXTURE_CURRENCIES = """{
"AED": "United Arab Emirates Dirham",
"AFN": "Afghan Afghani",
"ALL": "Albanian Lek"
}
"""
_FIXTURE_LATEST = """{
"disclaimer": "<Disclaimer data>",
"license": "<License data>",
"timestamp": 1358150409,
"base": "USD",
"rates": {
"AED": 3.666311,
"AFN": 51.2281,
"ALL": 104.748751
}
}
"""
_FIXTURE_HISTORICAL = """{
"disclaimer": "<Disclaimer data>",
"license": "<License data>",
"timestamp": 1358150409,
"base": "USD",
"rates": {
"AED": 3.666311,
"AFN": 51.2281,
"ALL": 104.748751
}
}
"""
@httprettified
def test_historical(self):
"""Tests openexchangerates.OpenExchangeRateClient.historical``"""
client = openexchangerates.OpenExchangeRatesClient('DUMMY_API_KEY')
date = Date.fromtimestamp(1358150409)
HTTPretty.register_uri(HTTPretty.GET, client.ENDPOINT_HISTORICAL %
date.strftime("%Y-%m-%d"),
                               body=self._FIXTURE_HISTORICAL)
historical = client.historical(date)
self.assertIn('rates', historical)
rates = historical['rates']
self.assertEqual(len(rates), 3)
self.assertIn('AED', rates)
self.assertEqual(rates['AED'], Decimal('3.666311'))
self.assertIn('AFN', rates)
self.assertEqual(rates['AFN'], Decimal('51.2281'))
self.assertIn('ALL', rates)
self.assertEqual(rates['ALL'], Decimal('104.748751'))
@httprettified
def test_currencies(self):
"""Tests ``openexchangerates.OpenExchangeRateClient\.currencies``"""
client = openexchangerates.OpenExchangeRatesClient('DUMMY_API_KEY')
HTTPretty.register_uri(HTTPretty.GET, client.ENDPOINT_CURRENCIES,
body=self._FIXTURE_CURRENCIES)
currencies = client.currencies()
self.assertEqual(len(currencies), 3)
self.assertIn('AED', currencies)
self.assertIn('AFN', currencies)
self.assertIn('ALL', currencies)
@httprettified
def test_latest(self):
"""Tests openexchangerates.OpenExchangeRateClient.latest``"""
client = openexchangerates.OpenExchangeRatesClient('DUMMY_API_KEY')
HTTPretty.register_uri(HTTPretty.GET, client.ENDPOINT_LATEST,
body=self._FIXTURE_LATEST)
latest = client.latest()
self.assertIn('rates', latest)
rates = latest['rates']
self.assertEqual(len(rates), 3)
self.assertIn('AED', rates)
self.assertEqual(rates['AED'], Decimal('3.666311'))
self.assertIn('AFN', rates)
self.assertEqual(rates['AFN'], Decimal('51.2281'))
self.assertIn('ALL', rates)
self.assertEqual(rates['ALL'], Decimal('104.748751'))
@httprettified
def test_exception(self):
"""Tests ``openexchangerates.OpenExchangeRateClientException``"""
client = openexchangerates.OpenExchangeRatesClient('DUMMY_API_KEY')
HTTPretty.register_uri(HTTPretty.GET, client.ENDPOINT_LATEST,
status=404)
with(self.assertRaises(
openexchangerates.OpenExchangeRatesClientException)) as e:
client.latest()
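# Worked example (illustrative, not part of the original tests): converting
# between two currencies via the USD-based rates the client returns. The
# figures below come from the fixtures above.
def _example_cross_rate():
    rates = {'AED': Decimal('3.666311'), 'AFN': Decimal('51.2281')}
    amount_usd = Decimal('100') / rates['AED']  # 100 AED expressed in USD
    return amount_usd * rates['AFN']            # and then converted to AFN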
| mit | 2,540,546,426,846,509,000 | 33.485149 | 76 | 0.612116 | false |
Oli76/rwslib | rwslib/builders.py | 1 | 77124 | # -*- coding: utf-8 -*-
__author__ = 'isparks'
import uuid
from xml.etree import cElementTree as ET
from datetime import datetime
from string import ascii_letters
from rwslib.builder_constants import *
"""
builders.py provides convenience classes for building ODM documents for clinical data and metadata post messages.
"""
# -----------------------------------------------------------------------------------------------------------------------
# Constants
VALID_ID_CHARS = ascii_letters + '_'
# -----------------------------------------------------------------------------------------------------------------------
# Utilities
def now_to_iso8601():
"""Returns NOW date/time as a UTC date/time formated as iso8601 string"""
utc_date = datetime.utcnow()
return dt_to_iso8601(utc_date)
def dt_to_iso8601(dt):
"""Turn a datetime into an ISO8601 formatted string"""
return dt.strftime("%Y-%m-%dT%H:%M:%S")
def bool_to_yes_no(val):
"""Convert True/False to Yes/No"""
return 'Yes' if val else 'No'
def bool_to_true_false(val):
"""Convert True/False to TRUE / FALSE"""
return 'TRUE' if val else 'FALSE'
def indent(elem, level=0):
"""Indent a elementree structure"""
i = "\n" + level * " "
if len(elem) > 0:
if not elem.text or not elem.text.strip():
elem.text = i + " "
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
def make_element(builder, tag, content):
"""Make an element with this tag and text content"""
builder.start(tag, {})
builder.data(content) # Must be UTF-8 encoded
builder.end(tag)
# -----------------------------------------------------------------------------------------------------------------------
# Classes
class ODMElement(object):
"""Base class for ODM XML element classes"""
def __call__(self, *args):
"""Collect all children passed in call"""
for child in args:
self << child
return self
def __lshift__(self, other):
"""__lshift__ should be overridden in descendant classes to accept child elements and incorporate them.
By default takes no child elements
"""
raise ValueError("%s takes no child elements" % self.__class__.__name__)
def add(self, *args):
"""Like call but adds a set of args"""
for child in args:
self << child
return self
def set_single_attribute(self, other, trigger_klass, property_name):
"""Used to set guard the setting of an attribute which is singular and can't be set twice"""
if isinstance(other, trigger_klass):
# Check property exists
if not hasattr(self, property_name):
raise AttributeError("%s has no property %s" % (self.__class__.__name__, property_name))
if getattr(self, property_name) is None:
setattr(self, property_name, other)
else:
raise ValueError(
'%s already has a %s element set.' % (self.__class__.__name__, other.__class__.__name__,))
def set_list_attribute(self, other, trigger_klass, property_name):
"""Used to set guard the setting of a list attribute, ensuring the same element is not added twice."""
# Check property exists
if isinstance(other, trigger_klass):
if not hasattr(self, property_name):
raise AttributeError("%s has no property %s" % (self.__class__.__name__, property_name))
val = getattr(self, property_name, [])
if other in val:
raise ValueError("%s already exists in %s" % (other.__class__.__name__, self.__class__.__name__))
else:
val.append(other)
setattr(self, property_name, val)
class UserRef(ODMElement):
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start("UserRef", dict(UserOID=self.oid))
builder.end("UserRef")
class LocationRef(ODMElement):
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start("LocationRef", dict(LocationOID=self.oid))
builder.end("LocationRef")
class ReasonForChange(ODMElement):
def __init__(self, reason):
self.reason = reason
def build(self, builder):
builder.start("ReasonForChange", {})
builder.data(self.reason)
builder.end("ReasonForChange")
class DateTimeStamp(ODMElement):
def __init__(self, date_time):
self.date_time = date_time
def build(self, builder):
builder.start("DateTimeStamp", {})
if isinstance(self.date_time, datetime):
builder.data(dt_to_iso8601(self.date_time))
else:
builder.data(self.date_time)
builder.end("DateTimeStamp")
class AuditRecord(ODMElement):
"""AuditRecord is supported only by ItemData in Rave"""
EDIT_MONITORING = 'Monitoring'
EDIT_DATA_MANAGEMENT = 'DataManagement'
EDIT_DB_AUDIT = 'DBAudit'
EDIT_POINTS = [EDIT_MONITORING, EDIT_DATA_MANAGEMENT, EDIT_DB_AUDIT]
def __init__(self, edit_point=None, used_imputation_method=None, identifier=None, include_file_oid=None):
self._edit_point = None
self.edit_point = edit_point
self.used_imputation_method = used_imputation_method
self._id = None
self.id = identifier
self.include_file_oid = include_file_oid
self.user_ref = None
self.location_ref = None
self.reason_for_change = None
self.date_time_stamp = None
@property
def id(self):
return self._id
@id.setter
def id(self, value):
if value not in [None, ''] and str(value).strip() != '':
val = str(value).strip()[0]
if val not in VALID_ID_CHARS:
raise AttributeError('%s id cannot start with "%s" character' % (self.__class__.__name__, val,))
self._id = value
@property
def edit_point(self):
return self._edit_point
@edit_point.setter
def edit_point(self, value):
if value is not None:
if value not in self.EDIT_POINTS:
raise AttributeError('%s edit_point must be one of %s not %s' % (
self.__class__.__name__, ','.join(self.EDIT_POINTS), value,))
self._edit_point = value
def build(self, builder):
params = {}
if self.edit_point is not None:
params["EditPoint"] = self.edit_point
if self.used_imputation_method is not None:
params['UsedImputationMethod'] = bool_to_yes_no(self.used_imputation_method)
if self.id is not None:
params['ID'] = str(self.id)
if self.include_file_oid is not None:
params['mdsol:IncludeFileOID'] = bool_to_yes_no(self.include_file_oid)
builder.start("AuditRecord", params)
if self.user_ref is None:
raise ValueError("User Reference not set.")
self.user_ref.build(builder)
if self.location_ref is None:
raise ValueError("Location Reference not set.")
self.location_ref.build(builder)
if self.date_time_stamp is None:
raise ValueError("DateTime not set.")
self.date_time_stamp.build(builder)
# Optional
if self.reason_for_change is not None:
self.reason_for_change.build(builder)
builder.end("AuditRecord")
def __lshift__(self, other):
if not isinstance(other, (UserRef, LocationRef, DateTimeStamp, ReasonForChange,)):
raise ValueError("AuditRecord cannot accept a child element of type %s" % other.__class__.__name__)
# Order is important, apparently
self.set_single_attribute(other, UserRef, 'user_ref')
self.set_single_attribute(other, LocationRef, 'location_ref')
self.set_single_attribute(other, DateTimeStamp, 'date_time_stamp')
self.set_single_attribute(other, ReasonForChange, 'reason_for_change')
return other
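# Hedged sketch (not in the original source): an AuditRecord must be given
# a UserRef, a LocationRef and a DateTimeStamp before build() is called,
# otherwise build() raises ValueError; ReasonForChange is optional. All
# OIDs and values here are hypothetical.
def _example_audit_record():
    return AuditRecord(edit_point=AuditRecord.EDIT_DATA_MANAGEMENT,
                       used_imputation_method=False,
                       identifier="X2011",
                       include_file_oid=False)(
        UserRef("jsmith"),
        LocationRef("SITE1"),
        ReasonForChange("Data entry error"),
        DateTimeStamp(datetime(2015, 9, 11, 10, 15, 22)),
    )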
class TransactionalElement(ODMElement):
"""Models an ODM Element that is allowed a transaction type. Different elements have different
allowed transaction types"""
ALLOWED_TRANSACTION_TYPES = []
def __init__(self, transaction_type):
self._transaction_type = None
self.transaction_type = transaction_type
@property
def transaction_type(self):
return self._transaction_type
@transaction_type.setter
def transaction_type(self, value):
if value is not None:
if value not in self.ALLOWED_TRANSACTION_TYPES:
raise AttributeError('%s transaction_type element must be one of %s not %s' % (
self.__class__.__name__, ','.join(self.ALLOWED_TRANSACTION_TYPES), value,))
self._transaction_type = value
class MdsolQuery(ODMElement):
"""MdsolQuery extension element for Queries at item level only"""
def __init__(self, value=None, query_repeat_key=None, recipient=None, status=None, requires_response=None,
response=None):
self.value = value
self.query_repeat_key = query_repeat_key
self.recipient = recipient
self._status = None
self.status = status
self.requires_response = requires_response
self.response = response
@property
def status(self):
return self._status
@status.setter
def status(self, value):
if value is not None:
if not isinstance(value, QueryStatusType):
raise AttributeError("%s action type is invalid in mdsol:Query." % (value,))
self._status = value
def build(self, builder):
params = {}
if self.value is not None:
params['Value'] = str(self.value)
if self.query_repeat_key is not None:
params['QueryRepeatKey'] = str(self.query_repeat_key)
if self.recipient is not None:
params['Recipient'] = str(self.recipient)
if self.status is not None:
params['Status'] = self.status.value
if self.requires_response is not None:
params['RequiresResponse'] = bool_to_yes_no(self.requires_response)
# When closing a query
if self.response is not None:
params['Response'] = str(self.response)
builder.start("mdsol:Query", params)
builder.end("mdsol:Query")
class ItemData(TransactionalElement):
"""Models the ODM ItemData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert', 'Context', 'Remove']
def __init__(self, itemoid, value, specify_value=None, transaction_type=None, lock=None, freeze=None, verify=None):
        # Name the class explicitly: super(self.__class__, ...) would
        # recurse infinitely if this class were ever subclassed.
        super(ItemData, self).__init__(transaction_type)
self.itemoid = itemoid
self.value = value
self.specify_value = specify_value
self.lock = lock
self.freeze = freeze
self.verify = verify
self.audit_record = None
self.queries = []
self.measurement_unit_ref = None
def build(self, builder):
"""Build XML by appending to builder
<ItemData ItemOID="MH_DT" Value="06 Jan 2009" TransactionType="Insert">
"""
params = dict(ItemOID=self.itemoid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.value in [None, '']:
params['IsNull'] = 'Yes'
else:
params['Value'] = str(self.value)
if self.specify_value is not None:
params['mdsol:SpecifyValue'] = self.specify_value
if self.lock is not None:
params['mdsol:Lock'] = bool_to_yes_no(self.lock)
if self.freeze is not None:
params['mdsol:Freeze'] = bool_to_yes_no(self.freeze)
if self.verify is not None:
params['mdsol:Verify'] = bool_to_yes_no(self.verify)
builder.start("ItemData", params)
if self.audit_record is not None:
self.audit_record.build(builder)
# Measurement unit ref must be after audit record or RWS complains
if self.measurement_unit_ref is not None:
self.measurement_unit_ref.build(builder)
for query in self.queries:
query.build(builder)
builder.end("ItemData")
def __lshift__(self, other):
if not isinstance(other, (MeasurementUnitRef, AuditRecord, MdsolQuery,)):
raise ValueError("ItemData object can only receive MeasurementUnitRef, AuditRecord or MdsolQuery objects")
self.set_single_attribute(other, MeasurementUnitRef, 'measurement_unit_ref')
self.set_single_attribute(other, AuditRecord, 'audit_record')
self.set_list_attribute(other, MdsolQuery, 'queries')
return other
class ItemGroupData(TransactionalElement):
"""Models the ODM ItemGroupData object.
Note no name for the ItemGroupData element is required. This is built automatically by the form.
"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert', 'Context']
def __init__(self, transaction_type=None, item_group_repeat_key=None, whole_item_group=False):
        super(ItemGroupData, self).__init__(transaction_type)
self.item_group_repeat_key = item_group_repeat_key
self.whole_item_group = whole_item_group
self.items = {}
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, ItemData):
raise ValueError("ItemGroupData object can only receive ItemData object")
if other.itemoid in self.items:
raise ValueError("ItemGroupData object with that itemoid is already in the ItemGroupData object")
self.items[other.itemoid] = other
return other
def build(self, builder, formname):
"""Build XML by appending to builder
"""
params = dict(ItemGroupOID=formname)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.item_group_repeat_key is not None:
params["ItemGroupRepeatKey"] = str(
self.item_group_repeat_key) # may be @context for transaction type upsert or context
params["mdsol:Submission"] = "WholeItemGroup" if self.whole_item_group else "SpecifiedItemsOnly"
builder.start("ItemGroupData", params)
# Ask children
for item in self.items.values():
item.build(builder)
builder.end("ItemGroupData")
class FormData(TransactionalElement):
"""Models the ODM FormData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update']
def __init__(self, formoid, transaction_type=None, form_repeat_key=None):
        super(FormData, self).__init__(transaction_type)
self.formoid = formoid
self.form_repeat_key = form_repeat_key
self.itemgroups = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, ItemGroupData):
raise ValueError("FormData object can only receive ItemGroupData object")
self.set_list_attribute(other, ItemGroupData, 'itemgroups')
return other
def build(self, builder):
"""Build XML by appending to builder
<FormData FormOID="MH" TransactionType="Update">
"""
params = dict(FormOID=self.formoid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.form_repeat_key is not None:
params["FormRepeatKey"] = str(self.form_repeat_key)
builder.start("FormData", params)
# Ask children
for itemgroup in self.itemgroups:
itemgroup.build(builder, self.formoid)
builder.end("FormData")
class StudyEventData(TransactionalElement):
"""Models the ODM StudyEventData object"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Remove', 'Context']
def __init__(self, study_event_oid, transaction_type="Update", study_event_repeat_key=None):
        super(StudyEventData, self).__init__(transaction_type)
self.study_event_oid = study_event_oid
self.study_event_repeat_key = study_event_repeat_key
self.forms = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, FormData):
raise ValueError("StudyEventData object can only receive FormData object")
self.set_list_attribute(other, FormData, 'forms')
return other
def build(self, builder):
"""Build XML by appending to builder
<StudyEventData StudyEventOID="SCREENING" StudyEventRepeatKey="1" TransactionType="Update">
"""
params = dict(StudyEventOID=self.study_event_oid)
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
if self.study_event_repeat_key is not None:
params["StudyEventRepeatKey"] = self.study_event_repeat_key
builder.start("StudyEventData", params)
# Ask children
for form in self.forms:
form.build(builder)
builder.end("StudyEventData")
class SubjectData(TransactionalElement):
"""Models the ODM SubjectData and ODM SiteRef objects"""
ALLOWED_TRANSACTION_TYPES = ['Insert', 'Update', 'Upsert']
def __init__(self, sitelocationoid, subject_key, subject_key_type="SubjectName", transaction_type="Update"):
        super(SubjectData, self).__init__(transaction_type)
self.sitelocationoid = sitelocationoid
self.subject_key = subject_key
self.subject_key_type = subject_key_type
self.study_events = [] # Can have collection
self.audit_record = None
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (StudyEventData, AuditRecord,)):
raise ValueError("SubjectData object can only receive StudyEventData or AuditRecord object")
self.set_list_attribute(other, StudyEventData, 'study_events')
self.set_single_attribute(other, AuditRecord, 'audit_record')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(SubjectKey=self.subject_key)
params['mdsol:SubjectKeyType'] = self.subject_key_type
if self.transaction_type is not None:
params["TransactionType"] = self.transaction_type
builder.start("SubjectData", params)
# Ask children
if self.audit_record is not None:
self.audit_record.build(builder)
builder.start("SiteRef", {'LocationOID': self.sitelocationoid})
builder.end("SiteRef")
for event in self.study_events:
event.build(builder)
builder.end("SubjectData")
class ClinicalData(ODMElement):
"""Models the ODM ClinicalData object"""
def __init__(self, projectname, environment, metadata_version_oid="1"):
self.projectname = projectname
self.environment = environment
self.metadata_version_oid = metadata_version_oid
self.subject_data = None
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, SubjectData):
raise ValueError("ClinicalData object can only receive SubjectData object")
self.set_single_attribute(other, SubjectData, 'subject_data')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(MetaDataVersionOID=self.metadata_version_oid,
StudyOID="%s (%s)" % (self.projectname, self.environment,),
)
builder.start("ClinicalData", params)
# Ask children
if self.subject_data is not None:
self.subject_data.build(builder)
builder.end("ClinicalData")
class ODM(ODMElement):
"""Models the ODM object"""
FILETYPE_TRANSACTIONAL = 'Transactional'
FILETYPE_SNAPSHOT = 'Snapshot'
def __init__(self, originator, description="", creationdatetime=now_to_iso8601(), fileoid=None, filetype=None):
self.originator = originator # Required
self.description = description
self.creationdatetime = creationdatetime
# filetype will always be "Transactional"
# ODM version will always be 1.3
# Granularity="SingleSubject"
# AsOfDateTime always OMITTED (it's optional)
self.clinical_data = None
self.study = None
self.filetype = ODM.FILETYPE_TRANSACTIONAL if filetype is None else ODM.FILETYPE_SNAPSHOT
# Create unique fileoid if none given
self.fileoid = str(uuid.uuid4()) if fileoid is None else fileoid
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (ClinicalData, Study,)):
raise ValueError("ODM object can only receive ClinicalData or Study object")
self.set_single_attribute(other, ClinicalData, 'clinical_data')
self.set_single_attribute(other, Study, 'study')
return other
def getroot(self):
"""Build XML object, return the root"""
builder = ET.TreeBuilder()
params = dict(ODMVersion="1.3",
FileType=self.filetype,
CreationDateTime=self.creationdatetime,
Originator=self.originator,
FileOID=self.fileoid,
xmlns="http://www.cdisc.org/ns/odm/v1.3",
)
params['xmlns:mdsol'] = "http://www.mdsol.com/ns/odm/metadata"
if self.description:
params['Description'] = self.description
builder.start("ODM", params)
# Ask the children
if self.study is not None:
self.study.build(builder)
if self.clinical_data is not None:
self.clinical_data.build(builder)
builder.end("ODM")
return builder.close()
def __str__(self):
doc = self.getroot()
indent(doc)
header = '<?xml version="1.0" encoding="utf-8" ?>\n'
return header + ET.tostring(doc, encoding='utf-8').decode('utf-8')
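# End-to-end usage sketch (illustrative, not part of the original module):
# composing a transactional ODM message with the builder classes above.
# Project, site and OID values are hypothetical placeholders.
def _example_build_odm_message():
    odm = ODM("test system")(
        ClinicalData("MYPROJECT", "DEV")(
            SubjectData("MDSOL", "SUBJECT001", transaction_type="Insert")(
                StudyEventData("SCREENING")(
                    FormData("VITALS", transaction_type="Insert")(
                        ItemGroupData()(
                            ItemData("HEIGHT", 172.5),
                            ItemData("WEIGHT", 68.0),
                        )
                    )
                )
            )
        )
    )
    return str(odm)  # serialized XML, ready to POST to RWS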
# -----------------------------------------------------------------------------------------------------------------------
# Metadata Objects
class GlobalVariables(ODMElement):
"""GlobalVariables Metadata element"""
def __init__(self, protocol_name, name=None, description=''):
"""Name and description are not important. protocol_name maps to the Rave project name"""
self.protocol_name = protocol_name
self.name = name if name is not None else protocol_name
self.description = description
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("GlobalVariables", {})
make_element(builder, 'StudyName', self.name)
make_element(builder, 'StudyDescription', self.description)
make_element(builder, 'ProtocolName', self.protocol_name)
builder.end("GlobalVariables")
class TranslatedText(ODMElement):
"""Represents a language and a translated text for that language"""
def __init__(self, text, lang=None):
self.text = text
self.lang = lang
def build(self, builder):
"""Build XML by appending to builder"""
params = {}
if self.lang is not None:
params['xml:lang'] = self.lang
builder.start("TranslatedText", params)
builder.data(self.text)
builder.end("TranslatedText")
class Symbol(ODMElement):
def __init__(self):
self.translations = []
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, TranslatedText):
raise ValueError("Symbol can only accept TranslatedText objects as children")
self.set_list_attribute(other, TranslatedText, 'translations')
return other
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("Symbol", {})
for child in self.translations:
child.build(builder)
builder.end("Symbol")
class MeasurementUnit(ODMElement):
"""A measurement unit"""
def __init__(self,
oid,
name,
unit_dictionary_name=None,
constant_a=1,
constant_b=1,
constant_c=0,
constant_k=0,
standard_unit=False):
self.symbols = []
self.oid = oid
self.name = name
self.unit_dictionary_name = unit_dictionary_name
self.constant_a = constant_a
self.constant_b = constant_b
self.constant_c = constant_c
self.constant_k = constant_k
self.standard_unit = standard_unit
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid,
Name=self.name)
if self.unit_dictionary_name:
params['mdsol:UnitDictionaryName'] = self.unit_dictionary_name
for suffix in ['A', 'B', 'C', 'K']:
val = getattr(self, 'constant_{0}'.format(suffix.lower()))
params['mdsol:Constant{0}'.format(suffix)] = str(val)
if self.standard_unit:
params['mdsol:StandardUnit'] = 'Yes'
builder.start("MeasurementUnit", params)
for child in self.symbols:
child.build(builder)
builder.end("MeasurementUnit")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, Symbol):
raise ValueError("MeasurementUnits object can only receive Symbol object")
self.set_list_attribute(other, Symbol, 'symbols')
return other
class BasicDefinitions(ODMElement):
"""Container for Measurement units"""
def __init__(self):
self.measurement_units = []
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("BasicDefinitions", {})
for child in self.measurement_units:
child.build(builder)
builder.end("BasicDefinitions")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, MeasurementUnit):
raise ValueError("BasicDefinitions object can only receive MeasurementUnit object")
self.measurement_units.append(other)
return other
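# Metadata sketch (hypothetical OIDs, not in the original file): a kilogram
# measurement unit carrying an English symbol translation.
def _example_basic_definitions():
    return BasicDefinitions()(
        MeasurementUnit("MU_KG", "Kilograms", standard_unit=True)(
            Symbol()(TranslatedText("kg", lang="en"))
        )
    )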
class StudyEventRef(ODMElement):
def __init__(self, oid, order_number, mandatory):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(StudyEventOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory))
builder.start("StudyEventRef", params)
builder.end("StudyEventRef")
class Protocol(ODMElement):
"""Protocol child of MetaDataVersion, holder of StudyEventRefs"""
def __init__(self):
self.study_event_refs = []
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("Protocol", {})
for child in self.study_event_refs:
child.build(builder)
builder.end("Protocol")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (StudyEventRef,)):
raise ValueError('Protocol cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, StudyEventRef, 'study_event_refs')
return other
class FormRef(ODMElement):
def __init__(self, oid, order_number, mandatory):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
params = dict(FormOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory)
)
builder.start('FormRef', params)
builder.end('FormRef')
class StudyEventDef(ODMElement):
# Event types
SCHEDULED = 'Scheduled'
UNSCHEDULED = 'Unscheduled'
COMMON = 'Common'
def __init__(self, oid, name, repeating, event_type,
category=None,
access_days=None,
start_win_days=None,
target_days=None,
end_win_days=None,
overdue_days=None,
close_days=None
):
self.oid = oid
self.name = name
self.repeating = repeating
self.event_type = event_type
self.category = category
self.access_days = access_days
self.start_win_days = start_win_days
self.target_days = target_days
self.end_win_days = end_win_days
self.overdue_days = overdue_days
self.close_days = close_days
self.formrefs = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid, Name=self.name,
Repeating=bool_to_yes_no(self.repeating),
Type=self.event_type)
if self.category is not None:
params['Category'] = self.category
if self.access_days is not None:
params['mdsol:AccessDays'] = str(self.access_days)
if self.start_win_days is not None:
params['mdsol:StartWinDays'] = str(self.start_win_days)
if self.target_days is not None:
params['mdsol:TargetDays'] = str(self.target_days)
if self.end_win_days is not None:
params['mdsol:EndWinDays'] = str(self.end_win_days)
if self.overdue_days is not None:
params['mdsol:OverDueDays'] = str(self.overdue_days)
if self.close_days is not None:
params['mdsol:CloseDays'] = str(self.close_days)
builder.start("StudyEventDef", params)
for formref in self.formrefs:
formref.build(builder)
builder.end("StudyEventDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (FormRef,)):
raise ValueError('StudyEventDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, FormRef, 'formrefs')
return other
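# Sketch (hypothetical OIDs): a scheduled, non-repeating study event
# referencing a single mandatory form.
def _example_study_event_def():
    return StudyEventDef("SCREENING", "Screening", False,
                         StudyEventDef.SCHEDULED)(
        FormRef("VITALS", 1, True)
    )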
class ItemGroupRef(ODMElement):
def __init__(self, oid, order_number, mandatory=True):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
def build(self, builder):
params = dict(ItemGroupOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory),
)
builder.start("ItemGroupRef", params)
builder.end("ItemGroupRef")
class MdsolHelpText(ODMElement):
"""Help element for FormDefs and ItemDefs"""
def __init__(self, lang, content):
self.lang = lang
self.content = content
def build(self, builder):
builder.start('mdsol:HelpText', {'xml:lang': self.lang})
builder.data(self.content)
builder.end('mdsol:HelpText')
class MdsolViewRestriction(ODMElement):
"""ViewRestriction for FormDefs and ItemDefs"""
def __init__(self, rolename):
self.rolename = rolename
def build(self, builder):
builder.start('mdsol:ViewRestriction', {})
builder.data(self.rolename)
builder.end('mdsol:ViewRestriction')
class MdsolEntryRestriction(ODMElement):
"""EntryRestriction for FormDefs and ItemDefs"""
def __init__(self, rolename):
self.rolename = rolename
def build(self, builder):
builder.start('mdsol:EntryRestriction', {})
builder.data(self.rolename)
builder.end('mdsol:EntryRestriction')
class FormDef(ODMElement):
LOG_PORTRAIT = 'Portrait'
LOG_LANDSCAPE = 'Landscape'
DDE_MUSTNOT = 'MustNotDDE'
DDE_MAY = 'MayDDE'
DDE_MUST = 'MustDDE'
NOLINK = 'NoLink'
LINK_NEXT = 'LinkNext'
LINK_CUSTOM = 'LinkCustom'
def __init__(self, oid, name,
repeating=False,
order_number=None,
active=True,
template=False,
signature_required=False,
log_direction=LOG_PORTRAIT,
double_data_entry=DDE_MUSTNOT,
confirmation_style=NOLINK,
link_study_event_oid=None,
link_form_oid=None
):
self.oid = oid
self.name = name
self.order_number = order_number
self.repeating = repeating # Not actually used by Rave.
self.active = active
self.template = template
self.signature_required = signature_required
self.log_direction = log_direction
self.double_data_entry = double_data_entry
self.confirmation_style = confirmation_style
self.link_study_event_oid = link_study_event_oid
self.link_form_oid = link_form_oid
self.itemgroup_refs = []
self.helptexts = [] # Not clear that Rave can accept multiple from docs
self.view_restrictions = []
self.entry_restrictions = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
Repeating=bool_to_yes_no(self.repeating)
)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
if self.active is not None:
params['mdsol:Active'] = bool_to_yes_no(self.active)
params['mdsol:Template'] = bool_to_yes_no(self.template)
params['mdsol:SignatureRequired'] = bool_to_yes_no(self.signature_required)
params['mdsol:LogDirection'] = self.log_direction
params['mdsol:DoubleDataEntry'] = self.double_data_entry
params['mdsol:ConfirmationStyle'] = self.confirmation_style
if self.link_study_event_oid:
params['mdsol:LinkStudyEventOID'] = self.link_study_event_oid
if self.link_form_oid:
params['mdsol:LinkFormOID'] = self.link_form_oid
builder.start("FormDef", params)
for itemgroup_ref in self.itemgroup_refs:
itemgroup_ref.build(builder)
for helptext in self.helptexts:
helptext.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
for entry_restriction in self.entry_restrictions:
entry_restriction.build(builder)
builder.end("FormDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (ItemGroupRef, MdsolHelpText, MdsolViewRestriction, MdsolEntryRestriction,)):
            raise ValueError('FormDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, ItemGroupRef, 'itemgroup_refs')
self.set_list_attribute(other, MdsolHelpText, 'helptexts')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
self.set_list_attribute(other, MdsolEntryRestriction, 'entry_restrictions')
return other
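# Usage sketch (illustrative; OIDs and names are invented):
#
#   form = FormDef("DM_FORM", "Demographics", order_number=1)
#   form << ItemGroupRef("DM_IG", 1)
#   form << MdsolHelpText("en", "Complete at the screening visit.")
#   form << MdsolViewRestriction("Data Manager")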
class MdsolLabelRef(ODMElement):
"""A reference to a label on a form"""
def __init__(self, oid, order_number):
self.oid = oid
self.order_number = order_number
def build(self, builder):
params = dict(LabelOID=self.oid,
OrderNumber=str(self.order_number),
)
builder.start('mdsol:LabelRef', params)
builder.end('mdsol:LabelRef')
class MdsolAttribute(ODMElement):
def __init__(self, namespace, name, value, transaction_type='Insert'):
self.namespace = namespace
self.name = name
self.value = value
self.transaction_type = transaction_type
def build(self, builder):
params = dict(Namespace=self.namespace,
Name=self.name,
Value=self.value,
TransactionType=self.transaction_type,
)
builder.start('mdsol:Attribute', params)
builder.end('mdsol:Attribute')
class ItemRef(ODMElement):
def __init__(self, oid, order_number, mandatory=False, key_sequence=None,
imputation_method_oid=None, role=None, role_codelist_oid=None):
self.oid = oid
self.order_number = order_number
self.mandatory = mandatory
self.key_sequence = key_sequence
self.imputation_method_oid = imputation_method_oid
self.role = role
self.role_codelist_oid = role_codelist_oid
self.attributes = []
def build(self, builder):
params = dict(ItemOID=self.oid,
OrderNumber=str(self.order_number),
Mandatory=bool_to_yes_no(self.mandatory)
)
if self.key_sequence is not None:
params['KeySequence'] = str(self.key_sequence)
if self.imputation_method_oid is not None:
params['ImputationMethodOID'] = self.imputation_method_oid
if self.role is not None:
params['Role'] = self.role
if self.role_codelist_oid is not None:
params['RoleCodeListOID'] = self.role_codelist_oid
builder.start('ItemRef', params)
for attribute in self.attributes:
attribute.build(builder)
builder.end('ItemRef')
def __lshift__(self, other):
"""ItemRef can accept MdsolAttribute(s)"""
        if not isinstance(other, (MdsolAttribute,)):
raise ValueError('ItemRef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, MdsolAttribute, 'attributes')
return other
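# Usage sketch (illustrative): attaching a custom attribute to an ItemRef.
# The namespace/name/value strings are invented examples.
#
#   ref = ItemRef("DM.SEX", 2, mandatory=True)
#   ref << MdsolAttribute("mdsol", "Standards", "CDASH")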
class ItemGroupDef(ODMElement):
def __init__(self, oid, name, repeating=False, is_reference_data=False, sas_dataset_name=None,
domain=None, origin=None, role=None, purpose=None, comment=None):
self.oid = oid
self.name = name
self.repeating = repeating
self.is_reference_data = is_reference_data
self.sas_dataset_name = sas_dataset_name
self.domain = domain
self.origin = origin
self.role = role
self.purpose = purpose
self.comment = comment
self.item_refs = []
self.label_refs = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
Repeating=bool_to_yes_no(self.repeating),
IsReferenceData=bool_to_yes_no(self.is_reference_data)
)
if self.sas_dataset_name is not None:
params['SASDatasetName'] = self.sas_dataset_name
if self.domain is not None:
params['Domain'] = self.domain
if self.origin is not None:
params['Origin'] = self.origin
if self.role is not None:
params['Role'] = self.role
if self.purpose is not None:
params['Purpose'] = self.purpose
if self.comment is not None:
params['Comment'] = self.comment
builder.start('ItemGroupDef', params)
for itemref in self.item_refs:
itemref.build(builder)
# Extensions always listed AFTER core elements
for labelref in self.label_refs:
labelref.build(builder)
builder.end('ItemGroupDef')
def __lshift__(self, other):
"""ItemGroupDef can accept ItemRef and LabelRef"""
if not isinstance(other, (ItemRef, MdsolLabelRef)):
raise ValueError('ItemGroupDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, ItemRef, 'item_refs')
self.set_list_attribute(other, MdsolLabelRef, 'label_refs')
return other
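# Usage sketch (illustrative). Children may be added in any order; build()
# above always emits ItemRefs before LabelRefs (extensions follow the core
# ODM elements):
#
#   igd = ItemGroupDef("DM_IG", "Demographics")
#   igd << ItemRef("DM.SEX", 1)
#   igd << MdsolLabelRef("LBL_1", 2)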
class Question(ODMElement):
def __init__(self):
self.translations = []
def __lshift__(self, other):
"""Override << operator"""
        if not isinstance(other, (TranslatedText,)):
raise ValueError('Question cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, TranslatedText, 'translations')
return other
def build(self, builder):
"""Questions can contain translations"""
builder.start('Question', {})
for translation in self.translations:
translation.build(builder)
builder.end('Question')
class MeasurementUnitRef(ODMElement):
def __init__(self, oid, order_number=None):
self.oid = oid
self.order_number = order_number
def build(self, builder):
params = dict(MeasurementUnitOID=self.oid)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
builder.start('MeasurementUnitRef', params)
builder.end('MeasurementUnitRef')
class MdsolHeaderText(ODMElement):
"""Header text for ItemDef when showed in grid"""
def __init__(self, content, lang=None):
self.content = content
self.lang = lang
def build(self, builder):
params = {}
if self.lang is not None:
params['xml:lang'] = self.lang
builder.start('mdsol:HeaderText', params)
builder.data(self.content)
builder.end('mdsol:HeaderText')
class CodeListRef(ODMElement):
"""CodeListRef: a reference a codelist within an ItemDef"""
def __init__(self, oid):
self.oid = oid
def build(self, builder):
builder.start('CodeListRef', {'CodeListOID': self.oid})
builder.end('CodeListRef')
class MdsolLabelDef(ODMElement):
"""Label definition"""
def __init__(self, oid, name, field_number=None):
self.oid = oid
self.name = name
self.field_number = field_number
self.help_texts = []
self.translations = []
self.view_restrictions = []
def build(self, builder):
params = dict(OID=self.oid, Name=self.name)
if self.field_number is not None:
params['FieldNumber'] = str(self.field_number)
builder.start("mdsol:LabelDef", params)
for translation in self.translations:
translation.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
builder.end("mdsol:LabelDef")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (MdsolViewRestriction, TranslatedText)):
raise ValueError('MdsolLabelDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_list_attribute(other, TranslatedText, 'translations')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
return other
class MdsolReviewGroup(ODMElement):
"""Maps to Rave review groups for an Item"""
def __init__(self, name):
self.name = name
def build(self, builder):
builder.start('mdsol:ReviewGroup', {})
builder.data(self.name)
builder.end('mdsol:ReviewGroup')
class CheckValue(ODMElement):
"""A value in a RangeCheck"""
def __init__(self, value):
self.value = value
def build(self, builder):
builder.start('CheckValue', {})
builder.data(str(self.value))
builder.end('CheckValue')
class RangeCheck(ODMElement):
"""
    A RangeCheck in Rave maps to QueryHigh, QueryLow, NonConformantHigh and NonConformantLow;
    for other kinds of range check, use an EditCheck (part of Rave's extensions to ODM).
"""
def __init__(self, comparator, soft_hard):
self._comparator = None
self.comparator = comparator
self._soft_hard = None
self.soft_hard = soft_hard
self.check_value = None
self.measurement_unit_ref = None
@property
def comparator(self):
return self._comparator
@comparator.setter
def comparator(self, value):
if not isinstance(value, RangeCheckComparatorType):
raise AttributeError("%s comparator is invalid in RangeCheck." % (value,))
self._comparator = value
@property
def soft_hard(self):
return self._soft_hard
@soft_hard.setter
def soft_hard(self, value):
if not isinstance(value, RangeCheckType):
raise AttributeError("%s soft_hard invalid in RangeCheck." % (value,))
self._soft_hard = value
def build(self, builder):
params = dict(SoftHard=self.soft_hard.value, Comparator=self.comparator.value)
builder.start("RangeCheck", params)
if self.check_value is not None:
self.check_value.build(builder)
if self.measurement_unit_ref is not None:
self.measurement_unit_ref.build(builder)
builder.end("RangeCheck")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (CheckValue, MeasurementUnitRef,)):
raise ValueError('RangeCheck cannot accept a {0} as a child element'.format(other.__class__.__name__))
        self.set_single_attribute(other, CheckValue, 'check_value')
        self.set_single_attribute(other, MeasurementUnitRef, 'measurement_unit_ref')
        return other  # keep << chainable, consistent with the other elements
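# Usage sketch (illustrative). RangeCheckComparatorType and RangeCheckType are
# defined earlier in this module; the member names used below are assumptions:
#
#   rc = RangeCheck(RangeCheckComparatorType.GE, RangeCheckType.Soft)  # names assumed
#   rc << CheckValue(0)
#   rc << MeasurementUnitRef("KG")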
class ItemDef(ODMElement):
VALID_DATATYPES = [DataType.Text, DataType.Integer, DataType.Float, DataType.Date,
DataType.DateTime, DataType.Time]
def __init__(self, oid, name, datatype, length,
significant_digits=None,
sas_field_name=None,
sds_var_name=None,
origin=None, # Not mapped in Rave
comment=None,
active=True,
control_type=None,
acceptable_file_extensions=None,
indent_level=0,
source_document_verify=False,
default_value=None,
sas_format=None,
sas_label=None,
query_future_date=False,
visible=True,
translation_required=False,
query_non_conformance=False,
other_visits=False,
can_set_item_group_date=False,
can_set_form_date=False,
can_set_study_event_date=False,
can_set_subject_date=False,
visual_verify=False,
does_not_break_signature=False,
date_time_format=None,
field_number=None,
variable_oid=None
):
self.oid = oid
self.name = name
if datatype not in ItemDef.VALID_DATATYPES:
raise AttributeError('{0} is not a valid datatype!'.format(datatype))
if control_type is not None:
if not isinstance(control_type, ControlType):
raise AttributeError("{0} is not a valid Control Type".format(control_type))
self.datatype = datatype
self.length = length
self.significant_digits = significant_digits
self.sas_field_name = sas_field_name
self.sds_var_name = sds_var_name
self.origin = origin
self.comment = comment
self.active = active
self.control_type = control_type
self.acceptable_file_extensions = acceptable_file_extensions
self.indent_level = indent_level
self.source_document_verify = source_document_verify
self.default_value = default_value
self.sas_format = sas_format
self.sas_label = sas_label
self.query_future_date = query_future_date
self.visible = visible
self.translation_required = translation_required
self.query_non_conformance = query_non_conformance
self.other_visits = other_visits
self.can_set_item_group_date = can_set_item_group_date
self.can_set_form_date = can_set_form_date
self.can_set_study_event_date = can_set_study_event_date
self.can_set_subject_date = can_set_subject_date
self.visual_verify = visual_verify
self.does_not_break_signature = does_not_break_signature
self.date_time_format = date_time_format
self.field_number = field_number
self.variable_oid = variable_oid
self.question = None
self.codelistref = None
self.measurement_unit_refs = []
self.help_texts = []
self.view_restrictions = []
self.entry_restrictions = []
self.header_text = None
self.review_groups = []
self.range_checks = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid,
Name=self.name,
DataType=self.datatype.value,
Length=str(self.length),
)
if self.date_time_format is not None:
params['mdsol:DateTimeFormat'] = self.date_time_format
params['mdsol:Active'] = bool_to_yes_no(self.active)
if self.significant_digits is not None:
params['SignificantDigits'] = str(self.significant_digits)
if self.sas_field_name is not None:
params['SASFieldName'] = self.sas_field_name
if self.sds_var_name is not None:
params['SDSVarName'] = self.sds_var_name
if self.origin is not None:
params['Origin'] = self.origin
if self.comment is not None:
params['Comment'] = self.comment
if self.control_type is not None:
params['mdsol:ControlType'] = self.control_type.value
if self.acceptable_file_extensions is not None:
params['mdsol:AcceptableFileExtensions'] = self.acceptable_file_extensions
if self.default_value is not None:
params['mdsol:DefaultValue'] = str(self.default_value)
params['mdsol:SourceDocument'] = bool_to_yes_no(self.source_document_verify)
params['mdsol:IndentLevel'] = str(self.indent_level)
if self.sas_format is not None:
params['mdsol:SASFormat'] = self.sas_format
if self.sas_label is not None:
params['mdsol:SASLabel'] = self.sas_label
params['mdsol:QueryFutureDate'] = bool_to_yes_no(self.query_future_date)
params['mdsol:Visible'] = bool_to_yes_no(self.visible)
params['mdsol:TranslationRequired'] = bool_to_yes_no(self.translation_required)
params['mdsol:QueryNonConformance'] = bool_to_yes_no(self.query_non_conformance)
params['mdsol:OtherVisits'] = bool_to_yes_no(self.other_visits)
params['mdsol:CanSetItemGroupDate'] = bool_to_yes_no(self.can_set_item_group_date)
params['mdsol:CanSetFormDate'] = bool_to_yes_no(self.can_set_form_date)
params['mdsol:CanSetStudyEventDate'] = bool_to_yes_no(self.can_set_study_event_date)
params['mdsol:CanSetSubjectDate'] = bool_to_yes_no(self.can_set_subject_date)
params['mdsol:VisualVerify'] = bool_to_yes_no(self.visual_verify)
params['mdsol:DoesNotBreakSignature'] = bool_to_yes_no(self.does_not_break_signature)
if self.field_number is not None:
            params['mdsol:FieldNumber'] = str(self.field_number)
if self.variable_oid is not None:
params['mdsol:VariableOID'] = self.variable_oid
builder.start("ItemDef", params)
if self.question is not None:
self.question.build(builder)
if self.codelistref is not None:
self.codelistref.build(builder)
for mur in self.measurement_unit_refs:
mur.build(builder)
for range_check in self.range_checks:
range_check.build(builder)
if self.header_text is not None:
self.header_text.build(builder)
for view_restriction in self.view_restrictions:
view_restriction.build(builder)
for entry_restriction in self.entry_restrictions:
entry_restriction.build(builder)
for help_text in self.help_texts:
help_text.build(builder)
for review_group in self.review_groups:
review_group.build(builder)
builder.end("ItemDef")
def __lshift__(self, other):
"""Override << operator"""
        # ODM content model (simplified): Question?, ExternalQuestion?,
        # MeasurementUnitRef*, RangeCheck*, CodeListRef?, Role*, Alias*,
        # plus Medidata extensions: mdsol:HelpText?, mdsol:ViewRestriction*,
        # mdsol:EntryRestriction*, mdsol:HeaderText?, mdsol:ReviewGroup*, mdsol:Label?
if not isinstance(other, (MdsolHelpText, MdsolEntryRestriction, MdsolViewRestriction, Question,
MeasurementUnitRef, CodeListRef, MdsolHeaderText, MdsolReviewGroup, RangeCheck)):
raise ValueError('ItemDef cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, Question, 'question')
self.set_single_attribute(other, CodeListRef, 'codelistref')
self.set_single_attribute(other, MdsolHeaderText, 'header_text')
self.set_list_attribute(other, RangeCheck, 'range_checks')
self.set_list_attribute(other, MeasurementUnitRef, 'measurement_unit_refs')
self.set_list_attribute(other, MdsolHelpText, 'help_texts')
self.set_list_attribute(other, MdsolViewRestriction, 'view_restrictions')
self.set_list_attribute(other, MdsolEntryRestriction, 'entry_restrictions')
self.set_list_attribute(other, MdsolReviewGroup, 'review_groups')
return other
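# Usage sketch (illustrative; OIDs are invented, and the TranslatedText
# argument order is an assumption based on its use elsewhere in this module):
#
#   item = ItemDef("DM.SEX", "Sex", DataType.Text, 1)
#   item << Question() << TranslatedText("Sex at birth", "en")  # args assumed
#   item << CodeListRef("SEX_CL")
#
# The chain works because each << returns its right-hand child: the
# TranslatedText is appended to the Question, not to the ItemDef.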
class Decode(ODMElement):
def __init__(self):
self.translations = []
def build(self, builder):
builder.start("Decode", {})
for translation in self.translations:
translation.build(builder)
builder.end("Decode")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, TranslatedText):
raise ValueError('Decode cannot accept child of type {0}'.format(other.__class__.__name__))
self.translations.append(other)
return other
class CodeListItem(ODMElement):
def __init__(self, coded_value, order_number=None, specify=False):
self.coded_value = coded_value
self.order_number = order_number
self.specify = specify
self.decode = None
def build(self, builder):
params = dict(CodedValue=self.coded_value)
if self.order_number is not None:
params['mdsol:OrderNumber'] = str(self.order_number)
if self.specify:
params['mdsol:Specify'] = "Yes"
builder.start("CodeListItem", params)
if self.decode is not None:
self.decode.build(builder)
builder.end("CodeListItem")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, Decode):
            raise ValueError('CodeListItem cannot accept child of type {0}'.format(other.__class__.__name__))
self.set_single_attribute(other, Decode, 'decode')
return other
class CodeList(ODMElement):
"""A container for CodeListItems equivalent of Rave Dictionary"""
VALID_DATATYPES = [DataType.Integer, DataType.Text, DataType.Float, DataType.String]
def __init__(self, oid, name, datatype, sas_format_name=None):
self.oid = oid
self.name = name
if datatype not in CodeList.VALID_DATATYPES:
raise ValueError("{0} is not a valid CodeList datatype".format(datatype))
self.datatype = datatype
self.sas_format_name = sas_format_name
self.codelist_items = []
def build(self, builder):
params = dict(OID=self.oid,
Name=self.name,
DataType=self.datatype.value)
if self.sas_format_name is not None:
params['SASFormatName'] = self.sas_format_name
builder.start("CodeList", params)
for item in self.codelist_items:
item.build(builder)
builder.end("CodeList")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, CodeListItem):
            raise ValueError('CodeList cannot accept child of type {0}'.format(other.__class__.__name__))
self.set_list_attribute(other, CodeListItem, 'codelist_items')
return other
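# Usage sketch (illustrative): a two-entry codelist. TranslatedText's
# argument order is an assumption:
#
#   cl = CodeList("SEX_CL", "Sex", DataType.Text)
#   for value, label in [("M", "Male"), ("F", "Female")]:
#       cli = CodeListItem(value)
#       cli << Decode() << TranslatedText(label, "en")  # args assumed
#       cl << cli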
class MdsolConfirmationMessage(ODMElement):
"""Form is saved confirmation message"""
def __init__(self, message, lang=None):
self.message = message
self.lang = lang
def build(self, builder):
params = {}
if self.lang:
params['xml:lang'] = self.lang
builder.start('mdsol:ConfirmationMessage', params)
builder.data(self.message)
builder.end('mdsol:ConfirmationMessage')
class MdsolDerivationStep(ODMElement):
"""A derivation step modeled after the Architect Loader definition.
    Do not use directly; use an appropriate subclass.
"""
VALID_STEPS = VALID_DERIVATION_STEPS
def __init__(self,
variable_oid=None,
data_format=None,
form_oid=None,
folder_oid=None,
field_oid=None,
value=None,
function=None,
custom_function=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None
):
self.variable_oid = variable_oid
self.data_format = data_format
self.form_oid = form_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.value = value
self._function = None
self.function = function
self.custom_function = custom_function
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
@property
def function(self):
return self._function
@function.setter
def function(self, value):
if value is not None:
if value not in MdsolDerivationStep.VALID_STEPS:
raise AttributeError("Invalid derivation function %s" % value)
self._function = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.data_format is not None:
params['DataFormat'] = self.data_format
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.value is not None:
params['Value'] = self.value
if self.function is not None:
params['Function'] = self.function.value
if self.custom_function is not None:
params['CustomFunction'] = self.custom_function
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start("mdsol:DerivationStep", params)
builder.end("mdsol:DerivationStep")
class MdsolCheckStep(ODMElement):
"""A check step modeled after the Architect Loader definition.
    Do not use directly; use an appropriate subclass.
"""
VALID_STEPS = ALL_STEPS
def __init__(self,
variable_oid=None,
data_format=None,
form_oid=None,
folder_oid=None,
field_oid=None,
static_value=None,
function=None,
custom_function=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None
):
self.variable_oid = variable_oid
self.data_format = data_format
self.form_oid = form_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.static_value = static_value
self._function = None
self.function = function
self.custom_function = custom_function
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
@property
def function(self):
return self._function
@function.setter
def function(self, value):
if value is not None:
if value not in MdsolCheckStep.VALID_STEPS:
raise AttributeError("Invalid function %s" % value)
self._function = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.data_format is not None:
params['DataFormat'] = self.data_format
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.static_value is not None:
params['StaticValue'] = self.static_value
if self.function is not None:
params['Function'] = self.function.value
if self.custom_function is not None:
params['CustomFunction'] = self.custom_function
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start("mdsol:CheckStep", params)
builder.end("mdsol:CheckStep")
class MdsolCheckAction(ODMElement):
"""
    A check action modeled after the check action in the Architect Loader spreadsheet.
    Do not use directly; use an appropriate subclass.
"""
def __init__(self,
variable_oid=None,
field_oid=None,
form_oid=None,
folder_oid=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
check_action_type=None,
check_string=None,
check_options=None,
check_script=None
):
self.variable_oid = variable_oid
self.folder_oid = folder_oid
self.field_oid = field_oid
self.form_oid = form_oid
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self._check_action_type = None
self.check_action_type = check_action_type
self.check_string = check_string
self.check_options = check_options
self.check_script = check_script
@property
def check_action_type(self):
return self._check_action_type
@check_action_type.setter
def check_action_type(self, value):
if value is not None:
if not isinstance(value, ActionType):
raise AttributeError("Invalid check action %s" % value)
self._check_action_type = value
def build(self, builder):
params = dict()
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.check_action_type is not None:
params['Type'] = self.check_action_type.value
if self.check_string is not None:
params['String'] = self.check_string
if self.check_options is not None:
params['Options'] = self.check_options
if self.check_script is not None:
params['Script'] = self.check_script
builder.start("mdsol:CheckAction", params)
builder.end("mdsol:CheckAction")
class MdsolEditCheckDef(ODMElement):
"""Extension for Rave edit checks"""
def __init__(self, oid, active=True, bypass_during_migration=False, needs_retesting=False):
self.oid = oid
self.active = active
self.bypass_during_migration = bypass_during_migration
self.needs_retesting = needs_retesting
self.check_steps = []
self.check_actions = []
def build(self, builder):
params = dict(OID=self.oid,
Active=bool_to_true_false(self.active),
BypassDuringMigration=bool_to_true_false(self.bypass_during_migration),
NeedsRetesting=bool_to_true_false(self.needs_retesting)
)
builder.start('mdsol:EditCheckDef', params)
for step in self.check_steps:
step.build(builder)
for action in self.check_actions:
action.build(builder)
builder.end('mdsol:EditCheckDef')
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (MdsolCheckStep, MdsolCheckAction,)):
raise ValueError('EditCheck cannot accept a {0} as a child element'.format(other.__class__.__name__))
        self.set_list_attribute(other, MdsolCheckStep, 'check_steps')
        self.set_list_attribute(other, MdsolCheckAction, 'check_actions')
        return other  # keep << chainable, consistent with the other elements
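# Usage sketch (illustrative). The step-function and action-type member names
# below are assumptions; the enumerations themselves are defined earlier:
#
#   ec = MdsolEditCheckDef("CHECK_SEX_MISSING")
#   ec << MdsolCheckStep(form_oid="DM_FORM", field_oid="SEX")
#   ec << MdsolCheckStep(function=StepType.IsEmpty)                 # name assumed
#   ec << MdsolCheckAction(check_action_type=ActionType.OpenQuery,  # name assumed
#                          check_string="Sex is missing.")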
class MdsolDerivationDef(ODMElement):
"""Extension for Rave derivations"""
def __init__(self, oid, active=True,
bypass_during_migration=False,
needs_retesting=False,
variable_oid=None,
field_oid=None,
form_oid=None,
folder_oid=None,
record_position=None,
form_repeat_number=None,
folder_repeat_number=None,
logical_record_position=None,
all_variables_in_folders=None,
all_variables_in_fields=None
):
self.oid = oid
self.active = active
self.bypass_during_migration = bypass_during_migration
self.needs_retesting = needs_retesting
self.variable_oid = variable_oid
self.field_oid = field_oid
self.form_oid = form_oid
self.folder_oid = folder_oid
self.record_position = record_position
self.form_repeat_number = form_repeat_number
self.folder_repeat_number = folder_repeat_number
self.logical_record_position = logical_record_position
self.all_variables_in_folders = all_variables_in_folders
self.all_variables_in_fields = all_variables_in_fields
self.derivation_steps = []
def build(self, builder):
params = dict(
OID=self.oid,
Active=bool_to_true_false(self.active),
BypassDuringMigration=bool_to_true_false(self.bypass_during_migration),
NeedsRetesting=bool_to_true_false(self.needs_retesting)
)
if self.variable_oid is not None:
params['VariableOID'] = self.variable_oid
if self.field_oid is not None:
params['FieldOID'] = self.field_oid
if self.form_oid is not None:
params['FormOID'] = self.form_oid
if self.folder_oid is not None:
params['FolderOID'] = self.folder_oid
if self.record_position is not None:
params['RecordPosition'] = str(self.record_position)
if self.form_repeat_number is not None:
params['FormRepeatNumber'] = str(self.form_repeat_number)
if self.folder_repeat_number is not None:
params['FolderRepeatNumber'] = str(self.folder_repeat_number)
if self.all_variables_in_folders is not None:
params['AllVariablesInFolders'] = bool_to_true_false(self.all_variables_in_folders)
if self.all_variables_in_fields is not None:
params['AllVariablesInFields'] = bool_to_true_false(self.all_variables_in_fields)
if self.logical_record_position is not None:
params['LogicalRecordPosition'] = self.logical_record_position
builder.start('mdsol:DerivationDef', params)
for step in self.derivation_steps:
step.build(builder)
builder.end('mdsol:DerivationDef')
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, MdsolDerivationStep):
raise ValueError('Derivation cannot accept a {0} as a child element'.format(other.__class__.__name__))
        self.set_list_attribute(other, MdsolDerivationStep, 'derivation_steps')
        return other  # keep << chainable, consistent with the other elements
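# Usage sketch (illustrative; the derivation function member name is assumed):
#
#   dv = MdsolDerivationDef("DER_BMI", form_oid="VITALS", field_oid="BMI")
#   dv << MdsolDerivationStep(form_oid="VITALS", field_oid="WEIGHT_KG")
#   dv << MdsolDerivationStep(function=StepType.Divide)  # name assumed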
class MdsolCustomFunctionDef(ODMElement):
"""Extension for Rave Custom functions"""
VB = "VB" # VB was deprecated in later Rave versions.
C_SHARP = "C#"
SQL = "SQ"
VALID_LANGUAGES = [C_SHARP, SQL, VB]
def __init__(self, oid, code, language="C#"):
self.oid = oid
self.code = code
self.language = language
def build(self, builder):
params = dict(OID=self.oid, Language=self.language)
builder.start('mdsol:CustomFunctionDef', params)
builder.data(self.code)
builder.end('mdsol:CustomFunctionDef')
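# Usage sketch (illustrative):
#
#   cf = MdsolCustomFunctionDef("CF_HELLO",
#                               'return "Hello";',
#                               language=MdsolCustomFunctionDef.C_SHARP)
#
# Note that VALID_LANGUAGES is declared above but not enforced by the
# constructor; callers are expected to pass one of those values.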
class MetaDataVersion(ODMElement):
"""MetaDataVersion, child of study"""
def __init__(self, oid, name,
description=None,
primary_formoid=None,
default_matrix_oid=None,
delete_existing=False,
signature_prompt=None):
self.oid = oid
self.name = name
self.description = description
self.primary_formoid = primary_formoid
self.default_matrix_oid = default_matrix_oid
self.delete_existing = delete_existing
self.signature_prompt = signature_prompt
self.confirmation_message = None
self.protocol = None
self.codelists = []
self.item_defs = []
self.label_defs = []
self.item_group_defs = []
self.form_defs = []
self.study_event_defs = []
self.edit_checks = []
self.derivations = []
self.custom_functions = []
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid, Name=self.name)
if self.description is not None:
params['Description'] = self.description
if self.signature_prompt is not None:
params['mdsol:SignaturePrompt'] = self.signature_prompt
if self.primary_formoid is not None:
params['mdsol:PrimaryFormOID'] = self.primary_formoid
if self.default_matrix_oid is not None:
params['mdsol:DefaultMatrixOID'] = self.default_matrix_oid
params['mdsol:DeleteExisting'] = bool_to_yes_no(self.delete_existing)
builder.start("MetaDataVersion", params)
if self.protocol:
self.protocol.build(builder)
for event in self.study_event_defs:
event.build(builder)
for formdef in self.form_defs:
formdef.build(builder)
for itemgroupdef in self.item_group_defs:
itemgroupdef.build(builder)
for itemdef in self.item_defs:
itemdef.build(builder)
for codelist in self.codelists:
codelist.build(builder)
# Extensions must always come after core elements
if self.confirmation_message:
self.confirmation_message.build(builder)
for labeldef in self.label_defs:
labeldef.build(builder)
for edit_check in self.edit_checks:
edit_check.build(builder)
for derivation in self.derivations:
derivation.build(builder)
for custom_function in self.custom_functions:
custom_function.build(builder)
builder.end("MetaDataVersion")
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (Protocol, StudyEventDef, FormDef, ItemGroupDef, ItemDef, MdsolLabelDef, CodeList,
MdsolConfirmationMessage, MdsolEditCheckDef, MdsolDerivationDef,
MdsolCustomFunctionDef)):
raise ValueError('MetaDataVersion cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, Protocol, 'protocol')
self.set_single_attribute(other, MdsolConfirmationMessage, 'confirmation_message')
self.set_list_attribute(other, StudyEventDef, 'study_event_defs')
self.set_list_attribute(other, FormDef, 'form_defs')
self.set_list_attribute(other, ItemGroupDef, 'item_group_defs')
self.set_list_attribute(other, MdsolLabelDef, 'label_defs')
self.set_list_attribute(other, ItemDef, 'item_defs')
self.set_list_attribute(other, CodeList, 'codelists')
self.set_list_attribute(other, MdsolEditCheckDef, 'edit_checks')
self.set_list_attribute(other, MdsolDerivationDef, 'derivations')
self.set_list_attribute(other, MdsolCustomFunctionDef, 'custom_functions') # NB. Current schema limits to 1
return other
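# Usage sketch (illustrative). Children may be attached in any order; build()
# above emits them in the schema-mandated order, with Medidata extensions
# after the core ODM elements:
#
#   mdv = MetaDataVersion("MDV_1", "Draft 1")
#   mdv << StudyEventDef(...)   # as constructed earlier
#   mdv << MdsolConfirmationMessage("Form saved.", lang="en")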
class Study(ODMElement):
"""ODM Study Metadata element"""
PROJECT = 'Project'
GLOBAL_LIBRARY = 'GlobalLibrary Volume'
PROJECT_TYPES = [PROJECT, GLOBAL_LIBRARY]
def __init__(self, oid, project_type=None):
self.oid = oid
self.global_variables = None
self.basic_definitions = None
self.metadata_version = None
        self.studyevent_defs = []  # Note: never populated or emitted here; StudyEventDefs belong to MetaDataVersion
if project_type is None:
self.project_type = "Project"
else:
if project_type in Study.PROJECT_TYPES:
self.project_type = project_type
else:
                raise ValueError('Project type "{0}" not valid. Expected one of {1}'
                                 .format(project_type, ', '.join(Study.PROJECT_TYPES)))
def __lshift__(self, other):
"""Override << operator"""
if not isinstance(other, (GlobalVariables, BasicDefinitions, MetaDataVersion)):
raise ValueError('Study cannot accept a {0} as a child element'.format(other.__class__.__name__))
self.set_single_attribute(other, GlobalVariables, 'global_variables')
self.set_single_attribute(other, BasicDefinitions, 'basic_definitions')
self.set_single_attribute(other, MetaDataVersion, 'metadata_version')
return other
def build(self, builder):
"""Build XML by appending to builder"""
params = dict(OID=self.oid)
params['mdsol:ProjectType'] = self.project_type
builder.start("Study", params)
# Ask children
if self.global_variables is not None:
self.global_variables.build(builder)
if self.basic_definitions is not None:
self.basic_definitions.build(builder)
if self.metadata_version is not None:
self.metadata_version.build(builder)
builder.end("Study")
| mit | -2,991,630,257,282,767,400 | 33.787551 | 121 | 0.603444 | false |