repo_name (stringlengths 5–100) | ref (stringlengths 12–67) | path (stringlengths 4–244) | copies (stringlengths 1–8) | content (stringlengths 0–1.05M, ⌀ = nullable)
---|---|---|---|---
juhnowski/FishingRod | refs/heads/master | production/pygsl-0.9.5/pygsl/testing/complex.py | 2 | import _ufuncs
_token = "complex_"
_tokl = len(_token)
for _name in dir(_ufuncs):
if _name[:_tokl] == _token:
_shortname = _name[_tokl:]
_cmd = "%s = _ufuncs.%s" % (_shortname, _name)
# print(_cmd)
exec(_cmd)
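# (illustration: a hypothetical _ufuncs.complex_exp would now be available
# in this module's namespace simply as 'exp')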
del _token
del _tokl
del _shortname
del _name
del _cmd
|
ericholscher/django | refs/heads/master | django/contrib/staticfiles/utils.py | 322 | import os
import fnmatch
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def matches_patterns(path, patterns=None):
"""
Return True or False depending on whether the ``path`` should be
ignored (if it matches any pattern in ``ignore_patterns``).
"""
if patterns is None:
patterns = []
for pattern in patterns:
if fnmatch.fnmatchcase(path, pattern):
return True
return False
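# Illustrative check of the fnmatch-based matching above (hypothetical paths
# and patterns; note that fnmatch's '*' also matches path separators):
#     matches_patterns('css/base.css', ['*.css'])  # True
#     matches_patterns('js/app.js', ['*.css'])     # False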
def get_files(storage, ignore_patterns=None, location=''):
"""
Recursively walk the storage directories yielding the paths
of all files that should be copied.
"""
if ignore_patterns is None:
ignore_patterns = []
directories, files = storage.listdir(location)
for fn in files:
if matches_patterns(fn, ignore_patterns):
continue
if location:
fn = os.path.join(location, fn)
yield fn
for dir in directories:
if matches_patterns(dir, ignore_patterns):
continue
if location:
dir = os.path.join(location, dir)
for fn in get_files(storage, ignore_patterns, dir):
yield fn
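# A minimal usage sketch for get_files (assumes the staticfiles app is
# configured; staticfiles_storage is the app's standard configured storage):
#     from django.contrib.staticfiles.storage import staticfiles_storage
#     for path in get_files(staticfiles_storage, ignore_patterns=['*.txt']):
#         print(path)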
def check_settings(base_url=None):
"""
Checks if the staticfiles settings have sane values.
"""
if base_url is None:
base_url = settings.STATIC_URL
if not base_url:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the required STATIC_URL setting.")
if settings.MEDIA_URL == base_url:
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
"settings must have different values")
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
(settings.MEDIA_ROOT == settings.STATIC_ROOT)):
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
"settings must have different values")
|
meduz/openRetina | refs/heads/master | openRetina/__main__.py | 3 | print('hello world, retina')
|
tralamazza/micropython | refs/heads/master | tests/basics/python36.py | 21 | # tests for things that only Python 3.6 supports
# underscores in numeric literals
print(100_000)
print(0b1010_0101)
print(0xff_ff)
# underscore supported by int constructor
print(int('1_2_3'))
print(int('0o1_2_3', 8))
|
poojavade/Genomics_Docker | refs/heads/master | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/Bio/AlignIO/ClustalIO.py | 1 | # Copyright 2006-2013 by Peter Cock. All rights reserved.
#
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Bio.AlignIO support for "clustal" output from CLUSTAL W and other tools.
You are expected to use this module via the Bio.AlignIO functions (or the
Bio.SeqIO functions if you want to work directly with the gapped sequences).
"""
from __future__ import print_function
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Align import MultipleSeqAlignment
from .Interfaces import AlignmentIterator, SequentialAlignmentWriter
__docformat__ = "restructuredtext en"
class ClustalWriter(SequentialAlignmentWriter):
"""Clustalw alignment writer."""
def write_alignment(self, alignment):
"""Use this to write (another) single alignment to an open file."""
if len(alignment) == 0:
raise ValueError("Must have at least one sequence")
if alignment.get_alignment_length() == 0:
# This doubles as a check for an alignment object
raise ValueError("Non-empty sequences are required")
# Old versions of the parser in Bio.Clustalw used a ._version property,
try:
version = str(alignment._version)
except AttributeError:
version = ""
if not version:
version = '1.81'
if version.startswith("2."):
# e.g. 2.0.x
output = "CLUSTAL %s multiple sequence alignment\n\n\n" % version
else:
# e.g. 1.81 or 1.83
output = "CLUSTAL X (%s) multiple sequence alignment\n\n\n" % version
cur_char = 0
max_length = len(alignment[0])
if max_length <= 0:
raise ValueError("Non-empty sequences are required")
# keep displaying sequences until we reach the end
while cur_char != max_length:
# calculate the number of sequences to show, which will
# be less if we are at the end of the sequence
if (cur_char + 50) > max_length:
show_num = max_length - cur_char
else:
show_num = 50
# go through all of the records and print out the sequences
# when we output, we do a nice 80 column output, although this
# may result in truncation of the ids.
for record in alignment:
# Make sure we don't get any spaces in the record
# identifier when output in the file by replacing
# them with underscores:
line = record.id[0:30].replace(" ", "_").ljust(36)
line += str(record.seq[cur_char:(cur_char + show_num)])
output += line + "\n"
# now we need to print out the star info, if we've got it
# This was stored by Bio.Clustalw using a ._star_info property.
if hasattr(alignment, "_star_info") and alignment._star_info != '':
output += (" " * 36) + \
alignment._star_info[cur_char:(cur_char + show_num)] + "\n"
output += "\n"
cur_char += show_num
# Want a trailing blank new line in case the output is concatenated
self.handle.write(output + "\n")
class ClustalIterator(AlignmentIterator):
"""Clustalw alignment iterator."""
_header = None # for caching lines between __next__ calls
def __next__(self):
handle = self.handle
if self._header is None:
line = handle.readline()
else:
# Header we saved from when we were parsing
# the previous alignment.
line = self._header
self._header = None
if not line:
raise StopIteration
# Whitelisted headers we know about
known_headers = ['CLUSTAL', 'PROBCONS', 'MUSCLE', 'MSAPROBS', 'Kalign']
if line.strip().split()[0] not in known_headers:
raise ValueError("%s is not a known CLUSTAL header: %s" %
(line.strip().split()[0],
", ".join(known_headers)))
# find the clustal version in the header line
version = None
for word in line.split():
if word[0] == '(' and word[-1] == ')':
word = word[1:-1]
if word[0] in '0123456789':
version = word
break
# There should be two blank lines after the header line
line = handle.readline()
while line.strip() == "":
line = handle.readline()
# If the alignment contains entries with the same sequence
# identifier (not a good idea - but seems possible), then this
# dictionary based parser will merge their sequences. Fix this?
ids = []
seqs = []
consensus = ""
seq_cols = None # Used to extract the consensus
# Use the first block to get the sequence identifiers
while True:
if line[0] != " " and line.strip() != "":
# Sequences identifier...
fields = line.rstrip().split()
# We expect there to be two fields, there can be an optional
# "sequence number" field containing the letter count.
if len(fields) < 2 or len(fields) > 3:
raise ValueError("Could not parse line:\n%s" % line)
ids.append(fields[0])
seqs.append(fields[1])
# Record the sequence position to get the consensus
if seq_cols is None:
start = len(fields[0]) + line[len(fields[0]):].find(fields[1])
end = start + len(fields[1])
seq_cols = slice(start, end)
del start, end
assert fields[1] == line[seq_cols]
if len(fields) == 3:
# This MAY be an old style file with a letter count...
try:
letters = int(fields[2])
except ValueError:
raise ValueError("Could not parse line, bad sequence number:\n%s" % line)
if len(fields[1].replace("-", "")) != letters:
raise ValueError("Could not parse line, invalid sequence number:\n%s" % line)
elif line[0] == " ":
# Sequence consensus line...
assert len(ids) == len(seqs)
assert len(ids) > 0
assert seq_cols is not None
consensus = line[seq_cols]
assert not line[:seq_cols.start].strip()
assert not line[seq_cols.stop:].strip()
# Check for blank line (or end of file)
line = handle.readline()
assert line.strip() == ""
break
else:
# No consensus
break
line = handle.readline()
if not line:
break # end of file
assert line.strip() == ""
assert seq_cols is not None
# Confirm all same length
for s in seqs:
assert len(s) == len(seqs[0])
if consensus:
assert len(consensus) == len(seqs[0])
# Loop over any remaining blocks...
done = False
while not done:
# There should be a blank line between each block.
# Also want to ignore any consensus line from the
# previous block.
while (not line) or line.strip() == "":
line = handle.readline()
if not line:
break # end of file
if not line:
break # end of file
if line.split(None, 1)[0] in known_headers:
# Found concatenated alignment.
done = True
self._header = line
break
for i in range(len(ids)):
assert line[0] != " ", "Unexpected line:\n%s" % repr(line)
fields = line.rstrip().split()
# We expect there to be two fields, there can be an optional
# "sequence number" field containing the letter count.
if len(fields) < 2 or len(fields) > 3:
raise ValueError("Could not parse line:\n%s" % repr(line))
if fields[0] != ids[i]:
raise ValueError("Identifiers out of order? Got '%s' but expected '%s'"
% (fields[0], ids[i]))
if fields[1] != line[seq_cols]:
start = len(fields[0]) + line[len(fields[0]):].find(fields[1])
assert start == seq_cols.start, 'Old location %s -> %i:XX' % (seq_cols, start)
end = start + len(fields[1])
seq_cols = slice(start, end)
del start, end
# Append the sequence
seqs[i] += fields[1]
assert len(seqs[i]) == len(seqs[0])
if len(fields) == 3:
# This MAY be an old style file with a letter count...
try:
letters = int(fields[2])
except ValueError:
raise ValueError("Could not parse line, bad sequence number:\n%s" % line)
if len(seqs[i].replace("-", "")) != letters:
raise ValueError("Could not parse line, invalid sequence number:\n%s" % line)
# Read in the next line
line = handle.readline()
# There should now be a consensus line
if consensus:
assert line[0] == " "
assert seq_cols is not None
consensus += line[seq_cols]
assert len(consensus) == len(seqs[0])
assert not line[:seq_cols.start].strip()
assert not line[seq_cols.stop:].strip()
# Read in the next line
line = handle.readline()
assert len(ids) == len(seqs)
if len(seqs) == 0 or len(seqs[0]) == 0:
raise StopIteration
if self.records_per_alignment is not None \
and self.records_per_alignment != len(ids):
raise ValueError("Found %i records in this alignment, told to expect %i"
% (len(ids), self.records_per_alignment))
records = (SeqRecord(Seq(s, self.alphabet), id=i, description=i)
for (i, s) in zip(ids, seqs))
alignment = MultipleSeqAlignment(records, self.alphabet)
# TODO - Handle alignment annotation better, for now
# mimic the old parser in Bio.Clustalw
if version:
alignment._version = version
if consensus:
alignment_length = len(seqs[0])
assert len(consensus) == alignment_length, \
"Alignment length is %i, consensus length is %i, '%s'" \
% (alignment_length, len(consensus), consensus)
alignment._star_info = consensus
return alignment
|
shail2810/nova | refs/heads/master | nova/api/openstack/compute/versionsV21.py | 40 | # Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob.exc
from nova.api.openstack.compute import versions
from nova.api.openstack.compute.views import versions as views_versions
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
ALIAS = "versions"
class VersionsController(wsgi.Controller):
@extensions.expected_errors(404)
def show(self, req, id='v2.1'):
builder = views_versions.get_view_builder(req)
if req.is_legacy_v2():
id = 'v2.0'
if id not in versions.VERSIONS:
raise webob.exc.HTTPNotFound()
return builder.build_version(versions.VERSIONS[id])
class Versions(extensions.V21APIExtensionBase):
"""API Version information."""
name = "Versions"
alias = ALIAS
version = 1
def get_resources(self):
resources = [
extensions.ResourceExtension(ALIAS, VersionsController(),
custom_routes_fn=self.version_map)]
return resources
def get_controller_extensions(self):
return []
def version_map(self, mapper, wsgi_resource):
mapper.connect("versions", "/",
controller=wsgi_resource,
action='show', conditions={"method": ['GET']})
mapper.redirect("", "/")
|
cudadog/django-allauth | refs/heads/master | allauth/socialaccount/providers/openid/migrations/0001_initial.py | 73 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='OpenIDNonce',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('server_url', models.CharField(max_length=255)),
('timestamp', models.IntegerField()),
('salt', models.CharField(max_length=255)),
('date_created', models.DateTimeField(auto_now_add=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OpenIDStore',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('server_url', models.CharField(max_length=255)),
('handle', models.CharField(max_length=255)),
('secret', models.TextField()),
('issued', models.IntegerField()),
('lifetime', models.IntegerField()),
('assoc_type', models.TextField()),
],
options={
},
bases=(models.Model,),
),
]
|
petersrinivasan/neopeng | refs/heads/master | memory.py | 1 | # 1. Something that actually "writes" an integer to memory and can "read" an integer from a memory address
# 2. Value - something allowing us to get several integers from memory and interpret them as a thing, or
# write a thing out as several integers into memory
# 3. A specific type of value: "pointer". Interpretation: location in underlying address space of
# whatever type this is pointing to.
# 2+3. A function to get a pointer value from any value.
# 4. "Reference". System for managing "references", which at least must give us a value for a reference
# when we ask for it. Name. Scope.
# 5. Scope. ?? Functions. Calling things.
# Reference: "name" "scope" "value"
# asdf = [1, 2, 3]
# My memory manager makes some space for a list with 1, 2, 3 in it. Value X
# Now create a reference with name "asdf", the current scope, and value X.
# qwerty = asdf
# Create a reference with name "qwerty", the current scope, and value...? X.
# qwerty[1] = 'hi' ====> qwerty is a reference with value X. Change X to have its second element be 'hi'.
# asdf[1] = ? it returns [1, 'hi', 3]
# asdf = ['nope'] ====> asdf's value is now Y
#
# def F(r, s):
# r = s + 1
# return r + s
#
# a = [1,1,1,1,1,1,1,1,1]
# pa = a.value.getPointer()
# b = 4
# a = F(a, b)
#
# create val(addressX, [thousand element list])
# create ref("a", 0, X)
# create val(addressY, 4)
# create ref("b", 0, Y)
# Call F! *enter scope* [optional: make values X' and Y', copies of X and Y]
# create ref("r", 1, X)
# --- create ref("r", 1, lookup("a",0))
# --- r = 1
# create ref("s", 1, Y)
# create val(addressZ, the value in s, which is at Y and is 4, plus 1, or 5)
# update ref("r", 1, Z)
# create val(addressA, the value in r (5) plus the value in s (4), or 9)
# return
# *leave scope*
# update ref("a", 0, A)
class Memory(object):
def getAddressOfUnusedMemoryOfSizeN(self, N):
pass
def write(self, address, listOfIntegers):
pass
def read(self, address, N):
pass
def display(self):
print(self.mem)
class Value(object):
"""Represents a fixed size structure written somehow to memory
A value has a size, which is the size of the list of integers returned by getData.
getSize returns that size.
getData queries the underlying memory and builds a list of integers to return.
getPointer returns a Value of size 1 whose getData returns a list of size 1 whose
element is the address of the memory backing the Value that this pointer Value points to.
e.g. if we have a Value X storing a list [3,4] at underlying address 22-23, X.getPointer()
returns a Value Y storing a list [22] at underlying address ???whoknows it's not up to us.
"""
def getSize(self):
pass
def getData(self):
pass
def createPointerValue(self):
pass
def free(self):
pass
def display(self, name):
print(name + " has index "+str(self.address)+" size "+ str(self.size) + " in:")
self.mem.display()
def clone(self):
pass
class Reference(object):
pass
#???
class ReferenceManager(object):
# enter scope #???
# leave scope
# assign value to reference
# get value of reference
def __init__(self, mem):
self.mem = mem
# refs is a stack. Each element is a scope which can have several name:value pairs.
# the current scope is peek. Leaving scope is pop (plus cleanup). Entering scope is
# push (plus initialization)
self.refs = [{}]
def setReferenceValue(self, name, value):
self.refs[-1][name] = value
def getReferenceValue(self, name):
return self.refs[-1][name]
def enterScopeByValue(self, previousScopeNamesOfParameterValues, newScopeParameterNames):
newScope = {}
for parameterName, previousParameterName in zip(newScopeParameterNames, previousScopeNamesOfParameterValues):
referenceValue = self.getReferenceValue(previousParameterName).clone()
newScope[parameterName] = referenceValue
self.refs.append(newScope)
def enterScope(self, previousScopeNamesOfParameterValues, newScopeParameterNames):
newScope = {}
for parameterName, previousParameterName in zip(newScopeParameterNames, previousScopeNamesOfParameterValues):
referenceValue = self.getReferenceValue(previousParameterName)
newScope[parameterName] = referenceValue
self.refs.append(newScope)
def leaveScope(self):
cleanup = self.refs.pop()
# TODO: free the values held by the popped scope
# a = 1
# aPlus1 = a + 1
# referenceManager.enterScope(["a", "aPlus1"], ["b", "c"])
#
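# A runnable sketch of the by-value scope flow, using only this file's API:
#
#     m = PythonFixedSizeListOfIntegersMemory(10)
#     rm = ReferenceManager(m)
#     rm.setReferenceValue("a", ArbitrarySizeValue(m, [1]))
#     rm.enterScopeByValue(["a"], ["b"])  # "b" is bound to a *copy* of a's value
#     print(rm.getReferenceValue("b").getData())  # [1]
#     rm.leaveScope()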
class PythonFixedSizeListOfIntegersMemory(Memory):
def __init__(self, size):
self.mem = [0]*size
self.used = [0]*size
self.size = size
self.EOM = "EOM Addressing past end of memory"
self.DEFRAG = "DEFRAG Memory too fragmented; not enough consecutive blocks"
def getAddressOfUnusedMemoryOfSizeN(self, N):
self.rangeTooLarge(0,N)
unusedSize = 0
address = 0
while unusedSize < N:
if address == self.size:
raise Exception(self.EOM)
if self.used[address] == 0:
unusedSize += 1
else:
unusedSize = 0
address += 1
if unusedSize == N:
return address - N
else:
raise Exception(self.DEFRAG)
def rangeTooLarge(self,address,N):
exclusiveEndAddress = address + N
if exclusiveEndAddress > self.size:
raise Exception(self.EOM)
def markMemoryUsed(self, address, N):
for i in range(address, address + N):
self.used[i] = 1
def markMemoryUnused(self, address, N):
for i in range(address, address + N):
self.used[i] = 0
def write(self, address, listOfIntegers):
length = len(listOfIntegers)
self.rangeTooLarge(address,length)
for i in range(length):
self.mem[address + i] = listOfIntegers[i]
self.markMemoryUsed(address,len(listOfIntegers))
def read(self, address, N):
storedData = [0]*N
for i in range(N):
storedData[i] = self.mem[address + i]
return storedData
def free(self, address, N):
self.markMemoryUnused(address, N)
class ArbitrarySizeValue(Value):
def __init__(self,mem,data):
self.mem = mem
self.size = len(data)
self.address = self.mem.getAddressOfUnusedMemoryOfSizeN(self.size)
self.mem.write(self.address,data)
def getSize(self):
return self.size
def getData(self):
return self.mem.read(self.address,self.size)
def createPointerValue(self):
pointer = PointerValue(self.mem,[self.address])
return pointer
def free(self):
self.mem.free(self.address,self.size)
def clone(self):
newValue = ArbitrarySizeValue(self.mem,self.getData())
return newValue
# PointerValue should point to a size 1 mem whose data is the index of the thing it's pointing to
class PointerValue(ArbitrarySizeValue):
def __init__(self, mem, data):
super(PointerValue, self).__init__(mem, data)
m = PythonFixedSizeListOfIntegersMemory(10)
#m.write(3, [1,2,3,4])
#print(m.mem[4]) #returns 2
#r = m.read(3, 4)
#print(r) #returns [1,2,3,4]
##m.write(99,[1,2]) #raises Exception
m.getAddressOfUnusedMemoryOfSizeN(1)
# somehow we have a Value that's a ListOfSizeTwo called v
v = ArbitrarySizeValue(m, [6, 8])
vData = v.getData()
#print(v.mem.mem)
#print(v.mem.used)
vPointer = v.createPointerValue()
vPointerData = vPointer.getData()
vDataAddress = vPointerData[0]
otherVData = m.read(vDataAddress, 2)
#print(otherVData)
# vData and otherVData are equal
#print()
m = PythonFixedSizeListOfIntegersMemory(10)
d = ArbitrarySizeValue(m, [-1])
v = ArbitrarySizeValue(m, [111, 21, 441])
#v.display("v")
vp = v.createPointerValue()
#vp.display("&v")
vpp = vp.createPointerValue()
#vpp.display("&&v")
w = ArbitrarySizeValue(m, [999])
#w.display("w")
wp = w.createPointerValue()
#wp.display("&w")
vp2 = v.createPointerValue()
#vp2.display("&v (2)")
vpp2 = vp2.createPointerValue()
#vpp2.display("&&v (2)")
w.free()
vp.free()
#m.display()
vppp = vpp.createPointerValue()
#vppp.display("&&&v")
vppp = vpp.createPointerValue()
#print("\r[-1, 111, 21, 441, 5, 4, 5, 6, 1, 8]")
#vppp.display("vppp")
# asdf = [42, 42]
# qwerty = asdf <---
# qwerty[0] = 1 <---
# print(asdf)
m = PythonFixedSizeListOfIntegersMemory(30)
manager = ReferenceManager(m)
# asdf = 42+42
manager.setReferenceValue("asdf", ArbitrarySizeValue(m, [42, 42]))
# qwerty = <the same reference as> asdf
manager.setReferenceValue("qwerty", manager.getReferenceValue("asdf"))
# qwerty highest order byte becomes 1
# 9 = 1001
# 65537 = 00000000 00000001 00000000 00000001 ----- 0x00010001 ... 0-9a-f or 16 possibilities 2^4
# boolean is 1 bit
# byte is 8 bits
# char 8 bits
# short 16 bits
# int 16 or 32
v = manager.getReferenceValue("qwerty")
m.write(v.createPointerValue().getData()[0], [1])
# print asdf
print(manager.getReferenceValue("asdf").getData())
# pAsdf = &asdf
manager.setReferenceValue("pAsdf", manager.getReferenceValue("asdf").createPointerValue())
# call F(pAsdf) where F is def F(pB): ...
manager.enterScopeByValue(["pAsdf"], ["pB"])
# *pB = 99
m.write(manager.getReferenceValue("pB").getData()[0],[99])
# print *pB
print(m.read(manager.getReferenceValue("pB").getData()[0],2))
# return
manager.leaveScope()
# print qwerty
print(manager.getReferenceValue("qwerty").getData())
m.display()
# F(a+1,b)
# def F(lol, hi)
# enter scope needs input values and "output" references |
samba-team/samba | refs/heads/master | source4/dsdb/tests/python/ad_dc_search_performance.py | 1 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import optparse
import sys
sys.path.insert(0, 'bin/python')
import os
import samba
import samba.getopt as options
import random
import tempfile
import shutil
import time
import itertools
from samba.netcmd.main import cmd_sambatool
# We try to use the test infrastructure of Samba 4.3+, but if it
# doesn't work, we are probably in a back-ported patch and trying to
# run on 4.1 or something.
#
# Don't copy this horror into ordinary tests -- it is special for
# performance tests that want to apply to old versions.
try:
from samba.tests.subunitrun import SubunitOptions, TestProgram
ANCIENT_SAMBA = False
except ImportError:
ANCIENT_SAMBA = True
samba.ensure_external_module("testtools", "testtools")
samba.ensure_external_module("subunit", "subunit/python")
from subunit.run import SubunitTestRunner
import unittest
from samba.samdb import SamDB
from samba.auth import system_session
from ldb import Message, MessageElement, Dn, LdbError
from ldb import FLAG_MOD_ADD, FLAG_MOD_REPLACE, FLAG_MOD_DELETE
from ldb import SCOPE_BASE, SCOPE_SUBTREE, SCOPE_ONELEVEL
parser = optparse.OptionParser("ad_dc_performance.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
if not ANCIENT_SAMBA:
subunitopts = SubunitOptions(parser)
parser.add_option_group(subunitopts)
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_usage()
sys.exit(1)
host = args[0]
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
random.seed(1)
class PerfTestException(Exception):
pass
BATCH_SIZE = 1000
N_GROUPS = 5
class GlobalState(object):
next_user_id = 0
n_groups = 0
next_linked_user = 0
next_relinked_user = 0
next_linked_user_3 = 0
next_removed_link_0 = 0
class UserTests(samba.tests.TestCase):
def add_if_possible(self, *args, **kwargs):
"""In these tests sometimes things are left in the database
deliberately, so we don't worry if we fail to add them a second
time."""
try:
self.ldb.add(*args, **kwargs)
except LdbError:
pass
def setUp(self):
super(UserTests, self).setUp()
self.state = GlobalState # the class itself, not an instance
self.lp = lp
self.ldb = SamDB(host, credentials=creds,
session_info=system_session(lp), lp=lp)
self.base_dn = self.ldb.domain_dn()
self.ou = "OU=pid%s,%s" % (os.getpid(), self.base_dn)
self.ou_users = "OU=users,%s" % self.ou
self.ou_groups = "OU=groups,%s" % self.ou
self.ou_computers = "OU=computers,%s" % self.ou
for dn in (self.ou, self.ou_users, self.ou_groups,
self.ou_computers):
self.add_if_possible({
"dn": dn,
"objectclass": "organizationalUnit"})
def tearDown(self):
super(UserTests, self).tearDown()
def test_00_00_do_nothing(self):
# this gives us an idea of the overhead
pass
def _prepare_n_groups(self, n):
self.state.n_groups = n
for i in range(n):
self.add_if_possible({
"dn": "cn=g%d,%s" % (i, self.ou_groups),
"objectclass": "group"})
def _add_users(self, start, end):
for i in range(start, end):
self.ldb.add({
"dn": "cn=u%d,%s" % (i, self.ou_users),
"objectclass": "user"})
def _add_users_ldif(self, start, end):
lines = []
for i in range(start, end):
lines.append("dn: cn=u%d,%s" % (i, self.ou_users))
lines.append("objectclass: user")
lines.append("")
self.ldb.add_ldif('\n'.join(lines))
def _test_unindexed_search(self):
expressions = [
('(&(objectclass=user)(description='
'Built-in account for adminstering the computer/domain))'),
'(description=Built-in account for adminstering the computer/domain)',
'(objectCategory=*)',
'(samaccountname=Administrator*)'
]
for expression in expressions:
t = time.time()
for i in range(50):
self.ldb.search(self.ou,
expression=expression,
scope=SCOPE_SUBTREE,
attrs=['cn'])
print('%d %s took %s' % (i, expression,
time.time() - t),
file=sys.stderr)
def _test_indexed_search(self):
expressions = ['(objectclass=group)',
'(samaccountname=Administrator)'
]
for expression in expressions:
t = time.time()
for i in range(10000):
self.ldb.search(self.ou,
expression=expression,
scope=SCOPE_SUBTREE,
attrs=['cn'])
print('%d runs %s took %s' % (i, expression,
time.time() - t),
file=sys.stderr)
def _test_complex_search(self):
classes = ['samaccountname', 'objectCategory', 'dn', 'member']
values = ['*', '*t*', 'g*', 'user']
comparators = ['=', '<=', '>='] # '~=' causes error
maybe_not = ['!(', '']
joiners = ['&', '|']
# The number of permutations is 18432, which is not huge but
# would take hours to search. So we take a sample.
all_permutations = list(itertools.product(joiners,
classes, classes,
values, values,
comparators, comparators,
maybe_not, maybe_not))
random.seed(1)
for (j, c1, c2, v1, v2,
o1, o2, n1, n2) in random.sample(all_permutations, 100):
expression = ''.join(['(', j,
'(', n1, c1, o1, v1,
'))' if n1 else ')',
'(', n2, c2, o2, v2,
'))' if n2 else ')',
')'])
print(expression)
self.ldb.search(self.ou,
expression=expression,
scope=SCOPE_SUBTREE,
attrs=['cn'])
def _test_member_search(self, rounds=10):
expressions = []
for d in range(50):
expressions.append('(member=cn=u%d,%s)' % (d + 500, self.ou_users))
expressions.append('(member=u%d*)' % (d + 700,))
for i in range(N_GROUPS):
expressions.append('(memberOf=cn=g%d,%s)' % (i, self.ou_groups))
expressions.append('(memberOf=cn=g%d*)' % (i,))
expressions.append('(memberOf=cn=*%s*)' % self.ou_groups)
for expression in expressions:
t = time.time()
for i in range(rounds):
self.ldb.search(self.ou,
expression=expression,
scope=SCOPE_SUBTREE,
attrs=['cn'])
print('%d runs %s took %s' % (i, expression,
time.time() - t),
file=sys.stderr)
def _test_add_many_users(self, n=BATCH_SIZE):
s = self.state.next_user_id
e = s + n
self._add_users(s, e)
self.state.next_user_id = e
def _test_add_many_users_ldif(self, n=BATCH_SIZE):
s = self.state.next_user_id
e = s + n
self._add_users_ldif(s, e)
self.state.next_user_id = e
def _link_user_and_group(self, u, g):
m = Message()
m.dn = Dn(self.ldb, "CN=g%d,%s" % (g, self.ou_groups))
m["member"] = MessageElement("cn=u%d,%s" % (u, self.ou_users),
FLAG_MOD_ADD, "member")
self.ldb.modify(m)
def _test_link_many_users(self, n=BATCH_SIZE):
self._prepare_n_groups(N_GROUPS)
s = self.state.next_linked_user
e = s + n
for i in range(s, e):
# put everyone in group 0, and one other group
g = i % (N_GROUPS - 1) + 1
self._link_user_and_group(i, g)
self._link_user_and_group(i, 0)
self.state.next_linked_user = e
test_00_01_adding_users_1000 = _test_add_many_users
test_00_10_complex_search_1k_users = _test_complex_search
test_00_11_unindexed_search_1k_users = _test_unindexed_search
test_00_12_indexed_search_1k_users = _test_indexed_search
test_00_13_member_search_1k_users = _test_member_search
test_01_02_adding_users_2000_ldif = _test_add_many_users_ldif
test_01_03_adding_users_3000 = _test_add_many_users
test_01_10_complex_search_3k_users = _test_complex_search
test_01_11_unindexed_search_3k_users = _test_unindexed_search
test_01_12_indexed_search_3k_users = _test_indexed_search
def test_01_13_member_search_3k_users(self):
self._test_member_search(rounds=5)
test_02_01_link_users_1000 = _test_link_many_users
test_02_02_link_users_2000 = _test_link_many_users
test_02_03_link_users_3000 = _test_link_many_users
test_03_10_complex_search_linked_users = _test_complex_search
test_03_11_unindexed_search_linked_users = _test_unindexed_search
test_03_12_indexed_search_linked_users = _test_indexed_search
def test_03_13_member_search_linked_users(self):
self._test_member_search(rounds=2)
if "://" not in host:
if os.path.isfile(host):
host = "tdb://%s" % host
else:
host = "ldap://%s" % host
if ANCIENT_SAMBA:
runner = SubunitTestRunner()
if not runner.run(unittest.makeSuite(UserTests)).wasSuccessful():
sys.exit(1)
sys.exit(0)
else:
TestProgram(module=__name__, opts=subunitopts)
|
nwiizo/workspace_2017 | refs/heads/master | ansible-modules-extras/cloud/amazon/execute_lambda.py | 33 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: execute_lambda
short_description: Execute an AWS Lambda function
description:
- This module executes AWS Lambda functions, allowing synchronous and asynchronous
invocation.
version_added: "2.2"
extends_documentation_fragment:
- aws
author: "Ryan Scott Brown (@ryansb) <[email protected]>"
requirements:
- python >= 2.6
- boto3
notes:
- Async invocation will always return an empty C(output) key.
- Synchronous invocation may result in a function timeout, resulting in an
empty C(output) key.
options:
name:
description:
- The name of the function to be invoked. This can only be used for
invocations within the calling account. To invoke a function in another
account, use I(function_arn) to specify the full ARN.
required: false
default: None
function_arn:
description:
- The name of the function to be invoked
required: false
default: None
tail_log:
description:
- If C(tail_log=true), the result of the task will include the last 4 KB
of the CloudWatch log for the function execution. Log tailing only
works if you use synchronous invocation C(wait=true). This is usually
used for development or testing Lambdas.
required: false
default: false
wait:
description:
- Whether to wait for the function results or not. If I(wait) is false,
the task will not return any results. To wait for the Lambda function
to complete, set C(wait=true) and the result will be available in the
I(output) key.
required: false
default: true
dry_run:
description:
- Do not *actually* invoke the function. A C(DryRun) call will check that
the caller has permissions to call the function, especially for
checking cross-account permissions.
required: false
default: False
version_qualifier:
description:
- Which version/alias of the function to run. This defaults to the
C(LATEST) revision, but can be set to any existing version or alias.
See https://docs.aws.amazon.com/lambda/latest/dg/versioning-aliases.html
for details.
required: false
default: LATEST
payload:
description:
- A dictionary in any form to be provided as input to the Lambda function.
required: false
default: {}
'''
EXAMPLES = '''
- execute_lambda:
name: test-function
# the payload is automatically serialized and sent to the function
payload:
foo: bar
value: 8
register: response
# Test that you have sufficient permissions to execute a Lambda function in
# another account
- execute_lambda:
function_arn: arn:aws:lambda:us-east-1:123456789012:function/some-function
dry_run: true
- execute_lambda:
name: test-function
payload:
foo: bar
value: 8
wait: true
tail_log: true
register: response
# the response will have a `logs` key that will contain a log (up to 4KB) of the function execution in Lambda.
- execute_lambda:
name: test-function
version_qualifier: PRODUCTION
'''
RETURN = '''
output:
description: Function output if wait=true and the function returns a value
returned: success
type: dict
sample: "{ 'output': 'something' }"
logs:
description: The last 4KB of the function logs. Only provided if I(tail_log) is true
type: string
status:
description: C(StatusCode) of API call exit (200 for synchronous invokes, 202 for async)
type: int
sample: 200
'''
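# For orientation, the boto3 call this module ultimately wraps looks roughly
# like the sketch below (region and function name are illustrative):
#
#     client = boto3.client('lambda', region_name='us-east-1')
#     response = client.invoke(FunctionName='test-function',
#                              InvocationType='RequestResponse',
#                              Payload=json.dumps({'foo': 'bar'}))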
import base64
import json
import traceback
try:
import botocore  # referenced below for its exception classes
import boto3
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
name=dict(),
function_arn=dict(),
wait=dict(choices=BOOLEANS, default=True, type='bool'),
tail_log=dict(choices=BOOLEANS, default=False, type='bool'),
dry_run=dict(choices=BOOLEANS, default=False, type='bool'),
version_qualifier=dict(),
payload=dict(default={}, type='dict'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[
['name', 'function_arn'],
]
)
if not HAS_BOTO3:
module.fail_json(msg='boto3 required for this module')
name = module.params.get('name')
function_arn = module.params.get('function_arn')
await_return = module.params.get('wait')
dry_run = module.params.get('dry_run')
tail_log = module.params.get('tail_log')
version_qualifier = module.params.get('version_qualifier')
payload = module.params.get('payload')
if not HAS_BOTO3:
module.fail_json(msg='Python module "boto3" is missing, please install it')
if not (name or function_arn):
module.fail_json(msg="Must provide either a function_arn or a name to invoke.")
region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=HAS_BOTO3)
if not region:
module.fail_json(msg="The AWS region must be specified as an "
"environment variable or in the AWS credentials "
"profile.")
try:
client = boto3_conn(module, conn_type='client', resource='lambda',
region=region, endpoint=ec2_url, **aws_connect_kwargs)
except (botocore.exceptions.ClientError, botocore.exceptions.ValidationError) as e:
module.fail_json(msg="Failure connecting boto3 to AWS", exception=traceback.format_exc(e))
invoke_params = {}
if await_return:
# await response
invoke_params['InvocationType'] = 'RequestResponse'
else:
# fire and forget
invoke_params['InvocationType'] = 'Event'
if dry_run or module.check_mode:
# dry_run overrides invocation type
invoke_params['InvocationType'] = 'DryRun'
if tail_log and await_return:
invoke_params['LogType'] = 'Tail'
elif tail_log and not await_return:
module.fail_json(msg="The `tail_log` parameter is only available if "
"the invocation waits for the function to complete. "
"Set `wait` to true or turn off `tail_log`.")
else:
invoke_params['LogType'] = 'None'
if version_qualifier:
invoke_params['Qualifier'] = version_qualifier
if payload:
invoke_params['Payload'] = json.dumps(payload)
if function_arn:
invoke_params['FunctionName'] = function_arn
elif name:
invoke_params['FunctionName'] = name
try:
response = client.invoke(**invoke_params)
except botocore.exceptions.ClientError as ce:
if ce.response['Error']['Code'] == 'ResourceNotFoundException':
module.fail_json(msg="Could not find Lambda to execute. Make sure "
"the ARN is correct and your profile has "
"permissions to execute this function.",
exception=traceback.format_exc(ce))
module.fail_json("Client-side error when invoking Lambda, check inputs and specific error",
exception=traceback.format_exc(ce))
except botocore.exceptions.ParamValidationError as ve:
module.fail_json(msg="Parameters to `invoke` failed to validate",
exception=traceback.format_exc(ve))
except Exception as e:
module.fail_json(msg="Unexpected failure while invoking Lambda function",
exception=traceback.format_exc(e))
results = {
'logs': '',
'status': response['StatusCode'],
'output': '',
}
if response.get('LogResult'):
try:
# logs are base64 encoded in the API response
results['logs'] = base64.b64decode(response.get('LogResult', ''))
except Exception as e:
module.fail_json(msg="Failed while decoding logs", exception=traceback.format_exc(e))
if invoke_params['InvocationType'] == 'RequestResponse':
try:
results['output'] = json.loads(response['Payload'].read())
except Exception as e:
module.fail_json(msg="Failed while decoding function return value", exception=traceback.format_exc(e))
if isinstance(results.get('output'), dict) and any(
[results['output'].get('stackTrace'), results['output'].get('errorMessage')]):
# AWS sends back stack traces and error messages when a function failed
# in a RequestResponse (synchronous) context.
template = ("Function executed, but there was an error in the Lambda function. "
"Message: {errmsg}, Type: {type}, Stack Trace: {trace}")
error_data = {
# format the stacktrace sent back as an array into a multiline string
'trace': '\n'.join(
[' '.join([
str(x) for x in line # cast line numbers to strings
]) for line in results.get('output', {}).get('stackTrace', [])]
),
'errmsg': results['output'].get('errorMessage'),
'type': results['output'].get('errorType')
}
module.fail_json(msg=template.format(**error_data), result=results)
module.exit_json(changed=True, result=results)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
tswast/google-cloud-python | refs/heads/master | bigtable/google/cloud/bigtable_admin_v2/types.py | 2 | # -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import sys
from google.api_core.protobuf_helpers import get_messages
from google.cloud.bigtable_admin_v2.proto import bigtable_instance_admin_pb2
from google.cloud.bigtable_admin_v2.proto import bigtable_table_admin_pb2
from google.cloud.bigtable_admin_v2.proto import instance_pb2
from google.cloud.bigtable_admin_v2.proto import table_pb2
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import options_pb2
from google.iam.v1 import policy_pb2
from google.longrunning import operations_pb2
from google.protobuf import any_pb2
from google.protobuf import duration_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
from google.protobuf import timestamp_pb2
from google.rpc import status_pb2
from google.type import expr_pb2
_shared_modules = [
iam_policy_pb2,
options_pb2,
policy_pb2,
operations_pb2,
any_pb2,
duration_pb2,
empty_pb2,
field_mask_pb2,
timestamp_pb2,
status_pb2,
expr_pb2,
]
_local_modules = [
bigtable_instance_admin_pb2,
bigtable_table_admin_pb2,
instance_pb2,
table_pb2,
]
names = []
for module in _shared_modules: # pragma: NO COVER
for name, message in get_messages(module).items():
setattr(sys.modules[__name__], name, message)
names.append(name)
for module in _local_modules:
for name, message in get_messages(module).items():
message.__module__ = "google.cloud.bigtable_admin_v2.types"
setattr(sys.modules[__name__], name, message)
names.append(name)
__all__ = tuple(sorted(names))
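# After import, the re-exported protobuf messages are addressable directly on
# this module, e.g. (sketch, assuming the package is installed):
#
#     from google.cloud.bigtable_admin_v2 import types
#     instance = types.Instance(display_name="my-instance")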
|
derekjchow/models | refs/heads/master | research/minigo/dualnet_test.py | 2 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for dualnet and dualnet_model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tempfile
import tensorflow as tf # pylint: disable=g-bad-import-order
import dualnet
import go
import model_params
import preprocessing
import utils_test
tf.logging.set_verbosity(tf.logging.ERROR)
class TestDualNet(utils_test.MiniGoUnitTest):
def test_train(self):
with tempfile.TemporaryDirectory() as working_dir, \
tempfile.NamedTemporaryFile() as tf_record:
preprocessing.make_dataset_from_sgf(
utils_test.BOARD_SIZE, 'example_game.sgf', tf_record.name)
dualnet.train(
working_dir, [tf_record.name], 1, model_params.DummyMiniGoParams())
def test_inference(self):
with tempfile.TemporaryDirectory() as working_dir, \
tempfile.TemporaryDirectory() as export_dir:
dualnet.bootstrap(working_dir, model_params.DummyMiniGoParams())
exported_model = os.path.join(export_dir, 'bootstrap-model')
dualnet.export_model(working_dir, exported_model)
n1 = dualnet.DualNetRunner(
exported_model, model_params.DummyMiniGoParams())
n1.run(go.Position(utils_test.BOARD_SIZE))
n2 = dualnet.DualNetRunner(
exported_model, model_params.DummyMiniGoParams())
n2.run(go.Position(utils_test.BOARD_SIZE))
if __name__ == '__main__':
tf.test.main()
|
dsanders11/django-autocomplete-light | refs/heads/master | autocomplete_light/autocomplete/choice_list.py | 2 | from __future__ import unicode_literals
from django.utils.encoding import force_text
from .list import AutocompleteList
__all__ = ('AutocompleteChoiceList',)
class AutocompleteChoiceList(AutocompleteList):
"""
Simple :py:class:`~.list.AutocompleteList` implementation which expects
:py:attr:`choices` to be a list of tuple choices in the fashion of
:py:attr:`django:django.db.models.Field.choices`.
.. py:attribute:: choices
List of choice tuples ``(value, label)`` like
:py:attr:`django:django.db.models.Field.choices`. Example::
choices = (
('v', 'Video'),
('p', 'Paper'),
)
.. py:attribute:: limit_choices
The maximum of items to suggest from :py:attr:`choices`.
.. py:attribute:: order_by
:py:meth:`~.choice_list.AutocompleteChoiceList.order_choices` will use
this against :py:attr:`choices` as an argument :py:func:`sorted`.
"""
def order_by(cls, choice):
return force_text(choice[1]).lower()
def choices_for_values(self):
"""
Return any :py:attr:`choices` that is in :py:attr:`values`.
"""
values_choices = []
for choice in self.choices:
if choice[0] in self.values:
values_choices.append(choice)
return self.order_choices(values_choices)
def choices_for_request(self):
"""
Return any :py:attr:`choices` tuple that contains the search string. It
is case insensitive and ignores spaces.
"""
requests_choices = []
q = self.request.GET.get('q', '').lower().strip()
for choice in self.choices:
m = force_text(choice[0]).lower() + force_text(choice[1]).lower()
if q in m:
requests_choices.append(choice)
return self.order_choices(requests_choices)[0:self.limit_choices]
def choice_value(self, choice):
""" Return item 0 of the choice tuple. """
return choice[0]
def choice_label(self, choice):
""" Return item 1 of the choice tuple. """
return choice[1]
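# A minimal subclass sketch using the docstring's own example choices:
#
#     class MediaAutocomplete(AutocompleteChoiceList):
#         choices = (
#             ('v', 'Video'),
#             ('p', 'Paper'),
#         )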
|
WiredProgrammers/hacking-tools | refs/heads/master | pyhashcat/pyhashcat/Hasher.py | 3 | __author__ = 'girish'
from abc import ABCMeta, abstractmethod
import hashlib
class AbstractHasher(metaclass=ABCMeta):
@abstractmethod
def getHash(self,data,asHex=False):
raise NotImplementedError
class MD5Hasher(AbstractHasher):
def getHash(self,data,asHex=False):
md5 = hashlib.md5()
md5.update(data.encode())
return md5.hexdigest() if asHex else md5.digest()
class SHA1Hasher(AbstractHasher):
def getHash(self,data,asHex=False):
sha1 = hashlib.sha1()
sha1.update(data.encode())
return sha1.hexdigest() if asHex else sha1.digest()
class SHA256Hasher(AbstractHasher):
def getHash(self,data,asHex=False):
sha256 = hashlib.sha256()
sha256.update(data.encode())
return sha256.hexdigest() if asHex else sha256.digest()
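# Usage sketch: any concrete hasher can stand in for AbstractHasher.
#
#     hasher = SHA256Hasher()
#     digest = hasher.getHash("password", asHex=True)  # hex string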
|
eeshangarg/oh-mainline | refs/heads/master | vendor/packages/beautifulsoup4/bs4/__init__.py | 417 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.
Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""
__author__ = "Leonard Richardson ([email protected])"
__version__ = "4.3.2"
__copyright__ = "Copyright (c) 2004-2013 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import os
import re
import warnings
from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
CData,
Comment,
DEFAULT_OUTPUT_ENCODING,
Declaration,
Doctype,
NavigableString,
PageElement,
ProcessingInstruction,
ResultSet,
SoupStrainer,
Tag,
)
# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
syntax_error = u'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work. You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
class BeautifulSoup(Tag):
"""
This class defines the basic interface called by the tree builders.
These methods will be called by the parser:
reset()
feed(markup)
The tree builder may call these methods from its feed() implementation:
handle_starttag(name, attrs) # See note about return value
handle_endtag(name)
handle_data(data) # Appends to the current data node
endData(containerClass=NavigableString) # Ends the current data node
No matter how complicated the underlying parser is, you should be
able to build a tree using 'start tag' events, 'end tag' events,
'data' events, and "done with data" events.
If you encounter an empty-element tag (aka a self-closing tag,
like HTML's <br> tag), call handle_starttag and then
handle_endtag.
"""
ROOT_TAG_NAME = u'[document]'
# If the end-user gives no indication which tree builder they
# want, look for one with these features.
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
def __init__(self, markup="", features=None, builder=None,
parse_only=None, from_encoding=None, **kwargs):
"""The Soup object is initialized as the 'root tag', and the
provided markup (which can be a string or a file-like object)
is fed into the underlying parser."""
if 'convertEntities' in kwargs:
warnings.warn(
"BS4 does not respect the convertEntities argument to the "
"BeautifulSoup constructor. Entities are always converted "
"to Unicode characters.")
if 'markupMassage' in kwargs:
del kwargs['markupMassage']
warnings.warn(
"BS4 does not respect the markupMassage argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for any necessary markup massage.")
if 'smartQuotesTo' in kwargs:
del kwargs['smartQuotesTo']
warnings.warn(
"BS4 does not respect the smartQuotesTo argument to the "
"BeautifulSoup constructor. Smart quotes are always converted "
"to Unicode characters.")
if 'selfClosingTags' in kwargs:
del kwargs['selfClosingTags']
warnings.warn(
"BS4 does not respect the selfClosingTags argument to the "
"BeautifulSoup constructor. The tree builder is responsible "
"for understanding self-closing tags.")
if 'isHTML' in kwargs:
del kwargs['isHTML']
warnings.warn(
"BS4 does not respect the isHTML argument to the "
"BeautifulSoup constructor. You can pass in features='html' "
"or features='xml' to get a builder capable of handling "
"one or the other.")
def deprecated_argument(old_name, new_name):
if old_name in kwargs:
warnings.warn(
'The "%s" argument to the BeautifulSoup constructor '
'has been renamed to "%s."' % (old_name, new_name))
value = kwargs[old_name]
del kwargs[old_name]
return value
return None
parse_only = parse_only or deprecated_argument(
"parseOnlyThese", "parse_only")
from_encoding = from_encoding or deprecated_argument(
"fromEncoding", "from_encoding")
if len(kwargs) > 0:
arg = kwargs.keys().pop()
raise TypeError(
"__init__() got an unexpected keyword argument '%s'" % arg)
if builder is None:
if isinstance(features, basestring):
features = [features]
if features is None or len(features) == 0:
features = self.DEFAULT_BUILDER_FEATURES
builder_class = builder_registry.lookup(*features)
if builder_class is None:
raise FeatureNotFound(
"Couldn't find a tree builder with the features you "
"requested: %s. Do you need to install a parser library?"
% ",".join(features))
builder = builder_class()
self.builder = builder
self.is_xml = builder.is_xml
self.builder.soup = self
self.parse_only = parse_only
if hasattr(markup, 'read'): # It's a file-type object.
markup = markup.read()
elif len(markup) <= 256:
# Print out warnings for a couple beginner problems
# involving passing non-markup to Beautiful Soup.
# Beautiful Soup will still parse the input as markup,
# just in case that's what the user really wants.
if (isinstance(markup, unicode)
and not os.path.supports_unicode_filenames):
possible_filename = markup.encode("utf8")
else:
possible_filename = markup
is_file = False
try:
is_file = os.path.exists(possible_filename)
except Exception, e:
# This is almost certainly a problem involving
# characters not valid in filenames on this
# system. Just let it go.
pass
if is_file:
warnings.warn(
'"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
if markup[:5] == "http:" or markup[:6] == "https:":
# TODO: This is ugly but I couldn't get it to work in
# Python 3 otherwise.
if ((isinstance(markup, bytes) and not b' ' in markup)
or (isinstance(markup, unicode) and not u' ' in markup)):
warnings.warn(
'"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
for (self.markup, self.original_encoding, self.declared_html_encoding,
self.contains_replacement_characters) in (
self.builder.prepare_markup(markup, from_encoding)):
self.reset()
try:
self._feed()
break
except ParserRejectedMarkup:
pass
# Clear out the markup and remove the builder's circular
# reference to this object.
self.markup = None
self.builder.soup = None
def _feed(self):
# Convert the document to Unicode.
self.builder.reset()
self.builder.feed(self.markup)
# Close out any unfinished strings and close all the open tags.
self.endData()
while self.currentTag.name != self.ROOT_TAG_NAME:
self.popTag()
def reset(self):
Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
self.hidden = 1
self.builder.reset()
self.current_data = []
self.currentTag = None
self.tagStack = []
self.preserve_whitespace_tag_stack = []
self.pushTag(self)
def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
"""Create a new tag associated with this soup."""
return Tag(None, self.builder, name, namespace, nsprefix, attrs)
def new_string(self, s, subclass=NavigableString):
"""Create a new NavigableString associated with this soup."""
navigable = subclass(s)
navigable.setup()
return navigable
def insert_before(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
def insert_after(self, successor):
raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
def popTag(self):
tag = self.tagStack.pop()
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
self.preserve_whitespace_tag_stack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
if tag.name in self.builder.preserve_whitespace_tags:
self.preserve_whitespace_tag_stack.append(tag)
def endData(self, containerClass=NavigableString):
if self.current_data:
current_data = u''.join(self.current_data)
# If whitespace is not preserved, and this string contains
# nothing but ASCII spaces, replace it with a single space
# or newline.
if not self.preserve_whitespace_tag_stack:
strippable = True
for i in current_data:
if i not in self.ASCII_SPACES:
strippable = False
break
if strippable:
if '\n' in current_data:
current_data = '\n'
else:
current_data = ' '
# Reset the data collector.
self.current_data = []
# Should we add this string to the tree at all?
if self.parse_only and len(self.tagStack) <= 1 and \
(not self.parse_only.text or \
not self.parse_only.search(current_data)):
return
o = containerClass(current_data)
self.object_was_parsed(o)
def object_was_parsed(self, o, parent=None, most_recent_element=None):
"""Add an object to the parse tree."""
parent = parent or self.currentTag
most_recent_element = most_recent_element or self._most_recent_element
o.setup(parent, most_recent_element)
if most_recent_element is not None:
most_recent_element.next_element = o
self._most_recent_element = o
parent.contents.append(o)
def _popToTag(self, name, nsprefix=None, inclusivePop=True):
"""Pops the tag stack up to and including the most recent
instance of the given tag. If inclusivePop is false, pops the tag
stack up to but *not* including the most recent instance of
the given tag."""
#print "Popping to %s" % name
if name == self.ROOT_TAG_NAME:
# The BeautifulSoup object itself can never be popped.
return
most_recently_popped = None
stack_size = len(self.tagStack)
for i in range(stack_size - 1, 0, -1):
t = self.tagStack[i]
if (name == t.name and nsprefix == t.prefix):
if inclusivePop:
most_recently_popped = self.popTag()
break
most_recently_popped = self.popTag()
return most_recently_popped
def handle_starttag(self, name, namespace, nsprefix, attrs):
"""Push a start tag on to the stack.
If this method returns None, the tag was rejected by the
SoupStrainer. You should proceed as if the tag had not occurred
in the document. For instance, if this was a self-closing tag,
don't call handle_endtag.
"""
# print "Start tag %s: %s" % (name, attrs)
self.endData()
if (self.parse_only and len(self.tagStack) <= 1
and (self.parse_only.text
or not self.parse_only.search_tag(name, attrs))):
return None
tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
self.currentTag, self._most_recent_element)
if tag is None:
return tag
if self._most_recent_element:
self._most_recent_element.next_element = tag
self._most_recent_element = tag
self.pushTag(tag)
return tag
def handle_endtag(self, name, nsprefix=None):
#print "End tag: " + name
self.endData()
self._popToTag(name, nsprefix)
def handle_data(self, data):
self.current_data.append(data)
def decode(self, pretty_print=False,
eventual_encoding=DEFAULT_OUTPUT_ENCODING,
formatter="minimal"):
"""Returns a string or Unicode representation of this document.
To get Unicode, pass None for encoding."""
if self.is_xml:
# Print the XML declaration
encoding_part = ''
            if eventual_encoding is not None:
encoding_part = ' encoding="%s"' % eventual_encoding
prefix = u'<?xml version="1.0"%s?>\n' % encoding_part
else:
prefix = u''
if not pretty_print:
indent_level = None
else:
indent_level = 0
return prefix + super(BeautifulSoup, self).decode(
indent_level, eventual_encoding, formatter)
# Alias to make it easier to type import: 'from bs4 import _soup'
_s = BeautifulSoup
_soup = BeautifulSoup
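# Hedged usage sketch (illustrative only, not part of the library): building
# a small tree with the factory methods defined above. "html.parser" is the
# stdlib builder; any installed builder name would do.
#
#     soup = BeautifulSoup("<p>Hello</p>", "html.parser")
#     tag = soup.new_tag("b")
#     tag.append(soup.new_string("world"))
#     soup.p.append(tag)
#     print(soup.decode(pretty_print=True))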
class BeautifulStoneSoup(BeautifulSoup):
"""Deprecated interface to an XML parser."""
def __init__(self, *args, **kwargs):
kwargs['features'] = 'xml'
warnings.warn(
'The BeautifulStoneSoup class is deprecated. Instead of using '
'it, pass features="xml" into the BeautifulSoup constructor.')
super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
class StopParsing(Exception):
pass
class FeatureNotFound(ValueError):
pass
#By default, act as an HTML pretty-printer.
if __name__ == '__main__':
import sys
soup = BeautifulSoup(sys.stdin)
    print(soup.prettify())
|
Huyuwei/tvm | refs/heads/master | topi/tests/python/test_topi_deformable_conv2d.py | 2 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
import tvm
from tvm import autotvm
import topi
import topi.testing
from tvm.contrib.pickle_memoize import memoize
from topi.util import get_const_tuple
from common import get_all_backend
def verify_deformable_conv2d_nchw(batch, in_channel, in_size, num_filter, kernel, stride, padding, dilation=1, deformable_groups=1, groups=1):
print("Workload: (%d, %d, %d, %d, %d, %d, %d, %d, %d, %d)" % (batch, in_channel, in_size,
num_filter, kernel, stride, padding, dilation, deformable_groups, groups))
A = tvm.placeholder((batch, in_channel, in_size, in_size), name='A')
out_size = (in_size - (kernel - 1) * dilation - 1 + 2 * padding) // stride + 1
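    # Worked example (sanity check, not an assertion in the test): with
    # in_size=7, kernel=3, dilation=2, padding=1, stride=1 the dilated kernel
    # spans (3 - 1) * 2 + 1 = 5 pixels, so out_size = (7 - 5 + 2) // 1 + 1 = 5.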
Offset = tvm.placeholder((batch, deformable_groups * kernel * kernel * 2, out_size, out_size), name='offset')
W = tvm.placeholder((num_filter, in_channel, kernel, kernel), name='W')
bias = tvm.placeholder((num_filter, 1, 1), name='bias')
a_shape = get_const_tuple(A.shape)
offset_shape = get_const_tuple(Offset.shape)
w_shape = get_const_tuple(W.shape)
bias_shape = get_const_tuple(bias.shape)
dtype = A.dtype
@memoize("topi.tests.test_topi_deformable_conv2d_nchw.verify_deformable_conv2d_nchw")
def get_ref_data():
a_np = np.random.uniform(size=a_shape).astype(dtype)
offset_np = np.random.randn(*offset_shape).astype(dtype)
w_np = np.random.uniform(size=w_shape).astype(dtype)
b_np = np.random.uniform(size=bias_shape).astype(dtype)
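        # bias values are generated for completeness but are not returned or
        # applied; this test exercises the convolution path only.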
c_np = topi.testing.deformable_conv2d_nchw_python(a_np, offset_np, w_np, stride, padding,
dilation, deformable_groups, groups)
return a_np, offset_np, w_np, c_np
a_np, offset_np, w_np, c_np = get_ref_data()
def check_device(device):
ctx = tvm.context(device, 0)
if not ctx.exist:
print("Skip because %s is not enabled" % device)
return
print("Running on target: %s" % device)
with tvm.target.create(device):
C = topi.nn.deformable_conv2d_nchw(A, Offset, W, stride, padding, dilation,
deformable_groups, groups, out_dtype=dtype)
s = topi.generic.schedule_deformable_conv2d_nchw([C])
a = tvm.nd.array(a_np, ctx)
offset = tvm.nd.array(offset_np, ctx)
w = tvm.nd.array(w_np, ctx)
c = tvm.nd.empty(c_np.shape, dtype=c_np.dtype, ctx=ctx)
func = tvm.build(s, [A, Offset, W, C], device)
func(a, offset, w, c)
tvm.testing.assert_allclose(c.asnumpy(), c_np, rtol=1e-5)
for device in ['llvm', 'cuda']:
check_device(device)
def test_deformable_conv2d_nchw():
verify_deformable_conv2d_nchw(1, 16, 7, 16, 1, 1, 0, deformable_groups=4)
verify_deformable_conv2d_nchw(1, 16, 7, 16, 3, 1, 1, dilation=2, deformable_groups=4)
verify_deformable_conv2d_nchw(1, 16, 7, 16, 3, 1, 2, dilation=2)
if __name__ == "__main__":
test_deformable_conv2d_nchw()
|
hajjboy95/crazyflie-clients-python | refs/heads/develop | lib/cfclient/ui/tabs/PlotTab.py | 28 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This tab plots different logging data defined by configurations that has been
pre-configured.
"""
__author__ = 'Bitcraze AB'
__all__ = ['PlotTab']
import glob
import json
import logging
import os
import sys
logger = logging.getLogger(__name__)
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtCore import pyqtSlot, pyqtSignal, QThread, Qt
from PyQt4.QtGui import QMessageBox
from PyQt4.QtGui import QApplication, QStyledItemDelegate, QAbstractItemView
from PyQt4.QtCore import QAbstractItemModel, QModelIndex, QString, QVariant
from pprint import pprint
import datetime
from cfclient.ui.widgets.plotwidget import PlotWidget
from cflib.crazyflie.log import Log
from cfclient.ui.tab import Tab
plot_tab_class = uic.loadUiType(sys.path[0] +
"/cfclient/ui/tabs/plotTab.ui")[0]
class LogConfigModel(QAbstractItemModel):
"""Model for log configurations in the ComboBox"""
def __init__(self, parent=None):
super(LogConfigModel, self).__init__(parent)
self._nodes = []
def add_block(self, block):
self._nodes.append(block)
self.layoutChanged.emit()
def parent(self, index):
"""Re-implemented method to get the parent of the given index"""
return QModelIndex()
def remove_block(self, block):
"""Remove a block from the view"""
raise NotImplementedError()
def columnCount(self, parent):
"""Re-implemented method to get the number of columns"""
return 1
def rowCount(self, parent):
"""Re-implemented method to get the number of rows for a given index"""
        if parent.isValid():
            return 0
        return len(self._nodes)
def index(self, row, column, parent):
"""Re-implemented method to get the index for a specified
row/column/parent combination"""
if not self._nodes:
return QModelIndex()
node = parent.internalPointer()
if not node:
index = self.createIndex(row, column, self._nodes[row])
return index
else:
return self.createIndex(row, column, node.get_child(row))
def data(self, index, role):
"""Re-implemented method to get the data for a given index and role"""
if not index.isValid() or not 0 <= index.row() < len(self._nodes):
return QVariant()
if role == Qt.DisplayRole:
return self._nodes[index.row()].name
return QVariant()
def reset(self):
"""Reset the model"""
self._nodes = []
self.layoutChanged.emit()
def get_config(self, i):
return self._nodes[i]
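# Hedged usage sketch (illustrative only; `log_config` stands in for a cflib
# LogConfig instance): the model plugs straight into a Qt view or combo box.
#
#     model = LogConfigModel()
#     combo = QtGui.QComboBox()
#     combo.setModel(model)
#     model.add_block(log_config)  # appears as a new selectable entry
#     lg = model.get_config(combo.currentIndex())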
class PlotTab(Tab, plot_tab_class):
"""Tab for plotting logging data"""
_log_data_signal = pyqtSignal(int, object, object)
_log_error_signal = pyqtSignal(object, str)
_disconnected_signal = pyqtSignal(str)
_connected_signal = pyqtSignal(str)
colors = ['g', 'b', 'm', 'r', 'y', 'c']
def __init__(self, tabWidget, helper, *args):
super(PlotTab, self).__init__(*args)
self.setupUi(self)
self.tabName = "Plotter"
self.menuName = "Plotter"
self._log_error_signal.connect(self._logging_error)
self._plot = PlotWidget(fps=30)
# Check if we could find the PyQtImport. If not, then
# set this tab as disabled
self.enabled = self._plot.can_enable
self._model = LogConfigModel()
self.dataSelector.setModel(self._model)
self._log_data_signal.connect(self._log_data_received)
self.tabWidget = tabWidget
self.helper = helper
self.plotLayout.addWidget(self._plot)
# Connect external signals if we can use the tab
if self.enabled:
self._disconnected_signal.connect(self._disconnected)
self.helper.cf.disconnected.add_callback(
self._disconnected_signal.emit)
self._connected_signal.connect(self._connected)
self.helper.cf.connected.add_callback(
self._connected_signal.emit)
self.helper.cf.log.block_added_cb.add_callback(self._config_added)
self.dataSelector.currentIndexChanged.connect(
self._selection_changed)
self._previous_config = None
self._started_previous = False
def _connected(self, link_uri):
"""Callback when the Crazyflie has been connected"""
self._plot.removeAllDatasets()
self._plot.set_title("")
def _disconnected(self, link_uri):
"""Callback for when the Crazyflie has been disconnected"""
self._model.reset()
self.dataSelector.setCurrentIndex(-1)
self._previous_config = None
self._started_previous = False
def _log_data_signal_wrapper(self, ts, data, logconf):
"""Wrapper for signal"""
# For some reason the *.emit functions are not
# the same over time (?!) so they cannot be registered and then
# removed as callbacks.
self._log_data_signal.emit(ts, data, logconf)
def _log_error_signal_wrapper(self, config, msg):
"""Wrapper for signal"""
# For some reason the *.emit functions are not
# the same over time (?!) so they cannot be registered and then
# removed as callbacks.
self._log_error_signal.emit(config, msg)
def _selection_changed(self, i):
"""Callback from ComboBox when a new item has been selected"""
# Check if we have disconnected
if i < 0:
return
# First check if we need to stop the old block
if self._started_previous and self._previous_config:
logger.debug("Should stop config [%s], stopping!",
self._previous_config.name)
self._previous_config.delete()
# Remove our callback for the previous config
if self._previous_config:
self._previous_config.data_received_cb.remove_callback(
self._log_data_signal_wrapper)
self._previous_config.error_cb.remove_callback(
self._log_error_signal_wrapper)
lg = self._model.get_config(i)
if not lg.started:
logger.debug("Config [%s] not started, starting!", lg.name)
self._started_previous = True
lg.start()
else:
self._started_previous = False
self._plot.removeAllDatasets()
color_selector = 0
self._plot.set_title(lg.name)
for d in lg.variables:
self._plot.add_curve(d.name,
self.colors[color_selector % len(self.colors)])
color_selector += 1
lg.data_received_cb.add_callback(self._log_data_signal_wrapper)
lg.error_cb.add_callback(self._log_error_signal_wrapper)
self._previous_config = lg
def _config_added(self, logconfig):
"""Callback from the log layer when a new config has been added"""
logger.debug("Callback for new config [%s]", logconfig.name)
self._model.add_block(logconfig)
def _logging_error(self, log_conf, msg):
"""Callback from the log layer when an error occurs"""
QMessageBox.about(self, "Plot error", "Error when starting log config"
" [%s]: %s" % (log_conf.name, msg))
def _log_data_received(self, timestamp, data, logconf):
"""Callback when the log layer receives new data"""
# Check so that the incoming data belongs to what we are currently
# logging
if self._previous_config:
if self._previous_config.name == logconf.name:
self._plot.add_data(data, timestamp)
|
sadmansk/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/third_party/pytest/src/_pytest/config/argparsing.py | 32 | import six
import warnings
import argparse
FILE_OR_DIR = "file_or_dir"
class Parser(object):
""" Parser for command line arguments and ini-file values.
:ivar extra_info: dict of generic param -> value to display in case
there's an error processing the command line arguments.
"""
def __init__(self, usage=None, processopt=None):
self._anonymous = OptionGroup("custom options", parser=self)
self._groups = []
self._processopt = processopt
self._usage = usage
self._inidict = {}
self._ininames = []
self.extra_info = {}
def processoption(self, option):
if self._processopt:
if option.dest:
self._processopt(option)
def getgroup(self, name, description="", after=None):
""" get (or create) a named option Group.
:name: name of the option group.
:description: long description for --help output.
:after: name of other group, used for ordering --help output.
The returned group object has an ``addoption`` method with the same
signature as :py:func:`parser.addoption
<_pytest.config.Parser.addoption>` but will be shown in the
        respective group in the output of ``pytest --help``.
"""
for group in self._groups:
if group.name == name:
return group
group = OptionGroup(name, description, parser=self)
i = 0
for i, grp in enumerate(self._groups):
if grp.name == after:
break
self._groups.insert(i + 1, group)
return group
def addoption(self, *opts, **attrs):
""" register a command line option.
:opts: option names, can be short or long options.
        :attrs: same attributes which the ``add_argument()`` function of the
`argparse library
<http://docs.python.org/2/library/argparse.html>`_
accepts.
After command line parsing options are available on the pytest config
object via ``config.option.NAME`` where ``NAME`` is usually set
by passing a ``dest`` attribute, for example
``addoption("--long", dest="NAME", ...)``.
"""
self._anonymous.addoption(*opts, **attrs)
def parse(self, args, namespace=None):
from _pytest._argcomplete import try_argcomplete
self.optparser = self._getparser()
try_argcomplete(self.optparser)
return self.optparser.parse_args([str(x) for x in args], namespace=namespace)
def _getparser(self):
from _pytest._argcomplete import filescompleter
optparser = MyOptionParser(self, self.extra_info)
groups = self._groups + [self._anonymous]
for group in groups:
if group.options:
desc = group.description or group.name
arggroup = optparser.add_argument_group(desc)
for option in group.options:
n = option.names()
a = option.attrs()
arggroup.add_argument(*n, **a)
# bash like autocompletion for dirs (appending '/')
optparser.add_argument(FILE_OR_DIR, nargs="*").completer = filescompleter
return optparser
def parse_setoption(self, args, option, namespace=None):
parsedoption = self.parse(args, namespace=namespace)
for name, value in parsedoption.__dict__.items():
setattr(option, name, value)
return getattr(parsedoption, FILE_OR_DIR)
def parse_known_args(self, args, namespace=None):
"""parses and returns a namespace object with known arguments at this
point.
"""
return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
def parse_known_and_unknown_args(self, args, namespace=None):
"""parses and returns a namespace object with known arguments, and
the remaining arguments unknown at this point.
"""
optparser = self._getparser()
args = [str(x) for x in args]
return optparser.parse_known_args(args, namespace=namespace)
def addini(self, name, help, type=None, default=None):
""" register an ini-file option.
:name: name of the ini-variable
:type: type of the variable, can be ``pathlist``, ``args``, ``linelist``
or ``bool``.
:default: default value if no ini-file option exists but is queried.
The value of ini-variables can be retrieved via a call to
:py:func:`config.getini(name) <_pytest.config.Config.getini>`.
"""
assert type in (None, "pathlist", "args", "linelist", "bool")
self._inidict[name] = (help, type, default)
self._ininames.append(name)
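# Hedged usage sketch (standalone, not pytest's own bootstrapping): registering
# an option and parsing a command line with this Parser. `file_or_dir` is the
# positional bucket added by _getparser() above.
#
#     parser = Parser(usage="%(prog)s [options] [file_or_dir] [...]")
#     parser.addoption("--count", dest="count", type=int, default=1,
#                      help="number of repetitions (default: %(default)s)")
#     ns = parser.parse(["--count", "3", "tests/"])
#     assert ns.count == 3 and ns.file_or_dir == ["tests/"]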
class ArgumentError(Exception):
"""
Raised if an Argument instance is created with invalid or
inconsistent arguments.
"""
def __init__(self, msg, option):
self.msg = msg
self.option_id = str(option)
def __str__(self):
if self.option_id:
return "option %s: %s" % (self.option_id, self.msg)
else:
return self.msg
class Argument(object):
"""class that mimics the necessary behaviour of optparse.Option
its currently a least effort implementation
and ignoring choices and integer prefixes
https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
"""
_typ_map = {"int": int, "string": str, "float": float, "complex": complex}
def __init__(self, *names, **attrs):
"""store parms in private vars for use in add_argument"""
self._attrs = attrs
self._short_opts = []
self._long_opts = []
self.dest = attrs.get("dest")
if "%default" in (attrs.get("help") or ""):
warnings.warn(
'pytest now uses argparse. "%default" should be'
' changed to "%(default)s" ',
DeprecationWarning,
stacklevel=3,
)
try:
typ = attrs["type"]
except KeyError:
pass
else:
# this might raise a keyerror as well, don't want to catch that
if isinstance(typ, six.string_types):
if typ == "choice":
warnings.warn(
"type argument to addoption() is a string %r."
" For parsearg this is optional and when supplied"
" should be a type."
" (options: %s)" % (typ, names),
DeprecationWarning,
stacklevel=3,
)
# argparse expects a type here take it from
# the type of the first element
attrs["type"] = type(attrs["choices"][0])
else:
warnings.warn(
"type argument to addoption() is a string %r."
" For parsearg this should be a type."
" (options: %s)" % (typ, names),
DeprecationWarning,
stacklevel=3,
)
attrs["type"] = Argument._typ_map[typ]
# used in test_parseopt -> test_parse_defaultgetter
self.type = attrs["type"]
else:
self.type = typ
try:
# attribute existence is tested in Config._processopt
self.default = attrs["default"]
except KeyError:
pass
self._set_opt_strings(names)
if not self.dest:
if self._long_opts:
self.dest = self._long_opts[0][2:].replace("-", "_")
else:
try:
self.dest = self._short_opts[0][1:]
except IndexError:
raise ArgumentError("need a long or short option", self)
def names(self):
return self._short_opts + self._long_opts
def attrs(self):
# update any attributes set by processopt
attrs = "default dest help".split()
if self.dest:
attrs.append(self.dest)
for attr in attrs:
try:
self._attrs[attr] = getattr(self, attr)
except AttributeError:
pass
if self._attrs.get("help"):
a = self._attrs["help"]
a = a.replace("%default", "%(default)s")
# a = a.replace('%prog', '%(prog)s')
self._attrs["help"] = a
return self._attrs
def _set_opt_strings(self, opts):
"""directly from optparse
might not be necessary as this is passed to argparse later on"""
for opt in opts:
if len(opt) < 2:
raise ArgumentError(
"invalid option string %r: "
"must be at least two characters long" % opt,
self,
)
elif len(opt) == 2:
if not (opt[0] == "-" and opt[1] != "-"):
raise ArgumentError(
"invalid short option string %r: "
"must be of the form -x, (x any non-dash char)" % opt,
self,
)
self._short_opts.append(opt)
else:
if not (opt[0:2] == "--" and opt[2] != "-"):
raise ArgumentError(
"invalid long option string %r: "
"must start with --, followed by non-dash" % opt,
self,
)
self._long_opts.append(opt)
def __repr__(self):
args = []
if self._short_opts:
args += ["_short_opts: " + repr(self._short_opts)]
if self._long_opts:
args += ["_long_opts: " + repr(self._long_opts)]
args += ["dest: " + repr(self.dest)]
if hasattr(self, "type"):
args += ["type: " + repr(self.type)]
if hasattr(self, "default"):
args += ["default: " + repr(self.default)]
return "Argument({})".format(", ".join(args))
class OptionGroup(object):
def __init__(self, name, description="", parser=None):
self.name = name
self.description = description
self.options = []
self.parser = parser
def addoption(self, *optnames, **attrs):
""" add an option to this group.
if a shortened version of a long option is specified it will
be suppressed in the help. addoption('--twowords', '--two-words')
results in help showing '--two-words' only, but --twowords gets
accepted **and** the automatic destination is in args.twowords
"""
conflict = set(optnames).intersection(
name for opt in self.options for name in opt.names()
)
if conflict:
raise ValueError("option names %s already added" % conflict)
option = Argument(*optnames, **attrs)
self._addoption_instance(option, shortupper=False)
def _addoption(self, *optnames, **attrs):
option = Argument(*optnames, **attrs)
self._addoption_instance(option, shortupper=True)
def _addoption_instance(self, option, shortupper=False):
if not shortupper:
for opt in option._short_opts:
if opt[0] == "-" and opt[1].islower():
raise ValueError("lowercase shortoptions reserved")
if self.parser:
self.parser.processoption(option)
self.options.append(option)
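# Hedged sketch: groups are usually obtained via Parser.getgroup() so that
# related options share a section in --help output.
#
#     group = parser.getgroup("reporting", description="reporting options")
#     group.addoption("--report-level", dest="report_level", type=int, default=0)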
class MyOptionParser(argparse.ArgumentParser):
def __init__(self, parser, extra_info=None):
if not extra_info:
extra_info = {}
self._parser = parser
argparse.ArgumentParser.__init__(
self,
usage=parser._usage,
add_help=False,
formatter_class=DropShorterLongHelpFormatter,
)
# extra_info is a dict of (param -> value) to display if there's
        # a usage error to provide more contextual information to the user
self.extra_info = extra_info
def parse_args(self, args=None, namespace=None):
"""allow splitting of positional arguments"""
args, argv = self.parse_known_args(args, namespace)
if argv:
for arg in argv:
if arg and arg[0] == "-":
lines = ["unrecognized arguments: %s" % (" ".join(argv))]
for k, v in sorted(self.extra_info.items()):
lines.append(" %s: %s" % (k, v))
self.error("\n".join(lines))
getattr(args, FILE_OR_DIR).extend(argv)
return args
class DropShorterLongHelpFormatter(argparse.HelpFormatter):
"""shorten help for long options that differ only in extra hyphens
- collapse **long** options that are the same except for extra hyphens
    - special action attribute map_long_option allows suppressing additional
long options
- shortcut if there are only two options and one of them is a short one
- cache result on action object as this is called at least 2 times
"""
def _format_action_invocation(self, action):
orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
if orgstr and orgstr[0] != "-": # only optional arguments
return orgstr
res = getattr(action, "_formatted_action_invocation", None)
if res:
return res
options = orgstr.split(", ")
if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
# a shortcut for '-h, --help' or '--abc', '-a'
action._formatted_action_invocation = orgstr
return orgstr
return_list = []
option_map = getattr(action, "map_long_option", {})
if option_map is None:
option_map = {}
short_long = {}
for option in options:
if len(option) == 2 or option[2] == " ":
continue
if not option.startswith("--"):
raise ArgumentError(
'long optional argument without "--": [%s]' % (option), self
)
xxoption = option[2:]
if xxoption.split()[0] not in option_map:
shortened = xxoption.replace("-", "")
if (
shortened not in short_long
or len(short_long[shortened]) < len(xxoption)
):
short_long[shortened] = xxoption
# now short_long has been filled out to the longest with dashes
# **and** we keep the right option ordering from add_argument
for option in options:
if len(option) == 2 or option[2] == " ":
return_list.append(option)
if option[2:] == short_long.get(option.replace("-", "")):
return_list.append(option.replace(" ", "=", 1))
action._formatted_action_invocation = ", ".join(return_list)
return action._formatted_action_invocation
|
elena/django | refs/heads/master | django/test/client.py | 4 | import json
import mimetypes
import os
import sys
from copy import copy
from functools import partial
from http import HTTPStatus
from importlib import import_module
from io import BytesIO
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
from asgiref.sync import sync_to_async
from django.conf import settings
from django.core.handlers.asgi import ASGIRequest
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import (
got_request_exception, request_finished, request_started,
)
from django.db import close_old_connections
from django.http import HttpRequest, QueryDict, SimpleCookie
from django.test import signals
from django.test.utils import ContextList
from django.urls import resolve
from django.utils.encoding import force_bytes
from django.utils.functional import SimpleLazyObject
from django.utils.http import urlencode
from django.utils.itercompat import is_iterable
from django.utils.regex_helper import _lazy_re_compile
__all__ = (
'AsyncClient', 'AsyncRequestFactory', 'Client', 'RedirectCycleError',
'RequestFactory', 'encode_file', 'encode_multipart',
)
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
CONTENT_TYPE_RE = _lazy_re_compile(r'.*; charset=([\w\d-]+);?')
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
JSON_CONTENT_TYPE_RE = _lazy_re_compile(r'^application\/(.+\+)?json')
class RedirectCycleError(Exception):
"""The test client has been asked to follow a redirect loop."""
def __init__(self, message, last_response):
super().__init__(message)
self.last_response = last_response
self.redirect_chain = last_response.redirect_chain
class FakePayload:
"""
A wrapper around BytesIO that restricts what can be read since data from
the network can't be sought and cannot be read outside of its content
length. This makes sure that views can't do anything under the test client
that wouldn't work in real life.
"""
def __init__(self, content=None):
self.__content = BytesIO()
self.__len = 0
self.read_started = False
if content is not None:
self.write(content)
def __len__(self):
return self.__len
def read(self, num_bytes=None):
if not self.read_started:
self.__content.seek(0)
self.read_started = True
if num_bytes is None:
num_bytes = self.__len or 0
assert self.__len >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
content = self.__content.read(num_bytes)
self.__len -= num_bytes
return content
def write(self, content):
if self.read_started:
raise ValueError("Unable to write a payload after it's been read")
content = force_bytes(content)
self.__content.write(content)
self.__len += len(content)
def closing_iterator_wrapper(iterable, close):
try:
yield from iterable
finally:
request_finished.disconnect(close_old_connections)
close() # will fire request_finished
request_finished.connect(close_old_connections)
def conditional_content_removal(request, response):
"""
Simulate the behavior of most Web servers by removing the content of
responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
compliance with RFC 7230, section 3.3.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = b''
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = b''
return response
class ClientHandler(BaseHandler):
"""
    An HTTP handler that can be used for testing purposes. Use the WSGI
interface to compose requests, but return the raw HttpResponse object with
the originating WSGIRequest attached to its ``wsgi_request`` attribute.
"""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
def __call__(self, environ):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware()
request_started.disconnect(close_old_connections)
request_started.send(sender=self.__class__, environ=environ)
request_started.connect(close_old_connections)
request = WSGIRequest(environ)
# sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably
# required for backwards compatibility with external tests against
# admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = self.get_response(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating request to the response so that it could be
# later retrieved.
response.wsgi_request = request
# Emulate a WSGI server by calling the close method on completion.
if response.streaming:
response.streaming_content = closing_iterator_wrapper(
response.streaming_content, response.close)
else:
request_finished.disconnect(close_old_connections)
response.close() # will fire request_finished
request_finished.connect(close_old_connections)
return response
class AsyncClientHandler(BaseHandler):
"""An async version of ClientHandler."""
def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
self.enforce_csrf_checks = enforce_csrf_checks
super().__init__(*args, **kwargs)
async def __call__(self, scope):
# Set up middleware if needed. We couldn't do this earlier, because
# settings weren't available.
if self._middleware_chain is None:
self.load_middleware(is_async=True)
# Extract body file from the scope, if provided.
if '_body_file' in scope:
body_file = scope.pop('_body_file')
else:
body_file = FakePayload('')
request_started.disconnect(close_old_connections)
await sync_to_async(request_started.send, thread_sensitive=False)(sender=self.__class__, scope=scope)
request_started.connect(close_old_connections)
request = ASGIRequest(scope, body_file)
# Sneaky little hack so that we can easily get round
# CsrfViewMiddleware. This makes life easier, and is probably required
# for backwards compatibility with external tests against admin views.
request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
# Request goes through middleware.
response = await self.get_response_async(request)
# Simulate behaviors of most Web servers.
conditional_content_removal(request, response)
# Attach the originating ASGI request to the response so that it could
# be later retrieved.
response.asgi_request = request
# Emulate a server by calling the close method on completion.
if response.streaming:
response.streaming_content = await sync_to_async(closing_iterator_wrapper, thread_sensitive=False)(
response.streaming_content,
response.close,
)
else:
request_finished.disconnect(close_old_connections)
# Will fire request_finished.
await sync_to_async(response.close, thread_sensitive=False)()
request_finished.connect(close_old_connections)
return response
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
"""
Store templates and contexts that are rendered.
The context is copied so that it is an accurate representation at the time
of rendering.
"""
store.setdefault('templates', []).append(template)
if 'context' not in store:
store['context'] = ContextList()
store['context'].append(copy(context))
def encode_multipart(boundary, data):
"""
Encode multipart POST data from a dictionary of form values.
The key will be used as the form data name; the value will be transmitted
as content. If the value is a file, the contents of the file will be sent
as an application/octet-stream; otherwise, str(value) will be sent.
"""
lines = []
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# Not by any means perfect, but good enough for our purposes.
def is_file(thing):
return hasattr(thing, "read") and callable(thing.read)
# Each bit of the multipart form data could be either a form value or a
# file, or a *list* of form values and/or files. Remember that HTTP field
# names can be duplicated!
for (key, value) in data.items():
if value is None:
raise TypeError(
"Cannot encode None for key '%s' as POST data. Did you mean "
"to pass an empty string or omit the value?" % key
)
elif is_file(value):
lines.extend(encode_file(boundary, key, value))
elif not isinstance(value, str) and is_iterable(value):
for item in value:
if is_file(item):
lines.extend(encode_file(boundary, key, item))
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
item
])
else:
lines.extend(to_bytes(val) for val in [
'--%s' % boundary,
'Content-Disposition: form-data; name="%s"' % key,
'',
value
])
lines.extend([
to_bytes('--%s--' % boundary),
b'',
])
return b'\r\n'.join(lines)
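# Hedged usage sketch: encoding a plain form payload by hand. BOUNDARY and
# MULTIPART_CONTENT are the module-level constants defined above.
#
#     body = encode_multipart(BOUNDARY, {'name': 'alice', 'tags': ['a', 'b']})
#     # `body` is bytes, suitable for a request whose CONTENT_TYPE is set to
#     # MULTIPART_CONTENT.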
def encode_file(boundary, key, file):
def to_bytes(s):
return force_bytes(s, settings.DEFAULT_CHARSET)
# file.name might not be a string. For example, it's an int for
# tempfile.TemporaryFile().
file_has_string_name = hasattr(file, 'name') and isinstance(file.name, str)
filename = os.path.basename(file.name) if file_has_string_name else ''
if hasattr(file, 'content_type'):
content_type = file.content_type
elif filename:
content_type = mimetypes.guess_type(filename)[0]
else:
content_type = None
if content_type is None:
content_type = 'application/octet-stream'
filename = filename or key
return [
to_bytes('--%s' % boundary),
to_bytes('Content-Disposition: form-data; name="%s"; filename="%s"'
% (key, filename)),
to_bytes('Content-Type: %s' % content_type),
b'',
to_bytes(file.read())
]
class RequestFactory:
"""
Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults):
self.json_encoder = json_encoder
self.defaults = defaults
self.cookies = SimpleCookie()
self.errors = BytesIO()
def _base_environ(self, **request):
"""
The base environment for a request.
"""
# This is a minimal valid WSGI environ dictionary, plus:
# - HTTP_COOKIE: for cookie support,
# - REMOTE_ADDR: often useful, see #8551.
# See https://www.python.org/dev/peps/pep-3333/#environ-variables
return {
'HTTP_COOKIE': '; '.join(sorted(
'%s=%s' % (morsel.key, morsel.coded_value)
for morsel in self.cookies.values()
)),
'PATH_INFO': '/',
'REMOTE_ADDR': '127.0.0.1',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': '80',
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.version': (1, 0),
'wsgi.url_scheme': 'http',
'wsgi.input': FakePayload(b''),
'wsgi.errors': self.errors,
'wsgi.multiprocess': True,
'wsgi.multithread': False,
'wsgi.run_once': False,
**self.defaults,
**request,
}
def request(self, **request):
"Construct a generic request object."
return WSGIRequest(self._base_environ(**request))
def _encode_data(self, data, content_type):
if content_type is MULTIPART_CONTENT:
return encode_multipart(BOUNDARY, data)
else:
# Encode the content so that the byte representation is correct.
match = CONTENT_TYPE_RE.match(content_type)
if match:
charset = match[1]
else:
charset = settings.DEFAULT_CHARSET
return force_bytes(data, encoding=charset)
def _encode_json(self, data, content_type):
"""
Return encoded JSON if data is a dict, list, or tuple and content_type
is application/json.
"""
should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple))
return json.dumps(data, cls=self.json_encoder) if should_encode else data
def _get_path(self, parsed):
path = parsed.path
# If there are parameters, add them
if parsed.params:
path += ";" + parsed.params
path = unquote_to_bytes(path)
# Replace the behavior where non-ASCII values in the WSGI environ are
# arbitrarily decoded with ISO-8859-1.
# Refs comment in `get_bytes_from_wsgi()`.
return path.decode('iso-8859-1')
def get(self, path, data=None, secure=False, **extra):
"""Construct a GET request."""
data = {} if data is None else data
return self.generic('GET', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
secure=False, **extra):
"""Construct a POST request."""
data = self._encode_json({} if data is None else data, content_type)
post_data = self._encode_data(data, content_type)
return self.generic('POST', path, post_data, content_type,
secure=secure, **extra)
def head(self, path, data=None, secure=False, **extra):
"""Construct a HEAD request."""
data = {} if data is None else data
return self.generic('HEAD', path, secure=secure, **{
'QUERY_STRING': urlencode(data, doseq=True),
**extra,
})
def trace(self, path, secure=False, **extra):
"""Construct a TRACE request."""
return self.generic('TRACE', path, secure=secure, **extra)
def options(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"Construct an OPTIONS request."
return self.generic('OPTIONS', path, data, content_type,
secure=secure, **extra)
def put(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PUT request."""
data = self._encode_json(data, content_type)
return self.generic('PUT', path, data, content_type,
secure=secure, **extra)
def patch(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a PATCH request."""
data = self._encode_json(data, content_type)
return self.generic('PATCH', path, data, content_type,
secure=secure, **extra)
def delete(self, path, data='', content_type='application/octet-stream',
secure=False, **extra):
"""Construct a DELETE request."""
data = self._encode_json(data, content_type)
return self.generic('DELETE', path, data, content_type,
secure=secure, **extra)
def generic(self, method, path, data='',
content_type='application/octet-stream', secure=False,
**extra):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy
data = force_bytes(data, settings.DEFAULT_CHARSET)
r = {
'PATH_INFO': self._get_path(parsed),
'REQUEST_METHOD': method,
'SERVER_PORT': '443' if secure else '80',
'wsgi.url_scheme': 'https' if secure else 'http',
}
if data:
r.update({
'CONTENT_LENGTH': str(len(data)),
'CONTENT_TYPE': content_type,
'wsgi.input': FakePayload(data),
})
r.update(extra)
# If QUERY_STRING is absent or empty, we want to extract it from the URL.
if not r.get('QUERY_STRING'):
# WSGI requires latin-1 encoded strings. See get_path_info().
query_string = parsed[4].encode().decode('iso-8859-1')
r['QUERY_STRING'] = query_string
return self.request(**r)
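# Hedged usage sketch: in addition to the form-encoded example in the class
# docstring above, a dict posted with a JSON content type is serialized via
# _encode_json() using the configured json_encoder.
#
#     rf = RequestFactory()
#     request = rf.post('/api/items/', {'name': 'pen'},
#                       content_type='application/json')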
class AsyncRequestFactory(RequestFactory):
"""
Class that lets you create mock ASGI-like Request objects for use in
testing. Usage:
rf = AsyncRequestFactory()
get_request = await rf.get('/hello/')
post_request = await rf.post('/submit/', {'foo': 'bar'})
Once you have a request object you can pass it to any view function,
including synchronous ones. The reason we have a separate class here is:
a) this makes ASGIRequest subclasses, and
b) AsyncTestClient can subclass it.
"""
def _base_scope(self, **request):
"""The base scope for a request."""
# This is a minimal valid ASGI scope, plus:
# - headers['cookie'] for cookie support,
# - 'client' often useful, see #8551.
scope = {
'asgi': {'version': '3.0'},
'type': 'http',
'http_version': '1.1',
'client': ['127.0.0.1', 0],
'server': ('testserver', '80'),
'scheme': 'http',
'method': 'GET',
'headers': [],
**self.defaults,
**request,
}
scope['headers'].append((
b'cookie',
b'; '.join(sorted(
('%s=%s' % (morsel.key, morsel.coded_value)).encode('ascii')
for morsel in self.cookies.values()
)),
))
return scope
def request(self, **request):
"""Construct a generic request object."""
# This is synchronous, which means all methods on this class are.
# AsyncClient, however, has an async request function, which makes all
# its methods async.
if '_body_file' in request:
body_file = request.pop('_body_file')
else:
body_file = FakePayload('')
return ASGIRequest(self._base_scope(**request), body_file)
def generic(
self, method, path, data='', content_type='application/octet-stream',
secure=False, **extra,
):
"""Construct an arbitrary HTTP request."""
parsed = urlparse(str(path)) # path can be lazy.
data = force_bytes(data, settings.DEFAULT_CHARSET)
s = {
'method': method,
'path': self._get_path(parsed),
'server': ('127.0.0.1', '443' if secure else '80'),
'scheme': 'https' if secure else 'http',
'headers': [(b'host', b'testserver')],
}
if data:
s['headers'].extend([
(b'content-length', str(len(data)).encode('ascii')),
(b'content-type', content_type.encode('ascii')),
])
s['_body_file'] = FakePayload(data)
follow = extra.pop('follow', None)
if follow is not None:
s['follow'] = follow
s['headers'] += [
(key.lower().encode('ascii'), value.encode('latin1'))
for key, value in extra.items()
]
# If QUERY_STRING is absent or empty, we want to extract it from the
# URL.
if not s.get('query_string'):
s['query_string'] = parsed[4]
return self.request(**s)
class ClientMixin:
"""
Mixin with common methods between Client and AsyncClient.
"""
def store_exc_info(self, **kwargs):
"""Store exceptions when they are generated by a view."""
self.exc_info = sys.exc_info()
def check_exception(self, response):
"""
Look for a signaled exception, clear the current context exception
data, re-raise the signaled exception, and clear the signaled exception
from the local cache.
"""
response.exc_info = self.exc_info
if self.exc_info:
_, exc_value, _ = self.exc_info
self.exc_info = None
if self.raise_request_exception:
raise exc_value
@property
def session(self):
"""Return the current session variables."""
engine = import_module(settings.SESSION_ENGINE)
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
if cookie:
return engine.SessionStore(cookie.value)
session = engine.SessionStore()
session.save()
self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
return session
def login(self, **credentials):
"""
Set the Factory to appear as if it has successfully logged into a site.
Return True if login is possible or False if the provided credentials
are incorrect.
"""
from django.contrib.auth import authenticate
user = authenticate(**credentials)
if user:
self._login(user)
return True
return False
def force_login(self, user, backend=None):
def get_backend():
from django.contrib.auth import load_backend
for backend_path in settings.AUTHENTICATION_BACKENDS:
backend = load_backend(backend_path)
if hasattr(backend, 'get_user'):
return backend_path
if backend is None:
backend = get_backend()
user.backend = backend
self._login(user, backend)
def _login(self, user, backend=None):
from django.contrib.auth import login
# Create a fake request to store login details.
request = HttpRequest()
if self.session:
request.session = self.session
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
login(request, user, backend)
# Save the session values.
request.session.save()
# Set the cookie to represent the session.
session_cookie = settings.SESSION_COOKIE_NAME
self.cookies[session_cookie] = request.session.session_key
cookie_data = {
'max-age': None,
'path': '/',
'domain': settings.SESSION_COOKIE_DOMAIN,
'secure': settings.SESSION_COOKIE_SECURE or None,
'expires': None,
}
self.cookies[session_cookie].update(cookie_data)
def logout(self):
"""Log out the user by removing the cookies and session object."""
from django.contrib.auth import get_user, logout
request = HttpRequest()
if self.session:
request.session = self.session
request.user = get_user(request)
else:
engine = import_module(settings.SESSION_ENGINE)
request.session = engine.SessionStore()
logout(request)
self.cookies = SimpleCookie()
def _parse_json(self, response, **extra):
if not hasattr(response, '_json'):
if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')):
raise ValueError(
'Content-Type header is "%s", not "application/json"'
% response.get('Content-Type')
)
response._json = json.loads(response.content.decode(response.charset), **extra)
return response._json
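# Hedged usage sketch for the mixin's auth helpers, via the Client subclass
# defined below (`user` stands in for an existing auth User instance):
#
#     client = Client()
#     ok = client.login(username='alice', password='secret')  # True on success
#     client.force_login(user)  # skip authentication entirely
#     client.logout()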
class Client(ClientMixin, RequestFactory):
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
super().__init__(**defaults)
self.handler = ClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
def request(self, **request):
"""
The master request method. Compose the environment dictionary and pass
to the handler, return the result of the handler. Assume defaults for
the query environment, which can be overridden using the arguments to
the request.
"""
environ = self._base_environ(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = "template-render-%s" % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = "request-exception-%s" % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = self.handler(environ)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get('templates', [])
response.context = data.get('context')
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
def get(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using GET."""
self.extra = extra
response = super().get(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
follow=False, secure=False, **extra):
"""Request a response from the server using POST."""
self.extra = extra
response = super().post(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def head(self, path, data=None, follow=False, secure=False, **extra):
"""Request a response from the server using HEAD."""
self.extra = extra
response = super().head(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def options(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Request a response from the server using OPTIONS."""
self.extra = extra
response = super().options(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def put(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PUT."""
self.extra = extra
response = super().put(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def patch(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a resource to the server using PATCH."""
self.extra = extra
response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def delete(self, path, data='', content_type='application/octet-stream',
follow=False, secure=False, **extra):
"""Send a DELETE request to the server."""
self.extra = extra
response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
return response
def trace(self, path, data='', follow=False, secure=False, **extra):
"""Send a TRACE request to the server."""
self.extra = extra
response = super().trace(path, data=data, secure=secure, **extra)
if follow:
response = self._handle_redirects(response, data=data, **extra)
return response
def _handle_redirects(self, response, data='', content_type='', **extra):
"""
Follow any redirects by requesting responses from the server using GET.
"""
response.redirect_chain = []
redirect_status_codes = (
HTTPStatus.MOVED_PERMANENTLY,
HTTPStatus.FOUND,
HTTPStatus.SEE_OTHER,
HTTPStatus.TEMPORARY_REDIRECT,
HTTPStatus.PERMANENT_REDIRECT,
)
while response.status_code in redirect_status_codes:
response_url = response.url
redirect_chain = response.redirect_chain
redirect_chain.append((response_url, response.status_code))
url = urlsplit(response_url)
if url.scheme:
extra['wsgi.url_scheme'] = url.scheme
if url.hostname:
extra['SERVER_NAME'] = url.hostname
if url.port:
extra['SERVER_PORT'] = str(url.port)
# Prepend the request path to handle relative path redirects
path = url.path
if not path.startswith('/'):
path = urljoin(response.request['PATH_INFO'], path)
if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT):
# Preserve request method and query string (if needed)
# post-redirect for 307/308 responses.
request_method = response.request['REQUEST_METHOD'].lower()
if request_method not in ('get', 'head'):
extra['QUERY_STRING'] = url.query
request_method = getattr(self, request_method)
else:
request_method = self.get
data = QueryDict(url.query)
content_type = None
response = request_method(path, data=data, content_type=content_type, follow=False, **extra)
response.redirect_chain = redirect_chain
if redirect_chain[-1] in redirect_chain[:-1]:
# Check that we're not redirecting to somewhere we've already
# been to, to prevent loops.
raise RedirectCycleError("Redirect loop detected.", last_response=response)
if len(redirect_chain) > 20:
# Such a lengthy chain likely also means a loop, but one with
# a growing path, changing view, or changing query argument;
# 20 is the value of "network.http.redirection-limit" from Firefox.
raise RedirectCycleError("Too many redirects.", last_response=response)
return response
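# Hedged usage sketch: with follow=True the client walks the redirect chain
# using the logic above and records each hop ('/final/' and 301 are made up).
#
#     response = client.get('/old/', follow=True)
#     response.redirect_chain  # -> [('/final/', 301)]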
class AsyncClient(ClientMixin, AsyncRequestFactory):
"""
An async version of Client that creates ASGIRequests and calls through an
async request path.
Does not currently support "follow" on its methods.
"""
def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
super().__init__(**defaults)
self.handler = AsyncClientHandler(enforce_csrf_checks)
self.raise_request_exception = raise_request_exception
self.exc_info = None
self.extra = None
async def request(self, **request):
"""
The master request method. Compose the scope dictionary and pass to the
handler, return the result of the handler. Assume defaults for the
query environment, which can be overridden using the arguments to the
request.
"""
if 'follow' in request:
raise NotImplementedError(
'AsyncClient request methods do not accept the follow '
'parameter.'
)
scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer
# callback function.
data = {}
on_template_render = partial(store_rendered_templates, data)
signal_uid = 'template-render-%s' % id(request)
signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
# Capture exceptions created by the handler.
exception_uid = 'request-exception-%s' % id(request)
got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
try:
response = await self.handler(scope)
finally:
signals.template_rendered.disconnect(dispatch_uid=signal_uid)
got_request_exception.disconnect(dispatch_uid=exception_uid)
# Check for signaled exceptions.
self.check_exception(response)
# Save the client and request that stimulated the response.
response.client = self
response.request = request
# Add any rendered template detail to the response.
response.templates = data.get('templates', [])
response.context = data.get('context')
response.json = partial(self._parse_json, response)
# Attach the ResolverMatch instance to the response.
response.resolver_match = SimpleLazyObject(lambda: resolve(request['path']))
# Flatten a single context. Not really necessary anymore thanks to the
# __getattr__ flattening in ContextList, but has some edge case
# backwards compatibility implications.
if response.context and len(response.context) == 1:
response.context = response.context[0]
# Update persistent cookie data.
if response.cookies:
self.cookies.update(response.cookies)
return response
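# Hedged usage sketch (requires a running event loop, e.g. inside an async
# test): AsyncClient mirrors Client but must be awaited.
#
#     async_client = AsyncClient()
#     response = await async_client.get('/hello/')
#     assert response.status_code == 200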
|
casimp/pyxe | refs/heads/master | pyxe/fitting_tools.py | 2 | # -*- coding: utf-8 -*-
"""
Created on Wed Jul 15 08:34:51 2015
@author: Chris
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import matplotlib.pyplot as plt
import numpy as np
from scipy.optimize import curve_fit
from numpy.polynomial.chebyshev import chebval
import numba
from pyxe.fitting_functions import gaussian, lorentzian, psuedo_voigt
from pyxe.fitting_functions import strain_transformation
@numba.jit(nopython=True)
def pawley_sum(I, h, q, q0, fwhm, func_num):
""" Computes diffraction profile for given set of Pawley parameters.
Args:
I (ndarray): Empty (zeros) intensity array of correct length
h (ndarray): List of peak intensities
q (ndarray): Reciprocal lattice
q0 (ndarray): List of peak positions
        fwhm (ndarray): List of fwhm
        func_num (int): Peak profile - 0 for Gaussian, 1 for Lorentzian
Returns:
ndarray: Computed intensity profile
"""
if func_num == 0:
sig = fwhm / (2 * np.sqrt(2 * np.log(2)))
for i in range(len(q0)):
I = I + h[i] * np.exp(-(q - q0[i]) ** 2 / (2 * sig[i] ** 2))
return I
if func_num == 1:
sig = fwhm / 2
for i in range(len(q0)):
I = I + h[i] / (1.0 + ((q - q0[i]) / sig[i])**2)
return I
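# Hedged sketch (synthetic numbers, no detector involved): summing two
# Gaussian peaks onto a toy q grid with pawley_sum. fwhm is passed with a
# trailing axis of 1 to match how pawley() calls it below.
#
#     q = np.linspace(2.5, 4.0, 500)
#     I = pawley_sum(np.zeros_like(q), np.array([1.0, 0.5]), q,
#                    np.array([3.0, 3.5]), np.array([[0.05], [0.05]]), 0)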
def pawley_hkl(detector, back, fw_order=None, func='gaussian'):
""" Wrapper for Pawley fitting, allowing spec. of detector and background.
Args:
detector (pyxpb.peaks.Peak): pyxpb detector instance
back (ndarray): Background intensity profile
        fw_order (int): Order of the fwhm polynomial
        func (str): Peak profile - 'gaussian' or 'lorentzian'
Returns:
function: Pawley fitting function
"""
def pawley(q, *p):
if func == 'gaussian':
func_num = 0
else:
func_num = 1 if func == 'lorentzian' else 2
np_fw = fw_order + 1 if fw_order is not None else len(detector._fwhm)
p0 = len(detector.hkl) + np_fw
I = np.zeros_like(q)
p_fw = p[p0 - np_fw: p0]
for idx, material in enumerate(detector.materials):
# Extract number of peaks and associated hkl values
npeaks = len(detector.hkl[material])
hkl = [[int(col) for col in row] for row in detector.hkl[material]]
# Estimate of a and associated q0/d0 values
a = p[idx]
d0 = a / np.sqrt(np.sum(np.array(hkl)**2, axis=1))
q0 = 2 * np.pi / d0
q0 = q0[np.logical_and(q0 > np.min(q), q0 < np.max(q))]
# Calculate FWHM and (associated) sigma/c value
fwhm = np.expand_dims(detector.fwhm_q(q0, p_fw), 1)
# Extract intensity values
h = np.array(p[p0: p0 + npeaks])
I = pawley_sum(I, h, q, q0, fwhm, func_num)
p0 += npeaks
return I + back
return pawley
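# Hedged usage sketch (commented out): the closure returned by pawley_hkl is
# a standard scipy.optimize.curve_fit model. Here `det`, `q` and `I` are
# assumed to come from a calibrated pyxpb detector and a measured profile;
# extract_parameters (defined below) supplies the initial estimates.
#
# background = chebval(q, det._back)
# pawley = pawley_hkl(det, background, func='gaussian')
# p0 = extract_parameters(det, (np.min(q), np.max(q)), I_max=np.nanmax(I))
# coeff, var_mat = curve_fit(pawley, q, I, p0=p0)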
def extract_parameters(detector, q_lim, I_max=1, I_lim=None):
""" Extract initial Pawley parameters from detector/setup.
Args:
detector (pyxpb.peaks.Peak): pyXpb detector instance
q_lim (list, tuple): Limit q0 range for pawley fitting
I_max (float): Multiplication factor for intensity
I_lim (float): Relative minimum intensity limit
Returns:
list: Pawley parameter estimates
"""
p = [detector.materials[mat]['a'] for mat in detector.materials]
p += detector._fwhm
for material in detector.materials:
for idx, i in enumerate(detector.relative_heights()[material]):
q0 = detector.q0[material][idx]
if np.logical_and(q0 > q_lim[0], q0 < q_lim[1]):
if I_lim is None or i > I_lim:
p = np.append(p, i * I_max)
return list(p)
def q0_valid_range(detector, q_lim, I_lim=None):
""" Finds valid q0 range (min, max) from detector/material/argument comb.
Args:
detector (pyxpb.peaks.Peak): pyXpb detector instance
        q_lim (list, tuple): Limit q0 range for pawley fitting
        I_lim (float): Relative minimum intensity limit
Returns:
tuple: q_min, q_max
"""
q0_valid = []
for material in detector.materials:
for idx, i in enumerate(detector.relative_heights()[material]):
q0 = detector.q0[material][idx]
if np.logical_and(q0 > q_lim[0], q0 < q_lim[1]):
if I_lim is None or i > I_lim:
q0_valid.append(q0)
return np.min(q0_valid), np.max(q0_valid)
def array_fit_pawley(q_array, I_array, detector, err_lim=1e-4,
q_lim=(2, None), progress=True, func='gaussian',
poisson=True):
""" Pawley peak fit wrapper for ndarray of diffraction profiles/az slices.
    The peak fit is completed using a Gaussian or Lorentzian profile
    (pseudo-Voigt is yet to be implemented). Specify an error limit as a
    threshold for valid peak fitting.
    Args:
        q_array (ndarray): 2d array containing q as a function of az slice idx
        I_array (ndarray): Nd array of intensity profiles wrt. posn/az_slice
        detector (pyxpb.peaks.Peak): pyxpb detector instance
        err_lim (float): Maximum error (in strain) for peak fit
        q_lim (list, tuple): Limit q0 range for pawley fitting
        progress (bool): Live progress bar
        func (str): Peak function ('gaussian' or 'lorentzian')
        poisson (bool): Poisson weighting
    Return:
        tuple: peaks, peaks_err, fwhm, fwhm_err
"""
nmat, nf = len(detector.materials), len(detector._fwhm)
assert nmat > 0, "No materials have yet been specified."
data = [np.nan * np.ones(I_array.shape[:-1]) for _ in range(4)]
peaks, peaks_err, fwhm, fwhm_err = data
slices = [i for i in range(q_array.shape[0])]
err_exceed, run_error = 0, 0
for az_idx in slices:
# Load in detector calibrated q array and crop data
q = q_array[az_idx]
        # q_lim defaults to a tuple, so copy to a list before item assignment
        q_lim = list(q_lim)
        q_lim[0] = q_lim[0] if q_lim[0] is not None else np.min(q)
        q_lim[1] = q_lim[1] if q_lim[1] is not None else np.max(q)
crop = np.logical_and(q > q_lim[0], q < q_lim[1])
q = q[crop]
q0_min, q0_max = q0_valid_range(detector, q_lim)
# Only used to calc FWHM at approx. locations
q0_range = np.linspace(q0_min, q0_max, 100)
if detector._back.ndim == 2:
background = chebval(q, detector._back[az_idx])
else:
background = chebval(q, detector._back)
for position in np.ndindex(I_array.shape[:-2]):
index = tuple(position) + (az_idx,)
I = I_array[index][crop]
p0 = extract_parameters(detector, q_lim, np.nanmax(I))
# Fit peak across window
try:
pawley = pawley_hkl(detector, background, func=func)
sig = 1 + I**0.5 if poisson else None
coeff, var_mat = curve_fit(pawley, q, I, p0=p0, sigma=sig)
perr = np.sqrt(np.diag(var_mat))
peak, peak_err = coeff[0], perr[0] # Single material
pfw, pfw_err = coeff[nmat: nmat + nf], perr[nmat: nmat + nf]
fw = np.sum(np.polyval(pfw, q0_range)**0.5) / 100
fw_err = np.sum(np.polyval(pfw_err, q0_range)**0.5) / 100
# Check error and store
if peak_err / peak > err_lim:
err_exceed += 1
else:
peaks[index], peaks_err[index] = peak, peak_err
fwhm[index], fwhm_err[index] = fw, fw_err
except RuntimeError:
run_error += 1
if progress:
frac = (az_idx) / len(slices)
prog = '\rProgress: [{0:20s}] {1:.0f}%'
sys.stdout.write(prog.format('#' * int(20 * frac), 100 * frac))
sys.stdout.flush()
if progress:
prog = '\rProgress: [{0:20s}] {1:.0f}%'
sys.stdout.write(prog.format('#' * int(20 * 1), 100 * 1))
sys.stdout.flush()
print('\nTotal points: %i (%i az_angles x %i positions)'
'\nPeak not found in %i position/detector combinations'
'\nError limit exceeded (or pcov not estimated) %i times' %
(peaks.size, peaks.shape[-1], peaks[..., 0].size,
run_error, err_exceed))
return peaks, peaks_err, fwhm, fwhm_err
def p0_approx(data, window, func='gaussian'):
""" Esimates peak parameters for gauss/lorentz/psuedo-voigt peak fits.
Args:
data (tuple): q, I data arrays
window (tuple): min, max edges of the search window
        func (str): Peak fitting function (gaussian, lorentzian, psuedo_voigt)
Returns:
tuple: Estimated peak parameters
"""
x, y = data
if x[0] > x[1]:
x = x[::-1]
y = y[::-1]
peak_ind = np.searchsorted(x, window)
q = x[peak_ind[0]:peak_ind[1]]
I = y[peak_ind[0]:peak_ind[1]]
max_index = np.argmax(I)
hm = np.min(I) + (np.max(I) - np.min(I)) / 2
stdev = q[max_index + np.argmin(I[max_index:] > hm)] - q[max_index]
if stdev <= 0:
stdev = 0.1
p0 = [np.min(I), np.max(I) - np.min(I), q[max_index], stdev]
p0.append(0) # linear background
if func == 'psuedo_voigt':
p0.append(0.5)
return p0
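# Commented sketch on synthetic data: estimating starting parameters for a
# single Gaussian peak centred at q = 3.0 (all values illustrative).
#
# q = np.linspace(2.5, 3.5, 400)
# I = 100 * np.exp(-(q - 3.0)**2 / (2 * 0.05**2)) + 5
# p0 = p0_approx((q, I), (2.8, 3.2))  # [min, height, centre, stdev, 0]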
def peak_fit(data, window, p0=None, func='gaussian', poisson=True):
""" Peak fit for diffraction data across specified q window.
The peak fitting is completed using either a Gaussian, Lorentzian or
Psuedo-Voigt procedure. The initial estimate of parameter (p0) can be
supplied or else computed.
Args:
data (tuple, list): q, I data arrays
window (tuple, list): min, max edges of the search window
        p0 (tuple): Estimated curve parameters
        func (str): Peak fitting function (gaussian, lorentzian, psuedo_voigt)
        poisson (bool): Poisson weighting
Return:
tuple: parameters, co-variance matrix (see scipy.optimize.curve_fit)
"""
func_dict = {'gaussian': gaussian, 'lorentzian': lorentzian,
'psuedo_voigt': psuedo_voigt}
func_name = func
func = func_dict[func.lower()]
    # data may arrive as a tuple, so rebuild it as a list before reversal
    data = list(data)
    if data[0][0] > data[0][-1]:
        data[0] = data[0][::-1]
        data[1] = data[1][::-1]
if p0 is None:
p0 = p0_approx(data, window, func_name)
peak_ind = np.searchsorted(data[0], window)
x = data[0][peak_ind[0]:peak_ind[1]]
I = data[1][peak_ind[0]:peak_ind[1]]
sig = I**0.5 if poisson else None
    # Likely better than a fixed value for EDXRD vs. monochromatic data,
    # as min(EDXRD_I > 0) = 1 whereas min(mono_I > 0) << 1
if sig is not None:
if np.max(sig) == 0:
sig[sig == 0] = 1
else:
sig[sig == 0] = np.min(sig[sig > 0])
# Weighted fit is done in the following manner (see scipy docs):
# r = ydata - f(xdata, *popt)
# chisq = sum((r / sigma) ** 2)
# poisson weighting: chisq = sum((r / ydata**0.5) ** 2)
return curve_fit(func, x, I, p0, sigma=sig)
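# Commented sketch, reusing the synthetic q/I from the p0_approx example
# above: the fitted centre sits in coeff[2] and its standard error in the
# square root of the covariance diagonal.
#
# coeff, var_mat = peak_fit((q, I), (2.8, 3.2))
# centre, centre_err = coeff[2], np.sqrt(np.diag(var_mat))[2]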
def array_fit(q_array, I_array, window, func='gaussian',
error_limit=1e-4, progress=True, poisson=True):
""" Peak fit wrapper for ndarray of diffraction profiles/azimuhtal slices.
The peak fitting is completed using either a Gaussian, Lorentzian or
Psuedo-Voigt procedure. Specify an error limit as a threshold for valid
peak fitting.
Args:
q_array (ndarray): 2d array containing q as a function of az slice idx
I_array (ndarray): Nd array of intensity profiles wrt. posn/az_slice
window (tuple): min, max edges of the search window
        func (str): Peak fitting function (gaussian, lorentzian, psuedo_voigt)
        error_limit (float): Maximum error (in strain) for peak fit
        progress (bool): Live progress bar
        poisson (bool): Poisson weighting
Return:
tuple: peaks, peaks_err, fwhm, fwhm_err
"""
data = [np.nan * np.ones(I_array.shape[:-1]) for _ in range(4)]
peaks, peaks_err, fwhm, fwhm_err = data
slices = [i for i in range(q_array.shape[0])]
err_exceed, run_error = 0, 0
for idx, az_idx in enumerate(slices):
# Load in detector calibrated q array
q = q_array[az_idx]
for position in np.ndindex(I_array.shape[:-2]):
index = tuple(position) + (az_idx,)
I = I_array[index]
p0 = p0_approx((q, I), window, func)
# Fit peak across window
try:
coeff, var_matrix = peak_fit((q, I), window, p0, func, poisson)
perr = np.sqrt(np.diag(var_matrix))
peak, peak_err = coeff[2], perr[2]
fw, fw_err = coeff[3], perr[3]
if func == 'gaussian':
fw, fw_err = fw * 2.35482, fw_err * 2.35482
elif func == 'lorentzian':
fw, fw_err = fw * 2, fw_err * 2
# Check error and store
if np.abs(peak_err / peak) > error_limit:
err_exceed += 1
else:
peaks[index], peaks_err[index] = peak, peak_err
fwhm[index], fwhm_err[index] = fw, fw_err
except RuntimeError:
run_error += 1
if progress:
percent = 100 * ((idx) / len(slices))
sys.stdout.write('\rProgress: [{0:20s}] {1:.0f}%'.format('#' *
int(percent / 5), percent))
sys.stdout.flush()
if progress:
percent = 100
sys.stdout.write('\rProgress: [{0:20s}] {1:.0f}%'.format('#' *
int(percent / 5), percent))
sys.stdout.flush()
print('\nTotal points: %i (%i az_angles x %i positions)'
          '\nPeak not found in %i position/detector combinations'
'\nError limit exceeded (or pcov not estimated) %i times' %
(peaks.size, peaks.shape[-1], peaks[..., 0].size,
run_error, err_exceed))
return peaks, peaks_err, fwhm, fwhm_err
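# Commented sketch: array_fit over two identical azimuthal slices of the
# synthetic peak used above. Shapes follow the docstring - q_array is
# (n_slices, n_q) and I_array is (..., n_slices, n_q).
#
# q = np.linspace(2.5, 3.5, 400)
# q_array = np.vstack([q, q])
# I = 100 * np.exp(-(q - 3.0)**2 / (2 * 0.05**2)) + 5
# I_array = np.tile(I, (4, 2, 1))
# peaks, peaks_err, fwhm, fwhm_err = array_fit(q_array, I_array, (2.8, 3.2))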
def full_ring_fit(strain, phi):
""" Computes strain tensor from phi v normal strain distribution.
Fits the strain transformation equation to the calculated strain at each
azimuthal location.
Args:
strain (ndarray): Nd strain array where final dimension is of len(phi)
phi (ndarray): Azimuthal angle of each azimuthal slice (rad)
Returns:
tuple: Strain tensor (e_xx, e_yy, e_xy)
Strain tensor error (e_xx_err, e_yy_err, e_xy_err)
Strain tensor rmse (e_rmse)
"""
strain_tensor = np.nan * np.ones(strain.shape[:-1] + (3,))
strain_tensor_error = np.nan * np.ones(strain.shape[:-1] + (3,))
strain_tensor_rmse = np.nan * np.ones(strain.shape[:-1] + (1,))
error_count = 0
for idx in np.ndindex(strain_tensor.shape[:-1]):
data = strain[idx]
not_nan = ~np.isnan(data)
p0 = [np.nanmean(data), 3 * np.nanstd(data) / (2 ** 0.5), 0]
try:
popt, pcov = curve_fit(strain_transformation,
phi[not_nan], data[not_nan], p0)
strain_tensor[idx] = popt
strain_tensor_error[idx] = np.sqrt(np.diag(pcov))
e_t = strain_transformation(phi, *popt)
e = strain[idx]
strain_tensor_rmse[idx] = np.nanmean((e_t - e)**2)**0.5
except (TypeError, RuntimeError, ValueError):
error_count += 1
print('\nUnable to fit full ring at %i out of %i points'
% (error_count, np.size(strain[..., 0])))
return strain_tensor, strain_tensor_error, strain_tensor_rmse
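# Commented sketch: round-tripping a known strain tensor. The exact form of
# strain_transformation lives in pyxe.fitting_functions; it is assumed here
# to map (phi, e_xx, e_yy, e_xy) onto the normal strain at each angle.
#
# phi = np.linspace(0, np.pi, 23)
# strain = strain_transformation(phi, 1e-3, -5e-4, 2e-4)[None, :]
# tensor, tensor_err, rmse = full_ring_fit(strain, phi)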
#def mirror_data(phi, data):
# """ Attempts to merge azimuthally distributed data across poles.
#
# Only works in when there is an odd number of azimuthal slices.
#
# Args:
# phi (ndarray): Azimuthal slice positions (rad)
# data (ndarray): Data to be azimuthally mirrored
#
# Returns:
# tuple: mphi, mdata - azimuthally merged data
# """
# mphi = phi[:int(phi[:].shape[0]/2)]
# peak_shape = data.shape
# phi_len = int(peak_shape[-2]/2)
# new_shape = (peak_shape[:-2] + (phi_len, ) + peak_shape[-1:])
# mdata = np.nan * np.zeros(new_shape)
# for i in range(phi_len):
# mdata[:, i] = (data[:, i] + data[:, i + new_shape[-2]]) / 2
# return mphi, mdata
#
#
#def single_pawley(detector, q, I, back, p_fw=None, func='gaussian'):
# """ Full pawley fitting for a specific q, I combination.
#
# Option to use different initial parameters for FWHM fit. This allows a
# lower order polynomial to be specified.
#
# Args:
# detector: pyxpb detector object
# q (ndarray): Reciprocal lattice
# I (ndarray): Intensity values
# az_idx (int): Azimuthal slice index
# p_fw (list, tuple): New initial estimate
# """
# nmat, nf = len(detector.materials), len(detector._fwhm)
# background = chebval(q, back)
# q_lim = [np.min(q), np.max(q)]
# p0 = extract_parameters(detector, q_lim, np.nanmax(I))
#
# if p_fw is not None:
# p_fw0_idx = range(nmat, nmat + nf)
# p0_new = [i for idx, i in enumerate(p0) if idx not in p_fw0_idx]
#
# for idx, i in enumerate(p_fw):
# p0_new.insert(nmat + idx, i)
#
# nf = len(p_fw)
# else:
# p0_new = p0
# pawley = pawley_hkl(detector, background, nf - 1, func=func)
# return curve_fit(pawley, q, I, p0=p0_new)
#
#if __name__ == '__main__':
# import os
# from pyxe.energy_dispersive import EDI12
# base = os.path.split(os.path.dirname(__file__))[0]
# fpath_1 = os.path.join(base, r'pyxe/data/50418.nxs')
# fpath_2 = os.path.join(base, r'pyxe/data/50414.nxs')
# fine = EDI12(fpath_1)
# fine.add_material('Fe')
# fine.plot_intensity(pawley=True)
# plt.show()
# print(fine.detector.fwhm_param) |
guorendong/iridium-browser-ubuntu | refs/heads/ubuntu/precise | third_party/mojo/src/mojo/public/tools/bindings/mojom_bindings_generator_unittest.py | 107 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from mojom_bindings_generator import MakeImportStackMessage
class MojoBindingsGeneratorTest(unittest.TestCase):
"""Tests mojo_bindings_generator."""
def testMakeImportStackMessage(self):
"""Tests MakeImportStackMessage()."""
self.assertEquals(MakeImportStackMessage(["x"]), "")
self.assertEquals(MakeImportStackMessage(["x", "y"]),
"\n y was imported by x")
self.assertEquals(MakeImportStackMessage(["x", "y", "z"]),
"\n z was imported by y\n y was imported by x")
if __name__ == "__main__":
unittest.main()
|
showa-yojyo/notebook | refs/heads/develop | source/_sample/ptt/trends-closest.py | 2 | #!/usr/bin/env python
# Demonstration GET trends/closest
# See https://dev.twitter.com/rest/reference/get/trends/closest
from secret import twitter_instance
from json import dump
import sys
tw = twitter_instance()
# [1]
response = tw.trends.closest(long=139.773828, lat=35.696805)
# [2]
dump(response, sys.stdout, ensure_ascii=False, indent=4, sort_keys=True)
|
chauhanhardik/populo_2 | refs/heads/master | common/djangoapps/track/backends/tests/test_mongodb.py | 172 | from __future__ import absolute_import
from mock import patch
from django.test import TestCase
from track.backends.mongodb import MongoBackend
class TestMongoBackend(TestCase):
def setUp(self):
super(TestMongoBackend, self).setUp()
self.mongo_patcher = patch('track.backends.mongodb.MongoClient')
self.mongo_patcher.start()
self.addCleanup(self.mongo_patcher.stop)
self.backend = MongoBackend()
def test_mongo_backend(self):
events = [{'test': 1}, {'test': 2}]
self.backend.send(events[0])
self.backend.send(events[1])
# Check if we inserted events into the database
calls = self.backend.collection.insert.mock_calls
self.assertEqual(len(calls), 2)
# Unpack the arguments and check if the events were used
# as the first argument to collection.insert
def first_argument(call):
_, args, _ = call
return args[0]
self.assertEqual(events[0], first_argument(calls[0]))
self.assertEqual(events[1], first_argument(calls[1]))
|
michaelgugino/loggerfall | refs/heads/master | loggerfall.py | 1 | #!/usr/bin/env python
# Modified work by Michael Gugino
# Original work Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Streaming data (logs) via websockets.
Authentication, error handling, etc. are not implemented yet.
"""
import logging
import tornado.escape
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import os.path
import uuid
from tornado.httpserver import HTTPServer
from tornado.options import define, options
import redis
import ast
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
redcon = redis.Redis(connection_pool=pool)
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r"/", MainHandler),
(r"/chatsocket", ChatSocketHandler),
]
settings = dict(
cookie_secret="__TODO:_GENERATE_YOUR_OWN_RANDOM_VALUE_HERE__",
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path=os.path.join(os.path.dirname(__file__), "static"),
xsrf_cookies=True,
)
tornado.web.Application.__init__(self, handlers, **settings)
class MainHandler(tornado.web.RequestHandler):
def get(self):
#need to remove get/post data from first get.
hostchannel = str(self.get_argument("HOST", default='none', strip=False))
appchannel = str(self.get_argument("APP", default='none', strip=False))
cache = []
#Need to update the index.html
self.render("index.html", messages=cache)
class ChatSocketHandler(tornado.websocket.WebSocketHandler):
channelcache = dict()
channels = dict()
cache_size = 200
def get_compression_options(self):
# Non-None enables compression with default options.
return {}
#client opens a connection to receive messages.
def open(self):
hostchannel = str(self.get_argument("HOST", default='none', strip=False))
appchannel = str(self.get_argument("APP", default='none', strip=False))
hostappchan = hostchannel + '::' + appchannel
if (hostchannel == 'none' or appchannel == 'none'):
            return None
self.subscribe = hostappchan
if str(hostappchan) in ChatSocketHandler.channels:
ChatSocketHandler.channels[hostappchan].add(self)
else:
#add our channel set to channels dictionary
ChatSocketHandler.channels[hostappchan] = set()
#create local cache for channel messages
ChatSocketHandler.channelcache[hostappchan] = list()
#add our client to our channel set
ChatSocketHandler.channels[hostappchan].add(self)
#Send full 2k messages on connect
self.send_cache_on_connect(hostappchan)
def send_cache_on_connect(self,channel):
print "send_cache_on_connect: ", channel
try:
            count = 0
res = redcon.lrange(channel,-2000,-1)
reslen = len(res)
for msg in res:
if count > reslen - self.cache_size:
local_cache_update(channel, msg, 0)
count = count + 1
self.write_message(ast.literal_eval(msg))
except:
logging.error("Error sending message", exc_info=True)
ChatSocketHandler.channels[channel].remove(self)
#Client has disconnected
def on_close(self):
hostchannel = str(self.get_argument("HOST", default='none', strip=False))
appchannel = str(self.get_argument("APP", default='none', strip=False))
hostappchan = hostchannel + '::' + appchannel
ChatSocketHandler.channels[hostappchan].remove(self)
@classmethod
def send_updates3(cls, channel, msg):
for waiter in cls.channels[channel]:
try:
waiter.write_message(ast.literal_eval(msg))
except:
logging.error("Error sending message", exc_info=True)
cls.channels[channel].remove(waiter)
def on_message(self, message):
pass
def local_cache_update(channel, msg, cache_len):
    # Bound the per-channel cache to the class-level cache_size
    if cache_len > ChatSocketHandler.cache_size:
        ChatSocketHandler.channelcache[channel].pop(0)
    ChatSocketHandler.channelcache[channel].append(msg)
def local_cache_check(channel, cache_set, msg):
if msg in cache_set:
return True
else:
print "not in cache: ", msg
return False
def check_redis():
if ChatSocketHandler.channels:
removals = list()
for channel in ChatSocketHandler.channels:
#check for any active listeners on this channel
if len(ChatSocketHandler.channels[channel]) == 0:
                removals.append(channel)
else:
messages_to_send = list()
for msg in redcon.lrange(channel,-200,-1):
#check if message in local cache.
#append to local cache.
#clear old entries from local cache.
cache_set = set(ChatSocketHandler.channelcache[channel])
cache_len = len(cache_set)
if not local_cache_check(channel, cache_set, msg):
cache_len = cache_len+1
local_cache_update(channel, msg, cache_len)
messages_to_send.append(msg)
for msg in messages_to_send:
ChatSocketHandler.send_updates3(channel, msg)
        for chan in removals:
            del ChatSocketHandler.channels[chan]
            del ChatSocketHandler.channelcache[chan]
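# Hedged producer-side sketch (not part of this server): log shippers are
# expected to RPUSH string-encoded dicts onto a 'HOST::APP' redis list, which
# send_cache_on_connect()/check_redis() read back with LRANGE and decode via
# ast.literal_eval. The channel name and payload below are illustrative only.
#
# producer = redis.Redis(connection_pool=pool)
# producer.rpush('web01::nginx', str({'ts': 1450000000, 'line': 'GET / 200'}))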
def main(**kwargs):
import socket
define("port", default=0, help="run on the given port", type=int)
define("path", default="/tmp/test", help="run on the given port", type=str)
define("fd", default=8888, help="run on the given port", type=int)
tornado.options.parse_command_line()
app = Application()
if options.port > 0:
app.listen(options.port)
else:
sock = socket.fromfd(options.fd, socket.AF_INET, socket.SOCK_STREAM)
server = HTTPServer(app, **kwargs)
server.add_socket(sock)
goer = tornado.ioloop.IOLoop.instance()
pcall = tornado.ioloop.PeriodicCallback(check_redis, 1000)
pcall.start()
goer.start()
if __name__ == "__main__":
main()
|
nRFMesh/mbed-os | refs/heads/master | tools/host_tests/tcpecho_client.py | 73 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import socket
import string, random
from time import time
from mbed_settings import SERVER_ADDRESS
ECHO_PORT = 7
LEN_PACKET = 127
N_PACKETS = 5000
TOT_BITS = float(LEN_PACKET * N_PACKETS * 8) * 2
MEGA = float(1024 * 1024)
UPDATE_STEP = (N_PACKETS/10)
class TCP_EchoClient:
def __init__(self, host):
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.s.connect((host, ECHO_PORT))
self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
def __packet(self):
# Comment out the checks when measuring the throughput
# self.packet = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(LEN_PACKET))
self.s.send(self.packet)
data = self.s.recv(LEN_PACKET)
# assert self.packet == data, "packet error:\n%s\n%s\n" % (self.packet, data)
def test(self):
start = time()
for i in range(N_PACKETS):
if (i % UPDATE_STEP) == 0: print '%.2f%%' % ((float(i)/float(N_PACKETS)) * 100.)
self.__packet()
t = time() - start
print 'Throughput: (%.2f)Mbits/s' % ((TOT_BITS / t)/MEGA)
def __del__(self):
self.s.close()
while True:
e = TCP_EchoClient(SERVER_ADDRESS)
e.test()
|
takeshineshiro/nova | refs/heads/master | nova/tests/unit/compute/test_compute_mgr.py | 3 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for ComputeManager()."""
import contextlib
import time
import uuid
from cinderclient import exceptions as cinder_exception
from eventlet import event as eventlet_event
import mock
from mox3 import mox
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import importutils
from oslo_utils import timeutils
from oslo_utils import uuidutils
import six
import nova
from nova.compute import build_results
from nova.compute import manager
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor import api as conductor_api
from nova.conductor import rpcapi as conductor_rpcapi
from nova import context
from nova import db
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova.objects import block_device as block_device_obj
from nova import test
from nova.tests.unit.compute import fake_resource_tracker
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_network_cache_model
from nova.tests.unit import fake_server_actions
from nova.tests.unit.objects import test_instance_fault
from nova.tests.unit.objects import test_instance_info_cache
from nova import utils
from nova.virt import driver as virt_driver
from nova.virt import event as virtevent
from nova.virt import fake as fake_driver
from nova.virt import hardware
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
class ComputeManagerUnitTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerUnitTestCase, self).setUp()
self.flags(use_local=True, group='conductor')
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(manager.ComputeManager, '_get_power_state')
@mock.patch.object(manager.ComputeManager, '_sync_instance_power_state')
@mock.patch.object(objects.Instance, 'get_by_uuid')
def _test_handle_lifecycle_event(self, mock_get, mock_sync,
mock_get_power_state, transition,
event_pwr_state, current_pwr_state):
event = mock.Mock()
event.get_instance_uuid.return_value = mock.sentinel.uuid
event.get_transition.return_value = transition
mock_get_power_state.return_value = current_pwr_state
self.compute.handle_lifecycle_event(event)
mock_get.assert_called_with(mock.ANY, mock.sentinel.uuid,
expected_attrs=[])
if event_pwr_state == current_pwr_state:
mock_sync.assert_called_with(mock.ANY, mock_get.return_value,
event_pwr_state)
else:
self.assertFalse(mock_sync.called)
def test_handle_lifecycle_event(self):
event_map = {virtevent.EVENT_LIFECYCLE_STOPPED: power_state.SHUTDOWN,
virtevent.EVENT_LIFECYCLE_STARTED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_PAUSED: power_state.PAUSED,
virtevent.EVENT_LIFECYCLE_RESUMED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_SUSPENDED:
power_state.SUSPENDED,
}
for transition, pwr_state in six.iteritems(event_map):
self._test_handle_lifecycle_event(transition=transition,
event_pwr_state=pwr_state,
current_pwr_state=pwr_state)
def test_handle_lifecycle_event_state_mismatch(self):
self._test_handle_lifecycle_event(
transition=virtevent.EVENT_LIFECYCLE_STOPPED,
event_pwr_state=power_state.SHUTDOWN,
current_pwr_state=power_state.RUNNING)
def test_delete_instance_info_cache_delete_ordering(self):
call_tracker = mock.Mock()
call_tracker.clear_events_for_instance.return_value = None
mgr_class = self.compute.__class__
orig_delete = mgr_class._delete_instance
specd_compute = mock.create_autospec(mgr_class)
# spec out everything except for the method we really want
# to test, then use call_tracker to verify call sequence
specd_compute._delete_instance = orig_delete
mock_inst = mock.Mock()
mock_inst.uuid = 'inst-1'
mock_inst.save = mock.Mock()
mock_inst.destroy = mock.Mock()
mock_inst.system_metadata = mock.Mock()
def _mark_notify(*args, **kwargs):
call_tracker._notify_about_instance_usage(*args, **kwargs)
def _mark_shutdown(*args, **kwargs):
call_tracker._shutdown_instance(*args, **kwargs)
specd_compute.instance_events = call_tracker
specd_compute._notify_about_instance_usage = _mark_notify
specd_compute._shutdown_instance = _mark_shutdown
mock_inst.info_cache = call_tracker
specd_compute._delete_instance(specd_compute,
self.context,
mock_inst,
mock.Mock(),
mock.Mock())
methods_called = [n for n, a, k in call_tracker.mock_calls]
self.assertEqual(['clear_events_for_instance',
'_notify_about_instance_usage',
'_shutdown_instance', 'delete'],
methods_called)
@mock.patch.object(manager.ComputeManager, '_get_resource_tracker')
@mock.patch.object(fake_driver.FakeDriver, 'get_available_nodes')
@mock.patch.object(manager.ComputeManager, '_get_compute_nodes_in_db')
def test_update_available_resource(self, get_db_nodes, get_avail_nodes,
get_rt):
info = {'cn_id': 1}
def _make_compute_node(hyp_hostname):
cn = mock.Mock(spec_set=['hypervisor_hostname', 'id',
'destroy'])
cn.id = info['cn_id']
info['cn_id'] += 1
cn.hypervisor_hostname = hyp_hostname
return cn
def _make_rt(node):
n = mock.Mock(spec_set=['update_available_resource',
'nodename'])
n.nodename = node
return n
ctxt = mock.Mock()
db_nodes = [_make_compute_node('node1'),
_make_compute_node('node2'),
_make_compute_node('node3'),
_make_compute_node('node4')]
avail_nodes = set(['node2', 'node3', 'node4', 'node5'])
avail_nodes_l = list(avail_nodes)
rts = [_make_rt(node) for node in avail_nodes_l]
# Make the 2nd and 3rd ones raise
exc = exception.ComputeHostNotFound(host='fake')
rts[1].update_available_resource.side_effect = exc
exc = test.TestingException()
rts[2].update_available_resource.side_effect = exc
rts_iter = iter(rts)
def _get_rt_side_effect(*args, **kwargs):
return next(rts_iter)
expected_rt_dict = {avail_nodes_l[0]: rts[0],
avail_nodes_l[2]: rts[2],
avail_nodes_l[3]: rts[3]}
get_db_nodes.return_value = db_nodes
get_avail_nodes.return_value = avail_nodes
get_rt.side_effect = _get_rt_side_effect
self.compute.update_available_resource(ctxt)
get_db_nodes.assert_called_once_with(ctxt, use_slave=True)
self.assertEqual([mock.call(node) for node in avail_nodes],
get_rt.call_args_list)
for rt in rts:
rt.update_available_resource.assert_called_once_with(ctxt)
self.assertEqual(expected_rt_dict,
self.compute._resource_tracker_dict)
# First node in set should have been removed from DB
for db_node in db_nodes:
if db_node.hypervisor_hostname == 'node1':
db_node.destroy.assert_called_once_with()
else:
self.assertFalse(db_node.destroy.called)
def test_delete_instance_without_info_cache(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
host=self.compute.host,
expected_attrs=['system_metadata'])
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
with contextlib.nested(
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_shutdown_instance'),
mock.patch.object(instance, 'obj_load_attr'),
mock.patch.object(instance, 'save'),
mock.patch.object(instance, 'destroy')
) as (
            compute_notify_about_instance_usage, compute_shutdown_instance,
instance_obj_load_attr, instance_save, instance_destroy
):
instance.info_cache = None
self.compute._delete_instance(self.context, instance, [], quotas)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(time, 'sleep')
def test_allocate_network_succeeds_after_retries(
self, mock_sleep, mock_save, mock_allocate_for_instance):
self.flags(network_allocate_retries=8)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
final_result = 'meow'
dhcp_options = None
mock_allocate_for_instance.side_effect = [
test.TestingException()] * 7 + [final_result]
expected_sleep_times = [1, 2, 4, 8, 16, 30, 30, 30]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
mock_sleep.has_calls(expected_sleep_times)
self.assertEqual(final_result, res)
        # Ensure save is not called while allocating networks; the instance
        # is saved after the allocation.
self.assertFalse(mock_save.called)
self.assertEqual('True', instance.system_metadata['network_allocated'])
def test_allocate_network_fails(self):
self.flags(network_allocate_retries=0)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
def test_allocate_network_neg_conf_value_treated_as_zero(self):
self.flags(network_allocate_retries=-1)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
# Only attempted once.
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(manager.ComputeManager, '_instance_update')
@mock.patch.object(time, 'sleep')
def test_allocate_network_with_conf_value_is_one(
self, sleep, _instance_update, allocate_for_instance):
self.flags(network_allocate_retries=1)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
final_result = 'zhangtralon'
allocate_for_instance.side_effect = [test.TestingException(),
final_result]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
self.assertEqual(final_result, res)
self.assertEqual(1, sleep.call_count)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_build_and_run_instance')
def _test_max_concurrent_builds(self, mock_dbari, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
with mock.patch.object(self.compute,
'_build_semaphore') as mock_sem:
instance = objects.Instance(uuid=str(uuid.uuid4()))
for i in (1, 2, 3):
self.compute.build_and_run_instance(self.context, instance,
mock.sentinel.image,
mock.sentinel.request_spec,
{})
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_builds_limited(self):
self.flags(max_concurrent_builds=2)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_unlimited(self):
self.flags(max_concurrent_builds=0)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_semaphore_limited(self):
self.flags(max_concurrent_builds=123)
self.assertEqual(123,
manager.ComputeManager()._build_semaphore.balance)
def test_max_concurrent_builds_semaphore_unlimited(self):
self.flags(max_concurrent_builds=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._build_semaphore.balance)
self.assertIsInstance(compute._build_semaphore,
compute_utils.UnlimitedSemaphore)
def test_init_host(self):
our_host = self.compute.host
inst = fake_instance.fake_db_instance(
vm_state=vm_states.ACTIVE,
info_cache=dict(test_instance_info_cache.fake_info_cache,
network_info=None),
security_groups=None)
startup_instances = [inst, inst, inst]
def _do_mock_calls(defer_iptables_apply):
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(
self.context, our_host, columns_to_join=['info_cache'],
use_slave=False
).AndReturn(startup_instances)
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_on()
self.compute._destroy_evacuated_instances(self.context)
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
self.compute._init_instance(self.context,
mox.IsA(objects.Instance))
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_off()
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_on')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_off')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute,
'_destroy_evacuated_instances')
self.mox.StubOutWithMock(self.compute,
'_init_instance')
# Test with defer_iptables_apply
self.flags(defer_iptables_apply=True)
_do_mock_calls(True)
self.mox.ReplayAll()
self.compute.init_host()
self.mox.VerifyAll()
# Test without defer_iptables_apply
self.mox.ResetAll()
self.flags(defer_iptables_apply=False)
_do_mock_calls(False)
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
@mock.patch('nova.objects.InstanceList')
@mock.patch('nova.objects.MigrationList.get_by_filters')
def test_cleanup_host(self, mock_miglist_get, mock_instance_list):
# just testing whether the cleanup_host method
# when fired will invoke the underlying driver's
# equivalent method.
mock_miglist_get.return_value = []
mock_instance_list.get_by_host.return_value = []
with mock.patch.object(self.compute, 'driver') as mock_driver:
self.compute.init_host()
mock_driver.init_host.assert_called_once_with(host='fake-mini')
self.compute.cleanup_host()
# register_event_listener is called on startup (init_host) and
# in cleanup_host
mock_driver.register_event_listener.assert_has_calls([
mock.call(self.compute.handle_events), mock.call(None)])
mock_driver.cleanup_host.assert_called_once_with(host='fake-mini')
def test_init_virt_events_disabled(self):
self.flags(handle_virt_lifecycle_events=False, group='workarounds')
with mock.patch.object(self.compute.driver,
'register_event_listener') as mock_register:
self.compute.init_virt_events()
self.assertFalse(mock_register.called)
@mock.patch('nova.objects.MigrationList.get_by_filters')
@mock.patch('nova.objects.Migration.save')
def test_init_host_with_evacuated_instance(self, mock_save, mock_mig_get):
our_host = self.compute.host
not_our_host = 'not-' + our_host
deleted_instance = fake_instance.fake_instance_obj(
self.context, host=not_our_host, uuid='fake-uuid')
migration = objects.Migration(instance_uuid=deleted_instance.uuid)
mock_mig_get.return_value = [migration]
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver, 'destroy')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute, 'init_virt_events')
self.mox.StubOutWithMock(self.compute, '_get_instances_on_driver')
self.mox.StubOutWithMock(self.compute, '_init_instance')
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(self.context)
db.instance_get_all_by_host(self.context, our_host,
columns_to_join=['info_cache'],
use_slave=False
).AndReturn([])
self.compute.init_virt_events()
# simulate failed instance
self.compute._get_instances_on_driver(
self.context, {'deleted': False}).AndReturn([deleted_instance])
self.compute.network_api.get_instance_nw_info(
self.context, deleted_instance).AndRaise(
exception.InstanceNotFound(instance_id=deleted_instance['uuid']))
# ensure driver.destroy is called so that driver may
# clean up any dangling files
self.compute.driver.destroy(self.context, deleted_instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_init_instance_with_binding_failed_vif_type(self):
# this instance will plug a 'binding_failed' vif
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
with contextlib.nested(
mock.patch.object(context, 'get_admin_context',
return_value=self.context),
mock.patch.object(compute_utils, 'get_nw_info_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute.driver, 'plug_vifs',
side_effect=exception.VirtualInterfacePlugException(
"Unexpected vif_type=binding_failed")),
mock.patch.object(self.compute, '_set_instance_obj_error_state')
) as (get_admin_context, get_nw_info, plug_vifs, set_error_state):
self.compute._init_instance(self.context, instance)
set_error_state.assert_called_once_with(self.context, instance)
def test__get_power_state_InstanceNotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.InstanceNotFound(instance_id=1)):
self.assertEqual(self.compute._get_power_state(self.context,
instance),
power_state.NOSTATE)
def test__get_power_state_NotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.NotFound()):
self.assertRaises(exception.NotFound,
self.compute._get_power_state,
self.context, instance)
def test_init_instance_failed_resume_sets_error(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
host=self.compute.host,
expected_attrs=['info_cache'])
self.flags(resume_guests_state_on_host_boot=True)
self.mox.StubOutWithMock(self.compute, '_get_power_state')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'resume_state_on_host_boot')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute.driver.plug_vifs(instance, mox.IgnoreArg())
self.compute._get_instance_block_device_info(mox.IgnoreArg(),
instance).AndReturn('fake-bdm')
self.compute.driver.resume_state_on_host_boot(mox.IgnoreArg(),
instance, mox.IgnoreArg(),
'fake-bdm').AndRaise(test.TestingException)
self.compute._set_instance_obj_error_state(mox.IgnoreArg(), instance)
self.mox.ReplayAll()
self.compute._init_instance('fake-context', instance)
def test_init_instance_stuck_in_deleting(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
@mock.patch.object(objects.Instance, 'get_by_uuid')
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def test_init_instance_stuck_in_deleting_raises_exception(
self, mock_get_by_instance_uuid, mock_get_by_uuid):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
metadata={},
system_metadata={},
host=self.compute.host,
vm_state=vm_states.ACTIVE,
task_state=task_states.DELETING,
expected_attrs=['metadata', 'system_metadata'])
bdms = []
reservations = ['fake-resv']
def _create_patch(name, attr):
patcher = mock.patch.object(name, attr)
mocked_obj = patcher.start()
self.addCleanup(patcher.stop)
return mocked_obj
mock_delete_instance = _create_patch(self.compute, '_delete_instance')
mock_set_instance_error_state = _create_patch(
self.compute, '_set_instance_obj_error_state')
mock_create_reservations = _create_patch(self.compute,
'_create_reservations')
mock_create_reservations.return_value = reservations
mock_get_by_instance_uuid.return_value = bdms
mock_get_by_uuid.return_value = instance
mock_delete_instance.side_effect = test.TestingException('test')
self.compute._init_instance(self.context, instance)
mock_set_instance_error_state.assert_called_once_with(
self.context, instance)
def _test_init_instance_reverts_crashed_migrations(self,
old_vm_state=None):
power_on = True if (not old_vm_state or
old_vm_state == vm_states.ACTIVE) else False
sys_meta = {
'old_vm_state': old_vm_state
}
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ERROR,
task_state=task_states.RESIZE_MIGRATING,
power_state=power_state.SHUTDOWN,
system_metadata=sys_meta,
host=self.compute.host,
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'finish_revert_migration')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver, 'get_info')
self.mox.StubOutWithMock(instance, 'save')
self.mox.StubOutWithMock(self.compute, '_retry_reboot')
self.compute._retry_reboot(self.context, instance).AndReturn(
(False, None))
compute_utils.get_nw_info_for_instance(instance).AndReturn(
network_model.NetworkInfo())
self.compute.driver.plug_vifs(instance, [])
self.compute._get_instance_block_device_info(
self.context, instance).AndReturn([])
self.compute.driver.finish_revert_migration(self.context, instance,
[], [], power_on)
instance.save()
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.assertIsNone(instance.task_state)
def test_init_instance_reverts_crashed_migration_from_active(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.ACTIVE)
def test_init_instance_reverts_crashed_migration_from_stopped(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.STOPPED)
def test_init_instance_reverts_crashed_migration_no_old_state(self):
self._test_init_instance_reverts_crashed_migrations(old_vm_state=None)
def test_init_instance_resets_crashed_live_migration(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.MIGRATING)
with contextlib.nested(
mock.patch.object(instance, 'save'),
mock.patch('nova.compute.utils.get_nw_info_for_instance',
return_value=network_model.NetworkInfo())
) as (save, get_nw_info):
self.compute._init_instance(self.context, instance)
save.assert_called_once_with(expected_task_state=['migrating'])
get_nw_info.assert_called_once_with(instance)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def _test_init_instance_sets_building_error(self, vm_state,
task_state=None):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_state,
host=self.compute.host,
task_state=task_state)
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_error(self):
self._test_init_instance_sets_building_error(vm_states.BUILDING)
def test_init_instance_sets_rebuilding_errors(self):
tasks = [task_states.REBUILDING,
task_states.REBUILD_BLOCK_DEVICE_MAPPING,
task_states.REBUILD_SPAWNING]
vms = [vm_states.ACTIVE, vm_states.STOPPED]
for vm_state in vms:
for task_state in tasks:
self._test_init_instance_sets_building_error(
vm_state, task_state)
def _test_init_instance_sets_building_tasks_error(self, instance):
instance.host = self.compute.host
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_tasks_error_scheduling(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=None,
task_state=task_states.SCHEDULING)
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_block_device(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.BLOCK_DEVICE_MAPPING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_networking(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.NETWORKING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_spawning(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.SPAWNING
self._test_init_instance_sets_building_tasks_error(instance)
def _test_init_instance_cleans_image_states(self, instance):
with mock.patch.object(instance, 'save') as save:
self.compute._get_power_state = mock.Mock()
self.compute.driver.post_interrupted_snapshot_cleanup = mock.Mock()
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.host = self.compute.host
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.compute.driver.post_interrupted_snapshot_cleanup.\
assert_called_once_with(self.context, instance)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def _test_init_instance_cleans_task_states(self, powerstate, state,
mock_get_uuid, mock_get_power_state):
instance = objects.Instance(self.context)
instance.uuid = 'fake-uuid'
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.vm_state = vm_states.ACTIVE
instance.task_state = state
instance.host = self.compute.host
mock_get_power_state.return_value = powerstate
self.compute._init_instance(self.context, instance)
return instance
def test_init_instance_cleans_image_state_pending_upload(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_PENDING_UPLOAD
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_uploading(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_UPLOADING
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
self._test_init_instance_cleans_image_states(instance)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_pausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.PAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_unpausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.UNPAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager.unpause_instance')
def test_init_instance_cleans_paused_unpausing(self, mock_unpause):
def fake_unpause(context, instance):
instance.task_state = None
mock_unpause.side_effect = fake_unpause
instance = self._test_init_instance_cleans_task_states(
power_state.PAUSED, task_states.UNPAUSING)
mock_unpause.assert_called_once_with(self.context, instance)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_init_instance_errors_when_not_migrating(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ERROR
instance.task_state = task_states.IMAGE_UPLOADING
instance.host = self.compute.host
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_deletes_error_deleting_instance(self):
instance = fake_instance.fake_instance_obj(
self.context,
project_id='fake',
uuid='fake-uuid',
vcpus=1,
memory_mb=64,
vm_state=vm_states.ERROR,
host=self.compute.host,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
self.mox.StubOutWithMock(objects.quotas, 'ids_from_instance')
self.mox.StubOutWithMock(self.compute, '_create_reservations')
bdms = []
quotas = objects.quotas.Quotas(self.context)
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
objects.quotas.ids_from_instance(self.context, instance).AndReturn(
(instance.project_id, instance.user_id))
self.compute._create_reservations(self.context, instance,
instance.project_id,
instance.user_id).AndReturn(quotas)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_resize_prep(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ACTIVE,
host=self.compute.host,
task_state=task_states.RESIZE_PREP,
power_state=power_state.RUNNING)
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(compute_utils, 'get_nw_info_for_instance'),
mock.patch.object(instance, 'save', autospec=True)
) as (mock_get_power_state, mock_nw_info, mock_instance_save):
self.compute._init_instance(self.context, instance)
mock_instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
@mock.patch('nova.context.RequestContext.elevated')
@mock.patch('nova.compute.utils.get_nw_info_for_instance')
@mock.patch(
'nova.compute.manager.ComputeManager._get_instance_block_device_info')
@mock.patch('nova.virt.driver.ComputeDriver.destroy')
@mock.patch('nova.virt.driver.ComputeDriver.get_volume_connector')
def _test_shutdown_instance_exception(self, exc, mock_connector,
mock_destroy, mock_blk_device_info, mock_nw_info, mock_elevated):
mock_connector.side_effect = exc
mock_elevated.return_value = self.context
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
task_state=task_states.DELETING)
bdms = [mock.Mock(id=1, is_volume=True)]
self.compute._shutdown_instance(self.context, instance, bdms,
notify=False, try_deallocate_networks=False)
def test_shutdown_instance_endpoint_not_found(self):
exc = cinder_exception.EndpointNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_client_exception(self):
exc = cinder_exception.ClientException
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_volume_not_found(self):
exc = exception.VolumeNotFound
self._test_shutdown_instance_exception(exc)
def test_shutdown_instance_disk_not_found(self):
exc = exception.DiskNotFound
self._test_shutdown_instance_exception(exc)
def _test_init_instance_retries_reboot(self, instance, reboot_type,
return_power_state):
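        # Helper: run _init_instance with the given driver power state and
        # verify that reboot_instance is re-invoked with the expected type.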
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=return_power_state),
mock.patch.object(self.compute, 'reboot_instance'),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
reboot_instance,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, block_device_info=None,
reboot_type=reboot_type)
reboot_instance.assert_has_calls([call])
def test_init_instance_retries_reboot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_SOFT_REBOOT:
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'SOFT',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
for state in vm_states.ALLOW_HARD_REBOOT:
# NOTE(dave-mcnally) while a reboot of a vm in error state is
# possible we don't attempt to recover an error during init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.RUNNING)
def test_init_instance_retries_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def test_init_instance_retries_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def _test_init_instance_cleans_reboot_state(self, instance):
instance.host = self.compute.host
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save', autospec=True),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
instance_save,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def test_init_instance_cleans_image_state_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_cleans_image_state_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_retries_power_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
with mock.patch.object(self.compute, 'stop_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
def test_init_instance_retries_power_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
with mock.patch.object(self.compute, 'start_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
def test_init_instance_retries_power_on_silent_exception(self):
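        # start_instance is mocked to return an exception class rather than
        # raise; _init_instance should ignore the value and return None.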
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
instance.host = self.compute.host
with mock.patch.object(self.compute, 'start_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_init_instance_retries_power_off_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
instance.host = self.compute.host
with mock.patch.object(self.compute, 'stop_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, True)
self.compute.stop_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_get_instances_on_driver(self):
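        # With list_instance_uuids supported by the driver, instances are
        # looked up in the database by a UUID filter.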
driver_instances = []
for x in range(10):
driver_instances.append(fake_instance.fake_db_instance())
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndReturn(
[inst['uuid'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context,
{'uuid': [inst['uuid'] for
inst in driver_instances]},
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(
driver_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
@mock.patch('nova.virt.driver.ComputeDriver.list_instance_uuids')
@mock.patch('nova.db.api.instance_get_all_by_filters')
def test_get_instances_on_driver_empty(self, mock_list, mock_db):
mock_list.return_value = []
result = self.compute._get_instances_on_driver(self.context)
# instance_get_all_by_filters should not be called
self.assertEqual(0, mock_db.call_count)
self.assertEqual([],
[x['uuid'] for x in result])
def test_get_instances_on_driver_fallback(self):
# Test getting instances when driver doesn't support
# 'list_instance_uuids'
self.compute.host = 'host'
filters = {'host': self.compute.host}
self.flags(instance_name_template='inst-%i')
all_instances = []
driver_instances = []
for x in range(10):
instance = fake_instance.fake_db_instance(name='inst-%i' % x,
id=x)
if x % 2:
driver_instances.append(instance)
all_instances.append(instance)
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(self.compute.driver,
'list_instances')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndRaise(
NotImplementedError())
self.compute.driver.list_instances().AndReturn(
[inst['name'] for inst in driver_instances])
db.instance_get_all_by_filters(
self.context, filters,
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(all_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(self.context, filters)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
def test_instance_usage_audit(self):
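        # With instance_usage_audit enabled, the audit should emit an
        # exists notification for each instance active in the period.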
instances = [objects.Instance(uuid='foo')]
@classmethod
def fake_task_log(*a, **k):
pass
@classmethod
def fake_get(*a, **k):
return instances
self.flags(instance_usage_audit=True)
self.stubs.Set(objects.TaskLog, 'get', fake_task_log)
self.stubs.Set(objects.InstanceList,
'get_active_by_window_joined', fake_get)
self.stubs.Set(objects.TaskLog, 'begin_task', fake_task_log)
self.stubs.Set(objects.TaskLog, 'end_task', fake_task_log)
self.mox.StubOutWithMock(compute_utils, 'notify_usage_exists')
compute_utils.notify_usage_exists(self.compute.notifier,
self.context, instances[0], ignore_missing_network_data=False)
self.mox.ReplayAll()
self.compute._instance_usage_audit(self.context)
@mock.patch.object(objects.InstanceList, 'get_by_host')
def test_sync_power_states(self, mock_get):
instance = mock.Mock()
mock_get.return_value = [instance]
with mock.patch.object(self.compute._sync_power_pool,
'spawn_n') as mock_spawn:
self.compute._sync_power_states(mock.sentinel.context)
mock_get.assert_called_with(mock.sentinel.context,
self.compute.host, expected_attrs=[],
use_slave=True)
mock_spawn.assert_called_once_with(mock.ANY, instance)
def _get_sync_instance(self, power_state, vm_state, task_state=None,
shutdown_terminate=False):
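        # Build a minimal instance for the power-state sync tests with
        # refresh/save stubbed so each test can record its expectations.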
instance = objects.Instance()
instance.uuid = 'fake-uuid'
instance.power_state = power_state
instance.vm_state = vm_state
instance.host = self.compute.host
instance.task_state = task_state
instance.shutdown_terminate = shutdown_terminate
self.mox.StubOutWithMock(instance, 'refresh')
self.mox.StubOutWithMock(instance, 'save')
return instance
def test_sync_instance_power_state_match(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.RUNNING)
def test_sync_instance_power_state_running_stopped(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
instance.save()
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.SHUTDOWN)
self.assertEqual(instance.power_state, power_state.SHUTDOWN)
def _test_sync_to_stop(self, power_state, vm_state, driver_power_state,
stop=True, force=False, shutdown_terminate=False):
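        # Depending on the driver power state and flags, the sync should
        # stop, force-stop, or delete the instance via the compute API.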
instance = self._get_sync_instance(
power_state, vm_state, shutdown_terminate=shutdown_terminate)
instance.refresh(use_slave=False)
instance.save()
self.mox.StubOutWithMock(self.compute.compute_api, 'stop')
self.mox.StubOutWithMock(self.compute.compute_api, 'delete')
self.mox.StubOutWithMock(self.compute.compute_api, 'force_stop')
if shutdown_terminate:
self.compute.compute_api.delete(self.context, instance)
elif stop:
if force:
self.compute.compute_api.force_stop(self.context, instance)
else:
self.compute.compute_api.stop(self.context, instance)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
driver_power_state)
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_sync_instance_power_state_to_stop(self):
for ps in (power_state.SHUTDOWN, power_state.CRASHED,
power_state.SUSPENDED):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps)
for ps in (power_state.SHUTDOWN, power_state.CRASHED):
self._test_sync_to_stop(power_state.PAUSED, vm_states.PAUSED, ps,
force=True)
self._test_sync_to_stop(power_state.SHUTDOWN, vm_states.STOPPED,
power_state.RUNNING, force=True)
def test_sync_instance_power_state_to_terminate(self):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE,
power_state.SHUTDOWN,
force=False, shutdown_terminate=True)
def test_sync_instance_power_state_to_no_stop(self):
for ps in (power_state.PAUSED, power_state.NOSTATE):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps,
stop=False)
for vs in (vm_states.SOFT_DELETED, vm_states.DELETED):
for ps in (power_state.NOSTATE, power_state.SHUTDOWN):
self._test_sync_to_stop(power_state.RUNNING, vs, ps,
stop=False)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_pending_task(
self, mock_sync_power_state):
with mock.patch.object(self.compute.driver,
'get_info') as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid',
task_state=task_states.POWERING_OFF)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
self.assertFalse(mock_get_info.called)
self.assertFalse(mock_sync_power_state.called)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_not_found_driver(
self, mock_sync_power_state):
error = exception.InstanceNotFound(instance_id=1)
with mock.patch.object(self.compute.driver,
'get_info', side_effect=error) as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid', task_state=None)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
mock_get_info.assert_called_once_with(db_instance)
mock_sync_power_state.assert_called_once_with(self.context,
db_instance,
power_state.NOSTATE,
use_slave=True)
def test_run_pending_deletes(self):
self.flags(instance_delete_interval=10)
class FakeInstance(object):
def __init__(self, uuid, name, smd):
self.uuid = uuid
self.name = name
self.system_metadata = smd
self.cleaned = False
def __getitem__(self, name):
return getattr(self, name)
def save(self):
pass
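        # a is already at the maximum clean attempts and is skipped, b's
        # cleanup succeeds on this pass, and c's first attempt fails.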
a = FakeInstance('123', 'apple', {'clean_attempts': '100'})
b = FakeInstance('456', 'orange', {'clean_attempts': '3'})
c = FakeInstance('789', 'banana', {})
self.mox.StubOutWithMock(objects.InstanceList,
'get_by_filters')
objects.InstanceList.get_by_filters(
{'read_deleted': 'yes'},
{'deleted': True, 'soft_deleted': False, 'host': 'fake-mini',
'cleaned': False},
expected_attrs=['info_cache', 'security_groups',
'system_metadata'],
use_slave=True).AndReturn([a, b, c])
self.mox.StubOutWithMock(self.compute.driver, 'delete_instance_files')
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(True)
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(False)
self.mox.ReplayAll()
self.compute._run_pending_deletes({})
self.assertFalse(a.cleaned)
self.assertEqual('100', a.system_metadata['clean_attempts'])
self.assertTrue(b.cleaned)
self.assertEqual('4', b.system_metadata['clean_attempts'])
self.assertFalse(c.cleaned)
self.assertEqual('1', c.system_metadata['clean_attempts'])
def test_attach_interface_failure(self):
# Test that the fault methods are invoked when an attach fails
db_instance = fake_instance.fake_db_instance()
f_instance = objects.Instance._from_db_object(self.context,
objects.Instance(),
db_instance)
e = exception.InterfaceAttachFailed(instance_uuid=f_instance.uuid)
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute.network_api,
'allocate_port_for_instance',
side_effect=e)
@mock.patch.object(self.compute, '_instance_update',
side_effect=lambda *a, **k: {})
def do_test(update, meth, add_fault):
self.assertRaises(exception.InterfaceAttachFailed,
self.compute.attach_interface,
self.context, f_instance, 'net_id', 'port_id',
None)
add_fault.assert_has_calls([
mock.call(self.context, f_instance, e,
mock.ANY)])
do_test()
def test_detach_interface_failure(self):
# Test that the fault methods are invoked when a detach fails
# Build test data that will cause a PortNotFound exception
f_instance = mock.MagicMock()
f_instance.info_cache = mock.MagicMock()
f_instance.info_cache.network_info = []
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute, '_set_instance_obj_error_state')
def do_test(meth, add_fault):
self.assertRaises(exception.PortNotFound,
self.compute.detach_interface,
self.context, f_instance, 'port_id')
add_fault.assert_has_calls(
[mock.call(self.context, f_instance, mock.ANY, mock.ANY)])
do_test()
def test_swap_volume_volume_api_usage(self):
        # This test ensures that volume_id arguments are passed through to
        # the volume API and that the volume status transitions are correct
volumes = {}
old_volume_id = uuidutils.generate_uuid()
volumes[old_volume_id] = {'id': old_volume_id,
'display_name': 'old_volume',
'status': 'detaching',
'size': 1}
new_volume_id = uuidutils.generate_uuid()
volumes[new_volume_id] = {'id': new_volume_id,
'display_name': 'new_volume',
'status': 'available',
'size': 2}
def fake_vol_api_roll_detaching(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'detaching':
volumes[volume_id]['status'] = 'in-use'
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(
{'device_name': '/dev/vdb', 'source_type': 'volume',
'destination_type': 'volume', 'instance_uuid': 'fake',
'connection_info': '{"foo": "bar"}'})
def fake_vol_api_func(context, volume, *args):
self.assertTrue(uuidutils.is_uuid_like(volume))
return {}
def fake_vol_get(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
return volumes[volume_id]
def fake_vol_unreserve(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'attaching':
volumes[volume_id]['status'] = 'available'
def fake_vol_migrate_volume_completion(context, old_volume_id,
new_volume_id, error=False):
self.assertTrue(uuidutils.is_uuid_like(old_volume_id))
self.assertTrue(uuidutils.is_uuid_like(new_volume_id))
volumes[old_volume_id]['status'] = 'in-use'
return {'save_volume_id': new_volume_id}
def fake_func_exc(*args, **kwargs):
raise AttributeError # Random exception
def fake_swap_volume(old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
self.assertEqual(resize_to, 2)
def fake_block_device_mapping_update(ctxt, id, updates, legacy):
self.assertEqual(2, updates['volume_size'])
return fake_bdm
self.stubs.Set(self.compute.volume_api, 'roll_detaching',
fake_vol_api_roll_detaching)
self.stubs.Set(self.compute.volume_api, 'get', fake_vol_get)
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_vol_api_func)
self.stubs.Set(self.compute.volume_api, 'unreserve_volume',
fake_vol_unreserve)
self.stubs.Set(self.compute.volume_api, 'terminate_connection',
fake_vol_api_func)
self.stubs.Set(db, 'block_device_mapping_get_by_volume_id',
lambda x, y, z: fake_bdm)
self.stubs.Set(self.compute.driver, 'get_volume_connector',
lambda x: {})
self.stubs.Set(self.compute.driver, 'swap_volume',
fake_swap_volume)
self.stubs.Set(self.compute.volume_api, 'migrate_volume_completion',
fake_vol_migrate_volume_completion)
self.stubs.Set(db, 'block_device_mapping_update',
fake_block_device_mapping_update)
self.stubs.Set(db,
'instance_fault_create',
lambda x, y:
test_instance_fault.fake_faults['fake-uuid'][0])
self.stubs.Set(self.compute, '_instance_update',
lambda c, u, **k: {})
# Good path
self.compute.swap_volume(self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
# Error paths
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.driver, 'swap_volume', fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
@mock.patch.object(compute_utils, 'EventReporter')
def test_check_can_live_migrate_source(self, event_mock):
is_volume_backed = 'volume_backed'
dest_check_data = dict(foo='bar')
db_instance = fake_instance.fake_db_instance()
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
expected_dest_check_data = dict(dest_check_data,
is_volume_backed=is_volume_backed)
self.mox.StubOutWithMock(self.compute.compute_api,
'is_volume_backed_instance')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_source')
self.compute.compute_api.is_volume_backed_instance(
self.context, instance).AndReturn(is_volume_backed)
self.compute._get_instance_block_device_info(
self.context, instance, refresh_conn_info=True
).AndReturn({'block_device_mapping': 'fake'})
self.compute.driver.check_can_live_migrate_source(
self.context, instance, expected_dest_check_data,
{'block_device_mapping': 'fake'})
self.mox.ReplayAll()
self.compute.check_can_live_migrate_source(
self.context, instance=instance,
dest_check_data=dest_check_data)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_source',
instance.uuid)
@mock.patch.object(compute_utils, 'EventReporter')
def _test_check_can_live_migrate_destination(self, event_mock,
do_raise=False,
has_mig_data=False):
db_instance = fake_instance.fake_db_instance(host='fake-host')
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
instance.host = 'fake-host'
block_migration = 'block_migration'
disk_over_commit = 'disk_over_commit'
src_info = 'src_info'
dest_info = 'dest_info'
dest_check_data = dict(foo='bar')
mig_data = dict(cow='moo')
expected_result = dict(mig_data)
if has_mig_data:
dest_check_data['migrate_data'] = dict(cat='meow')
expected_result.update(cat='meow')
self.mox.StubOutWithMock(self.compute, '_get_compute_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
self.compute._get_compute_info(self.context,
'fake-host').AndReturn(src_info)
self.compute._get_compute_info(self.context,
CONF.host).AndReturn(dest_info)
self.compute.driver.check_can_live_migrate_destination(
self.context, instance, src_info, dest_info,
block_migration, disk_over_commit).AndReturn(dest_check_data)
mock_meth = self.compute.compute_rpcapi.check_can_live_migrate_source(
self.context, instance, dest_check_data)
if do_raise:
mock_meth.AndRaise(test.TestingException())
self.mox.StubOutWithMock(db, 'instance_fault_create')
db.instance_fault_create(
self.context, mox.IgnoreArg()).AndReturn(
test_instance_fault.fake_faults['fake-uuid'][0])
else:
mock_meth.AndReturn(mig_data)
self.compute.driver.check_can_live_migrate_destination_cleanup(
self.context, dest_check_data)
self.mox.ReplayAll()
result = self.compute.check_can_live_migrate_destination(
self.context, instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
self.assertEqual(expected_result, result)
event_mock.assert_called_once_with(
self.context, 'compute_check_can_live_migrate_destination',
instance.uuid)
def test_check_can_live_migrate_destination_success(self):
self._test_check_can_live_migrate_destination()
def test_check_can_live_migrate_destination_success_w_mig_data(self):
self._test_check_can_live_migrate_destination(has_mig_data=True)
def test_check_can_live_migrate_destination_fail(self):
self.assertRaises(
test.TestingException,
self._test_check_can_live_migrate_destination,
do_raise=True)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_prepare_for_instance_event(self, lock_name_mock):
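        # The first call for an instance should lazily create its event
        # entry and return an event object that can be waited on.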
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_pop_instance_event(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
result = self.compute.instance_events.pop_instance_event(inst_obj,
event_obj)
self.assertEqual(result, event)
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_clear_events_for_instance(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.clear_events_for_instance(
inst_obj)
self.assertEqual(result, {'test-event': event})
lock_name_mock.assert_called_once_with(inst_obj)
def test_instance_events_lock_name(self):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events._lock_name(inst_obj)
self.assertEqual(result, 'foo-events')
def test_prepare_for_instance_event_again(self):
inst_obj = objects.Instance(uuid='foo')
self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
        # A second call for the same event must not create a new entry;
        # make sure we get back the event that is already registered
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
def test_process_instance_event(self):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'network-vif-plugged': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='network-vif-plugged',
tag=None)
self.compute._process_instance_event(inst_obj, event_obj)
self.assertTrue(event.ready())
self.assertEqual(event_obj, event.wait())
self.assertEqual({}, self.compute.instance_events._events)
def test_process_instance_vif_deleted_event(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid')
inst_obj = objects.Instance(id=3, uuid='uuid', info_cache=info_cache)
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface')
def do_test(detach_interface, update_instance_cache_with_nw_info):
self.compute._process_instance_vif_deleted_event(self.context,
inst_obj,
vif2['id'])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
inst_obj,
nw_info=[vif1])
detach_interface.assert_called_once_with(inst_obj, vif2)
do_test()
def test_external_instance_event(self):
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2'),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid2',
tag='tag2'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid3',
tag='tag3')]
@mock.patch.object(self.compute, '_process_instance_vif_deleted_event')
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
_process_instance_vif_deleted_event):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
_process_instance_event.assert_called_once_with(instances[1],
events[1])
_process_instance_vif_deleted_event.assert_called_once_with(
self.context, instances[2], events[2].tag)
do_test()
def test_external_instance_event_with_exception(self):
vif1 = fake_network_cache_model.new_vif()
vif1['id'] = '1'
vif2 = fake_network_cache_model.new_vif()
vif2['id'] = '2'
nw_info = network_model.NetworkInfo([vif1, vif2])
info_cache = objects.InstanceInfoCache(network_info=nw_info,
instance_uuid='uuid2')
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2', info_cache=info_cache),
objects.Instance(id=3, uuid='uuid3')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='network-vif-deleted',
instance_uuid='uuid2',
tag='2'),
objects.InstanceExternalEvent(name='network-vif-plugged',
instance_uuid='uuid3',
tag='tag3')]
# Make sure all the three events are handled despite the exception in
# processing event 2
@mock.patch.object(manager.base_net_api,
'update_instance_cache_with_nw_info')
@mock.patch.object(self.compute.driver, 'detach_interface',
side_effect=exception.NovaException)
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info,
detach_interface, update_instance_cache_with_nw_info):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
update_instance_cache_with_nw_info.assert_called_once_with(
self.compute.network_api,
self.context,
instances[1],
nw_info=[vif1])
detach_interface.assert_called_once_with(instances[1], vif2)
_process_instance_event.assert_called_once_with(instances[2],
events[2])
do_test()
def test_cancel_all_events(self):
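        # Cancelling pending events must send each waiter a failed event,
        # with the name and tag split back out of the registered key.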
inst = objects.Instance(uuid='uuid')
fake_eventlet_event = mock.MagicMock()
self.compute.instance_events._events = {
inst.uuid: {
'network-vif-plugged-bar': fake_eventlet_event,
}
}
self.compute.instance_events.cancel_all_events()
self.assertTrue(fake_eventlet_event.send.called)
event = fake_eventlet_event.send.call_args_list[0][0][0]
self.assertEqual('network-vif-plugged', event.name)
self.assertEqual('bar', event.tag)
self.assertEqual('failed', event.status)
def test_cleanup_cancels_all_events(self):
with mock.patch.object(self.compute, 'instance_events') as mock_ev:
self.compute.cleanup_host()
mock_ev.cancel_all_events.assert_called_once_with()
def test_cleanup_blocks_new_events(self):
instance = objects.Instance(uuid='uuid')
self.compute.instance_events.cancel_all_events()
callback = mock.MagicMock()
body = mock.MagicMock()
with self.compute.virtapi.wait_for_instance_event(
instance, ['network-vif-plugged-bar'],
error_callback=callback):
body()
self.assertTrue(body.called)
callback.assert_called_once_with('network-vif-plugged-bar', instance)
def test_pop_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
event = mock.MagicMock()
self.compute.instance_events._events = None
self.assertIsNone(
self.compute.instance_events.pop_instance_event(inst, event))
def test_clear_events_fails_gracefully(self):
inst = objects.Instance(uuid='uuid')
self.compute.instance_events._events = None
self.assertEqual(
self.compute.instance_events.clear_events_for_instance(inst), {})
def test_retry_reboot_pending_soft(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'SOFT')
def test_retry_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_soft_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_no_reboot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = 'bar'
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
                self.context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.objects.Instance._from_db_object')
def test_remove_volume_connection(self, inst_from_db, detach, bdm_get):
bdm = mock.sentinel.bdm
inst_obj = mock.sentinel.inst_obj
bdm_get.return_value = bdm
inst_from_db.return_value = inst_obj
with mock.patch.object(self.compute, 'volume_api'):
self.compute.remove_volume_connection(self.context, 'vol',
inst_obj)
detach.assert_called_once_with(self.context, inst_obj, bdm)
def test_detach_volume(self):
self._test_detach_volume()
def test_detach_volume_not_destroy_bdm(self):
self._test_detach_volume(destroy_bdm=False)
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._driver_detach_volume')
@mock.patch('nova.compute.manager.ComputeManager.'
'_notify_about_instance_usage')
@mock.patch('nova.conductor.manager.ConductorManager.vol_usage_update')
def _test_detach_volume(self, vol_usage_update, notify_inst_usage, detach,
bdm_get, destroy_bdm=True):
volume_id = '123'
inst_obj = mock.sentinel.inst_obj
bdm = mock.MagicMock(spec=objects.BlockDeviceMapping)
bdm.device_name = 'vdb'
bdm_get.return_value = bdm
with mock.patch.object(self.compute, 'volume_api') as volume_api:
with mock.patch.object(self.compute, 'driver') as driver:
connector_sentinel = mock.sentinel.connector
driver.get_volume_connector.return_value = connector_sentinel
self.compute._detach_volume(self.context, volume_id,
inst_obj,
destroy_bdm=destroy_bdm)
detach.assert_called_once_with(self.context, inst_obj, bdm)
driver.get_volume_connector.assert_called_once_with(inst_obj)
volume_api.terminate_connection.assert_called_once_with(
self.context, volume_id, connector_sentinel)
volume_api.detach.assert_called_once_with(mock.ANY, volume_id)
notify_inst_usage.assert_called_once_with(
self.context, inst_obj, "volume.detach",
extra_usage_info={'volume_id': volume_id}
)
if destroy_bdm:
bdm.destroy.assert_called_once_with()
else:
self.assertFalse(bdm.destroy.called)
def _test_rescue(self, clean_shutdown=True):
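        # Drive rescue_instance with the driver, network and notification
        # paths mocked, then check the instance state and the mock calls.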
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE)
fake_nw_info = network_model.NetworkInfo()
rescue_image_meta = {'id': 'fake', 'name': 'fake'}
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_get_rescue_image',
return_value=rescue_image_meta),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_power_off_instance'),
mock.patch.object(self.compute.driver, 'rescue'),
mock.patch.object(compute_utils, 'notify_usage_exists'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
get_rescue_image, notify_instance_usage, power_off_instance,
driver_rescue, notify_usage_exists, get_power_state, instance_save
):
self.compute.rescue_instance(
self.context, instance, rescue_password='verybadpass',
rescue_image_ref=None, clean_shutdown=clean_shutdown)
# assert the field values on the instance object
self.assertEqual(vm_states.RESCUED, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
self.assertIsNotNone(instance.launched_at)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
get_rescue_image.assert_called_once_with(
self.context, instance, None)
extra_usage_info = {'rescue_image_name': 'fake'}
notify_calls = [
mock.call(self.context, instance, "rescue.start",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info),
mock.call(self.context, instance, "rescue.end",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
power_off_instance.assert_called_once_with(self.context, instance,
clean_shutdown)
driver_rescue.assert_called_once_with(
self.context, instance, fake_nw_info, rescue_image_meta,
'verybadpass')
notify_usage_exists.assert_called_once_with(self.compute.notifier,
self.context, instance, current_period=True)
instance_save.assert_called_once_with(
expected_task_state=task_states.RESCUING)
def test_rescue(self):
self._test_rescue()
def test_rescue_forced_shutdown(self):
self._test_rescue(clean_shutdown=False)
def test_unrescue(self):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.RESCUED)
fake_nw_info = network_model.NetworkInfo()
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute.driver, 'unrescue'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
notify_instance_usage, driver_unrescue, get_power_state,
instance_save
):
self.compute.unrescue_instance(self.context, instance)
# assert the field values on the instance object
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
notify_calls = [
mock.call(self.context, instance, "unrescue.start",
network_info=fake_nw_info),
mock.call(self.context, instance, "unrescue.end",
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
driver_unrescue.assert_called_once_with(instance, fake_nw_info)
instance_save.assert_called_once_with(
expected_task_state=task_states.UNRESCUING)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.Instance, 'save')
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
def test_set_admin_password(self, gen_password_mock,
instance_save_mock, power_state_mock):
# Ensure instance can have its admin password set.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password')
def do_test(driver_mock, elevated_mock):
# call the manager method
self.compute.set_admin_password(self.context, instance, None)
# make our assertions
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
power_state_mock.assert_called_once_with(self.context, instance)
driver_mock.assert_called_once_with(instance, 'fake-pass')
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
do_test()
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.NOSTATE)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def test_set_admin_password_bad_state(self, add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock):
# Test setting password while instance is rebuilding.
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self.context, 'elevated',
return_value=self.context):
# call the manager method
self.assertRaises(exception.InstancePasswordSetFailed,
self.compute.set_admin_password,
self.context, instance, None)
# make our assertions
power_state_mock.assert_called_once_with(self.context, instance)
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def _do_test_set_admin_password_driver_error(self, exc,
expected_vm_state,
expected_task_state,
expected_exception,
add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock,
gen_password_mock):
# Ensure expected exception is raised if set_admin_password fails.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password',
side_effect=exc)
def do_test(driver_mock, elevated_mock):
# error raised from the driver should not reveal internal
# information so a new error is raised
self.assertRaises(expected_exception,
self.compute.set_admin_password,
self.context,
instance=instance,
new_pass=None)
if expected_exception == NotImplementedError:
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
else:
# setting the instance to error state
instance_save_mock.assert_called_once_with()
self.assertEqual(expected_vm_state, instance.vm_state)
# check revert_task_state decorator
update_mock.assert_called_once_with(
self.context, instance.uuid,
task_state=expected_task_state)
# check wrap_instance_fault decorator
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
do_test()
def test_set_admin_password_driver_not_authorized(self):
# Ensure expected exception is raised if set_admin_password not
# authorized.
exc = exception.Forbidden('Internal error')
expected_exception = exception.InstancePasswordSetFailed
self._do_test_set_admin_password_driver_error(
exc, vm_states.ERROR, None, expected_exception)
def test_set_admin_password_driver_not_implemented(self):
# Ensure expected exception is raised if set_admin_password not
# implemented by driver.
exc = NotImplementedError()
expected_exception = NotImplementedError
self._do_test_set_admin_password_driver_error(
exc, vm_states.ACTIVE, None, expected_exception)
def test_destroy_evacuated_instances(self):
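        # Instances reported by the driver but owned by another host are
        # destroyed only when an evacuation migration record exists.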
our_host = self.compute.host
instance_1 = objects.Instance(self.context)
instance_1.uuid = 'foo'
instance_1.task_state = None
instance_1.vm_state = vm_states.ACTIVE
instance_1.host = 'not-' + our_host
instance_2 = objects.Instance(self.context)
instance_2.uuid = 'bar'
instance_2.task_state = None
instance_2.vm_state = vm_states.ACTIVE
instance_2.host = 'not-' + our_host
# Only instance 2 has a migration record
migration = objects.Migration(instance_uuid=instance_2.uuid)
with contextlib.nested(
mock.patch.object(self.compute, '_get_instances_on_driver',
return_value=[instance_1,
instance_2]),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.compute, '_get_instance_block_device_info',
return_value={}),
mock.patch.object(self.compute, '_is_instance_storage_shared',
return_value=False),
mock.patch.object(self.compute.driver, 'destroy'),
mock.patch('nova.objects.MigrationList.get_by_filters'),
mock.patch('nova.objects.Migration.save')
) as (_get_instances_on_driver, get_instance_nw_info,
_get_instance_block_device_info, _is_instance_storage_shared,
destroy, migration_list, migration_save):
migration_list.return_value = [migration]
self.compute._destroy_evacuated_instances(self.context)
# Only instance 2 should be deleted. Instance 1 is still running
# here, but no migration from our host exists, so ignore it
destroy.assert_called_once_with(self.context, instance_2, None,
{}, True)
@mock.patch('nova.compute.manager.ComputeManager.'
'_destroy_evacuated_instances')
@mock.patch('nova.compute.manager.LOG')
def test_init_host_foreign_instance(self, mock_log, mock_destroy):
inst = mock.MagicMock()
inst.host = self.compute.host + '-alt'
self.compute._init_instance(mock.sentinel.context, inst)
self.assertFalse(inst.save.called)
self.assertTrue(mock_log.warning.called)
msg = mock_log.warning.call_args_list[0]
self.assertIn('appears to not be owned by this host', msg[0][0])
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_not_implemented_err(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, instance_state=vm_states.STOPPED):
raise NotImplementedError('test')
self.assertRaises(NotImplementedError, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance.uuid,
vm_state=vm_states.STOPPED, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_inst_fault_rollback(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(self.context,
instance):
raise exception.InstanceFaultRollback(
inner_exception=test.TestingException('test'))
self.assertRaises(test.TestingException, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance.uuid,
vm_state=vm_states.ACTIVE, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager.'
'_set_instance_obj_error_state')
def test_error_out_instance_on_exception_unknown_with_quotas(self,
set_error):
instance = fake_instance.fake_instance_obj(self.context)
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, quotas):
raise test.TestingException('test')
self.assertRaises(test.TestingException, do_test)
self.assertEqual(1, len(quotas.method_calls))
self.assertEqual(mock.call.rollback(), quotas.method_calls[0])
set_error.assert_called_once_with(self.context, instance)
def test_cleanup_volumes(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_do_not_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': False})
bdm_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_do_not_delete_dict, bdm_delete_dict])
with mock.patch.object(self.compute.volume_api,
'delete') as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms)
volume_delete.assert_called_once_with(self.context,
bdms[1].volume_id)
def test_cleanup_volumes_exception_do_not_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms,
raise_exc=False)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_cleanup_volumes_exception_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.assertRaises(test.TestingException,
self.compute._cleanup_volumes, self.context, instance.uuid,
bdms)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_stop_instance_task_state_none_power_state_shutdown(self):
# Tests that stop_instance doesn't puke when the instance power_state
# is shutdown and the task_state is None.
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE,
task_state=None, power_state=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_notify_about_instance_usage')
@mock.patch.object(self.compute, '_power_off_instance')
@mock.patch.object(instance, 'save')
def do_test(save_mock, power_off_mock, notify_mock, get_state_mock):
# run the code
self.compute.stop_instance(self.context, instance, True)
# assert the calls
self.assertEqual(2, get_state_mock.call_count)
notify_mock.assert_has_calls([
mock.call(self.context, instance, 'power_off.start'),
mock.call(self.context, instance, 'power_off.end')
])
power_off_mock.assert_called_once_with(
self.context, instance, True)
save_mock.assert_called_once_with(
expected_task_state=[task_states.POWERING_OFF, None])
self.assertEqual(power_state.SHUTDOWN, instance.power_state)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.STOPPED, instance.vm_state)
do_test()
def test_reset_network_driver_not_implemented(self):
instance = fake_instance.fake_instance_obj(self.context)
@mock.patch.object(self.compute.driver, 'reset_network',
side_effect=NotImplementedError())
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def do_test(mock_add_fault, mock_reset):
self.assertRaises(messaging.ExpectedException,
self.compute.reset_network,
self.context,
instance)
self.compute = utils.ExceptionHelper(self.compute)
self.assertRaises(NotImplementedError,
self.compute.reset_network,
self.context,
instance)
do_test()
def test_rebuild_default_impl(self):
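        # With recreate=True the default rebuild must spawn on the shared
        # block storage mapping without ever calling driver.destroy().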
def _detach(context, bdms):
pass
def _attach(context, instance, bdms, do_check_attach=True):
return {'block_device_mapping': 'shared_block_storage'}
def _spawn(context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self.assertEqual(block_device_info['block_device_mapping'],
'shared_block_storage')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'destroy',
return_value=None),
mock.patch.object(self.compute.driver, 'spawn',
side_effect=_spawn),
mock.patch.object(objects.Instance, 'save',
return_value=None)
        ) as (
mock_destroy,
mock_spawn,
mock_save
):
instance = fake_instance.fake_instance_obj(self.context)
instance.task_state = task_states.REBUILDING
instance.save(expected_task_state=[task_states.REBUILDING])
self.compute._rebuild_default_impl(self.context,
instance,
None,
[],
admin_password='new_pass',
bdms=[],
detach_block_devices=_detach,
attach_block_devices=_attach,
network_info=None,
recreate=True,
block_device_info=None,
preserve_ephemeral=False)
self.assertFalse(mock_destroy.called)
self.assertTrue(mock_save.called)
self.assertTrue(mock_spawn.called)
@mock.patch.object(utils, 'last_completed_audit_period',
return_value=(0, 0))
@mock.patch.object(time, 'time', side_effect=[10, 20, 21])
@mock.patch.object(objects.InstanceList, 'get_by_host', return_value=[])
@mock.patch.object(objects.BandwidthUsage, 'get_by_instance_uuid_and_mac')
@mock.patch.object(db, 'bw_usage_update')
def test_poll_bandwidth_usage(self, bw_usage_update, get_by_uuid_mac,
get_by_host, time, last_completed_audit):
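        # The counter totals (1, 2) are added to the stored usage (3, 4),
        # so the update records bw_in=4, bw_out=6 and the new counters.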
bw_counters = [{'uuid': 'fake-uuid', 'mac_address': 'fake-mac',
'bw_in': 1, 'bw_out': 2}]
usage = objects.BandwidthUsage()
usage.bw_in = 3
usage.bw_out = 4
usage.last_ctr_in = 0
usage.last_ctr_out = 0
self.flags(bandwidth_poll_interval=1)
get_by_uuid_mac.return_value = usage
_time = timeutils.utcnow()
bw_usage_update.return_value = {'uuid': '', 'mac': '',
'start_period': _time, 'last_refreshed': _time, 'bw_in': 0,
'bw_out': 0, 'last_ctr_in': 0, 'last_ctr_out': 0, 'deleted': 0,
'created_at': _time, 'updated_at': _time, 'deleted_at': _time}
with mock.patch.object(self.compute.driver,
'get_all_bw_counters', return_value=bw_counters):
self.compute._poll_bandwidth_usage(self.context)
get_by_uuid_mac.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', start_period=0, use_slave=True)
            # NOTE(sdague): bw_usage_update happens at some point in the
            # future, so the exact last_refreshed value is irrelevant here
bw_usage_update.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', 0, 4, 6, 1, 2,
last_refreshed=mock.ANY,
update_cells=False)
def test_reverts_task_state_instance_not_found(self):
# Tests that the reverts_task_state decorator in the compute manager
# will not trace when an InstanceNotFound is raised.
instance = objects.Instance(uuid='fake')
instance_update_mock = mock.Mock(
side_effect=exception.InstanceNotFound(instance_id=instance.uuid))
self.compute._instance_update = instance_update_mock
log_mock = mock.Mock()
manager.LOG = log_mock
@manager.reverts_task_state
def fake_function(self, context, instance):
raise test.TestingException()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance)
self.assertFalse(log_mock.called)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_update_scheduler_instance_info(self, mock_update):
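        # The scheduler client must be called with a fresh copy of the
        # context (same class, different object) and this compute's host.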
instance = objects.Instance(uuid='fake')
self.compute._update_scheduler_instance_info(self.context, instance)
self.assertEqual(mock_update.call_count, 1)
args = mock_update.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
# Send a single instance; check that the method converts to an
# InstanceList
self.assertIsInstance(args[2], objects.InstanceList)
self.assertEqual(args[2].objects[0], instance)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
def test_delete_scheduler_instance_info(self, mock_delete):
self.compute._delete_scheduler_instance_info(self.context,
mock.sentinel.inst_uuid)
self.assertEqual(mock_delete.call_count, 1)
args = mock_delete.call_args[0]
self.assertNotEqual(args[0], self.context)
self.assertIsInstance(args[0], self.context.__class__)
self.assertEqual(args[1], self.compute.host)
self.assertEqual(args[2], mock.sentinel.inst_uuid)
@mock.patch.object(nova.context.RequestContext, 'elevated')
@mock.patch.object(nova.objects.InstanceList, 'get_by_host')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
def test_sync_scheduler_instance_info(self, mock_sync, mock_get_by_host,
mock_elevated):
inst1 = objects.Instance(uuid='fake1')
inst2 = objects.Instance(uuid='fake2')
inst3 = objects.Instance(uuid='fake3')
exp_uuids = [inst.uuid for inst in [inst1, inst2, inst3]]
mock_get_by_host.return_value = objects.InstanceList(
objects=[inst1, inst2, inst3])
fake_elevated = context.get_admin_context()
mock_elevated.return_value = fake_elevated
self.compute._sync_scheduler_instance_info(self.context)
mock_get_by_host.assert_called_once_with(
fake_elevated, self.compute.host, expected_attrs=[],
use_slave=True)
mock_sync.assert_called_once_with(fake_elevated, self.compute.host,
exp_uuids)
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'sync_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'delete_instance_info')
@mock.patch.object(nova.scheduler.client.SchedulerClient,
'update_instance_info')
def test_scheduler_info_updates_off(self, mock_update, mock_delete,
mock_sync):
mgr = self.compute
mgr.send_instance_updates = False
mgr._update_scheduler_instance_info(self.context,
mock.sentinel.instance)
mgr._delete_scheduler_instance_info(self.context,
mock.sentinel.instance_uuid)
mgr._sync_scheduler_instance_info(self.context)
# None of the calls should have been made
self.assertFalse(mock_update.called)
self.assertFalse(mock_delete.called)
self.assertFalse(mock_sync.called)
def test_refresh_instance_security_rules_takes_non_object(self):
inst = fake_instance.fake_db_instance()
with mock.patch.object(self.compute.driver,
'refresh_instance_security_rules') as mock_r:
self.compute.refresh_instance_security_rules(self.context, inst)
self.assertIsInstance(mock_r.call_args_list[0][0][0],
objects.Instance)
class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerBuildInstanceTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.admin_pass = 'pass'
self.injected_files = []
self.image = {}
self.node = 'fake-node'
self.limits = {}
self.requested_networks = []
self.security_groups = []
self.block_device_mapping = []
self.filter_properties = {'retry': {'num_attempts': 1,
'hosts': [[self.compute.host,
'fake-node']]}}
def fake_network_info():
return network_model.NetworkInfo([{'address': '1.2.3.4'}])
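# Wrap the callable so the tests exercise the async-style network_info
# object that the manager code normally works with.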
self.network_info = network_model.NetworkInfoAsyncWrapper(
fake_network_info)
self.block_device_info = self.compute._prep_block_device(self.context,
    self.instance, self.block_device_mapping)
# override tracker with a version that doesn't need the database:
fake_rt = fake_resource_tracker.FakeResourceTracker(self.compute.host,
self.compute.driver, self.node)
self.compute._resource_tracker_dict[self.node] = fake_rt
def _do_build_instance_update(self, reschedule_update=False):
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save(
expected_task_state=(task_states.SCHEDULING, None)).AndReturn(
self.instance)
if reschedule_update:
self.instance.save().AndReturn(self.instance)
def _build_and_run_instance_update(self):
self.mox.StubOutWithMock(self.instance, 'save')
self._build_resources_instance_update(stub=False)
self.instance.save(expected_task_state=
task_states.BLOCK_DEVICE_MAPPING).AndReturn(self.instance)
def _build_resources_instance_update(self, stub=True):
if stub:
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save().AndReturn(self.instance)
def _notify_about_instance_usage(self, event, stub=True, **kwargs):
if stub:
self.mox.StubOutWithMock(self.compute,
'_notify_about_instance_usage')
self.compute._notify_about_instance_usage(self.context, self.instance,
event, **kwargs)
def _instance_action_events(self):
self.mox.StubOutWithMock(objects.InstanceActionEvent, 'event_start')
self.mox.StubOutWithMock(objects.InstanceActionEvent,
'event_finish_with_failure')
objects.InstanceActionEvent.event_start(
self.context, self.instance.uuid, mox.IgnoreArg(),
want_result=False)
objects.InstanceActionEvent.event_finish_with_failure(
self.context, self.instance.uuid, mox.IgnoreArg(),
exc_val=mox.IgnoreArg(), exc_tb=mox.IgnoreArg(),
want_result=False)
@staticmethod
def _assert_build_instance_hook_called(mock_hooks, result):
# NOTE(coreywright): we want to test the return value of
# _do_build_and_run_instance, but it doesn't bubble all the way up, so
# mock the hooking, which allows us to test that too, though a little
# too intimately
mock_hooks.setdefault().run_post.assert_called_once_with(
'build_instance', result, mock.ANY, mock.ANY, f=None)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_called_with_proper_args(self, mock_spawn,
mock_hooks):
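# Run the spawned callable inline so build_and_run_instance completes
# synchronously and its effects can be asserted immediately.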
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.ACTIVE)
# This tests that, when an icehouse-compatible RPC call is sent to a juno
# compute node, the NetworkRequest object can be loaded from a three-item
# tuple.
@mock.patch('nova.objects.Instance.save')
@mock.patch('nova.compute.manager.ComputeManager._build_and_run_instance')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_with_icehouse_requested_network(
self, mock_spawn, mock_build_and_run, mock_save):
fake_server_actions.stub_out_action_events(self.stubs)
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
mock_save.return_value = self.instance
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=[objects.NetworkRequest(
network_id='fake_network_id',
address='10.0.0.1',
port_id='fake_port_id')],
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
requested_network = mock_build_and_run.call_args[0][5][0]
self.assertEqual('fake_network_id', requested_network.network_id)
self.assertEqual('10.0.0.1', str(requested_network.address))
self.assertEqual('fake_port_id', requested_network.port_id)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_abort_exception(self, mock_spawn, mock_hooks):
def fake_spawn(f, *args, **kwargs):
# NOTE(danms): Simulate the detached nature of spawn so that
# we confirm that the inner task has the fault logic
try:
return f(*args, **kwargs)
except Exception:
pass
mock_spawn.side_effect = fake_spawn
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.BuildAbortException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_obj_error_state(self.context, self.instance)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.network_api.cleanup_instance_network_on_host(self.context,
self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def test_rescheduled_exception_with_non_ascii_exception(self):
exc = exception.NovaException(u's\xe9quence')
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
@mock.patch.object(manager.ComputeManager, '_build_and_run_instance')
@mock.patch.object(conductor_api.ComputeTaskAPI, 'build_instances')
@mock.patch.object(network_api.API, 'cleanup_instance_network_on_host')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.InstanceActionEvent, 'event_start')
@mock.patch.object(objects.InstanceActionEvent,
'event_finish_with_failure')
@mock.patch.object(virt_driver.ComputeDriver, 'macs_for_instance')
def test_rescheduled_exception_with_network_allocated(self,
mock_macs_for_instance, mock_event_finish,
mock_event_start, mock_ins_save, mock_cleanup_network,
mock_build_ins, mock_build_and_run):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
system_metadata={'network_allocated': 'True'},
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
mock_ins_save.return_value = instance
mock_macs_for_instance.return_value = []
mock_build_and_run.side_effect = exception.RescheduledException(
reason='', instance_uuid=self.instance.uuid)
self.compute._do_build_and_run_instance(self.context, instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
mock_build_and_run.assert_called_once_with(self.context,
instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
mock_cleanup_network.assert_called_once_with(
self.context, instance, self.compute.host)
mock_build_ins.assert_called_once_with(self.context,
[instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_without_retry(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_obj_error_state')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
{}).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
compute_utils.add_instance_fault_from_exc(self.context, self.instance,
mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_obj_error_state(self.context,
self.instance)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties={},
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_do_not_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(True)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def _test_build_and_run_exceptions(self, exc, set_error=False,
cleanup_volumes=False):
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(exc)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
if cleanup_volumes:
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
if set_error:
self.mox.StubOutWithMock(self.compute,
'_set_instance_obj_error_state')
self.mox.StubOutWithMock(compute_utils,
'add_instance_fault_from_exc')
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_obj_error_state(self.context,
self.instance)
self._instance_action_events()
self.mox.ReplayAll()
with contextlib.nested(
mock.patch('nova.utils.spawn_n'),
mock.patch('nova.hooks._HOOKS')
) as (
mock_spawn,
mock_hooks
):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
def test_build_and_run_notfound_exception(self):
self._test_build_and_run_exceptions(exception.InstanceNotFound(
instance_id=''))
def test_build_and_run_unexpecteddeleting_exception(self):
self._test_build_and_run_exceptions(
exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={}))
def test_build_and_run_buildabort_exception(self):
self._test_build_and_run_exceptions(exception.BuildAbortException(
instance_uuid='', reason=''), set_error=True, cleanup_volumes=True)
def test_build_and_run_unhandled_exception(self):
self._test_build_and_run_exceptions(test.TestingException(),
set_error=True, cleanup_volumes=True)
def test_instance_not_found(self):
exc = exception.InstanceNotFound(instance_id=1)
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.end',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_reschedule_on_exception(self):
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
exc = test.TestingException()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance,
self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
mock_save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(expected_task_state='block_device_mapping'),
])
def test_spawn_network_alloc_failure(self):
# Because network allocation is asynchronous, failures may not present
# themselves until the virt spawn method is called.
self._test_build_and_run_spawn_exceptions(exception.NoMoreNetworks())
def test_build_and_run_no_more_fixedips_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.NoMoreFixedIps("error message"))
def test_build_and_run_flavor_disk_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskTooSmall())
def test_build_and_run_flavor_memory_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorMemoryTooSmall())
def test_build_and_run_image_not_active_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageNotActive(image_id=self.image.get('id')))
def test_build_and_run_image_unacceptable_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageUnacceptable(image_id=self.image.get('id'),
reason=""))
def _test_build_and_run_spawn_exceptions(self, exc):
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn',
side_effect=exc),
mock.patch.object(self.instance, 'save',
side_effect=[self.instance, self.instance, self.instance]),
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute,
'_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_shutdown_instance'),
mock.patch.object(self.compute,
'_validate_instance_group_policy')
) as (spawn, save,
_build_networks_for_instance, _notify_about_instance_usage,
_shutdown_instance, _validate_instance_group_policy):
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
_validate_instance_group_policy.assert_called_once_with(
self.context, self.instance, self.filter_properties)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
_notify_about_instance_usage.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
extra_usage_info={'image_name': self.image.get('name')}),
mock.call(self.context, self.instance, 'create.error',
fault=exc)])
save.assert_has_calls([
mock.call(),
mock.call(),
mock.call(
expected_task_state=task_states.BLOCK_DEVICE_MAPPING)])
spawn.assert_has_calls([mock.call(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info)])
_shutdown_instance.assert_called_once_with(self.context,
self.instance, self.block_device_mapping,
self.requested_networks, try_deallocate_networks=True)
@mock.patch('nova.utils.spawn_n')
def test_reschedule_on_resources_unavailable(self, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
reason = 'resource unavailable'
exc = exception.ComputeResourcesUnavailable(reason=reason)
class FakeResourceTracker(object):
def instance_claim(self, context, instance, limits):
raise exc
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self.compute._get_resource_tracker(self.node).AndReturn(
FakeResourceTracker())
self._do_build_instance_update(reschedule_update=True)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
def test_build_resources_buildabort_reraise(self):
exc = exception.BuildAbortException(
instance_uuid=self.instance.uuid, reason='')
self.mox.StubOutWithMock(self.compute, '_build_resources')
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
with mock.patch.object(self.instance, 'save') as mock_save:
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance,
self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping,
self.node, self.limits, self.filter_properties)
mock_save.assert_called_once_with()
def test_build_resources_reraises_on_failed_bdm_prep(self):
self.mox.StubOutWithMock(self.compute, '_prep_block_device')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self._build_resources_instance_update()
self.compute._prep_block_device(self.context, self.instance,
self.block_device_mapping).AndRaise(test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_bdm_prep_from_delete_raises_unexpected(self):
with contextlib.nested(
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=self.network_info),
mock.patch.object(self.instance, 'save',
side_effect=exception.UnexpectedDeletingTaskStateError(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})),
) as (_build_networks_for_instance, save):
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e,
exception.UnexpectedDeletingTaskStateError)
_build_networks_for_instance.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
save.assert_has_calls([mock.call()])
def test_build_resources_aborts_on_failed_network_alloc(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndRaise(
test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_network_alloc_from_delete_raises_unexpected(self):
with mock.patch.object(self.compute,
'_build_networks_for_instance') as _build_networks:
exc = exception.UnexpectedDeletingTaskStateError
_build_networks.side_effect = exc(
instance_uuid='fake_uuid',
actual={'task_state': task_states.DELETING},
expected={'task_state': None})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exc)
_build_networks.assert_has_calls(
[mock.call(self.context, self.instance,
self.requested_networks, self.security_groups)])
def test_build_resources_with_network_info_obj_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
network_model.NetworkInfo([{'address': '1.2.3.4'}]))
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
def test_build_resources_cleans_up_and_reraises_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_instance_not_found_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
expected_exc = exception.InstanceNotFound(
instance_id=self.instance.uuid)
mock_save.side_effect = expected_exc
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except Exception as e:
self.assertEqual(expected_exc, e)
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_unexpected_task_error_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = exception.UnexpectedTaskStateError(
instance_uuid='fake_uuid', expected={}, actual={})
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_exception_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = Exception()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
def test_build_resources_aborts_on_cleanup_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False).AndRaise(
test.TestingException())
self._build_resources_instance_update()
self.mox.ReplayAll()
def fake_spawn():
raise test.TestingException()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_build_networks_if_not_allocated(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata={},
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_build_networks_if_allocated_false(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='False'),
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_return_networks_if_found(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='True'),
expected_attrs=['system_metadata'])
def fake_network_info():
return network_model.NetworkInfo([{'address': '123.123.123.123'}])
self.mox.StubOutWithMock(self.compute.network_api,
'get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.mox.StubOutWithMock(self.compute.network_api,
'setup_instance_network_on_host')
self.compute.network_api.setup_instance_network_on_host(
self.context, instance, instance.host)
self.compute.network_api.get_instance_nw_info(
self.context, instance).AndReturn(
network_model.NetworkInfoAsyncWrapper(fake_network_info))
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_cleanup_allocated_networks_instance_not_found(self):
with contextlib.nested(
mock.patch.object(self.compute, '_deallocate_network'),
mock.patch.object(self.instance, 'save',
side_effect=exception.InstanceNotFound(instance_id=''))
) as (_deallocate_network, save):
# Testing that this doesn't raise an exception
self.compute._cleanup_allocated_networks(self.context,
self.instance, self.requested_networks)
save.assert_called_once_with()
self.assertEqual('False',
self.instance.system_metadata['network_allocated'])
@mock.patch.object(conductor_rpcapi.ConductorAPI, 'instance_update')
def test_launched_at_in_create_end_notification(self,
mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
with contextlib.nested(
mock.patch.object(self.compute,
'_update_scheduler_instance_info'),
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_upd, mock_spawn, mock_networks, mock_save, mock_notify):
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', extra_usage_info={'message': u'Success'},
network_info=[])
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
@mock.patch.object(conductor_rpcapi.ConductorAPI, 'instance_update')
def test_create_end_on_instance_delete(self, mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
exc = exception.InstanceNotFound(instance_id='')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save',
side_effect=[None, None, None, exc]),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', fault=exc)
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
class ComputeManagerMigrationTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerMigrationTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.image = {}
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.migration = objects.Migration(context=self.context.elevated(),
new_instance_type_id=7)
self.migration.status = 'migrating'
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(objects.Migration, 'save')
@mock.patch.object(objects.Migration, 'obj_as_admin')
def test_errors_out_migration_decorator(self, mock_save,
mock_obj_as_admin):
# Tests that the errors_out_migration decorator in the compute manager
# sets the migration status to 'error' when an exception is raised
# from the decorated method
instance = fake_instance.fake_instance_obj(self.context)
migration = objects.Migration()
migration.instance_uuid = instance.uuid
migration.status = 'migrating'
migration.id = 0
@manager.errors_out_migration
def fake_function(self, context, instance, migration):
raise test.TestingException()
mock_obj_as_admin.return_value = mock.MagicMock()
self.assertRaises(test.TestingException, fake_function,
self, self.context, instance, migration)
self.assertEqual('error', migration.status)
mock_save.assert_called_once_with()
mock_obj_as_admin.assert_called_once_with()
def test_finish_resize_failure(self):
with contextlib.nested(
mock.patch.object(self.compute, '_finish_resize',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock())
) as (meth, fault_create, instance_update, instance_save,
migration_save, migration_obj_as_admin):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.finish_resize,
context=self.context, disk_info=[], image=self.image,
instance=self.instance, reservations=[],
migration=self.migration
)
self.assertEqual("error", self.migration.status)
migration_save.assert_called_once_with()
migration_obj_as_admin.assert_called_once_with()
def test_resize_instance_failure(self):
self.migration.dest_host = None
with contextlib.nested(
mock.patch.object(self.compute.driver,
'migrate_disk_and_power_off',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock()),
mock.patch.object(self.compute.network_api, 'get_instance_nw_info',
return_value=None),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_get_instance_block_device_info',
return_value=None),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid',
return_value=None),
mock.patch.object(objects.Flavor,
'get_by_id',
return_value=None)
) as (meth, fault_create, instance_update,
migration_save, migration_obj_as_admin, nw_info, save_inst,
notify, vol_block_info, bdm, flavor):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.resize_instance,
context=self.context, instance=self.instance, image=self.image,
reservations=[], migration=self.migration,
instance_type='type', clean_shutdown=True)
self.assertEqual("error", self.migration.status)
self.assertEqual([mock.call(), mock.call()],
migration_save.mock_calls)
self.assertEqual([mock.call(), mock.call()],
migration_obj_as_admin.mock_calls)
def _test_revert_resize_instance_destroy_disks(self, is_shared=False):
# This test asserts that _is_instance_storage_shared() is called from
# revert_resize() and the return value is passed to driver.destroy().
# Otherwise we could regress this.
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_is_instance_storage_shared')
@mock.patch.object(self.compute, 'finish_revert_resize')
@mock.patch.object(self.compute, '_instance_update')
@mock.patch.object(self.compute, '_get_resource_tracker')
@mock.patch.object(self.compute.driver, 'destroy')
@mock.patch.object(self.compute.network_api, 'setup_networks_on_host')
@mock.patch.object(self.compute.network_api, 'migrate_instance_start')
@mock.patch.object(compute_utils, 'notify_usage_exists')
@mock.patch.object(self.migration, 'save')
@mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
def do_test(get_by_instance_uuid,
migration_save,
notify_usage_exists,
migrate_instance_start,
setup_networks_on_host,
destroy,
_get_resource_tracker,
_instance_update,
finish_revert_resize,
_is_instance_storage_shared,
get_instance_nw_info):
self.migration.source_compute = self.instance['host']
# Inform compute that instance uses non-shared or shared storage
_is_instance_storage_shared.return_value = is_shared
self.compute.revert_resize(context=self.context,
migration=self.migration,
instance=self.instance,
reservations=None)
_is_instance_storage_shared.assert_called_once_with(
self.context, self.instance,
host=self.migration.source_compute)
# If the instance storage is shared, the driver's destroy method
# should not destroy the disks; otherwise it should.
destroy.assert_called_once_with(self.context, self.instance,
mock.ANY, mock.ANY, not is_shared)
do_test()
def test_revert_resize_instance_destroy_disks_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=True)
def test_revert_resize_instance_destroy_disks_non_shared_storage(self):
self._test_revert_resize_instance_destroy_disks(is_shared=False)
def test_consoles_enabled(self):
self.flags(enabled=False, group='vnc')
self.flags(enabled=False, group='spice')
self.flags(enabled=False, group='rdp')
self.flags(enabled=False, group='serial_console')
self.assertFalse(self.compute._consoles_enabled())
self.flags(enabled=True, group='vnc')
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group='vnc')
for console in ['spice', 'rdp', 'serial_console']:
self.flags(enabled=True, group=console)
self.assertTrue(self.compute._consoles_enabled())
self.flags(enabled=False, group=console)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_live_migration')
def _test_max_concurrent_live(self, mock_lm, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
@mock.patch('nova.objects.Migration.save')
def _do_it(mock_mig_save):
instance = objects.Instance(uuid=str(uuid.uuid4()))
migration = objects.Migration()
self.compute.live_migration(self.context,
mock.sentinel.dest,
instance,
mock.sentinel.block_migration,
migration,
mock.sentinel.migrate_data)
self.assertEqual('queued', migration.status)
migration.save.assert_called_once_with()
with mock.patch.object(self.compute,
'_live_migration_semaphore') as mock_sem:
for i in (1, 2, 3):
_do_it()
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_live_limited(self):
self.flags(max_concurrent_live_migrations=2)
self._test_max_concurrent_live()
def test_max_concurrent_live_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
self._test_max_concurrent_live()
def test_max_concurrent_live_semaphore_limited(self):
self.flags(max_concurrent_live_migrations=123)
self.assertEqual(
123,
manager.ComputeManager()._live_migration_semaphore.balance)
def test_max_concurrent_live_semaphore_unlimited(self):
self.flags(max_concurrent_live_migrations=0)
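# A value of 0 (or below) means "no limit": the manager falls back to
# an UnlimitedSemaphore instead of a bounded semaphore.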
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
def test_max_concurrent_live_semaphore_negative(self):
self.flags(max_concurrent_live_migrations=-2)
compute = manager.ComputeManager()
self.assertEqual(0, compute._live_migration_semaphore.balance)
self.assertIsInstance(compute._live_migration_semaphore,
compute_utils.UnlimitedSemaphore)
|
markeTIC/OCB | refs/heads/8.0 | addons/point_of_sale/wizard/__init__.py | 382 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pos_box
import pos_confirm
import pos_details
import pos_discount
import pos_open_statement
import pos_payment
import pos_session_opening
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
odeke-em/restAssured | refs/heads/master | thebear/models.py | 1 | # Author: Emmanuel Odeke <[email protected]>
# Copyright (c) 2014
from django.db import models
# Local module
import theBearConstants
class Artist(models.Model):
name = models.CharField(max_length=theBearConstants.MAX_MISC_STR_LENGTH)
uri = models.CharField(max_length=theBearConstants.MAX_MISC_STR_LENGTH, blank=True)
extraInfo = models.CharField(max_length=theBearConstants.MAX_MISC_STR_LENGTH, blank=True)
dateCreated = models.DateTimeField(auto_now_add=True) # Set by Django on creation
lastEditTime = models.DateTimeField(auto_now=True) # Updated by Django on every save
def __unicode__(self):
return "Artist:{n}".format(n=self.name)
class Song(models.Model):
title = models.CharField(max_length=theBearConstants.MAX_MISC_STR_LENGTH)
artist = models.ForeignKey(Artist)
uri = models.URLField(max_length=theBearConstants.MAX_MISC_STR_LENGTH, blank=True)
playTime = models.DecimalField(
    max_digits=theBearConstants.MAX_TIME_DIGITS,
    decimal_places=theBearConstants.MAX_DECIMAL_PLACES
)
dateCreated = models.DateTimeField(auto_now_add=True) # Set by Django on creation
lastEditTime = models.DateTimeField(auto_now=True) # Updated by Django on every save
def __unicode__(self):
return "Song::{t}".format(t=self.title)
|
surajssd/kuma | refs/heads/master | vendor/packages/logilab/common/configuration.py | 85 | # copyright 2003-2012 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:[email protected]
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""Classes to handle advanced configuration in simple to complex applications.
Allows loading the configuration from a file or from command line
options, generating a sample configuration file, and displaying the
program's usage. Fills the gap between optik/optparse and ConfigParser
by adding data types (which are also available as a standalone optik
extension in the `optik_ext` module).
Quick start: simplest usage
---------------------------
.. python ::
>>> import sys
>>> from logilab.common.configuration import Configuration
>>> options = [('dothis', {'type':'yn', 'default': True, 'metavar': '<y or n>'}),
... ('value', {'type': 'string', 'metavar': '<string>'}),
... ('multiple', {'type': 'csv', 'default': ('yop',),
... 'metavar': '<comma separated values>',
... 'help': 'you can also document the option'}),
... ('number', {'type': 'int', 'default':2, 'metavar':'<int>'}),
... ]
>>> config = Configuration(options=options, name='My config')
>>> print config['dothis']
True
>>> print config['value']
None
>>> print config['multiple']
('yop',)
>>> print config['number']
2
>>> print config.help()
Usage: [options]
Options:
-h, --help show this help message and exit
--dothis=<y or n>
--value=<string>
--multiple=<comma separated values>
you can also document the option [current: none]
--number=<int>
>>> f = open('myconfig.ini', 'w')
>>> f.write('''[MY CONFIG]
... number = 3
... dothis = no
... multiple = 1,2,3
... ''')
>>> f.close()
>>> config.load_file_configuration('myconfig.ini')
>>> print config['dothis']
False
>>> print config['value']
None
>>> print config['multiple']
['1', '2', '3']
>>> print config['number']
3
>>> sys.argv = ['mon prog', '--value', 'bacon', '--multiple', '4,5,6',
... 'nonoptionargument']
>>> print config.load_command_line_configuration()
['nonoptionargument']
>>> print config['value']
bacon
>>> config.generate_config()
# class for simple configurations which don't need the
# manager / providers model and prefer delegation to inheritance
#
# configuration values are accessible through a dict like interface
#
[MY CONFIG]
dothis=no
value=bacon
# you can also document the option
multiple=4,5,6
number=3
Note: starting with Python 2.7, ConfigParser is able to take into
account the order of occurrence of the options in a file (by
using an OrderedDict). If you have two options changing some common
state, like a 'disable-all-stuff' and an 'enable-some-stuff-a', their
order of appearance is significant: the last one specified in the
file wins. For earlier versions of Python, and for logilab.common newer
than 0.61, the behaviour is unspecified.
"""
from __future__ import print_function
__docformat__ = "restructuredtext en"
__all__ = ('OptionsManagerMixIn', 'OptionsProviderMixIn',
'ConfigurationMixIn', 'Configuration',
'OptionsManager2ConfigurationAdapter')
import os
import sys
import re
from os.path import exists, expanduser
from copy import copy
from warnings import warn
from six import string_types, integer_types
from six.moves import range, configparser as cp, input
from logilab.common.compat import str_encode as _encode
from logilab.common.deprecation import deprecated
from logilab.common.textutils import normalize_text, unquote
from logilab.common import optik_ext
OptionError = optik_ext.OptionError
REQUIRED = []
class UnsupportedAction(Exception):
"""raised by set_option when it doesn't know what to do for an action"""
def _get_encoding(encoding, stream):
encoding = encoding or getattr(stream, 'encoding', None)
if not encoding:
import locale
encoding = locale.getpreferredencoding()
return encoding
# validation functions ########################################################
# validators will return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def choice_validator(optdict, name, value):
"""validate and return a converted value for option of type 'choice'
"""
    if value not in optdict['choices']:
msg = "option %s: invalid value: %r, should be in %s"
raise optik_ext.OptionValueError(msg % (name, value, optdict['choices']))
return value
def multiple_choice_validator(optdict, name, value):
"""validate and return a converted value for option of type 'choice'
"""
choices = optdict['choices']
values = optik_ext.check_csv(None, name, value)
for value in values:
        if value not in choices:
msg = "option %s: invalid value: %r, should be in %s"
raise optik_ext.OptionValueError(msg % (name, value, choices))
return values
def csv_validator(optdict, name, value):
"""validate and return a converted value for option of type 'csv'
"""
return optik_ext.check_csv(None, name, value)
def yn_validator(optdict, name, value):
"""validate and return a converted value for option of type 'yn'
"""
return optik_ext.check_yn(None, name, value)
def named_validator(optdict, name, value):
"""validate and return a converted value for option of type 'named'
"""
return optik_ext.check_named(None, name, value)
def file_validator(optdict, name, value):
"""validate and return a filepath for option of type 'file'"""
return optik_ext.check_file(None, name, value)
def color_validator(optdict, name, value):
"""validate and return a valid color for option of type 'color'"""
return optik_ext.check_color(None, name, value)
def password_validator(optdict, name, value):
"""validate and return a string for option of type 'password'"""
return optik_ext.check_password(None, name, value)
def date_validator(optdict, name, value):
"""validate and return a mx DateTime object for option of type 'date'"""
return optik_ext.check_date(None, name, value)
def time_validator(optdict, name, value):
"""validate and return a time object for option of type 'time'"""
return optik_ext.check_time(None, name, value)
def bytes_validator(optdict, name, value):
"""validate and return an integer for option of type 'bytes'"""
return optik_ext.check_bytes(None, name, value)
VALIDATORS = {'string': unquote,
'int': int,
'float': float,
'file': file_validator,
'font': unquote,
'color': color_validator,
'regexp': re.compile,
'csv': csv_validator,
'yn': yn_validator,
'bool': yn_validator,
'named': named_validator,
'password': password_validator,
'date': date_validator,
'time': time_validator,
'bytes': bytes_validator,
'choice': choice_validator,
'multiple_choice': multiple_choice_validator,
}
def _call_validator(opttype, optdict, option, value):
if opttype not in VALIDATORS:
raise Exception('Unsupported type "%s"' % opttype)
try:
return VALIDATORS[opttype](optdict, option, value)
except TypeError:
try:
return VALIDATORS[opttype](value)
except optik_ext.OptionValueError:
raise
        except Exception:
raise optik_ext.OptionValueError('%s value (%r) should be of type %s' %
(option, value, opttype))
# user input functions ########################################################
# user input functions will ask the user for input on stdin then validate
# the result and return the validated value or raise optparse.OptionValueError
# XXX add to documentation
def input_password(optdict, question='password:'):
from getpass import getpass
while True:
value = getpass(question)
value2 = getpass('confirm: ')
if value == value2:
return value
print('password mismatch, try again')
def input_string(optdict, question):
value = input(question).strip()
return value or None
def _make_input_function(opttype):
def input_validator(optdict, question):
while True:
value = input(question)
if not value.strip():
return None
try:
return _call_validator(opttype, optdict, None, value)
except optik_ext.OptionValueError as ex:
msg = str(ex).split(':', 1)[-1].strip()
print('bad value: %s' % msg)
return input_validator
INPUT_FUNCTIONS = {
'string': input_string,
'password': input_password,
}
for opttype in VALIDATORS.keys():
INPUT_FUNCTIONS.setdefault(opttype, _make_input_function(opttype))
# utility functions ############################################################
def expand_default(self, option):
"""monkey patch OptionParser.expand_default since we have a particular
way to handle defaults to avoid overriding values in the configuration
file
"""
if self.parser is None or not self.default_tag:
return option.help
optname = option._long_opts[0][2:]
try:
provider = self.parser.options_manager._all_options[optname]
except KeyError:
value = None
else:
optdict = provider.get_option_def(optname)
optname = provider.option_attrname(optname, optdict)
value = getattr(provider.config, optname, optdict)
value = format_option_value(optdict, value)
if value is optik_ext.NO_DEFAULT or not value:
value = self.NO_DEFAULT_VALUE
return option.help.replace(self.default_tag, str(value))
def _validate(value, optdict, name=''):
"""return a validated value for an option according to its type
optional argument name is only used for error message formatting
"""
try:
_type = optdict['type']
except KeyError:
# FIXME
return value
return _call_validator(_type, optdict, name, value)
convert = deprecated('[0.60] convert() was renamed _validate()')(_validate)
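# Illustrative sketch (added as a documentation aid, not part of the
# original module): _validate coerces raw option strings according to the
# declared type. The option dictionaries below are invented examples.
def _validate_demo():  # pragma: no cover - documentation aid
    assert _validate('yes', {'type': 'yn'}, 'dothis') is True
    assert _validate('a,b,c', {'type': 'csv'}, 'multiple') == ['a', 'b', 'c']
    assert _validate('3', {'type': 'int'}, 'number') == 3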
# format and output functions ##################################################
def comment(string):
"""return string as a comment"""
lines = [line.strip() for line in string.splitlines()]
return '# ' + ('%s# ' % os.linesep).join(lines)
def format_time(value):
if not value:
return '0'
if value != int(value):
return '%.2fs' % value
value = int(value)
nbmin, nbsec = divmod(value, 60)
if nbsec:
return '%ss' % value
nbhour, nbmin_ = divmod(nbmin, 60)
if nbmin_:
return '%smin' % nbmin
nbday, nbhour_ = divmod(nbhour, 24)
if nbhour_:
return '%sh' % nbhour
return '%sd' % nbday
def format_bytes(value):
if not value:
return '0'
if value != int(value):
return '%.2fB' % value
value = int(value)
prevunit = 'B'
for unit in ('KB', 'MB', 'GB', 'TB'):
next, remain = divmod(value, 1024)
if remain:
return '%s%s' % (value, prevunit)
prevunit = unit
value = next
return '%s%s' % (value, unit)
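# Illustrative sketch (documentation aid, not in the original module):
# format_time and format_bytes render a value using the largest unit that
# divides it exactly, otherwise they fall back to the raw count.
def _format_demo():  # pragma: no cover - documentation aid
    assert format_time(90) == '90s'       # 60 does not divide 90 evenly
    assert format_time(120) == '2min'
    assert format_bytes(2048) == '2KB'
    assert format_bytes(1536) == '1536B'  # 1024 does not divide 1536 evenly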
def format_option_value(optdict, value):
"""return the user input's value from a 'compiled' value"""
if isinstance(value, (list, tuple)):
value = ','.join(value)
elif isinstance(value, dict):
value = ','.join(['%s:%s' % (k, v) for k, v in value.items()])
elif hasattr(value, 'match'): # optdict.get('type') == 'regexp'
# compiled regexp
value = value.pattern
elif optdict.get('type') == 'yn':
value = value and 'yes' or 'no'
elif isinstance(value, string_types) and value.isspace():
value = "'%s'" % value
    elif optdict.get('type') == 'time' and isinstance(value, (float,) + integer_types):
value = format_time(value)
elif optdict.get('type') == 'bytes' and hasattr(value, '__int__'):
value = format_bytes(value)
return value
def ini_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using the INI format"""
encoding = _get_encoding(encoding, stream)
if doc:
print(_encode(comment(doc), encoding), file=stream)
print('[%s]' % section, file=stream)
ini_format(stream, options, encoding)
def ini_format(stream, options, encoding):
"""format options using the INI format"""
for optname, optdict, value in options:
value = format_option_value(optdict, value)
help = optdict.get('help')
if help:
help = normalize_text(help, line_len=79, indent='# ')
print(file=stream)
print(_encode(help, encoding), file=stream)
else:
print(file=stream)
if value is None:
print('#%s=' % optname, file=stream)
else:
value = _encode(value, encoding).strip()
print('%s=%s' % (optname, value), file=stream)
format_section = ini_format_section
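# A small usage sketch (stream and option data invented for illustration):
# rendering a single section in INI syntax, which prints
#     [MY SECTION]
#
#     value=bacon
def _ini_format_demo():  # pragma: no cover - documentation aid
    ini_format_section(sys.stdout, 'MY SECTION',
                       [('value', {'type': 'string'}, 'bacon')])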
def rest_format_section(stream, section, options, encoding=None, doc=None):
"""format an options section using as ReST formatted output"""
encoding = _get_encoding(encoding, stream)
if section:
print('%s\n%s' % (section, "'"*len(section)), file=stream)
if doc:
print(_encode(normalize_text(doc, line_len=79, indent=''), encoding), file=stream)
print(file=stream)
for optname, optdict, value in options:
help = optdict.get('help')
print(':%s:' % optname, file=stream)
if help:
help = normalize_text(help, line_len=79, indent=' ')
print(_encode(help, encoding), file=stream)
if value:
value = _encode(format_option_value(optdict, value), encoding)
print(file=stream)
print(' Default: ``%s``' % value.replace("`` ", "```` ``"), file=stream)
# Options Manager ##############################################################
class OptionsManagerMixIn(object):
"""MixIn to handle a configuration from both a configuration file and
command line options
"""
def __init__(self, usage, config_file=None, version=None, quiet=0):
self.config_file = config_file
self.reset_parsers(usage, version=version)
# list of registered options providers
self.options_providers = []
# dictionary associating option name to checker
self._all_options = {}
self._short_options = {}
self._nocallback_options = {}
self._mygroups = dict()
# verbosity
self.quiet = quiet
self._maxlevel = 0
def reset_parsers(self, usage='', version=None):
# configuration file parser
self.cfgfile_parser = cp.ConfigParser()
# command line parser
self.cmdline_parser = optik_ext.OptionParser(usage=usage, version=version)
self.cmdline_parser.options_manager = self
self._optik_option_attrs = set(self.cmdline_parser.option_class.ATTRS)
def register_options_provider(self, provider, own_group=True):
"""register an options provider"""
assert provider.priority <= 0, "provider's priority can't be >= 0"
for i in range(len(self.options_providers)):
if provider.priority > self.options_providers[i].priority:
self.options_providers.insert(i, provider)
break
else:
self.options_providers.append(provider)
non_group_spec_options = [option for option in provider.options
if 'group' not in option[1]]
groups = getattr(provider, 'option_groups', ())
if own_group and non_group_spec_options:
self.add_option_group(provider.name.upper(), provider.__doc__,
non_group_spec_options, provider)
else:
for opt, optdict in non_group_spec_options:
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
for gname, gdoc in groups:
gname = gname.upper()
goptions = [option for option in provider.options
if option[1].get('group', '').upper() == gname]
self.add_option_group(gname, gdoc, goptions, provider)
def add_option_group(self, group_name, doc, options, provider):
"""add an option group including the listed options
"""
assert options
# add option group to the command line parser
if group_name in self._mygroups:
group = self._mygroups[group_name]
else:
group = optik_ext.OptionGroup(self.cmdline_parser,
title=group_name.capitalize())
self.cmdline_parser.add_option_group(group)
group.level = provider.level
self._mygroups[group_name] = group
# add section to the config file
if group_name != "DEFAULT":
self.cfgfile_parser.add_section(group_name)
# add provider's specific options
for opt, optdict in options:
self.add_optik_option(provider, group, opt, optdict)
def add_optik_option(self, provider, optikcontainer, opt, optdict):
if 'inputlevel' in optdict:
warn('[0.50] "inputlevel" in option dictionary for %s is deprecated,'
' use "level"' % opt, DeprecationWarning)
optdict['level'] = optdict.pop('inputlevel')
args, optdict = self.optik_option(provider, opt, optdict)
option = optikcontainer.add_option(*args, **optdict)
self._all_options[opt] = provider
self._maxlevel = max(self._maxlevel, option.level or 0)
def optik_option(self, provider, opt, optdict):
"""get our personal option definition and return a suitable form for
use with optik/optparse
"""
optdict = copy(optdict)
others = {}
if 'action' in optdict:
self._nocallback_options[provider] = opt
else:
optdict['action'] = 'callback'
optdict['callback'] = self.cb_set_provider_option
# default is handled here and *must not* be given to optik if you
# want the whole machinery to work
if 'default' in optdict:
if ('help' in optdict
and optdict.get('default') is not None
and not optdict['action'] in ('store_true', 'store_false')):
optdict['help'] += ' [current: %default]'
del optdict['default']
args = ['--' + str(opt)]
if 'short' in optdict:
self._short_options[optdict['short']] = opt
args.append('-' + optdict['short'])
del optdict['short']
# cleanup option definition dict before giving it to optik
for key in list(optdict.keys()):
            if key not in self._optik_option_attrs:
optdict.pop(key)
return args, optdict
def cb_set_provider_option(self, option, opt, value, parser):
"""optik callback for option setting"""
if opt.startswith('--'):
# remove -- on long option
opt = opt[2:]
else:
# short option, get its long equivalent
opt = self._short_options[opt[1:]]
# trick since we can't set action='store_true' on options
if value is None:
value = 1
self.global_set_option(opt, value)
def global_set_option(self, opt, value):
"""set option on the correct option provider"""
self._all_options[opt].set_option(opt, value)
def generate_config(self, stream=None, skipsections=(), encoding=None):
"""write a configuration file according to the current configuration
into the given stream or stdout
"""
options_by_section = {}
sections = []
for provider in self.options_providers:
for section, options in provider.options_by_section():
if section is None:
section = provider.name
if section in skipsections:
continue
options = [(n, d, v) for (n, d, v) in options
if d.get('type') is not None]
if not options:
continue
                if section not in sections:
sections.append(section)
alloptions = options_by_section.setdefault(section, [])
alloptions += options
stream = stream or sys.stdout
encoding = _get_encoding(encoding, stream)
printed = False
for section in sections:
if printed:
print('\n', file=stream)
format_section(stream, section.upper(), options_by_section[section],
encoding)
printed = True
def generate_manpage(self, pkginfo, section=1, stream=None):
"""write a man page for the current configuration into the given
stream or stdout
"""
self._monkeypatch_expand_default()
try:
optik_ext.generate_manpage(self.cmdline_parser, pkginfo,
section, stream=stream or sys.stdout,
level=self._maxlevel)
finally:
self._unmonkeypatch_expand_default()
# initialization methods ##################################################
def load_provider_defaults(self):
"""initialize configuration using default values"""
for provider in self.options_providers:
provider.load_defaults()
def load_file_configuration(self, config_file=None):
"""load the configuration from file"""
self.read_config_file(config_file)
self.load_config_file()
def read_config_file(self, config_file=None):
"""read the configuration file but do not load it (i.e. dispatching
values to each options provider)
"""
helplevel = 1
while helplevel <= self._maxlevel:
opt = '-'.join(['long'] * helplevel) + '-help'
if opt in self._all_options:
break # already processed
def helpfunc(option, opt, val, p, level=helplevel):
print(self.help(level))
sys.exit(0)
helpmsg = '%s verbose help.' % ' '.join(['more'] * helplevel)
optdict = {'action' : 'callback', 'callback' : helpfunc,
'help' : helpmsg}
provider = self.options_providers[0]
self.add_optik_option(provider, self.cmdline_parser, opt, optdict)
provider.options += ( (opt, optdict), )
helplevel += 1
if config_file is None:
config_file = self.config_file
if config_file is not None:
config_file = expanduser(config_file)
if config_file and exists(config_file):
parser = self.cfgfile_parser
parser.read([config_file])
            # normalize the sections' titles
for sect, values in parser._sections.items():
if not sect.isupper() and values:
parser._sections[sect.upper()] = values
elif not self.quiet:
msg = 'No config file found, using default configuration'
print(msg, file=sys.stderr)
return
def input_config(self, onlysection=None, inputlevel=0, stream=None):
"""interactively get configuration values by asking to the user and generate
a configuration file
"""
if onlysection is not None:
onlysection = onlysection.upper()
for provider in self.options_providers:
for section, option, optdict in provider.all_options():
if onlysection is not None and section != onlysection:
continue
                if 'type' not in optdict:
# ignore action without type (callback, store_true...)
continue
provider.input_option(option, optdict, inputlevel)
# now we can generate the configuration file
if stream is not None:
self.generate_config(stream)
def load_config_file(self):
"""dispatch values previously read from a configuration file to each
        options provider
"""
parser = self.cfgfile_parser
for section in parser.sections():
for option, value in parser.items(section):
try:
self.global_set_option(option, value)
except (KeyError, OptionError):
# TODO handle here undeclared options appearing in the config file
continue
def load_configuration(self, **kwargs):
"""override configuration according to given parameters
"""
for opt, opt_value in kwargs.items():
opt = opt.replace('_', '-')
provider = self._all_options[opt]
provider.set_option(opt, opt_value)
def load_command_line_configuration(self, args=None):
"""override configuration according to command line parameters
return additional arguments
"""
self._monkeypatch_expand_default()
try:
if args is None:
args = sys.argv[1:]
else:
args = list(args)
(options, args) = self.cmdline_parser.parse_args(args=args)
for provider in self._nocallback_options.keys():
config = provider.config
for attr in config.__dict__.keys():
value = getattr(options, attr, None)
if value is None:
continue
setattr(config, attr, value)
return args
finally:
self._unmonkeypatch_expand_default()
# help methods ############################################################
def add_help_section(self, title, description, level=0):
"""add a dummy option section for help purpose """
group = optik_ext.OptionGroup(self.cmdline_parser,
title=title.capitalize(),
description=description)
group.level = level
self._maxlevel = max(self._maxlevel, level)
self.cmdline_parser.add_option_group(group)
def _monkeypatch_expand_default(self):
# monkey patch optik_ext to deal with our default values
try:
self.__expand_default_backup = optik_ext.HelpFormatter.expand_default
optik_ext.HelpFormatter.expand_default = expand_default
except AttributeError:
# python < 2.4: nothing to be done
pass
def _unmonkeypatch_expand_default(self):
# remove monkey patch
if hasattr(optik_ext.HelpFormatter, 'expand_default'):
# unpatch optik_ext to avoid side effects
optik_ext.HelpFormatter.expand_default = self.__expand_default_backup
def help(self, level=0):
"""return the usage string for available options """
self.cmdline_parser.formatter.output_level = level
self._monkeypatch_expand_default()
try:
return self.cmdline_parser.format_help()
finally:
self._unmonkeypatch_expand_default()
class Method(object):
"""used to ease late binding of default method (so you can define options
on the class using default methods on the configuration instance)
"""
def __init__(self, methname):
self.method = methname
self._inst = None
def bind(self, instance):
"""bind the method to its instance"""
if self._inst is None:
self._inst = instance
def __call__(self, *args, **kwargs):
assert self._inst, 'unbound method'
return getattr(self._inst, self.method)(*args, **kwargs)
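# Hypothetical sketch (names invented) of a late-bound default: the value
# is computed by a method on the configuration instance, which
# OptionsProviderMixIn.__init__ binds via Method.bind().
#
#     class MyConfig(Configuration):
#         options = [('workdir', {'type': 'string',
#                                 'default': Method('default_workdir')})]
#         def default_workdir(self):
#             return '/tmp'
#
# option_default() then invokes default_workdir() on the instance, since
# callable defaults are called before being stored.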
# Options Provider #############################################################
class OptionsProviderMixIn(object):
"""Mixin to provide options to an OptionsManager"""
# those attributes should be overridden
priority = -1
name = 'default'
options = ()
level = 0
def __init__(self):
self.config = optik_ext.Values()
for option in self.options:
try:
option, optdict = option
except ValueError:
raise Exception('Bad option: %r' % option)
if isinstance(optdict.get('default'), Method):
optdict['default'].bind(self)
elif isinstance(optdict.get('callback'), Method):
optdict['callback'].bind(self)
self.load_defaults()
def load_defaults(self):
"""initialize the provider using default values"""
for opt, optdict in self.options:
action = optdict.get('action')
if action != 'callback':
            # callback actions have no default
default = self.option_default(opt, optdict)
if default is REQUIRED:
continue
self.set_option(opt, default, action, optdict)
def option_default(self, opt, optdict=None):
"""return the default value for an option"""
if optdict is None:
optdict = self.get_option_def(opt)
default = optdict.get('default')
if callable(default):
default = default()
return default
def option_attrname(self, opt, optdict=None):
"""get the config attribute corresponding to opt
"""
if optdict is None:
optdict = self.get_option_def(opt)
return optdict.get('dest', opt.replace('-', '_'))
option_name = deprecated('[0.60] OptionsProviderMixIn.option_name() was renamed to option_attrname()')(option_attrname)
def option_value(self, opt):
"""get the current value for the given option"""
return getattr(self.config, self.option_attrname(opt), None)
def set_option(self, opt, value, action=None, optdict=None):
"""method called to set an option (registered in the options list)
"""
if optdict is None:
optdict = self.get_option_def(opt)
if value is not None:
value = _validate(value, optdict, opt)
if action is None:
action = optdict.get('action', 'store')
if optdict.get('type') == 'named': # XXX need specific handling
optname = self.option_attrname(opt, optdict)
currentvalue = getattr(self.config, optname, None)
if currentvalue:
currentvalue.update(value)
value = currentvalue
if action == 'store':
setattr(self.config, self.option_attrname(opt, optdict), value)
elif action in ('store_true', 'count'):
setattr(self.config, self.option_attrname(opt, optdict), 0)
elif action == 'store_false':
setattr(self.config, self.option_attrname(opt, optdict), 1)
elif action == 'append':
opt = self.option_attrname(opt, optdict)
_list = getattr(self.config, opt, None)
if _list is None:
if isinstance(value, (list, tuple)):
_list = value
elif value is not None:
_list = []
_list.append(value)
setattr(self.config, opt, _list)
elif isinstance(_list, tuple):
setattr(self.config, opt, _list + (value,))
else:
_list.append(value)
elif action == 'callback':
optdict['callback'](None, opt, value, None)
else:
raise UnsupportedAction(action)
def input_option(self, option, optdict, inputlevel=99):
default = self.option_default(option, optdict)
if default is REQUIRED:
defaultstr = '(required): '
elif optdict.get('level', 0) > inputlevel:
return
elif optdict['type'] == 'password' or default is None:
defaultstr = ': '
else:
defaultstr = '(default: %s): ' % format_option_value(optdict, default)
print(':%s:' % option)
print(optdict.get('help') or option)
inputfunc = INPUT_FUNCTIONS[optdict['type']]
value = inputfunc(optdict, defaultstr)
while default is REQUIRED and not value:
print('please specify a value')
value = inputfunc(optdict, '%s: ' % option)
if value is None and default is not None:
value = default
self.set_option(option, value, optdict=optdict)
def get_option_def(self, opt):
"""return the dictionary defining an option given it's name"""
assert self.options
for option in self.options:
if option[0] == opt:
return option[1]
raise OptionError('no such option %s in section %r'
% (opt, self.name), opt)
def all_options(self):
"""return an iterator on available options for this provider
        options are actually described by a 3-tuple:
(section, option name, option dictionary)
"""
for section, options in self.options_by_section():
if section is None:
if self.name is None:
continue
section = self.name.upper()
for option, optiondict, value in options:
yield section, option, optiondict
def options_by_section(self):
"""return an iterator on options grouped by section
(section, [list of (optname, optdict, optvalue)])
"""
sections = {}
for optname, optdict in self.options:
sections.setdefault(optdict.get('group'), []).append(
(optname, optdict, self.option_value(optname)))
if None in sections:
yield None, sections.pop(None)
for section, options in sections.items():
yield section.upper(), options
def options_and_values(self, options=None):
if options is None:
options = self.options
for optname, optdict in options:
yield (optname, optdict, self.option_value(optname))
# configuration ################################################################
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn):
"""basic mixin for simple configurations which don't need the
manager / providers model
"""
def __init__(self, *args, **kwargs):
if not args:
kwargs.setdefault('usage', '')
kwargs.setdefault('quiet', 1)
OptionsManagerMixIn.__init__(self, *args, **kwargs)
OptionsProviderMixIn.__init__(self)
if not getattr(self, 'option_groups', None):
self.option_groups = []
for option, optdict in self.options:
try:
gdef = (optdict['group'].upper(), '')
except KeyError:
continue
            if gdef not in self.option_groups:
self.option_groups.append(gdef)
self.register_options_provider(self, own_group=False)
def register_options(self, options):
"""add some options to the configuration"""
options_by_group = {}
for optname, optdict in options:
options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict))
        for group, goptions in options_by_group.items():
            self.add_option_group(group, None, goptions, self)
self.options += tuple(options)
def load_defaults(self):
OptionsProviderMixIn.load_defaults(self)
def __iter__(self):
        return iter(self.config.__dict__.items())
def __getitem__(self, key):
try:
return getattr(self.config, self.option_attrname(key))
except (optik_ext.OptionValueError, AttributeError):
raise KeyError(key)
def __setitem__(self, key, value):
self.set_option(key, value)
def get(self, key, default=None):
try:
return getattr(self.config, self.option_attrname(key))
except (OptionError, AttributeError):
return default
class Configuration(ConfigurationMixIn):
"""class for simple configurations which don't need the
manager / providers model and prefer delegation to inheritance
configuration values are accessible through a dict like interface
"""
def __init__(self, config_file=None, options=None, name=None,
usage=None, doc=None, version=None):
if options is not None:
self.options = options
if name is not None:
self.name = name
if doc is not None:
self.__doc__ = doc
super(Configuration, self).__init__(config_file=config_file, usage=usage, version=version)
class OptionsManager2ConfigurationAdapter(object):
"""Adapt an option manager to behave like a
`logilab.common.configuration.Configuration` instance
"""
def __init__(self, provider):
self.config = provider
def __getattr__(self, key):
return getattr(self.config, key)
def __getitem__(self, key):
provider = self.config._all_options[key]
try:
return getattr(provider.config, provider.option_attrname(key))
except AttributeError:
raise KeyError(key)
def __setitem__(self, key, value):
self.config.global_set_option(self.config.option_attrname(key), value)
def get(self, key, default=None):
provider = self.config._all_options[key]
try:
return getattr(provider.config, provider.option_attrname(key))
except AttributeError:
return default
# other functions ##############################################################
def read_old_config(newconfig, changes, configfile):
"""initialize newconfig from a deprecated configuration file
possible changes:
* ('renamed', oldname, newname)
* ('moved', option, oldgroup, newgroup)
* ('typechanged', option, oldtype, newvalue)
"""
# build an index of changes
changesindex = {}
for action in changes:
if action[0] == 'moved':
option, oldgroup, newgroup = action[1:]
changesindex.setdefault(option, []).append((action[0], oldgroup, newgroup))
continue
if action[0] == 'renamed':
oldname, newname = action[1:]
changesindex.setdefault(newname, []).append((action[0], oldname))
continue
if action[0] == 'typechanged':
option, oldtype, newvalue = action[1:]
changesindex.setdefault(option, []).append((action[0], oldtype, newvalue))
continue
if action[1] in ('added', 'removed'):
continue # nothing to do here
raise Exception('unknown change %s' % action[0])
# build a config object able to read the old config
options = []
for optname, optdef in newconfig.options:
for action in changesindex.pop(optname, ()):
if action[0] == 'moved':
oldgroup, newgroup = action[1:]
optdef = optdef.copy()
optdef['group'] = oldgroup
elif action[0] == 'renamed':
optname = action[1]
elif action[0] == 'typechanged':
oldtype = action[1]
optdef = optdef.copy()
optdef['type'] = oldtype
options.append((optname, optdef))
if changesindex:
raise Exception('unapplied changes: %s' % changesindex)
oldconfig = Configuration(options=options, name=newconfig.name)
# read the old config
oldconfig.load_file_configuration(configfile)
# apply values reverting changes
changes.reverse()
done = set()
for action in changes:
if action[0] == 'renamed':
oldname, newname = action[1:]
newconfig[newname] = oldconfig[oldname]
done.add(newname)
elif action[0] == 'typechanged':
optname, oldtype, newvalue = action[1:]
newconfig[optname] = newvalue
done.add(optname)
for optname, optdef in newconfig.options:
        if optdef.get('type') and optname not in done:
newconfig.set_option(optname, oldconfig[optname], optdict=optdef)
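# Hedged usage sketch (option names and the file path are invented):
# replay values written under old names/groups/types into a new-style
# configuration.
#
#     changes = [('renamed', 'old-name', 'new-name'),
#                ('moved', 'verbose', 'MAIN', 'OUTPUT'),
#                ('typechanged', 'delay', 'int', 2.5)]
#     read_old_config(newconfig, changes, 'legacy.ini')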
def merge_options(options, optgroup=None):
"""preprocess a list of options and remove duplicates, returning a new list
(tuple actually) of options.
Options dictionaries are copied to avoid later side-effect. Also, if
    `optgroup` argument is specified, ensure all options are in the given group.
"""
alloptions = {}
options = list(options)
for i in range(len(options)-1, -1, -1):
optname, optdict = options[i]
if optname in alloptions:
options.pop(i)
alloptions[optname].update(optdict)
else:
optdict = optdict.copy()
options[i] = (optname, optdict)
alloptions[optname] = optdict
if optgroup is not None:
alloptions[optname]['group'] = optgroup
return tuple(options)
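# Illustration with invented options: duplicates collapse to one entry and
# the surviving dictionaries are copies, so callers may mutate them; note
# that an earlier duplicate's keys are merged into the surviving entry via
# dict.update().
def _merge_options_demo():  # pragma: no cover - documentation aid
    opts = [('verbose', {'type': 'yn', 'default': False}),
            ('verbose', {'type': 'yn', 'default': True})]
    merged = merge_options(opts)
    assert len(merged) == 1
    assert merged[0][1]['default'] is False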
|
d3trax/asuswrt-merlin | refs/heads/master | release/src/router/asusnatnl/pjproject-1.12/tests/pjsua/scripts-sendto/157_err_sdp_bad_addr_type.py | 59 | # $Id: 157_err_sdp_bad_addr_type.py 2066 2008-06-26 19:51:01Z bennylp $
import inc_sip as sip
import inc_sdp as sdp
sdp = \
"""
v=0
o=- 0 0 IN IP4 127.0.0.1
s=pjmedia
c=IN IP7 127.0.0.1
t=0 0
m=audio 4000 RTP/AVP 0 101
a=rtpmap:0 PCMU/8000
a=sendrecv
a=rtpmap:101 telephone-event/8000
a=fmtp:101 0-15
"""
pjsua_args = "--null-audio --auto-answer 200"
extra_headers = ""
include = [ "Warning: " ] # better have Warning header
exclude = []
sendto_cfg = sip.SendtoCfg("Bad SDP address type", pjsua_args, sdp, 400,
extra_headers=extra_headers,
resp_inc=include, resp_exc=exclude)
|
ddzialak/boto | refs/heads/develop | boto/cloudtrail/exceptions.py | 21 | """
Exceptions that are specific to the cloudtrail module.
"""
from boto.exception import BotoServerError
class InvalidSnsTopicNameException(BotoServerError):
"""
Raised when an invalid SNS topic name is passed to Cloudtrail.
"""
pass
class InvalidS3BucketNameException(BotoServerError):
"""
Raised when an invalid S3 bucket name is passed to Cloudtrail.
"""
pass
class TrailAlreadyExistsException(BotoServerError):
"""
Raised when the given trail name already exists.
"""
pass
class InsufficientSnsTopicPolicyException(BotoServerError):
"""
Raised when the SNS topic does not allow Cloudtrail to post
messages.
"""
pass
class InvalidTrailNameException(BotoServerError):
"""
Raised when the trail name is invalid.
"""
pass
class InternalErrorException(BotoServerError):
"""
Raised when there was an internal Cloudtrail error.
"""
pass
class TrailNotFoundException(BotoServerError):
"""
Raised when the given trail name is not found.
"""
pass
class S3BucketDoesNotExistException(BotoServerError):
"""
Raised when the given S3 bucket does not exist.
"""
pass
class TrailNotProvidedException(BotoServerError):
"""
Raised when no trail name was provided.
"""
pass
class InvalidS3PrefixException(BotoServerError):
"""
Raised when an invalid key prefix is given.
"""
pass
class MaximumNumberOfTrailsExceededException(BotoServerError):
"""
Raised when no more trails can be created.
"""
pass
class InsufficientS3BucketPolicyException(BotoServerError):
"""
Raised when the S3 bucket does not allow Cloudtrail to
write files into the prefix.
"""
pass
|
akeym/cyder | refs/heads/master | cyder/api/v1/endpoints/dns/api.py | 5 | from django.core.exceptions import ValidationError
from rest_framework import serializers
from cyder.api.v1.endpoints import api
from cyder.cydns.utils import ensure_label_domain
NestedKeyValueFields = api.NestedAVFields
class FQDNMixin(object):
def restore_object(self, attrs):
if self.fqdn:
try:
self.label, self.domain = ensure_label_domain(self.fqdn)
            except ValidationError as e:
self._errors['fqdn'] = e.messages
class LabelDomainMixin(object):
label = serializers.CharField()
domain = serializers.HyperlinkedRelatedField(
many=False, read_only=True, view_name='api-dns-domain-detail')
class CommonDNSSerializer(api.CommonAPISerializer):
views = serializers.SlugRelatedField(
many=True, read_only=True, slug_field='name')
class CommonDNSMeta(api.CommonAPIMeta):
pass
class CommonDNSViewSet(api.CommonAPIViewSet):
pass
|
poo12138/gem5-stable | refs/heads/master | src/arch/x86/isa/insts/general_purpose/data_conversion/__init__.py | 91 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["ascii_adjust",
"bcd_adjust",
"endian_conversion",
"extract_sign_mask",
"sign_extension",
"translate"]
microcode = ""
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
|
tmpgit/intellij-community | refs/heads/master | python/testData/copyPaste/multiLine/IndentMulti33.after.py | 996 | class C:
def foo(self):
x = 1
y = 2
y = 2
|
Aplia/tfk-ansatte | refs/heads/master | backend/backend/urls.py | 1 | """backend URL Configuration """
from django.conf import settings
from django.conf.urls import url, include
from backend.api import router
from django.contrib import admin
urlpatterns = [
url(r'^backend/api/', include(router.urls)),
url(r'^backend/admin/', admin.site.urls),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^backend/__debug__/', include(debug_toolbar.urls)),
]
|
SidSachdev/SFrame | refs/heads/master | oss_src/unity/python/sframe/meta/asttools/visitors/pysourcegen.py | 15 | '''
Created on Jul 15, 2011
@author: sean
'''
from __future__ import print_function
import _ast
from ...asttools import Visitor
from string import Formatter
import sys
from ...utils import py3op, py2op
if sys.version_info.major < 3:
from StringIO import StringIO
else:
from io import StringIO
class ASTFormatter(Formatter):
def format_field(self, value, format_spec):
if format_spec == 'node':
gen = ExprSourceGen()
gen.visit(value)
return gen.dumps()
elif value == '':
return value
else:
return super(ASTFormatter, self).format_field(value, format_spec)
def get_value(self, key, args, kwargs):
if key == '':
return args[0]
elif key in kwargs:
return kwargs[key]
elif isinstance(key, int):
return args[key]
key = int(key)
return args[key]
def str_node(node):
gen = ExprSourceGen()
gen.visit(node)
return gen.dumps()
def simple_string(value):
def visitNode(self, node):
self.print(value, **node.__dict__)
return visitNode
class ExprSourceGen(Visitor):
def __init__(self):
self.out = StringIO()
self.formatter = ASTFormatter()
self.indent = ' '
self.level = 0
@property
def indenter(self):
return Indenter(self)
@property
def no_indent(self):
return NoIndent(self)
def dump(self, file=sys.stdout):
self.out.seek(0)
print(self.out.read(), file=file)
def dumps(self):
self.out.seek(0)
value = self.out.read()
return value
def print(self, line, *args, **kwargs):
line = self.formatter.format(line, *args, **kwargs)
level = kwargs.get('level')
prx = self.indent * (level if level else self.level)
print(prx, line, sep='', end='', file=self.out)
def print_lines(self, lines,):
prx = self.indent * self.level
for line in lines:
print(prx, line, sep='', file=self.out)
def visitName(self, node):
self.print(node.id)
@py2op
def visitarguments(self, node):
# ('args', 'vararg', 'kwarg', 'defaults')
defaults = [None] * (len(node.args) - len(node.defaults))
defaults.extend(node.defaults)
i = 0
args = list(node.args)
if args:
i += 1
arg = args.pop(0)
default = defaults.pop(0)
self.visit(arg)
if default is not None:
self.print('={:node}', default)
while args:
arg = args.pop(0)
default = defaults.pop(0)
self.print(', ')
self.visit(arg)
if default is not None:
self.print('={:node}', default)
if node.vararg:
self.print('{0}*{1}', ', ' if i else '', node.vararg)
if node.kwarg:
self.print('{0}**{1}', ', ' if i else '', node.kwarg)
@visitarguments.py3op
def visitarguments(self, node):
# ('args', 'vararg', 'kwarg', 'defaults')
defaults = [None] * (len(node.args) - len(node.defaults))
defaults.extend(node.defaults)
i = 0
args = list(node.args)
if args:
i += 1
arg = args.pop(0)
default = defaults.pop(0)
self.visit(arg)
if default is not None:
self.print('={:node}', default)
while args:
arg = args.pop(0)
default = defaults.pop(0)
self.print(', ')
self.visit(arg)
if default is not None:
self.print('={:node}', default)
if node.vararg:
self.print('{0}*{1}', ', ' if i else '', node.vararg)
if node.varargannotation:
self.print(':{:node}', node.varargannotation)
elif node.kwonlyargs:
self.print('{0}*', ', ' if i else '')
kwonlyargs = list(node.kwonlyargs)
if kwonlyargs:
i += 1
kw_defaults = [None] * (len(kwonlyargs) - len(node.kw_defaults))
kw_defaults.extend(node.kw_defaults)
while kwonlyargs:
kw_arg = kwonlyargs.pop(0)
kw_default = kw_defaults.pop(0)
self.print(', ')
self.visit(kw_arg)
if kw_default is not None:
self.print('={:node}', kw_default)
if node.kwarg:
self.print('{0}**{1}', ', ' if i else '', node.kwarg)
            if node.kwargannotation:
self.print(':{:node}', node.kwargannotation)
def visitNum(self, node):
self.print(repr(node.n))
def visitBinOp(self, node):
self.print('({left:node} {op:node} {right:node})', left=node.left, op=node.op, right=node.right)
def visitAdd(self, node):
self.print('+')
def visitalias(self, node):
if node.asname is None:
self.print("{0}", node.name)
else:
self.print("{0} as {1}", node.name, node.asname)
def visitCall(self, node):
self.print('{func:node}(' , func=node.func)
i = 0
print_comma = lambda i: self.print(", ") if i > 0 else None
with self.no_indent:
for arg in node.args:
print_comma(i)
self.print('{:node}', arg)
i += 1
for kw in node.keywords:
print_comma(i)
self.print('{:node}', kw)
i += 1
if node.starargs:
print_comma(i)
self.print('*{:node}', node.starargs)
i += 1
if node.kwargs:
print_comma(i)
self.print('**{:node}', node.kwargs)
i += 1
self.print(')')
def visitkeyword(self, node):
self.print("{0}={1:node}", node.arg, node.value)
def visitStr(self, node):
self.print(repr(node.s))
def visitTuple(self, node, brace='()'):
self.print(brace[0])
print_comma = lambda i: self.print(", ") if i > 0 else None
i = 0
with self.no_indent:
for elt in node.elts:
print_comma(i)
self.print('{:node}', elt)
i += 1
if len(node.elts) == 1:
self.print(',')
self.print(brace[1])
def visitCompare(self, node):
        self.print('({0:node}', node.left)
        with self.no_indent:
            for (op, right) in zip(node.ops, node.comparators):
                self.print(' {0:node} {1:node}', op, right)
            self.print(')')
@py2op
def visitRaise(self, node):
self.print('raise ')
with self.no_indent:
if node.type:
self.print('{:node}' , node.type)
if node.inst:
self.print(', {:node}' , node.inst)
if node.tback:
self.print(', {:node}' , node.tback)
@visitRaise.py3op
def visitRaise(self, node):
self.print('raise ')
with self.no_indent:
if node.exc:
self.print('{:node}' , node.exc)
if node.cause:
self.print(' from {:node}' , node.cause)
def visitAttribute(self, node):
self.print('{:node}.{attr}', node.value, attr=node.attr)
def visitDict(self, node):
self.print('{{')
items = zip(node.keys, node.values)
with self.no_indent:
i = 0
pc = lambda : self.print(", ") if i > 0 else None
for key, value in items:
pc()
self.print('{0:node}:{1:node}', key, value)
i += 1
self.print('}}')
def visitSet(self, node):
self.print('{{')
items = node.elts
with self.no_indent:
i = 0
pc = lambda : self.print(", ") if i > 0 else None
for value in items:
pc()
self.print('{0:node}', value)
i += 1
self.print('}}')
def visitList(self, node):
self.print('[')
with self.no_indent:
i = 0
pc = lambda : self.print(", ") if i > 0 else None
for item in node.elts:
pc()
self.print('{:node}', item)
i += 1
self.print(']')
def visitSubscript(self, node):
self.print('{0:node}[{1:node}]', node.value, node.slice)
def visitIndex(self, node):
if isinstance(node.value, _ast.Tuple):
with self.no_indent:
self.visit(node.value, brace=['', ''])
else:
self.print('{:node}', node.value)
def visitSlice(self, node):
with self.no_indent:
if node.lower is not None:
self.print('{:node}', node.lower)
self.print(':')
if node.upper is not None:
self.print('{:node}', node.upper)
if node.step is not None:
self.print(':')
self.print('{:node}', node.step)
def visitExtSlice(self, node):
dims = list(node.dims)
with self.no_indent:
dim = dims.pop(0)
self.print('{0:node}', dim)
while dims:
dim = dims.pop(0)
self.print(', {0:node}', dim)
def visitUnaryOp(self, node):
self.print('({0:node}{1:node})', node.op, node.operand)
def visitAssert(self, node):
self.print('assert {0:node}', node.test)
if node.msg:
with self.no_indent:
self.print(', {0:node}', node.msg)
visitUSub = simple_string('-')
visitUAdd = simple_string('+')
visitNot = simple_string('not ')
visitInvert = simple_string('~')
visitAnd = simple_string('and')
visitOr = simple_string('or')
visitSub = simple_string('-')
visitFloorDiv = simple_string('//')
visitDiv = simple_string('/')
visitMod = simple_string('%')
visitMult = simple_string('*')
visitPow = simple_string('**')
visitEq = simple_string('==')
visitNotEq = simple_string('!=')
visitLt = simple_string('<')
visitGt = simple_string('>')
visitLtE = simple_string('<=')
visitGtE = simple_string('>=')
visitLShift = simple_string('<<')
visitRShift = simple_string('>>')
visitIn = simple_string('in')
visitNotIn = simple_string('not in')
visitIs = simple_string('is')
visitIsNot = simple_string('is not')
visitBitAnd = simple_string('&')
visitBitOr = simple_string('|')
visitBitXor = simple_string('^')
visitEllipsis = simple_string('...')
visitYield = simple_string('yield {value:node}')
def visitBoolOp(self, node):
        with self.no_indent:
            values = list(node.values)
            left = values.pop(0)
            self.print('({:node}', left)
            while values:
                right = values.pop(0)
                self.print(' {0:node} {1:node}', node.op, right)
            self.print(')')
def visitIfExp(self, node):
self.print('{body:node} if {test:node} else {orelse:node}', **node.__dict__)
def visitLambda(self, node):
self.print('lambda {0:node}: {1:node}', node.args, node.body)
def visitListComp(self, node):
self.print('[{0:node}', node.elt)
generators = list(node.generators)
with self.no_indent:
while generators:
generator = generators.pop(0)
self.print('{0:node}', generator)
self.print(']')
def visitSetComp(self, node):
self.print('{{{0:node}', node.elt)
generators = list(node.generators)
with self.no_indent:
while generators:
generator = generators.pop(0)
self.print('{0:node}', generator)
self.print('}}')
def visitDictComp(self, node):
self.print('{{{0:node}:{1:node}', node.key, node.value)
generators = list(node.generators)
with self.no_indent:
while generators:
generator = generators.pop(0)
self.print('{0:node}', generator)
self.print('}}')
def visitcomprehension(self, node):
self.print(' for {0:node} in {1:node}', node.target, node.iter)
ifs = list(node.ifs)
while ifs:
if_ = ifs.pop(0)
self.print(" if {0:node}", if_)
@py3op
def visitarg(self, node):
self.print(node.arg)
if node.annotation:
with self.no_indent:
self.print(':{0:node}', node.annotation)
def visit_expr(node):
gen = ExprSourceGen()
gen.visit(node)
return gen.dumps()
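# A minimal round-trip sketch (expression invented for illustration):
# parse with the stdlib ast module, then render the tree back to source.
# This assumes a Python version where numeric literals parse to Num nodes,
# as the visitors above expect.
def _visit_expr_demo():  # pragma: no cover - documentation aid
    import ast
    tree = ast.parse('a + b * 2', mode='eval')
    assert visit_expr(tree.body) == '(a + (b * 2))'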
class NoIndent(object):
def __init__(self, gen):
self.gen = gen
def __enter__(self):
self.level = self.gen.level
self.gen.level = 0
def __exit__(self, *args):
self.gen.level = self.level
class Indenter(object):
def __init__(self, gen):
self.gen = gen
def __enter__(self):
self.gen.print('\n', level=0)
self.gen.level += 1
def __exit__(self, *args):
self.gen.level -= 1
class SourceGen(ExprSourceGen):
def __init__(self, header=''):
super(SourceGen, self).__init__()
print(header, file=self.out)
def visitModule(self, node):
children = list(self.children(node))
if children and isinstance(children[0], _ast.Expr):
if isinstance(children[0].value, _ast.Str):
doc = children.pop(0).value
self.print("'''")
self.print_lines(doc.s.split('\n'))
self.print_lines(["'''", '\n', '\n'])
for node in children:
self.visit(node)
def visitFor(self, node):
self.print('for {0:node} in {1:node}:', node.target, node.iter)
with self.indenter:
for stmnt in node.body:
self.visit(stmnt)
if node.orelse:
self.print('else:')
with self.indenter:
for stmnt in node.orelse:
self.visit(stmnt)
@py2op
def visitFunctionDef(self, node):
#fields = ('name', 'args', 'body', 'decorator_list')
for decorator in node.decorator_list:
self.print('@{decorator:node}\n', decorator=decorator)
args = visit_expr(node.args)
self.print('def {name}({args}):' , name=node.name, args=args)
with self.indenter:
for child in node.body:
self.visit(child)
return
@visitFunctionDef.py3op
def visitFunctionDef(self, node):
for decorator in node.decorator_list:
self.print('@{decorator:node}\n', decorator=decorator)
args = visit_expr(node.args)
self.print('def {name}({args})' , name=node.name, args=args)
with self.no_indent:
if node.returns:
self.print(' -> {:node}:', node.returns)
else:
self.print(':', node.returns)
with self.indenter:
for child in node.body:
self.visit(child)
return
def visitAssign(self, node):
targets = [visit_expr(target) for target in node.targets]
self.print('{targets} = {value:node}\n', targets=' = '.join(targets), value=node.value)
def visitAugAssign(self, node):
self.print('{target:node} {op:node}= {value:node}\n', **node.__dict__)
def visitIf(self, node, indent_first=True):
self.print('if {:node}:', node.test, level=self.level if indent_first else 0)
with self.indenter:
if node.body:
for expr in node.body:
self.visit(expr)
else:
self.print('pass')
if node.orelse and len(node.orelse) == 1 and isinstance(node.orelse[0], _ast.If):
self.print('el'); self.visit(node.orelse[0], indent_first=False)
elif node.orelse:
self.print('else:')
with self.indenter:
for expr in node.orelse:
self.visit(expr)
self.print('\n')
def visitImportFrom(self, node):
for name in node.names:
self.print("from {0} import {1:node}\n", node.module, name)
def visitImport(self, node):
for name in node.names:
self.print("import {:node}\n", name)
def visitPrint(self, node):
self.print("print ")
with self.no_indent:
            if node.dest:
                self.print(">> {:node}", node.dest)
                if not node.values and node.nl:
                    self.print("\n")
                    return
                self.print(", ")
i = 0
pc = lambda : self.print(", ") if i > 0 else None
for value in node.values:
pc()
self.print("{:node}" , value)
if not node.nl:
self.print(",")
self.print("\n")
def visitExec(self, node):
self.print('exec {0:node} in {1}, {2}\n', node.body,
'None' if node.globals is None else str_node(node.globals),
'None' if node.locals is None else str_node(node.locals))
def visitWith(self, node):
self.print('with {0:node}', node.context_expr)
if node.optional_vars is not None:
self.print(' as {0:node}', node.optional_vars, level=0)
self.print(':', level=0)
with self.indenter:
if node.body:
for expr in node.body:
self.visit(expr)
else:
self.print('pass\n')
def visitGlobal(self, node):
self.print('global ')
with self.no_indent:
names = list(node.names)
if names:
name = names.pop(0)
self.print(name)
while names:
name = names.pop(0)
self.print(', {0}', name)
self.print('\n')
def visitDelete(self, node):
self.print('del ')
targets = list(node.targets)
with self.no_indent:
target = targets.pop(0)
self.print('{0:node}', target)
while targets:
target = targets.pop(0)
self.print(', {0:node}', target)
self.print('\n')
def visitWhile(self, node):
self.print('while {0:node}:', node.test)
with self.indenter:
if node.body:
for expr in node.body:
self.visit(expr)
else:
self.print("pass")
if node.orelse:
self.print('else:')
with self.indenter:
for expr in node.orelse:
self.visit(expr)
self.print('\n')
self.print('\n')
def visitExpr(self, node):
self.print('{:node}\n', node.value)
visitBreak = simple_string('break\n')
visitPass = simple_string('pass\n')
visitContinue = simple_string('continue\n')
def visitReturn(self, node):
        if node.value is None:
            # a bare 'return' statement must still be emitted
            self.print('return\n')
        else:
            self.print('return {:node}\n', node.value)
def visitTryExcept(self, node):
self.print('try:')
with self.indenter:
if node.body:
for stmnt in node.body:
self.visit(stmnt)
else:
self.print('pass')
for hndlr in node.handlers:
self.visit(hndlr)
if node.orelse:
self.print('else:')
with self.indenter:
for stmnt in node.orelse:
self.visit(stmnt)
@py2op
def visitExceptHandler(self, node):
self.print('except')
with self.no_indent:
if node.type:
self.print(" {0:node}", node.type)
if node.name:
self.print(" as {0:node}", node.name)
self.print(":")
with self.indenter:
if node.body:
for stmnt in node.body:
self.visit(stmnt)
else:
self.print('pass')
@visitExceptHandler.py3op
def visitExceptHandler(self, node):
self.print('except')
with self.no_indent:
if node.type:
self.print(" {0:node}", node.type)
if node.name:
self.print(" as {0}", node.name)
self.print(":")
with self.indenter:
for stmnt in node.body:
self.visit(stmnt)
def visitTryFinally(self, node):
for item in node.body:
self.visit(item)
self.print('finally:')
with self.indenter:
for item in node.finalbody:
self.visit(item)
@py2op
def visitClassDef(self, node):
for decorator in node.decorator_list:
self.print('@{0:node}\n', decorator)
self.print('class {0}', node.name)
with self.no_indent:
self.print('(')
bases = list(node.bases)
if bases:
base = bases.pop(0)
self.print("{0:node}", base)
while bases:
base = bases.pop(0)
self.print(", {0:node}", base)
self.print(')')
self.print(":")
with self.indenter:
if node.body:
for stmnt in node.body:
self.visit(stmnt)
else:
self.print("pass\n\n")
@visitClassDef.py3op
def visitClassDef(self, node):
for decorator in node.decorator_list:
self.print('@{0:node}\n', decorator)
self.print('class {0}', node.name)
with self.no_indent:
self.print('(')
bases = list(node.bases)
i = 0
if bases:
i += 1
base = bases.pop(0)
self.print("{0:node}", base)
while bases:
base = bases.pop(0)
self.print(", {0:node}", base)
keywords = list(node.keywords)
if keywords:
if i: self.print(', ')
i += 1
keyword = keywords.pop(0)
self.print("{0:node}", keyword)
while keywords:
base = keywords.pop(0)
self.print(", {0:node}", keyword)
if node.starargs:
if i: self.print(', ')
i += 1
self.print("*{0:node}", node.starargs)
if node.kwargs:
if i: self.print(', ')
i += 1
self.print("*{0:node}", node.kwargs)
self.print(')')
self.print(":")
with self.indenter:
if node.body:
for stmnt in node.body:
self.visit(stmnt)
else:
self.print("pass\n\n")
def python_source(ast, file=sys.stdout):
'''
Generate executable python source code from an ast node.
:param ast: ast node
:param file: file to write output to.
'''
gen = SourceGen()
gen.visit(ast)
gen.dump(file)
def dump_python_source(ast):
'''
:return: a string containing executable python source code from an ast node.
:param ast: ast node
'''
gen = SourceGen()
gen.visit(ast)
return gen.dumps()
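# Hedged usage sketch: regenerate the source of a whole module. The exact
# whitespace may differ from the input, and this assumes a Python version
# where literals parse to Num/Str nodes, as the visitors above expect.
def _dump_python_source_demo():  # pragma: no cover - documentation aid
    import ast
    tree = ast.parse('x = 1\nfor i in range(x):\n    y = i + x\n')
    return dump_python_source(tree)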
|
vv1133/home_web | refs/heads/master | django/contrib/sites/managers.py | 118 | from django.conf import settings
from django.db import models
from django.db.models.fields import FieldDoesNotExist
class CurrentSiteManager(models.Manager):
"Use this to limit objects to those associated with the current site."
def __init__(self, field_name=None):
super(CurrentSiteManager, self).__init__()
self.__field_name = field_name
self.__is_validated = False
def _validate_field_name(self):
field_names = self.model._meta.get_all_field_names()
# If a custom name is provided, make sure the field exists on the model
if self.__field_name is not None and self.__field_name not in field_names:
raise ValueError("%s couldn't find a field named %s in %s." % \
(self.__class__.__name__, self.__field_name, self.model._meta.object_name))
# Otherwise, see if there is a field called either 'site' or 'sites'
else:
for potential_name in ['site', 'sites']:
if potential_name in field_names:
self.__field_name = potential_name
self.__is_validated = True
break
# Now do a type check on the field (FK or M2M only)
try:
field = self.model._meta.get_field(self.__field_name)
if not isinstance(field, (models.ForeignKey, models.ManyToManyField)):
raise TypeError("%s must be a ForeignKey or ManyToManyField." %self.__field_name)
except FieldDoesNotExist:
raise ValueError("%s couldn't find a field named %s in %s." % \
(self.__class__.__name__, self.__field_name, self.model._meta.object_name))
self.__is_validated = True
def get_queryset(self):
if not self.__is_validated:
self._validate_field_name()
return super(CurrentSiteManager, self).get_queryset().filter(**{self.__field_name + '__id__exact': settings.SITE_ID})
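# A hypothetical model using this manager (names invented for the sketch):
# ``on_site`` limits querysets to the site given by settings.SITE_ID.
#
#     from django.contrib.sites.models import Site
#
#     class Article(models.Model):
#         site = models.ForeignKey(Site)
#         objects = models.Manager()
#         on_site = CurrentSiteManager()   # finds the 'site' field
#
#     Article.on_site.all()   # only rows whose site matches SITE_ID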
|
DiptoDas8/Biponi | refs/heads/master | lib/python2.7/site-packages/django/core/checks/__init__.py | 36 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from .messages import (CheckMessage,
Debug, Info, Warning, Error, Critical,
DEBUG, INFO, WARNING, ERROR, CRITICAL)
from .registry import register, run_checks, tag_exists, Tags
# Import these to force registration of checks
import django.core.checks.compatibility.django_1_7_0 # NOQA
import django.core.checks.compatibility.django_1_8_0 # NOQA
import django.core.checks.model_checks # NOQA
import django.core.checks.security.base # NOQA
import django.core.checks.security.csrf # NOQA
import django.core.checks.security.sessions # NOQA
__all__ = [
'CheckMessage',
'Debug', 'Info', 'Warning', 'Error', 'Critical',
'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL',
'register', 'run_checks', 'tag_exists', 'Tags',
]
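# Hedged usage sketch (illustrative, not part of this module): custom system
# checks hook into the registry imported above via the ``register`` decorator.
#
#   from django.core import checks
#
#   @checks.register(checks.Tags.compatibility)
#   def example_check(app_configs, **kwargs):
#       # Return a list of CheckMessage instances, e.g. checks.Warning(...).
#       return []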
|
west-tandon/ReSearch | refs/heads/master | test/index/test_forward_index.py | 1 | import shutil
import tempfile
import unittest
from os import path
from research.coding.varbyte import Encoder
from research.index.common import IndexFactory
class ForwardIndexReadTest(unittest.TestCase):
def setUp(self):
self.test_dir = tempfile.mkdtemp()
self.meta_path = path.join(self.test_dir, 'metadata')
self.doc_info_path = path.join(self.test_dir, 'doc_info')
self.collection_path = path.join(self.test_dir, 'collection')
self.terms_path = path.join(self.test_dir, 'terms')
f = open(self.meta_path, 'w')
f.write('''
{{
"type" : "research.index.forward.ForwardIndex",
"name" : "fi",
"coding": "research.coding.varbyte",
"paths": {{
"doc_info": "{0}",
"collection": "{1}",
"terms": "{2}"
}}
}}
'''.format(self.doc_info_path, self.collection_path, self.terms_path))
f.close()
f = open(self.doc_info_path, 'w')
f.writelines(["Document1 0 0 3 3\n",
"Document2 1 3 3 3\n"])
f.close()
f = open(self.terms_path, 'w')
f.writelines(["0\n",
"1\n",
"2\n",
"3\n",
"4\n"])
f.close()
f = open(self.collection_path, 'bw')
self.doc1_terms = [0, 1, 2]
self.doc2_terms = [3, 4, 2]
encoder = Encoder(f)
for term_id in self.doc1_terms + self.doc2_terms:
encoder.encode(term_id)
f.close()
def tearDown(self):
shutil.rmtree(self.test_dir)
def test_forward_index(self):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
with self.subTest(document=1):
document = reader.next_document()
self.assertEqual(document.title, "Document1")
self.assertEqual(document.doc_id, 0)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 0)
self.assertEqual(document.next_term_id(), 1)
# Intentionally leaving out the next lines
# self.assertEqual(document.next_term_id(), 2)
# self.assertEqual(document.next_term_id(), None)
with self.subTest(document=2):
document = reader.next_document()
self.assertEqual(document.title, "Document2")
self.assertEqual(document.doc_id, 1)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 3)
self.assertEqual(document.next_term_id(), 4)
self.assertEqual(document.next_term_id(), 2)
self.assertEqual(document.next_term_id(), None)
self.assertEqual(reader.next_document(), None)
def test_forward_index_read_terms(self):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
with self.subTest(document=1):
document = reader.next_document()
self.assertEqual(document.next_term(), "0")
self.assertEqual(document.next_term(), "1")
self.assertEqual(document.next_term(), "2")
self.assertEqual(document.next_term(), None)
with self.subTest(document=2):
document = reader.next_document()
self.assertEqual(document.next_term(), "3")
self.assertEqual(document.next_term(), "4")
self.assertEqual(document.next_term(), "2")
self.assertEqual(document.next_term(), None)
def test_forward_index_skip_first(self):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
reader.skip(1)
document = reader.next_document()
self.assertEqual(document.title, "Document2")
self.assertEqual(document.doc_id, 1)
self.assertEqual(document.count, 3)
def test_forward_index_skip_second(self):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
document = reader.next_document()
reader.skip(1)
document = reader.next_document()
self.assertEqual(document, None)
def test_forward_index_skip_all(self):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
reader.skip(2)
document = reader.next_document()
self.assertEqual(document, None)
def test_forward_index_find_by_title(self):
with self.subTest(title="Document1"):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
title = "Document1"
document = reader.find_by_title(title)
self.assertEqual(document.title, "Document1")
self.assertEqual(document.doc_id, 0)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 0)
self.assertEqual(document.next_term_id(), 1)
self.assertEqual(document.next_term_id(), 2)
self.assertEqual(document.next_term_id(), None)
with self.subTest(title="Document2"):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
title = "Document2"
document = reader.find_by_title(title)
self.assertEqual(document.title, "Document2")
self.assertEqual(document.doc_id, 1)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 3)
self.assertEqual(document.next_term_id(), 4)
self.assertEqual(document.next_term_id(), 2)
self.assertEqual(document.next_term_id(), None)
with self.subTest(title="Document3"):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
title = "Document3"
document = reader.find_by_title(title)
self.assertIsNone(document)
def test_forward_index_find_by_id(self):
with self.subTest(id=0):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
document = reader.find_by_id(id=0)
self.assertEqual(document.title, "Document1")
self.assertEqual(document.doc_id, 0)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 0)
self.assertEqual(document.next_term_id(), 1)
self.assertEqual(document.next_term_id(), 2)
self.assertEqual(document.next_term_id(), None)
with self.subTest(id=1):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
document = reader.find_by_id(id=1)
self.assertEqual(document.title, "Document2")
self.assertEqual(document.doc_id, 1)
self.assertEqual(document.count, 3)
self.assertEqual(document.next_term_id(), 3)
self.assertEqual(document.next_term_id(), 4)
self.assertEqual(document.next_term_id(), 2)
self.assertEqual(document.next_term_id(), None)
with self.subTest(id=2):
forward_index = IndexFactory.from_path(self.meta_path)
reader = forward_index.reader()
document = reader.find_by_id(id=2)
self.assertIsNone(document)
def test_pruning(self):
meta_path = path.join(self.test_dir, 'f-metadata')
doc_info_path = path.join(self.test_dir, 'f-doc_info')
collection_path = path.join(self.test_dir, 'f-collection')
terms_path = path.join(self.test_dir, 'f-terms')
f = open(meta_path, 'w')
f.write('''
{{
"type" : "research.index.forward.ForwardIndex",
"name" : "ofi",
"paths": {{
"doc_info": "{0}",
"collection": "{1}",
"terms": "{2}"
}}
}}
'''.format(doc_info_path, collection_path, terms_path))
f.close()
forward_index = IndexFactory.from_path(self.meta_path)
output_index = IndexFactory.from_path(meta_path)
class TermPruner:
def test(self, term):
for ch in term:
if ord(ch) > ord("2"):
return False
return True
forward_index.prune(TermPruner(), output_index)
reader = output_index.reader()
document = reader.next_document()
self.assertEqual(document.title, "Document2")
self.assertEqual(document.doc_id, 0)
self.assertEqual(document.count, 2)
self.assertEqual(document.next_term_id(), 0)
self.assertEqual(document.next_term_id(), 1)
self.assertEqual(document.next_term_id(), None)
self.assertEqual(reader.next_document(), None)
|
mrrrgn/olympia | refs/heads/master | apps/bandwagon/models.py | 9 | import collections
import hashlib
import os
import re
import time
import uuid
from datetime import datetime
from django.conf import settings
from django.core.cache import cache
from django.db import connection, models, transaction
import caching.base as caching
import amo
import amo.models
import sharing.utils as sharing
from access import acl
from addons.models import Addon, AddonRecommendation
from amo.helpers import absolutify, user_media_path, user_media_url
from amo.urlresolvers import reverse
from amo.utils import sorted_groupby
from stats.models import CollectionShareCountTotal
from translations.fields import (LinkifiedField, save_signal,
NoLinksNoMarkupField, TranslatedField)
from users.models import UserProfile
from versions import compare
SPECIAL_SLUGS = amo.COLLECTION_SPECIAL_SLUGS
class TopTags(object):
"""Descriptor to manage a collection's top tags in cache."""
def key(self, obj):
return '%s:top-tags:%s' % (settings.CACHE_PREFIX, obj.id)
def __get__(self, obj, type=None):
if obj is None:
return self
return cache.get(self.key(obj), [])
def __set__(self, obj, value):
two_days = 60 * 60 * 24 * 2
cache.set(self.key(obj), value, two_days)
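# Hedged usage note (illustrative, not in the original source): because TopTags
# is a descriptor (bound as ``Collection.top_tags`` below), plain attribute
# access reads and writes the cache directly.
#
#   c = Collection.objects.get(pk=1)
#   c.top_tags = ['tools', 'privacy']  # cached for two days
#   c.top_tags                         # -> ['tools', 'privacy'], or [] on a miss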
class CollectionManager(amo.models.ManagerBase):
def get_query_set(self):
qs = super(CollectionManager, self).get_query_set()
return qs.transform(Collection.transformer)
def manual(self):
"""Only hand-crafted, favorites, and featured collections should appear
in this filter."""
types = (amo.COLLECTION_NORMAL, amo.COLLECTION_FAVORITES,
amo.COLLECTION_FEATURED, )
return self.filter(type__in=types)
def listed(self):
"""Return public collections only."""
return self.filter(listed=True)
def publishable_by(self, user):
"""Collections that are publishable by a user."""
owned_by = models.Q(author=user.id)
publishable_by = models.Q(users=user.id)
collections = self.filter(owned_by | publishable_by)
return collections.distinct().order_by('name__localized_string')
class CollectionBase:
"""A mixin with methods common to Collection and SyncedCollection."""
@classmethod
def make_index(cls, addon_ids):
ids = ':'.join(map(str, sorted(addon_ids)))
return hashlib.md5(ids).hexdigest()
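    # Example (follows directly from the code above): the index is
    # order-insensitive, e.g.
    #   make_index([3, 1, 2]) == hashlib.md5('1:2:3').hexdigest()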
def get_recs(self, app, version):
addons = list(self.addons.values_list('id', flat=True))
return self.get_recs_from_ids(addons, app, version)
@classmethod
def get_recs_from_ids(cls, addons, app, version, compat_mode='strict'):
vint = compare.version_int(version)
recs = RecommendedCollection.build_recs(addons)
qs = (Addon.objects.public()
.filter(id__in=recs, appsupport__app=app.id,
appsupport__min__lte=vint))
if compat_mode == 'strict':
qs = qs.filter(appsupport__max__gte=vint)
return recs, qs
class Collection(CollectionBase, amo.models.ModelBase):
TYPE_CHOICES = amo.COLLECTION_CHOICES.items()
uuid = models.CharField(max_length=36, blank=True, unique=True)
name = TranslatedField(require_locale=False)
# nickname is deprecated. Use slug.
nickname = models.CharField(max_length=30, blank=True, unique=True,
null=True)
slug = models.CharField(max_length=30, blank=True, null=True)
description = NoLinksNoMarkupField(require_locale=False)
default_locale = models.CharField(max_length=10, default='en-US',
db_column='defaultlocale')
type = models.PositiveIntegerField(db_column='collection_type',
choices=TYPE_CHOICES, default=0)
icontype = models.CharField(max_length=25, blank=True)
listed = models.BooleanField(
default=True, help_text='Collections are either listed or private.')
subscribers = models.PositiveIntegerField(default=0)
downloads = models.PositiveIntegerField(default=0)
weekly_subscribers = models.PositiveIntegerField(default=0)
monthly_subscribers = models.PositiveIntegerField(default=0)
application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
db_column='application_id',
null=True)
addon_count = models.PositiveIntegerField(default=0,
db_column='addonCount')
upvotes = models.PositiveIntegerField(default=0)
downvotes = models.PositiveIntegerField(default=0)
rating = models.FloatField(default=0)
all_personas = models.BooleanField(
default=False,
help_text='Does this collection only contain Themes?')
addons = models.ManyToManyField(Addon, through='CollectionAddon',
related_name='collections')
author = models.ForeignKey(UserProfile, null=True,
related_name='collections')
users = models.ManyToManyField(UserProfile, through='CollectionUser',
related_name='collections_publishable')
addon_index = models.CharField(
max_length=40, null=True, db_index=True,
help_text='Custom index for the add-ons in this collection')
# This gets overwritten in the transformer.
share_counts = collections.defaultdict(int)
objects = CollectionManager()
top_tags = TopTags()
class Meta(amo.models.ModelBase.Meta):
db_table = 'collections'
unique_together = (('author', 'slug'),)
def __unicode__(self):
return u'%s (%s)' % (self.name, self.addon_count)
def flush_urls(self):
urls = ['*%s' % self.get_url_path(),
self.icon_url]
return urls
def save(self, **kw):
if not self.uuid:
self.uuid = unicode(uuid.uuid4())
if not self.slug:
self.slug = self.uuid[:30]
self.clean_slug()
# Maintain our index of add-on ids.
if self.id:
ids = self.addons.values_list('id', flat=True)
self.addon_index = self.make_index(ids)
super(Collection, self).save(**kw)
def clean_slug(self):
if self.type in SPECIAL_SLUGS:
self.slug = SPECIAL_SLUGS[self.type]
return
if self.slug in SPECIAL_SLUGS.values():
self.slug += '~'
if not self.author:
return
qs = self.author.collections.using('default')
slugs = dict((slug, id) for slug, id in qs.values_list('slug', 'id'))
if self.slug in slugs and slugs[self.slug] != self.id:
for idx in range(len(slugs)):
new = '%s-%s' % (self.slug, idx + 1)
if new not in slugs:
self.slug = new
return
def get_url_path(self):
return reverse('collections.detail',
args=[self.author_username, self.slug])
def get_abs_url(self):
return absolutify(self.get_url_path())
def get_img_dir(self):
return os.path.join(user_media_path('collection_icons'),
str(self.id / 1000))
def upvote_url(self):
return reverse('collections.vote',
args=[self.author_username, self.slug, 'up'])
def downvote_url(self):
return reverse('collections.vote',
args=[self.author_username, self.slug, 'down'])
def edit_url(self):
return reverse('collections.edit',
args=[self.author_username, self.slug])
def watch_url(self):
return reverse('collections.watch',
args=[self.author_username, self.slug])
def delete_url(self):
return reverse('collections.delete',
args=[self.author_username, self.slug])
def delete_icon_url(self):
return reverse('collections.delete_icon',
args=[self.author_username, self.slug])
def share_url(self):
return reverse('collections.share',
args=[self.author_username, self.slug])
def feed_url(self):
return reverse('collections.detail.rss',
args=[self.author_username, self.slug])
def stats_url(self):
return reverse('collections.stats',
args=[self.author_username, self.slug])
@property
def author_username(self):
return self.author.username if self.author else 'anonymous'
@classmethod
def get_fallback(cls):
return cls._meta.get_field('default_locale')
@property
def url_slug(self):
"""uuid or nickname if chosen"""
return self.nickname or self.uuid
@property
def icon_url(self):
modified = int(time.mktime(self.modified.timetuple()))
if self.icontype:
# [1] is the whole ID, [2] is the directory
split_id = re.match(r'((\d*?)\d{1,3})$', str(self.id))
path = "/".join([
split_id.group(2) or '0',
"%s.png?m=%s" % (self.id, modified)
])
return user_media_url('collection_icons') + path
elif self.type == amo.COLLECTION_FAVORITES:
return settings.STATIC_URL + 'img/icons/heart.png'
else:
return settings.STATIC_URL + 'img/icons/collection.png'
def set_addons(self, addon_ids, comments={}):
"""Replace the current add-ons with a new list of add-on ids."""
order = dict((a, idx) for idx, a in enumerate(addon_ids))
# Partition addon_ids into add/update/remove buckets.
existing = set(self.addons.using('default')
.values_list('id', flat=True))
add, update = [], []
for addon in addon_ids:
bucket = update if addon in existing else add
bucket.append((addon, order[addon]))
remove = existing.difference(addon_ids)
cursor = connection.cursor()
now = datetime.now()
if remove:
cursor.execute("DELETE FROM addons_collections "
"WHERE collection_id=%s AND addon_id IN (%s)" %
(self.id, ','.join(map(str, remove))))
if self.listed:
for addon in remove:
amo.log(amo.LOG.REMOVE_FROM_COLLECTION,
(Addon, addon), self)
if add:
insert = '(%s, %s, %s, NOW(), NOW(), 0)'
values = [insert % (a, self.id, idx) for a, idx in add]
cursor.execute("""
INSERT INTO addons_collections
(addon_id, collection_id, ordering, created,
modified, downloads)
VALUES %s""" % ','.join(values))
if self.listed:
for addon_id, idx in add:
amo.log(amo.LOG.ADD_TO_COLLECTION,
(Addon, addon_id), self)
for addon, ordering in update:
(CollectionAddon.objects.filter(collection=self.id, addon=addon)
.update(ordering=ordering, modified=now))
for addon, comment in comments.iteritems():
try:
c = (CollectionAddon.objects.using('default')
.get(collection=self.id, addon=addon))
except CollectionAddon.DoesNotExist:
pass
else:
c.comments = comment
c.save(force_update=True)
self.save()
def is_subscribed(self, user):
"""Determines if the user is subscribed to this collection."""
return self.following.filter(user=user).exists()
def add_addon(self, addon):
"Adds an addon to the collection."
CollectionAddon.objects.get_or_create(addon=addon, collection=self)
if self.listed:
amo.log(amo.LOG.ADD_TO_COLLECTION, addon, self)
self.save() # To invalidate Collection.
def remove_addon(self, addon):
CollectionAddon.objects.filter(addon=addon, collection=self).delete()
if self.listed:
amo.log(amo.LOG.REMOVE_FROM_COLLECTION, addon, self)
self.save() # To invalidate Collection.
def owned_by(self, user):
return (user.id == self.author_id)
def can_view_stats(self, request):
if request and request.amo_user:
return (self.publishable_by(request.amo_user) or
acl.action_allowed(request, 'CollectionStats', 'View'))
return False
@caching.cached_method
def publishable_by(self, user):
return bool(self.owned_by(user) or self.users.filter(pk=user.id))
@staticmethod
def transformer(collections):
if not collections:
return
author_ids = set(c.author_id for c in collections)
authors = dict((u.id, u) for u in
UserProfile.objects.filter(id__in=author_ids))
for c in collections:
c.author = authors.get(c.author_id)
c_dict = dict((c.pk, c) for c in collections)
sharing.attach_share_counts(CollectionShareCountTotal, 'collection',
c_dict)
@staticmethod
def post_save(sender, instance, **kwargs):
from . import tasks
if kwargs.get('raw'):
return
tasks.collection_meta.delay(instance.id, using='default')
tasks.index_collections.delay([instance.id])
@staticmethod
def post_delete(sender, instance, **kwargs):
from . import tasks
if kwargs.get('raw'):
return
tasks.unindex_collections.delay([instance.id])
def check_ownership(self, request, require_owner, require_author,
ignore_disabled, admin):
"""
Used by acl.check_ownership to see if request.user has permissions for
the collection.
"""
from access import acl
return acl.check_collection_ownership(request, self, require_owner)
models.signals.post_save.connect(Collection.post_save, sender=Collection,
dispatch_uid='coll.post_save')
models.signals.pre_save.connect(save_signal, sender=Collection,
dispatch_uid='coll_translations')
models.signals.post_delete.connect(Collection.post_delete, sender=Collection,
dispatch_uid='coll.post_delete')
class CollectionAddon(amo.models.ModelBase):
addon = models.ForeignKey(Addon)
collection = models.ForeignKey(Collection)
# category (deprecated: for "Fashion Your Firefox")
comments = LinkifiedField(null=True)
downloads = models.PositiveIntegerField(default=0)
user = models.ForeignKey(UserProfile, null=True)
ordering = models.PositiveIntegerField(
default=0,
help_text='Add-ons are displayed in ascending order '
'based on this field.')
class Meta(amo.models.ModelBase.Meta):
db_table = 'addons_collections'
unique_together = (('addon', 'collection'),)
@staticmethod
def post_save_or_delete(sender, instance, **kwargs):
"""Update Collection.addon_count."""
from . import tasks
tasks.collection_meta.delay(instance.collection_id, using='default')
models.signals.pre_save.connect(save_signal, sender=CollectionAddon,
dispatch_uid='coll_addon_translations')
# Update Collection.addon_count.
models.signals.post_save.connect(CollectionAddon.post_save_or_delete,
sender=CollectionAddon,
dispatch_uid='coll.post_save')
models.signals.post_delete.connect(CollectionAddon.post_save_or_delete,
sender=CollectionAddon,
dispatch_uid='coll.post_save')
class CollectionFeature(amo.models.ModelBase):
title = TranslatedField()
tagline = TranslatedField()
class Meta(amo.models.ModelBase.Meta):
db_table = 'collection_features'
models.signals.pre_save.connect(save_signal, sender=CollectionFeature,
dispatch_uid='collectionfeature_translations')
class CollectionPromo(amo.models.ModelBase):
collection = models.ForeignKey(Collection, null=True)
locale = models.CharField(max_length=10, null=True)
collection_feature = models.ForeignKey(CollectionFeature)
class Meta(amo.models.ModelBase.Meta):
db_table = 'collection_promos'
unique_together = ('collection', 'locale', 'collection_feature')
@staticmethod
def transformer(promos):
if not promos:
return
promo_dict = dict((p.id, p) for p in promos)
q = (Collection.objects.no_cache()
.filter(collectionpromo__in=promos)
.extra(select={'promo_id': 'collection_promos.id'}))
for promo_id, collection in (sorted_groupby(q, 'promo_id')):
promo_dict[promo_id].collection = collection.next()
class CollectionWatcher(amo.models.ModelBase):
collection = models.ForeignKey(Collection, related_name='following')
user = models.ForeignKey(UserProfile)
class Meta(amo.models.ModelBase.Meta):
db_table = 'collection_subscriptions'
def flush_urls(self):
urls = ['*/user/%d/' % self.user_id]
return urls
@staticmethod
def post_save_or_delete(sender, instance, **kw):
from . import tasks
tasks.collection_watchers(instance.collection_id, using='default')
models.signals.post_save.connect(CollectionWatcher.post_save_or_delete,
sender=CollectionWatcher)
models.signals.post_delete.connect(CollectionWatcher.post_save_or_delete,
sender=CollectionWatcher)
class CollectionUser(models.Model):
collection = models.ForeignKey(Collection)
user = models.ForeignKey(UserProfile)
role = models.SmallIntegerField(
default=1,
choices=amo.COLLECTION_AUTHOR_CHOICES.items())
class Meta:
db_table = 'collections_users'
class CollectionVote(models.Model):
collection = models.ForeignKey(Collection, related_name='votes')
user = models.ForeignKey(UserProfile, related_name='votes')
vote = models.SmallIntegerField(default=0)
created = models.DateTimeField(null=True, auto_now_add=True)
class Meta:
db_table = 'collections_votes'
def flush_urls(self):
urls = ['*%s' % self.collection.get_url_path()]
return urls
@staticmethod
def post_save_or_delete(sender, instance, **kwargs):
# There are some issues with cascade deletes, where the
# collection disappears before the votes. Make sure the
# collection exists before trying to update it in the task.
if Collection.objects.filter(id=instance.collection_id).exists():
from . import tasks
tasks.collection_votes(instance.collection_id, using='default')
models.signals.post_save.connect(CollectionVote.post_save_or_delete,
sender=CollectionVote)
models.signals.post_delete.connect(CollectionVote.post_save_or_delete,
sender=CollectionVote)
class SyncedCollection(CollectionBase, amo.models.ModelBase):
"""
We remember what add-ons a user has installed with this table.
The addon guids come in from the discovery pane and we translate those to
addon ids. If those addons match an addon_index of an existing
SyncedCollection its count is incremented; otherwise a new collection is
created for that bag of addons.
This uses separate tables because we don't want the high volume of data to
crush performance on normal collection tables. SyncedCollections are used
to generate recommendations and may be used for other data mining in the
future.
"""
addon_index = models.CharField(
max_length=40, null=True,
db_index=True, unique=True,
help_text='md5 of addon ids in this collection for fast comparisons')
addons = models.ManyToManyField(Addon, through='SyncedCollectionAddon',
related_name='synced_collections')
count = models.IntegerField("Number of users with this collection.",
default=0)
class Meta:
db_table = 'synced_collections'
def save(self, **kw):
return super(SyncedCollection, self).save(**kw)
def set_addons(self, addon_ids):
# SyncedCollections are only written once so we don't need to deal with
# updates or deletes.
relations = [
SyncedCollectionAddon(addon_id=addon_id, collection_id=self.pk)
for addon_id in addon_ids]
SyncedCollectionAddon.objects.bulk_create(relations)
if not self.addon_index:
self.addon_index = self.make_index(addon_ids)
self.save()
transaction.commit_unless_managed()
class SyncedCollectionAddon(models.Model):
addon = models.ForeignKey(Addon)
collection = models.ForeignKey(SyncedCollection)
class Meta(amo.models.ModelBase.Meta):
db_table = 'synced_addons_collections'
unique_together = (('addon', 'collection'),)
class RecommendedCollection(Collection):
class Meta:
proxy = True
def save(self, **kw):
self.type = amo.COLLECTION_RECOMMENDED
return super(RecommendedCollection, self).save(**kw)
@classmethod
def build_recs(cls, addon_ids):
"""Get the top ranking add-ons according to recommendation scores."""
scores = AddonRecommendation.scores(addon_ids)
d = collections.defaultdict(int)
for others in scores.values():
for addon, score in others.items():
d[addon] += score
addons = sorted(d.items(), key=lambda x: x[1], reverse=True)
return [addon for addon, score in addons if addon not in addon_ids]
class FeaturedCollection(amo.models.ModelBase):
application = models.PositiveIntegerField(choices=amo.APPS_CHOICES,
db_column='application_id')
collection = models.ForeignKey(Collection)
locale = models.CharField(max_length=10, null=True)
class Meta:
db_table = 'featured_collections'
def __unicode__(self):
return u'%s (%s: %s)' % (self.collection, self.application,
self.locale)
class MonthlyPick(amo.models.ModelBase):
addon = models.ForeignKey(Addon)
blurb = models.TextField()
image = models.URLField()
locale = models.CharField(max_length=10, unique=True, null=True,
blank=True)
class Meta:
db_table = 'monthly_pick'
|
southpawtech/TACTIC-DEV | refs/heads/master | src/pyasm/web/cherrypy30_adapter.py | 1 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['CherryPyException', 'CherryPyAdapter']
import types, os, re
from pyasm.common import TacticException
from web_environment import *
import cherrypy
class CherryPyException(Exception):
pass
def get_app_server():
server_cls = os.environ.get("TACTIC_SERVER_CLS")
if not server_cls:
from app_server import BaseAppServer
base_cls = BaseAppServer
elif server_cls == "pyasm.web.WidgetAppServer":
from widget_app_server import WidgetAppServer
base_cls = WidgetAppServer
else:
from simple_app_server import SimpleAppServer
base_cls = SimpleAppServer
class CherryPyAppServer(base_cls):
def get_adapter(my):
adapter = CherryPyAdapter()
return adapter
@cherrypy.expose()
def index(my, **kwargs):
my.hash = ()
return my.get_display()
        # capture the extra URL path segments (vpath) in my.hash
@cherrypy.expose()
def default(my, *vpath, **kwargs):
my.hash = vpath
return my.get_display()
return CherryPyAppServer
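# Hedged usage sketch (an assumption about the surrounding deployment, not
# shown in this file): the generated class is typically mounted on CherryPy's
# tree before the engine starts.
#
#   app_server_cls = get_app_server()
#   cherrypy.tree.mount(app_server_cls(), '/tactic')
#   cherrypy.engine.start()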
def get_xmlrpc_server():
'''dynamically load in an xmlrpc server'''
from cherrypy import _cptools
class XmlrpcServer(_cptools.XMLRPCController):
def get_adapter(my):
adapter = CherryPyAdapter()
return adapter
return XmlrpcServer
from cherrypy_adapter import CherryPyAdapter as CherryPyAdapter20
class CherryPyAdapter(CherryPyAdapter20):
"""Encapsulates cherrypy environment. Implements the web interface"""
def __init__(my):
my.request = cherrypy.request
my.response = cherrypy.response
#my.request.wsgi_environ['REQUEST_URI'] = my.request.browser_url
my.request.wsgi_environ['REQUEST_URI'] = cherrypy.url()
def get_context_name(my):
'''this includes all of the subdirectories as well as the main
context'''
dir = my.request.path_info
p = re.compile( r"/(tactic|projects)/?(\w+)/")
m = p.search(dir)
if not m:
return "default"
context = m.groups()[1]
return context
def get_request_method(my):
return my.request.method
def get_request(my):
return my.request
def get_request_headers(my):
return my.request.headers
def get_response(my):
return my.response
def set_header(my, name, value):
my.response.headers[name] = value
def set_content_type(my, content_type):
my.response.headers['Content-Type'] = content_type
def get_content_type(my):
return my.response.headers['Content-Type']
def set_force_download(my, filename):
my.response.headers['Content-Type'] = "application/force-download"
my.response.headers['Content-Disposition'] = "attachment; filename=%s" % filename
def set_csv_download(my, filename):
filename = os.path.basename(filename)
my.response.headers['Content-Type'] = "text/x-csv"
my.response.headers['Content-Disposition'] = "attachment; filename=%s" % filename
# form submission functions
def reset_form(my):
my.request.params = {}
def get_form_keys(my):
return my.request.params.keys()
def has_form_key(my, key):
return my.request.params.has_key(key)
def set_form_value(my, name, value):
'''Set the form value to appear like it was submitted'''
# protect from accidental null names. This can occur when an
        # input widget has no name specified.
if not name:
return
my.request.params[name] = value
def get_form_data(my):
return my.request.params
# cookie functions
def set_cookie(my, name, value):
'''set a cookie'''
cherrypy.response.cookie[name] = value
cherrypy.response.cookie[name]['path'] = '/'
cherrypy.response.cookie[name]['max-age'] = 120*3600
def get_cookie(my, name):
'''get a cookie'''
try:
return cherrypy.request.cookie[name].value
except KeyError, e:
return ""
def get_cookies(my):
        '''get all cookies'''
return cherrypy.request.cookie
# environment functions
"""
def get_env_keys(my):
env = my.request.wsgi_environ
return env.keys()
def get_env(my, env_var):
env = my.request.wsgi_environ
return env.get(env_var)
"""
|
alexmandujano/django | refs/heads/master | django/contrib/formtools/tests/wizard/wizardtests/forms.py | 313 | import os
import tempfile
from django import forms
from django.contrib.auth.models import User
from django.core.files.storage import FileSystemStorage
from django.forms.formsets import formset_factory
from django.forms.models import modelformset_factory
from django.http import HttpResponse
from django.template import Template, Context
from django.contrib.formtools.wizard.views import WizardView
temp_storage_location = tempfile.mkdtemp(dir=os.environ.get('DJANGO_TEST_TEMP_DIR'))
temp_storage = FileSystemStorage(location=temp_storage_location)
class Page1(forms.Form):
name = forms.CharField(max_length=100)
user = forms.ModelChoiceField(queryset=User.objects.all())
thirsty = forms.NullBooleanField()
class Page2(forms.Form):
address1 = forms.CharField(max_length=100)
address2 = forms.CharField(max_length=100)
file1 = forms.FileField()
class Page3(forms.Form):
random_crap = forms.CharField(max_length=100)
Page4 = formset_factory(Page3, extra=2)
class ContactWizard(WizardView):
file_storage = temp_storage
def done(self, form_list, **kwargs):
c = Context({
'form_list': [x.cleaned_data for x in form_list],
'all_cleaned_data': self.get_all_cleaned_data(),
})
for form in self.form_list.keys():
c[form] = self.get_cleaned_data_for_step(form)
c['this_will_fail'] = self.get_cleaned_data_for_step('this_will_fail')
return HttpResponse(Template('').render(c))
def get_context_data(self, form, **kwargs):
context = super(ContactWizard, self).get_context_data(form, **kwargs)
if self.storage.current_step == 'form2':
context.update({'another_var': True})
return context
class UserForm(forms.ModelForm):
class Meta:
model = User
fields = ('username', 'email')
UserFormSet = modelformset_factory(User, form=UserForm)
class SessionContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'
class CookieContactWizard(ContactWizard):
storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
|
WarrenWeckesser/numpngw | refs/heads/master | tests/test_write_png.py | 1 | from __future__ import division, print_function
import unittest
import io
import struct
import zlib
import numpy as np
from numpy.testing import (assert_, assert_equal, assert_array_equal,
assert_raises)
import numpngw
def next_chunk(s):
chunk_len = struct.unpack("!I", s[:4])[0]
chunk_type = s[4:8]
chunk_data = s[8:8+chunk_len]
crc = struct.unpack("!I", s[8+chunk_len:8+chunk_len+4])[0]
check = zlib.crc32(chunk_type + chunk_data) & 0xFFFFFFFF
if crc != check:
raise RuntimeError("CRC not correct, chunk_type=%r" % (chunk_type,))
return chunk_type, chunk_data, s[8+chunk_len+4:]
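# For reference, the chunk layout parsed above is the standard PNG framing:
#   4-byte big-endian length | 4-byte type | <length> data bytes | 4-byte CRC
# where the CRC covers the type and data fields (verified via zlib.crc32).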
def check_signature(s):
signature = s[:8]
s = s[8:]
assert_equal(signature, b'\x89PNG\x0D\x0A\x1A\x0A')
return s
def check_ihdr(file_contents, width, height, bit_depth, color_type,
compression_method=0, filter_method=0, interlace=0):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"IHDR")
values = struct.unpack("!IIBBBBB", chunk_data)
assert_equal(values[:2], (width, height), "wrong width and height")
assert_equal(values[2], bit_depth, "wrong bit depth")
assert_equal(values[3], color_type, "wrong color type")
assert_equal(values[4], compression_method, "wrong compression method")
assert_equal(values[5], filter_method, "wrong filter method")
assert_equal(values[6], interlace, "wrong interlace")
return file_contents
def check_trns(file_contents, color_type, transparent, palette=None):
if color_type == 3 and palette is None:
raise ValueError("color_type is 3 but no palette was provided.")
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"tRNS")
assert_(color_type not in [4, 6],
msg='Found tRNS chunk, but color_type is %r' % (color_type,))
if color_type == 0:
# Grayscale
trns = struct.unpack("!H", chunk_data)[0]
assert_equal(trns, transparent)
elif color_type == 2:
# RGB
trns = struct.unpack("!HHH", chunk_data)
assert_equal(trns, transparent)
elif color_type == 3:
# alphas for the first len(chunk_data) palette indices.
trns_index = np.frombuffer(chunk_data, dtype=np.uint8)[0]
trns_color = palette[trns_index]
assert_equal(trns_color, transparent)
else:
raise RuntimeError("check_trns called with invalid color_type %r" %
(color_type,))
return file_contents
def check_bkgd(file_contents, color, color_type, palette=None):
if color_type == 3 and palette is None:
raise ValueError("color_type is 3 but no palette was provided.")
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"bKGD")
if color_type == 0 or color_type == 4:
clr = struct.unpack("!H", chunk_data)
elif color_type == 2 or color_type == 6:
clr = struct.unpack("!HHH", chunk_data)
else:
# color_type is 3.
clr_index = struct.unpack("B", chunk_data)
clr = palette[clr_index]
assert_equal(clr, color,
"%r != %r color_type=%r" % (clr, color, color_type))
return file_contents
def check_phys(file_contents, phys):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"pHYs")
xppu, yppu, unit = struct.unpack("!IIB", chunk_data)
assert_equal((xppu, yppu, unit), phys)
return file_contents
def check_chrm(file_contents, chrm):
# chrm must be the scaled integer version of the chromaticity values.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"cHRM")
values = struct.unpack("!IIIIIIII", chunk_data)
assert_array_equal(np.array(values).reshape(4, 2), chrm)
return file_contents
def check_text(file_contents, keyword, text_string=None):
# If text_string is None, this code just checks the keyword.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"tEXt")
assert_(b'\x00' in chunk_data)
key, text = chunk_data.split(b'\x00', 1)
assert_equal(key, keyword)
if text_string is not None:
assert_equal(text, text_string)
return file_contents
def check_sbit(file_contents, sbit, color_type):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"sBIT")
# Mapping from color_type to required length of sbit:
# required_length = {0: 1, 2: 3, 3: 3, 4: 2, 6: 4}
values = struct.unpack('BBBB'[:len(chunk_data)], chunk_data)
assert_equal(values, sbit)
return file_contents
def check_idat(file_contents, color_type, bit_depth, interlace, img,
palette=None):
# This function assumes the entire image is in the chunk.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"IDAT")
decompressed = zlib.decompress(chunk_data)
stream = np.frombuffer(decompressed, dtype=np.uint8)
height, width = img.shape[:2]
img2 = stream_to_array(stream, width, height, color_type, bit_depth,
interlace)
if palette is not None:
img2 = palette[img2]
assert_array_equal(img2, img)
return file_contents
def stream_to_array_old(stream, width, height, color_type, bit_depth):
    # `stream` is a 1-d numpy array with dtype np.uint8 containing the
# data from one or more IDAT or fdAT chunks.
#
# This function converts `stream` to a numpy array.
ncols, rembits = divmod(width*bit_depth, 8)
ncols += rembits > 0
if bit_depth < 8:
bytes_per_pixel = 1 # Not really, but we need 1 here for later.
data_bytes_per_line = ncols
else:
# nchannels is a map from color_type to the number of color
# channels (e.g. an RGB image has three channels).
nchannels = {0: 1, 2: 3, 3: 1, 4: 2, 6: 4}
bytes_per_channel = bit_depth // 8
bytes_per_pixel = bytes_per_channel * nchannels[color_type]
data_bytes_per_line = bytes_per_pixel * width
    data_width = data_bytes_per_line // bytes_per_pixel
lines = stream.reshape(height, data_bytes_per_line + 1)
prev = np.zeros((data_width, bytes_per_pixel), dtype=np.uint8)
p = np.empty((height, data_width, bytes_per_pixel), dtype=np.uint8)
for k in range(lines.shape[0]):
line_filter_type = lines[k, 0]
filtered = lines[k, 1:].reshape(-1, bytes_per_pixel)
if line_filter_type == 0:
p[k] = filtered
elif line_filter_type == 1:
p[k] = numpngw._filter1inv(filtered, prev)
elif line_filter_type == 2:
p[k] = numpngw._filter2inv(filtered, prev)
elif line_filter_type == 3:
p[k] = numpngw._filter3inv(filtered, prev)
elif line_filter_type == 4:
p[k] = numpngw._filter4inv(filtered, prev)
else:
raise ValueError('invalid filter type: %i' % (line_filter_type,))
prev = p[k]
    # At this point, p has data type uint8 and has shape
# (height, width, bytes_per_pixel).
# 16 bit components of the pixel are stored in big-endian format.
uint8to16 = np.array([256, 1], dtype=np.uint16)
if color_type == 0:
# grayscale
if bit_depth == 16:
img = p.dot(uint8to16)
elif bit_depth == 8:
img = p[:, :, 0]
else: # bit_depth is 1, 2 or 4.
img = numpngw._unpack(p.reshape(height, -1),
bitdepth=bit_depth, width=width)
elif color_type == 2:
# RGB
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
img = p.reshape(height, width, 3, 2).dot(uint8to16)
else: # bit_depth is 8.
img = p
elif color_type == 3:
# indexed
img = p[:, :, 0]
elif color_type == 4:
# grayscale with alpha
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
img = p.reshape(height, width, 2, 2).dot(uint8to16)
else: # bit_depth is 8.
img = p
elif color_type == 6:
# RGBA
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
img = p.reshape(height, width, 4, 2).dot(uint8to16)
else: # bit_depth is 8.
img = p
else:
raise RuntimeError('invalid color type %r' % (color_type,))
return img
def img_color_format(color_type, bitdepth):
"""
Given a color type and a bit depth, return the length
of the color dimension and the data type of the numpy
array that will hold an image with those parameters.
"""
# nchannels is a map from color_type to the number of color
# channels (e.g. an RGB image has three channels).
nchannels = {0: 1, # grayscale
2: 3, # RGB
3: 1, # indexed RGB
4: 2, # grayscale+alpha
6: 4} # RGBA
if color_type == 3:
dtype = np.uint8
else:
dtype = np.uint8 if bitdepth <= 8 else np.uint16
return nchannels[color_type], dtype
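# Examples (follow from the mapping above):
#   img_color_format(2, 16) -> (3, np.uint16)   # 16-bit RGB
#   img_color_format(3, 8)  -> (1, np.uint8)    # indexed color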
def stream_to_array(stream, width, height, color_type, bit_depth, interlace=0):
    # `stream` is a 1-d numpy array with dtype np.uint8 containing the
# data from one or more IDAT or fdAT chunks.
#
# This function converts `stream` to a numpy array.
img_color_dim, img_dtype = img_color_format(color_type, bit_depth)
if img_color_dim == 1:
img_shape = (height, width)
else:
img_shape = (height, width, img_color_dim)
img = np.empty(img_shape, dtype=img_dtype)
if interlace == 1:
passes = numpngw._interlace_passes(img)
else:
passes = [img]
pass_start_index = 0
for a in passes:
if a.size == 0:
continue
pass_height, pass_width = a.shape[:2]
ncols, rembits = divmod(pass_width*bit_depth, 8)
ncols += rembits > 0
if bit_depth < 8:
bytes_per_pixel = 1 # Not really, but we need 1 here for later.
data_bytes_per_line = ncols
else:
# nchannels is a map from color_type to the number of color
# channels (e.g. an RGB image has three channels).
nchannels = {0: 1, 2: 3, 3: 1, 4: 2, 6: 4}
bytes_per_channel = bit_depth // 8
bytes_per_pixel = bytes_per_channel * nchannels[color_type]
data_bytes_per_line = bytes_per_pixel * pass_width
data_width = data_bytes_per_line // bytes_per_pixel
pass_end_index = (pass_start_index +
pass_height * (data_bytes_per_line + 1))
shp = (pass_height, data_bytes_per_line + 1)
lines = stream[pass_start_index:pass_end_index].reshape(shp)
pass_start_index = pass_end_index
prev = np.zeros((data_width, bytes_per_pixel), dtype=np.uint8)
shp = (pass_height, data_width, bytes_per_pixel)
p = np.empty(shp, dtype=np.uint8)
for k in range(lines.shape[0]):
line_filter_type = lines[k, 0]
filtered = lines[k, 1:].reshape(-1, bytes_per_pixel)
if line_filter_type == 0:
p[k] = filtered
elif line_filter_type == 1:
p[k] = numpngw._filter1inv(filtered, prev)
elif line_filter_type == 2:
p[k] = numpngw._filter2inv(filtered, prev)
elif line_filter_type == 3:
p[k] = numpngw._filter3inv(filtered, prev)
elif line_filter_type == 4:
p[k] = numpngw._filter4inv(filtered, prev)
else:
raise ValueError('invalid filter type: %i' %
(line_filter_type,))
prev = p[k]
        # At this point, p has data type uint8 and has shape
# (height, width, bytes_per_pixel).
# 16 bit components of the pixel are stored in big-endian format.
uint8to16 = np.array([256, 1], dtype=np.uint16)
if color_type == 0:
# grayscale
if bit_depth == 16:
pass_img = p.dot(uint8to16)
elif bit_depth == 8:
pass_img = p[:, :, 0]
else: # bit_depth is 1, 2 or 4.
pass_img = numpngw._unpack(p.reshape(pass_height, -1),
bitdepth=bit_depth,
width=pass_width)
elif color_type == 2:
# RGB
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
shp = (pass_height, pass_width, 3, 2)
pass_img = p.reshape(shp).dot(uint8to16)
else: # bit_depth is 8.
pass_img = p
elif color_type == 3:
# indexed
if bit_depth < 8:
pass_img = numpngw._unpack(p[:, :, 0], bitdepth=bit_depth,
width=pass_width)
else:
pass_img = p[:, :, 0]
elif color_type == 4:
# grayscale with alpha
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
shp = (pass_height, pass_width, 2, 2)
pass_img = p.reshape(shp).dot(uint8to16)
else: # bit_depth is 8.
pass_img = p
elif color_type == 6:
# RGBA
if bit_depth == 16:
# Combine high and low bytes to 16-bit values.
shp = (pass_height, pass_width, 4, 2)
pass_img = p.reshape(shp).dot(uint8to16)
else: # bit_depth is 8.
pass_img = p
else:
raise RuntimeError('invalid color type %r' % (color_type,))
a[...] = pass_img
return img
def check_actl(file_contents, num_frames, num_plays):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"acTL")
values = struct.unpack("!II", chunk_data)
assert_equal(values, (num_frames, num_plays))
return file_contents
def check_fctl(file_contents, sequence_number, width, height,
x_offset=0, y_offset=0, delay_num=0, delay_den=1,
dispose_op=0, blend_op=0):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"fcTL")
values = struct.unpack("!IIIIIHHBB", chunk_data)
expected_values = (sequence_number, width, height, x_offset, y_offset,
delay_num, delay_den, dispose_op, blend_op)
assert_equal(values, expected_values)
return file_contents
def check_time(file_contents, timestamp):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"tIME")
values = struct.unpack("!HBBBBB", chunk_data)
assert_equal(values, timestamp)
return file_contents
def check_gama(file_contents, gamma):
# gamma is the floating point gamma value.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"gAMA")
gama = struct.unpack("!I", chunk_data)[0]
igamma = int(gamma*100000 + 0.5)
assert_equal(gama, igamma)
return file_contents
def check_iend(file_contents):
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
assert_equal(chunk_type, b"IEND")
assert_equal(chunk_data, b"")
# The IEND chunk is the last chunk, so file_contents should now
# be empty.
assert_equal(file_contents, b"")
class TestWritePng(unittest.TestCase):
def test_write_png_nbit_grayscale(self):
# Test the creation of grayscale images for bit depths of 1, 2, 4
# 8 and 16, with or without a `transparent` color selected.
np.random.seed(123)
for filter_type in [0, 1, 2, 3, 4, "heuristic", "auto"]:
for bitdepth in [1, 2, 4, 8, 16]:
for transparent in [None, 0]:
for interlace in [0, 1]:
dt = np.uint16 if bitdepth == 16 else np.uint8
maxval = 2**bitdepth
sz = (3, 11)
img = np.random.randint(0, maxval, size=sz).astype(dt)
if transparent is not None:
img[2:4, 2:] = transparent
f = io.BytesIO()
numpngw.write_png(f, img, bitdepth=bitdepth,
transparent=transparent,
filter_type=filter_type,
interlace=interlace)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=img.shape[1],
height=img.shape[0],
bit_depth=bitdepth,
color_type=0,
interlace=interlace)
file_contents = check_text(file_contents,
b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
if transparent is not None:
file_contents = check_trns(file_contents,
color_type=0,
transparent=transparent)
file_contents = check_idat(file_contents, color_type=0,
bit_depth=bitdepth,
interlace=interlace,
img=img)
check_iend(file_contents)
def test_write_png_with_alpha(self):
# Test creation of grayscale+alpha and RGBA images (color types 4
# and 6, resp.), with bit depths 8 and 16.
w = 25
h = 15
np.random.seed(12345)
for filter_type in [0, 1, 2, 3, 4, "heuristic", "auto"]:
for color_type in [4, 6]:
num_channels = 2 if color_type == 4 else 4
for bit_depth in [8, 16]:
for interlace in [0, 1]:
dt = np.uint8 if bit_depth == 8 else np.uint16
sz = (h, w, num_channels)
img = np.random.randint(0, 2**bit_depth,
size=sz).astype(dt)
f = io.BytesIO()
numpngw.write_png(f, img, filter_type=filter_type,
interlace=interlace)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=w, height=h,
bit_depth=bit_depth,
color_type=color_type,
interlace=interlace)
file_contents = check_text(file_contents,
b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
file_contents = check_idat(file_contents,
color_type=color_type,
bit_depth=bit_depth,
interlace=interlace,
img=img)
check_iend(file_contents)
def test_write_png_RGB(self):
# Test creation of RGB images (color type 2), with and without
# a `transparent` color selected, and with bit depth 8 and 16.
w = 24
h = 10
np.random.seed(12345)
for filter_type in [0, 1, 2, 3, 4, "heuristic", "auto"]:
for transparent in [None, (0, 0, 0)]:
for bit_depth in [8, 16]:
for interlace in [0, 1]:
dt = np.uint16 if bit_depth == 16 else np.uint8
maxval = 2**bit_depth
img = np.random.randint(0, maxval,
size=(h, w, 3)).astype(dt)
if transparent:
img[2:4, 2:4] = transparent
f = io.BytesIO()
numpngw.write_png(f, img, transparent=transparent,
filter_type=filter_type,
interlace=interlace)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=w, height=h,
bit_depth=bit_depth,
color_type=2,
interlace=interlace)
file_contents = check_text(file_contents,
b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
if transparent:
file_contents = check_trns(file_contents,
color_type=2,
transparent=transparent)
file_contents = check_idat(file_contents, color_type=2,
bit_depth=bit_depth,
interlace=interlace,
img=img)
check_iend(file_contents)
def test_write_png_8bit_RGB_palette(self):
for interlace in [0, 1]:
for transparent in [None, (0, 1, 2)]:
for bitdepth in [1, 2, 4, 8]:
w = 13
h = 4
ncolors = min(2**bitdepth, w*h)
idx = np.arange(w*h).reshape(h, w) % ncolors
colors = np.arange(ncolors*3).reshape(ncolors, 3)
colors = colors.astype(np.uint8)
img = colors[idx]
f = io.BytesIO()
numpngw.write_png(f, img, use_palette=True,
transparent=transparent,
interlace=interlace,
bitdepth=bitdepth)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=img.shape[1],
height=img.shape[0],
bit_depth=bitdepth,
color_type=3,
interlace=interlace)
file_contents = check_text(file_contents, b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
# Check the PLTE chunk.
chunk_type, chunk_data, file_contents = \
next_chunk(file_contents)
self.assertEqual(chunk_type, b"PLTE")
p = np.frombuffer(chunk_data,
dtype=np.uint8).reshape(-1, 3)
n = ncolors*3
expected = np.arange(n, dtype=np.uint8).reshape(-1, 3)
assert_array_equal(p, expected)
if transparent is not None:
file_contents = check_trns(file_contents,
color_type=3,
transparent=transparent,
palette=p)
# Check the IDAT chunk.
chunk_type, chunk_data, file_contents = \
next_chunk(file_contents)
self.assertEqual(chunk_type, b"IDAT")
decompressed = zlib.decompress(chunk_data)
stream = np.frombuffer(decompressed, dtype=np.uint8)
height, width = img.shape[:2]
img2 = stream_to_array(stream, width, height, color_type=3,
bit_depth=bitdepth,
interlace=interlace)
expected = idx
assert_array_equal(img2, expected)
check_iend(file_contents)
def test_write_png_max_chunk_len(self):
# Create an 8-bit grayscale image.
w = 250
h = 150
max_chunk_len = 500
img = np.random.randint(0, 256, size=(h, w)).astype(np.uint8)
f = io.BytesIO()
numpngw.write_png(f, img, max_chunk_len=max_chunk_len)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=w, height=h,
bit_depth=8, color_type=0, interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
zstream = b''
while True:
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
if chunk_type != b"IDAT":
break
self.assertEqual(chunk_type, b"IDAT")
zstream += chunk_data
self.assertLessEqual(len(chunk_data), max_chunk_len)
data = zlib.decompress(zstream)
b = np.frombuffer(data, dtype=np.uint8)
lines = b.reshape(h, w + 1)
img2 = lines[:, 1:].reshape(h, w)
assert_array_equal(img2, img)
# Check the IEND chunk; chunk_type and chunk_data were read
# in the loop above.
self.assertEqual(chunk_type, b"IEND")
self.assertEqual(chunk_data, b"")
self.assertEqual(file_contents, b"")
def test_write_png_timestamp_gamma_chromaticity(self):
np.random.seed(123)
img = np.random.randint(0, 256, size=(10, 10)).astype(np.uint8)
f = io.BytesIO()
timestamp = (1452, 4, 15, 8, 9, 10)
gamma = 2.2
chromaticity = [[0.500, 0.750],
[0.125, 0.960],
[0.875, 0.625],
[0.750, 0.375]]
numpngw.write_png(f, img, timestamp=timestamp, gamma=gamma,
chromaticity=chromaticity)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=img.shape[1], height=img.shape[0],
bit_depth=8, color_type=0, interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
file_contents = check_time(file_contents, timestamp)
file_contents = check_gama(file_contents, gamma)
expected_chrm = (100000*np.array(chromaticity) + 0.5).astype(np.uint32)
file_contents = check_chrm(file_contents, expected_chrm)
file_contents = check_idat(file_contents, color_type=0, bit_depth=8,
interlace=0, img=img)
check_iend(file_contents)
def test_write_png_bkgd(self):
# Test creation of RGB images (color type 2), with a background color.
w = 16
h = 8
np.random.seed(123)
for bit_depth in [8, 16]:
maxval = 2**bit_depth
bg = (maxval - 1, maxval - 2, maxval - 3)
dt = np.uint16 if bit_depth == 16 else np.uint8
img = np.random.randint(0, maxval, size=(h, w, 3)).astype(dt)
f = io.BytesIO()
numpngw.write_png(f, img, background=bg, filter_type=0)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=bit_depth, color_type=2,
interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
file_contents = check_bkgd(file_contents, color=bg, color_type=2)
file_contents = check_idat(file_contents, color_type=2,
bit_depth=bit_depth, interlace=0,
img=img)
check_iend(file_contents)
def test_write_png_sbit(self):
# Test creation of sBIT chunks for color_type 0 and 2.
w = 7
h = 5
np.random.seed(123)
for bit_depth in [8, 16]:
for size in [(h, w), (h, w, 3)]:
maxval = 2**bit_depth
dt = np.uint16 if bit_depth == 16 else np.uint8
img = np.random.randint(0, maxval, size=size).astype(dt)
color_type = 0 if len(size) == 2 else 2
sbit = (bit_depth - 1,)
if color_type == 2:
sbit = sbit * 3
f = io.BytesIO()
numpngw.write_png(f, img, sbit=sbit)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=bit_depth,
color_type=color_type,
interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
file_contents = check_sbit(file_contents, sbit=sbit,
color_type=color_type)
file_contents = check_idat(file_contents,
color_type=color_type,
bit_depth=bit_depth,
interlace=0,
img=img)
check_iend(file_contents)
def test_write_png_bkgd_palette(self):
# Test creation of RGB images with a background color
# when use_palette is True.
w = 6
h = 8
np.random.seed(123)
for bg_in_img in [True, False]:
bit_depth = 8
maxval = 2**bit_depth
bg = (maxval - 1, maxval - 3, maxval - 2)
img = np.arange(1, w*h*3 + 1, dtype=np.uint8).reshape(h, w, 3)
if bg_in_img:
img[-1, -1] = bg
f = io.BytesIO()
numpngw.write_png(f, img, background=bg, use_palette=True)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=bit_depth, color_type=3,
interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
# Check the PLTE chunk.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
self.assertEqual(chunk_type, b"PLTE")
plte = np.frombuffer(chunk_data, dtype=np.uint8).reshape(-1, 3)
expected_palette = np.arange(1, w*h*3+1,
dtype=np.uint8).reshape(-1, 3)
if bg_in_img:
expected_palette[-1] = bg
else:
expected_palette = np.append(expected_palette,
np.array([bg], dtype=np.uint8),
axis=0)
assert_array_equal(plte, expected_palette,
"unexpected palette %r %r" %
(plte[-2], expected_palette[-2]))
file_contents = check_bkgd(file_contents, color=bg, color_type=3,
palette=expected_palette)
file_contents = check_idat(file_contents, color_type=3,
bit_depth=bit_depth, interlace=0,
img=img, palette=plte)
check_iend(file_contents)
def test_text_and_phys(self):
img = np.arange(15).reshape(3, 5).astype(np.uint8)
text_list = [('Monster', 'Godzilla'), ('Creation Time', None)]
phys = (5, 4, 0)
f = io.BytesIO()
numpngw.write_png(f, img, filter_type=0, text_list=text_list,
phys=phys)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=img.shape[1],
height=img.shape[0],
bit_depth=8, color_type=0,
interlace=0)
file_contents = check_text(file_contents, b"Monster", b"Godzilla")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
file_contents = check_phys(file_contents, phys)
file_contents = check_idat(file_contents, color_type=0,
bit_depth=8, interlace=0,
img=img)
check_iend(file_contents)
def test_bad_text_keyword(self):
img = np.zeros((5, 10), dtype=np.uint8)
f = io.BytesIO()
# keyword too long
bad_keyword = "X"*90
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# keyword starts with a space
bad_keyword = " ABC"
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# keyword ends with a space
bad_keyword = "ABC "
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# keyword contains consecutive spaces
bad_keyword = "A BC"
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# keyword contains a nonprintable character (nonbreaking space,
# in this case)
bad_keyword = "ABC\xA0XYZ"
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# keyword cannot be encoded as latin-1
bad_keyword = "ABC\u1234XYZ"
text_list = [(bad_keyword, "foo")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# text string contains the null character
bad_keyword = "ABC"
text_list = [(bad_keyword, "foo\0bar")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
# text string cannot be encoded as latin-1
bad_keyword = "ABC"
text_list = [(bad_keyword, "foo\u1234bar")]
assert_raises(ValueError, numpngw.write_png, f, img,
dict(text_list=text_list))
def test_bad_phys(self):
img = np.zeros((5, 10), dtype=np.uint8)
f = io.BytesIO()
# Third value must be 0 or 1.
phys = (1, 2, 3)
assert_raises(ValueError, numpngw.write_png, f, img,
dict(phys=phys))
# pixel per unit values must be positive.
phys = (1, -2, 0)
assert_raises(ValueError, numpngw.write_png, f, img,
dict(phys=phys))
# pixel per unit values must be positive.
phys = (0, 2, 0)
assert_raises(ValueError, numpngw.write_png, f, img,
dict(phys=phys))
def test_too_many_colors_for_palette(self):
f = io.BytesIO()
img = np.zeros((4, 4, 3), dtype=np.uint8)
img[0, 0] = 1
img[0, 1] = 2
# img has 3 unique colors.
assert_raises(ValueError, numpngw.write_png, f, img,
use_palette=True, bitdepth=1)
class TestWritePngFilterType(unittest.TestCase):
def test_basic(self):
w = 22
h = 10
bitdepth = 8
np.random.seed(123)
img = np.random.randint(0, 256, size=(h, w)).astype(np.uint8)
f = io.BytesIO()
numpngw.write_png(f, img, filter_type=1)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents,
width=img.shape[1],
height=img.shape[0],
bit_depth=bitdepth, color_type=0,
interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
file_contents = check_idat(file_contents, color_type=0,
bit_depth=bitdepth, interlace=0,
img=img)
check_iend(file_contents)
class TestWriteApng(unittest.TestCase):
def test_write_apng_8bit_RGBA(self):
num_frames = 4
w = 25
h = 15
np.random.seed(12345)
seq_size = (num_frames, h, w, 4)
seq = np.random.randint(0, 256, size=seq_size).astype(np.uint8)
f = io.BytesIO()
numpngw.write_apng(f, seq)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=8, color_type=6, interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
file_contents = check_actl(file_contents, num_frames=num_frames,
num_plays=0)
sequence_number = 0
file_contents = check_fctl(file_contents,
sequence_number=sequence_number,
width=w, height=h)
sequence_number += 1
file_contents = check_idat(file_contents, color_type=6, bit_depth=8,
interlace=0, img=seq[0])
for k in range(1, num_frames):
file_contents = check_fctl(file_contents,
sequence_number=sequence_number,
width=w, height=h)
sequence_number += 1
# Check the fdAT chunk.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
self.assertEqual(chunk_type, b"fdAT")
actual_seq_num = struct.unpack("!I", chunk_data[:4])[0]
self.assertEqual(actual_seq_num, sequence_number)
sequence_number += 1
decompressed = zlib.decompress(chunk_data[4:])
b = np.frombuffer(decompressed, dtype=np.uint8)
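            # Each scanline in the decompressed stream is one filter-type
            # byte (0 for these tests) followed by 4*w RGBA bytes, hence
            # the (h, 4*w + 1) reshape below.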
lines = b.reshape(h, 4*w+1)
expected_col0 = np.zeros(h, dtype=np.uint8)
assert_array_equal(lines[:, 0], expected_col0)
img2 = lines[:, 1:].reshape(h, w, 4)
assert_array_equal(img2, seq[k])
check_iend(file_contents)
def test_default_image(self):
num_frames = 2
w = 16
h = 8
np.random.seed(12345)
seq_size = (num_frames, h, w, 4)
seq = np.random.randint(0, 256, size=seq_size).astype(np.uint8)
default_image = np.zeros((h, w, 4), dtype=np.uint8)
f = io.BytesIO()
numpngw.write_apng(f, seq, default_image=default_image)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=8, color_type=6, interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
file_contents = check_text(file_contents, b"Software",
numpngw._software_text().encode('latin-1'))
file_contents = check_actl(file_contents, num_frames=num_frames,
num_plays=0)
sequence_number = 0
file_contents = check_idat(file_contents, color_type=6, bit_depth=8,
interlace=0, img=default_image)
for k in range(0, num_frames):
file_contents = check_fctl(file_contents,
sequence_number=sequence_number,
width=w, height=h)
sequence_number += 1
# Check the fdAT chunk.
chunk_type, chunk_data, file_contents = next_chunk(file_contents)
self.assertEqual(chunk_type, b"fdAT")
actual_seq_num = struct.unpack("!I", chunk_data[:4])[0]
self.assertEqual(actual_seq_num, sequence_number)
sequence_number += 1
decompressed = zlib.decompress(chunk_data[4:])
b = np.frombuffer(decompressed, dtype=np.uint8)
lines = b.reshape(h, 4*w+1)
expected_col0 = np.zeros(h, dtype=np.uint8)
assert_array_equal(lines[:, 0], expected_col0)
img2 = lines[:, 1:].reshape(h, w, 4)
assert_array_equal(img2, seq[k])
check_iend(file_contents)
def test_write_apng_bkgd(self):
# Test creation of RGB images (color type 2), with a background color.
# Also test the chromaticity argument.
w = 16
h = 8
np.random.seed(123)
num_frames = 3
chromaticity = [[0.500, 0.750],
[0.125, 0.960],
[0.875, 0.625],
[0.750, 0.375]]
for bit_depth in [8, 16]:
maxval = 2**bit_depth
bg = (maxval - 1, maxval - 2, maxval - 3)
dt = np.uint16 if bit_depth == 16 else np.uint8
seq = np.random.randint(0, maxval,
size=(num_frames, h, w, 3)).astype(dt)
f = io.BytesIO()
numpngw.write_apng(f, seq, background=bg, filter_type=0,
chromaticity=chromaticity)
file_contents = f.getvalue()
file_contents = check_signature(file_contents)
file_contents = check_ihdr(file_contents, width=w, height=h,
bit_depth=bit_depth, color_type=2,
interlace=0)
file_contents = check_text(file_contents, b"Creation Time")
software = numpngw._software_text().encode('latin-1')
file_contents = check_text(file_contents, b"Software",
software)
t = 100000*np.array(chromaticity) + 0.5
expected_chrm = t.astype(np.uint32)
file_contents = check_chrm(file_contents, expected_chrm)
file_contents = check_bkgd(file_contents, color=bg, color_type=2)
file_contents = check_actl(file_contents, num_frames=num_frames,
num_plays=0)
sequence_number = 0
file_contents = check_fctl(file_contents,
sequence_number=sequence_number,
width=w, height=h)
sequence_number += 1
file_contents = check_idat(file_contents, color_type=2,
bit_depth=bit_depth,
interlace=0, img=seq[0])
for k in range(1, num_frames):
file_contents = check_fctl(file_contents,
sequence_number=sequence_number,
width=w, height=h)
sequence_number += 1
# Check the fdAT chunk.
nxt = next_chunk(file_contents)
chunk_type, chunk_data, file_contents = nxt
self.assertEqual(chunk_type, b"fdAT")
actual_seq_num = struct.unpack("!I", chunk_data[:4])[0]
self.assertEqual(actual_seq_num, sequence_number)
sequence_number += 1
decompressed = zlib.decompress(chunk_data[4:])
b = np.frombuffer(decompressed, dtype=np.uint8)
img2 = stream_to_array(b, w, h, color_type=2,
bit_depth=bit_depth, interlace=0)
assert_array_equal(img2, seq[k])
check_iend(file_contents)
def test_too_many_colors_for_palette(self):
f = io.BytesIO()
img1 = np.zeros((4, 4, 3), dtype=np.uint8)
img1[0, 0] = 1
img1[0, 1] = 2
img2 = np.zeros_like(img1)
# [img1, img2] has 3 unique colors.
assert_raises(ValueError, numpngw.write_apng, f, [img1, img2],
use_palette=True, bitdepth=1)
if __name__ == '__main__':
unittest.main()
|
Alwnikrotikz/huhamhire-hosts | refs/heads/master | tui/curses_d.py | 24 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# curses_d.py: Operations for TUI window.
#
# Copyleft (C) 2014 - huhamhire <[email protected]>
# =====================================================================
# Licensed under the GNU General Public License, version 3. You should
# have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# =====================================================================
__author__ = "huhamhire <[email protected]>"
import curses
import json
import os
import shutil
import socket
import urllib
import sys
from curses_ui import CursesUI
from _update import FetchUpdate
sys.path.append("..")
from util import CommonUtil, RetrieveData
from util import MakeHosts
class CursesDaemon(CursesUI):
"""
CursesDaemon class contains methods to deal with the operations related to
the TUI window of `Hosts Setup Utility`. Including methods to interactive
with users.
    .. note:: This class is a subclass of the
        :class:`~tui.curses_ui.CursesUI` class.
:ivar dict _update: Update information of the current data file on server.
:ivar int _writable: Indicating whether the program is run with admin/root
privileges. The value could be `1` or `0`.
.. seealso:: `_update` and `_writable` in
:class:`~gui.qdialog_d.QDialogDaemon` class.
:ivar dict make_cfg: A set of module selection control bytes used to
control whether a specified method is used or not while generate a
hosts file.
    * `Keys` of :attr:`make_cfg` are typically 8-bit control bytes
      indicating which part of the hosts data file would be affected by
      the corresponding `Value`.
+----+----------------+
|Key |Part |
+====+================+
|0x02|Localhost |
+----+----------------+
|0x08|Shared hosts |
+----+----------------+
|0x10|IPv4 hosts |
+----+----------------+
|0x20|IPv6 hosts |
+----+----------------+
|0x40|AD block hosts |
+----+----------------+
* `Values` of :attr:`make_cfg` are typically 16-bit control bytes that
decides which of the modules in a specified part would be inserted
into the `hosts` file.
* `Value` of `Localhost` part. The Value used in `Localhost` part
are usually bytes indicating the current operating system.
+---------------+-------------------+
|Hex |OS |
+===============+===================+
|0x0001 |Windows |
+---------------+-------------------+
|0x0002 |Linux, Unix |
+---------------+-------------------+
|0x0004 |Mac OS X |
+---------------+-------------------+
* `Values` of `Shared hosts`, `IPv4 hosts`, `IPv6 hosts`, and
`AD block hosts` parts are usually sum of module IDs selected
by user.
.. note::
            If the modules selected in a specified part have IDs `0x0002` and
            `0x0010`, the value here should be `0x0002 + 0x0010 = 0x0012`,
which is `0b0000000000000010 + 0b0000000000010000 =
0b0000000000010010` in binary.
.. warning::
Only one bit could be `1` in the binary form of a module ID,
which means `0b0000000000010010` is an INVALID module ID while
it could be a VALID `Value` in `make_cfg`.
:ivar str platform: Platform of current operating system. The value could
be `Windows`, `Linux`, `Unix`, `OS X`, and of course `Unknown`.
:ivar str hostname: The hostname of current operating system.
.. note:: This attribute would only be used on linux.
:ivar str hosts_path: The absolute path to the hosts file on current
operating system.
:ivar str make_mode: Operation mode for making hosts file. The valid value
could be one of `system`, `ansi`, and `utf-8`.
.. seealso:: :attr:`make_mode` in
:class:`~util.makehosts.MakeHosts` class.
:ivar str make_path: Temporary path to store generated hosts file. The
default value of :attr:`make_path` is "`./hosts`".
:ivar list _ops_keys: Hot keys used to start a specified operation.
Default operation keys are `F5`, `F6`, and `F10`.
:ivar list _hot_keys: Hot keys used to select a item or confirm an
operation. And the default :attr:`_hot_keys` is defined as::
_hot_keys = [curses.KEY_UP, curses.KEY_DOWN, 10, 32]
.. seealso:: :attr:`~tui.curses_ui.CursesUI.funckeys` in
:class:`~tui.curses_ui.CursesUI` class.
"""
_update = {}
_writable = 0
make_cfg = {}
platform = ''
hostname = ''
hosts_path = ''
make_mode = ''
make_path = "./hosts"
_ops_keys = [curses.KEY_F5, curses.KEY_F6, curses.KEY_F10]
_hot_keys = [curses.KEY_UP, curses.KEY_DOWN, 10, 32]
def __init__(self):
super(CursesDaemon, self).__init__()
self.check_writable()
def check_writable(self):
"""
Check if current session has write privileges to the hosts file.
        .. note:: If the current session does not have write privileges to
            the hosts file of the current system, a warning message box would
            pop up.
        .. note:: All operations that would change the `hosts` file on the
            current system can only be done while the current session has
            write privileges to the file.
"""
self._writable = CommonUtil.check_privileges()[1]
if not self._writable:
self.messagebox("Please check if you have writing\n"
"privileges to the hosts file!", 1)
exit()
def session_daemon(self):
"""
Operations processed while running a TUI session of `Hosts Setup
Utility`.
:return: A flag indicating whether to reload the current session or
all operations have been finished. The return value could only be
`0` or `1`. To be specific:
==== =========
flag operation
==== =========
0 Finish
1 Reload
==== =========
.. note:: Reload operation is called only when a new data file is
retrieved from server.
:rtype: int
        .. note:: If the hosts data file does not exist in the current
            working directory, a warning message box would pop up. Operations
            that change the hosts file on the current system cannot be done
            until a new data file has been downloaded.
"""
screen = self._stdscr.subwin(0, 0, 0, 0)
screen.keypad(1)
# Draw Menu
self.banner()
self.footer()
# Key Press Operations
key_in = None
tab = 0
pos = 0
tab_entry = [self.configure_settings, self.select_func]
while key_in != 27:
self.setup_menu()
self.status()
self.process_bar(0, 0, 0, 0)
for i, sec in enumerate(tab_entry):
tab_entry[i](pos if i == tab else None)
if key_in is None:
test = self.settings[0][2][0]["test_url"]
self.check_connection(test)
key_in = screen.getch()
if key_in == 9:
if self.choice == [[], []]:
tab = 0
else:
tab = not tab
pos = 0
elif key_in in self._hot_keys:
pos = tab_entry[tab](pos, key_in)
elif key_in in self._ops_keys:
if key_in == curses.KEY_F10:
if self._funcs == [[], []]:
self.messagebox("No data file found! Press F6 to get "
"data file first.", 1)
else:
msg = "Apply Changes to hosts file?"
confirm = self.messagebox(msg, 2)
if confirm:
self.set_config_bytes()
self.make_mode = "system"
maker = MakeHosts(self)
maker.make()
self.move_hosts()
elif key_in == curses.KEY_F5:
self._update = self.check_update()
elif key_in == curses.KEY_F6:
if self._update == {}:
self._update = self.check_update()
# Check if data file up-to-date
if self.new_version():
self.fetch_update()
return 1
else:
self.messagebox("Data file is up-to-date!", 1)
else:
pass
return 0
def configure_settings(self, pos=None, key_in=None):
"""
        Perform operations to configure settings if the `Configure Setting`
        frame is active, or just draw the `Configure Setting` frame with no
        items selected while it is inactive.
.. note:: Whether the `Configure Setting` frame is inactive is decided
by if :attr:`pos` is `None` or not.
=========== ========
:attr:`pos` Status
=========== ========
None Inactive
int Active
=========== ========
:param pos: Index of selected item in `Configure Setting` frame. The
default value of `pos` is `None`.
:type pos: int or None
:param key_in: A flag indicating the key pressed by user. The default
value of `key_in` is `None`.
:type key_in: int or None
:return: Index of selected item in `Configure Setting` frame.
:rtype: int or None
"""
id_num = range(len(self.settings))
if pos is not None:
if key_in == curses.KEY_DOWN:
pos = list(id_num[1:] + id_num[:1])[pos]
elif key_in == curses.KEY_UP:
pos = list(id_num[-1:] + id_num[:-1])[pos]
elif key_in in [10, 32]:
self.sub_selection(pos)
self.info(pos, 0)
self.configure_settings_frame(pos)
return pos
def select_func(self, pos=None, key_in=None):
"""
Perform operations if `function selection list` is active, or just
draw the `function selection list` with no items selected while it is
inactive.
.. note:: Whether the `function selection list` is inactive is decided
by if :attr:`pos` is `None` or not.
.. seealso:: :meth:`~tui.curses_d.CursesDaemon.configure_settings`.
:param pos: Index of selected item in `function selection list`. The
default value of `pos` is `None`.
:type pos: int or None
:param key_in: A flag indicating the key pressed by user. The default
value of `key_in` is `None`.
:type key_in: int or None
:return: Index of selected item in `function selection list`.
:rtype: int or None
"""
list_height = 15
ip = self.settings[1][1]
# Key Press Operations
item_len = len(self.choice[ip])
item_sup, item_inf = self._item_sup, self._item_inf
if pos is not None:
if item_len > list_height:
if pos <= 1:
item_sup = 0
item_inf = list_height - 1
elif pos >= item_len - 2:
item_sup = item_len - list_height + 1
item_inf = item_len
else:
item_sup = 0
item_inf = item_len
if key_in == curses.KEY_DOWN:
pos += 1
if pos >= item_len:
pos = 0
if pos not in range(item_sup, item_inf):
item_sup += 2 if item_sup == 0 else 1
item_inf += 1
elif key_in == curses.KEY_UP:
pos -= 1
if pos < 0:
pos = item_len - 1
if pos not in range(item_sup, item_inf):
item_inf -= 2 if item_inf == item_len else 1
item_sup -= 1
elif key_in in [10, 32]:
self._funcs[ip][pos] = not self._funcs[ip][pos]
mutex = RetrieveData.get_ids(self.choice[ip][pos][2])
for c_id, c in enumerate(self.choice[ip]):
if c[0] == self.choice[ip][pos][0]:
if c[1] in mutex and self._funcs[ip][c_id] == 1:
self._funcs[ip][c_id] = 0
self.info(pos, 1)
else:
item_sup = 0
if item_len > list_height:
item_inf = list_height - 1
else:
item_inf = item_len
self.show_funclist(pos, item_sup, item_inf)
return pos
def sub_selection(self, pos):
"""
        Let the user choose settings from the `Selection Dialog` specified by
:attr:`pos`.
:param pos: Index of selected item in `Configure Setting` frame.
:type pos: int
.. warning:: The value of `pos` MUST NOT be `None`.
.. seealso:: :meth:`~tui.curses_ui.CursesUI.sub_selection_dialog` in
:class:`~tui.curses_ui.CursesUI`.
"""
screen = self.sub_selection_dialog(pos)
i_pos = self.settings[pos][1]
# Key Press Operations
id_num = range(len(self.settings[pos][2]))
key_in = None
while key_in != 27:
self.sub_selection_dialog_items(pos, i_pos, screen)
key_in = screen.getch()
if key_in == curses.KEY_DOWN:
i_pos = list(id_num[1:] + id_num[:1])[i_pos]
elif key_in == curses.KEY_UP:
i_pos = list(id_num[-1:] + id_num[:-1])[i_pos]
elif key_in in [10, 32]:
if pos == 0 and i_pos != self.settings[pos][1]:
test = self.settings[pos][2][i_pos]["test_url"]
self.check_connection(test)
self.settings[pos][1] = i_pos
return
def check_connection(self, url):
"""
Check connection status to the server currently selected by user and
show a status box indicating current operation.
        :param url: The link of the server chosen by the user. This string
            could be a domain name or the IP address of a server.
.. seealso:: :attr:`link` in
:meth:`~util.common.CommonUtil.check_connection`.
:type url: str
:return: A flag indicating connection status is good or not.
.. seealso:: :meth:`~util.common.CommonUtil.check_connection`. in
:class:`~util.common.CommonUtil` class.
:rtype: int
"""
self.messagebox("Checking Server Status...")
conn = CommonUtil.check_connection(url)
if conn:
self.statusinfo[0][1] = "OK"
self.statusinfo[0][2] = "GREEN"
else:
self.statusinfo[0][1] = "Error"
self.statusinfo[0][2] = "RED"
self.status()
return conn
def check_update(self):
"""
Check the metadata of the latest hosts data file from server and
show a status box indicating current operation.
:return: A dictionary containing the `Version`, `Release Date` of
current hosts data file and the `Latest Version` of the data file
on server.
IF error occurs while checking update, the dictionary would be
defined as::
{"version": "[Error]"}
:rtype: dict
"""
self.messagebox("Checking Update...")
srv_id = self.settings[0][1]
url = self.settings[0][2][srv_id]["update"] + self.infofile
try:
socket.setdefaulttimeout(5)
url_obj = urllib.urlopen(url)
j_str = url_obj.read()
url_obj.close()
info = json.loads(j_str)
        except Exception:
info = {"version": "[Error]"}
self.hostsinfo["Latest"] = info["version"]
self.status()
return info
def new_version(self):
"""
Compare version of local data file to the version from the server.
:return: A flag indicating whether the local data file is up-to-date
or not.
====== ============================================
Return Data file status
====== ============================================
1 The version of data file on server is newer.
0 The local data file is up-to-date.
====== ============================================
:rtype: int
"""
local_ver = self.hostsinfo["Version"]
if local_ver == "N/A":
return 1
server_ver = self._update["version"]
local_ver = local_ver.split('.')
server_ver = server_ver.split('.')
        for i, ver_num in enumerate(local_ver):
            # Compare components numerically; comparing the strings would
            # rank '10' below '9'.
            if int(server_ver[i]) > int(ver_num):
                return 1
return 0
def fetch_update(self):
"""
Retrieve the latest hosts data file from server and show a status box
indicating current operation.
"""
self.messagebox("Downloading...")
fetch_d = FetchUpdate(self)
fetch_d.get_file()
def set_config_bytes(self):
"""
Calculate the module configuration byte words by the selection from
function list on the main dialog.
"""
ip_flag = self.settings[1][1]
selection = {}
localhost_word = {
"Windows": 0x0001, "Linux": 0x0002,
"Unix": 0x0002, "OS X": 0x0004}[self.platform]
selection[0x02] = localhost_word
ch_parts = [0x08, 0x20 if ip_flag else 0x10, 0x40]
# Set customized module if exists
if os.path.isfile(self.custom):
ch_parts.insert(0, 0x04)
slices = self.slices[ip_flag]
for i, part in enumerate(ch_parts):
part_cfg = self._funcs[ip_flag][slices[i]:slices[i + 1]]
part_word = 0
            for j, cfg in enumerate(part_cfg):
                part_word += cfg << j
selection[part] = part_word
self.make_cfg = selection
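    # Worked example of the packing above (hypothetical checkbox states):
    # part_cfg == [0, 1, 0, 0, 1] packs to (1 << 1) + (1 << 4) == 0x12.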
def move_hosts(self):
"""
Move hosts file to the system path after making operations are
finished.
"""
filepath = "hosts"
try:
shutil.copy2(filepath, self.hosts_path)
except IOError:
os.remove(filepath)
return
os.remove(filepath)
self.messagebox("Operation completed!", 1)
|
starcraftman/pakit | refs/heads/master | pakit/shell.py | 2 | # pylint: disable=too-many-lines
"""
All code related to running system commands.
Command: Class to run arbitrary system commands.
Archive: Used to fetch a source archive.
Git: Used to fetch a git repository.
Hg: Used to fetch a mercurial repository.
"""
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod, abstractproperty
import atexit
import functools
import glob
import inspect
import logging
import os
import shlex
import shutil
import signal
import subprocess
import sys
from tempfile import NamedTemporaryFile as TempFile
import threading
import time
import hashlib
import tarfile
# pylint: disable=import-error
try:
import urllib2 as ulib
except ImportError: # pragma: no cover
import urllib.request as ulib # pylint: disable=no-name-in-module
# pylint: enable=import-error
import zipfile
import pakit.conf
from pakit.exc import (
PakitError, PakitCmdError, PakitCmdTimeout, PakitLinkError
)
EXT_FUNCS = {
'application/x-7z-compressed': 'extract_7z',
'application/x-rar': 'extract_rar',
'application/gzip': 'extract_tar_gz',
'application/x-gzip': 'extract_tar_gz',
'application/x-bzip2': 'extract_tar_gz',
'application/x-tar': 'extract_tar_gz',
'application/x-xz': 'extract_tar_xz',
'application/zip': 'extract_zip',
}
@atexit.register
def cmd_cleanup():
"""
Cleans up any command stdout files left over,
"""
shutil.rmtree(pakit.conf.TMP_DIR)
def check_connectivity():
"""
    Returns True if and only if github.com can be reached.
"""
connected = True
try:
ulib.urlopen('https://github.com/starcraftman/pakit', timeout=2)
except ulib.URLError:
connected = False
return connected
def user_input(msg):
"""
Get user input, works on python 2 and 3.
Args:
msg: The message to print to user.
Returns:
Whatever the user typed.
"""
if sys.version_info < (3, 0):
return raw_input(msg)
else: # pragma: no cover
return input(msg) # pylint: disable=bad-builtin
def wrap_extract(extract_func):
"""
    A decorator that handles some boilerplate shared between
    extract functions.
    Condition: extract_func must extract the folder containing the source
    into tmp_dir. The rest is handled automatically.
"""
@functools.wraps(extract_func)
def inner(filename, target):
"""
Inner part of decorator.
"""
tmp_dir = os.path.join(pakit.conf.TMP_DIR, os.path.basename(filename))
extract_func(filename, tmp_dir)
extracted = glob.glob(os.path.join(tmp_dir, '*'))[0]
shutil.move(extracted, target)
os.rmdir(tmp_dir)
return inner
@wrap_extract
def extract_7z(filename, tmp_dir):
"""
Extracts a 7z archive
"""
try:
Command('7z x -o{tmp} {file}'.format(file=filename,
tmp=tmp_dir)).wait()
except (OSError, PakitCmdError):
raise PakitCmdError('Need `7z` to extract: ' + filename)
try:
os.rmdir(tmp_dir)
except OSError:
pass
@wrap_extract
def extract_rar(filename, tmp_dir):
"""
Extracts a rar archive
"""
success = False
cmd_str = 'rar x {file} {tmp}'.format(file=filename, tmp=tmp_dir)
for cmd in [cmd_str, 'un' + cmd_str]:
try:
os.makedirs(tmp_dir)
Command(cmd).wait()
success = True
except (OSError, PakitCmdError):
pass
finally:
try:
os.rmdir(tmp_dir)
except OSError:
pass
if not success:
raise PakitCmdError('Need `rar` or `unrar` command to extract: ' +
filename)
@wrap_extract
def extract_tar_gz(filename, tmp_dir):
"""
Extracts a tar.gz archive to a temp dir
"""
tarf = tarfile.open(filename)
tarf.extractall(tmp_dir)
@wrap_extract
def extract_tar_xz(filename, tmp_dir):
"""
Extracts a tar.xz archive to a temp dir
"""
tar_file = filename.split('.')
tar_file = tar_file[0:-2] if 'tar' in tar_file else tar_file[0:-1]
tar_file = os.path.join(os.path.dirname(filename),
'.'.join(tar_file + ['tar']))
try:
os.makedirs(tmp_dir)
except OSError: # pragma: no cover
pass
try:
Command('xz --keep --decompress ' + filename).wait()
Command('tar -C {0} -xf {1}'.format(tmp_dir, tar_file)).wait()
except (OSError, PakitCmdError):
raise PakitCmdError('Need commands `xz` and `tar` to extract: ' +
filename)
finally:
os.remove(tar_file)
try:
os.rmdir(tmp_dir)
except OSError:
pass
@wrap_extract
def extract_zip(filename, tmp_dir):
"""
Extracts a zip archive
"""
zipf = zipfile.ZipFile(filename)
zipf.extractall(tmp_dir)
def get_extract_func(arc_path):
"""
Check mimetype of archive to select extraction method.
Args:
arc_path: The absolute path to an archive.
Returns:
The function of the form extract(filename, target).
Raises:
PakitError: Could not determine function from mimetype.
"""
cmd = Command('file --mime-type ' + arc_path)
cmd.wait()
mtype = cmd.output()[0].split()[1]
if mtype not in EXT_FUNCS.keys():
raise PakitError('Unsupported Archive: mimetype ' + mtype)
return getattr(sys.modules[__name__], EXT_FUNCS[mtype])
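# Minimal usage sketch of the dispatch above (archive path is illustrative):
#   extract = get_extract_func('/tmp/pakit/src.tar.gz')
#   extract('/tmp/pakit/src.tar.gz', '/tmp/pakit/builds/src')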
def hash_archive(archive, hash_alg='sha256'):
"""
Hash an archive.
Args:
archive: Path to an archive.
hash_alg: Hashing algorithm to use, available algorithms
are in hashlib.algorithms
Returns:
The hex based hash of the archive, using hash_alg.
"""
hasher = hashlib.new(hash_alg)
blk_size = 1024 ** 2
with open(archive, 'rb') as fin:
block = fin.read(blk_size)
while block:
hasher.update(block)
block = fin.read(blk_size)
return hasher.hexdigest()
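# Example calls (path is illustrative; any hashlib algorithm name works):
#   digest = hash_archive('/tmp/pakit/src.tar.gz')          # sha256 hex
#   md5sum = hash_archive('/tmp/pakit/src.tar.gz', 'md5')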
def common_suffix(path1, path2):
"""
Given two paths, find the largest common suffix.
Args:
path1: The first path.
path2: The second path.
"""
suffix = []
parts1 = path1.split(os.path.sep)
parts2 = path2.split(os.path.sep)
if len(parts2) < len(parts1):
parts1, parts2 = parts2, parts1
while len(parts1) and parts1[-1] == parts2[-1]:
suffix.insert(0, parts1.pop())
parts2.pop()
return os.path.sep.join(suffix)
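# Example: common_suffix('/opt/pakit/bin/ag', '/tmp/links/bin/ag') returns
# 'bin/ag', the longest path suffix shared by both arguments.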
def walk_and_link(src, dst):
"""
    Recurse down the tree from src and symbolically link
the files to their counterparts under dst.
Args:
src: The source path with the files to link.
dst: The destination path where links should be made.
Raises:
PakitLinkError: When anything goes wrong linking.
"""
for dirpath, _, filenames in os.walk(src, followlinks=True, topdown=True):
link_all_files(dirpath, dirpath.replace(src, dst), filenames)
def walk_and_unlink(src, dst):
"""
Recurse down the tree from src and unlink the files
that have counterparts under dst.
Args:
src: The source path with the files to link.
dst: The destination path where links should be removed.
"""
for dirpath, _, filenames in os.walk(src, followlinks=True, topdown=False):
unlink_all_files(dirpath, dirpath.replace(src, dst), filenames)
try:
os.makedirs(dst)
except OSError:
pass
def walk_and_unlink_all(link_root, build_root):
"""
Walk the tree from bottom up and remove all symbolic links
pointing into the build_root. Cleans up any empty folders.
Args:
build_root: The path where all installations are. Any symlink
pakit makes will have this as a prefix of the target path.
link_root: All links are located below this folder.
"""
for dirpath, _, filenames in os.walk(link_root, followlinks=True,
topdown=False):
to_remove = []
for fname in filenames:
abs_file = os.path.join(dirpath, fname)
if os.path.realpath(abs_file).find(build_root) == 0:
to_remove.append(fname)
unlink_all_files(dirpath, dirpath, to_remove)
try:
os.makedirs(link_root)
except OSError:
pass
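# Example (paths illustrative): with build_root '/opt/pakit/builds', a link
# '/opt/pakit/links/bin/ag' resolving to '/opt/pakit/builds/ag/bin/ag' is
# removed, while links resolving outside build_root are left alone.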
def link_all_files(src, dst, filenames):
"""
From src directory link all filenames into dst.
Args:
src: The directory where the source files exist.
dst: The directory where the links should be made.
filenames: A list of filenames in src.
"""
try:
os.makedirs(dst)
except OSError:
pass # The folder already existed
for fname in filenames:
sfile = os.path.join(src, fname)
dfile = os.path.join(dst, fname)
try:
os.symlink(sfile, dfile)
except OSError:
msg = 'Could not symlink {0} -> {1}'.format(sfile, dfile)
logging.error(msg)
raise PakitLinkError(msg)
def unlink_all_files(_, dst, filenames):
"""
Unlink all links in dst that are in filenames.
    Args:
        _: Unused; kept for signature parity with link_all_files.
        dst: The directory containing the links to remove.
        filenames: A list of filenames whose links should be removed.
"""
for fname in filenames:
try:
os.remove(os.path.join(dst, fname))
except OSError:
pass # The link was not there
try:
os.rmdir(dst)
except OSError:
pass # Folder probably had files left.
def link_man_pages(link_dir):
"""
Silently links project man pages into link dir.
"""
src = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'extra')
dst = os.path.join(link_dir, 'share', 'man', 'man1')
try:
os.makedirs(dst)
except OSError:
pass
for page in glob.glob(os.path.join(src, '*.1')):
try:
os.symlink(page, page.replace(src, dst))
except OSError: # pragma: no cover
pass
def unlink_man_pages(link_dir):
"""
Unlink all man pages from the link directory.
"""
src = os.path.join(os.path.dirname(__file__), 'extra')
dst = os.path.join(link_dir, 'share', 'man', 'man1')
for page in glob.glob(os.path.join(src, '*.1')):
try:
os.remove(page.replace(src, dst))
except OSError: # pragma: no cover
pass
for paths in os.walk(link_dir, topdown=False):
try:
os.rmdir(paths[0])
except OSError: # pragma: no cover
pass
try:
os.makedirs(link_dir)
except OSError: # pragma: no cover
pass
def vcs_factory(uri, **kwargs):
"""
Given a uri, match it with the right VersionRepo subclass.
Args:
uri: The version control URI.
Returns:
The instantiated VersionRepo subclass. Any kwargs, are
passed along to the constructor of the subclass.
Raises:
PakitError: The URI is not supported.
"""
subclasses = []
for _, obj in inspect.getmembers(sys.modules[__name__]):
if inspect.isclass(obj) and issubclass(obj, VersionRepo):
subclasses.append(obj)
subclasses.remove(VersionRepo)
for cls in subclasses:
if cls.valid_uri(uri):
return cls(uri, **kwargs)
    raise PakitError('Unsupported URI: ' + uri)
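# Usage sketch (URI is hypothetical; Git.valid_uri would match it first):
#   repo = vcs_factory('https://github.com/user/project.git', target='/tmp/p')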
def write_config(config_file):
"""
Writes the DEFAULT config to the config file.
Overwrites the file if present.
Raises:
PakitError: File exists and is a directory.
PakitError: File could not be written to.
"""
try:
os.remove(config_file)
except OSError:
if os.path.isdir(config_file):
raise PakitError('Config path is a directory.')
try:
config = pakit.conf.Config(config_file)
config.reset()
config.write()
    except (IOError, OSError):
        # Use config_file here: `config` may be unbound if the constructor
        # itself raised.
        raise PakitError('Failed to write to ' + config_file)
class Fetchable(object):
"""
Establishes an abstract interface for fetching source code.
Subclasses are destined for Recipe.repos to be used to retrieve source
from the wild.
Attributes:
target: The folder the source code should end up in.
uri: The location of the source code.
"""
__metaclass__ = ABCMeta
def __init__(self, uri, target):
self.target = target
self.uri = uri
@abstractmethod
def __enter__(self):
"""
Guarantees that source is available at target
"""
raise NotImplementedError
@abstractmethod
def __exit__(self, exc_type, exc_value, exc_tb):
"""
Handles errors as needed
"""
raise NotImplementedError
@abstractproperty
def ready(self):
"""
True iff the source code is available at target
"""
raise NotImplementedError
@abstractproperty
def src_hash(self):
"""
A hash that identifies the source snapshot
"""
raise NotImplementedError
def clean(self):
"""
Purges the source tree from the system
"""
Command('rm -rf ' + self.target).wait()
@abstractmethod
def download(self):
"""
Retrieves code from the remote, may require additional steps
"""
raise NotImplementedError
class Dummy(Fetchable):
"""
Creates the target directory when invoked.
This is a dummy repository, useful for testing and when a recipe
does NOT rely on a source repository or archive.
"""
def __init__(self, uri=None, **kwargs):
"""
Constructor for a Dummy repository.
Target must be specified before entering context.
Args:
uri: Default None, serves no purpose.
Kwargs:
target: Path that will be created on the system.
"""
super(Dummy, self).__init__(uri, kwargs.get('target', None))
def __str__(self):
return 'DummyTask: No source code to fetch.'
def __enter__(self):
"""
Guarantees that source is available at target
"""
try:
self.clean()
os.makedirs(self.target)
except OSError:
raise PakitError('Could not create folder: ' + self.target)
def __exit__(self, exc_type, exc_value, exc_tb):
"""
Handles errors as needed
"""
pass
@property
def ready(self):
"""
True iff the source code is available at target
"""
return os.path.isdir(self.target) and len(os.listdir(self.target)) == 0
@property
def src_hash(self):
"""
A hash that identifies the source snapshot
"""
return 'dummy_hash'
def download(self):
"""
Retrieves code from the remote, may require additional steps
"""
raise NotImplementedError
class Archive(Fetchable):
"""
Retrieve an archive from a remote URI and extract it to target.
Supports any extension that has an extract function in this module
of the form `extract_ext`. For example, if given a zip will use the
extract_zip function.
Attributes:
actual_hash: The actual sha256 hash of the archive.
filename: The filename of the archive.
src_hash: The expected sha256 hash of the archive.
target: The folder the source code should end up in.
uri: The location of the source code.
"""
def __init__(self, uri, **kwargs):
"""
Constructor for Archive. *uri* and *hash* are required.
Args:
uri: The URI to retrieve the archive from.
Kwargs:
filename: The filename to use, else a tempfile will be used.
hash: The sha256 hash of the archive.
target: Path on system to extract to.
"""
super(Archive, self).__init__(uri, kwargs.get('target', None))
self.__src_hash = kwargs.get('hash', '')
self.filename = kwargs.get('filename')
if self.filename is None:
self.__tfile = TempFile(mode='wb', delete=False,
dir=pakit.conf.TMP_DIR,
prefix='arc')
self.filename = self.__tfile.name
def __enter__(self):
"""
Guarantees that source is available at target
"""
if self.ready:
return
logging.info('Downloading %s', self.arc_file)
self.download()
logging.info('Extracting %s to %s', self.arc_file, self.target)
get_extract_func(self.arc_file)(self.arc_file, self.target)
with open(os.path.join(self.target, '.archive'), 'wb') as fout:
fout.write(self.src_hash.encode())
os.remove(self.arc_file)
def __exit__(self, exc_type, exc_value, exc_tb):
"""
Handles errors as needed
"""
self.clean()
def __str__(self):
return '{name}: {uri}'.format(name=self.__class__.__name__,
uri=self.uri)
@property
def arc_file(self):
"""
The path to the downloaded archive.
"""
target = self.target
        if target.find('./') == 0:
            # Strip only the leading './'; replace() would also mangle
            # interior './' components.
            target = target[len('./'):]
return os.path.join(os.path.dirname(target), self.filename)
@property
def ready(self):
"""
True iff the source code is available at target
"""
try:
with open(os.path.join(self.target, '.archive'), 'rb') as fin:
file_hash = fin.readlines()[0].decode()
return file_hash == self.src_hash
except IOError:
return False
@property
def src_hash(self):
"""
The expected hash of the archive.
"""
return self.__src_hash
def actual_hash(self):
"""
The actual hash of the downloaded archive file.
"""
arc_clean = False
if not os.path.exists(self.arc_file):
self.download()
arc_clean = True
hash_str = hash_archive(self.arc_file)
if arc_clean:
os.remove(self.arc_file)
return hash_str
def clean(self):
"""
Guarantee no trace of archive file or source target.
"""
try:
os.remove(self.arc_file)
except OSError:
pass
super(Archive, self).clean()
def download(self):
"""
Retrieves the archive from the remote URI.
If the URI is a local file, simply copy it.
"""
if not os.path.isfile(self.uri):
resp = ulib.urlopen(self.uri, timeout=30)
with open(self.arc_file, 'wb') as fout:
fout.write(resp.read())
elif self.uri != self.arc_file:
shutil.copy(self.uri, self.arc_file)
arc_hash = self.actual_hash()
if arc_hash != self.src_hash:
self.clean()
raise PakitError('Hash mismatch on archive.\n Expected: {exp}'
'\n Actual: {act}'.format(exp=self.src_hash,
act=arc_hash))
class VersionRepo(Fetchable):
"""
Base class for all version control support.
When a 'tag' is set, check out a specific revision of the repository.
When a 'branch' is set, checkout out the latest commit on the branch of
the repository.
These two options are mutually exclusive.
Attributes:
branch: A branch to checkout during clone.
src_hash: The hash of the current commit.
tag: A tag to checkout during clone.
target: The folder the source code should end up in.
uri: The location of the source code.
"""
def __init__(self, uri, **kwargs):
super(VersionRepo, self).__init__(uri, kwargs.get('target', None))
tag = kwargs.get('tag', None)
if tag is not None:
self.__tag = tag
self.on_branch = False
else:
self.__tag = kwargs.get('branch', None)
self.on_branch = True
def __enter__(self):
"""
Guarantees that the repo is downloaded and on the right commit.
"""
if not self.ready:
self.clean()
self.download()
else:
self.checkout()
if self.on_branch:
self.update()
def __exit__(self, exc_type, exc_value, exc_tb):
"""
Handles errors as needed
"""
self.reset()
def __str__(self):
if self.on_branch:
tag = 'HEAD' if self.tag is None else self.tag
tag = 'branch: ' + tag
else:
tag = 'tag: ' + self.tag
return '{name}: {tag}, uri: {uri}'.format(
name=self.__class__.__name__, uri=self.uri, tag=tag)
@property
def branch(self):
"""
A branch of the repository.
"""
return self.__tag
@branch.setter
def branch(self, new_branch):
"""
Set the branch to checkout from the repository.
"""
self.on_branch = True
self.__tag = new_branch
@property
def tag(self):
"""
A revision or tag of the repository.
"""
return self.__tag
@tag.setter
def tag(self, new_tag):
"""
Set the tag to checkout from the repository.
"""
self.on_branch = False
self.__tag = new_tag
@abstractproperty
def ready(self):
"""
Returns true iff the repository is available and the
right tag or branch is checked out.
"""
raise NotImplementedError
@abstractproperty
def src_hash(self):
"""
The hash of the current commit.
"""
raise NotImplementedError
@staticmethod
def valid_uri(uri):
"""
Validate that the supplied uri is handled by this class.
Returns:
True if the URI is valid for this class, else False.
"""
raise NotImplementedError
@abstractmethod
def checkout(self):
"""
Equivalent to git checkout for the version system.
"""
raise NotImplementedError
@abstractmethod
def download(self):
"""
Download the repository to the target.
"""
raise NotImplementedError
@abstractmethod
def reset(self):
"""
Clears away all build files from repo.
"""
raise NotImplementedError
@abstractmethod
def update(self):
"""
Fetches latest commit when branch is set.
"""
raise NotImplementedError
class Git(VersionRepo):
"""
Fetch a git repository from the given URI.
When a 'tag' is set, check out a specific revision of the repository.
When a 'branch' is set, checkout out the latest commit on the branch of
the repository.
If neither provided, will checkout 'master' branch.
These two options are mutually exclusive.
Attributes:
branch: A branch to checkout during clone.
src_hash: The hash of the current commit.
tag: A tag to checkout during clone.
target: The folder the source code should end up in.
uri: The location of the source code.
"""
def __init__(self, uri, **kwargs):
"""
Constructor for a git repository.
By default checks out the default branch.
The *branch* and *tag* kwargs are mutually exclusive.
Args:
uri: The URI that hosts the repository.
Kwargs:
branch: A branch to checkout and track.
tag: Any fixed tag like a revision or tagged commit.
target: Path on system to clone to.
"""
super(Git, self).__init__(uri, **kwargs)
        if self.on_branch and self.branch is None:
            self.branch = 'master'
@property
def ready(self):
"""
Returns true iff the repository is available and
the right tag or branch is checked out.
"""
if not os.path.exists(os.path.join(self.target, '.git')):
return False
cmd = Command('git remote show origin', self.target)
cmd.wait()
return self.uri in cmd.output()[1]
@property
def src_hash(self):
"""
Return the current hash of the repository.
"""
with self:
cmd = Command('git rev-parse HEAD', self.target)
cmd.wait()
return cmd.output()[0]
@staticmethod
def valid_uri(uri):
"""
Validate that the supplied uri is handled by this class.
Returns:
True if the URI is valid for this class, else False.
"""
try:
cmd = Command('git ls-remote ' + uri)
cmd.wait()
return cmd.rcode == 0
except PakitError:
return False
def checkout(self):
"""
Checkout the right tag or branch.
"""
Command('git checkout ' + self.tag, self.target).wait()
def download(self):
"""
Download the repository to the target.
"""
tag = '' if self.tag is None else '-b ' + self.tag
cmd = Command('git clone --recursive {tag} {uri} {target}'.format(
tag=tag, uri=self.uri, target=self.target))
cmd.wait()
def reset(self):
"""
Clears away all build files from repo.
"""
Command('git clean -f', self.target).wait()
def update(self):
"""
Fetches latest commit when branch is set.
"""
cmd = Command('git fetch origin +{0}:new{0}'.format(self.branch),
self.target)
cmd.wait()
cmd = Command('git merge --ff-only new' + self.branch, self.target)
cmd.wait()
class Hg(VersionRepo):
"""
Fetch a mercurial repository from the given URI.
When a 'tag' is set, check out a specific revision of the repository.
When a 'branch' is set, checkout out the latest commit on the branch of
the repository.
If neither provided, will checkout 'default' branch.
These two options are mutually exclusive.
Attributes:
branch: A branch to checkout during clone.
src_hash: The hash of the current commit.
tag: A tag to checkout during clone.
target: The folder the source code should end up in.
uri: The location of the source code.
"""
def __init__(self, uri, **kwargs):
"""
Constructor for a mercurial repository.
By default checks out the default branch.
The *branch* and *tag* kwargs are mutually exclusive.
Args:
uri: The URI that hosts the repository.
Kwargs:
branch: A branch to checkout and track.
tag: Any fixed tag like a revision or tagged commit.
target: Path on system to clone to.
"""
super(Hg, self).__init__(uri, **kwargs)
        if self.on_branch and self.branch is None:
            self.branch = 'default'
@property
def ready(self):
"""
Returns true iff the repository is available and the
right tag or branch is checked out.
"""
if not os.path.exists(os.path.join(self.target, '.hg')):
return False
found = False
with open(os.path.join(self.target, '.hg', 'hgrc')) as fin:
for line in fin:
if self.uri in line:
found = True
break
return found
@property
def src_hash(self):
"""
Return the current hash of the repository.
"""
with self:
cmd = Command('hg identify', self.target)
cmd.wait()
return cmd.output()[0].split()[0]
@staticmethod
def valid_uri(uri):
"""
Validate that the supplied uri is handled by this class.
Returns:
True if the URI is valid for this class, else False.
"""
try:
cmd = Command('hg identify ' + uri)
cmd.wait()
return cmd.rcode == 0
except PakitError:
return False
def checkout(self):
"""
Checkout the right tag or branch.
"""
Command('hg update ' + self.tag, self.target).wait()
def download(self):
"""
Download the repository to the target.
"""
tag = '' if self.tag is None else '-u ' + self.tag
cmd = Command('hg clone {tag} {uri} {target}'.format(
tag=tag, uri=self.uri, target=self.target))
cmd.wait()
def reset(self):
"""
Clears away all build files from repo.
"""
cmd = Command('hg status -un', self.target)
cmd.wait()
for path in cmd.output():
os.remove(os.path.join(self.target, path))
def update(self):
"""
Fetches latest commit when branch is set.
"""
cmd = Command('hg pull -b ' + self.branch, self.target)
cmd.wait()
cmd = Command('hg update', self.target)
cmd.wait()
class Command(object):
"""
Execute a command on the host system.
Once the constructor returns, the command is running.
At that point, either wait for it to complete or go about your business.
The process and all children will be part of the same process group,
this allows for easy termination via signals.
Attributes:
alive: True only if the command is still running.
rcode: When the command finishes, is the return code.
"""
def __init__(self, cmd, cmd_dir=None, prev_cmd=None, env=None):
"""
Run a command on the system.
Note: Don't use '|' or '&', instead execute commands
one after another & supply prev_cmd.
Args:
cmd: A string that you would type into the shell.
If shlex.split would not correctly split the line
then pass a list.
cmd_dir: Change to this directory before executing.
env: A dictionary of environment variables to change.
For instance, env={'HOME': '/tmp'} would change
HOME variable for the duration of the Command.
prev_cmd: Read the stdout of this command for stdin.
Raises:
PakitCmdError: The command could not find command on system
or the cmd_dir did not exist during subprocess execution.
"""
super(Command, self).__init__()
if isinstance(cmd, list):
self._cmd = cmd
else:
self._cmd = shlex.split(cmd)
if self._cmd[0].find('./') != 0:
self._cmd.insert(0, '/usr/bin/env')
self._cmd_dir = cmd_dir
stdin = None
if prev_cmd:
stdin = open(prev_cmd.stdout.name, 'r')
if env:
to_update = env
env = os.environ.copy()
env.update(to_update)
logging.debug('CMD START: %s', self)
try:
self.stdout = TempFile(mode='wb', delete=False,
dir=pakit.conf.TMP_DIR,
prefix='cmd', suffix='.log')
self._proc = subprocess.Popen(
self._cmd, cwd=self._cmd_dir, env=env, preexec_fn=os.setsid,
stdin=stdin, stdout=self.stdout, stderr=subprocess.STDOUT
)
except OSError as exc:
if cmd_dir and not os.path.exists(cmd_dir):
raise PakitCmdError('Command directory does not exist: ' +
self._cmd_dir)
else:
raise PakitCmdError('General OSError:\n' + str(exc))
def __del__(self):
"""
When the command object is garbage collected:
- Terminate processes if still running.
- Write the entire output of the command to the log.
"""
try:
if self.alive:
self.terminate() # pragma: no cover
self.stdout.close()
prefix = '\n '
msg = prefix + prefix.join(self.output())
logging.debug("CMD LOG: %s%s", self, msg)
except AttributeError:
logging.error('Could not execute command: ' + str(self))
except (IOError, OSError) as exc:
logging.error(exc)
def __str__(self):
return 'Command: {0}, {1}'.format(self._cmd, self._cmd_dir)
@property
def alive(self):
"""
The command is still running.
"""
return self._proc.poll() is None
@property
def rcode(self):
"""
The return code of the command.
"""
return self._proc.returncode
def output(self, last_n=0):
"""
The output of the run command.
Args:
last_n: Return last n lines from output, default all output.
Returns:
A list of lines from the output of the command.
"""
if self._proc is None or not os.path.exists(self.stdout.name):
return [] # pragma: no cover
# TODO: Handle encoding better?
with open(self.stdout.name, 'rb') as out:
lines = [line.strip().decode('utf-8', 'ignore')
for line in out.readlines()]
return lines[-last_n:]
def terminate(self):
"""
Terminates the subprocess running the command and all
children spawned by the command.
On return, they are all dead.
"""
if self.alive:
os.killpg(self._proc.pid, signal.SIGTERM)
self._proc.wait()
def wait(self, timeout=None):
"""
Block here until the command is done.
Args:
timeout: If no stdout for this interval
terminate the command and raise error.
Raises:
PakitCmdTimeout: When stdout stops getting output for max_time.
PakitCmdError: When return code is not 0.
"""
if not timeout:
timeout = pakit.conf.CONFIG.get('pakit.command.timeout')
thrd = threading.Thread(target=(lambda proc: proc.wait()),
args=(self._proc,))
thrd.start()
thread_not_started = True
while thread_not_started:
try:
thrd.join(0.1)
thread_not_started = False
except RuntimeError: # pragma: no cover
pass
while self._proc.poll() is None:
thrd.join(0.5)
interval = time.time() - os.path.getmtime(self.stdout.name)
if interval > timeout:
self.terminate()
raise PakitCmdTimeout('\n'.join(self.output(10)))
if self.rcode != 0:
raise PakitCmdError('\n'.join(self.output(10)))
|
olivierb2/openchange | refs/heads/master | mapiproxy/services/utils/genpass.py | 9 | #!/usr/bin/python
import os
import sys
import hashlib
from base64 import urlsafe_b64encode as encode
def main():
    if len(sys.argv) != 2:
        print 'Usage: %s password' % (sys.argv[0])
        sys.exit(1)
salt = os.urandom(4)
h = hashlib.sha1(sys.argv[1])
h.update(salt)
print "{SSHA}" + encode(h.digest() + salt)
sys.exit()
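# Sketch of the matching verification step (illustrative only; assumes the
# '{SSHA}' prefix was stripped and base64.urlsafe_b64decode is used):
#   raw = urlsafe_b64decode(value)
#   digest, salt = raw[:20], raw[20:]   # SHA-1 digests are 20 bytes
#   valid = hashlib.sha1(password + salt).digest() == digest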
if __name__ == "__main__":
main()
|
zhjunlang/kbengine | refs/heads/master | kbe/src/lib/python/Doc/includes/sqlite3/ctx_manager.py | 51 | import sqlite3
con = sqlite3.connect(":memory:")
con.execute("create table person (id integer primary key, firstname varchar unique)")
# Successful, con.commit() is called automatically afterwards
with con:
con.execute("insert into person(firstname) values (?)", ("Joe",))
# con.rollback() is called after the with block finishes with an exception; the
# exception is still raised and must be caught.
try:
with con:
con.execute("insert into person(firstname) values (?)", ("Joe",))
except sqlite3.IntegrityError:
print("couldn't add Joe twice")
|
ArchiFleKs/magnum | refs/heads/master | magnum/tests/unit/objects/test_fields.py | 2 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_versionedobjects.tests import test_fields
from magnum.objects import fields
class TestClusterStatus(test_fields.TestField):
def setUp(self):
super(TestClusterStatus, self).setUp()
self.field = fields.ClusterStatusField()
self.coerce_good_values = [('CREATE_IN_PROGRESS',
'CREATE_IN_PROGRESS'),
('CREATE_FAILED', 'CREATE_FAILED'),
('CREATE_COMPLETE', 'CREATE_COMPLETE'),
('UPDATE_IN_PROGRESS',
'UPDATE_IN_PROGRESS'),
('UPDATE_FAILED', 'UPDATE_FAILED'),
('UPDATE_COMPLETE', 'UPDATE_COMPLETE'),
('DELETE_IN_PROGRESS',
'DELETE_IN_PROGRESS'),
('DELETE_FAILED', 'DELETE_FAILED'),
('RESUME_COMPLETE', 'RESUME_COMPLETE'),
('RESTORE_COMPLETE', 'RESTORE_COMPLETE'),
('ROLLBACK_COMPLETE', 'ROLLBACK_COMPLETE'),
('SNAPSHOT_COMPLETE', 'SNAPSHOT_COMPLETE'),
('CHECK_COMPLETE', 'CHECK_COMPLETE'),
('ADOPT_COMPLETE', 'ADOPT_COMPLETE')]
self.coerce_bad_values = ['DELETE_STOPPED']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'UPDATE_FAILED'",
self.field.stringify('UPDATE_FAILED'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'DELETE_STOPPED')
class TestClusterHealthStatus(test_fields.TestField):
def setUp(self):
super(TestClusterHealthStatus, self).setUp()
self.field = fields.ClusterHealthStatusField()
self.coerce_good_values = [('HEALTHY', 'HEALTHY'),
('UNHEALTHY', 'UNHEALTHY')]
self.coerce_bad_values = ['FAKE']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'UNHEALTHY'",
self.field.stringify('UNHEALTHY'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'FAKE')
class TestContainerStatus(test_fields.TestField):
def setUp(self):
super(TestContainerStatus, self).setUp()
self.field = fields.ContainerStatusField()
self.coerce_good_values = [('Error', 'Error'), ('Running', 'Running'),
('Stopped', 'Stopped'),
('Paused', 'Paused'),
('Unknown', 'Unknown'), ]
self.coerce_bad_values = ['DELETED']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'Stopped'",
self.field.stringify('Stopped'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'DELETED')
class TestClusterType(test_fields.TestField):
def setUp(self):
super(TestClusterType, self).setUp()
self.field = fields.ClusterTypeField()
self.coerce_good_values = [('kubernetes', 'kubernetes'),
('swarm', 'swarm'),
('mesos', 'mesos'), ]
self.coerce_bad_values = ['invalid']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'kubernetes'",
self.field.stringify('kubernetes'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'invalid')
class TestMagnumServiceBinary(test_fields.TestField):
def setUp(self):
super(TestMagnumServiceBinary, self).setUp()
self.field = fields.MagnumServiceBinaryField()
self.coerce_good_values = [('magnum-conductor', 'magnum-conductor')]
self.coerce_bad_values = ['invalid']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'magnum-conductor'",
self.field.stringify('magnum-conductor'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'invalid')
class TestServerType(test_fields.TestField):
def setUp(self):
super(TestServerType, self).setUp()
self.field = fields.ServerTypeField()
self.coerce_good_values = [('vm', 'vm'),
('bm', 'bm'), ]
self.coerce_bad_values = ['invalid']
self.to_primitive_values = self.coerce_good_values[0:1]
self.from_primitive_values = self.coerce_good_values[0:1]
def test_stringify(self):
self.assertEqual("'vm'",
self.field.stringify('vm'))
def test_stringify_invalid(self):
self.assertRaises(ValueError, self.field.stringify, 'invalid')
|
jbzdak/edx-platform | refs/heads/master | common/test/acceptance/tests/lms/test_lms_course_discovery.py | 69 | """
Test course discovery.
"""
import datetime
import json
from bok_choy.web_app_test import WebAppTest
from ..helpers import remove_file
from ...pages.common.logout import LogoutPage
from ...pages.studio.auto_auth import AutoAuthPage
from ...pages.lms.discovery import CourseDiscoveryPage
from ...fixtures.course import CourseFixture
class CourseDiscoveryTest(WebAppTest):
"""
Test searching for courses.
"""
STAFF_USERNAME = "STAFF_TESTER"
STAFF_EMAIL = "[email protected]"
TEST_INDEX_FILENAME = "test_root/index_file.dat"
def setUp(self):
"""
Create course page and courses to find
"""
# create index file
with open(self.TEST_INDEX_FILENAME, "w+") as index_file:
json.dump({}, index_file)
self.addCleanup(remove_file, self.TEST_INDEX_FILENAME)
super(CourseDiscoveryTest, self).setUp()
self.page = CourseDiscoveryPage(self.browser)
for i in range(10):
org = self.unique_id
number = unicode(i)
run = "test_run"
name = "test course"
settings = {'enrollment_start': datetime.datetime(1970, 1, 1).isoformat()}
CourseFixture(org, number, run, name, settings=settings).install()
for i in range(2):
org = self.unique_id
number = unicode(i)
run = "test_run"
name = "grass is always greener"
CourseFixture(
org,
number,
run,
name,
settings={
'enrollment_start': datetime.datetime(1970, 1, 1).isoformat()
}
).install()
def _auto_auth(self, username, email, staff):
"""
Logout and login with given credentials.
"""
LogoutPage(self.browser).visit()
AutoAuthPage(self.browser, username=username, email=email, staff=staff).visit()
def test_page_existence(self):
"""
Make sure that the page is accessible.
"""
self.page.visit()
def test_search(self):
"""
Make sure you can search for courses.
"""
self.page.visit()
self.assertEqual(len(self.page.result_items), 12)
self.page.search("grass")
self.assertEqual(len(self.page.result_items), 2)
self.page.clear_search()
self.assertEqual(len(self.page.result_items), 12)
|
timakaryo/antrean | refs/heads/master | backend/api/tests.py | 873 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import TestCase
# Create your tests here.
|
xiaoyang2022/V2EX | refs/heads/master | money.py | 16 | #!/usr/bin/env python
# coding=utf-8
import os
import re
import time
import datetime
import hashlib
import string
import random
from google.appengine.ext import webapp
from google.appengine.api import memcache
from google.appengine.ext import db
from google.appengine.ext.webapp import util
from google.appengine.ext.webapp import template
from google.appengine.api.labs import taskqueue
from v2ex.babel import Member
from v2ex.babel import Counter
from v2ex.babel import Section
from v2ex.babel import Node
from v2ex.babel import Topic
from v2ex.babel import Reply
from v2ex.babel import Note
from v2ex.babel import Notification
from v2ex.babel import SYSTEM_VERSION
from v2ex.babel.security import *
from v2ex.babel.ua import *
from v2ex.babel.da import *
from v2ex.babel.l10n import *
from v2ex.babel.ext.cookies import Cookies
from v2ex.babel.handlers import BaseHandler
import config
template.register_template_library('v2ex.templatetags.filters')
class MoneyDashboardHandler(BaseHandler):
def get(self):
if self.member:
self.set_title(u'账户查询')
self.finalize(template_name='money_dashboard')
else:
self.redirect('/signin')
def main():
application = webapp.WSGIApplication([
('/money/dashboard/?', MoneyDashboardHandler)
],
debug=True)
util.run_wsgi_app(application)
if __name__ == '__main__':
main() |
PaddlePaddle/models | refs/heads/develop | PaddleCV/video/metrics/bsn_metrics/bsn_pem_metrics.py | 1 | # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import numpy as np
import datetime
import logging
import json
import pandas as pd
from models.bsn.bsn_utils import soft_nms, bsn_post_processing
import time
logger = logging.getLogger(__name__)
import os
class MetricsCalculator():
def __init__(self, cfg, name='BsnPem', mode='train'):
self.name = name
self.mode = mode # 'train', 'valid', 'test', 'infer'
self.subset = cfg[self.mode.upper()][
"subset"] # 'train', 'validation', 'test'
self.anno_file = cfg["MODEL"]["anno_file"]
self.file_list = cfg["INFER"]["filelist"]
self.get_dataset_dict()
if self.mode == "test" or self.mode == "infer":
self.output_path_pem = cfg[self.mode.upper()]["output_path_pem"]
self.result_path_pem = cfg[self.mode.upper()]["result_path_pem"]
self.reset()
def get_dataset_dict(self):
if self.mode == "infer":
annos = json.load(open(self.file_list))
self.video_dict = {}
for video_name in annos.keys():
self.video_dict[video_name] = annos[video_name]
else:
annos = json.load(open(self.anno_file))
self.video_dict = {}
for video_name in annos.keys():
video_subset = annos[video_name]["subset"]
if self.subset in video_subset:
self.video_dict[video_name] = annos[video_name]
self.video_list = list(self.video_dict.keys())
self.video_list.sort()
def reset(self):
logger.info('Resetting {} metrics...'.format(self.mode))
self.aggr_loss = 0.0
self.aggr_batch_size = 0
if self.mode == 'test' or self.mode == 'infer':
if not os.path.exists(self.output_path_pem):
os.makedirs(self.output_path_pem)
def save_results(self, pred_iou, props_info, fid):
if self.mode == 'infer':
video_name = self.video_list[fid[0]]
else:
video_name = self.video_list[fid[0][0]]
df = pd.DataFrame()
df["xmin"] = props_info[0, :, 0]
df["xmax"] = props_info[0, :, 1]
df["xmin_score"] = props_info[0, :, 2]
df["xmax_score"] = props_info[0, :, 3]
df["iou_score"] = pred_iou.squeeze()
df.to_csv(
os.path.join(self.output_path_pem, video_name + ".csv"),
index=False)
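        # Illustrative output shape (values below are made up, not from the
        # source): each <video_name>.csv holds one proposal per row, e.g.
        #   xmin,xmax,xmin_score,xmax_score,iou_score
        #   0.10,0.35,0.91,0.88,0.76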
def accumulate(self, fetch_list):
cur_batch_size = 1 # iteration counter
total_loss = fetch_list[0]
self.aggr_loss += np.mean(np.array(total_loss))
self.aggr_batch_size += cur_batch_size
if self.mode == 'test':
pred_iou = np.array(fetch_list[1])
props_info = np.array(fetch_list[2])
fid = np.array(fetch_list[3])
self.save_results(pred_iou, props_info, fid)
def accumulate_infer_results(self, fetch_list):
pred_iou = np.array(fetch_list[0])
props_info = np.array([item[0] for item in fetch_list[1]])
fid = [item[1] for item in fetch_list[1]]
self.save_results(pred_iou, props_info, fid)
def finalize_metrics(self):
self.avg_loss = self.aggr_loss / self.aggr_batch_size
if self.mode == 'test':
bsn_post_processing(self.video_dict, self.subset,
self.output_path_pem, self.result_path_pem)
def finalize_infer_metrics(self):
bsn_post_processing(self.video_dict, self.subset, self.output_path_pem,
self.result_path_pem)
def get_computed_metrics(self):
json_stats = {}
json_stats['avg_loss'] = self.avg_loss
return json_stats
|
DiptoDas8/Biponi | refs/heads/master | lib/python2.7/site-packages/django/core/files/uploadhandler.py | 102 | """
Base file upload handler classes, and the built-in concrete subclasses
"""
from __future__ import unicode_literals
from io import BytesIO
from django.conf import settings
from django.core.files.uploadedfile import (
InMemoryUploadedFile, TemporaryUploadedFile,
)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.module_loading import import_string
__all__ = [
'UploadFileException', 'StopUpload', 'SkipFile', 'FileUploadHandler',
'TemporaryFileUploadHandler', 'MemoryFileUploadHandler', 'load_handler',
'StopFutureHandlers'
]
class UploadFileException(Exception):
"""
Any error having to do with uploading files.
"""
pass
@python_2_unicode_compatible
class StopUpload(UploadFileException):
"""
This exception is raised when an upload must abort.
"""
def __init__(self, connection_reset=False):
"""
        If ``connection_reset`` is ``True``, Django will halt the upload
        without consuming the rest of it. This will cause the browser to
        show a "connection reset" error.
"""
self.connection_reset = connection_reset
def __str__(self):
if self.connection_reset:
return 'StopUpload: Halt current upload.'
else:
return 'StopUpload: Consume request data, then halt.'
class SkipFile(UploadFileException):
"""
This exception is raised by an upload handler that wants to skip a given file.
"""
pass
class StopFutureHandlers(UploadFileException):
"""
    Upload handlers that have handled a file and do not want future handlers to
run should raise this exception instead of returning None.
"""
pass
class FileUploadHandler(object):
"""
Base class for streaming upload handlers.
"""
chunk_size = 64 * 2 ** 10 # : The default chunk size is 64 KB.
def __init__(self, request=None):
self.file_name = None
self.content_type = None
self.content_length = None
self.charset = None
self.content_type_extra = None
self.request = request
def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
"""
Handle the raw input from the client.
Parameters:
:input_data:
An object that supports reading via .read().
:META:
``request.META``.
:content_length:
The (integer) value of the Content-Length header from the
client.
:boundary: The boundary from the Content-Type header. Be sure to
prepend two '--'.
"""
pass
def new_file(self, field_name, file_name, content_type, content_length, charset=None, content_type_extra=None):
"""
Signal that a new file has been started.
Warning: As with any data from the client, you should not trust
content_length (and sometimes won't even get it).
"""
self.field_name = field_name
self.file_name = file_name
self.content_type = content_type
self.content_length = content_length
self.charset = charset
self.content_type_extra = content_type_extra
def receive_data_chunk(self, raw_data, start):
"""
Receive data from the streamed upload parser. ``start`` is the position
in the file of the chunk.
"""
raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')
def file_complete(self, file_size):
"""
Signal that a file has completed. File size corresponds to the actual
size accumulated by all the chunks.
Subclasses should return a valid ``UploadedFile`` object.
"""
raise NotImplementedError('subclasses of FileUploadHandler must provide a file_complete() method')
def upload_complete(self):
"""
Signal that the upload is complete. Subclasses should perform cleanup
that is necessary for this handler.
"""
pass
class TemporaryFileUploadHandler(FileUploadHandler):
"""
Upload handler that streams data into a temporary file.
"""
def __init__(self, *args, **kwargs):
super(TemporaryFileUploadHandler, self).__init__(*args, **kwargs)
def new_file(self, file_name, *args, **kwargs):
"""
Create the file object to append to as data is coming in.
"""
super(TemporaryFileUploadHandler, self).new_file(file_name, *args, **kwargs)
self.file = TemporaryUploadedFile(self.file_name, self.content_type, 0, self.charset, self.content_type_extra)
def receive_data_chunk(self, raw_data, start):
self.file.write(raw_data)
def file_complete(self, file_size):
self.file.seek(0)
self.file.size = file_size
return self.file
class MemoryFileUploadHandler(FileUploadHandler):
"""
File upload handler to stream uploads into memory (used for small files).
"""
def handle_raw_input(self, input_data, META, content_length, boundary, encoding=None):
"""
Use the content_length to signal whether or not this handler should be in use.
"""
        # Check the content-length header to see if this handler should be
        # used. If the post is too large, we cannot use the Memory handler.
if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
self.activated = False
else:
self.activated = True
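        # Illustrative default (an assumption; the effective value is whatever
        # settings provides): FILE_UPLOAD_MAX_MEMORY_SIZE ships as 2.5 MB, so
        # a 1 MB POST stays in memory while a 10 MB POST falls through to a
        # later handler such as TemporaryFileUploadHandler.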
def new_file(self, *args, **kwargs):
super(MemoryFileUploadHandler, self).new_file(*args, **kwargs)
if self.activated:
self.file = BytesIO()
raise StopFutureHandlers()
def receive_data_chunk(self, raw_data, start):
"""
Add the data to the BytesIO file.
"""
if self.activated:
self.file.write(raw_data)
else:
return raw_data
def file_complete(self, file_size):
"""
Return a file object if we're activated.
"""
if not self.activated:
return
self.file.seek(0)
return InMemoryUploadedFile(
file=self.file,
field_name=self.field_name,
name=self.file_name,
content_type=self.content_type,
size=file_size,
charset=self.charset,
content_type_extra=self.content_type_extra
)
def load_handler(path, *args, **kwargs):
"""
Given a path to a handler, return an instance of that handler.
E.g.::
>>> from django.http import HttpRequest
>>> request = HttpRequest()
>>> load_handler('django.core.files.uploadhandler.TemporaryFileUploadHandler', request)
<TemporaryFileUploadHandler object at 0x...>
"""
return import_string(path)(*args, **kwargs)
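# A minimal custom handler sketch (illustrative; the class name and the
# progress-counting idea are assumptions, not part of this module):
#
#     class ProgressUploadHandler(FileUploadHandler):
#         def new_file(self, *args, **kwargs):
#             super(ProgressUploadHandler, self).new_file(*args, **kwargs)
#             self.received = 0
#
#         def receive_data_chunk(self, raw_data, start):
#             self.received += len(raw_data)
#             return raw_data  # returning the chunk passes it downstream
#
#         def file_complete(self, file_size):
#             return None  # let a later handler build the UploadedFile
#
# Activate it per request with request.upload_handlers.insert(0, handler).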
|
Akylas/CouchPotatoServer | refs/heads/master | libs/sqlalchemy/dialects/mysql/mysqlconnector.py | 17 | # mysql/mysqlconnector.py
# Copyright (C) 2005-2012 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Support for the MySQL database via the MySQL Connector/Python adapter.
MySQL Connector/Python is available at:
https://launchpad.net/myconnpy
Connecting
-----------
Connect string format::
mysql+mysqlconnector://<user>:<password>@<host>[:<port>]/<dbname>
"""
import re
from sqlalchemy.dialects.mysql.base import (MySQLDialect,
MySQLExecutionContext, MySQLCompiler, MySQLIdentifierPreparer,
BIT)
from sqlalchemy.engine import base as engine_base, default
from sqlalchemy.sql import operators as sql_operators
from sqlalchemy import exc, log, schema, sql, types as sqltypes, util
from sqlalchemy import processors
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
def get_lastrowid(self):
return self.cursor.lastrowid
class MySQLCompiler_mysqlconnector(MySQLCompiler):
def visit_mod(self, binary, **kw):
return self.process(binary.left) + " %% " + self.process(binary.right)
def post_process_text(self, text):
return text.replace('%', '%%')
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
def _escape_identifier(self, value):
value = value.replace(self.escape_quote, self.escape_to_quote)
return value.replace("%", "%%")
class _myconnpyBIT(BIT):
def result_processor(self, dialect, coltype):
"""MySQL-connector already converts mysql bits, so."""
return None
class MySQLDialect_mysqlconnector(MySQLDialect):
driver = 'mysqlconnector'
supports_unicode_statements = True
supports_unicode_binds = True
supports_sane_rowcount = True
supports_sane_multi_rowcount = True
supports_native_decimal = True
default_paramstyle = 'format'
execution_ctx_cls = MySQLExecutionContext_mysqlconnector
statement_compiler = MySQLCompiler_mysqlconnector
preparer = MySQLIdentifierPreparer_mysqlconnector
colspecs = util.update_copy(
MySQLDialect.colspecs,
{
BIT: _myconnpyBIT,
}
)
@classmethod
def dbapi(cls):
from mysql import connector
return connector
def create_connect_args(self, url):
opts = url.translate_connect_args(username='user')
opts.update(url.query)
util.coerce_kw_type(opts, 'buffered', bool)
util.coerce_kw_type(opts, 'raise_on_warnings', bool)
opts['buffered'] = True
opts['raise_on_warnings'] = True
# FOUND_ROWS must be set in ClientFlag to enable
# supports_sane_rowcount.
if self.dbapi is not None:
try:
from mysql.connector.constants import ClientFlag
client_flags = opts.get('client_flags', ClientFlag.get_default())
client_flags |= ClientFlag.FOUND_ROWS
opts['client_flags'] = client_flags
except:
pass
return [[], opts]
def _get_server_version_info(self, connection):
dbapi_con = connection.connection
from mysql.connector.constants import ClientFlag
dbapi_con.set_client_flag(ClientFlag.FOUND_ROWS)
version = dbapi_con.get_server_version()
return tuple(version)
def _detect_charset(self, connection):
return connection.connection.get_characterset_info()
def _extract_error_code(self, exception):
return exception.errno
def is_disconnect(self, e, connection, cursor):
errnos = (2006, 2013, 2014, 2045, 2055, 2048)
exceptions = (self.dbapi.OperationalError,self.dbapi.InterfaceError)
if isinstance(e, exceptions):
return e.errno in errnos
else:
return False
def _compat_fetchall(self, rp, charset=None):
return rp.fetchall()
def _compat_fetchone(self, rp, charset=None):
return rp.fetchone()
dialect = MySQLDialect_mysqlconnector
|
mephizzle/wagtail | refs/heads/master | wagtail/wagtailcore/migrations/0012_extend_page_slug_field.py | 27 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0011_page_first_published_at'),
]
operations = [
migrations.AlterField(
model_name='page',
name='slug',
field=models.SlugField(help_text='The name of the page as it will appear in URLs e.g http://domain.com/blog/[my-slug]/', max_length=255),
preserve_default=True,
),
]
|
lucasoldaini/simple_caching | refs/heads/master | simple_caching.py | 1 | """Caching decorator for dictionary/tuples."""
import json
import os
from functools import wraps
import gzip
import sys
from string import punctuation
import codecs
from hashlib import md5
_OK_JSON = set((dict, list, str, int, float))
class _DumpAdapter(object):
""" Flexible interlace to blindly use codecs module or
gzip module
"""
def __init__(self, func, accepted_args):
self.func = func
self.accepted_args = accepted_args
def __call__(self, **kwargs):
keyword_arguments = {k: kwargs[k] for k in kwargs
if k in self.accepted_args}
return self.func(**keyword_arguments)
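# Illustrative use (an assumed call, not from the original source): wrapping
# gzip.open with accepted_args=['filename', 'mode'] means a call like
#     adapter(filename='x.gz', mode='w', encoding='utf-8')
# silently drops the unsupported 'encoding' keyword before invoking gzip.open.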
def simple_caching(cachedir=None,
mode=None,
cache_comment=None,
force_refresh=False,
cache_format='gzip'):
""" Caching decorator for dictionary/tuples.
Caches gzipped json in specified cache folder
Accepts the following kwargs:
mode (default='hash')
accepted modes:
(1) method-name: the name of the decorated method
is used as name for the cache.
(2) hash: a hash of the parameters is used as
name of the cache
cachedir (default=None)
Location of the folder where to cache. cachedir
doesn't need to be configured if simple_caching
is caching a method of a class with cachedir attribute.
cache_comment (default=None)
A comment to add to the name of the cache.
            If no comment is provided, the name of the cache
            is simply the name of the method being cached.
force_refresh (default=False)
            rebuilds the cache if set to True
cache_format (default=gzip)
it could either be gzip or json
The kwargs can be set either (a) at decoration time
or (b) when the decorated method is called:
example (a):
@simple_caching(cachedir='/path/to/cache')
def foo(s):
...
example (b):
@simple_caching()
def foo(s):
...
...
foo('baz', cachedir='/path/to/cache')
A combination of both is also fine, of course.
kwargs provided at call time have precedence, though.
"""
# Without the use of this decorator factory,
# the name of the method would have been 'wrapper'
# and the docstring of the original method would have been lost.
# from python docs:
# https://docs.python.org/2/library/functools.html#module-functools
def caching_decorator(method):
# cachedir, cache_comment and autodetect are out
# of scope for method_wrapper, thus local variables
# need to be instantiated.
local_cachedir = cachedir
local_cache_comment = (cache_comment or '')
local_force_refresh = force_refresh
local_cache_format = cache_format
local_mode = mode
if local_mode is None:
local_mode = 'hash'
if (local_mode not in ('hash', 'method-name')):
print >> sys.stderr, ("[cache error] '{0}' is not " +
"a valid caching mode; use 'method-name' " +
"or 'hash'.").format(local_mode)
sys.exit(1)
@wraps(method)
def method_wrapper(*args, **kwargs):
# looks for cachedir folder in self instance
# if not found, it looks for it in keyword
# arguments.
try:
cachedir = args[0].cachedir
except AttributeError:
cachedir = kwargs.pop('cachedir', local_cachedir)
# if no cachedir is specified, then it simply returns
# the original method and does nothing
if not cachedir:
return method(*args, **kwargs)
# checks if the global parameters are overwritten by
# values @ call time or if some of the missing parameters
# have been provided at call time
cache_comment = kwargs.pop('cache_comment', local_cache_comment)
force_refresh = kwargs.pop('force_refresh', local_force_refresh)
mode = kwargs.pop('mode', ((local_mode is not None) and
local_mode) or 'hash')
if not os.path.exists(cachedir):
cachedir = os.path.join(os.getcwd(), cachedir)
if not os.path.exists(cachedir):
print >> sys.stderr, ("[cache error] {0} is not " +
"a valid dir.").format(cachedir)
sys.exit(1)
cache_format = kwargs.pop('cache_format', local_cache_format)
if cache_format == 'json':
dump_func = _DumpAdapter(codecs.open,
['filename', 'mode', 'encoding'])
ext = 'json'
elif cache_format == 'gzip':
dump_func = _DumpAdapter(gzip.open,
['filename', 'mode'])
ext = 'gz'
else:
print >> sys.stderr, ("[cache error] {0} is not a valid " +
"cache format. Use json or gzip." +
"").format(cache_format)
sys.exit(1)
if mode == 'method-name':
name = method.__name__
if mode == 'hash':
                # hash only JSON-serializable values so the cache key is stable
                to_hash = json.dumps({'args': [a for a in args
                                               if type(a) in _OK_JSON],
                                      'kwargs': {k: v for k, v in kwargs.items()
                                                 if type(v) in _OK_JSON}
})
name = md5(to_hash).hexdigest()
# the ...and...or... makes sure that there is an underscore
# between cache file name and cache comment if cache_comment
# exists.
cachename = '%s%s.cache.%s' % (name,
(cache_comment and
'_%s' % cache_comment) or '',
ext)
# removes prefix/suffix punctuation from method name
# (e.g. __call__ will become call)
while cachename[0] in punctuation:
cachename = cachename[1:]
while cachename[(len(cachename) - 1)] in punctuation:
cachename = cachename[:(len(cachename) - 1)]
cachepath = os.path.join(cachedir, cachename)
            # loads the cache if it exists, otherwise creates it
if os.path.exists(cachepath) and not force_refresh:
with dump_func(filename=cachepath,
mode='r', encoding='utf-8') as cachefile:
return json.loads(cachefile.read())
else:
print '[cache] generating %s' % cachepath
tocache = method(*args, **kwargs)
with dump_func(filename=cachepath, mode='w',
encoding='utf-8') as cachefile:
try:
json.dump(tocache, cachefile)
except TypeError:
cachefile.close()
os.remove(cachepath)
raise
return tocache
return method_wrapper
return caching_decorator
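# A minimal usage sketch (illustrative; the cache directory and the decorated
# function are placeholders, not part of this module):
#
#     @simple_caching(cachedir='/tmp/cache', mode='hash', cache_format='json')
#     def expensive_lookup(key):
#         return {'key': key, 'value': key.upper()}
#
#     expensive_lookup('foo')  # computes, then writes /tmp/cache/<md5>.cache.json
#     expensive_lookup('foo')  # second call is read back from the cache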
|
zrhans/pythonanywhere | refs/heads/master | .virtualenvs/django19/lib/python3.4/site-packages/numpy/doc/misc.py | 124 | """
=============
Miscellaneous
=============
IEEE 754 Floating Point Special Values
--------------------------------------
Special values defined in numpy: nan, inf,
NaNs can be used as a poor-man's mask (if you don't care what the
original value was)
Note: cannot use equality to test NaNs. E.g.: ::
>>> myarr = np.array([1., 0., np.nan, 3.])
>>> np.where(myarr == np.nan)
>>> np.nan == np.nan # is always False! Use special numpy functions instead.
False
>>> myarr[myarr == np.nan] = 0. # doesn't work
>>> myarr
array([ 1., 0., NaN, 3.])
>>> myarr[np.isnan(myarr)] = 0. # use this instead
>>> myarr
array([ 1., 0., 0., 3.])
Other related special value functions: ::
isinf(): True if value is inf
isfinite(): True if not nan or inf
nan_to_num(): Map nan to 0, inf to max float, -inf to min float
The following corresponds to the usual functions except that nans are excluded
from the results: ::
nansum()
nanmax()
nanmin()
nanargmax()
nanargmin()
>>> x = np.arange(10.)
>>> x[3] = np.nan
>>> x.sum()
nan
>>> np.nansum(x)
42.0
How numpy handles numerical exceptions
--------------------------------------
The default is to ``'warn'`` for ``invalid``, ``divide``, and ``overflow``
and ``'ignore'`` for ``underflow``. But this can be changed, and it can be
set individually for different kinds of exceptions. The different behaviors
are:
- 'ignore' : Take no action when the exception occurs.
- 'warn' : Print a `RuntimeWarning` (via the Python `warnings` module).
- 'raise' : Raise a `FloatingPointError`.
- 'call' : Call a function specified using the `seterrcall` function.
- 'print' : Print a warning directly to ``stdout``.
- 'log' : Record error in a Log object specified by `seterrcall`.
These behaviors can be set for all kinds of errors or specific ones:
- all : apply to all numeric exceptions
- invalid : when NaNs are generated
- divide : divide by zero (for integers as well!)
- overflow : floating point overflows
- underflow : floating point underflows
Note that integer divide-by-zero is handled by the same machinery.
These behaviors are set on a per-thread basis.
Examples
--------
::
>>> oldsettings = np.seterr(all='warn')
>>> np.zeros(5,dtype=np.float32)/0.
invalid value encountered in divide
>>> j = np.seterr(under='ignore')
>>> np.array([1.e-100])**10
>>> j = np.seterr(invalid='raise')
>>> np.sqrt(np.array([-1.]))
FloatingPointError: invalid value encountered in sqrt
>>> def errorhandler(errstr, errflag):
... print "saw stupid error!"
>>> np.seterrcall(errorhandler)
<function err_handler at 0x...>
>>> j = np.seterr(all='call')
>>> np.zeros(5, dtype=np.int32)/0
FloatingPointError: invalid value encountered in divide
saw stupid error!
>>> j = np.seterr(**oldsettings) # restore previous
... # error-handling settings
Interfacing to C
----------------
Only a survey of the choices. Little detail on how each works.
1) Bare metal, wrap your own C-code manually.
- Plusses:
- Efficient
- No dependencies on other tools
- Minuses:
- Lots of learning overhead:
- need to learn basics of Python C API
- need to learn basics of numpy C API
- need to learn how to handle reference counting and love it.
- Reference counting often difficult to get right.
- getting it wrong leads to memory leaks, and worse, segfaults
- API will change for Python 3.0!
2) Cython
- Plusses:
- avoid learning C API's
- no dealing with reference counting
- can code in pseudo python and generate C code
- can also interface to existing C code
- should shield you from changes to Python C api
- has become the de-facto standard within the scientific Python community
- fast indexing support for arrays
- Minuses:
- Can write code in non-standard form which may become obsolete
- Not as flexible as manual wrapping
4) ctypes
- Plusses:
- part of Python standard library
- good for interfacing to existing sharable libraries, particularly
Windows DLLs
- avoids API/reference counting issues
- good numpy support: arrays have all these in their ctypes
attribute: ::
a.ctypes.data a.ctypes.get_strides
a.ctypes.data_as a.ctypes.shape
a.ctypes.get_as_parameter a.ctypes.shape_as
a.ctypes.get_data a.ctypes.strides
a.ctypes.get_shape a.ctypes.strides_as
- Minuses:
- can't use for writing code to be turned into C extensions, only a wrapper
tool.
5) SWIG (automatic wrapper generator)
- Plusses:
- around a long time
- multiple scripting language support
- C++ support
- Good for wrapping large (many functions) existing C libraries
- Minuses:
- generates lots of code between Python and the C code
- can cause performance problems that are nearly impossible to optimize
out
- interface files can be hard to write
- doesn't necessarily avoid reference counting issues or needing to know
API's
7) scipy.weave
- Plusses:
- can turn many numpy expressions into C code
- dynamic compiling and loading of generated C code
- can embed pure C code in Python module and have weave extract, generate
interfaces and compile, etc.
- Minuses:
- Future very uncertain: it's the only part of Scipy not ported to Python 3
and is effectively deprecated in favor of Cython.
8) Psyco
- Plusses:
- Turns pure python into efficient machine code through jit-like
optimizations
- very fast when it optimizes well
- Minuses:
- Only on intel (windows?)
- Doesn't do much for numpy?
Interfacing to Fortran:
-----------------------
The clear choice to wrap Fortran code is
`f2py <http://docs.scipy.org/doc/numpy-dev/f2py/>`_.
Pyfort is an older alternative, but not supported any longer.
Fwrap is a newer project that looked promising but isn't being developed any
longer.
Interfacing to C++:
-------------------
1) Cython
2) CXX
3) Boost.python
4) SWIG
5) SIP (used mainly in PyQT)
"""
from __future__ import division, absolute_import, print_function
|
jamesblunt/sympy | refs/heads/master | sympy/physics/quantum/commutator.py | 24 | """The commutator: [A,B] = A*B - B*A."""
from __future__ import print_function, division
from sympy import S, Expr, Mul, Add
from sympy.core.compatibility import u
from sympy.printing.pretty.stringpict import prettyForm
from sympy.physics.quantum.dagger import Dagger
from sympy.physics.quantum.operator import Operator
__all__ = [
'Commutator'
]
#-----------------------------------------------------------------------------
# Commutator
#-----------------------------------------------------------------------------
class Commutator(Expr):
"""The standard commutator, in an unevaluated state.
Evaluating a commutator is defined [1]_ as: ``[A, B] = A*B - B*A``. This
class returns the commutator in an unevaluated form. To evaluate the
commutator, use the ``.doit()`` method.
    Canonical ordering of a commutator is ``[A, B]`` for ``A < B``. The
arguments of the commutator are put into canonical order using ``__cmp__``.
If ``B < A``, then ``[B, A]`` is returned as ``-[A, B]``.
Parameters
==========
A : Expr
The first argument of the commutator [A,B].
B : Expr
The second argument of the commutator [A,B].
Examples
========
>>> from sympy.physics.quantum import Commutator, Dagger, Operator
>>> from sympy.abc import x, y
>>> A = Operator('A')
>>> B = Operator('B')
>>> C = Operator('C')
Create a commutator and use ``.doit()`` to evaluate it:
>>> comm = Commutator(A, B)
>>> comm
[A,B]
>>> comm.doit()
A*B - B*A
    The commutator orders its arguments in canonical order:
>>> comm = Commutator(B, A); comm
-[A,B]
Commutative constants are factored out:
>>> Commutator(3*x*A, x*y*B)
3*x**2*y*[A,B]
Using ``.expand(commutator=True)``, the standard commutator expansion rules
can be applied:
>>> Commutator(A+B, C).expand(commutator=True)
[A,C] + [B,C]
>>> Commutator(A, B+C).expand(commutator=True)
[A,B] + [A,C]
>>> Commutator(A*B, C).expand(commutator=True)
[A,C]*B + A*[B,C]
>>> Commutator(A, B*C).expand(commutator=True)
[A,B]*C + B*[A,C]
Adjoint operations applied to the commutator are properly applied to the
arguments:
>>> Dagger(Commutator(A, B))
-[Dagger(A),Dagger(B)]
References
==========
.. [1] http://en.wikipedia.org/wiki/Commutator
"""
is_commutative = False
def __new__(cls, A, B):
r = cls.eval(A, B)
if r is not None:
return r
obj = Expr.__new__(cls, A, B)
return obj
@classmethod
def eval(cls, a, b):
if not (a and b):
return S.Zero
if a == b:
return S.Zero
if a.is_commutative or b.is_commutative:
return S.Zero
# [xA,yB] -> xy*[A,B]
# from sympy.physics.qmul import QMul
ca, nca = a.args_cnc()
cb, ncb = b.args_cnc()
c_part = ca + cb
if c_part:
return Mul(Mul(*c_part), cls(Mul._from_args(nca), Mul._from_args(ncb)))
# Canonical ordering of arguments
# The Commutator [A, B] is in canonical form if A < B.
if a.compare(b) == 1:
return S.NegativeOne*cls(b, a)
def _eval_expand_commutator(self, **hints):
A = self.args[0]
B = self.args[1]
if isinstance(A, Add):
# [A + B, C] -> [A, C] + [B, C]
sargs = []
for term in A.args:
comm = Commutator(term, B)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(B, Add):
# [A, B + C] -> [A, B] + [A, C]
sargs = []
for term in B.args:
comm = Commutator(A, term)
if isinstance(comm, Commutator):
comm = comm._eval_expand_commutator()
sargs.append(comm)
return Add(*sargs)
elif isinstance(A, Mul):
# [A*B, C] -> A*[B, C] + [A, C]*B
a = A.args[0]
b = Mul(*A.args[1:])
c = B
comm1 = Commutator(b, c)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(a, comm1)
second = Mul(comm2, b)
return Add(first, second)
elif isinstance(B, Mul):
# [A, B*C] -> [A, B]*C + B*[A, C]
a = A
b = B.args[0]
c = Mul(*B.args[1:])
comm1 = Commutator(a, b)
comm2 = Commutator(a, c)
if isinstance(comm1, Commutator):
comm1 = comm1._eval_expand_commutator()
if isinstance(comm2, Commutator):
comm2 = comm2._eval_expand_commutator()
first = Mul(comm1, c)
second = Mul(b, comm2)
return Add(first, second)
# No changes, so return self
return self
def doit(self, **hints):
""" Evaluate commutator """
A = self.args[0]
B = self.args[1]
if isinstance(A, Operator) and isinstance(B, Operator):
try:
comm = A._eval_commutator(B, **hints)
except NotImplementedError:
try:
comm = -1*B._eval_commutator(A, **hints)
except NotImplementedError:
comm = None
if comm is not None:
return comm.doit(**hints)
return (A*B - B*A).doit(**hints)
def _eval_adjoint(self):
return Commutator(Dagger(self.args[1]), Dagger(self.args[0]))
def _sympyrepr(self, printer, *args):
return "%s(%s,%s)" % (
self.__class__.__name__, printer._print(
self.args[0]), printer._print(self.args[1])
)
def _sympystr(self, printer, *args):
return "[%s,%s]" % (self.args[0], self.args[1])
def _pretty(self, printer, *args):
pform = printer._print(self.args[0], *args)
pform = prettyForm(*pform.right((prettyForm(u(',')))))
pform = prettyForm(*pform.right((printer._print(self.args[1], *args))))
pform = prettyForm(*pform.parens(left='[', right=']'))
return pform
def _latex(self, printer, *args):
return "\\left[%s,%s\\right]" % tuple([
printer._print(arg, *args) for arg in self.args])
|
vadimtk/chrome4sdp | refs/heads/master | tools/telemetry/third_party/gsutilz/third_party/boto/tests/integration/dynamodb/test_layer2.py | 114 | # Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Tests for Layer2 of Amazon DynamoDB
"""
import time
import uuid
from decimal import Decimal
from tests.unit import unittest
from boto.dynamodb.exceptions import DynamoDBKeyNotFoundError
from boto.dynamodb.exceptions import DynamoDBConditionalCheckFailedError
from boto.dynamodb.layer2 import Layer2
from boto.dynamodb.types import get_dynamodb_type, Binary
from boto.dynamodb.condition import BEGINS_WITH, CONTAINS, GT
from boto.compat import six, long_type
class DynamoDBLayer2Test(unittest.TestCase):
dynamodb = True
def setUp(self):
self.dynamodb = Layer2()
self.hash_key_name = 'forum_name'
self.hash_key_proto_value = ''
self.range_key_name = 'subject'
self.range_key_proto_value = ''
self.table_name = 'sample_data_%s' % int(time.time())
def create_sample_table(self):
schema = self.dynamodb.create_schema(
self.hash_key_name, self.hash_key_proto_value,
self.range_key_name,
self.range_key_proto_value)
table = self.create_table(self.table_name, schema, 5, 5)
table.refresh(wait_for_active=True)
return table
def create_table(self, table_name, schema, read_units, write_units):
result = self.dynamodb.create_table(table_name, schema, read_units, write_units)
self.addCleanup(self.dynamodb.delete_table, result)
return result
def test_layer2_basic(self):
print('--- running Amazon DynamoDB Layer2 tests ---')
c = self.dynamodb
# First create a schema for the table
schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
self.range_key_name,
self.range_key_proto_value)
# Create another schema without a range key
schema2 = c.create_schema('post_id', '')
# Now create a table
index = int(time.time())
table_name = 'test-%d' % index
read_units = 5
write_units = 5
table = self.create_table(table_name, schema, read_units, write_units)
assert table.name == table_name
assert table.schema.hash_key_name == self.hash_key_name
assert table.schema.hash_key_type == get_dynamodb_type(self.hash_key_proto_value)
assert table.schema.range_key_name == self.range_key_name
assert table.schema.range_key_type == get_dynamodb_type(self.range_key_proto_value)
assert table.read_units == read_units
assert table.write_units == write_units
assert table.item_count == 0
assert table.size_bytes == 0
# Create the second table
table2_name = 'test-%d' % (index + 1)
table2 = self.create_table(table2_name, schema2, read_units, write_units)
# Wait for table to become active
table.refresh(wait_for_active=True)
table2.refresh(wait_for_active=True)
# List tables and make sure new one is there
table_names = c.list_tables()
assert table_name in table_names
assert table2_name in table_names
# Update the tables ProvisionedThroughput
new_read_units = 10
new_write_units = 5
table.update_throughput(new_read_units, new_write_units)
# Wait for table to be updated
table.refresh(wait_for_active=True)
assert table.read_units == new_read_units
assert table.write_units == new_write_units
# Put an item
item1_key = 'Amazon DynamoDB'
item1_range = 'DynamoDB Thread 1'
item1_attrs = {
'Message': 'DynamoDB thread 1 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'Public': True,
'Tags': set(['index', 'primarykey', 'table']),
'LastPostDateTime': '12/9/2011 11:36:03 PM'}
# Test a few corner cases with new_item
# Try supplying a hash_key as an arg and as an item in attrs
item1_attrs[self.hash_key_name] = 'foo'
foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
assert foobar_item.hash_key == item1_key
# Try supplying a range_key as an arg and as an item in attrs
item1_attrs[self.range_key_name] = 'bar'
foobar_item = table.new_item(item1_key, item1_range, item1_attrs)
assert foobar_item.range_key == item1_range
# Try supplying hash and range key in attrs dict
foobar_item = table.new_item(attrs=item1_attrs)
assert foobar_item.hash_key == 'foo'
assert foobar_item.range_key == 'bar'
del item1_attrs[self.hash_key_name]
del item1_attrs[self.range_key_name]
item1 = table.new_item(item1_key, item1_range, item1_attrs)
# make sure the put() succeeds
try:
item1.put()
except c.layer1.ResponseError as e:
raise Exception("Item put failed: %s" % e)
# Try to get an item that does not exist.
self.assertRaises(DynamoDBKeyNotFoundError,
table.get_item, 'bogus_key', item1_range)
# Now do a consistent read and check results
item1_copy = table.get_item(item1_key, item1_range,
consistent_read=True)
assert item1_copy.hash_key == item1.hash_key
assert item1_copy.range_key == item1.range_key
for attr_name in item1_attrs:
val = item1_copy[attr_name]
if isinstance(val, (int, long_type, float, six.string_types)):
assert val == item1[attr_name]
# Try retrieving only select attributes
attributes = ['Message', 'Views']
item1_small = table.get_item(item1_key, item1_range,
attributes_to_get=attributes,
consistent_read=True)
for attr_name in item1_small:
# The item will include the attributes we asked for as
# well as the hashkey and rangekey, so filter those out.
if attr_name not in (item1_small.hash_key_name,
item1_small.range_key_name):
assert attr_name in attributes
self.assertTrue(table.has_item(item1_key, range_key=item1_range,
consistent_read=True))
# Try to delete the item with the wrong Expected value
expected = {'Views': 1}
self.assertRaises(DynamoDBConditionalCheckFailedError,
item1.delete, expected_value=expected)
# Try to delete a value while expecting a non-existant attribute
expected = {'FooBar': True}
try:
item1.delete(expected_value=expected)
except c.layer1.ResponseError:
pass
# Now update the existing object
item1.add_attribute('Replies', 2)
removed_attr = 'Public'
item1.delete_attribute(removed_attr)
removed_tag = item1_attrs['Tags'].copy().pop()
item1.delete_attribute('Tags', set([removed_tag]))
replies_by_set = set(['Adam', 'Arnie'])
item1.put_attribute('RepliesBy', replies_by_set)
retvals = item1.save(return_values='ALL_OLD')
# Need more tests here for variations on return_values
assert 'Attributes' in retvals
# Check for correct updates
item1_updated = table.get_item(item1_key, item1_range,
consistent_read=True)
assert item1_updated['Replies'] == item1_attrs['Replies'] + 2
self.assertFalse(removed_attr in item1_updated)
self.assertTrue(removed_tag not in item1_updated['Tags'])
self.assertTrue('RepliesBy' in item1_updated)
self.assertTrue(item1_updated['RepliesBy'] == replies_by_set)
# Put a few more items into the table
item2_key = 'Amazon DynamoDB'
item2_range = 'DynamoDB Thread 2'
item2_attrs = {
'Message': 'DynamoDB thread 2 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'Tags': set(["index", "primarykey", "table"]),
'LastPost2DateTime': '12/9/2011 11:36:03 PM'}
item2 = table.new_item(item2_key, item2_range, item2_attrs)
item2.put()
item3_key = 'Amazon S3'
item3_range = 'S3 Thread 1'
item3_attrs = {
'Message': 'S3 Thread 1 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'Tags': set(['largeobject', 'multipart upload']),
'LastPostDateTime': '12/9/2011 11:36:03 PM'
}
item3 = table.new_item(item3_key, item3_range, item3_attrs)
item3.put()
# Put an item into the second table
table2_item1_key = uuid.uuid4().hex
table2_item1_attrs = {
'DateTimePosted': '25/1/2011 12:34:56 PM',
'Text': 'I think boto rocks and so does DynamoDB'
}
table2_item1 = table2.new_item(table2_item1_key,
attrs=table2_item1_attrs)
table2_item1.put()
# Try a few queries
items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
n = 0
for item in items:
n += 1
assert n == 2
assert items.consumed_units > 0
items = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'),
request_limit=1, max_results=1)
n = 0
for item in items:
n += 1
assert n == 1
assert items.consumed_units > 0
# Try a few scans
items = table.scan()
n = 0
for item in items:
n += 1
assert n == 3
assert items.consumed_units > 0
items = table.scan(scan_filter={'Replies': GT(0)})
n = 0
for item in items:
n += 1
assert n == 1
assert items.consumed_units > 0
# Test some integer and float attributes
integer_value = 42
float_value = 345.678
item3['IntAttr'] = integer_value
item3['FloatAttr'] = float_value
# Test booleans
item3['TrueBoolean'] = True
item3['FalseBoolean'] = False
# Test some set values
integer_set = set([1, 2, 3, 4, 5])
float_set = set([1.1, 2.2, 3.3, 4.4, 5.5])
mixed_set = set([1, 2, 3.3, 4, 5.555])
str_set = set(['foo', 'bar', 'fie', 'baz'])
item3['IntSetAttr'] = integer_set
item3['FloatSetAttr'] = float_set
item3['MixedSetAttr'] = mixed_set
item3['StrSetAttr'] = str_set
item3.put()
# Now do a consistent read
item4 = table.get_item(item3_key, item3_range, consistent_read=True)
assert item4['IntAttr'] == integer_value
assert item4['FloatAttr'] == float_value
assert bool(item4['TrueBoolean']) is True
assert bool(item4['FalseBoolean']) is False
# The values will not necessarily be in the same order as when
# we wrote them to the DB.
for i in item4['IntSetAttr']:
assert i in integer_set
for i in item4['FloatSetAttr']:
assert i in float_set
for i in item4['MixedSetAttr']:
assert i in mixed_set
for i in item4['StrSetAttr']:
assert i in str_set
# Try a batch get
batch_list = c.new_batch_list()
batch_list.add_batch(table, [(item2_key, item2_range),
(item3_key, item3_range)])
response = batch_list.submit()
assert len(response['Responses'][table.name]['Items']) == 2
# Try an empty batch get
batch_list = c.new_batch_list()
batch_list.add_batch(table, [])
response = batch_list.submit()
assert response == {}
# Try a few batch write operations
item4_key = 'Amazon S3'
item4_range = 'S3 Thread 2'
item4_attrs = {
'Message': 'S3 Thread 2 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'Tags': set(['largeobject', 'multipart upload']),
'LastPostDateTime': '12/9/2011 11:36:03 PM'
}
item5_key = 'Amazon S3'
item5_range = 'S3 Thread 3'
item5_attrs = {
'Message': 'S3 Thread 3 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'Tags': set(['largeobject', 'multipart upload']),
'LastPostDateTime': '12/9/2011 11:36:03 PM'
}
item4 = table.new_item(item4_key, item4_range, item4_attrs)
item5 = table.new_item(item5_key, item5_range, item5_attrs)
batch_list = c.new_batch_write_list()
batch_list.add_batch(table, puts=[item4, item5])
response = batch_list.submit()
# should really check for unprocessed items
# Do some generator gymnastics
results = table.scan(scan_filter={'Tags': CONTAINS('table')})
assert results.scanned_count == 5
results = table.scan(request_limit=2, max_results=5)
assert results.count == 2
for item in results:
if results.count == 2:
assert results.remaining == 4
results.remaining -= 2
results.next_response()
else:
assert results.count == 4
assert results.remaining in (0, 1)
assert results.count == 4
results = table.scan(request_limit=6, max_results=4)
assert len(list(results)) == 4
assert results.count == 4
batch_list = c.new_batch_write_list()
batch_list.add_batch(table, deletes=[(item4_key, item4_range),
(item5_key, item5_range)])
response = batch_list.submit()
# Try queries
results = table.query('Amazon DynamoDB', range_key_condition=BEGINS_WITH('DynamoDB'))
n = 0
for item in results:
n += 1
assert n == 2
# Try to delete the item with the right Expected value
expected = {'Views': 0}
item1.delete(expected_value=expected)
self.assertFalse(table.has_item(item1_key, range_key=item1_range,
consistent_read=True))
# Now delete the remaining items
ret_vals = item2.delete(return_values='ALL_OLD')
# some additional checks here would be useful
assert ret_vals['Attributes'][self.hash_key_name] == item2_key
assert ret_vals['Attributes'][self.range_key_name] == item2_range
item3.delete()
table2_item1.delete()
print('--- tests completed ---')
def test_binary_attrs(self):
c = self.dynamodb
schema = c.create_schema(self.hash_key_name, self.hash_key_proto_value,
self.range_key_name,
self.range_key_proto_value)
index = int(time.time())
table_name = 'test-%d' % index
read_units = 5
write_units = 5
table = self.create_table(table_name, schema, read_units, write_units)
table.refresh(wait_for_active=True)
item1_key = 'Amazon S3'
item1_range = 'S3 Thread 1'
item1_attrs = {
'Message': 'S3 Thread 1 message text',
'LastPostedBy': 'User A',
'Views': 0,
'Replies': 0,
'Answered': 0,
'BinaryData': Binary(b'\x01\x02\x03\x04'),
'BinarySequence': set([Binary(b'\x01\x02'), Binary(b'\x03\x04')]),
'Tags': set(['largeobject', 'multipart upload']),
'LastPostDateTime': '12/9/2011 11:36:03 PM'
}
item1 = table.new_item(item1_key, item1_range, item1_attrs)
item1.put()
retrieved = table.get_item(item1_key, item1_range, consistent_read=True)
self.assertEqual(retrieved['Message'], 'S3 Thread 1 message text')
self.assertEqual(retrieved['Views'], 0)
self.assertEqual(retrieved['Tags'],
set(['largeobject', 'multipart upload']))
self.assertEqual(retrieved['BinaryData'], Binary(b'\x01\x02\x03\x04'))
# Also comparable directly to bytes:
self.assertEqual(retrieved['BinaryData'], b'\x01\x02\x03\x04')
self.assertEqual(retrieved['BinarySequence'],
set([Binary(b'\x01\x02'), Binary(b'\x03\x04')]))
def test_put_decimal_attrs(self):
self.dynamodb.use_decimals()
table = self.create_sample_table()
item = table.new_item('foo', 'bar')
item['decimalvalue'] = Decimal('1.12345678912345')
item.put()
retrieved = table.get_item('foo', 'bar')
self.assertEqual(retrieved['decimalvalue'], Decimal('1.12345678912345'))
@unittest.skipIf(six.PY3, "skipping lossy_float_conversion test for Python 3.x")
def test_lossy_float_conversion(self):
table = self.create_sample_table()
item = table.new_item('foo', 'bar')
item['floatvalue'] = 1.12345678912345
item.put()
retrieved = table.get_item('foo', 'bar')['floatvalue']
# Notice how this is not equal to the original value.
self.assertNotEqual(1.12345678912345, retrieved)
# Instead, it's truncated:
self.assertEqual(1.12345678912, retrieved)
def test_large_integers(self):
# It's not just floating point numbers, large integers
        # can trigger rounding issues.
self.dynamodb.use_decimals()
table = self.create_sample_table()
item = table.new_item('foo', 'bar')
item['decimalvalue'] = Decimal('129271300103398600')
item.put()
retrieved = table.get_item('foo', 'bar')
self.assertEqual(retrieved['decimalvalue'], Decimal('129271300103398600'))
# Also comparable directly to an int.
self.assertEqual(retrieved['decimalvalue'], 129271300103398600)
def test_put_single_letter_attr(self):
# When an attr is added that is a single letter, if it overlaps with
# the built-in "types", the decoding used to fall down. Assert that
# it's now working correctly.
table = self.create_sample_table()
item = table.new_item('foo', 'foo1')
item.put_attribute('b', 4)
stored = item.save(return_values='UPDATED_NEW')
self.assertEqual(stored['Attributes'], {'b': 4})
|
florianholzapfel/home-assistant | refs/heads/dev | homeassistant/components/climate/netatmo.py | 12 | """
Support for Netatmo Smart Thermostat.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.netatmo/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.const import TEMP_CELSIUS, ATTR_TEMPERATURE
from homeassistant.components.climate import (
STATE_HEAT, STATE_IDLE, ClimateDevice, PLATFORM_SCHEMA)
from homeassistant.util import Throttle
from homeassistant.loader import get_component
import homeassistant.helpers.config_validation as cv
DEPENDENCIES = ['netatmo']
_LOGGER = logging.getLogger(__name__)
CONF_RELAY = 'relay'
CONF_THERMOSTAT = 'thermostat'
DEFAULT_AWAY_TEMPERATURE = 14
# The default offset is 2 hours (when you use the thermostat itself)
DEFAULT_TIME_OFFSET = 7200
# Return cached results if last scan was less than this time ago
# NetAtmo data is uploaded to the server every hour
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=300)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_RELAY): cv.string,
vol.Optional(CONF_THERMOSTAT, default=[]):
vol.All(cv.ensure_list, [cv.string]),
})
def setup_platform(hass, config, add_callback_devices, discovery_info=None):
"""Setup the NetAtmo Thermostat."""
netatmo = get_component('netatmo')
device = config.get(CONF_RELAY)
import lnetatmo
try:
data = ThermostatData(netatmo.NETATMO_AUTH, device)
for module_name in data.get_module_names():
if CONF_THERMOSTAT in config:
if config[CONF_THERMOSTAT] != [] and \
module_name not in config[CONF_THERMOSTAT]:
continue
add_callback_devices([NetatmoThermostat(data, module_name)])
except lnetatmo.NoDevice:
return None
class NetatmoThermostat(ClimateDevice):
"""Representation a Netatmo thermostat."""
def __init__(self, data, module_name, away_temp=None):
"""Initialize the sensor."""
self._data = data
self._state = None
self._name = module_name
self._target_temperature = None
self._away = None
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._target_temperature
@property
def temperature_unit(self):
"""Return the unit of measurement."""
return TEMP_CELSIUS
@property
def current_temperature(self):
"""Return the current temperature."""
return self._data.current_temperature
@property
def target_temperature(self):
"""Return the temperature we try to reach."""
return self._target_temperature
@property
def current_operation(self):
"""Return the current state of the thermostat."""
state = self._data.thermostatdata.relay_cmd
if state == 0:
return STATE_IDLE
elif state == 100:
return STATE_HEAT
@property
def is_away_mode_on(self):
"""Return true if away mode is on."""
return self._away
def turn_away_mode_on(self):
"""Turn away on."""
mode = "away"
temp = None
self._data.thermostatdata.setthermpoint(mode, temp, endTimeOffset=None)
self._away = True
self.update_ha_state()
def turn_away_mode_off(self):
"""Turn away off."""
mode = "program"
temp = None
self._data.thermostatdata.setthermpoint(mode, temp, endTimeOffset=None)
self._away = False
self.update_ha_state()
def set_temperature(self, endTimeOffset=DEFAULT_TIME_OFFSET, **kwargs):
"""Set new target temperature for 2 hours."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return
mode = "manual"
self._data.thermostatdata.setthermpoint(
mode, temperature, endTimeOffset)
self._target_temperature = temperature
self._away = False
self.update_ha_state()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from NetAtmo API and updates the states."""
self._data.update()
self._target_temperature = self._data.thermostatdata.setpoint_temp
self._away = self._data.setpoint_mode == 'away'
class ThermostatData(object):
"""Get the latest data from Netatmo."""
def __init__(self, auth, device=None):
"""Initialize the data object."""
self.auth = auth
self.thermostatdata = None
self.module_names = []
self.device = device
self.current_temperature = None
self.target_temperature = None
self.setpoint_mode = None
# self.operation =
def get_module_names(self):
"""Return all module available on the API as a list."""
self.update()
if not self.device:
for device in self.thermostatdata.modules:
for module in self.thermostatdata.modules[device].values():
self.module_names.append(module['module_name'])
else:
for module in self.thermostatdata.modules[self.device].values():
self.module_names.append(module['module_name'])
return self.module_names
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Call the NetAtmo API to update the data."""
import lnetatmo
self.thermostatdata = lnetatmo.ThermostatData(self.auth)
self.target_temperature = self.thermostatdata.setpoint_temp
self.setpoint_mode = self.thermostatdata.setpoint_mode
self.current_temperature = self.thermostatdata.temp
|
TejasM/picasso | refs/heads/master | picasso/picasso/index/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
pneerincx/easybuild-framework | refs/heads/master | easybuild/toolchains/intel-para.py | 5 | ##
# Copyright 2012-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for intel compiler toolchain (includes Intel compilers (icc, ifort), Parastation MPICH,
Intel Math Kernel Library (MKL), and Intel FFTW wrappers).
"""
from easybuild.toolchains.ipsmpi import Ipsmpi
from easybuild.toolchains.fft.intelfftw import IntelFFTW
from easybuild.toolchains.linalg.intelmkl import IntelMKL
class IntelPara(Ipsmpi, IntelMKL, IntelFFTW):
"""
Compiler toolchain with Intel compilers (icc/ifort), Parastation MPICH,
Intel Math Kernel Library (MKL) and Intel FFTW wrappers.
"""
NAME = 'intel-para'
|
djangocali/blog-api | refs/heads/master | blog-api/config/__init__.py | 78 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .local import Local # noqa
from .production import Production # noqa
|
atosorigin/ansible | refs/heads/devel | lib/ansible/utils/vars.py | 24 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import keyword
import random
import uuid
from json import dumps
from ansible import constants as C
from ansible import context
from ansible.errors import AnsibleError, AnsibleOptionsError
from ansible.module_utils.six import iteritems, string_types, PY3
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.parsing.splitter import parse_kv
ADDITIONAL_PY2_KEYWORDS = frozenset(("True", "False", "None"))
_MAXSIZE = 2 ** 32
cur_id = 0
node_mac = ("%012x" % uuid.getnode())[:12]
random_int = ("%08x" % random.randint(0, _MAXSIZE))[:8]
def get_unique_id():
global cur_id
cur_id += 1
return "-".join([
node_mac[0:8],
node_mac[8:12],
random_int[0:4],
random_int[4:8],
("%012x" % cur_id)[:12],
])
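# get_unique_id() produces ids shaped like
# '<mac[0:8]>-<mac[8:12]>-<rand[0:4]>-<rand[4:8]>-<12-hex-digit counter>',
# e.g. (illustrative digits only) '0123abcd-ef01-2345-6789-000000000001'.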
def _validate_mutable_mappings(a, b):
"""
Internal convenience function to ensure arguments are MutableMappings
This checks that all arguments are MutableMappings or raises an error
:raises AnsibleError: if one of the arguments is not a MutableMapping
"""
# If this becomes generally needed, change the signature to operate on
# a variable number of arguments instead.
if not (isinstance(a, MutableMapping) and isinstance(b, MutableMapping)):
myvars = []
for x in [a, b]:
try:
myvars.append(dumps(x))
except Exception:
myvars.append(to_native(x))
raise AnsibleError("failed to combine variables, expected dicts but got a '{0}' and a '{1}': \n{2}\n{3}".format(
a.__class__.__name__, b.__class__.__name__, myvars[0], myvars[1])
)
def combine_vars(a, b):
"""
Return a copy of dictionaries of variables based on configured hash behavior
"""
if C.DEFAULT_HASH_BEHAVIOUR == "merge":
return merge_hash(a, b)
else:
# HASH_BEHAVIOUR == 'replace'
_validate_mutable_mappings(a, b)
result = a.copy()
result.update(b)
return result
def merge_hash(x, y, recursive=True, list_merge='replace'):
"""
Return a new dictionary result of the merges of y into x,
so that keys from y take precedence over keys from x.
(x and y aren't modified)
"""
if list_merge not in ('replace', 'keep', 'append', 'prepend', 'append_rp', 'prepend_rp'):
raise AnsibleError("merge_hash: 'list_merge' argument can only be equal to 'replace', 'keep', 'append', 'prepend', 'append_rp' or 'prepend_rp'")
# verify x & y are dicts
_validate_mutable_mappings(x, y)
# to speed things up: if x is empty or equal to y, return y
    # (this `if` can be removed without impacting the function,
    # except for performance)
if x == {} or x == y:
return y.copy()
# in the following we will copy elements from y to x, but
# we don't want to modify x, so we create a copy of it
x = x.copy()
# to speed things up: use dict.update if possible
    # (this `if` can be removed without impacting the function,
    # except for performance)
if not recursive and list_merge == 'replace':
x.update(y)
return x
# insert each element of y in x, overriding the one in x
# (as y has higher priority)
    # we copy elements from y to x instead of x to y because
    # there is a high probability x will be the "default" dict the user
    # wants to "patch" with y,
    # and therefore x will have many more elements than y
for key, y_value in iteritems(y):
# if `key` isn't in x
# update x and move on to the next element of y
if key not in x:
x[key] = y_value
continue
# from this point we know `key` is in x
x_value = x[key]
# if both x's element and y's element are dicts
# recursively "combine" them or override x's with y's element
# depending on the `recursive` argument
# and move on to the next element of y
if isinstance(x_value, MutableMapping) and isinstance(y_value, MutableMapping):
if recursive:
x[key] = merge_hash(x_value, y_value, recursive, list_merge)
else:
x[key] = y_value
continue
# if both x's element and y's element are lists
# "merge" them depending on the `list_merge` argument
# and move on to the next element of y
if isinstance(x_value, MutableSequence) and isinstance(y_value, MutableSequence):
if list_merge == 'replace':
# replace x value by y's one as it has higher priority
x[key] = y_value
elif list_merge == 'append':
x[key] = x_value + y_value
elif list_merge == 'prepend':
x[key] = y_value + x_value
elif list_merge == 'append_rp':
# append all elements from y_value (high prio) to x_value (low prio)
# and remove x_value elements that are also in y_value
                # we don't remove elements from x_value or y_value that were already duplicated
                # (we assume that there is a reason if such duplicates were there)
# _rp stands for "remove present"
x[key] = [z for z in x_value if z not in y_value] + y_value
elif list_merge == 'prepend_rp':
# same as 'append_rp' but y_value elements are prepend
x[key] = y_value + [z for z in x_value if z not in y_value]
# else 'keep'
            # keep x's value even if y is of higher priority;
            # this is done by not changing x[key]
continue
# else just override x's element with y's one
x[key] = y_value
return x
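# Illustrative sketch (not part of the original module): how merge_hash
# combines two dicts under each `list_merge` strategy, assuming it is
# imported from this module.
#
#   x = {'a': [1, 2], 'b': {'c': 1}}
#   y = {'a': [2, 3], 'b': {'d': 2}}
#   merge_hash(x, y)                          # -> {'a': [2, 3], 'b': {'c': 1, 'd': 2}}
#   merge_hash(x, y, list_merge='append')     # -> {'a': [1, 2, 2, 3], 'b': {'c': 1, 'd': 2}}
#   merge_hash(x, y, list_merge='append_rp')  # -> {'a': [1, 2, 3], 'b': {'c': 1, 'd': 2}}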
def load_extra_vars(loader):
extra_vars = {}
for extra_vars_opt in context.CLIARGS.get('extra_vars', tuple()):
data = None
extra_vars_opt = to_text(extra_vars_opt, errors='surrogate_or_strict')
if extra_vars_opt is None or not extra_vars_opt:
continue
if extra_vars_opt.startswith(u"@"):
# Argument is a YAML file (JSON is a subset of YAML)
data = loader.load_from_file(extra_vars_opt[1:])
elif extra_vars_opt[0] in [u'/', u'.']:
raise AnsibleOptionsError("Please prepend extra_vars filename '%s' with '@'" % extra_vars_opt)
elif extra_vars_opt[0] in [u'[', u'{']:
# Arguments as YAML
data = loader.load(extra_vars_opt)
else:
# Arguments as Key-value
data = parse_kv(extra_vars_opt)
if isinstance(data, MutableMapping):
extra_vars = combine_vars(extra_vars, data)
else:
raise AnsibleOptionsError("Invalid extra vars data supplied. '%s' could not be made into a dictionary" % extra_vars_opt)
return extra_vars
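# Illustrative note (not in the original source): the three forms of
# --extra-vars accepted by the branches above, assuming a normal CLI run:
#   --extra-vars '@vars.yml'          -> loaded from a YAML/JSON file
#   --extra-vars '{"key": "value"}'   -> parsed inline as YAML/JSON
#   --extra-vars 'key=value other=2'  -> parsed as key=value pairs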
def load_options_vars(version):
if version is None:
version = 'Unknown'
options_vars = {'ansible_version': version}
attrs = {'check': 'check_mode',
'diff': 'diff_mode',
'forks': 'forks',
'inventory': 'inventory_sources',
'skip_tags': 'skip_tags',
'subset': 'limit',
'tags': 'run_tags',
'verbosity': 'verbosity'}
for attr, alias in attrs.items():
opt = context.CLIARGS.get(attr)
if opt is not None:
options_vars['ansible_%s' % alias] = opt
return options_vars
def _isidentifier_PY3(ident):
if not isinstance(ident, string_types):
return False
# NOTE Python 3.7 offers str.isascii() so switch over to using it once
# we stop supporting 3.5 and 3.6 on the controller
try:
# Python 2 does not allow non-ascii characters in identifiers so unify
# the behavior for Python 3
ident.encode('ascii')
except UnicodeEncodeError:
return False
if not ident.isidentifier():
return False
if keyword.iskeyword(ident):
return False
return True
def _isidentifier_PY2(ident):
if not isinstance(ident, string_types):
return False
if not ident:
return False
if C.INVALID_VARIABLE_NAMES.search(ident):
return False
if keyword.iskeyword(ident) or ident in ADDITIONAL_PY2_KEYWORDS:
return False
return True
if PY3:
isidentifier = _isidentifier_PY3
else:
isidentifier = _isidentifier_PY2
isidentifier.__doc__ = """Determine if string is valid identifier.
The purpose of this function is to be used to validate any variables created in
a play to be valid Python identifiers and to not conflict with Python keywords
to prevent unexpected behavior. Since Python 2 and Python 3 differ in what
a valid identifier is, this function unifies the validation so playbooks are
portable between the two. The following changes were made:
* disallow non-ascii characters (Python 3 allows for them as opposed to Python 2)
* True, False and None are reserved keywords (these are reserved keywords
on Python 3 as opposed to Python 2)
:arg ident: A text string of identifier to check. Note: It is callers
responsibility to convert ident to text if it is not already.
Originally posted at http://stackoverflow.com/a/29586366
"""
|
tvidas/a5 | refs/heads/master | scripts/bin/printalljobs.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# printalljobs.py
import sys
import beanstalkc
from pprint import pprint
#Read and execute global config
sys.path.append('../config')
from config import *
beanstalk = beanstalkc.Connection(host=BSHOST, port=BSPORT)
pprint(beanstalk.stats())
print "tubes: "
print beanstalk.tubes()
if BSMSBOXQ in beanstalk.tubes():
pprint(beanstalk.stats_tube(BSMSBOXQ))
beanstalk.use(BSMSBOXQ)
#next ready one
for i in range(5000):
job = beanstalk.peek(i)
if job is None:
#print "peek ready " + str(i) + " failed"
        pass
else:
print str(i) + "=" +job.body
#pprint(job.stats())
print "next job: "
job = beanstalk.peek_ready()
if job is None:
print "peek ready failed"
else:
print job.body
pprint(job.stats())
print str(beanstalk.stats_tube(BSMSBOXQ)['current-jobs-ready']) + " samples ready to process"
print str(beanstalk.stats_tube(BSMSBOXQ)['current-jobs-reserved']) + " samples are processing"
print str(beanstalk.stats_tube(BSMSBOXQ)['current-jobs-delayed']) + " samples are delayed"
if job is not None:
beanstalk.watch(BSMSBOXQ)
job = beanstalk.reserve()
job.release(delay=10)
else:
print 'malware tube is empty'
|
popazerty/enigma2-4.3 | refs/heads/master | lib/python/Components/Converter/EcmCryptoInfo.py | 15 | #
# EcmCryptoInfo Converter by mcbain // v0.1 // 20111109
#
from Components.Converter.Converter import Converter
from Components.Element import cached
from Components.config import config
from Poll import Poll
import os
ECM_INFO = '/tmp/ecm.info'
old_ecm_mtime = None
data = None
class EcmCryptoInfo(Poll, Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
Poll.__init__(self)
self.active = False
self.visible = config.usage.show_cryptoinfo.value
self.textvalue = ''
self.poll_interval = 2*1000
if self.visible:
self.poll_enabled = True
else:
self.poll_enabled = False
@cached
def getText(self):
if not self.visible:
return ''
ecmdata = self.getEcmData()
return ecmdata
text = property(getText)
def getEcmData(self):
global old_ecm_mtime
global data
try:
ecm_mtime = os.stat(ECM_INFO).st_mtime
except:
ecm_mtime = None
if ecm_mtime != old_ecm_mtime:
old_ecm_mtime = ecm_mtime
data = self.getEcmInfo()
return data
def getEcmInfo(self):
try:
ecm = open(ECM_INFO, 'rb').readlines()
ecminfo = {}
for line in ecm:
d = line.split(':', 1)
if len(d) > 1:
ecminfo[d[0].strip()] = d[1].strip()
			# ecminfo is a dictionary
			if not ecminfo:
return 'No info from emu or FTA'
using = ecminfo.get('using', '')
if using:
# CCcam
if using == 'fta':
return 'Free to Air'
ecmInfoString=''
casys=''
state='Source: '
caid = ecminfo.get('caid', '')
address = ecminfo.get('address', '')
hops = ecminfo.get('hops', '')
ecmtime = ecminfo.get('ecm time', '')
if caid:
				if 'x' in caid:
idx = caid.index('x')
caid = caid[idx+1:]
if len(caid) == 3:
caid = '0%s' % caid
caid = caid.upper()
casys = 'Caid: '+caid
if address:
retaddress = '%s %s' % (_(' Source:'), address)
if address == ('/dev/sci0'):
state = (' Source: Lower slot')
if address == ('/dev/sci1'):
state = (' Source: Upper slot')
if address != ('/dev/sci0') and address != ('/dev/sci1'):
state = retaddress
if len(state) > 28:
state = ('%s...') % state[:25]
if hops:
hops = '%s %s' % (_(' Hops:'), hops)
if ecmtime:
ecmtime = '%s %ss' % (_(' Time:'), ecmtime)
if casys != '':
ecmInfoString = '%s ' % casys
if state != 'Source: ':
ecmInfoString = '%s%s ' % (ecmInfoString, state)
if state == 'Source: ':
ecmInfoString += state
ecmInfoString = '%s%s ' % (ecmInfoString, using)
if hops != '' and hops != ' Hops: 0':
ecmInfoString = '%s%s ' % (ecmInfoString, hops)
if ecmtime != '':
ecmInfoString = '%s%s ' % (ecmInfoString, ecmtime)
self.textvalue = ecmInfoString
else:
return 'No info from emu or unknown emu'
except:
self.textvalue = ''
return self.textvalue
|
michaelgallacher/intellij-community | refs/heads/master | python/testData/editing/sectionIndentInsideGoogleDocStringCustomIndent.after.py | 48 | def f(param):
"""
Args:
param<caret>
""" |
mpld3/mpld3_rewrite | refs/heads/master | mpld3_rewrite/__init__.py | 1 | """
Interactive D3 rendering of matplotlib images
=============================================
Functions: General Use
----------------------
- :func:`fig_to_html` : convert a figure to an html string
- :func:`fig_to_dict` : convert a figure to a dictionary representation
- :func:`save_html` : save a figure to an html file
- :func:`save_json` : save a JSON representation of a figure to file
- :func:`show` : launch a web server to view a d3/html figure representation
Functions: IPython Notebook
---------------------------
- :func:`display` : display a figure in an IPython notebook
- :func:`enable_notebook` : enable automatic D3 display of figures
in the IPython notebook.
- :func:`disable_notebook` : disable automatic D3 display of figures
  in the IPython notebook.
"""
__version__ = '0.1'
from .urls import *
from ._display import *
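# Illustrative sketch (not part of the original module), assuming the
# functions re-exported from ._display match the docstring above:
#
#   import matplotlib.pyplot as plt
#   import mpld3_rewrite as mpld3
#
#   fig, ax = plt.subplots()
#   ax.plot([1, 2, 3], [4, 5, 6])
#   html = mpld3.fig_to_html(fig)        # embeddable d3/html string
#   mpld3.save_html(fig, 'figure.html')  # write a standalone html file
#   mpld3.show()                         # serve the figure in a browser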
|
IKholopov/HackUPC2017 | refs/heads/master | hackupc/static/js/bootstrap/Respond-master/test/test.html.py | 1 |
|
sailthru/stolos | refs/heads/master | stolos/__init__.py | 1 | import logging as _logging
log = _logging.getLogger('stolos')
import os.path as _p
import pkg_resources as _pkg_resources
__version__ = _pkg_resources.get_distribution(
_p.basename(_p.dirname(_p.abspath(__file__)))).version
class Uninitialized(Exception):
msg = (
"Before you use Stolos, please initialize it."
" You probably just want to call stolos.api.initialize()'")
def __getattr__(self, *args, **kwargs):
raise Uninitialized(Uninitialized.msg)
def __repr__(self):
return "Stolos Not Initialized. %s" % Uninitialized.msg
def __str__(self):
return repr(self)
def get_NS():
"""Returns a namespace containing configuration variables. Stolos must be
initialized before NS is set. This ensures that relevant configuration is
properly defined.
Users of stolos can just call stolos.api.initialize()
Developers of stolos need to ensure that either they are using the api or,
for if developing on internals, that stolos.initializer.initialize(...) is
called for the the module(s) you are working on. Keep in mind that only
the api and Stolos's runner.py should initialize Stolos normally
"""
try:
return NS
except NameError:
raise Uninitialized(Uninitialized.msg)
__all__ = ['api']
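# Illustrative sketch (not part of the original module): the intended
# initialization flow described in get_NS, assuming stolos.api exposes
# the initialize() entry point mentioned above.
#
#   import stolos
#   from stolos import api
#
#   api.initialize()       # populates the configuration namespace
#   ns = stolos.get_NS()   # safe only after initialization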
|
wartman4404/servo | refs/heads/master | tests/wpt/css-tests/tools/html5lib/html5lib/tests/test_serializer.py | 451 | from __future__ import absolute_import, division, unicode_literals
import json
import unittest
from .support import get_data_files
try:
unittest.TestCase.assertEqual
except AttributeError:
unittest.TestCase.assertEqual = unittest.TestCase.assertEquals
import html5lib
from html5lib import constants
from html5lib.serializer import HTMLSerializer, serialize
from html5lib.treewalkers._base import TreeWalker
optionals_loaded = []
try:
from lxml import etree
optionals_loaded.append("lxml")
except ImportError:
pass
default_namespace = constants.namespaces["html"]
class JsonWalker(TreeWalker):
def __iter__(self):
for token in self.tree:
type = token[0]
if type == "StartTag":
if len(token) == 4:
namespace, name, attrib = token[1:4]
else:
namespace = default_namespace
name, attrib = token[1:3]
yield self.startTag(namespace, name, self._convertAttrib(attrib))
elif type == "EndTag":
if len(token) == 3:
namespace, name = token[1:3]
else:
namespace = default_namespace
name = token[1]
yield self.endTag(namespace, name)
elif type == "EmptyTag":
if len(token) == 4:
namespace, name, attrib = token[1:]
else:
namespace = default_namespace
name, attrib = token[1:]
for token in self.emptyTag(namespace, name, self._convertAttrib(attrib)):
yield token
elif type == "Comment":
yield self.comment(token[1])
elif type in ("Characters", "SpaceCharacters"):
for token in self.text(token[1]):
yield token
elif type == "Doctype":
if len(token) == 4:
yield self.doctype(token[1], token[2], token[3])
elif len(token) == 3:
yield self.doctype(token[1], token[2])
else:
yield self.doctype(token[1])
else:
raise ValueError("Unknown token type: " + type)
def _convertAttrib(self, attribs):
"""html5lib tree-walkers use a dict of (namespace, name): value for
attributes, but JSON cannot represent this. Convert from the format
in the serializer tests (a list of dicts with "namespace", "name",
and "value" as keys) to html5lib's tree-walker format."""
attrs = {}
for attrib in attribs:
name = (attrib["namespace"], attrib["name"])
assert(name not in attrs)
attrs[name] = attrib["value"]
return attrs
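# Illustrative note (not in the original source): _convertAttrib turns
#   [{"namespace": None, "name": "class", "value": "foo"}]
# into html5lib's tree-walker attribute format
#   {(None, "class"): "foo"}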
def serialize_html(input, options):
options = dict([(str(k), v) for k, v in options.items()])
stream = JsonWalker(input)
serializer = HTMLSerializer(alphabetical_attributes=True, **options)
return serializer.render(stream, options.get("encoding", None))
def runSerializerTest(input, expected, options):
encoding = options.get("encoding", None)
if encoding:
encode = lambda x: x.encode(encoding)
expected = list(map(encode, expected))
result = serialize_html(input, options)
if len(expected) == 1:
assert expected[0] == result, "Expected:\n%s\nActual:\n%s\nOptions:\n%s" % (expected[0], result, str(options))
elif result not in expected:
assert False, "Expected: %s, Received: %s" % (expected, result)
class EncodingTestCase(unittest.TestCase):
def throwsWithLatin1(self, input):
self.assertRaises(UnicodeEncodeError, serialize_html, input, {"encoding": "iso-8859-1"})
def testDoctypeName(self):
self.throwsWithLatin1([["Doctype", "\u0101"]])
def testDoctypePublicId(self):
self.throwsWithLatin1([["Doctype", "potato", "\u0101"]])
def testDoctypeSystemId(self):
self.throwsWithLatin1([["Doctype", "potato", "potato", "\u0101"]])
def testCdataCharacters(self):
runSerializerTest([["StartTag", "http://www.w3.org/1999/xhtml", "style", {}], ["Characters", "\u0101"]],
["<style>ā"], {"encoding": "iso-8859-1"})
def testCharacters(self):
runSerializerTest([["Characters", "\u0101"]],
["ā"], {"encoding": "iso-8859-1"})
def testStartTagName(self):
self.throwsWithLatin1([["StartTag", "http://www.w3.org/1999/xhtml", "\u0101", []]])
def testEmptyTagName(self):
self.throwsWithLatin1([["EmptyTag", "http://www.w3.org/1999/xhtml", "\u0101", []]])
def testAttributeName(self):
self.throwsWithLatin1([["StartTag", "http://www.w3.org/1999/xhtml", "span", [{"namespace": None, "name": "\u0101", "value": "potato"}]]])
def testAttributeValue(self):
runSerializerTest([["StartTag", "http://www.w3.org/1999/xhtml", "span",
[{"namespace": None, "name": "potato", "value": "\u0101"}]]],
["<span potato=ā>"], {"encoding": "iso-8859-1"})
def testEndTagName(self):
self.throwsWithLatin1([["EndTag", "http://www.w3.org/1999/xhtml", "\u0101"]])
def testComment(self):
self.throwsWithLatin1([["Comment", "\u0101"]])
if "lxml" in optionals_loaded:
class LxmlTestCase(unittest.TestCase):
def setUp(self):
self.parser = etree.XMLParser(resolve_entities=False)
self.treewalker = html5lib.getTreeWalker("lxml")
self.serializer = HTMLSerializer()
def testEntityReplacement(self):
doc = """<!DOCTYPE html SYSTEM "about:legacy-compat"><html>β</html>"""
tree = etree.fromstring(doc, parser=self.parser).getroottree()
result = serialize(tree, tree="lxml", omit_optional_tags=False)
self.assertEqual("""<!DOCTYPE html SYSTEM "about:legacy-compat"><html>\u03B2</html>""", result)
def testEntityXML(self):
doc = """<!DOCTYPE html SYSTEM "about:legacy-compat"><html>></html>"""
tree = etree.fromstring(doc, parser=self.parser).getroottree()
result = serialize(tree, tree="lxml", omit_optional_tags=False)
self.assertEqual("""<!DOCTYPE html SYSTEM "about:legacy-compat"><html>></html>""", result)
def testEntityNoResolve(self):
doc = """<!DOCTYPE html SYSTEM "about:legacy-compat"><html>β</html>"""
tree = etree.fromstring(doc, parser=self.parser).getroottree()
result = serialize(tree, tree="lxml", omit_optional_tags=False,
resolve_entities=False)
self.assertEqual("""<!DOCTYPE html SYSTEM "about:legacy-compat"><html>β</html>""", result)
def test_serializer():
for filename in get_data_files('serializer', '*.test'):
with open(filename) as fp:
tests = json.load(fp)
for index, test in enumerate(tests['tests']):
yield runSerializerTest, test["input"], test["expected"], test.get("options", {})
|
espadrine/opera | refs/heads/master | chromium/src/tools/site_compare/scrapers/chrome/__init__.py | 179 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Selects the appropriate scraper for Chrome."""
def GetScraper(version):
"""Returns the scraper module for the given version.
Args:
version: version string of Chrome, or None for most recent
Returns:
scrape module for given version
"""
if version is None:
version = "0.1.101.0"
parsed_version = [int(x) for x in version.split(".")]
if (parsed_version[0] > 0 or
parsed_version[1] > 1 or
parsed_version[2] > 97 or
parsed_version[3] > 0):
scraper_version = "chrome011010"
else:
scraper_version = "chrome01970"
return __import__(scraper_version, globals(), locals(), [''])
# if invoked rather than imported, test
if __name__ == "__main__":
print GetScraper("0.1.101.0").version
|
zhuwenping/python-for-android | refs/heads/master | python3-alpha/python3-src/Lib/test/test_reprlib.py | 56 | """
Test cases for the repr module
Nick Mathewson
"""
import sys
import os
import shutil
import unittest
from test.support import run_unittest
from reprlib import repr as r # Don't shadow builtin repr
from reprlib import Repr
from reprlib import recursive_repr
def nestedTuple(nesting):
t = ()
for i in range(nesting):
t = (t,)
return t
class ReprTests(unittest.TestCase):
def test_string(self):
eq = self.assertEqual
eq(r("abc"), "'abc'")
eq(r("abcdefghijklmnop"),"'abcdefghijklmnop'")
s = "a"*30+"b"*30
expected = repr(s)[:13] + "..." + repr(s)[-14:]
eq(r(s), expected)
eq(r("\"'"), repr("\"'"))
s = "\""*30+"'"*100
expected = repr(s)[:13] + "..." + repr(s)[-14:]
eq(r(s), expected)
def test_tuple(self):
eq = self.assertEqual
eq(r((1,)), "(1,)")
t3 = (1, 2, 3)
eq(r(t3), "(1, 2, 3)")
r2 = Repr()
r2.maxtuple = 2
expected = repr(t3)[:-2] + "...)"
eq(r2.repr(t3), expected)
def test_container(self):
from array import array
from collections import deque
eq = self.assertEqual
# Tuples give up after 6 elements
eq(r(()), "()")
eq(r((1,)), "(1,)")
eq(r((1, 2, 3)), "(1, 2, 3)")
eq(r((1, 2, 3, 4, 5, 6)), "(1, 2, 3, 4, 5, 6)")
eq(r((1, 2, 3, 4, 5, 6, 7)), "(1, 2, 3, 4, 5, 6, ...)")
# Lists give up after 6 as well
eq(r([]), "[]")
eq(r([1]), "[1]")
eq(r([1, 2, 3]), "[1, 2, 3]")
eq(r([1, 2, 3, 4, 5, 6]), "[1, 2, 3, 4, 5, 6]")
eq(r([1, 2, 3, 4, 5, 6, 7]), "[1, 2, 3, 4, 5, 6, ...]")
# Sets give up after 6 as well
eq(r(set([])), "set([])")
eq(r(set([1])), "set([1])")
eq(r(set([1, 2, 3])), "set([1, 2, 3])")
eq(r(set([1, 2, 3, 4, 5, 6])), "set([1, 2, 3, 4, 5, 6])")
eq(r(set([1, 2, 3, 4, 5, 6, 7])), "set([1, 2, 3, 4, 5, 6, ...])")
# Frozensets give up after 6 as well
eq(r(frozenset([])), "frozenset([])")
eq(r(frozenset([1])), "frozenset([1])")
eq(r(frozenset([1, 2, 3])), "frozenset([1, 2, 3])")
eq(r(frozenset([1, 2, 3, 4, 5, 6])), "frozenset([1, 2, 3, 4, 5, 6])")
eq(r(frozenset([1, 2, 3, 4, 5, 6, 7])), "frozenset([1, 2, 3, 4, 5, 6, ...])")
# collections.deque after 6
eq(r(deque([1, 2, 3, 4, 5, 6, 7])), "deque([1, 2, 3, 4, 5, 6, ...])")
# Dictionaries give up after 4.
eq(r({}), "{}")
d = {'alice': 1, 'bob': 2, 'charles': 3, 'dave': 4}
eq(r(d), "{'alice': 1, 'bob': 2, 'charles': 3, 'dave': 4}")
d['arthur'] = 1
eq(r(d), "{'alice': 1, 'arthur': 1, 'bob': 2, 'charles': 3, ...}")
# array.array after 5.
eq(r(array('i')), "array('i', [])")
eq(r(array('i', [1])), "array('i', [1])")
eq(r(array('i', [1, 2])), "array('i', [1, 2])")
eq(r(array('i', [1, 2, 3])), "array('i', [1, 2, 3])")
eq(r(array('i', [1, 2, 3, 4])), "array('i', [1, 2, 3, 4])")
eq(r(array('i', [1, 2, 3, 4, 5])), "array('i', [1, 2, 3, 4, 5])")
eq(r(array('i', [1, 2, 3, 4, 5, 6])),
"array('i', [1, 2, 3, 4, 5, ...])")
def test_numbers(self):
eq = self.assertEqual
eq(r(123), repr(123))
eq(r(123), repr(123))
eq(r(1.0/3), repr(1.0/3))
n = 10**100
expected = repr(n)[:18] + "..." + repr(n)[-19:]
eq(r(n), expected)
def test_instance(self):
eq = self.assertEqual
i1 = ClassWithRepr("a")
eq(r(i1), repr(i1))
i2 = ClassWithRepr("x"*1000)
expected = repr(i2)[:13] + "..." + repr(i2)[-14:]
eq(r(i2), expected)
i3 = ClassWithFailingRepr()
eq(r(i3), ("<ClassWithFailingRepr instance at %x>"%id(i3)))
s = r(ClassWithFailingRepr)
self.assertTrue(s.startswith("<class "))
self.assertTrue(s.endswith(">"))
self.assertIn(s.find("..."), [12, 13])
def test_lambda(self):
self.assertTrue(repr(lambda x: x).startswith(
"<function <lambda"))
# XXX anonymous functions? see func_repr
def test_builtin_function(self):
eq = self.assertEqual
# Functions
eq(repr(hash), '<built-in function hash>')
# Methods
self.assertTrue(repr(''.split).startswith(
'<built-in method split of str object at 0x'))
def test_range(self):
eq = self.assertEqual
eq(repr(range(1)), 'range(0, 1)')
eq(repr(range(1, 2)), 'range(1, 2)')
eq(repr(range(1, 4, 3)), 'range(1, 4, 3)')
def test_nesting(self):
eq = self.assertEqual
# everything is meant to give up after 6 levels.
eq(r([[[[[[[]]]]]]]), "[[[[[[[]]]]]]]")
eq(r([[[[[[[[]]]]]]]]), "[[[[[[[...]]]]]]]")
eq(r(nestedTuple(6)), "(((((((),),),),),),)")
eq(r(nestedTuple(7)), "(((((((...),),),),),),)")
eq(r({ nestedTuple(5) : nestedTuple(5) }),
"{((((((),),),),),): ((((((),),),),),)}")
eq(r({ nestedTuple(6) : nestedTuple(6) }),
"{((((((...),),),),),): ((((((...),),),),),)}")
eq(r([[[[[[{}]]]]]]), "[[[[[[{}]]]]]]")
eq(r([[[[[[[{}]]]]]]]), "[[[[[[[...]]]]]]]")
def test_cell(self):
# XXX Hmm? How to get at a cell object?
pass
def test_descriptors(self):
eq = self.assertEqual
# method descriptors
eq(repr(dict.items), "<method 'items' of 'dict' objects>")
# XXX member descriptors
# XXX attribute descriptors
# XXX slot descriptors
# static and class methods
class C:
def foo(cls): pass
x = staticmethod(C.foo)
self.assertTrue(repr(x).startswith('<staticmethod object at 0x'))
x = classmethod(C.foo)
self.assertTrue(repr(x).startswith('<classmethod object at 0x'))
def test_unsortable(self):
# Repr.repr() used to call sorted() on sets, frozensets and dicts
# without taking into account that not all objects are comparable
x = set([1j, 2j, 3j])
y = frozenset(x)
z = {1j: 1, 2j: 2}
r(x)
r(y)
r(z)
def touch(path, text=''):
fp = open(path, 'w')
fp.write(text)
fp.close()
class LongReprTest(unittest.TestCase):
def setUp(self):
longname = 'areallylongpackageandmodulenametotestreprtruncation'
self.pkgname = os.path.join(longname)
self.subpkgname = os.path.join(longname, longname)
# Make the package and subpackage
shutil.rmtree(self.pkgname, ignore_errors=True)
os.mkdir(self.pkgname)
touch(os.path.join(self.pkgname, '__init__.py'))
shutil.rmtree(self.subpkgname, ignore_errors=True)
os.mkdir(self.subpkgname)
touch(os.path.join(self.subpkgname, '__init__.py'))
# Remember where we are
self.here = os.getcwd()
sys.path.insert(0, self.here)
def tearDown(self):
actions = []
for dirpath, dirnames, filenames in os.walk(self.pkgname):
for name in dirnames + filenames:
actions.append(os.path.join(dirpath, name))
actions.append(self.pkgname)
actions.sort()
actions.reverse()
for p in actions:
if os.path.isdir(p):
os.rmdir(p)
else:
os.remove(p)
del sys.path[0]
def test_module(self):
eq = self.assertEqual
touch(os.path.join(self.subpkgname, self.pkgname + '.py'))
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
eq(repr(areallylongpackageandmodulenametotestreprtruncation),
"<module '%s' from '%s'>" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
eq(repr(sys), "<module 'sys' (built-in)>")
def test_type(self):
eq = self.assertEqual
touch(os.path.join(self.subpkgname, 'foo.py'), '''\
class foo(object):
pass
''')
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import foo
eq(repr(foo.foo),
"<class '%s.foo'>" % foo.__name__)
def test_object(self):
# XXX Test the repr of a type with a really long tp_name but with no
# tp_repr. WIBNI we had ::Inline? :)
pass
def test_class(self):
touch(os.path.join(self.subpkgname, 'bar.py'), '''\
class bar:
pass
''')
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import bar
# Module name may be prefixed with "test.", depending on how run.
self.assertEqual(repr(bar.bar), "<class '%s.bar'>" % bar.__name__)
def test_instance(self):
touch(os.path.join(self.subpkgname, 'baz.py'), '''\
class baz:
pass
''')
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import baz
ibaz = baz.baz()
self.assertTrue(repr(ibaz).startswith(
"<%s.baz object at 0x" % baz.__name__))
def test_method(self):
eq = self.assertEqual
touch(os.path.join(self.subpkgname, 'qux.py'), '''\
class aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa:
def amethod(self): pass
''')
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import qux
# Unbound methods first
self.assertTrue(repr(qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod).startswith(
'<function amethod'))
# Bound method next
iqux = qux.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa()
self.assertTrue(repr(iqux.amethod).startswith(
'<bound method aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.amethod of <%s.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa object at 0x' \
% (qux.__name__,) ))
def test_builtin_function(self):
# XXX test built-in functions and methods with really long names
pass
class ClassWithRepr:
def __init__(self, s):
self.s = s
def __repr__(self):
return "ClassWithRepr(%r)" % self.s
class ClassWithFailingRepr:
def __repr__(self):
raise Exception("This should be caught by Repr.repr_instance")
class MyContainer:
'Helper class for TestRecursiveRepr'
def __init__(self, values):
self.values = list(values)
def append(self, value):
self.values.append(value)
@recursive_repr()
def __repr__(self):
return '<' + ', '.join(map(str, self.values)) + '>'
class MyContainer2(MyContainer):
@recursive_repr('+++')
def __repr__(self):
return '<' + ', '.join(map(str, self.values)) + '>'
class TestRecursiveRepr(unittest.TestCase):
def test_recursive_repr(self):
m = MyContainer(list('abcde'))
m.append(m)
m.append('x')
m.append(m)
self.assertEqual(repr(m), '<a, b, c, d, e, ..., x, ...>')
m = MyContainer2(list('abcde'))
m.append(m)
m.append('x')
m.append(m)
self.assertEqual(repr(m), '<a, b, c, d, e, +++, x, +++>')
def test_main():
run_unittest(ReprTests)
run_unittest(LongReprTest)
run_unittest(TestRecursiveRepr)
if __name__ == "__main__":
test_main()
|
Avicennasis/AvicBot | refs/heads/master | misc/mysandboxes.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
This bot resets a (user) sandbox with predefined text.
This script understands the following command-line arguments:
¶ms;
Furthermore, the following command line parameters are supported:
-hours:# Use this parameter to make the script repeat itself
after # hours. Hours can be defined as a decimal. 0.01
hours are 36 seconds; 0.1 are 6 minutes.
-delay:# Use this parameter for a wait time after the last edit
was made. If no parameter is given it takes it from
hours and limits it between 5 and 15 minutes.
The minimum delay time is 5 minutes.
-text The text to substitute into the sandbox; you can use this
      when you haven't configured clean_sandbox for your wiki.
-summary Summary of the edit made by the bot.
"""
#
# (C) Leonardo Gregianin, 2006
# (C) Wikipedian, 2006-2007
# (C) Andre Engels, 2007
# (C) Siebrand Mazeland, 2007
# (C) xqt, 2009-2014
# (C) Dr. Trigon, 2012
# (C) Pywikibot team, 2012-2014
#
# Distributed under the terms of the MIT license.
#
from __future__ import division, unicode_literals
__version__ = '$Id: b6d9824feba3246bb9ef2c41134696378c2e84e0 $'
#
import time
import datetime
import pywikibot
from pywikibot import i18n, Bot, pagegenerators
content = {
'commons': u'{{User:Avicennasis/sandboxnotice}}\n==Test below here==',
'meta': u'{{User:Avicennasis/sandboxnotice}}\n==Test below here==',
'en': u'{{User:Avicennasis/sandboxnotice}}\n==Test below here==',
'eo': u'{{Uzanto:Avicennasis/sandboxnotice}}\n==Test below here==',
'simple': u'{{User:Avicennasis/sandboxnotice}}\n==Test below here==',
'sco': u'{{User:Avicennasis/sandboxnotice}}\n==Test below here==',
}
sandboxTitle = {
'commons': [u'User talk:Avicennasis/sandbox', 'User talk:Avicennasis/sandbox2', 'User talk:Avicennasis/sandbox3', 'User talk:Avicennasis/sandbox4', 'User talk:Avicennasis/sandbox5', 'User talk:Avicennasis/sandbox6', 'User talk:Avicennasis/sandbox7', 'User talk:Avicennasis/sandbox8', 'User talk:Avicennasis/sandbox9', 'User talk:Avicennasis/sandbox10', 'User:Avicennasis/sandbox', 'User:Avicennasis/sandbox2', 'User:Avicennasis/sandbox3', 'User:Avicennasis/sandbox4', 'User:Avicennasis/sandbox5', 'User:Avicennasis/sandbox6', 'User:Avicennasis/sandbox7', 'User:Avicennasis/sandbox8', 'User:Avicennasis/sandbox9', 'User:Avicennasis/sandbox10'],
'meta': [u'User talk:Avicennasis/sandbox', 'User talk:Avicennasis/sandbox2', 'User talk:Avicennasis/sandbox3', 'User talk:Avicennasis/sandbox4', 'User talk:Avicennasis/sandbox5', 'User talk:Avicennasis/sandbox6', 'User talk:Avicennasis/sandbox7', 'User talk:Avicennasis/sandbox8', 'User talk:Avicennasis/sandbox9', 'User talk:Avicennasis/sandbox10', 'User:Avicennasis/sandbox', 'User:Avicennasis/sandbox2', 'User:Avicennasis/sandbox3', 'User:Avicennasis/sandbox4', 'User:Avicennasis/sandbox5', 'User:Avicennasis/sandbox6', 'User:Avicennasis/sandbox7', 'User:Avicennasis/sandbox8', 'User:Avicennasis/sandbox9', 'User:Avicennasis/sandbox10'],
'en': [u'User talk:Avicennasis/sandbox', 'User talk:Avicennasis/sandbox2', 'User talk:Avicennasis/sandbox3', 'User talk:Avicennasis/sandbox4', 'User talk:Avicennasis/sandbox5', 'User talk:Avicennasis/sandbox6', 'User talk:Avicennasis/sandbox7', 'User talk:Avicennasis/sandbox8', 'User talk:Avicennasis/sandbox9', 'User talk:Avicennasis/sandbox10', 'User:Avicennasis/sandbox', 'User:Avicennasis/sandbox2', 'User:Avicennasis/sandbox3', 'User:Avicennasis/sandbox4', 'User:Avicennasis/sandbox5', 'User:Avicennasis/sandbox6', 'User:Avicennasis/sandbox7', 'User:Avicennasis/sandbox8', 'User:Avicennasis/sandbox9', 'User:Avicennasis/sandbox10'],
'eo': [u'Uzanto-Diskuto:Avicennasis/sandbox', 'Uzanto-Diskuto:Avicennasis/sandbox2', 'Uzanto-Diskuto:Avicennasis/sandbox3', 'Uzanto-Diskuto:Avicennasis/sandbox4', 'Uzanto-Diskuto:Avicennasis/sandbox5', 'Uzanto-Diskuto:Avicennasis/sandbox6', 'Uzanto-Diskuto:Avicennasis/sandbox7', 'Uzanto-Diskuto:Avicennasis/sandbox8', 'Uzanto-Diskuto:Avicennasis/sandbox9', 'Uzanto-Diskuto:Avicennasis/sandbox10', 'Uzanto:Avicennasis/sandbox', 'Uzanto:Avicennasis/sandbox2', 'Uzanto:Avicennasis/sandbox3', 'Uzanto:Avicennasis/sandbox4', 'Uzanto:Avicennasis/sandbox5', 'Uzanto:Avicennasis/sandbox6', 'Uzanto:Avicennasis/sandbox7', 'Uzanto:Avicennasis/sandbox8', 'Uzanto:Avicennasis/sandbox9', 'Uzanto:Avicennasis/sandbox10'],
'simple': [u'User talk:Avicennasis/sandbox', 'User talk:Avicennasis/sandbox2', 'User talk:Avicennasis/sandbox3', 'User talk:Avicennasis/sandbox4', 'User talk:Avicennasis/sandbox5', 'User talk:Avicennasis/sandbox6', 'User talk:Avicennasis/sandbox7', 'User talk:Avicennasis/sandbox8', 'User talk:Avicennasis/sandbox9', 'User talk:Avicennasis/sandbox10', 'User:Avicennasis/sandbox', 'User:Avicennasis/sandbox2', 'User:Avicennasis/sandbox3', 'User:Avicennasis/sandbox4', 'User:Avicennasis/sandbox5', 'User:Avicennasis/sandbox6', 'User:Avicennasis/sandbox7', 'User:Avicennasis/sandbox8', 'User:Avicennasis/sandbox9', 'User:Avicennasis/sandbox10'],
'sco': [u'User talk:Avicennasis/sandbox', 'User talk:Avicennasis/sandbox2', 'User talk:Avicennasis/sandbox3', 'User talk:Avicennasis/sandbox4', 'User talk:Avicennasis/sandbox5', 'User talk:Avicennasis/sandbox6', 'User talk:Avicennasis/sandbox7', 'User talk:Avicennasis/sandbox8', 'User talk:Avicennasis/sandbox9', 'User talk:Avicennasis/sandbox10', 'User:Avicennasis/sandbox', 'User:Avicennasis/sandbox2', 'User:Avicennasis/sandbox3', 'User:Avicennasis/sandbox4', 'User:Avicennasis/sandbox5', 'User:Avicennasis/sandbox6', 'User:Avicennasis/sandbox7', 'User:Avicennasis/sandbox8', 'User:Avicennasis/sandbox9', 'User:Avicennasis/sandbox10'],
}
# This is required for the text that is shown when you run this script
# with the parameter -help.
docuReplacements = {
'¶ms;': pagegenerators.parameterHelp,
}
class SandboxBot(Bot):
"""Sandbox reset bot."""
availableOptions = {
'hours': 1,
'no_repeat': True,
'delay': None,
'delay_td': None,
'text': "",
'summary': "",
}
def __init__(self, **kwargs):
"""Constructor."""
super(SandboxBot, self).__init__(**kwargs)
if self.getOption('delay') is None:
d = min(15, max(5, int(self.getOption('hours') * 60)))
self.availableOptions['delay_td'] = datetime.timedelta(minutes=d)
else:
d = max(5, self.getOption('delay'))
self.availableOptions['delay_td'] = datetime.timedelta(minutes=d)
self.site = pywikibot.Site()
if not content.get(self.site.code) and not self.getOption('text'):
pywikibot.error(u'No content is given for pages, exiting.')
raise RuntimeError
if not self.generator:
if self.site.code not in sandboxTitle:
pywikibot.error(u'No generator is given for this site'
u'(%s), exiting.' % self.site)
raise RuntimeError
local_sandbox_title = sandboxTitle[self.site.code]
if not isinstance(local_sandbox_title, list):
local_sandbox_title = [local_sandbox_title]
self.generator = [pywikibot.Page(self.site, page_name) for
page_name in local_sandbox_title]
def run(self):
"""Run bot."""
self.site.login()
while True:
wait = False
now = time.strftime("%d %b %Y %H:%M:%S (UTC)", time.gmtime())
for sandboxPage in self.generator:
pywikibot.output(u'Preparing to process sandbox page %s'
% sandboxPage.title(asLink=True))
if sandboxPage.isRedirectPage():
pywikibot.warning(
u'%s is a redirect page, cleaning it anyway'
% sandboxPage.title(asLink=True))
try:
text = sandboxPage.text
if not self.getOption('text'):
translatedContent = i18n.translate(self.site, content)
else:
translatedContent = self.getOption('text')
if self.getOption('summary'):
translatedMsg = self.getOption('summary')
else:
translatedMsg = i18n.twtranslate(
self.site, 'clean_sandbox-cleaned')
subst = 'subst:' in translatedContent
pos = text.find(translatedContent.strip())
if text.strip() == translatedContent.strip():
pywikibot.output(
u'The sandbox is still clean, no change necessary.')
elif subst and \
sandboxPage.userName() == self.site.user():
pywikibot.output(
u'The sandbox might be clean, no change necessary.')
elif pos != 0 and not subst:
sandboxPage.put(translatedContent, translatedMsg)
pywikibot.showDiff(text, translatedContent)
pywikibot.output(u'Standard content was changed, '
u'sandbox cleaned.')
else:
edit_delta = (datetime.datetime.utcnow() -
sandboxPage.editTime())
delta = self.getOption('delay_td') - edit_delta
# Is the last edit more than 'delay' minutes ago?
if delta <= datetime.timedelta(0):
sandboxPage.put(translatedContent, translatedMsg)
pywikibot.showDiff(text, translatedContent)
pywikibot.output(u'Standard content was changed, '
u'sandbox cleaned.')
else: # wait for the rest
pywikibot.output(
u'Sandbox edited %.1f minutes ago...'
% (edit_delta.seconds / 60.0))
pywikibot.output(u'Sleeping for %d minutes.'
% (delta.seconds // 60))
time.sleep(delta.seconds)
wait = True
except pywikibot.EditConflict:
pywikibot.output(
u'*** Loading again because of edit conflict.\n')
except pywikibot.NoPage:
pywikibot.output(
u'*** The sandbox is not existent, skipping.')
continue
if self.getOption('no_repeat'):
pywikibot.output(u'\nDone.')
return
elif not wait:
if self.getOption('hours') < 1.0:
pywikibot.output('\nSleeping %s minutes, now %s'
% ((self.getOption('hours') * 60), now))
else:
pywikibot.output('\nSleeping %s hours, now %s'
% (self.getOption('hours'), now))
time.sleep(self.getOption('hours') * 60 * 60)
def main(*args):
"""
Process command line arguments and invoke bot.
If args is an empty list, sys.argv is used.
@param args: command line arguments
@type args: list of unicode
"""
opts = {}
local_args = pywikibot.handle_args(args)
gen_factory = pagegenerators.GeneratorFactory()
for arg in local_args:
if arg.startswith('-hours:'):
opts['hours'] = float(arg[7:])
opts['no_repeat'] = False
elif arg.startswith('-delay:'):
opts['delay'] = int(arg[7:])
elif arg.startswith('-text'):
if len(arg) == 5:
opts['text'] = pywikibot.input(
u'What text do you want to substitute?')
else:
opts['text'] = arg[6:]
elif arg.startswith('-summary'):
if len(arg) == len('-summary'):
opts['summary'] = pywikibot.input(u'Enter the summary:')
else:
opts['summary'] = arg[9:]
else:
gen_factory.handleArg(arg)
generator = gen_factory.getCombinedGenerator()
bot = SandboxBot(generator=generator, **opts)
bot.run()
if __name__ == "__main__":
main()
|
Bitergia/allura | refs/heads/master | ForgeDiscussion/forgediscussion/tests/functional/test_import.py | 3 | import os
import json
from datetime import datetime, timedelta
from nose.tools import assert_equal
import ming
import pylons
pylons.c = pylons.tmpl_context
pylons.g = pylons.app_globals
from pylons import g, c
from allura import model as M
from alluratest.controller import TestController, TestRestApiBase
class TestImportController(TestRestApiBase):#TestController):
def setUp(self):
super(TestImportController, self).setUp()
here_dir = os.path.dirname(__file__)
self.app.get('/discussion/')
self.json_text = open(here_dir + '/data/sf.json').read()
def test_no_capability(self):
self.set_api_ticket({'import2': ['Projects', 'test']})
resp = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text)
assert resp.status_int == 403
self.set_api_ticket({'import': ['Projects', 'test2']})
resp = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text)
assert resp.status_int == 403
self.set_api_ticket({'import': ['Projects', 'test']})
resp = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text)
assert resp.status_int == 200
def test_validate_import(self):
r = self.api_post('/rest/p/test/discussion/validate_import',
doc=self.json_text)
assert not r.json['errors']
def test_import_anon(self):
api_ticket = M.ApiTicket(user_id=self.user._id, capabilities={'import': ['Projects', 'test']},
expires=datetime.utcnow() + timedelta(days=1))
ming.orm.session(api_ticket).flush()
self.set_api_token(api_ticket)
r = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text)
assert not r.json['errors'], r.json['errors']
r = self.app.get('/p/test/discussion/')
assert 'Open Discussion' in str(r)
assert 'Welcome to Open Discussion' in str(r)
for link in r.html.findAll('a'):
if 'Welcome to Open Discussion' in str(link): break
r = self.app.get(link.get('href'))
assert '2009-11-19' in str(r)
assert 'Welcome to Open Discussion' in str(r)
assert 'Anonymous' in str(r)
def test_import_map(self):
api_ticket = M.ApiTicket(user_id=self.user._id, capabilities={'import': ['Projects', 'test']},
expires=datetime.utcnow() + timedelta(days=1))
ming.orm.session(api_ticket).flush()
self.set_api_token(api_ticket)
r = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text,
username_mapping=json.dumps(dict(rick446='test-user')))
assert not r.json['errors'], r.json['errors']
r = self.app.get('/p/test/discussion/')
assert 'Open Discussion' in str(r)
assert 'Welcome to Open Discussion' in str(r)
for link in r.html.findAll('a'):
if 'Welcome to Open Discussion' in str(link): break
r = self.app.get(link.get('href'))
assert '2009-11-19' in str(r)
assert 'Welcome to Open Discussion' in str(r)
assert 'Test User' in str(r)
assert 'Anonymous' not in str(r)
def test_import_create(self):
api_ticket = M.ApiTicket(user_id=self.user._id, capabilities={'import': ['Projects', 'test']},
expires=datetime.utcnow() + timedelta(days=1))
ming.orm.session(api_ticket).flush()
self.set_api_token(api_ticket)
r = self.api_post('/rest/p/test/discussion/perform_import',
doc=self.json_text, create_users='True')
assert not r.json['errors'], r.json['errors']
r = self.app.get('/p/test/discussion/')
assert 'Open Discussion' in str(r)
assert 'Welcome to Open Discussion' in str(r)
for link in r.html.findAll('a'):
if 'Welcome to Open Discussion' in str(link): break
r = self.app.get(link.get('href'))
assert '2009-11-19' in str(r)
assert 'Welcome to Open Discussion' in str(r)
assert 'Anonymous' not in str(r)
assert 'test-rick446' in str(r)
def set_api_ticket(self, caps={'import': ['Projects', 'test']}):
api_ticket = M.ApiTicket(user_id=self.user._id, capabilities=caps,
expires=datetime.utcnow() + timedelta(days=1))
ming.orm.session(api_ticket).flush()
self.set_api_token(api_ticket)
@staticmethod
def time_normalize(t):
return t.replace('T', ' ').replace('Z', '')
def verify_ticket(self, from_api, org):
assert_equal(from_api['status'], org['status'])
assert_equal(from_api['description'], org['description'])
assert_equal(from_api['summary'], org['summary'])
assert_equal(from_api['ticket_num'], org['id'])
assert_equal(from_api['created_date'], self.time_normalize(org['date']))
assert_equal(from_api['mod_date'], self.time_normalize(org['date_updated']))
assert_equal(from_api['custom_fields']['_resolution'], org['resolution'])
assert_equal(from_api['custom_fields']['_cc'], org['cc'])
assert_equal(from_api['custom_fields']['_private'], org['private'])
|
DavidTingley/ephys-processing-pipeline | refs/heads/master | installation/klustaviewa-0.3.0/klustaviewa/control/tests/test_stack.py | 2 | """Unit tests for stack module."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
from klustaviewa.control.stack import Stack
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
def test_stack1():
s = Stack()
assert s.get_current() == None
assert s.can_undo() == False
assert s.can_redo() == False
s.add("action 0")
assert s.can_undo() == True
assert s.can_redo() == False
s.add("action 1")
s.add("action 2")
assert s.get_current() == "action 2"
assert s.can_undo() == True
assert s.can_redo() == False
s.undo()
assert s.get_current() == "action 1"
assert s.can_undo() == True
assert s.can_redo() == True
s.redo()
assert s.get_current() == "action 2"
assert s.can_undo() == True
assert s.can_redo() == False
s.undo()
s.undo()
s.add("action 1 bis")
assert s.get_current() == "action 1 bis"
assert s.can_undo() == True
assert s.can_redo() == False
s.undo()
assert s.get_current() == "action 0"
assert s.can_undo() == True
assert s.can_redo() == True
def test_stack_maxsize():
s = Stack(maxsize=10)
[s.add("action {0:d}".format(i)) for i in xrange(20)]
assert len(s.get_stack()) == 10
assert s.get_current() == "action 19"
assert s.can_undo() == True
assert s.can_redo() == False
[s.undo() for _ in xrange(10)]
assert s.can_undo() == False
assert s.can_redo() == True
|
gtest-org/test12 | refs/heads/master | tests/wrappers/test_wrappers.py | 37 | # Joint copyright:
# - Copyright 2012,2013 Wikimedia Foundation
# - Copyright 2012,2013 Antoine "hashar" Musso
# - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from testtools import TestCase
from testscenarios.testcase import TestWithScenarios
from jenkins_jobs.modules import wrappers
from tests.base import get_scenarios, BaseTestCase
class TestCaseModuleWrappers(TestWithScenarios, TestCase, BaseTestCase):
fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
scenarios = get_scenarios(fixtures_path)
klass = wrappers.Wrappers
|
hernad/frappe | refs/heads/develop | frappe/email/smtp.py | 6 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import smtplib
import _socket
from frappe.utils import cint
from frappe import _
def send(email, append_to=None):
"""send the message or add it to Outbox Email"""
if frappe.flags.in_test:
frappe.flags.sent_mail = email.as_string()
return
if frappe.flags.mute_emails or frappe.conf.get("mute_emails") or False:
frappe.msgprint(_("Emails are muted"))
return
try:
smtpserver = SMTPServer(append_to=append_to)
if hasattr(smtpserver, "always_use_account_email_id_as_sender") and \
cint(smtpserver.always_use_account_email_id_as_sender) and smtpserver.login:
if not email.reply_to:
email.reply_to = email.sender
email.sender = smtpserver.login
smtpserver.sess.sendmail(email.sender, email.recipients + (email.cc or []),
email.as_string())
except smtplib.SMTPSenderRefused:
frappe.msgprint(_("Invalid login or password"))
raise
except smtplib.SMTPRecipientsRefused:
frappe.msgprint(_("Invalid recipient address"))
raise
def get_outgoing_email_account(raise_exception_not_set=True, append_to=None):
"""Returns outgoing email account based on `append_to` or the default
	outgoing account. If the default outgoing account is not found, it will
try getting settings from `site_config.json`."""
if not getattr(frappe.local, "outgoing_email_account", None):
frappe.local.outgoing_email_account = {}
if not frappe.local.outgoing_email_account.get(append_to or "default"):
email_account = None
if append_to:
email_account = _get_email_account({"enable_outgoing": 1, "append_to": append_to})
if not email_account:
email_account = get_default_outgoing_email_account(raise_exception_not_set=raise_exception_not_set)
if not email_account and raise_exception_not_set:
frappe.throw(_("Please setup default Email Account from Setup > Email > Email Account"),
frappe.OutgoingEmailError)
frappe.local.outgoing_email_account[append_to or "default"] = email_account
return frappe.local.outgoing_email_account[append_to or "default"]
def get_default_outgoing_email_account(raise_exception_not_set=True):
email_account = _get_email_account({"enable_outgoing": 1, "default_outgoing": 1})
if not email_account and frappe.conf.get("mail_server"):
# from site_config.json
email_account = frappe.new_doc("Email Account")
email_account.update({
"smtp_server": frappe.conf.get("mail_server"),
"smtp_port": frappe.conf.get("mail_port"),
"use_tls": cint(frappe.conf.get("use_ssl") or 0),
"email_id": frappe.conf.get("mail_login"),
"password": frappe.conf.get("mail_password"),
"sender": frappe.conf.get("auto_email_id", "[email protected]")
})
email_account.from_site_config = True
if not email_account and not raise_exception_not_set:
return None
if frappe.flags.mute_emails or frappe.conf.get("mute_emails") or False:
# create a stub
email_account = frappe.new_doc("Email Account")
email_account.update({
"sender": "[email protected]"
})
return email_account
def _get_email_account(filters):
name = frappe.db.get_value("Email Account", filters)
return frappe.get_doc("Email Account", name) if name else None
class SMTPServer:
def __init__(self, login=None, password=None, server=None, port=None, use_ssl=None, append_to=None):
# get defaults from mail settings
self._sess = None
self.email_account = None
self.server = None
if server:
self.server = server
self.port = port
self.use_ssl = cint(use_ssl)
self.login = login
self.password = password
else:
self.setup_email_account(append_to)
def setup_email_account(self, append_to=None):
self.email_account = get_outgoing_email_account(raise_exception_not_set=False, append_to=append_to)
if self.email_account:
self.server = self.email_account.smtp_server
self.login = getattr(self.email_account, "login_id", None) \
or self.email_account.email_id
self.password = self.email_account.password
self.port = self.email_account.smtp_port
self.use_ssl = self.email_account.use_tls
self.sender = self.email_account.email_id
self.always_use_account_email_id_as_sender = self.email_account.get("always_use_account_email_id_as_sender")
@property
def sess(self):
"""get session"""
if self._sess:
return self._sess
# check if email server specified
if not getattr(self, 'server'):
err_msg = _('Email Account not setup. Please create a new Email Account from Setup > Email > Email Account')
frappe.msgprint(err_msg)
raise frappe.OutgoingEmailError, err_msg
try:
if self.use_ssl and not self.port:
self.port = 587
self._sess = smtplib.SMTP((self.server or "").encode('utf-8'),
cint(self.port) or None)
if not self._sess:
err_msg = _('Could not connect to outgoing email server')
frappe.msgprint(err_msg)
raise frappe.OutgoingEmailError, err_msg
if self.use_ssl:
self._sess.ehlo()
self._sess.starttls()
self._sess.ehlo()
if self.login and self.password:
ret = self._sess.login((self.login or "").encode('utf-8'),
(self.password or "").encode('utf-8'))
# check if logged correctly
if ret[0]!=235:
frappe.msgprint(ret[1])
raise frappe.OutgoingEmailError, ret[1]
return self._sess
except _socket.error:
# Invalid mail server -- due to refusing connection
frappe.throw(_('Invalid Outgoing Mail Server or Port'))
except smtplib.SMTPAuthenticationError:
frappe.throw(_("Invalid login or password"))
except smtplib.SMTPException:
frappe.msgprint(_('Unable to send emails at this time'))
raise
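# Illustrative sketch (not part of the original module): exercising
# SMTPServer directly, assuming valid SMTP credentials.
#
#   server = SMTPServer(login="[email protected]", password="secret",
#       server="smtp.example.com", port=587, use_ssl=1)
#   session = server.sess  # lazily connects, starts TLS and logs in
#   session.sendmail("[email protected]", ["[email protected]"], "raw message")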
|
msebire/intellij-community | refs/heads/master | plugins/hg4idea/testData/bin/hgext/purge.py | 90 | # Copyright (C) 2006 - Marco Barisione <[email protected]>
#
# This is a small extension for Mercurial (http://mercurial.selenic.com/)
# that removes files not known to mercurial
#
# This program was inspired by the "cvspurge" script contained in CVS
# utilities (http://www.red-bean.com/cvsutils/).
#
# For help on the usage of "hg purge" use:
# hg help purge
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
'''command to delete untracked files from the working directory'''
from mercurial import util, commands, cmdutil, scmutil
from mercurial.i18n import _
import os, stat
cmdtable = {}
command = cmdutil.command(cmdtable)
testedwith = 'internal'
@command('purge|clean',
[('a', 'abort-on-err', None, _('abort if an error occurs')),
('', 'all', None, _('purge ignored files too')),
('p', 'print', None, _('print filenames instead of deleting them')),
('0', 'print0', None, _('end filenames with NUL, for use with xargs'
' (implies -p/--print)')),
] + commands.walkopts,
_('hg purge [OPTION]... [DIR]...'))
def purge(ui, repo, *dirs, **opts):
'''removes files not tracked by Mercurial
Delete files not known to Mercurial. This is useful to test local
and uncommitted changes in an otherwise-clean source tree.
This means that purge will delete:
- Unknown files: files marked with "?" by :hg:`status`
- Empty directories: in fact Mercurial ignores directories unless
they contain files under source control management
But it will leave untouched:
- Modified and unmodified tracked files
- Ignored files (unless --all is specified)
- New files added to the repository (with :hg:`add`)
If directories are given on the command line, only files in these
directories are considered.
Be careful with purge, as you could irreversibly delete some files
you forgot to add to the repository. If you only want to print the
list of files that this program would delete, use the --print
option.
'''
act = not opts['print']
eol = '\n'
if opts['print0']:
eol = '\0'
act = False # --print0 implies --print
def remove(remove_func, name):
if act:
try:
remove_func(repo.wjoin(name))
except OSError:
m = _('%s cannot be removed') % name
if opts['abort_on_err']:
raise util.Abort(m)
ui.warn(_('warning: %s\n') % m)
else:
ui.write('%s%s' % (name, eol))
def removefile(path):
try:
os.remove(path)
except OSError:
# read-only files cannot be unlinked under Windows
s = os.stat(path)
if (s.st_mode & stat.S_IWRITE) != 0:
raise
os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
os.remove(path)
directories = []
match = scmutil.match(repo[None], dirs, opts)
match.dir = directories.append
status = repo.status(match=match, ignored=opts['all'], unknown=True)
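# status[4] holds unknown files and status[5] ignored files (the
# latter is only populated when --all passes ignored=True above)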
for f in sorted(status[4] + status[5]):
ui.note(_('removing file %s\n') % f)
remove(removefile, f)
for f in sorted(directories, reverse=True):
if match(f) and not os.listdir(repo.wjoin(f)):
ui.note(_('removing directory %s\n') % f)
remove(os.rmdir, f)
|
legaultmarc/grstools | refs/heads/master | grstools/tests/test_grs_compute.py | 1 | """
Test the GRS computation algorithm
"""
import unittest
from pkg_resources import resource_filename
import geneparse
import geneparse.testing
import numpy as np
from ..scripts import build_grs
class TestCompute(unittest.TestCase):
def test_weight_unambiguous(self):
# _weight_unambiguous(g, info, quality_weight):
# Geno: reference: G / coded: T
# Stats: risk: T
# *No need to flip*
v1 = geneparse.Variant("rs12345", 1, 123, "GT")
g1 = geneparse.Genotypes(
variant=v1,
genotypes=np.array([0, 1, np.nan, 1, 0, 1, 0, 2, np.nan, 2]),
reference="G",
coded="T",
multiallelic=False
)
info1 = build_grs.ScoreInfo(0.1, reference="G", risk="T")
mean1 = np.nanmean(g1.genotypes)
# Geno: reference: C / coded: A
# Stats: risk: C
# *Need to flip*
v2 = geneparse.Variant("rs98174", 1, 456, "CA")
g2 = geneparse.Genotypes(
variant=v2,
# For the GRS, we will use:
# NA, 0, 1, 2, 2, 0, 1, NA, 2, NA
genotypes=np.array([np.nan, 2, 1, 0, 0, 2, 1, np.nan, 0, np.nan]),
reference="C",
coded="A",
multiallelic=False
)
info2 = build_grs.ScoreInfo(0.2, reference="A", risk="C")
mean2 = np.nanmean(g2.genotypes)
assert g1.genotypes.shape[0] == g2.genotypes.shape[0]
# When computing GRS, missing genotypes are counted as the expected
# value of the risk allele.
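# For g1 the risk allele (T) is the coded allele, so a missing call
# contributes mean1 directly; for g2 the risk allele (C) is the
# reference allele, so a missing call contributes (2 - mean2).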
expected = np.array([
0 + (2 - mean2) * info2.effect,
1 * info1.effect + 0,
mean1 * info1.effect + 1 * info2.effect,
1 * info1.effect + 2 * info2.effect,
0 + 2 * info2.effect,
1 * info1.effect + 0,
0 + info2.effect,
2 * info1.effect + (2 - mean2) * info2.effect,
mean1 * info1.effect + 2 * info2.effect,
2 * info1.effect + (2 - mean2) * info2.effect
])
grs = np.zeros(g1.genotypes.shape[0])
grs += build_grs._weight_unambiguous(g1, info1, False)
grs += build_grs._weight_unambiguous(g2, info2, False)
np.testing.assert_array_almost_equal(expected, grs)
def test_weight_unambiguous_quality(self):
# _weight_unambiguous(g, info, quality_weight):
# Geno: reference: G / coded: T
# Stats: risk: T
v1 = geneparse.ImputedVariant("rs12345", 1, 123, "GT", quality=0.5)
g1 = geneparse.Genotypes(
variant=v1,
genotypes=np.array([0, 1, np.nan, 1, 0, 1, 0, 2, np.nan, 2]),
reference="G",
coded="T",
multiallelic=False
)
info1 = build_grs.ScoreInfo(0.1, reference="G", risk="T")
mean1 = np.nanmean(g1.genotypes)
# Geno: reference: C / coded: A
# Stats: risk: C
v2 = geneparse.ImputedVariant("rs98174", 1, 456, "CA", quality=0.8)
g2 = geneparse.Genotypes(
variant=v2,
genotypes=np.array([np.nan, 2, 1, 0, 0, 2, 1, np.nan, 0, np.nan]),
reference="C",
coded="A",
multiallelic=False
)
info2 = build_grs.ScoreInfo(0.2, reference="A", risk="C")
mean2 = np.nanmean(g2.genotypes)
expected = np.array([
((2 - mean2) * info2.effect) * 0.8,
1 * info1.effect * 0.5,
(mean1 * info1.effect) * 0.5 + info2.effect * 0.8,
(info1.effect * 0.5) + (2 * info2.effect * 0.8),
2 * info2.effect * 0.8,
info1.effect * 0.5,
info2.effect * 0.8,
2 * info1.effect * 0.5 + (2 - mean2) * info2.effect * 0.8,
mean1 * info1.effect * 0.5 + 2 * info2.effect * 0.8,
2 * info1.effect * 0.5 + (2 - mean2) * info2.effect * 0.8
])
grs = np.zeros(10)
grs += build_grs._weight_unambiguous(g1, info1, True)
grs += build_grs._weight_unambiguous(g2, info2, True)
np.testing.assert_array_almost_equal(expected, grs)
def test_weight_unambiguous_negative_effect(self):
v1 = geneparse.Variant("testing", 1, 12345, "TC")
g1 = geneparse.testing.simulate_genotypes_for_variant(
v1, "T", 0.2, 1000, call_rate=0.99
)
info1 = build_grs.ScoreInfo(-0.2, reference="C", risk="T")
v2 = geneparse.Variant("testing2", 2, 15161, "GA")
g2 = geneparse.testing.simulate_genotypes_for_variant(
v2, "G", 0.34, 1000, call_rate=0.99
)
info2 = build_grs.ScoreInfo(0.2, reference="G", risk="A")
# Set the expected value for missing data.
g1.genotypes[np.isnan(g1.genotypes)] = np.nanmean(g1.genotypes)
g2.genotypes[np.isnan(g2.genotypes)] = np.nanmean(g2.genotypes)
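# A negative effect on the risk allele is equivalent to a positive
# effect on the other allele, hence the (2 - dosage) * -effect form.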
expected = (2 - g1.genotypes) * -info1.effect
expected += (2 - g2.genotypes) * info2.effect
observed = np.zeros(1000)
observed += build_grs._weight_unambiguous(g1, info1, True)
observed += build_grs._weight_unambiguous(g2, info2, True)
np.testing.assert_array_almost_equal(expected, observed)
def test_weight_unambiguous_bad_alleles(self):
v1 = geneparse.Variant("testing", 1, 12345, "AG")
g1 = geneparse.testing.simulate_genotypes_for_variant(
v1, "A", 0.2, 1000, call_rate=0.98
)
info1 = build_grs.ScoreInfo(0.3, reference="T", risk="A")
with self.assertRaises(RuntimeError):
build_grs._weight_unambiguous(g1, info1, True)
def test_id_strand_frequency_noflip(self):
my_v = geneparse.Variant("rs12345", 1, 1234151, "GC")
my_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.28, 1000, call_rate=0.97
)
reference_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.28, 400, call_rate=0.99
)
reference = _FakeReader({my_v: reference_g})
need_strand_flip = build_grs._id_strand_by_frequency(my_g, reference)
self.assertFalse(need_strand_flip)
def test_id_strand_frequency_noflip_genotypes_flipped(self):
my_v = geneparse.Variant("rs12345", 1, 1234151, "GC")
my_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.28, 1000, call_rate=0.97
)
reference_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "G", 0.72, 400, call_rate=0.99
)
reference = _FakeReader({my_v: reference_g})
need_strand_flip = build_grs._id_strand_by_frequency(my_g, reference)
self.assertFalse(need_strand_flip)
def test_id_strand_frequency_flip(self):
my_v = geneparse.Variant("rs12345", 1, 1234151, "GC")
my_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "G", 0.28, 1000, call_rate=0.97
)
reference_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.28, 400, call_rate=0.99
)
reference = _FakeReader({my_v: reference_g})
need_strand_flip = build_grs._id_strand_by_frequency(my_g, reference)
self.assertTrue(need_strand_flip)
def test_id_strand_frequency_close_50(self):
my_v = geneparse.Variant("rs12345", 1, 1234151, "GC")
my_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "G", 0.5, 1000, call_rate=0.97
)
reference_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.5, 400, call_rate=0.99
)
reference = _FakeReader({my_v: reference_g})
need_strand_flip = build_grs._id_strand_by_frequency(my_g, reference)
self.assertTrue(need_strand_flip is None)
def test_id_strand_frequency_large_freq_difference(self):
my_v = geneparse.Variant("rs12345", 1, 1234151, "GC")
my_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "G", 0.01, 1000, call_rate=0.97
)
reference_g = geneparse.testing.simulate_genotypes_for_variant(
my_v, "C", 0.4, 400, call_rate=0.99
)
reference = _FakeReader({my_v: reference_g})
need_strand_flip = build_grs._id_strand_by_frequency(my_g, reference)
self.assertTrue(need_strand_flip is None)
def test_replace_by_tag(self):
reference = get_reference()
v = geneparse.Variant("rs35391999", 2, 85626581, "AT")
g = _FakeGenotypes(v)
info = build_grs.ScoreInfo(0.2, reference="A", risk="T")
g, tag_info, r2 = build_grs._replace_by_tag(
g, info, reference, reference
)
reference.close()
self.assertEqual(g.variant.name, "rs6419687")
self.assertEqual(g.variant.chrom, 2)
self.assertEqual(g.variant.pos, 85629817)
self.assertEqual(g.variant.alleles_set, {"G", "A"})
# 0.996 was computed by plink
self.assertAlmostEqual(r2, 0.996, places=3)
self.assertAlmostEqual(tag_info.effect, r2 * 0.2)
self.assertEqual(tag_info.risk, "G") # G=T, according to plink
def test_replace_by_tag_computation_non_coded(self):
reference = get_reference()
v = geneparse.Variant("rs35391999", 2, 85626581, "AT")
g = _FakeGenotypes(v)
info = build_grs.ScoreInfo(0.2, reference="A", risk="T")
g, tag_info, r2 = build_grs._replace_by_tag(
g, info, reference, reference
)
raw_tag_geno = reference.get_variant_genotypes(g.variant)[0]
reference.close()
# In the file, the A alleles for both variants are coded.
# The risk allele is T and T=G, so we need to flip the genotype
# alleles for the expected computation.
grs = build_grs._weight_unambiguous(g, tag_info, False)
np.testing.assert_array_almost_equal(
grs,
r2 * 0.2 * (2 - raw_tag_geno.genotypes)
)
def test_replace_by_tag_computation_coded(self):
reference = get_reference()
v = geneparse.Variant("rs35391999", 2, 85626581, "AT")
g = _FakeGenotypes(v)
info = build_grs.ScoreInfo(0.2, reference="T", risk="A")
g, tag_info, r2 = build_grs._replace_by_tag(
g, info, reference, reference
)
raw_tag_geno = reference.get_variant_genotypes(g.variant)[0]
reference.close()
# In the file, the A alleles for both variants are coded.
# The risk allele is A and A=A, so we need to use the tag's
# genotype as is for the computation.
grs = build_grs._weight_unambiguous(g, tag_info, False)
np.testing.assert_array_almost_equal(
grs,
r2 * 0.2 * raw_tag_geno.genotypes
)
def test_replace_by_tag_notag(self):
reference = get_reference()
v = geneparse.Variant("rs12714148", 2, 85859835, "AT")
g = _FakeGenotypes(v)
info = build_grs.ScoreInfo(0.2, reference="A", risk="T")
with self.assertRaises(build_grs.CouldNotFindTag):
g, tag_info, r2 = build_grs._replace_by_tag(
g, info, reference, reference
)
reference.close()
class _FakeReader(object):
def __init__(self, d):
self.d = d
def get_variant_genotypes(self, v):
return [self.d[v]]
class _FakeGenotypes(object):
def __init__(self, variant):
self.variant = variant
def get_reference():
return geneparse.parsers["plink"](
resource_filename(__name__, "data/extract_tag_test.bed")[:-4]
)
|
oinopion/django | refs/heads/master | django/views/decorators/gzip.py | 720 | from django.middleware.gzip import GZipMiddleware
from django.utils.decorators import decorator_from_middleware
gzip_page = decorator_from_middleware(GZipMiddleware)
gzip_page.__doc__ = "Decorator for views that gzips pages if the client supports it."
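# Minimal usage sketch (hypothetical view, not part of this module):
#
# @gzip_page
# def my_view(request):
# return HttpResponse("...")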
|
mmoya/ansible | refs/heads/devel | v2/ansible/utils/boolean.py | 256 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
def boolean(value):
val = str(value)
if val.lower() in [ "true", "t", "y", "1", "yes" ]:
return True
else:
return False
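# Illustrative behaviour (not part of the original module):
# boolean("Yes") -> True, boolean(1) -> True, while unrecognised
# values such as "on" or None fall through to False.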
|
chandlercr/aima-python | refs/heads/master | submissions/Porter/puzzles.py | 18 | import search
from math import cos, pi
sumner_map = search.UndirectedGraph(dict(
# Portland=dict(Mitchellville=7, Fairfield=17, Cottontown=18),
# Cottontown=dict(Portland=18),
# Fairfield=dict(Mitchellville=21, Portland=17),
# Mitchellville=dict(Portland=7, Fairfield=21),
Dallas=dict(Austin=50, Houston=100, SanAntonio = 30, Galveston = 10, ElPaso = 200),
Austin=dict(Dallas=50, Houston=20, ElPaso = 20, SanAntonio = 300, Galveston = 500),
Houston=dict(Dallas=100, Austin=20, ElPaso = 50, SanAntonio = 200, Galveston = 100),
ElPaso=dict(Dallas=200, Austin = 20, Houston = 50, SanAntonio = 50, Galveston = 200),
SanAntonio=dict(Dallas = 30, ElPaso=50, Houston = 200, Austin = 300, Galveston = 500),
Galveston=dict(Dallas = 10, SanAntonio = 500, ElPaso = 200, Houston = 100, Austin = 300),
))
sumner_puzzle = search.GraphProblem('Dallas', 'SanAntonio', sumner_map)
sumner_puzzle.description = '''
An abbreviated map of major Texas cities (the Sumner County, TN entries are commented out above).
This map is unique, to the best of my knowledge.
'''
myPuzzles = [
sumner_puzzle,
]
|
GMadorell/coursera-machine-learning | refs/heads/master | theory/08.02 - Dimensionality Reduction/scripts/initialize.py | 5 | import requests
import encoding
LATEX_TEMPLATE_URL = "https://raw.githubusercontent.com/Skabed/cookiecutter-latex-markdown/master/%7B%7Bcookiecutter.name%7D%7D/templates/latex.template"
def main():
# Reload latex template because cookiecutter removes tags from it.
print("Reloading latex template.")
reload_latex_template()
print("Done.")
def reload_latex_template():
latex_template = requests.get(LATEX_TEMPLATE_URL).text
with open("./templates/latex.template", "w") as latex_template_file:
latex_template_file.write(latex_template)
if __name__ == "__main__":
encoding.set_system_encoding_utf8()
main() |
LouKingGood/LouKingGood | refs/heads/master | plugin.program.super.favourites/default.py | 2 | #
# Copyright (C) 2014-
# Sean Poyser ([email protected])
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import xbmc
import xbmcaddon
import xbmcplugin
import xbmcgui
import os
import urllib
import re
import quicknet
import player
import favourite
import history
import utils
import cache
import sfile
import parameters
ADDONID = utils.ADDONID
ADDON = utils.ADDON
HOME = utils.HOME
ROOT = utils.ROOT
PROFILE = utils.PROFILE
VERSION = utils.VERSION
ICON = utils.ICON
FANART = utils.FANART
SEARCH = utils.SEARCH
BLANK = 'NULL'
GETTEXT = utils.GETTEXT
TITLE = utils.TITLE
FRODO = utils.FRODO
GOTHAM = utils.GOTHAM
HELIX = utils.HELIX
FILENAME = utils.FILENAME
FOLDERCFG = utils.FOLDERCFG
PLAYMEDIA_MODE = utils.PLAYMEDIA_MODE
ACTIVATEWINDOW_MODE = utils.ACTIVATEWINDOW_MODE
RUNPLUGIN_MODE = utils.RUNPLUGIN_MODE
ACTION_MODE = utils.ACTION_MODE
MANUAL_CMD = 'SF_MANUAL_CMD_'
DISPLAYNAME = utils.DISPLAYNAME
# -----Addon Modes ----- #
_IGNORE = -10
_MAIN = -2
_SUPERSEARCH = 0 #also in capture.py
_SUPERSEARCHDEF = 10 #also in capture.py
_EXTSEARCH = 25 #used to trigger new Super Search from outside of addon
_SEPARATOR = 50
_SETTINGS = 100
_VIEWTYPE = 150
_ADDTOXBMC = 200
_ADDTOSF = 250
_COPYTOSF = 275
_XBMC = 300
_FOLDER = utils._FOLDER
_NEWFOLDER = 500
_PLAYMEDIA = 600
_ACTIVATEWINDOW = 650
_ACTIVATEWINDOW_XBMC = 660
_ACTIVATESEARCH = 675
_REMOVEFOLDER = 700
_REMOVEFAVE = 800
_RENAMEFOLDER = 900
_RENAMEFAVE = 1000
_THUMBFAVE = 1500
_THUMBFOLDER = 1600
_PLAYBACKMODE = 1700
_EDITTERM = 1900
_EDITFOLDER = 2000
_EDITFAVE = 2100
_SECURE = 2200
_UNSECURE = 2300
_PLAYLIST = 2400
_COLOURFOLDER = 2500
_COLOURFAVE = 2600
_RECOMMEND_KEY = 2700 #also in capture.py
_RECOMMEND_KEY_A = 2710
_RECOMMEND_IMDB = 2800
_PLAYTRAILER = 2900
_EDITSEARCH = 3000
_IMPORT = 3100
_IPLAY = 3200
_PLAYLISTFILE = 3300
_PLAYLISTITEM = 3400
_PLAYLISTBROWSE = 3500
_DELETEPLAYLIST = 3600
_COPY_PLAY_TO_SF = 3700
_COPY_PLAY_TO_SF_ITEM = 3800
_PLAYPLAYLIST = 3900
_URLPLAYLIST = 4000
_HISTORYSHOW = 4100
_HISTORYADD = 4200
_HISTORYREMOVE = 4300
_MANUAL = 4400
_CUT = 4500
_COPY = 4600
_PASTE = 4700
_CUTFOLDER = 4800
_COPYFOLDER = 4900
_PASTEFOLDER = 5000
_IEXPLORE = 5100
_PLAY_FILE = 5200
_PLAY_FOLDER = 5300
_PLAY_SUPER_FOLDER = 5400
_PLAY_SUPER_FOLDER_EXT = 5450
# --------------------- Addon Settings --------------------- #
SHOWNEW = ADDON.getSetting('SHOWNEW') == 'true'
SHOWIMPORT = ADDON.getSetting('SHOWIMPORT') == 'true'
SHOWSEP = ADDON.getSetting('SHOWSEP') == 'true'
SHOWSS = ADDON.getSetting('SHOWSS') == 'true'
CONTEXTSS = ADDON.getSetting('CONTEXTSS') == 'true'
SHOW_FANART = ADDON.getSetting('SHOW_FANART') == 'true'
SHOWRECOMMEND = ADDON.getSetting('SHOWRECOMMEND') == 'true'
PLAY_PLAYLISTS = ADDON.getSetting('PLAY_PLAYLISTS') == 'true'
METARECOMMEND = ADDON.getSetting('METARECOMMEND') == 'true'
SYNOPSRECOMMEND = ADDON.getSetting('SYNOPSRECOMMEND') == 'true'
RECOMMENDAUTO = ADDON.getSetting('RECOMMENDFIRST') == 'true'
CONTEXTRECOMMEND = ADDON.getSetting('CONTEXTRECOMMEND') == 'true'
REMOTE = ADDON.getSetting('REMOTE') == 'true'
SHOWIPLAY = ADDON.getSetting('SHOWIPLAY') == 'true'
SHOWIHISTORY = ADDON.getSetting('SHOWREMEMBER') == 'true'
SHOWIEXPLORE = ADDON.getSetting('SHOWIEXPLORE') == 'true'
COPY_PLAYLISTS = ADDON.getSetting('COPY_PLAYLISTS') == 'true'
ALLOW_PLAYLIST_DELETE = ADDON.getSetting('ALLOW_PLAYLIST_DELETE') == 'true'
DISABLEMOVIEVIEW = ADDON.getSetting('DISABLEMOVIEVIEW') == 'true'
DEBUG = ADDON.getSetting('DEBUG') == 'true'
DLG_MENU = ADDON.getSetting('CONTEXT_STYLE') == '1'
DEFAULT_FANART = ADDON.getSetting('DEFAULT_FANART')
SHOWXBMC = utils.SHOWXBMC
INHERIT = utils.INHERIT
ALPHA_SORT = utils.ALPHA_SORT
LABEL_NUMERIC = utils.LABEL_NUMERIC
if xbmcgui.getCurrentWindowId() == 10000:
LABEL_NUMERIC = False
TMDB_API_KEY='302783d0fefc7b8a97ab7cc7f42a2cde'
try: VIEWTYPE = int(ADDON.getSetting('VIEWTYPE'))
except: VIEWTYPE = 0
ART_LANDSCAPE = int(ADDON.getSetting('ART_LANDSCAPE'))
ART_BANNER = int(ADDON.getSetting('ART_BANNER'))
ART_POSTER = int(ADDON.getSetting('ART_POSTER'))
CONTENTTYPE = ADDON.getSetting('CONTENTTYPE')
CONTENTTYPES = {}
CONTENTTYPES[GETTEXT(35029)] = 'files'
CONTENTTYPES[GETTEXT(35030)] = 'songs'
CONTENTTYPES[GETTEXT(35031)] = 'artists'
CONTENTTYPES[GETTEXT(35032)] = 'albums'
CONTENTTYPES[GETTEXT(35033)] = 'movies'
CONTENTTYPES[GETTEXT(35034)] = 'tvshows'
CONTENTTYPES[GETTEXT(35035)] = 'episodes'
CONTENTTYPES[GETTEXT(35036)] = 'musicvideos'
CONTENTTYPES[GETTEXT(35037)] = ''
if ADDON.getSetting('SHOW_STARTUP_TXT') == 'true':
utils.DialogOK(ADDON.getSetting('STARTUP_TXT'))
ADDON.setSetting('SHOW_STARTUP_TXT', 'false')
if REMOTE:
LOCATION = len(ADDON.getSetting('LOCATION')) > 0
else:
LOCATION = False
if DEFAULT_FANART == '1':
FANART = ADDON.getSetting('DEFAULT_IMAGE')
if DEFAULT_FANART == '2':
FANART = BLANK
CONTENTMODE = False
ISEARCH_EMPTY = '__iSearch__'
NUMBER_SEP = utils.NUMBER_SEP
# ---------------------------------------------------------- #
utils.CheckVersion()
global nItem
nItem = 0
global separator
separator = False
global currentFolder
currentFolder = PROFILE
global addingMainItems
addingMainItems = False
def main():
global addingMainItems
addingMainItems = True
addMainItems()
addingMainItems = False
parseFolder(PROFILE)
def setViewType():
view = utils.getViewType()
if view == None:
return False
ADDON.setSetting('VIEWTYPE', str(view))
return True
def addSuperSearch():
global separator
if not SHOWSS:
return
separator = False
addDir(GETTEXT(30054), _SUPERSEARCH, thumbnail=SEARCH, isFolder=True, infolabels={'plot':GETTEXT(30195)})
separator = True
def addNewFolderItem(path):
global currentFolder
currentFolder = path
global separator
if SHOWNEW:
separator = False
addDir(GETTEXT(30004), _NEWFOLDER, path=path, thumbnail=ICON, isFolder=False, infolabels={'plot':GETTEXT(30199)})
separator = True
def addSeparatorItem(menu=None):
global separator
separator = False
if SHOWSEP:
addDir('', _SEPARATOR, thumbnail=BLANK, isFolder=False, menu=menu)
def populatePasteMenu(menu):
global currentFolder
type = xbmcgui.Window(10000).getProperty('SF_TYPE').lower()
if len(type) == 0:
return
if type == 'capture':
menu.append((GETTEXT(30258), 'XBMC.RunPlugin(%s?mode=%d&paste=%s)' % (sys.argv[0], _PASTE, urllib.quote_plus(currentFolder))))
return
folder = 'folder' in type
cut = 'cut' in type
src = xbmcgui.Window(10000).getProperty('SF_FOLDER')
if folder:
if cut and currentFolder == xbmcgui.Window(10000).getProperty('SF_FILE'):
return
menu.append((GETTEXT(30182), 'XBMC.RunPlugin(%s?mode=%d&paste=%s)' % (sys.argv[0], _PASTEFOLDER, urllib.quote_plus(currentFolder))))
return
if src == currentFolder:
return
menu.append((GETTEXT(30179), 'XBMC.RunPlugin(%s?mode=%d&paste=%s)' % (sys.argv[0], _PASTE, urllib.quote_plus(currentFolder))))
def addGlobalMenuItem(menu, item, ignore, label, thumbnail, u, keyword, fanart, meta):
if mode == _FOLDER or mode == _MAIN or mode == _XBMC:
populatePasteMenu(menu)
if not ignore:
try: addFavouriteMenuItem(menu, label, thumbnail, u, keyword, fanart, meta)
except: pass
if mode != _XBMC:
cmd = '%s?mode=%d' % (sys.argv[0], _XBMC)
label = GETTEXT(30040) % DISPLAYNAME
menu.append((label, 'XBMC.Container.Update(%s)' % cmd))
if mode == _FOLDER or mode == _MAIN:
path = thepath
if path == '':
path = PROFILE
menu.append((GETTEXT(30004), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _NEWFOLDER, urllib.quote_plus(path))))
menu.append((GETTEXT(30166), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _MANUAL, urllib.quote_plus(path))))
menu.append((GETTEXT(30204), 'XBMC.RunPlugin(%s?mode=%d)' % (sys.argv[0], _VIEWTYPE)))
menu.append((GETTEXT(30005), 'XBMC.RunPlugin(%s?mode=%d)' % (sys.argv[0], _SETTINGS)))
addon = utils.findAddon(item)
if addon == None or addon == ADDONID:
return
label = utils.getSettingsLabel(addon)
menu.append((label, 'XBMC.RunPlugin(%s?mode=%d&addon=%s)' % (sys.argv[0], _SETTINGS, urllib.quote_plus(addon))))
def addFavouriteMenuItem(menu, name, thumb, cmd, keyword, fanart, meta):
if mode == _XBMC:
return
if len(name) < 1:
return
#add iRecommend context menu item
if mode is not _SUPERSEARCH and not addingMainItems:
label = removeNumeric(name)
if utils.ADDON.getSetting('STRIPNUMBERS') == 'true':
label = utils.Clean(label)
prefix = GETTEXT(30268)
if label.startswith(prefix):
label = label[len(prefix):]
if CONTEXTSS:
menu.append((GETTEXT(30054), 'XBMC.Container.Update(%s?mode=%d&keyword=%s&image=%s&fanart=%s)' % (sys.argv[0], _SUPERSEARCH, urllib.quote_plus(label), urllib.quote_plus(thumb), urllib.quote_plus(fanart))))
if CONTEXTRECOMMEND:
menu.append((GETTEXT(30088), 'XBMC.Container.Update(%s?mode=%d&keyword=%s&image=%s&fanart=%s)' % (sys.argv[0], _RECOMMEND_KEY, urllib.quote_plus(label), urllib.quote_plus(thumb), urllib.quote_plus(fanart))))
label = GETTEXT(30006) % DISPLAYNAME
menu.append((label, 'XBMC.RunPlugin(%s?mode=%d&name=%s&thumb=%s&cmd=%s&keyword=%s)' % (sys.argv[0], _ADDTOXBMC, urllib.quote_plus(name), urllib.quote_plus(thumb), urllib.quote_plus(cmd), urllib.quote_plus(keyword))))
try: meta = urllib.quote_plus(utils.convertDictToURL(meta))
except: meta = ''
if not addableToSF():
return
menu.append((GETTEXT(30047), 'XBMC.RunPlugin(%s?mode=%d&name=%s&thumb=%s&cmd=%s&keyword=%s&meta=%s)' % (sys.argv[0], _ADDTOSF, urllib.quote_plus(name), urllib.quote_plus(thumb), urllib.quote_plus(cmd), urllib.quote_plus(keyword), meta)))
fave = convertSFToFave(name, thumb, cmd, keyword)
fave[2] = favourite.updateSFOption(fave[2], 'meta', meta)
menu.append((GETTEXT(30209), 'XBMC.RunPlugin(%s?mode=%d&name=%s&thumb=%s&cmd=%s&fanart=%s&meta=%s)' % (sys.argv[0], _COPYTOSF, urllib.quote_plus(fave[0]), urllib.quote_plus(fave[1]), urllib.quote_plus(fave[2]), urllib.quote_plus(fanart), meta)))
def addableToSF():
if mode == _SUPERSEARCH:
return True
if mode == _HISTORYSHOW:
return True
if mode == _RECOMMEND_KEY:
return True
if mode == _RECOMMEND_IMDB:
return True
if mode == _RECOMMEND_KEY_A:
return True
return False
def removeNumeric(text):
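# Illustrative intent (hypothetical label): for '[COLOR red]1' +
# NUMBER_SEP + 'My Item[/COLOR]' the numeric part of the prefix is
# dropped while the leading colour tag is kept, yielding
# '[COLOR red]My Item[/COLOR]'.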
if not LABEL_NUMERIC:
return text
root = text.split(NUMBER_SEP, 1)[0]
if root.startswith('['):
root = root.rsplit(']', 1)[0] + ']'
else:
root = ''
return root + text.split(NUMBER_SEP, 1)[-1]
def convertSFToFave(name, thumb, cmd, keyword):
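# Converts one of this addon's own plugin:// cmds into a favourite
# entry [name, thumb, cmd], wrapping the cmd in ActivateWindow or
# PlayMedia as appropriate for the target mode.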
p = utils.get_params(cmd.replace('?', '&'))
name = removeNumeric(name)
try:
mode = int(p['mode'])
if mode == _FOLDER:
label = urllib.unquote_plus(p['label'])
path = urllib.unquote_plus(p['path'])
path = utils.convertToHome(path)
label = removeNumeric(label)
cmd = '%s?label=%s&mode=%d&path=%s' % (sys.argv[0], label, _FOLDER, urllib.quote_plus(path))
except:
mode = _IGNORE
cmd = '"%s"' % cmd
folder = mode == _FOLDER
search = mode == _SUPERSEARCH
edit = mode == _EDITTERM
activate = mode == _ACTIVATESEARCH
activateW = mode == _ACTIVATEWINDOW
recommend = mode == _RECOMMEND_KEY or mode == _RECOMMEND_IMDB
iPlay = mode == _IPLAY
history = mode == _HISTORYSHOW
playSF = mode == _PLAY_SUPER_FOLDER
isSF = cmd.startswith('"plugin://%s' % ADDONID)
isCached = False
if activateW:
isCached = utils.DialogYesNo(GETTEXT(30207))
try: fanart = urllib.unquote_plus(p['fanart'])
except: fanart = ''
if activate:
cmd = urllib.unquote_plus(p['cmd'])
elif isCached or playSF:
cmd = 'PlayMedia(%s)' % cmd
elif isSF:
cmd = cmd.replace('+', '%20')
cmd = 'ActivateWindow(%d,%s,return)' % (utils.getCurrentWindowId(), cmd)
if activateW:
cmd = cmd.replace('mode=%d' % _ACTIVATEWINDOW, 'mode=%d' % _ACTIVATEWINDOW_XBMC)
else:
fanart = ''
cmd = 'PlayMedia(%s)' % cmd
if search:
name = GETTEXT(30054)
if edit:
name = GETTEXT(30054)
cmd = cmd.replace('&mode=%d' % _EDITTERM, '&mode=%d' % _SUPERSEARCH)
if recommend:
name = GETTEXT(30088)
if search and ('keyword' not in cmd):
find = '&mode=%d' % _SUPERSEARCH
replace = '%s&keyword=%s' % (find, ISEARCH_EMPTY)
cmd = cmd.replace(find, replace)
if not cmd.lower().endswith(',return)'):
cmd = cmd[:-1] + ',return)'
if folder:
thumbnail, fanart = utils.getFolderThumb(path)
#cmd = favourite.addFanart(cmd, fanart)
cmd = favourite.addFanart(cmd, fanart)
keyword = urllib.unquote_plus(keyword)
if len(keyword) > 0:
name += ' - %s' % keyword
fave = [name, thumb, cmd]
return fave
def addToSF(name, thumb, cmd, keyword, meta):
text = GETTEXT(30019)
folder = utils.GetSFFolder(text)
if not folder:
return False
fave = convertSFToFave(name, thumb, cmd, keyword)
fave[2] = favourite.updateSFOption(fave[2], 'meta', urllib.quote_plus(meta))
file = os.path.join(folder, FILENAME)
#if it is already in there don't add again
if favourite.findFave(file, fave[2])[0]:
return False
return addToFile(fave, file)
def addToFile(fave, file):
cmd = fave[2]
#if it is already in there don't add again
if favourite.findFave(file, cmd)[0]:
return False
faves = favourite.getFavourites(file, validate=False)
faves.append(fave)
favourite.writeFavourites(file, faves)
return True
def addToXBMC(name, thumb, cmd, keyword):
fave = convertSFToFave(name, thumb, cmd, keyword)
file = os.path.join('special://profile', FILENAME)
return addToFile(fave, file)
def refresh():
xbmc.executebuiltin('Container.Refresh')
def removeAutoplay(path):
cfg = os.path.join(path, FOLDERCFG)
parameters.clearParam('AUTOPLAY', cfg)
refresh()
def addAutoplay(path):
cfg = os.path.join(path, FOLDERCFG)
parameters.setParam('AUTOPLAY', 'True', cfg)
refresh()
def showXBMCFolder():
global currentFolder
currentFolder = 'special://profile'
file = os.path.join(currentFolder, FILENAME)
parseFile(file)
def parseFile(file, sortorder=0, label_numeric=None, index=0):
if sortorder == 0:
sortorder = 1 if ALPHA_SORT else 2
global separator
faves = favourite.getFavourites(file)
if label_numeric == None:
label_numeric = LABEL_NUMERIC
text = GETTEXT(30099) % DISPLAYNAME if mode == _XBMC else GETTEXT(30068)
if sortorder == 1: #ALPHA_SORT:
faves = sorted(faves, key=lambda x: utils.CleanForSort(x))
for fave in faves:
utils.log(fave)
label = fave[0]
thumb = fave[1]
cmd = fave[2]
fanart = favourite.getFanart(cmd)
desc = favourite.getOption(cmd, 'desc')
meta = favourite.getOption(cmd, 'meta')
manualUnset = MANUAL_CMD in cmd
infolabel = utils.convertURLToDict(meta)
if len(desc) > 0:
infolabel['plot'] = urllib.unquote(desc)
menu = []
menu.append((text, 'XBMC.RunPlugin(%s?mode=%d&file=%s&cmd=%s&name=%s&thumb=%s)' % (sys.argv[0], _EDITFAVE, urllib.quote_plus(file), urllib.quote_plus(cmd), urllib.quote_plus(label), urllib.quote_plus(thumb))))
if not PLAY_PLAYLISTS:
import playlist
if playlist.isPlaylist(cmd):
menu.append((GETTEXT(30084), 'XBMC.RunPlugin(%s?mode=%d&file=%s&cmd=%s)' % (sys.argv[0], _PLAYLIST, urllib.quote_plus(file), urllib.quote_plus(cmd))))
menu.append((GETTEXT(30178), 'XBMC.RunPlugin(%s?mode=%d&file=%s&cmd=%s)' % (sys.argv[0], _COPY, urllib.quote_plus(file), urllib.quote_plus(cmd))))
#menu.append((GETTEXT(30177), 'XBMC.RunPlugin(%s?mode=%d&file=%s&cmd=%s)' % (sys.argv[0], _CUT, urllib.quote_plus(file), urllib.quote_plus(cmd))))
type = _ACTIVATEWINDOW
isFolder = True
if manualUnset:
type = _IGNORE
isFolder = False
elif 'playmedia(' in cmd.lower():
type = _PLAYMEDIA
isFolder = False
label, index = utils.addPrefixToLabel(index, label, label_numeric)
path = checkForSuperFolderLink(cmd)
if path:
addDir(label, _FOLDER, path=path, thumbnail=thumb, isFolder=True, menu=menu, fanart=fanart, infolabels=infolabel)
else:
addDir(label, type, cmd=cmd, thumbnail=thumb, isFolder=isFolder, menu=menu, fanart=fanart, infolabels=infolabel)
separator = len(faves) > 0
def checkForSuperFolderLink(cmd):
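# Resolves an ActivateWindow(...) favourite that points back into this
# addon's _FOLDER mode to the underlying profile sub-folder; returns
# None when cmd is not such a link.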
try:
if not cmd.startswith('ActivateWindow'):
return None
if 'plugin://plugin.program.super.favourites' not in cmd:
return None
params = utils.get_params(cmd)
mode = int(params['mode'])
if mode != _FOLDER:
return None
if 'path' in params:
path = params['path'].replace('\\', '/')
if path.replace('\\', '/').startswith(PROFILE):
return path
if 'folder' in params:
folder = params['folder'].replace('\\', '/')
folder = os.path.join(PROFILE, folder)
return folder
except:
pass
return None
def addMainItems():
if mode != _MAIN:
return
global separator
addSuperSearch()
if SHOWIEXPLORE:
separator = False
folder = 'DefaultFolder.png'
addDir(GETTEXT(30225), _IEXPLORE, thumbnail=folder, isFolder=True, infolabels={'plot':GETTEXT(30227)})
separator = True
if SHOWIPLAY:
separator = False
thumbnail = 'DefaultVideoPlaylists.png'
fanart = ''
label = GETTEXT(30146) % DISPLAYNAME
addDir(label, _IPLAY, thumbnail=thumbnail, isFolder=True, fanart=fanart, infolabels={'plot':GETTEXT(30196)})
separator = True
addNewFolderItem(PROFILE)
if SHOWXBMC:
separator = False
thumbnail, fanart = utils.getFolderThumb(PROFILE, True)
label = GETTEXT(30040) % DISPLAYNAME
addDir(label, _XBMC, thumbnail=thumbnail, isFolder=True, fanart=fanart, infolabels={'plot':GETTEXT(30197)})
separator = True
if SHOWIMPORT and LOCATION:
separator = False
thumbnail = 'DefaultFile.png'
fanart = ''
addDir(GETTEXT(30125), _IMPORT, thumbnail=thumbnail, isFolder=False, fanart=fanart, infolabels={'plot':GETTEXT(30198)})
separator = True
def playSuperFolder(path, id):
if not path:
path = PROFILE
import locking
if not locking.unlock(path):
return
file = os.path.join(path, FILENAME)
faves = favourite.getFavourites(file)
if ALPHA_SORT:
faves = sorted(faves, key=lambda x: utils.CleanForSort(x))
items = []
for fave in faves:
label = fave[0]
thumb = fave[1]
cmd = fave[2]
try:
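# pull the quoted plugin path out of the stored cmd (e.g. out of a
# PlayMedia("...") wrapper) and strip this addon's sf_options from it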
cmd = re.compile('"(.+?)"').search(cmd).group(1)
cmd = favourite.removeSFOptions(cmd)
items.append([label, cmd, thumb])
except:
pass
utils.log('Play Super Folder - Items')
utils.log(items)
utils.playItems(items, id)
def playFolder(folder):
if sfile.isfile(folder):
folder = sfile.getfolder(folder)
files = utils.parseFolder(folder, subfolders=False)
items = []
for file in files:
items.append([file[0], file[1]])
utils.playItems(items)
def playFile(filename):
#cmd = 'PlayMedia(%s)' % path
#xbmc.executebuiltin(cmd)
items = [[sfile.getfilename(filename), filename]]
utils.playItems(items)
def iExplore(path=None):
file = 'DefaultMovies.png'
folder = 'DefaultFolder.png'
if not path:
path = xbmcgui.Dialog().browse(3, GETTEXT(30226), 'files', '', False, False)
try: path = path.rsplit(os.sep, 1)[0] + os.sep
except: return
try: items = utils.parseFolder(path)
except: items = []
index = 0
for item in items:
label = item[0]
url = item[1].replace('\\', '/')
isFile = item[2]
title = label
label, index = utils.addPrefixToLabel(index, label)
menu = []
if isFile:
menu.append((GETTEXT(30230), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _PLAY_FOLDER, urllib.quote_plus(url))))
menu.append((GETTEXT(30047), 'XBMC.RunPlugin(%s?mode=%d&path=%s&label=%s&thumb=%s)' % (sys.argv[0], _COPY_PLAY_TO_SF_ITEM, urllib.quote_plus(url), urllib.quote_plus(title), urllib.quote_plus(file))))
addDir(label, _PLAY_FILE, path=url, thumbnail=file, isFolder=False, menu=menu, infolabels={'plot':GETTEXT(30229) % title})
else:
menu.append((GETTEXT(30231), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _PLAY_FOLDER, urllib.quote_plus(url))))
menu.append((GETTEXT(30047), 'XBMC.RunPlugin(%s?mode=%d&path=%s&label=%s&thumb=%s)' % (sys.argv[0], _COPY_PLAY_TO_SF, urllib.quote_plus(url), urllib.quote_plus(title), urllib.quote_plus(folder))))
addDir(label, _IEXPLORE, path=url, thumbnail=folder, isFolder=True, menu=menu, infolabels={'plot':GETTEXT(30228) % title})
def parseFolder(folder):
global separator
global currentFolder
currentFolder = folder
try: current, dirs, files = sfile.walk(folder)
except: return
dirs = sorted(dirs, key=str.lower)
folderConfig = os.path.join(folder, FOLDERCFG)
folderConfig = parameters.getParams(folderConfig)
label_numeric = parameters.getParam('NUMERICAL', folderConfig)
sortorder = parameters.getParam('SORT', folderConfig)
if label_numeric:
label_numeric = label_numeric.lower() == 'true'
else:
label_numeric = LABEL_NUMERIC
try: sortorder = int(sortorder)
except: sortorder = 0
index = 0
for dir in dirs:
path = os.path.join(current, dir)
folderConfig = os.path.join(path, FOLDERCFG)
visible = 'true' if DEBUG else parameters.getParam('VISIBLE', folderConfig)
if visible and visible.lower() == 'false':
continue
folderConfig = parameters.getParams(folderConfig)
lock = parameters.getParam('LOCK', folderConfig)
colour = parameters.getParam('COLOUR', folderConfig)
desc = parameters.getParam('DESC', folderConfig)
autoplay = parameters.getParam('AUTOPLAY', folderConfig)
meta = parameters.getParam('META', folderConfig)
infolabel = utils.convertURLToDict(meta)
if desc:
infolabel = {'plot':desc}
menu = []
menu.append((GETTEXT(30067), 'XBMC.RunPlugin(%s?mode=%d&path=%s&name=%s)' % (sys.argv[0], _EDITFOLDER, urllib.quote_plus(path), urllib.quote_plus(dir))))
if autoplay:
menu.append((GETTEXT(30235), 'XBMC.Container.Update(%s?mode=%d&path=%s)' % (sys.argv[0], _FOLDER, urllib.quote_plus(path))))
else:
menu.append((GETTEXT(30232), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _PLAY_SUPER_FOLDER, urllib.quote_plus(path))))
#if lock: #lock is now on Edit Super Folder menu
# menu.append((GETTEXT(30077), 'XBMC.RunPlugin(%s?mode=%d&path=%s&name=%s)' % (sys.argv[0], _UNSECURE, urllib.quote_plus(path), urllib.quote_plus(dir))))
#else:
# menu.append((GETTEXT(30076), 'XBMC.RunPlugin(%s?mode=%d&path=%s&name=%s)' % (sys.argv[0], _SECURE, urllib.quote_plus(path), urllib.quote_plus(dir))))
thumbnail, fanart = utils.getFolderThumb(path)
if not lock:
menu.append((GETTEXT(30181), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _COPYFOLDER, urllib.quote_plus(path))))
#menu.append((GETTEXT(30180), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _CUTFOLDER, urllib.quote_plus(path))))
if label_numeric:
prefix, index = utils.getPrefix(index)
dir = prefix + dir
if colour and colour.lower() != 'none':
dir = '[COLOR %s]%s[/COLOR]' % (colour, dir)
mode = _PLAY_SUPER_FOLDER if autoplay else _FOLDER
isFolder = not autoplay
isPlayable = not isFolder
addDir(dir, mode, path=path, thumbnail=thumbnail, isFolder=isFolder, menu=menu, fanart=fanart, infolabels=infolabel, isPlayable=isPlayable)
if len(dirs) > 0:
separator = True
file = os.path.join(folder, FILENAME)
parseFile(file, sortorder, label_numeric, index)
def getColour():
filename = os.path.join(HOME, 'resources', 'colours', 'Color.xml')
if not sfile.exists(filename):
return None
menu = [[GETTEXT(30087), 'SF_RESET']]
f = sfile.readlines(filename)
for line in f:
if 'name' in line:
name = line.split('"')[1]
menu.append(['[COLOR %s]%s[/COLOR]' % (name, name), name])
if len(menu) < 2:
return None
import menus
if DLG_MENU:
option = menus.selectMenu(GETTEXT(30086), menu)
else:
option = menus.showMenu(ADDONID, menu)
#option = menus.selectMenu(GETTEXT(30086), menu)
if option < 0:
return None
return option
def getImage():
root = HOME.split(os.sep, 1)[0] + os.sep
image = xbmcgui.Dialog().browse(2,GETTEXT(30044), 'files', '', False, False, root)
if image and image != root:
return image
return None
def getSkinImage():
image = ''
skin = xbmc.getSkinDir().lower()
icon = os.path.join(HOME, 'resources', skin, 'icons')
items = ['Super Favourite']
if sfile.exists(icon):
f = sfile.file(icon, 'r')
for line in f:
items.append(line.strip())
f.close()
if (len(items) > 1) and utils.DialogYesNo(GETTEXT(30046)):
import imagebrowser
return imagebrowser.getImage(ADDONID, items)
return getImage()
def removeFanartFolder(path):
folderConfig = os.path.join(path, FOLDERCFG)
parameters.setParam('FANART', '', folderConfig)
return True
def removeThumbFolder(path):
folderConfig = os.path.join(path, FOLDERCFG)
parameters.setParam('ICON', '', folderConfig)
return True
def fanartFolder(path,image=None):
if not image:
image = getImage()
if not image:
return False
image = utils.convertToHome(image)
folderConfig = os.path.join(path, FOLDERCFG)
parameters.setParam('FANART', image, folderConfig)
return True
def thumbFolder(path, image=None):
if not image:
image = getImage()
if not image:
return False
folderConfig = os.path.join(path, FOLDERCFG)
parameters.setParam('ICON', utils.convertToHome(image), folderConfig)
return True
def removeMetaFave(file, cmd):
return pasteMetaFave(file, cmd, '')
def pasteMetaFave(file, cmd, meta_clip):
fave, index, nFaves = favourite.findFave(file, cmd)
if not fave:
return False
fave[2] = favourite.updateSFOption(cmd, 'meta', urllib.quote_plus(meta_clip))
favourite.updateFave(file, fave)
return True
def removeThumbFave(file, cmd):
fave, index, nFaves = favourite.findFave(file, cmd)
if len(fave[1]) < 1:
return False
fave[1] = ''
favourite.updateFave(file, fave)
return True
def removeFanartFave(file, cmd):
fave, index, nFaves = favourite.findFave(file, cmd)
fave[2] = favourite.updateSFOption(cmd, 'fanart', '')
favourite.updateFave(file, fave)
return True
def fanartFave(file, cmd, image=None):
if not image:
image = getImage()
if not image:
return False
fave, index, nFaves = favourite.findFave(file, cmd)
fave[2] = favourite.addFanart(fave[2], image)
return favourite.updateFave(file, fave)
def thumbFave(file, cmd, image=None):
if not image:
image = getImage()
if not image:
return False
fave, index, nFaves = favourite.findFave(file, cmd)
fave[1] = image
return favourite.updateFave(file, fave)
def getFolder(title):
return utils.GetFolder(title)
def createNewFolder(current):
text = utils.fileSystemSafe(utils.GetText(GETTEXT(30013)))
if not text:
return False
folder = os.path.join(current, text)
if sfile.exists(folder):
utils.DialogOK('', GETTEXT(30014) % text)
return False
sfile.makedirs(folder)
return True
def changePlaybackMode(file, cmd):
OPTION = 'mode'
try: mode = int(favourite.getOption(cmd, OPTION))
except: mode = 0
playMedia = GETTEXT(30142)
activateWindow = GETTEXT(30143)
runPlugin = GETTEXT(30144)
if mode == PLAYMEDIA_MODE:
playMedia = '[COLOR selected]%s[/COLOR]' % playMedia
if mode == ACTIVATEWINDOW_MODE:
activateWindow = '[COLOR selected]%s[/COLOR]' % activateWindow
if mode == RUNPLUGIN_MODE:
runPlugin = '[COLOR selected]%s[/COLOR]' % runPlugin
options = []
options.append([playMedia, PLAYMEDIA_MODE])
options.append([activateWindow, ACTIVATEWINDOW_MODE])
options.append([runPlugin, RUNPLUGIN_MODE])
import menus
if DLG_MENU:
option = menus.selectMenu(GETTEXT(30052), options)
else:
option = menus.showMenu(ADDONID, options)
if option == mode:
return False
if option == -1:
return False
fave, index, nFaves = favourite.findFave(file, cmd)
if len(fave[2]) < 1:
return False
fave[2] = favourite.updateSFOption(fave[2], OPTION, option)
favourite.updateFave(file, fave)
return True
def editFolder(path, name):
cfg = os.path.join(path, FOLDERCFG)
cfg = parameters.getParams(cfg)
thumb = parameters.getParam('ICON', cfg)
fanart = parameters.getParam('FANART', cfg)
lock = parameters.getParam('LOCK', cfg)
autoplay = parameters.getParam('AUTOPLAY', cfg)
sortorder = parameters.getParam('SORT', cfg)
meta = parameters.getParam('META', cfg)
hasThumb = thumb and len(thumb) > 0
hasFanart = fanart and len(fanart) > 0
hasMeta = meta and len(meta) > 0
REMOVE = 0
RENAME = 1
CHOOSETHUMB = 2
PASTETHUMB = 3
CHOOSEFANART = 4
PASTEFANART = 5
REMOVETHUMB = 6
REMOVEFANART = 7
EDITDESC = 8
PASTEDESC = 9
COLOUR = 10
LOCK = 11
AUTOPLAY = 12
SORTORDER = 13
PASTEMETA = 14
REMOVEMETA = 15
DESC_MENU = 100
META_MENU = 101
THUMB_MENU = 102
FANART_MENU = 103
import clipboard
thumb_clip = clipboard.getThumb()
fanart_clip = clipboard.getFanart()
desc_clip = clipboard.getDesc()
meta_clip = clipboard.getMeta()
# --------- Build Menu -----------
options = []
options.append([GETTEXT(30011), REMOVE])
options.append([GETTEXT(30012), RENAME])
#lock/unlock
label = GETTEXT(30077) if lock else GETTEXT(30076)
options.append([label, LOCK])
#autoplay
label = GETTEXT(30234) if autoplay else GETTEXT(30233)
options.append([label, AUTOPLAY])
#sortorder
options.append([GETTEXT(30237), SORTORDER])
#description
if len(desc_clip) > 0:
options.append([GETTEXT(30264), DESC_MENU])
else:
options.append([GETTEXT(30194), EDITDESC])
#meta
if hasMeta and len(meta_clip) > 0:
options.append([GETTEXT(30265), META_MENU])
else:
if (hasMeta):
options.append([GETTEXT(30263), REMOVEMETA])
if len(meta_clip) > 0:
options.append([GETTEXT(30262), PASTEMETA])
#thumb
if hasThumb or len(thumb_clip) > 0:
options.append([GETTEXT(30266), THUMB_MENU])
else:
options.append([GETTEXT(30043), CHOOSETHUMB])
#fanart
if hasFanart or len(fanart_clip) > 0:
options.append([GETTEXT(30267), FANART_MENU])
else:
options.append([GETTEXT(30107), CHOOSEFANART])
options.append([GETTEXT(30085), COLOUR])
# --------- Get Choice -----------
import menus
if DLG_MENU:
option = menus.selectMenu(name, options)
else:
option = menus.showMenu(ADDONID, options)
options = []
# --------- Handle Further Menus -----------
if option == DESC_MENU:
options.append([GETTEXT(30194), EDITDESC])
options.append([GETTEXT(30212), PASTEDESC])
if option == META_MENU:
options.append([GETTEXT(30263), REMOVEMETA])
options.append([GETTEXT(30262), PASTEMETA])
if option == THUMB_MENU:
options.append([GETTEXT(30043), CHOOSETHUMB])
if len(thumb_clip) > 0:
options.append([GETTEXT(30210), PASTETHUMB])
if hasThumb:
options.append([GETTEXT(30097), REMOVETHUMB])
if option == FANART_MENU:
options.append([GETTEXT(30107), CHOOSEFANART])
if len(fanart_clip) > 0:
options.append([GETTEXT(30211), PASTEFANART])
if hasFanart:
options.append([GETTEXT(30108), REMOVEFANART])
# --------- Get Choice from 2nd Menu if Necessary-----------
if len(options) > 0:
if DLG_MENU:
option = menus.selectMenu(name, options)
else:
option = menus.showMenu(ADDONID, options)
# --------- Handle Choice -----------
if option == REMOVE:
return removeFolder(path)
if option == RENAME:
return renameFolder(path)
if option == EDITDESC:
return editFolderDescription(path, name)
if option == PASTEDESC:
return editFolderDescription(path, name, desc_clip)
if option == CHOOSETHUMB:
return thumbFolder(path)
if option == PASTETHUMB:
return thumbFolder(path, thumb_clip)
if option == CHOOSEFANART:
return fanartFolder(path)
if option == PASTEFANART:
return fanartFolder(path, fanart_clip)
if option == REMOVETHUMB:
return removeThumbFolder(path)
if option == REMOVEFANART:
return removeFanartFolder(path)
if option == COLOUR:
return colourFolder(path)
if option == LOCK:
import locking
dir = sfile.getfilename(path)
if lock:
locking.remove(path, name)
else:
locking.add(path, name)
refresh()
if option == AUTOPLAY:
if autoplay:
removeAutoplay(path)
else:
addAutoplay(path)
if option == SORTORDER:
setFolderSortOrder(path, sortorder)
if option == REMOVEMETA:
return removeMetaFolder(path)
if option == PASTEMETA:
return pasteMetaFolder(path, meta_clip)
return False
def setFolderSortOrder(path, current):
try: current = int(current)
except: current = 0
config = GETTEXT(30238) # 0
alpha = GETTEXT(30239) # 1
manual = GETTEXT(30240) # 2
if current == 0:
config = '[COLOR selected]%s[/COLOR]' % config
if current == 1:
alpha = '[COLOR selected]%s[/COLOR]' % alpha
if current == 2:
manual = '[COLOR selected]%s[/COLOR]' % manual
options = []
options.append([config, 0])
options.append([alpha, 1])
options.append([manual, 2])
import menus
if DLG_MENU:
option = menus.selectMenu(GETTEXT(30237), options)
else:
option = menus.showMenu(ADDONID, options)
if option == current:
return False
if option == -1:
return False
cfg = os.path.join(path, FOLDERCFG)
parameters.setParam('SORT', option, cfg)
def editFave(file, cmd, name, thumb):
# this method really needs refactoring!
global itemIndex
fanart = favourite.getFanart(cmd)
hasThumb = len(thumb) > 0
hasFanart = len(fanart) > 0
hasMode = cmd.startswith('PlayMedia')
hasMeta = len(favourite.getOption(cmd, 'meta')) > 0
UP = 0
DOWN = 1
COPY = 2
MOVE = 3
REMOVE = 4
RENAME = 5
EDITDESC = 6
PASTEDESC = 7
CHOOSETHUMB = 8
PASTETHUMB = 9
CHOOSEFANART = 10
PASTEFANART = 11
REMOVETHUMB = 12
REMOVEFANART = 13
COLOUR = 14
PLAYBACKMODE = 15
MANUALEDIT = 16
REMOVEMETA = 17
PASTEMETA = 18
DESC_MENU = 100
META_MENU = 101
THUMB_MENU = 102
FANART_MENU = 103
import clipboard
thumb_clip = clipboard.getThumb()
fanart_clip = clipboard.getFanart()
desc_clip = clipboard.getDesc()
meta_clip = clipboard.getMeta()
path = sfile.getfolder(file)
cfg = os.path.join(path, FOLDERCFG)
try: sortorder = int(parameters.getParam('SORT', cfg))
except: sortorder = 0
if sortorder == 0:
sortorder = 1 if ALPHA_SORT else 2
# --------- Build Menu -----------
options = []
if sortorder == 2:
options.append([GETTEXT(30041), UP])
options.append([GETTEXT(30042), DOWN])
options.append([GETTEXT(30007), COPY])
options.append([GETTEXT(30008), MOVE])
options.append([GETTEXT(30009), REMOVE])
options.append([GETTEXT(30010), RENAME])
#description
if len(desc_clip) > 0:
options.append([GETTEXT(30264), DESC_MENU])
else:
options.append([GETTEXT(30194), EDITDESC])
#meta
if hasMeta and len(meta_clip) > 0:
options.append([GETTEXT(30265), META_MENU])
else:
if (hasMeta):
options.append([GETTEXT(30263), REMOVEMETA])
if len(meta_clip) > 0:
options.append([GETTEXT(30262), PASTEMETA])
#thumb
if hasThumb or len(thumb_clip) > 0:
options.append([GETTEXT(30266), THUMB_MENU])
else:
options.append([GETTEXT(30043), CHOOSETHUMB])
#fanart
if hasFanart or len(fanart_clip) > 0:
options.append([GETTEXT(30267), FANART_MENU])
else:
options.append([GETTEXT(30107), CHOOSEFANART])
options.append([GETTEXT(30085), COLOUR])
if hasMode:
options.append([GETTEXT(30052), PLAYBACKMODE])
options.append([GETTEXT(30168), MANUALEDIT])
# --------- Get Choice -----------
import menus
if DLG_MENU:
option = menus.selectMenu(name, options)
else:
option = menus.showMenu(ADDONID, options)
options = []
# --------- Handle Further Menus -----------
if option == DESC_MENU:
options.append([GETTEXT(30194), EDITDESC])
options.append([GETTEXT(30212), PASTEDESC])
if option == META_MENU:
options.append([GETTEXT(30263), REMOVEMETA])
options.append([GETTEXT(30262), PASTEMETA])
if option == THUMB_MENU:
options.append([GETTEXT(30043), CHOOSETHUMB])
if len(thumb_clip) > 0:
options.append([GETTEXT(30210), PASTETHUMB])
if hasThumb:
options.append([GETTEXT(30097), REMOVETHUMB])
if option == FANART_MENU:
options.append([GETTEXT(30107), CHOOSEFANART])
if len(fanart_clip) > 0:
options.append([GETTEXT(30211), PASTEFANART])
if hasFanart:
options.append([GETTEXT(30108), REMOVEFANART])
# --------- Get Choice from 2nd Menu if Necessary-----------
if len(options) > 0:
if DLG_MENU:
option = menus.selectMenu(name, options)
else:
option = menus.showMenu(ADDONID, options)
# --------- Handle Choice -----------
if option == UP:
#itemIndex = -1
return favourite.shiftFave(file, cmd, up=True)
if option == DOWN:
#itemIndex = -1
return favourite.shiftFave(file, cmd, up=False)
if option == COPY:
return copyFave(file, cmd)
if option == MOVE:
return moveFave(file, cmd)
if option == REMOVE:
return favourite.removeFave(file, cmd)
if option == RENAME:
return renameFave(file, cmd)
if option == EDITDESC:
return editDescription(file, cmd, name)
if option == PASTEDESC:
return editDescription(file, cmd, name, desc_clip)
if option == CHOOSETHUMB:
return thumbFave(file, cmd)
if option == PASTETHUMB:
return thumbFave(file, cmd, thumb_clip)
if option == CHOOSEFANART:
return fanartFave(file, cmd)
if option == PASTEFANART:
return fanartFave(file, cmd, fanart_clip)
if option == REMOVETHUMB:
return removeThumbFave(file, cmd)
if option == REMOVEFANART:
return removeFanartFave(file, cmd)
if option == COLOUR:
return colourFave(file, cmd)
if option == PLAYBACKMODE:
return changePlaybackMode(file, cmd)
if option == MANUALEDIT:
return manualEdit(file, cmd, name, thumb)
if option == REMOVEMETA:
return removeMetaFave(file, cmd)
if option == PASTEMETA:
return pasteMetaFave(file, cmd, meta_clip)
return False
def removeMetaFolder(path):
cfg = os.path.join(path, FOLDERCFG)
parameters.clearParam('META', cfg)
return True
def pasteMetaFolder(path, meta):
cfg = os.path.join(path, FOLDERCFG)
parameters.setParam('META', urllib.quote(meta), cfg)
return True
def editFolderDescription(path, name, desc=None):
cfg = os.path.join(path, FOLDERCFG)
if desc == None:
desc = parameters.getParam('DESC', cfg)
desc = utils.GetText(name, text=desc, hidden=False, allowEmpty=True)
if desc == None:
return False
parameters.setParam('DESC', desc, cfg)
return True
def editDescription(file, cmd, name, desc=None):
fave, index, nFaves = favourite.findFave(file, cmd)
if not fave:
return False
if desc == None:
desc = favourite.getOption(cmd, 'desc')
desc = urllib.unquote(desc)
desc = utils.GetText(name, text=desc, hidden=False, allowEmpty=True)
if desc == None:
return False
fave[2] = favourite.updateSFOption(cmd, 'desc', urllib.quote(desc))
favourite.updateFave(file, fave)
return True
def manualEdit(file, _cmd, name='', thumb='', editName=True):
cmd = _cmd
if editName:
name = utils.GetText(GETTEXT(30021), name, allowEmpty=True)
if name == None:
return False
type = manualType(name, cmd)
if type < 0:
return False
windowID = '-1'
originalID = ''
if type == ACTIVATEWINDOW_MODE:
windowID, originalID = getWindowID(cmd, name)
if windowID == '0':
return False
newCmd = ''
manualUnset = MANUAL_CMD in cmd
title = GETTEXT(30170) % name
sfOptions = ''
prefix = ''
if manualUnset:
newCmd = utils.GetText(title, '', allowEmpty=True)
else:
if type == ACTION_MODE:
cmd = cmd.split('ExecuteBuiltin("', 1)[-1]
cmd = cmd.rsplit('")')[0]
else:
cmd = cmd.split('(', 1)[-1].rsplit(')', 1)[0] #split only on very outer brackets
if _cmd.lower().startswith('activatewindow'):
if cmd == originalID:
cmd = ''
else:
cmd = cmd.split(',', 1)[-1].strip()
if cmd.endswith(',return'):
cmd = cmd[:-7]
if cmd.startswith('&quot;'):
cmd = cmd[6:]
if cmd.endswith('&quot;'):
cmd = cmd[:-6]
if cmd.startswith('"'):
cmd = cmd[1:]
if cmd.endswith('"'):
cmd = cmd[:-1]
if cmd.lower() == 'return':
cmd = ''
cmd = cmd.replace('"', '')
if '?sf_options' in cmd:
prefix = '?sf_options'
cmd, sfOptions = cmd.split(prefix)
elif '&sf_options' in cmd:
prefix = '&sf_options'
cmd, sfOptions = cmd.split(prefix)
newCmd = utils.GetText(title, cmd, allowEmpty=True)
if newCmd == None:
return False
if len(prefix) > 0 and len(sfOptions) > 0:
newCmd += prefix + sfOptions
newCmd = buildManualFave(type, newCmd, windowID)
fave = []
fave.append(name)
fave.append(thumb)
fave.append(newCmd)
return favourite.replaceFave(file, fave, _cmd)
def buildManualFave(type, cmd, windowID='-1'):
if type == ACTIVATEWINDOW_MODE:
if cmd:
return 'ActivateWindow("%s","%s",return)' % (windowID, cmd)
else:
return 'ActivateWindow("%s",return)' % (windowID)
if len(cmd) == 0:
return getDefaultManualCmd()
if type == PLAYMEDIA_MODE:
return 'PlayMedia("%s")' % cmd
elif type == RUNPLUGIN_MODE:
cmd = cmd.replace(',', '","')
return 'RunScript("%s")' % cmd
elif type == ACTION_MODE:
return 'ExecuteBuiltin("%s")' % cmd
return getDefaultManualCmd()
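# Illustration (hypothetical cmd): buildManualFave(PLAYMEDIA_MODE,
# 'plugin://some.addon/?p=1') yields
# 'PlayMedia("plugin://some.addon/?p=1")', while an empty cmd falls
# back to a fresh MANUAL_CMD placeholder from getDefaultManualCmd().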
def getWindowID(cmd, name):
try:
originalID = re.compile('activatewindow\((.+?),').search(cmd.lower()).group(1)
except:
try: originalID = re.compile('activatewindow\((.+?)\)').search(cmd.lower()).group(1)
except: originalID = ''
try:
if len(name) > 0:
title = GETTEXT(30175) % name
else:
title = GETTEXT(30176)
windowID = utils.GetText(title, originalID )
if windowID:
return windowID, originalID
except:
pass
return '0', originalID
def colourise(cmd, type, action):
if cmd.lower().startswith(type):
action = '[COLOR selected]%s[/COLOR]' % action
return action
def manualType(name, cmd):
title = GETTEXT(30169) % name
playMedia = colourise(cmd, 'playmedia', GETTEXT(30172))
activateWindow = colourise(cmd, 'activatewindow', GETTEXT(30173))
runScript = colourise(cmd, 'runscript', GETTEXT(30174))
action = colourise(cmd, 'executebuiltin', GETTEXT(30193))
options = []
options.append([playMedia, PLAYMEDIA_MODE])
options.append([activateWindow, ACTIVATEWINDOW_MODE])
options.append([runScript, RUNPLUGIN_MODE])
options.append([action, ACTION_MODE])
import menus
option = menus.selectMenu(title, options)
#if DLG_MENU:
# option = menus.selectMenu(title, options)
#else:
# option = menus.showMenu(ADDONID, options)
return option
def manualAdd(folder):
name = utils.GetText(GETTEXT(30021), '', allowEmpty=True)
if name == None:
return False
cmd = getDefaultManualCmd()
thumb = ''
file = os.path.join(folder, FILENAME)
return manualEdit(file, cmd, name, thumb, editName=False)
def getDefaultManualCmd():
id = int(ADDON.getSetting('MANUAL_ID'))
ADDON.setSetting('MANUAL_ID', str(id+1))
return MANUAL_CMD + str(id)
def editSearch(file, cmd, name, thumb):
global itemIndex
fanart = favourite.getFanart(cmd)
hasThumb = len(thumb) > 0
hasFanart = len(fanart) > 0
UP = 0
DOWN = 1
RENAME = 2
CHOOSETHUMB = 3
PASTETHUMB = 4
CHOOSEFANART = 5
PASTEFANART = 6
REMOVETHUMB = 7
REMOVEFANART = 8
COLOUR = 9
thumb_clip = xbmcgui.Window(10000).getProperty('SF_THUMB')
fanart_clip = xbmcgui.Window(10000).getProperty('SF_FANART')
options = []
if not ALPHA_SORT:
options.append([GETTEXT(30041), UP])
options.append([GETTEXT(30042), DOWN])
options.append([GETTEXT(30010), RENAME])
#thumb
options.append([GETTEXT(30043), CHOOSETHUMB])
if len(thumb_clip) > 0:
options.append([GETTEXT(30210), PASTETHUMB])
if hasThumb:
options.append([GETTEXT(30097), REMOVETHUMB])
#fanart
options.append([GETTEXT(30107), CHOOSEFANART])
if len(fanart_clip) > 0:
options.append([GETTEXT(30211), PASTEFANART])
if hasFanart:
options.append([GETTEXT(30108), REMOVEFANART])
options.append([GETTEXT(30085), COLOUR])
import menus
if DLG_MENU:
option = menus.selectMenu(name, options)
else:
option = menus.showMenu(ADDONID, options)
if option == UP:
#itemIndex = -1
return favourite.shiftFave(file, cmd, up=True)
if option == DOWN:
#itemIndex = -1
return favourite.shiftFave(file, cmd, up=False)
if option == RENAME:
return renameFave(file, cmd)
if option == CHOOSETHUMB:
return thumbFave(file, cmd)
if option == PASTETHUMB:
return thumbFave(file, cmd, thumb_clip)
if option == REMOVETHUMB:
return removeThumbFave(file, cmd)
if option == CHOOSEFANART:
return fanartFave(file, cmd)
if option == PASTEFANART:
return fanartFave(file, cmd, fanart_clip)
if option == REMOVEFANART:
return removeFanartFave(file, cmd)
if option == COLOUR:
return colourFave(file, cmd)
return False
def renameFolder(path):
label = path.rsplit(os.sep, 1)[-1]
title = label
try: text = utils.fileSystemSafe(utils.GetText(GETTEXT(30015) % title, label))
except:
# retry with a sanitised title; otherwise 'text' would be unbound below
title = utils.fix(title)
text = utils.fileSystemSafe(utils.GetText(GETTEXT(30015) % title, label))
if not text:
return False
root = path.rsplit(os.sep, 1)[0]
newName = os.path.join(root, text)
sfile.rename(path, newName)
return True
def colourFolder(path):
colour = getColour()
if not colour:
return False
cfg = os.path.join(path, FOLDERCFG)
if colour == 'SF_RESET':
parameters.clearParam('COLOUR', cfg)
else:
parameters.setParam('COLOUR', colour, cfg)
return True
def removeFolder(path):
label = path.rsplit(os.sep, 1)[-1]
label = utils.fix(label)
if not utils.DialogYesNo(GETTEXT(30016) % label, GETTEXT(30017), GETTEXT(30018)):
return False
sfile.rmtree(path)
return True
def moveFave(file, cmd):
if not copyFave(file, cmd, move=True):
return False
return favourite.removeFave(file, cmd)
def copyFave(file, cmd, move=False):
copy, index, nFaves = favourite.findFave(file, cmd)
if not copy:
return False
text = GETTEXT(30020) if move else GETTEXT(30019)
folder = getFolder(text)
if not folder:
return False
file = os.path.join(folder, FILENAME)
return favourite.copyFave(file, copy)
def renameFave(file, cmd):
fave, index, nFaves = favourite.findFave(file, cmd)
if not fave:
return False
newName = utils.GetText(GETTEXT(30021), text=fave[0], allowEmpty=True)
    if newName is None:
return False
return favourite.renameFave(file, cmd, newName)
def decolourize(text):
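    #strip any [COLOR ...]...[/COLOR] markup from the label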
    text = re.sub(r'\[COLOR (.+?)\]', '', text)
    text = re.sub(r'\[/COLOR\]', '', text)
return text
def colourFave(file, cmd):
colour = getColour()
if not colour:
return False
copy = []
faves = favourite.getFavourites(file)
for fave in faves:
if favourite.equals(fave[2], cmd):
fave[0] = decolourize(fave[0])
if colour != 'SF_RESET':
fave[0] = '[COLOR %s]%s[/COLOR]' % (colour, fave[0])
copy.append(fave)
favourite.writeFavourites(file, copy)
return True
def getTVDB(imdb):
try:
import json
if not imdb.endswith('?'):
imdb = imdb + '?'
url = 'http://api.themoviedb.org/3/find/%sapi_key=57983e31fb435df4df77afb854740ea9&external_source=imdb_id' % imdb
html = quicknet.getURL(url, maxSec=5*86400, agent='Firefox')
jsn = json.loads(html)
thumbnail = BLANK
fanart = FANART
movies = jsn['movie_results']
tvs = jsn['tv_results']
source = None
if len(movies) > 0:
source = movies[0]
elif len(tvs) > 0:
source = tvs[0]
if source:
try: thumbnail = 'http://image.tmdb.org/t/p/w342' + source['poster_path']
except: pass
try: fanart = 'http://image.tmdb.org/t/p/w780' + source['backdrop_path']
except: pass
return thumbnail, fanart
except:
pass
return BLANK, FANART
def getMeta(grabber, name, type, year=None, season=None, episode=None, imdb=None):
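    #look up movie or episode metadata via metahandlers and map it onto Kodi infolabels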
infoLabels = {}
if not imdb:
imdb = ''
imdb = imdb.replace('/?', '')
    if year == '':
        year = None
    if year is None:
        try: year = re.search(r'\s*\((\d\d\d\d)\)', name).group(1)
        except: year = None
if year is not None:
name = name.replace(' ('+year+')','').replace('('+year+')','')
if 'movie' in type:
meta = grabber.get_meta('movie', name, imdb, None, year, overlay=6)
meta['trailer_url'] = '%s?mode=%d&path=%s' % (sys.argv[0], _PLAYTRAILER, meta['trailer_url'])
infoLabels = {'rating': meta['rating'],'trailer': meta['trailer_url'],'duration': meta['duration'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],'plot': meta['plot'],'title': meta['title'],'writer': meta['writer'],'cover_url': meta['cover_url'],'director': meta['director'],'castandrole': meta['cast'],'fanart': meta['backdrop_url'],'tmdb_id': meta['tmdb_id'],'year': meta['year']}
elif 'tvshow' in type:
meta = grabber.get_episode_meta(name, imdb, season, episode)
infoLabels = {'rating': meta['rating'],'genre': meta['genre'],'mpaa':"rated %s"%meta['mpaa'],'plot': meta['plot'],'title': meta['title'],'cover_url': meta['cover_url'],'fanart': meta['backdrop_url'],'Episode': meta['episode'],'Aired': meta['premiered']}
return infoLabels
def getMovieMenu(infolabels, menu=None):
if not menu:
menu = []
if len(infolabels) == 0:
return menu
menu.append((GETTEXT(30090), 'Action(Info)'))
try:
if 'trailer' in infolabels and len(infolabels['trailer']) > 0:
menu.append((GETTEXT(30091), 'XBMC.RunPlugin(%s)' % infolabels['trailer']))
except:
pass
return menu
def hasIMDBRecommendations(imdb):
url = 'http://imdb.com/title/%s' % imdb
html = quicknet.getURL(url, maxSec=86400, agent='Firefox')
items = re.compile('<div class="rec_details".+?<a href="/title/(.+?)/?ref_=tt_rec_tt"><b>(.+?)</b></a>.+?<div class="rec-outline">(.+?)</p>').findall(html)
return len(items) > 0
def getMetaGrabber():
if METARECOMMEND:
try:
from metahandler import metahandlers
return metahandlers.MetaData(tmdb_api_key=TMDB_API_KEY)
except:
pass
return None
def recommendIMDB(imdb, keyword, fallback=True):
grabber = getMetaGrabber()
url = 'http://imdb.com/title/%s' % imdb
html = quicknet.getURL(url, maxSec=86400, agent='Firefox')
items = re.compile('<div class="rec_details".+?<a href="/title/(.+?)/?ref_=tt_rec_tt"><b>(.+?)</b></a>.+?<div class="rec-outline">(.+?)</p>').findall(html)
if len(items) == 0 and fallback:
return recommendKey(keyword, RECOMMENDAUTO)
count = 0
infolabels = {}
for item in items:
imdb = item[0]
name = item[1]
thumbnail = BLANK
fanart = FANART
if grabber:
infolabels = getMeta(grabber, '', 'movie', year=None, imdb=imdb)
thumbnail = infolabels['cover_url']
fanart = infolabels['fanart']
try:
outline = utils.RemoveTags(item[2]).strip() if grabber else None
if outline:
if ('plot' not in infolabels) or (not infolabels['plot']):
infolabels['plot'] = outline
if SYNOPSRECOMMEND:
name += ' - [I]%s[/I]' % utils.unescape(utils.fix(infolabels['plot']))
menu = getMovieMenu(infolabels)
getHistoryItem(menu, name, thumbnail, fanart, infolabels, False)
label, count = utils.addPrefixToLabel(count, name)
addDir(label, _SUPERSEARCH, thumbnail=thumbnail, isFolder=True, menu=menu, fanart=fanart, keyword=name, imdb=imdb, infolabels=infolabels, totalItems=len(items))
except:
pass
return count
def recommendKey(keyword, autoRecommend):
grabber = getMetaGrabber()
url = 'http://m.imdb.com/find?q=%s' % urllib.quote(keyword)
html = quicknet.getURL(url, maxSec=86400, agent='Apple-iPhone/')
items = re.compile('<div class="title">.+?<a href="/title/(.+?)/">(.+?)</a>(.+?)</div>').findall(html)
infolabels = {}
index = 0
for item in items:
imdb = item[0]
name = item[1]
if autoRecommend:
autoRecommend = hasIMDBRecommendations(imdb)
if 'video game' in item[2].lower():
continue
label = name + ' ' + item[2].strip()
thumbnail = BLANK
fanart = FANART
if grabber:
infolabels = getMeta(grabber, name, 'movie', year=None, imdb=imdb)
thumbnail = infolabels['cover_url']
fanart = infolabels['fanart']
if SYNOPSRECOMMEND:
if ('plot' in infolabels) and (infolabels['plot']):
                    label += ' - [I]%s[/I]' % utils.unescape(infolabels['plot'])
menu = getMovieMenu(infolabels)
getHistoryItem(menu, name, thumbnail, fanart, infolabels, False)
label, index = utils.addPrefixToLabel(index, label)
if autoRecommend:
autoRecommend = False
addDir(label, _RECOMMEND_KEY_A, thumbnail=thumbnail, isFolder=True, menu=menu, fanart=fanart, keyword=name, infolabels=infolabels)
if recommendIMDB(imdb, keyword, fallback=False) > 0:
return
else:
addDir(label, _SUPERSEARCH, thumbnail=thumbnail, isFolder=True, menu=menu, fanart=fanart, keyword=name, imdb=imdb, infolabels=infolabels, totalItems=len(items))
def editSearchTerm(_keyword):
keyword = utils.GetText(GETTEXT(30057), _keyword)
if (not keyword) or len(keyword) < 1:
keyword = _keyword
winID = xbmcgui.getCurrentWindowId()
cmd = 'ActivateWindow(%d,"%s?mode=%d&keyword=%s")' % (winID, sys.argv[0], _SUPERSEARCH, urllib.quote_plus(keyword))
activateWindowCommand(cmd)
def externalSearch():
xbmcplugin.endOfDirectory(int(sys.argv[1]))
keyword = ''
kb = xbmc.Keyboard(keyword, GETTEXT(30054))
kb.doModal()
if kb.isConfirmed():
keyword = kb.getText()
cmd = '%s?mode=%d&keyword=%s' % (sys.argv[0], _SUPERSEARCH, urllib.quote_plus(keyword))
xbmc.executebuiltin('XBMC.Container.Refresh(%s)' % cmd)
def iHistoryBrowse():
items = history.browse()
items = sorted(items, key=lambda x: x[0].lower())
index = 0
for item in items:
label = item[0]
thumb = item[1]
fanart = favourite.getFanart(item[2])
meta = favourite.getOption(item[2], 'meta')
menu = []
menu.append((GETTEXT(30165) % label, 'XBMC.RunPlugin(%s?mode=%d&name=%s)' % (sys.argv[0], _HISTORYREMOVE, urllib.quote_plus(label))))
infolabels = utils.convertURLToDict(meta)
if not 'plot' in infolabels:
infolabels['plot'] = GETTEXT(30200) % label
text, index = utils.addPrefixToLabel(index, label)
addDir(text, _SUPERSEARCH, thumbnail=thumb, isFolder=True, fanart=fanart, keyword=label, menu=menu, infolabels=infolabels)
return False
def iHistoryAdd(keyword, image, fanart, meta):
return history.add(keyword, image, fanart, meta)
def iHistoryRemove(name):
return history.remove(name)
def iPlay():
#add browse item
#addDir(GETTEXT(30148), _PLAYLISTBROWSE, thumbnail='DefaultMusicPlaylists.png', isFolder=False)
import playlist
nItems = 0
folder = os.path.join(ROOT, 'PL')
sfile.makedirs(folder)
#parse SF folder
nItems += addPlaylistItems(playlist.parseFolder(folder), 'DefaultMusicVideos.png', delete=True)
#parse SF list
file = os.path.join(folder, FILENAME)
playlists = favourite.getFavourites(file, validate=False)
items = []
for plist in playlists:
name = plist[0]
thumb = plist[1]
cmd = plist[2]
items.append([cmd, name, thumb])
nItems += addPlaylistItems(items, delete=True)
#parse Kodi folders
folder = 'special://profile/playlists'
nItems += addPlaylistItems(playlist.parseFolder(os.path.join(folder, 'video')), 'DefaultMovies.png', delete=ALLOW_PLAYLIST_DELETE)
nItems += addPlaylistItems(playlist.parseFolder(os.path.join(folder, 'music')), 'DefaultMusicSongs.png', delete=ALLOW_PLAYLIST_DELETE)
nItems += addPlaylistItems(playlist.parseFolder(os.path.join(folder, 'mixed')), 'DefaultMusicVideos.png', delete=ALLOW_PLAYLIST_DELETE)
return nItems > 0
def addPlaylistItems(items, thumbnail='DefaultMovies.png', delete=False):
for item in items:
path = item[0]
title = item[1]
if len(item) > 2:
thumbnail = item[2]
menu = []
#browse
cmd = '%s?mode=%d' % (sys.argv[0], _PLAYLISTBROWSE)
#menu.append((GETTEXT(30148), 'XBMC.Container.Update(%s)' % cmd))
menu.append((GETTEXT(30148), 'XBMC.RunPlugin(%s)' % cmd))
#browse for URL
cmd = '%s?mode=%d' % (sys.argv[0], _URLPLAYLIST)
#menu.append((GETTEXT(30153), 'XBMC.Container.Update(%s)' % cmd))
menu.append((GETTEXT(30153), 'XBMC.RunPlugin(%s)' % cmd))
#menu.append((GETTEXT(30084), 'XBMC.PlayMedia(%s)' % path))
menu.append((GETTEXT(30084), 'XBMC.RunPlugin(%s?mode=%d&cmd=%s)' % (sys.argv[0], _PLAYLIST, urllib.quote_plus(path))))
if delete:
menu.append((GETTEXT(30150), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _DELETEPLAYLIST, urllib.quote_plus(path))))
menu.append((GETTEXT(30047), 'XBMC.RunPlugin(%s?mode=%d&path=%s&label=%s&thumb=%s)' % (sys.argv[0], _COPY_PLAY_TO_SF, urllib.quote_plus(path), urllib.quote_plus(title), urllib.quote_plus(thumbnail))))
addDir(title, _PLAYLISTFILE, path=path, thumbnail=thumbnail, menu=menu)
return len(items)
def iPlaylistDelete(path):
#delete from SF list of Playlists
folder = os.path.join(ROOT, 'PL')
file = os.path.join(folder, FILENAME)
playlists = favourite.getFavourites(file, validate=False)
updated = []
for playlist in playlists:
if playlist[2] != path:
updated.append(playlist)
if len(updated) < len(playlists):
favourite.writeFavourites(file, updated)
return True
if sfile.exists(path):
utils.DeleteFile(path)
return True
return False
def iPlaylistItem(path, title='', thumb='DefaultMovies.png'):
#if currently in program menu play directly
if xbmcgui.getCurrentWindowId() == 10001:
liz = xbmcgui.ListItem(title, iconImage=thumb, thumbnailImage=thumb)
pl = xbmc.PlayList(xbmc.PLAYLIST_VIDEO)
pl.clear()
pl.add(path, liz)
xbmc.Player().play(pl)
return
item = xbmcgui.ListItem(path=path)
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
def iPlaylistURL(url):
try: html = quicknet.getURL(url, maxSec=600, tidy=False)
except: html = ''
nItems = addItems(html.split('\n'))
if nItems == 0:
utils.DialogOK(GETTEXT(30155), url)
return False
return True
def iPlaylistFile(path):
if not sfile.exists(path):
return iPlaylistURL(path)
playlist = sfile.readlines(path)
return addItems(playlist)
def addItems(playList):
import playlist
items = playlist.parse(playList)
nItem = len(items)
for item in items:
menu = []
title = item[0]
path = item[1]
isAudio = title.lower().endswith('.mp3')
if isAudio:
title = title.replace('.mp3', '')
thumb = ICON #'DefaultAudio.png'
else:
thumb = ICON #'DefaultFile.png'
title = utils.unescape(title).strip()
menu.append((GETTEXT(30047), 'XBMC.RunPlugin(%s?mode=%d&path=%s&label=%s&thumb=%s)' % (sys.argv[0], _COPY_PLAY_TO_SF_ITEM, urllib.quote_plus(path), urllib.quote_plus(title), urllib.quote_plus(thumb))))
isPlayable = xbmcgui.getCurrentWindowId() != 10001
addDir(title, _PLAYLISTITEM, path=path, thumbnail=thumb, isFolder=False, menu=menu, totalItems=nItem, isPlayable=isPlayable)
return nItem > 0
def iPlaylistURLBrowse():
valid = False
text = 'http://'
while not valid:
text = utils.GetText(GETTEXT(30153), text)
if not text:
return False
if text == 'http://':
return
try: html = quicknet.getURL(text, maxSec=0, tidy=False)
except: html = ''
import playlist
items = playlist.parse(html.split('\n'))
valid = len(items) > 0
if not valid:
utils.DialogOK(GETTEXT(30155), text)
name = utils.GetText(GETTEXT(30156))
if not name:
return False
if COPY_PLAYLISTS:
name += '.m3u'
file = os.path.join(ROOT, 'PL', name)
f = sfile.file(file, 'w')
f.write(html)
f.close()
return True
cmd = text
thumb = 'DefaultFile.png'
addPlaylistToSF(name, cmd, thumb)
return True
def iPlaylistBrowse():
import playlist
playlist = playlist.getPlaylist()
if not playlist:
return False
folder = os.path.join(ROOT, 'PL')
sfile.makedirs(folder)
if COPY_PLAYLISTS:
try:
import shutil
            shutil.copy(playlist, folder)
return True
except:
return False
name = playlist.rsplit(os.sep, 1)[-1].rsplit('.', 1)[0]
cmd = playlist
thumb = 'DefaultMovies.png'
return addPlaylistToSF(name, cmd, thumb)
def addPlaylistToSF(name, cmd, thumb):
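    #register the playlist in Super Favourites' own PL list, skipping duplicates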
folder = os.path.join(ROOT, 'PL')
sfile.makedirs(folder)
file = os.path.join(folder, FILENAME)
cmd = utils.convertToHome(cmd)
if favourite.findFave(file, cmd)[0]:
return False
playlist = [name, thumb, cmd]
playlists = favourite.getFavourites(file, validate=False)
playlists.append(playlist)
favourite.writeFavourites(file, playlists)
return True
def copyPlayToSF(cmd, title, thumb, playMedia):
folder = utils.GetFolder(title)
if not folder:
return
copy = ['', '', '']
copy[0] = title
copy[1] = thumb
if playMedia:
        copy[2] = 'PlayMedia("%s")' % cmd
else:
copy[2] = 'ActivateWindow(10025,"%s",return)' % cmd
file = os.path.join(folder, FILENAME)
favourite.copyFave(file, copy)
def shortenText(text, length):
text = utils.fix(text)
text = text.strip()
if len(text) <= length:
return text
    #truncate to 'length' characters and append an ellipsis
    short = text[:length] + '...'
    return short
def getHistoryItem(menu, keyword, image, fanart, meta, refresh):
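    #build (and append to the menu) a context item that saves this keyword into the search history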
#NOTE the SF@V is to workaround a bug in XBMC where the string is erroneously converted to lowercase when the menu item triggers
refresh = 'true' if refresh else 'false'
historyItem = None
if SHOWIHISTORY and not history.contains(keyword):
label = GETTEXT(30164) % shortenText(keyword, 10)
historyItem = (label, 'RunPlugin(%s?mode=%d&keyword=%s&meta=%s&image=%s&fanart=%s&refresh=%s)' % (sys.argv[0], _HISTORYADD, urllib.quote_plus(keyword), urllib.quote_plus(utils.convertDictToURL(meta)), urllib.quote_plus(image), urllib.quote_plus(fanart), refresh))
menu.append(historyItem)
return historyItem
def superSearch(keyword='', image=SEARCH, fanart=FANART, imdb='', meta={}):
    #prompt for a search term when none was supplied, then re-dispatch via Container.Update
    if len(keyword) == 0:
        kb = xbmc.Keyboard(keyword, GETTEXT(30054))
        kb.doModal()
        if not kb.isConfirmed():
            return
        keyword = kb.getText()
        if len(keyword) < 1:
            keyword = ISEARCH_EMPTY
        mode = _SUPERSEARCH
        cmd = '%s?mode=%d&keyword=%s&image=%s&fanart=%s' % (sys.argv[0], mode, urllib.quote_plus(keyword), image, fanart)
        xbmc.executebuiltin('XBMC.Container.Update(%s)' % cmd)
        return
if keyword == ISEARCH_EMPTY:
keyword = ''
if not SHOW_FANART:
fanart = BLANK
keyword = keyword.split(' - [I]', 1)[0]
if keyword.startswith('tt'): #assume IMDB number
url = 'http://m.imdb.com/title/%s/' % keyword
html = quicknet.getURL(url)
try:
keyword = re.compile('<meta property=\'og:title\' content="(.+?)"').search(html).group(1)
keyword = utils.Clean(keyword)
keyword = utils.unescape(keyword)
keyword = utils.fix(keyword)
except:
pass
editItem = (GETTEXT(30057), 'XBMC.Container.Update(%s?mode=%d&keyword=%s)' % (sys.argv[0], _EDITTERM, keyword))
menu = []
menu.append(editItem)
infolabels = {}
grabber = getMetaGrabber()
if grabber and len(imdb) > 0:
imdb = imdb.replace('/?', '')
infolabels = getMeta(grabber, '', 'movie', year=None, imdb=imdb)
getMovieMenu(infolabels, menu)
else:
infolabels = meta
historyItem = getHistoryItem(menu, keyword, image, fanart, infolabels, True)
addDir(GETTEXT(30066) % keyword.strip(), _EDITTERM, thumbnail=image, isFolder=True, menu=menu, fanart=fanart, keyword=keyword, imdb=imdb, infolabels=infolabels)
#reset menu
menu = []
menu.append(editItem)
if historyItem:
menu.append(historyItem)
addSeparatorItem(menu)
index = 0
if SHOWIHISTORY and history.exists():
#reset menu
menu = []
menu.append(editItem)
if historyItem:
menu.append(historyItem)
label, index = utils.addPrefixToLabel(index, GETTEXT(30163))
addDir(label, _HISTORYSHOW, thumbnail=SEARCH, isFolder=True, menu=menu, infolabels={'plot':GETTEXT(30201)})
if SHOWRECOMMEND and len(keyword) > 0:
#reset menu
menu = []
menu.append(editItem)
if historyItem:
menu.append(historyItem)
getMovieMenu(infolabels, menu)
label, index = utils.addPrefixToLabel(index, GETTEXT(30088))
if len(imdb) > 0:
addDir(label, _RECOMMEND_IMDB, thumbnail=image, isFolder=True, menu=menu, fanart=fanart, keyword=keyword, imdb=imdb, infolabels={'plot':GETTEXT(30202) % (keyword, imdb.replace('/?', ''))})
elif RECOMMENDAUTO:
addDir(label, _RECOMMEND_KEY, thumbnail=image, isFolder=True, menu=menu, fanart=fanart, keyword=keyword, infolabels={'plot':GETTEXT(30215) % keyword})
else:
addDir(label, _RECOMMEND_KEY, thumbnail=image, isFolder=True, menu=menu, fanart=fanart, keyword=keyword, infolabels={'plot':GETTEXT(30205) % keyword})
keyword = urllib.quote_plus(keyword.replace('&', ''))
file = os.path.join(ROOT, 'S', FILENAME)
faves = favourite.getFavourites(file, superSearch=True)
if len(faves) == 0:
#try shipped search file
file = os.path.join(HOME, 'resources', 'search', FILENAME)
faves = favourite.getFavourites(file)
if ALPHA_SORT:
faves = sorted(faves, key=lambda x: utils.CleanForSort(x))
for fave in faves:
label = fave[0]
thumb = fave[1]
cmd = fave[2]
cmd = cmd.replace('[%SF%]', keyword)
cmd = cmd.replace('[%SF+%]', keyword.replace('+', '%2b'))
fan = fanart
if SHOW_FANART:
fan = favourite.getFanart(cmd)
if len(fan) == 0:
fan = fanart
menu = []
menu.append((GETTEXT(30103), 'XBMC.RunPlugin(%s?mode=%d&file=%s&cmd=%s&name=%s&thumb=%s)' % (sys.argv[0], _EDITSEARCH, urllib.quote_plus(file), urllib.quote_plus(cmd), urllib.quote_plus(label), urllib.quote_plus(thumb))))
menu.append(editItem)
if historyItem:
menu.append(historyItem)
#special fix for GlobalSearch, use local launcher (globalsearch.py) to bypass keyboard
cmd = cmd.replace('script.globalsearch', os.path.join(HOME, 'globalsearch.py'))
infolabel = {'plot':GETTEXT(30206) % (label, urllib.unquote_plus(keyword))}
label, index = utils.addPrefixToLabel(index, label)
addDir(label, _ACTIVATESEARCH, cmd=cmd, thumbnail=thumb, isFolder=True, menu=menu, fanart=fan, keyword=keyword, infolabels=infolabel)
return True
def playCommand(originalCmd):
player.playCommand(originalCmd, contentMode)
def playCommandFromHome(cmd, name):
try:
cmd = re.compile('"(.+?)"').search(cmd).group(1)
cmd = favourite.removeSFOptions(cmd)
liz = xbmcgui.ListItem(name, iconImage='', thumbnailImage='')
liz.setPath(cmd)
import xbmcplugin
xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, liz)
except:
pass
def activateWindowCommand(cmd):
player.activateWindowCommand(cmd)
def addArtwork(setting, thumbnail, fanart):
if setting == 0:
return ''
if setting == 1:
return thumbnail
if setting == 2:
return fanart
return ''
def addDir(label, mode, index=-1, path='', cmd='', thumbnail='', isFolder=True, menu=None, fanart=FANART, keyword='', imdb='', infolabels={}, totalItems=0, isPlayable=False):
global separator
u = sys.argv[0]
u += '?label='
try:
u += urllib.quote_plus(label)
except:
label = utils.fix(label)
u += urllib.quote_plus(label)
u += '&mode=' + str(mode)
if index > -1:
u += '&index=' + str(index)
if len(path) > 0:
u += '&path=' + urllib.quote_plus(path)
if len(cmd) > 0:
u += '&cmd=' + urllib.quote_plus(cmd)
if len(keyword) > 0:
u += '&keyword=' + urllib.quote_plus(keyword)
if not imdb:
try: imdb = infolabels['imdbnumber']
except: imdb = ''
if len(imdb) > 0:
u += '&imdb=' + urllib.quote_plus(imdb)
if len(thumbnail) > 0:
u += '&image=' + urllib.quote_plus(thumbnail)
if len(fanart) > 0:
u += '&fanart=' + urllib.quote_plus(fanart)
if CONTENTMODE:
u += '&contentMode=true'
u += '&content_type=' + urllib.quote_plus(launchMode)
if not menu:
menu = []
if len(thumbnail) == 0:
thumbnail = BLANK
    label = label.replace('&#39;', '\'')
#sanity check on empty [COLOR] block
if label.startswith('[COLOR ]'):
label = label.split('[COLOR ]', 1)[-1]
if label.endswith('[/COLOR]'):
label = label.rsplit('[/COLOR]', 1)[0]
liz = xbmcgui.ListItem(label, iconImage=thumbnail, thumbnailImage=thumbnail)
if isPlayable:
liz.setProperty('IsPlayable', 'true')
if infolabels and len(infolabels) > 0:
liz.setInfo(type='Video', infoLabels=infolabels)
if len(fanart) == 0:
fanart = FANART
if fanart != BLANK and SHOW_FANART:
liz.setProperty('Fanart_Image', fanart)
if hasattr(liz, 'setArt'):
art = {}
art['landscape'] = addArtwork(ART_LANDSCAPE, thumbnail, fanart)
art['banner'] = addArtwork(ART_BANNER, thumbnail, fanart)
art['poster'] = addArtwork(ART_POSTER, thumbnail, fanart)
art['thumb'] = thumbnail
art['fanart'] = fanart
if len(art) > 0:
liz.setArt(art)
#this property can be accessed in a skin via: $INFO[ListItem.Property(Super_Favourites_Folder)]
#or in Python via: xbmc.getInfoLabel('ListItem.Property(Super_Favourites_Folder)')
liz.setProperty('Super_Favourites_Folder', removeNumeric(theFolder))
#special case
if mode == _XBMC:
menu.append((GETTEXT(30043), 'XBMC.RunPlugin(%s?mode=%d&path=%s)' % (sys.argv[0], _THUMBFOLDER, urllib.quote_plus(PROFILE))))
ignoreFave = False
if mode == _NEWFOLDER:
ignoreFave = True
elif (mode == _EDITTERM) or (mode == _ACTIVATESEARCH):
ignoreFave = len(keyword) == 0
addGlobalMenuItem(menu, cmd, ignoreFave, label, thumbnail, u, keyword, fanart, infolabels)
if separator:
addSeparatorItem()
global nItem
_menu = menu
menu = []
for m in _menu:
menu.append((m[0], m[1].replace('/?', '/?index=%d&' % nItem)))
nItem += 1
liz.addContextMenuItems(menu, replaceItems=True)
xbmcplugin.addDirectoryItem(handle=int(sys.argv[1]), url=u, listitem=liz, isFolder=isFolder, totalItems=totalItems)
## --------------------------------------------------------------------------------------------------------------------------
params = utils.get_params(sys.argv[2])
doRefresh = False
doEnd = True
cacheToDisc = False
handle = int(sys.argv[1])
theFolder = ''
thepath = ''
try: mode = int(params['mode'])
except: mode = _MAIN
try: cmd = params['cmd']
except: cmd = None
#----------------------------------------------------------------
if mode == _ACTIVATEWINDOW:
#if cmd is a SF cmd then pull out params from cmd and use them
inSF = xbmc.getInfoLabel('Container.FolderName') == TITLE
isSF = 'plugin://%s' % ADDONID in cmd
if inSF and isSF:
cmd = re.compile('"(.+?)"').search(cmd).group(1)
params = utils.get_params(cmd)
try: mode = int(params['mode'])
except: mode = _MAIN
try: cmd = params['cmd']
except: cmd = None
#----------------------------------------------------------------
global itemIndex
try: itemIndex = int(params['index'])
except: itemIndex = -1
try: file = params['file']
except: file = None
try: path = params['path']
except: path = None
try: name = params['name']
except: name = ''
try: label = params['label']
except: label = ''
try: folder = params['folder']
except: folder = ''
try: content = params['content']
except: content = ''
try: contentMode = params['contentMode'].lower() == 'true'
except: contentMode = False
#which menu was SF launched from
try: launchMode = params['content_type']
except: launchMode = ''
contentType = CONTENTTYPE
#convert visible (in settings) string to a string that Kodi recognises
if contentType in CONTENTTYPES:
contentType = CONTENTTYPES[contentType]
else:
contentType = ''
if len(content) > 0:
mode = _IGNORE
folder = content
try:
path = xbmc.getInfoLabel('Skin.String(%s.Path)' % folder)
if len(path) > 0:
folder = ''
plugin = re.compile('.+?"(.+?)"').search(path).group(1)
            prams = utils.get_params(plugin)
try: folder = prams['folder']
except: pass
if len(folder) == 0:
mode = _FOLDER
path = PROFILE
except Exception, e:
pass
SHOWNEW = False
SHOWXBMC = False
SHOWSEP = False
CONTENTMODE = True
if len(folder) > 0:
if mode == _MAIN:
mode = _FOLDER
path = os.path.join(PROFILE, folder)
isHome = False
try:
if cmd.startswith('HOME:'):
cmd = cmd.split(':', 1)[-1]
isHome = True
except:
pass
utils.log('------------------ Launch Parameters ------------------')
utils.log(sys.argv[2])
utils.log(sys.argv)
utils.log('mode = %d' % mode)
utils.log('cmd = %s' % cmd)
utils.log('folder = %s' % folder)
utils.log('params = %s' % params)
utils.log('launchMode = %s' % launchMode)
utils.log('contentType = %s' % contentType)
utils.log('viewType = %s' % VIEWTYPE)
utils.log('isHome = %s' % str(isHome))
utils.log('itemIndex = %s' % str(itemIndex))
utils.log('ident = %s' % str(handle))
utils.log('-------------------------------------------------------')
if mode == _PLAYMEDIA:
if not contentMode:
mode = _IGNORE
if isHome:
playCommandFromHome(cmd, label)
else:
playCommand(cmd)
elif mode == _ACTIVATEWINDOW:
if not contentMode and not isHome:
mode = _IGNORE
doEnd = False
playCommand(cmd)
elif mode == _ACTIVATEWINDOW_XBMC:
mode = _IGNORE
import playlist
if PLAY_PLAYLISTS and playlist.isPlaylist(cmd):
playCommand(cmd)
#Container.Update removes current item from history to stop looping
update = '%s' % (sys.argv[0])
update = 'Container.Update(%s,replace)' % update
xbmc.executebuiltin(update)
xbmc.executebuiltin('Dialog.Close(busydialog)') #Isengard fix
xbmc.executebuiltin('ActivateWindow(Home)')
else:
script = os.path.join(HOME, 'cmdLauncher.py')
cmd = 'AlarmClock(%s,RunScript(%s,%s),%d,True)' % ('SF_CMDLAUNCHER', script, cmd, 0)
xbmc.executebuiltin(cmd)
elif mode == _PLAYLIST:
import playlist
playlist.play(cmd)
if mode == _ACTIVATESEARCH:
doEnd = False
playCommand(cmd)
elif mode == _XBMC:
showXBMCFolder()
xbmc.executebuiltin('Container.Update')
elif mode == _FOLDER:
thepath = path
theFolder = label
import locking
if locking.unlock(thepath):
if mode != _MAIN:
addNewFolderItem(thepath)
parseFolder(thepath)
else:
pass
#Reset to main somehow!!!
elif mode == _REMOVEFOLDER:
doRefresh = removeFolder(path)
elif mode == _RENAMEFOLDER:
doRefresh = renameFolder(path)
elif mode == _EDITFOLDER:
import locking
if locking.unlock(path):
doRefresh = editFolder(path, name)
elif mode == _EDITFAVE:
try: thumb = params['thumb']
except: thumb = 'null'
doRefresh = editFave(file, cmd, name, thumb)
elif mode == _EDITSEARCH:
try: thumb = params['thumb']
except: thumb = 'null'
doRefresh = editSearch(file, cmd, name, thumb)
elif mode == _NEWFOLDER:
doRefresh = createNewFolder(path)
elif mode == _CUT:
import clipboard
doRefresh = clipboard.cutCopy(file, cmd, cut=True)
elif mode == _COPY:
import clipboard
doRefresh = clipboard.cutCopy(file, cmd, cut=False)
elif mode == _PASTE:
import clipboard
try: folder = params['paste']
    except: pass
doRefresh = clipboard.paste(folder)
elif mode == _CUTFOLDER:
import clipboard
doRefresh = clipboard.cutCopyFolder(path, cut=True)
elif mode == _COPYFOLDER:
import clipboard
doRefresh = clipboard.cutCopyFolder(path, cut=False)
elif mode == _PASTEFOLDER:
import clipboard
try: folder = params['paste']
    except: pass
doRefresh = clipboard.pasteFolder(folder, sys.argv[0])
elif mode == _REMOVEFAVE:
doRefresh = removeFave(file, cmd)
elif mode == _RENAMEFAVE:
doRefresh = renameFave(file, cmd)
elif mode == _ADDTOXBMC:
thumb = params['thumb']
keyword = params['keyword']
addToXBMC(name, thumb, cmd, keyword)
elif mode == _ADDTOSF:
thumb = params['thumb']
keyword = params['keyword']
meta = params['meta']
addToSF(name, thumb, cmd, keyword, meta)
elif mode == _COPYTOSF:
thumb = params['thumb']
fanart = params['fanart']
try: meta = utils.convertURLToDict(urllib.quote_plus(params['meta']))
except: meta = {}
try: desc = meta['plot']
except: desc = ''
import clipboard
clipboard.setPasteProperties(thumb, fanart, desc, name, cmd, meta)
elif mode == _THUMBFAVE:
doRefresh = thumbFave(file, cmd)
elif mode == _THUMBFOLDER:
doRefresh = thumbFolder(path)
elif mode == _PLAYBACKMODE:
doRefresh = changePlaybackMode(file, cmd)
elif mode == _SETTINGS:
    try:
addonID = params['addon']
utils.openSettings(addonID)
except:
utils.openSettings(ADDONID)
doRefresh = True
elif mode == _SEPARATOR:
pass
elif mode == _EXTSEARCH:
externalSearch()
elif mode == _SUPERSEARCH:
try: keyword = params['keyword']
except: keyword = ''
try: imdb = params['imdb']
except: imdb = ''
try: image = params['image']
except: image = BLANK
try: fanart = params['fanart']
except: fanart = BLANK
try: meta = utils.convertURLToDict(urllib.quote_plus(params['meta']))
except: meta = {}
superSearch(keyword, image, fanart, imdb, meta)
if len(imdb) > 0:
contentType = 'movies'
elif mode == _EDITTERM:
try: keyword = params['keyword']
except: keyword = ''
editSearchTerm(keyword)
cacheToDisc=True
xbmc.sleep(250)
doEnd = False
elif mode == _SECURE:
import locking
doRefresh = locking.add(path, name)
elif mode == _UNSECURE:
import locking
doRefresh = locking.remove(path, name)
elif mode == _IMPORT:
import importer
importer.doImport()
#doRefresh = True
elif mode == _RECOMMEND_KEY or mode == _RECOMMEND_KEY_A:
try: keyword = params['keyword']
except: keyword = ''
cacheToDisc = True
doEnd = True
contentType = 'movies'
if mode == _RECOMMEND_KEY:
recommendKey(keyword, RECOMMENDAUTO)
else:
recommendKey(keyword, False)
elif mode == _RECOMMEND_IMDB:
try: imdb = params['imdb']
except: imdb = ''
try: keyword = params['keyword']
except: keyword = ''
try:
if ADDON.getSetting('CACHERECOMMEND') != 'true':
callback = params['callback']
cacheToDisc = True
doEnd = True
contentType = 'movies'
recommendIMDB(imdb, keyword)
except Exception, e:
winID = xbmcgui.getCurrentWindowId()
cmd = '%s?mode=%d&keyword=%s&imdb=%s&callback=%s' % (sys.argv[0], _RECOMMEND_IMDB, urllib.quote_plus(keyword), urllib.quote_plus(imdb), 'callback')
xbmc.executebuiltin('Container.Refresh(%s)' % cmd)
cacheToDisc = False
doEnd = False
elif mode == _PLAYTRAILER:
import yt
if not yt.PlayVideo(path):
utils.DialogOK(GETTEXT(30092))
elif mode == _IPLAY:
iPlay()
elif mode == _PLAYLISTFILE:
iPlaylistFile(path)
elif mode == _PLAYLISTITEM:
try: image = params['image']
except: image = BLANK
iPlaylistItem(path, label, image)
elif mode == _PLAYLISTBROWSE:
doRefresh = iPlaylistBrowse()
elif mode == _DELETEPLAYLIST:
doRefresh = iPlaylistDelete(path)
elif mode == _COPY_PLAY_TO_SF:
doRefresh = False
thumb = params['thumb']
copyPlayToSF(path, label, thumb, playMedia=False)
elif mode == _COPY_PLAY_TO_SF_ITEM:
doRefresh = False
thumb = params['thumb']
copyPlayToSF(path, label, thumb, playMedia=True)
elif mode == _URLPLAYLIST:
doRefresh = iPlaylistURLBrowse()
elif mode == _HISTORYSHOW:
doRefresh = iHistoryBrowse()
elif mode == _HISTORYADD:
try: keyword = params['keyword']
except: keyword = ''
try: image = params['image']
except: image = BLANK
try: fanart = params['fanart']
except: fanart = FANART
try: meta = params['meta']
except: meta = ''
#image = image.replace('SF@V', '/')
#fanart = fanart.replace('SF@V', '/')
doRefresh = iHistoryAdd(keyword, image, fanart, meta)
elif mode == _HISTORYREMOVE:
doRefresh = iHistoryRemove(name)
elif mode == _MANUAL:
doRefresh = manualAdd(path)
elif mode == _VIEWTYPE:
doRefresh = setViewType()
elif mode == _IEXPLORE:
cacheToDisc = True
iExplore(path)
elif mode == _PLAY_FILE:
playFile(path)
elif mode == _PLAY_FOLDER:
playFolder(path)
elif mode == _PLAY_SUPER_FOLDER_EXT:
playSuperFolder(path, id=-1)
#now remove this item from history to prevent looping
cmd = 'XBMC.Container.Update(%s?mode=%d,replace)' % (sys.argv[0], _MAIN)
xbmc.executebuiltin(cmd)
doEnd = True
elif mode == _PLAY_SUPER_FOLDER:
playSuperFolder(path, id=int(sys.argv[1]))
elif mode == _MAIN:
main()
else:
#do nothing
nItem = 1
#make sure at least 1 line is showing to allow context menu to be displayed
if nItem < 1:
if mode == _IPLAY:
menu = []
#browse
cmd = '%s?mode=%d' % (sys.argv[0], _PLAYLISTBROWSE)
menu.append((GETTEXT(30148), 'XBMC.Container.Update(%s)' % cmd))
#browse for URL
cmd = '%s?mode=%d' % (sys.argv[0], _URLPLAYLIST)
menu.append((GETTEXT(30153), 'XBMC.Container.Update(%s)' % cmd))
addDir('', _SEPARATOR, thumbnail=BLANK, isFolder=False, menu=menu)
else:
addDir('', _SEPARATOR, thumbnail=BLANK, isFolder=False)
parentItem = xbmc.getCondVisibility('system.getbool(filelists.showparentdiritems)') == 1
if doRefresh:
refresh()
if parentItem and itemIndex > -1:
itemIndex += 1
import selector
selector.select(itemIndex)
if doEnd:
if len(contentType) > 0:
xbmcplugin.setContent(handle, contentType)
if handle > -1:
xbmcgui.Window(10000).setProperty('SF_NMR_ITEMS', str(nItem if not parentItem else nItem+1))
xbmcplugin.endOfDirectory(handle, cacheToDisc=cacheToDisc)
if VIEWTYPE > 0:
xbmc.executebuiltin('Container.SetViewMode(%d)' % VIEWTYPE)
if mode == _PLAYMEDIA:
xbmc.sleep(250)
playCommand(cmd)
elif mode == _ACTIVATEWINDOW:
if len(launchMode) == 0:
script = os.path.join(HOME, 'cmdLauncher.py')
cmd = 'AlarmClock(%s,RunScript(%s,%s),%d,True)' % ('SF_CMDLAUNCHER', script, cmd.replace('"', ''), 0)
xbmc.executebuiltin(cmd)
else:
xbmc.sleep(250)
playCommand(cmd) |
jeroendecroos/pronunciation_master | refs/heads/master | pronunciation_master/tests/unittests/src/test_get_frequent_words.py | 1 | import mock
import os
import StringIO
import tempfile
import unittest
from pronunciation_master.tests.testlib import testcase
from pronunciation_master.src import get_frequent_words
class GetFrequencyListFromFile(testcase.BaseTestCase):
def setUp(self):
        fd, self.temp_filepath = tempfile.mkstemp()
        os.close(fd)  # close the inherited handle; the path is reopened where needed
self.fun = get_frequent_words._frequency_list_from_filestream
def tearDown(self):
os.remove(self.temp_filepath)
def _create_freq_list(self):
self.freq_list = [
('word1', 1),
('word2', 2),
('word3', 3),
('word4', 4),
('word5', 5),
('word6', 6),
('word7', 7),
('word8', 8),
('word9', 9),
('word10', 10),
]
self.words = [word for word, freq in self.freq_list]
with open(self.temp_filepath, 'w') as temp_stream:
for word, freq in self.freq_list:
temp_stream.write('{}\t{}\n'.format(word, freq))
def test_word_freq_list_extended(self):
self._create_freq_list()
with open(self.temp_filepath) as instream:
freq_list_answer = self.fun(instream, True)
ranked_list = [(word, i+1, freq)
for i, (word, freq) in enumerate(self.freq_list)]
self.assertEqual(freq_list_answer, ranked_list)
def test_word_freq_list(self):
self._create_freq_list()
with open(self.temp_filepath) as instream:
freq_list_answer = self.fun(instream)
self.assertEqual(freq_list_answer, self.words)
def test_empty_line_file(self):
with open(self.temp_filepath, 'w') as temp_stream:
temp_stream.write('\n')
with self.assertRaises(RuntimeError):
with open(self.temp_filepath) as instream:
self.fun(instream)
class GetHermitdavePage(testcase.BaseTestCase):
def test_dutch_first_line(self):
page = get_frequent_words._get_hermitdave_page('nl')
line = page.readline()
self.assertEqual(line, 'ik 8106228\n')
class GetFrequencyList(testcase.BaseTestCase):
def setUp(self):
self.fun = get_frequent_words.get_frequency_list
def test_one_word(self):
data = mock.Mock()
get_frequent_words.FrequencySources = data
data.language_code = mock.Mock(side_effect=lambda x: x)
word_stream = StringIO.StringIO('word 5\n')
data.frequency_filestream = mock.Mock(return_value=word_stream)
self.assertEqual(self.fun('dutch'), ['word'])
if __name__ == '__main__':
unittest.main()
|
fujicoin/electrum-fjc | refs/heads/master | electrum/plugins/coldcard/cmdline.py | 2 | from electrum.plugin import hook
from electrum.util import print_msg, raw_input, print_stderr
from electrum.logging import get_logger
from .coldcard import ColdcardPlugin
_logger = get_logger(__name__)
class ColdcardCmdLineHandler:
def get_passphrase(self, msg, confirm):
raise NotImplementedError
def get_pin(self, msg):
raise NotImplementedError
def prompt_auth(self, msg):
raise NotImplementedError
def yes_no_question(self, msg):
print_msg(msg)
return raw_input() in 'yY'
def stop(self):
pass
def show_message(self, msg, on_cancel=None):
print_stderr(msg)
def show_error(self, msg, blocking=False):
print_stderr(msg)
def update_status(self, b):
_logger.info(f'hw device status {b}')
def finished(self):
pass
class Plugin(ColdcardPlugin):
handler = ColdcardCmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
def create_handler(self, window):
return self.handler
# EOF
|
timopulkkinen/BubbleFish | refs/heads/master | build/android/pylib/instrumentation/dispatch.py | 1 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Dispatches the instrumentation tests."""
import logging
import os
from pylib import android_commands
from pylib.base import shard
from pylib.base import test_result
from pylib.uiautomator import test_package as uiautomator_package
import test_package
import test_runner
def Dispatch(options):
"""Dispatches instrumentation tests onto connected device(s).
If possible, this method will attempt to shard the tests to
all connected devices. Otherwise, dispatch and run tests on one device.
Args:
options: Command line options.
Returns:
A TestResults object holding the results of the Java tests.
Raises:
Exception: when there are no attached devices.
"""
is_uiautomator_test = False
if hasattr(options, 'uiautomator_jar'):
test_pkg = uiautomator_package.TestPackage(
options.uiautomator_jar, options.uiautomator_info_jar)
is_uiautomator_test = True
else:
test_pkg = test_package.TestPackage(options.test_apk_path,
options.test_apk_jar_path)
# The default annotation for tests which do not have any sizes annotation.
default_size_annotation = 'SmallTest'
def _GetTestsMissingAnnotation(test_pkg):
test_size_annotations = frozenset(['Smoke', 'SmallTest', 'MediumTest',
'LargeTest', 'EnormousTest', 'FlakyTest',
'DisabledTest', 'Manual', 'PerfTest'])
tests_missing_annotations = []
for test_method in test_pkg.GetTestMethods():
annotations = frozenset(test_pkg.GetTestAnnotations(test_method))
if (annotations.isdisjoint(test_size_annotations) and
not test_pkg.IsPythonDrivenTest(test_method)):
tests_missing_annotations.append(test_method)
return sorted(tests_missing_annotations)
if options.annotation:
available_tests = test_pkg.GetAnnotatedTests(options.annotation)
if options.annotation.count(default_size_annotation) > 0:
tests_missing_annotations = _GetTestsMissingAnnotation(test_pkg)
if tests_missing_annotations:
logging.warning('The following tests do not contain any annotation. '
'Assuming "%s":\n%s',
default_size_annotation,
'\n'.join(tests_missing_annotations))
available_tests += tests_missing_annotations
else:
available_tests = [m for m in test_pkg.GetTestMethods()
if not test_pkg.IsPythonDrivenTest(m)]
coverage = os.environ.get('EMMA_INSTRUMENT') == 'true'
tests = []
if options.test_filter:
# |available_tests| are in adb instrument format: package.path.class#test.
filter_without_hash = options.test_filter.replace('#', '.')
tests = [t for t in available_tests
if filter_without_hash in t.replace('#', '.')]
else:
tests = available_tests
if not tests:
logging.warning('No instrumentation tests to run with current args.')
return test_result.TestResults()
tests *= options.number_of_runs
attached_devices = android_commands.GetAttachedDevices()
if not attached_devices:
raise Exception('There are no devices online.')
if options.device:
attached_devices = [options.device]
logging.info('Will run: %s', str(tests))
if len(attached_devices) > 1 and (coverage or options.wait_for_debugger):
logging.warning('Coverage / debugger can not be sharded, '
'using first available device')
attached_devices = attached_devices[:1]
def TestRunnerFactory(device, shard_index):
return test_runner.TestRunner(
options, device, shard_index, False, test_pkg, [], is_uiautomator_test)
return shard.ShardAndRunTests(TestRunnerFactory, attached_devices, tests,
options.build_type)
|
Drapegnik/bsu | refs/heads/master | statistical-modeling/lab4/lab4.py | 1 | #!/usr/bin/env python
# coding: utf-8
# # lab4
# Метод Монте-Карло
#
# ## tasks
# 1. По методу Монте-Карло вычислить приближенное значения интегралов.
# 2. Сравнить полученное значение либо с точным значением (если его получится вычислить), либо с приближенным, полученным в каком-либо математическом пакете (например, в `Mathematica`). Для этого построить график зависимости точности вычисленного методом Монте-Карло интеграла от числа итераций `n`.
#
# ## integrals
# # 1. $\int\limits_{-\infty}^{\infty} e^{-x^4} \sqrt{1+x^4} dx$
# # 2. $\iint\limits_{1 \leq x^2 + y^2 \leq 4} \dfrac{dx dy}{x^2 + y^4}$
# ## solution
# In[107]:
import sys
import numpy as np
import scipy.integrate as integrate
import matplotlib.pyplot as plt
from scipy.stats import cauchy
from math import e, pow, sqrt
from random import uniform
# ### Define the integrands and compute approximate values of the integrals:
# In[2]:
def integrand_1(x):
return e**(-x**4) * (1 + x**4)**0.5
i1 = integrate.quad(integrand_1, -np.inf, np.inf)[0]
i1
# ### To evaluate the second integral, split it into several pieces:
#
# ## $
# \iint\limits_{1 \leq x^2 + y^2 \leq 4} = \int\limits_{-2}^2\int\limits_{(-\sqrt{4-x^2})}^{\sqrt{4-x^2}} - \int\limits_{-1}^1\int\limits_{(-\sqrt{1-x^2})}^{\sqrt{1-x^2}} = \left(\int\limits_{-2}^{-0}\int\limits_{(-\sqrt{4-x^2})}^{\sqrt{4-x^2}} + \int\limits_{+0}^2\int\limits_{(-\sqrt{4-x^2})}^{\sqrt{4-x^2}}\right) - \left(\int\limits_{-1}^{-0}\int\limits_{(-\sqrt{1-x^2})}^{\sqrt{1-x^2}} + \int\limits_{+0}^1\int\limits_{(-\sqrt{1-x^2})}^{\sqrt{1-x^2}}\right)
# $
# In[35]:
epsilon = 0.006
def integrand_2(x, y):
return 1.0 / (x**2 + y**4)
def get_x(frm, to):
return lambda: [frm, to]
def get_y(to):
def bounds_y(x):
y = (to - x**2)**0.5
return [-y, y]
return bounds_y
ig1 = integrate.nquad(integrand_2, [get_y(4), get_x(-2, -epsilon)])[0]
ig2 = integrate.nquad(integrand_2, [get_y(4), get_x(epsilon, 2)])[0]
ig3 = integrate.nquad(integrand_2, [get_y(1), get_x(-1, -epsilon)])[0]
ig4 = integrate.nquad(integrand_2, [get_y(1), get_x(epsilon, 1)])[0]
i2 = ig1 + ig2 - (ig3 + ig4)
i2
# ### Define a function that estimates an integral by the Monte Carlo method:
# In[4]:
def calculate_integral(integrand, values, distr):
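    # importance-sampling estimate: for samples x drawn from the density `distr`,
    # the mean of integrand(x)/distr(x) approximates the integral of `integrand`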
return sum([integrand(el) / distr(el) for el in values]) / len(values)
# In[13]:
def calculate_first(n=1000):
return calculate_integral(integrand_1, cauchy.rvs(size=n), cauchy.pdf)
calculate_first()
# ### For the second integral, bound the integration region by a 4x4 square:
# In[180]:
fig = plt.figure(figsize=(5, 5))
ax = fig.add_subplot(111, aspect='equal')
square = plt.Rectangle((-2, -2), 4, 4, color='#C1FFF9', label='bounding box')
circle1 = plt.Circle((0, 0), 2, color='#338AF3', label='1≤x^2+y^2≤4')
circle2 = plt.Circle((0, 0), 1, color='#C1FFF9')
ax.add_patch(square)
ax.add_patch(circle1)
ax.add_patch(circle2)
plt.axis([-2.5, 2.5, -2.5, 2.5])
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.show()
# ### Define an indicator function for hitting the integration region:
# In[24]:
def region(x, y):
return 1 <= x**2 + y**2 <=4
def second_f(args):
return integrand_2(*args) if region(*args) else 0
# In[78]:
def uniform_pdf(x):
return 0.25 if -2 <= x <= 2 else 0
def distr(args):
x, y = args
return uniform_pdf(x) * uniform_pdf(y)
def calculate_second(n=1000):
x = [uniform(-2, 2) for _ in range(n)]
y = [uniform(-2, 2) for _ in range(n)]
return calculate_integral(second_f, list(zip(x, y)), distr)
calculate_second()
# ### Run a series of experiments and plot the results:
# In[103]:
m = 10
def get_numbers():
return (2**x for x in range(0, 16))
def test(func):
return [sum([func(n) for _ in range(m)]) / m for n in get_numbers()]
i1_real = test(calculate_first)
i2_real = test(calculate_second)
# In[194]:
import matplotlib
def draw(real, theory):
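    # plot the Monte Carlo estimates against the reference value, with a log-scaled iteration axis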
matplotlib.rc('ytick', labelsize=15)
matplotlib.rc('xtick', labelsize=15)
plt.figure(figsize=(20, 8))
x = list(get_numbers())
plt.plot(x, [theory]*len(x), label='theory')
plt.plot(x, real, label='real')
plt.xscale('log')
plt.xticks(x, x)
plt.xlabel('n - number of iterations', fontsize=20)
plt.ylabel('integral value', fontsize=20)
plt.legend()
plt.show()
draw(i1_real, i1)
draw(i2_real, i2)
|
MSusik/invenio | refs/heads/master | invenio/legacy/bibauthorid/string_utils.py | 25 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
'''
bibauthorid_string_utils
Bibauthorid utilities used by many parts of the framework
'''
def string_partition(s, sep, direc='l'):
'''
    Partition a string by the first (or, when direc='r', the last) occurrence of the separator.
Mimics the string.partition function, which is not available in Python2.4
@param s: string to be partitioned
@type s: string
@param sep: separator to partition by
@type sep: string
    @param direc: direction (left 'l' or right 'r') to search the separator from
    @type direc: string
    @return: tuple of (left of sep, sep, right of sep)
@rtype: tuple
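
    Example:
      string_partition('a.b.c', '.')      -> ('a', '.', 'b.c')
      string_partition('a.b.c', '.', 'r') -> ('a.b', '.', 'c')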
'''
if direc == 'r':
i = s.rfind(sep)
else:
i = s.find(sep)
if i < 0:
return (s, '', '')
else:
return (s[0:i], s[i:i + 1], s[i + 1:])
def unpackbib(bibrecref):
"""
    Creates a tuple (100, 123, 456) from a bibrecref string ("100:123,456").
@param bibrecref and return: bibrecref
@type bibrecref: string
@type return: (int, int int)
"""
table, tail = bibrecref.split(":")
bibref, bibrec = tail.split(",")
return (int(table), int(bibref), int(bibrec))
|
ManageIQ/integration_tests | refs/heads/master | cfme/tests/physical_infrastructure/test_redfish_physical_server_details.py | 3 | import pytest
from cfme.physical.provider.redfish import RedfishProvider
from cfme.utils.appliance.implementations.ui import navigate_to
pytestmark = [pytest.mark.provider([RedfishProvider], scope="function")]
@pytest.fixture(scope="function")
def physical_server(appliance, provider, setup_provider_funcscope):
"""Get and return the first physical server."""
yield appliance.collections.redfish_physical_servers.all(provider)[0]
def assert_message(physical_server, expected_message):
"""
Assert that the physical server details view displays the requested message.
Args:
physical_server: check the details view of this physical server.
expected_message: the message we expect to be displayed in the flash alert
"""
view = navigate_to(physical_server, "Details")
view.flash.assert_message(expected_message)
def test_redfish_physical_server_details_stats(physical_server):
"""Navigate to the physical server details page and verify that the stats match
Polarion:
assignee: rhcf3_machine
casecomponent: Infra
initialEstimate: 1/4h
"""
physical_server.validate_stats(ui=True)
def test_redfish_power_buttons(physical_server, provider):
"""
Test that pressing of the power buttons for physical server succeeds.
The test assumes that the buttons can be pressed in any order and that we
will get a flash that tells us of success regardless of the state that the
physical server is in. Here we only test that the request in the gui
succeeds.
Polarion:
assignee: rhcf3_machine
casecomponent: Infra
initialEstimate: 1/4h
"""
power_actions = [
("power_off", lambda: physical_server.power_off()),
("power_on", lambda: physical_server.power_on()),
("power_off_now", lambda: physical_server.power_off_immediately()),
("restart", lambda: physical_server.restart()),
("restart_now", lambda: physical_server.restart_immediately()),
("turn_on_loc_led", lambda: physical_server.turn_on_led()),
("turn_off_loc_led", lambda: physical_server.turn_off_led()),
("blink_loc_led", lambda: physical_server.turn_blink_led()),
]
for action_name, action in power_actions:
action()
assert_message(physical_server, "Requested {} of selected item.".format(
action_name))
|
kuiwei/edx-platform | refs/heads/master | common/djangoapps/student/tests/test_microsite.py | 6 | """
Test for User Creation from Micro-Sites
"""
from django.test import TestCase
from student.models import UserSignupSource
import mock
import json
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
FAKE_MICROSITE = {
"SITE_NAME": "openedx.localhost",
"university": "fakeuniversity",
"course_org_filter": "fakeorg",
"REGISTRATION_EXTRA_FIELDS": {
"address1": "required",
"city": "required",
"state": "required",
"country": "required",
"company": "required",
"title": "required"
},
"extended_profile_fields": [
"address1", "state", "company", "title"
]
}
def fake_site_name(name, default=None): # pylint: disable=W0613
"""
create a fake microsite site name
"""
if name == 'SITE_NAME':
return 'openedx.localhost'
else:
return default
def fake_microsite_get_value(name, default=None): # pylint: disable=W0613
"""
create a fake microsite site name
"""
return FAKE_MICROSITE.get(name, default)
class TestMicrosite(TestCase):
"""Test for Account Creation from a white labeled Micro-Sites"""
def setUp(self):
self.username = "test_user"
self.url = reverse("create_account")
self.params = {
"username": self.username,
"email": "[email protected]",
"password": "testpass",
"name": "Test User",
"honor_code": "true",
"terms_of_service": "true",
}
self.extended_params = dict(self.params.items() + {
"address1": "foo",
"city": "foo",
"state": "foo",
"country": "foo",
"company": "foo",
"title": "foo"
}.items())
@mock.patch("microsite_configuration.microsite.get_value", fake_site_name)
def test_user_signup_source(self):
"""
        test account creation from the microsite and check that a record has been
        saved in the UserSignupSource table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertGreater(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
def test_user_signup_from_non_micro_site(self):
"""
        test account creation from a non-microsite. No record should be saved
        in the UserSignupSource table
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 200)
self.assertEqual(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_missing_enhanced_profile(self):
"""
        test account creation from the microsite without providing any of the
        microsite-specific profile information
"""
response = self.client.post(self.url, self.params)
self.assertEqual(response.status_code, 400)
@mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
def test_user_signup_including_enhanced_profile(self):
"""
        test account creation from the microsite when all of the microsite-specific
        profile information is provided
"""
response = self.client.post(self.url, self.extended_params)
self.assertEqual(response.status_code, 200)
user = User.objects.get(username=self.username)
meta = json.loads(user.profile.meta)
self.assertEqual(meta['address1'], 'foo')
self.assertEqual(meta['state'], 'foo')
self.assertEqual(meta['company'], 'foo')
self.assertEqual(meta['title'], 'foo')
|
nareshganatra/apiai | refs/heads/master | lib/jinja2/asyncsupport.py | 117 | # -*- coding: utf-8 -*-
"""
jinja2.asyncsupport
~~~~~~~~~~~~~~~~~~~
Has all the code for async support which is implemented as a patch
for supported Python versions.
:copyright: (c) 2017 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import sys
import asyncio
import inspect
from functools import update_wrapper
from jinja2.utils import concat, internalcode, Markup
from jinja2.environment import TemplateModule
from jinja2.runtime import LoopContextBase, _last_iteration
async def concat_async(async_gen):
rv = []
async def collect():
async for event in async_gen:
rv.append(event)
await collect()
return concat(rv)
async def generate_async(self, *args, **kwargs):
vars = dict(*args, **kwargs)
try:
async for event in self.root_render_func(self.new_context(vars)):
yield event
except Exception:
exc_info = sys.exc_info()
else:
return
yield self.environment.handle_exception(exc_info, True)
def wrap_generate_func(original_generate):
def _convert_generator(self, loop, args, kwargs):
async_gen = self.generate_async(*args, **kwargs)
try:
while 1:
yield loop.run_until_complete(async_gen.__anext__())
except StopAsyncIteration:
pass
def generate(self, *args, **kwargs):
if not self.environment.is_async:
return original_generate(self, *args, **kwargs)
return _convert_generator(self, asyncio.get_event_loop(), args, kwargs)
return update_wrapper(generate, original_generate)
async def render_async(self, *args, **kwargs):
if not self.environment.is_async:
raise RuntimeError('The environment was not created with async mode '
'enabled.')
vars = dict(*args, **kwargs)
ctx = self.new_context(vars)
try:
return await concat_async(self.root_render_func(ctx))
except Exception:
exc_info = sys.exc_info()
return self.environment.handle_exception(exc_info, True)
def wrap_render_func(original_render):
def render(self, *args, **kwargs):
if not self.environment.is_async:
return original_render(self, *args, **kwargs)
loop = asyncio.get_event_loop()
return loop.run_until_complete(self.render_async(*args, **kwargs))
return update_wrapper(render, original_render)
def wrap_block_reference_call(original_call):
@internalcode
async def async_call(self):
rv = await concat_async(self._stack[self._depth](self._context))
if self._context.eval_ctx.autoescape:
rv = Markup(rv)
return rv
@internalcode
def __call__(self):
if not self._context.environment.is_async:
return original_call(self)
return async_call(self)
return update_wrapper(__call__, original_call)
def wrap_macro_invoke(original_invoke):
@internalcode
async def async_invoke(self, arguments, autoescape):
rv = await self._func(*arguments)
if autoescape:
rv = Markup(rv)
return rv
@internalcode
def _invoke(self, arguments, autoescape):
if not self._environment.is_async:
return original_invoke(self, arguments, autoescape)
return async_invoke(self, arguments, autoescape)
return update_wrapper(_invoke, original_invoke)
@internalcode
async def get_default_module_async(self):
if self._module is not None:
return self._module
self._module = rv = await self.make_module_async()
return rv
def wrap_default_module(original_default_module):
@internalcode
def _get_default_module(self):
if self.environment.is_async:
raise RuntimeError('Template module attribute is unavailable '
'in async mode')
return original_default_module(self)
return _get_default_module
async def make_module_async(self, vars=None, shared=False, locals=None):
context = self.new_context(vars, shared, locals)
body_stream = []
async for item in self.root_render_func(context):
body_stream.append(item)
return TemplateModule(self, context, body_stream)
def patch_template():
from jinja2 import Template
Template.generate = wrap_generate_func(Template.generate)
Template.generate_async = update_wrapper(
generate_async, Template.generate_async)
Template.render_async = update_wrapper(
render_async, Template.render_async)
Template.render = wrap_render_func(Template.render)
Template._get_default_module = wrap_default_module(
Template._get_default_module)
Template._get_default_module_async = get_default_module_async
Template.make_module_async = update_wrapper(
make_module_async, Template.make_module_async)
def patch_runtime():
from jinja2.runtime import BlockReference, Macro
BlockReference.__call__ = wrap_block_reference_call(
BlockReference.__call__)
Macro._invoke = wrap_macro_invoke(Macro._invoke)
def patch_filters():
from jinja2.filters import FILTERS
from jinja2.asyncfilters import ASYNC_FILTERS
FILTERS.update(ASYNC_FILTERS)
def patch_all():
patch_template()
patch_runtime()
patch_filters()
async def auto_await(value):
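    # transparently await awaitables; plain values are returned unchanged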
if inspect.isawaitable(value):
return await value
return value
async def auto_aiter(iterable):
if hasattr(iterable, '__aiter__'):
async for item in iterable:
yield item
return
for item in iterable:
yield item
class AsyncLoopContext(LoopContextBase):
def __init__(self, async_iterator, after, length, recurse=None,
depth0=0):
LoopContextBase.__init__(self, recurse, depth0)
self._async_iterator = async_iterator
self._after = after
self._length = length
@property
def length(self):
if self._length is None:
raise TypeError('Loop length for some iterators cannot be '
'lazily calculated in async mode')
return self._length
def __aiter__(self):
return AsyncLoopContextIterator(self)
class AsyncLoopContextIterator(object):
__slots__ = ('context',)
def __init__(self, context):
self.context = context
def __aiter__(self):
return self
async def __anext__(self):
ctx = self.context
ctx.index0 += 1
if ctx._after is _last_iteration:
raise StopAsyncIteration()
next_elem = ctx._after
try:
ctx._after = await ctx._async_iterator.__anext__()
except StopAsyncIteration:
ctx._after = _last_iteration
return next_elem, ctx
async def make_async_loop_context(iterable, recurse=None, depth0=0):
# Length is more complicated and less efficient in async mode. The
# reason for this is that we cannot know if length will be used
# upfront but because length is a property we cannot lazily execute it
# later. This means that we need to buffer it up and measure :(
#
# We however only do this for actual iterators, not for async
# iterators as blocking here does not seem like the best idea in the
# world.
try:
length = len(iterable)
except (TypeError, AttributeError):
if not hasattr(iterable, '__aiter__'):
iterable = tuple(iterable)
length = len(iterable)
else:
length = None
async_iterator = auto_aiter(iterable)
try:
after = await async_iterator.__anext__()
except StopAsyncIteration:
after = _last_iteration
return AsyncLoopContext(async_iterator, after, length, recurse, depth0)
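# --- Illustrative sketch (not part of jinja2): demonstrates the length
# trade-off documented in make_async_loop_context above. Plain iterators are
# buffered so `loop.length` works; native async iterators stay unmeasured.
if __name__ == '__main__':
    import asyncio
    async def _demo():
        ctx = await make_async_loop_context(iter('abc'))
        print(ctx.length)  # 3 -- the plain iterator was buffered into a tuple
        ctx = await make_async_loop_context(auto_aiter('abc'))
        try:
            ctx.length
        except TypeError as exc:
            print(exc)  # async iterators cannot report a lazy length
    asyncio.get_event_loop().run_until_complete(_demo())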
|
google/or-tools | refs/heads/stable | ortools/constraint_solver/doc/routing_svg.py | 1 | # Copyright 2010-2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate SVG for a Routing problem."""
# [START import]
import argparse
from ortools.constraint_solver import pywrapcp
from ortools.constraint_solver import routing_enums_pb2
# [END import]
# [START data_model]
class DataModel(object): # pylint: disable=too-many-instance-attributes
"""Stores the data for the problem."""
def __init__(self, args):
# Locations in block units
locations = \
[(4, 4), # depot
(2, 0), (8, 0), # locations to visit
(0, 1), (1, 1),
(5, 2), (7, 2),
(3, 3), (6, 3),
(5, 5), (8, 5),
(1, 6), (2, 6),
(3, 7), (6, 7),
(0, 8), (7, 8),]
# Convert locations in meters using a city block dimension of 114m x 80m.
self._locations = [(l[0] * 114, l[1] * 80) for l in locations]
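        # For example, block (8, 5) maps to (8 * 114, 5 * 80) = (912, 400) metres.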
self._distance_matrix = [
[
0, 548, 776, 696, 582, 274, 502, 194, 308, 194, 536, 502, 388,
354, 468, 776, 662
],
[
548, 0, 684, 308, 194, 502, 730, 354, 696, 742, 1084, 594, 480,
674, 1016, 868, 1210
],
[
776, 684, 0, 992, 878, 502, 274, 810, 468, 742, 400, 1278, 1164,
1130, 788, 1552, 754
],
[
696, 308, 992, 0, 114, 650, 878, 502, 844, 890, 1232, 514, 628,
822, 1164, 560, 1358
],
[
582, 194, 878, 114, 0, 536, 764, 388, 730, 776, 1118, 400, 514,
708, 1050, 674, 1244
],
[
274, 502, 502, 650, 536, 0, 228, 308, 194, 240, 582, 776, 662,
628, 514, 1050, 708
],
[
502, 730, 274, 878, 764, 228, 0, 536, 194, 468, 354, 1004, 890,
856, 514, 1278, 480
],
[
194, 354, 810, 502, 388, 308, 536, 0, 342, 388, 730, 468, 354,
320, 662, 742, 856
],
[
308, 696, 468, 844, 730, 194, 194, 342, 0, 274, 388, 810, 696,
662, 320, 1084, 514
],
[
194, 742, 742, 890, 776, 240, 468, 388, 274, 0, 342, 536, 422,
388, 274, 810, 468
],
[
536, 1084, 400, 1232, 1118, 582, 354, 730, 388, 342, 0, 878,
764, 730, 388, 1152, 354
],
[
502, 594, 1278, 514, 400, 776, 1004, 468, 810, 536, 878, 0, 114,
308, 650, 274, 844
],
[
388, 480, 1164, 628, 514, 662, 890, 354, 696, 422, 764, 114, 0,
194, 536, 388, 730
],
[
354, 674, 1130, 822, 708, 628, 856, 320, 662, 388, 730, 308,
194, 0, 342, 422, 536
],
[
468, 1016, 788, 1164, 1050, 514, 514, 662, 320, 274, 388, 650,
536, 342, 0, 764, 194
],
[
776, 868, 1552, 560, 674, 1050, 1278, 742, 1084, 810, 1152, 274,
388, 422, 764, 0, 798
],
[
662, 1210, 754, 1358, 1244, 708, 480, 856, 514, 468, 354, 844,
730, 536, 194, 798, 0
],
]
self._time_matrix = [
[0, 6, 9, 8, 7, 3, 6, 2, 3, 2, 6, 6, 4, 4, 5, 9, 7],
[6, 0, 8, 3, 2, 6, 8, 4, 8, 8, 13, 7, 5, 8, 12, 10, 14],
[9, 8, 0, 11, 10, 6, 3, 9, 5, 8, 4, 15, 14, 13, 9, 18, 9],
[8, 3, 11, 0, 1, 7, 10, 6, 10, 10, 14, 6, 7, 9, 14, 6, 16],
[7, 2, 10, 1, 0, 6, 9, 4, 8, 9, 13, 4, 6, 8, 12, 8, 14],
[3, 6, 6, 7, 6, 0, 2, 3, 2, 2, 7, 9, 7, 7, 6, 12, 8],
[6, 8, 3, 10, 9, 2, 0, 6, 2, 5, 4, 12, 10, 10, 6, 15, 5],
[2, 4, 9, 6, 4, 3, 6, 0, 4, 4, 8, 5, 4, 3, 7, 8, 10],
[3, 8, 5, 10, 8, 2, 2, 4, 0, 3, 4, 9, 8, 7, 3, 13, 6],
[2, 8, 8, 10, 9, 2, 5, 4, 3, 0, 4, 6, 5, 4, 3, 9, 5],
[6, 13, 4, 14, 13, 7, 4, 8, 4, 4, 0, 10, 9, 8, 4, 13, 4],
[6, 7, 15, 6, 4, 9, 12, 5, 9, 6, 10, 0, 1, 3, 7, 3, 10],
[4, 5, 14, 7, 6, 7, 10, 4, 8, 5, 9, 1, 0, 2, 6, 4, 8],
[4, 8, 13, 9, 8, 7, 10, 3, 7, 4, 8, 3, 2, 0, 4, 5, 6],
[5, 12, 9, 14, 12, 6, 6, 7, 3, 3, 4, 7, 6, 4, 0, 9, 2],
[9, 10, 18, 6, 8, 12, 15, 8, 13, 9, 13, 3, 4, 5, 9, 0, 9],
[7, 14, 9, 16, 14, 8, 5, 10, 6, 5, 4, 10, 8, 6, 2, 9, 0],
]
self._time_windows = [
(0, 5), # depot
(7, 12), # 1
(10, 15), # 2
(5, 14), # 3
(5, 13), # 4
(0, 5), # 5
(5, 10), # 6
(0, 10), # 7
(5, 10), # 8
(0, 5), # 9
(10, 16), # 10
(10, 15), # 11
(0, 5), # 12
(5, 10), # 13
(7, 12), # 14
(10, 15), # 15
(5, 15), # 16
]
if args['drop_nodes']:
self._demands = [0, 1, 1, 3, 6, 3, 6, 8, 8, 1, 2, 1, 2, 6, 6, 8, 8]
else:
self._demands = [0, 1, 1, 2, 4, 2, 4, 8, 8, 1, 2, 1, 2, 4, 4, 8, 8]
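        # With --drop-nodes the total demand (70) deliberately exceeds the total
        # fleet capacity of 4 * 15 = 60, forcing the solver to drop some nodes.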
self._pickups_deliveries = [
[1, 6],
[2, 10],
[4, 3],
[5, 9],
[7, 8],
[15, 11],
[13, 12],
[16, 14],
]
if args['tsp']:
self._num_vehicles = 1
else:
self._num_vehicles = 4
self._vehicle_capacities = [15, 15, 15, 15]
if args['resources']:
self._vehicle_load_time = 5
self._vehicle_unload_time = 5
self._depot = 0
self._depot_capacity = 2
self._starts = [1, 2, 15, 16]
self._ends = [0, 0, 0, 0]
@property
def locations(self):
"""Gets the locations."""
return self._locations
@property
def distance_matrix(self):
"""Gets the distance matrix."""
return self._distance_matrix
@property
def time_matrix(self):
"""Gets the time matrix."""
return self._time_matrix
@property
def time_windows(self):
"""Gets the time windows."""
return self._time_windows
@property
def demands(self):
"""Gets the locations demands."""
return self._demands
@property
def pickups_deliveries(self):
"""Gets the pickups deliveries."""
return self._pickups_deliveries
@property
def num_vehicles(self):
"""Gets the number of vehicles."""
return self._num_vehicles
@property
def vehicle_capacities(self):
"""Gets the capacity of each vehicles."""
return self._vehicle_capacities
@property
def vehicle_load_time(self):
"""Gets the load time of each vehicles."""
return self._vehicle_load_time
@property
def vehicle_unload_time(self):
"""Gets the unload time of each vehicles."""
return self._vehicle_unload_time
@property
def depot_capacity(self):
"""Gets the depot capacity."""
return self._depot_capacity
@property
def depot(self):
"""Gets the depot node index."""
return self._depot
@property
def starts(self):
"""Gets the start nodes indices."""
return self._starts
@property
def ends(self):
"""Gets the end nodes indices."""
return self._ends
# [END data_model]
###########
# Printer #
###########
class GoogleColorPalette(object):
"""Google color codes palette."""
def __init__(self):
"""Initialize Google ColorPalette."""
self._colors = [('blue', r'#4285F4'), ('red', r'#EA4335'),
('yellow', r'#FBBC05'), ('green', r'#34A853'),
('black', r'#101010'), ('white', r'#FFFFFF')]
def __getitem__(self, key):
"""Gets color name from idx."""
return self._colors[key][0]
def __len__(self):
"""Gets the number of colors."""
return len(self._colors)
@property
def colors(self):
"""Gets the colors list."""
return self._colors
def name(self, idx):
"""Return color name from idx."""
return self._colors[idx][0]
def value(self, idx):
"""Return color value from idx."""
return self._colors[idx][1]
def value_from_name(self, name):
"""Return color value from name."""
return dict(self._colors)[name]
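# Illustrative note: GoogleColorPalette()[0] yields the name 'blue', while
# GoogleColorPalette().value(0) yields the hex code '#4285F4'.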
class SVG(object):
"""SVG draw primitives."""
@staticmethod
def header(size, margin):
"""Writes header."""
print(r'<svg xmlns:xlink="http://www.w3.org/1999/xlink" '
'xmlns="http://www.w3.org/2000/svg" version="1.1"\n'
'width="{width}" height="{height}" '
'viewBox="-{margin} -{margin} {width} {height}">'.format(
width=size[0] + 2 * margin,
height=size[1] + 2 * margin,
margin=margin))
@staticmethod
def definitions(colors):
"""Writes definitions."""
print(r'<!-- Need this definition to make an arrow marker,'
' from https://www.w3.org/TR/svg-markers/ -->')
print(r'<defs>')
for color in colors:
print(
r' <marker id="arrow_{colorname}" viewBox="0 0 16 16" '
'refX="8" refY="8" markerUnits="strokeWidth" markerWidth="5" markerHeight="5" '
'orient="auto">'.format(colorname=color[0]))
print(
r' <path d="M 0 0 L 16 8 L 0 16 z" stroke="none" fill="{color}"/>'
.format(color=color[1]))
print(r' </marker>')
print(r'</defs>')
@staticmethod
def footer():
"""Writes svg footer."""
print(r'</svg>')
@staticmethod
def draw_line(position_1, position_2, size, fg_color):
"""Draws a line."""
line_style = (
r'style="stroke-width:{sz};stroke:{fg};fill:none"').format(
sz=size, fg=fg_color)
print(r'<line x1="{x1}" y1="{y1}" x2="{x2}" y2="{y2}" {style}/>'.format(
x1=position_1[0],
y1=position_1[1],
x2=position_2[0],
y2=position_2[1],
style=line_style))
@staticmethod
def draw_polyline(position_1, position_2, size, fg_color, colorname):
"""Draws a line with arrow maker in the middle."""
polyline_style = (r'style="stroke-width:{sz};stroke:{fg};fill:none;'
'marker-mid:url(#arrow_{colorname})"').format(
sz=size, fg=fg_color, colorname=colorname)
print(r'<polyline points="{x1},{y1} {x2},{y2} {x3},{y3}" {style}/>'.
format(x1=position_1[0],
y1=position_1[1],
x2=(position_1[0] + position_2[0]) / 2,
y2=(position_1[1] + position_2[1]) / 2,
x3=position_2[0],
y3=position_2[1],
style=polyline_style))
@staticmethod
def draw_circle(position, radius, size, fg_color, bg_color='white'):
"""Print a circle."""
circle_style = (
r'style="stroke-width:{sz};stroke:{fg};fill:{bg}"').format(
sz=size, fg=fg_color, bg=bg_color)
print(r'<circle cx="{cx}" cy="{cy}" r="{r}" {style}/>'.format(
cx=position[0], cy=position[1], r=radius, style=circle_style))
@staticmethod
def draw_text(text, position, size, fg_color='none', bg_color='black'):
"""Print a middle centred text."""
text_style = (r'style="text-anchor:middle;font-weight:bold;'
'font-size:{sz};stroke:{fg};fill:{bg}"').format(
sz=size, fg=fg_color, bg=bg_color)
print(r'<text x="{x}" y="{y}" dy="{dy}" {style}>{txt}</text>'.format(
x=position[0],
y=position[1],
dy=size / 3,
style=text_style,
txt=text))
class SVGPrinter(object): # pylint: disable=too-many-instance-attributes
"""Generate Problem as svg file to stdout."""
# pylint: disable=too-many-arguments
def __init__(self, args, data, manager=None, routing=None, assignment=None):
"""Initializes the printer."""
self._args = args
self._data = data
self._manager = manager
self._routing = routing
self._assignment = assignment
# Design variables
self._color_palette = GoogleColorPalette()
self._svg = SVG()
# City block size 114mx80m
self._radius = min(114, 80) / 3
self._stroke_width = self._radius / 4
@property
def data(self):
"""Gets the Data Model."""
return self._data
@property
def manager(self):
"""Gets the RoutingIndexManager."""
return self._manager
@property
def routing(self):
"""Gets the Routing solver."""
return self._routing
@property
def assignment(self):
"""Gets the assignment."""
return self._assignment
@property
def color_palette(self):
"""Gets the color palette."""
return self._color_palette
@property
def svg(self):
"""Gets the svg."""
return self._svg
def draw_grid(self):
"""Draws the city grid."""
print(r'<!-- Print city streets -->')
color = '#969696'
# Horizontal streets
for i in range(9):
p_1 = [0, i * 80]
p_2 = [8 * 114, p_1[1]]
self._svg.draw_line(p_1, p_2, 2, color)
# Vertical streets
for i in range(9):
p_1 = [i * 114, 0]
p_2 = [p_1[0], 8 * 80]
self._svg.draw_line(p_1, p_2, 2, color)
def draw_depot(self):
"""Draws the depot."""
print(r'<!-- Print depot -->')
color = self._color_palette.value_from_name('black')
loc = self._data.locations[self._data.depot]
self._svg.draw_circle(loc, self._radius, self._stroke_width, color,
'white')
self._svg.draw_text(self._data.depot, loc, self._radius, 'none', color)
def draw_depots(self):
"""Draws the depot."""
print(r'<!-- Print depots -->')
# print starts
for vehicle_idx, start in enumerate(self._data.starts):
del vehicle_idx
color = self._color_palette.value_from_name('black')
# color = self._color_palette.value(vehicle_idx)
loc = self._data.locations[start]
self._svg.draw_circle(loc, self._radius, self._stroke_width, color,
'white')
self._svg.draw_text(start, loc, self._radius, 'none', color)
# print end
color = self._color_palette.value_from_name('black')
loc = self._data.locations[0]
self._svg.draw_circle(loc, self._radius, self._stroke_width, color,
'white')
self._svg.draw_text(0, loc, self._radius, 'none', color)
def draw_locations(self):
"""Draws all the locations but the depot."""
print(r'<!-- Print locations -->')
color = self._color_palette.value_from_name('blue')
if not self._args['starts_ends']:
for idx, loc in enumerate(self._data.locations):
if idx == self._data.depot:
continue
self._svg.draw_circle(loc, self._radius, self._stroke_width,
color, 'white')
self._svg.draw_text(idx, loc, self._radius, 'none', color)
else:
for idx, loc in enumerate(self._data.locations):
if idx in self._data.starts + self._data.ends:
continue
self._svg.draw_circle(loc, self._radius, self._stroke_width,
color, 'white')
self._svg.draw_text(idx, loc, self._radius, 'none', color)
def draw_demands(self):
"""Draws all the demands."""
print(r'<!-- Print demands -->')
for idx, loc in enumerate(self._data.locations):
if idx == self._data.depot:
continue
demand = self._data.demands[idx]
position = [
x + y
for x, y in zip(loc, [self._radius * 1.2, self._radius * 1.1])
]
color = self._color_palette.value_from_name('red')
# color = self._color_palette.value(int(math.log(demand, 2)))
self._svg.draw_text(demand, position, self._radius, 'none', color)
def draw_pickups_deliveries(self):
"""Draws all pickups deliveries."""
print(r'<!-- Print pickups deliveries -->')
colorname = 'red'
color = self._color_palette.value_from_name(colorname)
for pickup_delivery in self._data.pickups_deliveries:
self._svg.draw_polyline(self._data.locations[pickup_delivery[0]],
self._data.locations[pickup_delivery[1]],
self._stroke_width, color, colorname)
def draw_time_windows(self):
"""Draws all the time windows."""
print(r'<!-- Print time windows -->')
for idx, loc in enumerate(self._data.locations):
if idx == self._data.depot:
continue
time_window = self._data.time_windows[idx]
position = [
x + y
for x, y in zip(loc, [self._radius * 0, -self._radius * 1.6])
]
color = self._color_palette.value_from_name('red')
self._svg.draw_text(
'[{t1},{t2}]'.format(t1=time_window[0], t2=time_window[1]),
position, self._radius * 0.75, 'white', color)
##############
## ROUTES ##
##############
def draw_drop_nodes(self):
"""Draws the dropped nodes."""
print(r'<!-- Print drop nodes -->')
        if self._assignment is None:
            print('<!-- No solution found. -->')
            return
# Display dropped nodes.
dropped_nodes = []
for node in range(self._routing.Size()):
if self._routing.IsStart(node) or self._routing.IsEnd(node):
continue
if self._assignment.Value(self._routing.NextVar(node)) == node:
dropped_nodes.append(self._manager.IndexToNode(node))
color = self._color_palette.value_from_name('black')
for node_idx in dropped_nodes:
loc = self._data.locations[node_idx]
self._svg.draw_circle(loc, self._radius, self._stroke_width, color,
'white')
self._svg.draw_text(node_idx, loc, self._radius, 'none', color)
def routes(self):
"""Creates the route list from the assignment."""
if self._assignment is None:
print('<!-- No solution found. -->')
return []
routes = []
for vehicle_id in range(self._data.num_vehicles):
index = self._routing.Start(vehicle_id)
route = []
while not self._routing.IsEnd(index):
node_index = self._manager.IndexToNode(index)
route.append(node_index)
index = self._assignment.Value(self._routing.NextVar(index))
node_index = self._manager.IndexToNode(index)
route.append(node_index)
routes.append(route)
return routes
def draw_route(self, route, color, colorname):
"""Draws a Route."""
# First print route
previous_loc_idx = None
for loc_idx in route:
            if previous_loc_idx is not None and previous_loc_idx != loc_idx:
self._svg.draw_polyline(self._data.locations[previous_loc_idx],
self._data.locations[loc_idx],
self._stroke_width, color, colorname)
previous_loc_idx = loc_idx
# Then print location along the route
for loc_idx in route:
if loc_idx != self._data.depot:
loc = self._data.locations[loc_idx]
self._svg.draw_circle(loc, self._radius, self._stroke_width,
color, 'white')
self._svg.draw_text(loc_idx, loc, self._radius, 'none', color)
def draw_routes(self):
"""Draws the routes."""
print(r'<!-- Print routes -->')
for route_idx, route in enumerate(self.routes()):
print(r'<!-- Print route {idx} -->'.format(idx=route_idx))
color = self._color_palette.value(route_idx)
colorname = self._color_palette.name(route_idx)
self.draw_route(route, color, colorname)
def tw_routes(self):
"""Creates the route time window list from the assignment."""
if self._assignment is None:
print('<!-- No solution found. -->')
return []
time_dimension = self._routing.GetDimensionOrDie('Time')
loc_routes = []
tw_routes = []
for vehicle_id in range(self._data.num_vehicles):
index = self._routing.Start(vehicle_id)
# index = self._assignment.Value(self._routing.NextVar(index))
loc_route = []
tw_route = []
while True:
node_index = self._manager.IndexToNode(index)
loc_route.append(node_index)
time_var = time_dimension.CumulVar(index)
t_min = self._assignment.Min(time_var)
t_max = self._assignment.Max(time_var)
tw_route.append((t_min, t_max))
if self._routing.IsEnd(index):
break
index = self._assignment.Value(self._routing.NextVar(index))
loc_routes.append(loc_route)
tw_routes.append(tw_route)
return zip(loc_routes, tw_routes)
def draw_tw_route(self, route_idx, locations, tw_route, color):
"""Draws the time windows for a Route."""
is_start = -1
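        # The depot appears at both ends of a route: is_start == -1 offsets its
        # first label to the left, then is_start == 1 offsets the second to the
        # right; (1.8 + route_idx) stacks the labels of different routes.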
for loc_idx, time_window in zip(locations, tw_route):
loc = self._data.locations[loc_idx]
if loc_idx == 0: # special case for depot
position = [
x + y for x, y in zip(loc, [
self._radius * is_start, self._radius *
(1.8 + route_idx)
])
]
is_start = 1
else:
position = [
x + y
for x, y in zip(loc, [self._radius * 0, self._radius * 1.8])
]
self._svg.draw_text('[{t_min}]'.format(t_min=time_window[0]),
position, self._radius * 0.75, 'white', color)
def draw_tw_routes(self):
"""Draws the time window routes."""
print(r'<!-- Print time window routes -->')
for route_idx, loc_tw in enumerate(self.tw_routes()):
print(r'<!-- Print time window route {} -->'.format(route_idx))
color = self._color_palette.value(route_idx)
self.draw_tw_route(route_idx, loc_tw[0], loc_tw[1], color)
def print_to_console(self):
"""Prints a full svg document on stdout."""
margin = self._radius * 2 + 2
size = [8 * 114, 8 * 80]
self._svg.header(size, margin)
self._svg.definitions(self._color_palette.colors)
self.draw_grid()
if not self._args['solution']:
if self._args['pickup_delivery']:
self.draw_pickups_deliveries()
self.draw_locations()
else:
self.draw_routes()
self.draw_drop_nodes()
if self._args['starts_ends']:
self.draw_depots()
else:
self.draw_depot()
if self._args['capacity']:
self.draw_demands()
if self._args['drop_nodes']:
self.draw_demands()
if self._args['time_windows'] or self._args['resources']:
self.draw_time_windows()
if ((self._args['time_windows'] or self._args['resources']) and
self._args['solution']):
self.draw_tw_routes()
self._svg.footer()
########
# Main #
########
def main(): # pylint: disable=too-many-locals,too-many-branches
"""Entry point of the program."""
parser = argparse.ArgumentParser(description='Output VRP as svg image.')
parser.add_argument('-tsp',
'--tsp',
action='store_true',
help='use 1 vehicle')
parser.add_argument('-vrp',
'--vrp',
action='store_true',
                        help='use 4 vehicles')
parser.add_argument('-gs',
'--global-span',
action='store_true',
help='use global span constraints')
parser.add_argument('-c',
'--capacity',
action='store_true',
help='use capacity constraints')
parser.add_argument('-r',
'--resources',
action='store_true',
help='use resources constraints')
parser.add_argument('-dn',
'--drop-nodes',
action='store_true',
                        help='allow drop nodes (disjunction constraints)')
parser.add_argument('-tw',
'--time-windows',
action='store_true',
help='use time-window constraints')
parser.add_argument('-se',
'--starts-ends',
action='store_true',
help='use multiple starts & ends')
parser.add_argument('-pd',
'--pickup-delivery',
action='store_true',
help='use pickup & delivery constraints')
parser.add_argument('-fifo',
'--fifo',
action='store_true',
help='use pickup & delivery FIFO Policy')
parser.add_argument('-lifo',
'--lifo',
action='store_true',
help='use pickup & delivery LIFO Policy')
parser.add_argument('-s',
'--solution',
action='store_true',
help='print solution')
args = vars(parser.parse_args())
# Instantiate the data problem.
# [START data]
data = DataModel(args)
# [END data]
if not args['solution']:
        # Print svg on stdout
printer = SVGPrinter(args, data)
printer.print_to_console()
return 0
# Create the routing index manager.
# [START index_manager]
if args['starts_ends']:
manager = pywrapcp.RoutingIndexManager(len(data.locations),
data.num_vehicles, data.starts,
data.ends)
else:
manager = pywrapcp.RoutingIndexManager(len(data.locations),
data.num_vehicles, data.depot)
# [END index_manager]
# Create Routing Model.
# [START routing_model]
routing = pywrapcp.RoutingModel(manager)
# [END routing_model]
# Register distance callback
def distance_callback(from_index, to_index):
"""Returns the manhattan distance between the two nodes."""
# Convert from routing variable Index to distance matrix NodeIndex.
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return data.distance_matrix[from_node][to_node]
distance_callback_index = routing.RegisterTransitCallback(distance_callback)
# Register time callback
def time_callback(from_index, to_index):
"""Returns the manhattan distance travel time between the two nodes."""
# Convert from routing variable Index to distance matrix NodeIndex.
from_node = manager.IndexToNode(from_index)
to_node = manager.IndexToNode(to_index)
return data.time_matrix[from_node][to_node]
time_callback_index = routing.RegisterTransitCallback(time_callback)
# Register demands callback
def demand_callback(from_index):
"""Returns the demand of the node."""
# Convert from routing variable Index to demands NodeIndex.
from_node = manager.IndexToNode(from_index)
return data.demands[from_node]
demand_callback_index = routing.RegisterUnaryTransitCallback(
demand_callback)
if args['time_windows'] or args['resources']:
routing.SetArcCostEvaluatorOfAllVehicles(time_callback_index)
else:
routing.SetArcCostEvaluatorOfAllVehicles(distance_callback_index)
if args['global_span'] or args['pickup_delivery']:
dimension_name = 'Distance'
routing.AddDimension(distance_callback_index, 0, 3000, True,
dimension_name)
distance_dimension = routing.GetDimensionOrDie(dimension_name)
distance_dimension.SetGlobalSpanCostCoefficient(100)
if args['capacity'] or args['drop_nodes']:
routing.AddDimensionWithVehicleCapacity(demand_callback_index, 0,
data.vehicle_capacities, True,
'Capacity')
if args['drop_nodes']:
        # Allow dropping nodes.
penalty = 1000
for node in range(1, len(data.locations)):
routing.AddDisjunction([manager.NodeToIndex(node)], penalty)
if args['pickup_delivery']:
dimension_name = 'Distance'
routing.AddDimension(distance_callback_index, 0, 3000, True,
dimension_name)
distance_dimension = routing.GetDimensionOrDie(dimension_name)
distance_dimension.SetGlobalSpanCostCoefficient(100)
for request in data.pickups_deliveries:
pickup_index = manager.NodeToIndex(request[0])
delivery_index = manager.NodeToIndex(request[1])
routing.AddPickupAndDelivery(pickup_index, delivery_index)
routing.solver().Add(
routing.VehicleVar(pickup_index) == routing.VehicleVar(
delivery_index))
routing.solver().Add(
distance_dimension.CumulVar(pickup_index) <=
distance_dimension.CumulVar(delivery_index))
if args['fifo']:
routing.SetPickupAndDeliveryPolicyOfAllVehicles(
pywrapcp.RoutingModel.FIFO)
if args['lifo']:
routing.SetPickupAndDeliveryPolicyOfAllVehicles(
pywrapcp.RoutingModel.LIFO)
if args['starts_ends']:
dimension_name = 'Distance'
routing.AddDimension(distance_callback_index, 0, 2000, True,
dimension_name)
distance_dimension = routing.GetDimensionOrDie(dimension_name)
distance_dimension.SetGlobalSpanCostCoefficient(100)
time = 'Time'
if args['time_windows'] or args['resources']:
routing.AddDimension(time_callback_index, 30, 30, False, time)
time_dimension = routing.GetDimensionOrDie(time)
# Add time window constraints for each location except depot and 'copy' the
# slack var in the solution object (aka Assignment) to print it.
for location_idx, time_window in enumerate(data.time_windows):
if location_idx == 0:
continue
index = manager.NodeToIndex(location_idx)
time_dimension.CumulVar(index).SetRange(time_window[0],
time_window[1])
routing.AddToAssignment(time_dimension.SlackVar(index))
# Add time window constraints for each vehicle start node and 'copy' the
# slack var in the solution object (aka Assignment) to print it.
for vehicle_id in range(data.num_vehicles):
index = routing.Start(vehicle_id)
time_window = data.time_windows[0]
time_dimension.CumulVar(index).SetRange(time_window[0],
time_window[1])
routing.AddToAssignment(time_dimension.SlackVar(index))
# Instantiate route start and end times to produce feasible times.
for vehicle_id in range(data.num_vehicles):
routing.AddVariableMinimizedByFinalizer(
time_dimension.CumulVar(routing.End(vehicle_id)))
routing.AddVariableMinimizedByFinalizer(
time_dimension.CumulVar(routing.Start(vehicle_id)))
if args['resources']:
# Add resource constraints at the depot.
time_dimension = routing.GetDimensionOrDie(time)
solver = routing.solver()
intervals = []
for i in range(data.num_vehicles):
# Add loading time at start of routes
intervals.append(
solver.FixedDurationIntervalVar(
time_dimension.CumulVar(routing.Start(i)),
data.vehicle_load_time, 'depot_interval'))
# Add unloading time at end of routes.
intervals.append(
solver.FixedDurationIntervalVar(
time_dimension.CumulVar(routing.End(i)),
                    data.vehicle_unload_time, 'depot_interval'))
        depot_usage = [1] * (data.num_vehicles * 2)
solver.AddConstraint(
solver.Cumulative(intervals, depot_usage, data.depot_capacity,
'depot'))
# Setting first solution heuristic (cheapest addition).
search_parameters = pywrapcp.DefaultRoutingSearchParameters()
# pylint: disable=no-member
if not args['pickup_delivery']:
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC)
else:
search_parameters.first_solution_strategy = (
routing_enums_pb2.FirstSolutionStrategy.PARALLEL_CHEAPEST_INSERTION)
search_parameters.local_search_metaheuristic = (
routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH)
search_parameters.time_limit.FromSeconds(2)
# Solve the problem.
assignment = routing.SolveWithParameters(search_parameters)
# Print the solution.
printer = SVGPrinter(args, data, manager, routing, assignment)
printer.print_to_console()
return 0
if __name__ == '__main__':
main()
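# Usage sketch (assumed invocations; the exact flags are defined by the
# argparse setup in main() above):
#   python routing_svg.py --tsp --solution > tsp.svg
#   python routing_svg.py --vrp --global-span --solution > vrp.svg
#   python routing_svg.py --capacity --drop-nodes --solution > cvrp.svg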
|
liangazhou/django-rdp | refs/heads/master | packages/Django-1.8.6/django/contrib/gis/db/backends/mysql/operations.py | 101 | from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.db.backends.mysql.operations import DatabaseOperations
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
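    # Note: every lookup maps onto an MBR* (minimum bounding rectangle)
    # function; MySQL of this era evaluates spatial relations on bounding
    # boxes only, hence the PostGIS-consistency remarks above.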
disallowed_aggregates = (aggregates.Collect, aggregates.Extent, aggregates.Extent3D, aggregates.MakeLine, aggregates.Union)
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
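# Illustrative sketch (not part of Django): for a plain geometry value the
# placeholder above expands to the WKT constructor, so a spatial lookup is
# rendered roughly as
#   MBRContains(GeomFromText(%s), `geom`)
# with the adapted WKT string later bound to the %s parameter.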
|
clickbaron/userinfuser | refs/heads/master | serverside/fantasm/exceptions.py | 28 | """ Fantasm: A taskqueue-based Finite State Machine for App Engine Python
Docs and examples: http://code.google.com/p/fantasm/
Copyright 2010 VendAsta Technologies Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from fantasm import constants
class FSMRuntimeError(Exception):
""" The parent class of all Fantasm runtime errors. """
pass
class UnknownMachineError(FSMRuntimeError):
""" A machine could not be found. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Cannot find machine "%s".' % machineName
super(UnknownMachineError, self).__init__(message)
class UnknownStateError(FSMRuntimeError):
""" A state could not be found """
def __init__(self, machineName, stateName):
""" Initialize exception """
message = 'State "%s" is unknown. (Machine %s)' % (stateName, machineName)
super(UnknownStateError, self).__init__(message)
class UnknownEventError(FSMRuntimeError):
""" An event and the transition bound to it could not be found. """
def __init__(self, event, machineName, stateName):
""" Initialize exception """
message = 'Cannot find transition for event "%s". (Machine %s, State %s)' % (event, machineName, stateName)
super(UnknownEventError, self).__init__(message)
class InvalidEventNameRuntimeError(FSMRuntimeError):
""" Event returned from dispatch is invalid (and would cause problems with task name restrictions). """
def __init__(self, event, machineName, stateName, instanceName):
""" Initialize exception """
message = 'Event "%r" returned by state is invalid. It must be a string and match pattern "%s". ' \
'(Machine %s, State %s, Instance %s)' % \
(event, constants.NAME_PATTERN, machineName, stateName, instanceName)
super(InvalidEventNameRuntimeError, self).__init__(message)
class InvalidFinalEventRuntimeError(FSMRuntimeError):
""" Event returned when a final state action returns an event. """
def __init__(self, event, machineName, stateName, instanceName):
""" Initialize exception """
message = 'Event "%r" returned by final state is invalid. ' \
'(Machine %s, State %s, Instance %s)' % \
(event, machineName, stateName, instanceName)
super(InvalidFinalEventRuntimeError, self).__init__(message)
class FanInWriteLockFailureRuntimeError(FSMRuntimeError):
""" Exception when fan-in writers are unable to acquire a lock. """
def __init__(self, event, machineName, stateName, instanceName):
""" Initialize exception """
message = 'Event "%r" unable to to be fanned-in due to write lock failure. ' \
'(Machine %s, State %s, Instance %s)' % \
(event, machineName, stateName, instanceName)
super(FanInWriteLockFailureRuntimeError, self).__init__(message)
class FanInReadLockFailureRuntimeError(FSMRuntimeError):
""" Exception when fan-in readers are unable to acquire a lock. """
def __init__(self, event, machineName, stateName, instanceName):
""" Initialize exception """
message = 'Event "%r" unable to to be fanned-in due to read lock failure. ' \
'(Machine %s, State %s, Instance %s)' % \
(event, machineName, stateName, instanceName)
super(FanInReadLockFailureRuntimeError, self).__init__(message)
class RequiredServicesUnavailableRuntimeError(FSMRuntimeError):
""" Some of the required API services are not available. """
def __init__(self, unavailableServices):
""" Initialize exception """
        message = 'The following services will not be available within the next %d seconds: %s. This task will be retried.' % \
(constants.REQUEST_LENGTH, unavailableServices)
super(RequiredServicesUnavailableRuntimeError, self).__init__(message)
class ConfigurationError(Exception):
""" Parent class for all Fantasm configuration errors. """
pass
class YamlFileNotFoundError(ConfigurationError):
""" The Yaml file could not be found. """
def __init__(self, filename):
""" Initialize exception """
message = 'Yaml configuration file "%s" not found.' % filename
super(YamlFileNotFoundError, self).__init__(message)
class YamlFileCircularImportError(ConfigurationError):
""" The Yaml is involved in a circular import. """
def __init__(self, filename):
""" Initialize exception """
message = 'Yaml configuration file "%s" involved in a circular import.' % filename
super(YamlFileCircularImportError, self).__init__(message)
class StateMachinesAttributeRequiredError(ConfigurationError):
""" The YAML file requires a 'state_machines' attribute. """
def __init__(self):
""" Initialize exception """
message = '"%s" is required attribute of yaml file.' % constants.STATE_MACHINES_ATTRIBUTE
super(StateMachinesAttributeRequiredError, self).__init__(message)
class MachineNameRequiredError(ConfigurationError):
""" Each machine requires a name. """
def __init__(self):
""" Initialize exception """
message = '"%s" is required attribute of machine.' % constants.MACHINE_NAME_ATTRIBUTE
super(MachineNameRequiredError, self).__init__(message)
class InvalidQueueNameError(ConfigurationError):
""" The queue name was not valid. """
def __init__(self, queueName, machineName):
""" Initialize exception """
message = 'Queue name "%s" must exist in queue.yaml. (Machine %s)' % (queueName, machineName)
super(InvalidQueueNameError, self).__init__(message)
class InvalidMachineNameError(ConfigurationError):
""" The machine name was not valid. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Machine name must match pattern "%s". (Machine %s)' % (constants.NAME_PATTERN, machineName)
super(InvalidMachineNameError, self).__init__(message)
class MachineNameNotUniqueError(ConfigurationError):
""" Each machine in a YAML file must have a unique name. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Machine names must be unique. (Machine %s)' % machineName
super(MachineNameNotUniqueError, self).__init__(message)
class MachineHasMultipleInitialStatesError(ConfigurationError):
""" Each machine must have exactly one initial state. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Machine has multiple initial states, but only one is allowed. (Machine %s)' % machineName
super(MachineHasMultipleInitialStatesError, self).__init__(message)
class MachineHasNoInitialStateError(ConfigurationError):
""" Each machine must have exactly one initial state. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Machine has no initial state, exactly one is required. (Machine %s)' % machineName
super(MachineHasNoInitialStateError, self).__init__(message)
class MachineHasNoFinalStateError(ConfigurationError):
""" Each machine must have at least one final state. """
def __init__(self, machineName):
""" Initialize exception """
message = 'Machine has no final states, but at least one is required. (Machine %s)' % machineName
super(MachineHasNoFinalStateError, self).__init__(message)
class StateNameRequiredError(ConfigurationError):
""" Each state requires a name. """
def __init__(self, machineName):
""" Initialize exception """
message = '"%s" is required attribute of state. (Machine %s)' % (constants.STATE_NAME_ATTRIBUTE, machineName)
super(StateNameRequiredError, self).__init__(message)
class InvalidStateNameError(ConfigurationError):
""" The state name was not valid. """
def __init__(self, machineName, stateName):
""" Initialize exception """
message = 'State name must match pattern "%s". (Machine %s, State %s)' % \
(constants.NAME_PATTERN, machineName, stateName)
super(InvalidStateNameError, self).__init__(message)
class StateNameNotUniqueError(ConfigurationError):
""" Each state within a machine must have a unique name. """
def __init__(self, machineName, stateName):
""" Initialize exception """
message = 'State names within a machine must be unique. (Machine %s, State %s)' % \
(machineName, stateName)
super(StateNameNotUniqueError, self).__init__(message)
class StateActionRequired(ConfigurationError):
""" Each state requires an action. """
def __init__(self, machineName, stateName):
""" Initialize exception """
message = '"%s" is required attribute of state. (Machine %s, State %s)' % \
(constants.STATE_ACTION_ATTRIBUTE, machineName, stateName)
super(StateActionRequired, self).__init__(message)
class UnknownModuleError(ConfigurationError):
""" When resolving actions, the module was not found. """
def __init__(self, moduleName, importError):
""" Initialize exception """
message = 'Module "%s" cannot be imported due to "%s".' % (moduleName, importError)
super(UnknownModuleError, self).__init__(message)
class UnknownClassError(ConfigurationError):
""" When resolving actions, the class was not found. """
def __init__(self, moduleName, className):
""" Initialize exception """
message = 'Class "%s" was not found in module "%s".' % (className, moduleName)
super(UnknownClassError, self).__init__(message)
class UnknownObjectError(ConfigurationError):
""" When resolving actions, the object was not found. """
def __init__(self, objectName):
""" Initialize exception """
message = 'Object "%s" was not found.' % (objectName)
super(UnknownObjectError, self).__init__(message)
class UnexpectedObjectTypeError(ConfigurationError):
""" When resolving actions, the object was not found. """
def __init__(self, objectName, expectedType):
""" Initialize exception """
message = 'Object "%s" is not of type "%s".' % (objectName, expectedType)
super(UnexpectedObjectTypeError, self).__init__(message)
class InvalidMaxRetriesError(ConfigurationError):
""" max_retries must be a positive integer. """
def __init__(self, machineName, maxRetries):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.MAX_RETRIES_ATTRIBUTE, maxRetries, machineName)
super(InvalidMaxRetriesError, self).__init__(message)
class InvalidTaskRetryLimitError(ConfigurationError):
""" task_retry_limit must be a positive integer. """
def __init__(self, machineName, taskRetryLimit):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.TASK_RETRY_LIMIT_ATTRIBUTE, taskRetryLimit, machineName)
super(InvalidTaskRetryLimitError, self).__init__(message)
class InvalidMinBackoffSecondsError(ConfigurationError):
""" min_backoff_seconds must be a positive integer. """
def __init__(self, machineName, minBackoffSeconds):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.MIN_BACKOFF_SECONDS_ATTRIBUTE, minBackoffSeconds, machineName)
super(InvalidMinBackoffSecondsError, self).__init__(message)
class InvalidMaxBackoffSecondsError(ConfigurationError):
""" max_backoff_seconds must be a positive integer. """
def __init__(self, machineName, maxBackoffSeconds):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.MAX_BACKOFF_SECONDS_ATTRIBUTE, maxBackoffSeconds, machineName)
super(InvalidMaxBackoffSecondsError, self).__init__(message)
class InvalidTaskAgeLimitError(ConfigurationError):
""" task_age_limit must be a positive integer. """
def __init__(self, machineName, taskAgeLimit):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.TASK_AGE_LIMIT_ATTRIBUTE, taskAgeLimit, machineName)
super(InvalidTaskAgeLimitError, self).__init__(message)
class InvalidMaxDoublingsError(ConfigurationError):
""" max_doublings must be a positive integer. """
def __init__(self, machineName, maxDoublings):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s)' % \
(constants.MAX_DOUBLINGS_ATTRIBUTE, maxDoublings, machineName)
super(InvalidMaxDoublingsError, self).__init__(message)
class MaxRetriesAndTaskRetryLimitMutuallyExclusiveError(ConfigurationError):
""" max_retries and task_retry_limit cannot both be specified on a machine. """
def __init__(self, machineName):
""" Initialize exception """
message = 'max_retries and task_retry_limit cannot both be specified on a machine. (Machine %s)' % \
machineName
super(MaxRetriesAndTaskRetryLimitMutuallyExclusiveError, self).__init__(message)
class InvalidLoggingError(ConfigurationError):
""" The logging value was not valid. """
def __init__(self, machineName, loggingValue):
""" Initialize exception """
message = 'logging attribute "%s" is invalid (must be one of "%s"). (Machine %s)' % \
(loggingValue, constants.VALID_LOGGING_VALUES, machineName)
super(InvalidLoggingError, self).__init__(message)
class TransitionNameRequiredError(ConfigurationError):
""" Each transition requires a name. """
def __init__(self, machineName):
""" Initialize exception """
message = '"%s" is required attribute of transition. (Machine %s)' % \
(constants.TRANS_NAME_ATTRIBUTE, machineName)
super(TransitionNameRequiredError, self).__init__(message)
class InvalidTransitionNameError(ConfigurationError):
""" The transition name was invalid. """
def __init__(self, machineName, transitionName):
""" Initialize exception """
message = 'Transition name must match pattern "%s". (Machine %s, Transition %s)' % \
(constants.NAME_PATTERN, machineName, transitionName)
super(InvalidTransitionNameError, self).__init__(message)
class TransitionNameNotUniqueError(ConfigurationError):
""" Each transition within a machine must have a unique name. """
def __init__(self, machineName, transitionName):
""" Initialize exception """
message = 'Transition names within a machine must be unique. (Machine %s, Transition %s)' % \
(machineName, transitionName)
super(TransitionNameNotUniqueError, self).__init__(message)
class InvalidTransitionEventNameError(ConfigurationError):
""" The transition's event name was invalid. """
def __init__(self, machineName, fromStateName, eventName):
""" Initialize exception """
message = 'Transition event name must match pattern "%s". (Machine %s, State %s, Event %s)' % \
(constants.NAME_PATTERN, machineName, fromStateName, eventName)
super(InvalidTransitionEventNameError, self).__init__(message)
class TransitionUnknownToStateError(ConfigurationError):
""" Each transition must specify a to state. """
def __init__(self, machineName, transitionName, toState):
""" Initialize exception """
message = 'Transition to state is undefined. (Machine %s, Transition %s, To %s)' % \
(machineName, transitionName, toState)
super(TransitionUnknownToStateError, self).__init__(message)
class TransitionToRequiredError(ConfigurationError):
""" The specified to state is unknown. """
def __init__(self, machineName, transitionName):
""" Initialize exception """
message = '"%s" is required attribute of transition. (Machine %s, Transition %s)' % \
(constants.TRANS_TO_ATTRIBUTE, machineName, transitionName)
super(TransitionToRequiredError, self).__init__(message)
class TransitionEventRequiredError(ConfigurationError):
""" Each transition requires an event to be bound to. """
def __init__(self, machineName, fromStateName):
""" Initialize exception """
message = '"%s" is required attribute of transition. (Machine %s, State %s)' % \
(constants.TRANS_EVENT_ATTRIBUTE, machineName, fromStateName)
super(TransitionEventRequiredError, self).__init__(message)
class InvalidCountdownError(ConfigurationError):
""" Countdown must be a positive integer. """
def __init__(self, countdown, machineName, fromStateName):
""" Initialize exception """
message = 'Countdown "%s" must be a positive integer. (Machine %s, State %s)' % \
(countdown, machineName, fromStateName)
super(InvalidCountdownError, self).__init__(message)
class InvalidMachineAttributeError(ConfigurationError):
""" Unknown machine attributes were found. """
def __init__(self, machineName, badAttributes):
""" Initialize exception """
        message = 'The following are invalid attributes of a machine: %s. (Machine %s)' % \
(badAttributes, machineName)
super(InvalidMachineAttributeError, self).__init__(message)
class InvalidStateAttributeError(ConfigurationError):
""" Unknown state attributes were found. """
def __init__(self, machineName, stateName, badAttributes):
""" Initialize exception """
        message = 'The following are invalid attributes of a state: %s. (Machine %s, State %s)' % \
(badAttributes, machineName, stateName)
super(InvalidStateAttributeError, self).__init__(message)
class InvalidTransitionAttributeError(ConfigurationError):
""" Unknown transition attributes were found. """
def __init__(self, machineName, fromStateName, badAttributes):
""" Initialize exception """
        message = 'The following are invalid attributes of a transition: %s. (Machine %s, State %s)' % \
(badAttributes, machineName, fromStateName)
super(InvalidTransitionAttributeError, self).__init__(message)
class InvalidInterfaceError(ConfigurationError):
""" Interface errors. """
pass
class InvalidContinuationInterfaceError(InvalidInterfaceError):
""" The specified state was denoted as a continuation, but it does not have a continuation method. """
def __init__(self, machineName, stateName):
message = 'The state was specified as continuation=True, but the action class does not have a ' + \
'continuation() method. (Machine %s, State %s)' % (machineName, stateName)
super(InvalidContinuationInterfaceError, self).__init__(message)
class InvalidActionInterfaceError(InvalidInterfaceError):
""" The specified state's action class does not have an execute() method. """
def __init__(self, machineName, stateName):
message = 'The state\'s action class does not have an execute() method. (Machine %s, State %s)' % \
(machineName, stateName)
super(InvalidActionInterfaceError, self).__init__(message)
class InvalidEntryInterfaceError(InvalidInterfaceError):
""" The specified state's entry class does not have an execute() method. """
def __init__(self, machineName, stateName):
message = 'The state\'s entry class does not have an execute() method. (Machine %s, State %s)' % \
(machineName, stateName)
super(InvalidEntryInterfaceError, self).__init__(message)
class InvalidExitInterfaceError(InvalidInterfaceError):
""" The specified state's exit class does not have an execute() method. """
def __init__(self, machineName, stateName):
message = 'The state\'s exit class does not have an execute() method. (Machine %s, State %s)' % \
(machineName, stateName)
super(InvalidExitInterfaceError, self).__init__(message)
class InvalidFanInError(ConfigurationError):
""" fan_in must be a positive integer. """
def __init__(self, machineName, stateName, fanInPeriod):
""" Initialize exception """
message = '%s "%s" is invalid. Must be an integer. (Machine %s, State %s)' % \
(constants.STATE_FAN_IN_ATTRIBUTE, fanInPeriod, machineName, stateName)
super(InvalidFanInError, self).__init__(message)
class FanInContinuationNotSupportedError(ConfigurationError):
""" Cannot have fan_in and continuation on the same state, because it hurts our head at the moment. """
def __init__(self, machineName, stateName):
""" Initialize exception """
message = '%s and %s are not supported on the same state. Maybe some day... (Machine %s, State %s)' % \
(constants.STATE_CONTINUATION_ATTRIBUTE, constants.STATE_FAN_IN_ATTRIBUTE,
machineName, stateName)
super(FanInContinuationNotSupportedError, self).__init__(message)
class UnsupportedConfigurationError(ConfigurationError):
""" Some exit and transition actions are not allowed near fan_in and continuation. At least not at the moment. """
def __init__(self, machineName, stateName, message):
""" Initialize exception """
message = '%s (Machine %s, State %s)' % (message, machineName, stateName)
        super(UnsupportedConfigurationError, self).__init__(message)
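if __name__ == '__main__':
    # Minimal usage sketch (illustrative, not part of fantasm): each exception
    # composes a human-readable message from the offending names.
    try:
        raise UnknownEventError('next', 'OrderMachine', 'pending')
    except FSMRuntimeError as exc:
        print(exc)  # Cannot find transition for event "next". (Machine OrderMachine, State pending)
 |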
lsaffre/lino-welfare | refs/heads/master | lino_welfare/projects/gerd/tests/dumps/18.8.0/dashboard_widget.py | 4 | # -*- coding: UTF-8 -*-
logger.info("Loading 0 objects to table dashboard_widget...")
# fields: id, seqno, user, item_name, visible
loader.flush_deferred_objects()
|
ManoSeimas/manoseimas.lt | refs/heads/master | manoseimas/flatpages/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
atomic83/youtube-dl | refs/heads/master | youtube_dl/extractor/__init__.py | 1 | from __future__ import unicode_literals
from .abc import ABCIE
from .abc7news import Abc7NewsIE
from .academicearth import AcademicEarthCourseIE
from .acast import (
ACastIE,
ACastChannelIE,
)
from .addanime import AddAnimeIE
from .adobetv import (
AdobeTVIE,
AdobeTVShowIE,
AdobeTVChannelIE,
AdobeTVVideoIE,
)
from .adultswim import AdultSwimIE
from .aftonbladet import AftonbladetIE
from .airmozilla import AirMozillaIE
from .aljazeera import AlJazeeraIE
from .alphaporno import AlphaPornoIE
from .anitube import AnitubeIE
from .anysex import AnySexIE
from .aol import AolIE
from .allocine import AllocineIE
from .aparat import AparatIE
from .appleconnect import AppleConnectIE
from .appletrailers import (
AppleTrailersIE,
AppleTrailersSectionIE,
)
from .archiveorg import ArchiveOrgIE
from .ard import (
ARDIE,
ARDMediathekIE,
SportschauIE,
)
from .arte import (
ArteTvIE,
ArteTVPlus7IE,
ArteTVCreativeIE,
ArteTVConcertIE,
ArteTVFutureIE,
ArteTVDDCIE,
ArteTVEmbedIE,
)
from .atresplayer import AtresPlayerIE
from .atttechchannel import ATTTechChannelIE
from .audimedia import AudiMediaIE
from .audiomack import AudiomackIE, AudiomackAlbumIE
from .azubu import AzubuIE
from .baidu import BaiduVideoIE
from .bambuser import BambuserIE, BambuserChannelIE
from .bandcamp import BandcampIE, BandcampAlbumIE
from .bbc import (
BBCCoUkIE,
BBCCoUkArticleIE,
BBCIE,
)
from .beeg import BeegIE
from .behindkink import BehindKinkIE
from .beatportpro import BeatportProIE
from .bet import BetIE
from .bild import BildIE
from .bilibili import BiliBiliIE
from .bleacherreport import (
BleacherReportIE,
BleacherReportCMSIE,
)
from .blinkx import BlinkxIE
from .bloomberg import BloombergIE
from .bpb import BpbIE
from .br import BRIE
from .breakcom import BreakIE
from .brightcove import (
BrightcoveLegacyIE,
BrightcoveNewIE,
)
from .buzzfeed import BuzzFeedIE
from .byutv import BYUtvIE
from .c56 import C56IE
from .camdemy import (
CamdemyIE,
CamdemyFolderIE
)
from .canalplus import CanalplusIE
from .canalc2 import Canalc2IE
from .cbs import CBSIE
from .cbsnews import CBSNewsIE
from .cbssports import CBSSportsIE
from .ccc import CCCIE
from .ceskatelevize import CeskaTelevizeIE
from .channel9 import Channel9IE
from .chaturbate import ChaturbateIE
from .chilloutzone import ChilloutzoneIE
from .chirbit import (
ChirbitIE,
ChirbitProfileIE,
)
from .cinchcast import CinchcastIE
from .cinemassacre import CinemassacreIE
from .clipfish import ClipfishIE
from .cliphunter import CliphunterIE
from .clipsyndicate import ClipsyndicateIE
from .cloudy import CloudyIE
from .clubic import ClubicIE
from .clyp import ClypIE
from .cmt import CMTIE
from .cnet import CNETIE
from .cnn import (
CNNIE,
CNNBlogsIE,
CNNArticleIE,
)
from .collegehumor import CollegeHumorIE
from .collegerama import CollegeRamaIE
from .comedycentral import ComedyCentralIE, ComedyCentralShowsIE
from .comcarcoff import ComCarCoffIE
from .commonmistakes import CommonMistakesIE, UnicodeBOMIE
from .condenast import CondeNastIE
from .cracked import CrackedIE
from .criterion import CriterionIE
from .crooksandliars import CrooksAndLiarsIE
from .crunchyroll import (
CrunchyrollIE,
CrunchyrollShowPlaylistIE
)
from .cspan import CSpanIE
from .ctsnews import CtsNewsIE
from .dailymotion import (
DailymotionIE,
DailymotionPlaylistIE,
DailymotionUserIE,
DailymotionCloudIE,
)
from .daum import (
DaumIE,
DaumClipIE,
)
from .dbtv import DBTVIE
from .dcn import (
DCNIE,
DCNVideoIE,
DCNLiveIE,
DCNSeasonIE,
)
from .dctp import DctpTvIE
from .deezer import DeezerPlaylistIE
from .democracynow import DemocracynowIE
from .dfb import DFBIE
from .dhm import DHMIE
from .dotsub import DotsubIE
from .douyutv import DouyuTVIE
from .dplay import DPlayIE
from .dramafever import (
DramaFeverIE,
DramaFeverSeriesIE,
)
from .dreisat import DreiSatIE
from .drbonanza import DRBonanzaIE
from .drtuber import DrTuberIE
from .drtv import DRTVIE
from .dvtv import DVTVIE
from .dump import DumpIE
from .dumpert import DumpertIE
from .defense import DefenseGouvFrIE
from .discovery import DiscoveryIE
from .dropbox import DropboxIE
from .eagleplatform import EaglePlatformIE
from .ebaumsworld import EbaumsWorldIE
from .echomsk import EchoMskIE
from .ehow import EHowIE
from .eighttracks import EightTracksIE
from .einthusan import EinthusanIE
from .eitb import EitbIE
from .ellentv import (
EllenTVIE,
EllenTVClipsIE,
)
from .elpais import ElPaisIE
from .embedly import EmbedlyIE
from .engadget import EngadgetIE
from .eporner import EpornerIE
from .eroprofile import EroProfileIE
from .escapist import EscapistIE
from .espn import ESPNIE
from .esri import EsriVideoIE
from .europa import EuropaIE
from .everyonesmixtape import EveryonesMixtapeIE
from .exfm import ExfmIE
from .expotv import ExpoTVIE
from .extremetube import ExtremeTubeIE
from .facebook import FacebookIE
from .faz import FazIE
from .fc2 import FC2IE
from .fczenit import FczenitIE
from .firstpost import FirstpostIE
from .firsttv import FirstTVIE
from .fivemin import FiveMinIE
from .fivetv import FiveTVIE
from .fktv import FKTVIE
from .flickr import FlickrIE
from .folketinget import FolketingetIE
from .footyroom import FootyRoomIE
from .fourtube import FourTubeIE
from .fox import FOXIE
from .foxgay import FoxgayIE
from .foxnews import FoxNewsIE
from .foxsports import FoxSportsIE
from .franceculture import (
FranceCultureIE,
FranceCultureEmissionIE,
)
from .franceinter import FranceInterIE
from .francetv import (
PluzzIE,
FranceTvInfoIE,
FranceTVIE,
GenerationQuoiIE,
CultureboxIE,
)
from .freesound import FreesoundIE
from .freespeech import FreespeechIE
from .freevideo import FreeVideoIE
from .funimation import FunimationIE
from .funnyordie import FunnyOrDieIE
from .gameinformer import GameInformerIE
from .gamekings import GamekingsIE
from .gameone import (
GameOneIE,
GameOnePlaylistIE,
)
from .gamersyde import GamersydeIE
from .gamespot import GameSpotIE
from .gamestar import GameStarIE
from .gametrailers import GametrailersIE
from .gazeta import GazetaIE
from .gdcvault import GDCVaultIE
from .generic import GenericIE
from .gfycat import GfycatIE
from .giantbomb import GiantBombIE
from .giga import GigaIE
from .glide import GlideIE
from .globo import (
GloboIE,
GloboArticleIE,
)
from .godtube import GodTubeIE
from .goldenmoustache import GoldenMoustacheIE
from .golem import GolemIE
from .googledrive import GoogleDriveIE
from .googleplus import GooglePlusIE
from .googlesearch import GoogleSearchIE
from .goshgay import GoshgayIE
from .gputechconf import GPUTechConfIE
from .groupon import GrouponIE
from .hark import HarkIE
from .hearthisat import HearThisAtIE
from .heise import HeiseIE
from .hellporno import HellPornoIE
from .helsinki import HelsinkiIE
from .hentaistigma import HentaiStigmaIE
from .historicfilms import HistoricFilmsIE
from .history import HistoryIE
from .hitbox import HitboxIE, HitboxLiveIE
from .hornbunny import HornBunnyIE
from .hotnewhiphop import HotNewHipHopIE
from .hotstar import HotStarIE
from .howcast import HowcastIE
from .howstuffworks import HowStuffWorksIE
from .huffpost import HuffPostIE
from .hypem import HypemIE
from .iconosquare import IconosquareIE
from .ign import (
IGNIE,
OneUPIE,
PCMagIE,
)
from .imdb import (
ImdbIE,
ImdbListIE
)
from .imgur import (
ImgurIE,
ImgurAlbumIE,
)
from .ina import InaIE
from .indavideo import (
IndavideoIE,
IndavideoEmbedIE,
)
from .infoq import InfoQIE
from .instagram import InstagramIE, InstagramUserIE
from .internetvideoarchive import InternetVideoArchiveIE
from .iprima import IPrimaIE
from .iqiyi import IqiyiIE
from .ir90tv import Ir90TvIE
from .ivi import (
IviIE,
IviCompilationIE
)
from .ivideon import IvideonIE
from .izlesene import IzleseneIE
from .jadorecettepub import JadoreCettePubIE
from .jeuxvideo import JeuxVideoIE
from .jove import JoveIE
from .jwplatform import JWPlatformIE
from .jpopsukitv import JpopsukiIE
from .kaltura import KalturaIE
from .kanalplay import KanalPlayIE
from .kankan import KankanIE
from .karaoketv import KaraoketvIE
from .karrierevideos import KarriereVideosIE
from .keezmovies import KeezMoviesIE
from .khanacademy import KhanAcademyIE
from .kickstarter import KickStarterIE
from .keek import KeekIE
from .kontrtube import KontrTubeIE
from .krasview import KrasViewIE
from .ku6 import Ku6IE
from .kuwo import (
KuwoIE,
KuwoAlbumIE,
KuwoChartIE,
KuwoSingerIE,
KuwoCategoryIE,
KuwoMvIE,
)
from .la7 import LA7IE
from .laola1tv import Laola1TvIE
from .lecture2go import Lecture2GoIE
from .letv import (
LetvIE,
LetvTvIE,
LetvPlaylistIE
)
from .libsyn import LibsynIE
from .lifenews import (
LifeNewsIE,
LifeEmbedIE,
)
from .limelight import (
LimelightMediaIE,
LimelightChannelIE,
LimelightChannelListIE,
)
from .liveleak import LiveLeakIE
from .livestream import (
LivestreamIE,
LivestreamOriginalIE,
LivestreamShortenerIE,
)
from .lnkgo import LnkGoIE
from .lrt import LRTIE
from .lynda import (
LyndaIE,
LyndaCourseIE
)
from .m6 import M6IE
from .macgamestore import MacGameStoreIE
from .mailru import MailRuIE
from .makertv import MakerTVIE
from .malemotion import MalemotionIE
from .mdr import MDRIE
from .metacafe import MetacafeIE
from .metacritic import MetacriticIE
from .mgoon import MgoonIE
from .minhateca import MinhatecaIE
from .ministrygrid import MinistryGridIE
from .miomio import MioMioIE
from .mit import TechTVMITIE, MITIE, OCWMITIE
from .mitele import MiTeleIE
from .mixcloud import MixcloudIE
from .mlb import MLBIE
from .mpora import MporaIE
from .moevideo import MoeVideoIE
from .mofosex import MofosexIE
from .mojvideo import MojvideoIE
from .moniker import MonikerIE
from .mooshare import MooshareIE
from .morningstar import MorningstarIE
from .motherless import MotherlessIE
from .motorsport import MotorsportIE
from .movieclips import MovieClipsIE
from .moviezine import MoviezineIE
from .mtv import (
MTVIE,
MTVServicesEmbeddedIE,
MTVIggyIE,
MTVDEIE,
)
from .muenchentv import MuenchenTVIE
from .musicplayon import MusicPlayOnIE
from .muzu import MuzuTVIE
from .mwave import MwaveIE
from .myspace import MySpaceIE, MySpaceAlbumIE
from .myspass import MySpassIE
from .myvi import MyviIE
from .myvideo import MyVideoIE
from .myvidster import MyVidsterIE
from .nationalgeographic import NationalGeographicIE
from .naver import NaverIE
from .nba import NBAIE
from .nbc import (
NBCIE,
NBCNewsIE,
NBCSportsIE,
NBCSportsVPlayerIE,
MSNBCIE,
)
from .ndr import (
NDRIE,
NJoyIE,
NDREmbedBaseIE,
NDREmbedIE,
NJoyEmbedIE,
)
from .ndtv import NDTVIE
from .netzkino import NetzkinoIE
from .nerdcubed import NerdCubedFeedIE
from .nerdist import NerdistIE
from .neteasemusic import (
NetEaseMusicIE,
NetEaseMusicAlbumIE,
NetEaseMusicSingerIE,
NetEaseMusicListIE,
NetEaseMusicMvIE,
NetEaseMusicProgramIE,
NetEaseMusicDjRadioIE,
)
from .newgrounds import NewgroundsIE
from .newstube import NewstubeIE
from .nextmedia import (
NextMediaIE,
NextMediaActionNewsIE,
AppleDailyIE,
)
from .nextmovie import NextMovieIE
from .nfb import NFBIE
from .nfl import NFLIE
from .nhl import (
NHLIE,
NHLNewsIE,
NHLVideocenterIE,
)
from .nick import NickIE
from .niconico import NiconicoIE, NiconicoPlaylistIE
from .ninegag import NineGagIE
from .noco import NocoIE
from .normalboots import NormalbootsIE
from .nosvideo import NosVideoIE
from .nova import NovaIE
from .novamov import (
NovaMovIE,
WholeCloudIE,
NowVideoIE,
VideoWeedIE,
CloudTimeIE,
)
from .nowness import (
NownessIE,
NownessPlaylistIE,
NownessSeriesIE,
)
from .nowtv import (
NowTVIE,
NowTVListIE,
)
from .npo import (
NPOIE,
NPOLiveIE,
NPORadioIE,
NPORadioFragmentIE,
VPROIE,
WNLIE
)
from .npr import NprIE
from .nrk import (
NRKIE,
NRKPlaylistIE,
NRKTVIE,
)
from .ntvde import NTVDeIE
from .ntvru import NTVRuIE
from .nytimes import (
NYTimesIE,
NYTimesArticleIE,
)
from .nuvid import NuvidIE
from .odnoklassniki import OdnoklassnikiIE
from .oktoberfesttv import OktoberfestTVIE
from .onionstudios import OnionStudiosIE
from .ooyala import (
OoyalaIE,
OoyalaExternalIE,
)
from .ora import OraTVIE
from .orf import (
ORFTVthekIE,
ORFOE1IE,
ORFFM4IE,
ORFIPTVIE,
)
from .pandoratv import PandoraTVIE
from .parliamentliveuk import ParliamentLiveUKIE
from .patreon import PatreonIE
from .pbs import PBSIE
from .periscope import PeriscopeIE
from .philharmoniedeparis import PhilharmonieDeParisIE
from .phoenix import PhoenixIE
from .photobucket import PhotobucketIE
from .pinkbike import PinkbikeIE
from .planetaplay import PlanetaPlayIE
from .pladform import PladformIE
from .played import PlayedIE
from .playfm import PlayFMIE
from .playtvak import PlaytvakIE
from .playvid import PlayvidIE
from .playwire import PlaywireIE
from .pluralsight import (
PluralsightIE,
PluralsightCourseIE,
)
from .podomatic import PodomaticIE
from .porn91 import Porn91IE
from .pornhd import PornHdIE
from .pornhub import (
PornHubIE,
PornHubPlaylistIE,
)
from .pornotube import PornotubeIE
from .pornovoisines import PornoVoisinesIE
from .pornoxo import PornoXOIE
from .primesharetv import PrimeShareTVIE
from .promptfile import PromptFileIE
from .prosiebensat1 import ProSiebenSat1IE
from .puls4 import Puls4IE
from .pyvideo import PyvideoIE
from .qqmusic import (
QQMusicIE,
QQMusicSingerIE,
QQMusicAlbumIE,
QQMusicToplistIE,
QQMusicPlaylistIE,
)
from .quickvid import QuickVidIE
from .r7 import R7IE
from .radiode import RadioDeIE
from .radiojavan import RadioJavanIE
from .radiobremen import RadioBremenIE
from .radiofrance import RadioFranceIE
from .rai import (
RaiTVIE,
RaiIE,
)
from .rbmaradio import RBMARadioIE
from .rds import RDSIE
from .redtube import RedTubeIE
from .regiotv import RegioTVIE
from .restudy import RestudyIE
from .reverbnation import ReverbNationIE
from .revision3 import Revision3IE
from .ringtv import RingTVIE
from .ro220 import Ro220IE
from .rottentomatoes import RottenTomatoesIE
from .roxwel import RoxwelIE
from .rtbf import RTBFIE
from .rte import RteIE, RteRadioIE
from .rtlnl import RtlNlIE
from .rtl2 import RTL2IE
from .rtp import RTPIE
from .rts import RTSIE
from .rtve import RTVEALaCartaIE, RTVELiveIE, RTVEInfantilIE
from .rtvnh import RTVNHIE
from .ruhd import RUHDIE
from .rutube import (
RutubeIE,
RutubeChannelIE,
RutubeEmbedIE,
RutubeMovieIE,
RutubePersonIE,
)
from .rutv import RUTVIE
from .ruutu import RuutuIE
from .sandia import SandiaIE
from .safari import (
SafariIE,
SafariCourseIE,
)
from .sapo import SapoIE
from .savefrom import SaveFromIE
from .sbs import SBSIE
from .scivee import SciVeeIE
from .screencast import ScreencastIE
from .screencastomatic import ScreencastOMaticIE
from .screenwavemedia import ScreenwaveMediaIE, TeamFourIE
from .senateisvp import SenateISVPIE
from .servingsys import ServingSysIE
from .sexu import SexuIE
from .sexykarma import SexyKarmaIE
from .shahid import ShahidIE
from .shared import SharedIE
from .sharesix import ShareSixIE
from .sina import SinaIE
from .skynewsarabia import (
SkyNewsArabiaIE,
SkyNewsArabiaArticleIE,
)
from .slideshare import SlideshareIE
from .slutload import SlutloadIE
from .smotri import (
SmotriIE,
SmotriCommunityIE,
SmotriUserIE,
SmotriBroadcastIE,
)
from .snagfilms import (
SnagFilmsIE,
SnagFilmsEmbedIE,
)
from .snotr import SnotrIE
from .sohu import SohuIE
from .soundcloud import (
SoundcloudIE,
SoundcloudSetIE,
SoundcloudUserIE,
SoundcloudPlaylistIE,
SoundcloudSearchIE
)
from .soundgasm import (
SoundgasmIE,
SoundgasmProfileIE
)
from .southpark import (
SouthParkIE,
SouthParkDeIE,
SouthParkDkIE,
SouthParkEsIE,
SouthParkNlIE
)
from .space import SpaceIE
from .spankbang import SpankBangIE
from .spankwire import SpankwireIE
from .spiegel import SpiegelIE, SpiegelArticleIE
from .spiegeltv import SpiegeltvIE
from .spike import SpikeIE
from .stitcher import StitcherIE
from .sport5 import Sport5IE
from .sportbox import (
SportBoxIE,
SportBoxEmbedIE,
)
from .sportdeutschland import SportDeutschlandIE
from .srgssr import (
SRGSSRIE,
SRGSSRPlayIE,
)
from .srmediathek import SRMediathekIE
from .ssa import SSAIE
from .stanfordoc import StanfordOpenClassroomIE
from .steam import SteamIE
from .streamcloud import StreamcloudIE
from .streamcz import StreamCZIE
from .streetvoice import StreetVoiceIE
from .sunporno import SunPornoIE
from .svt import (
SVTIE,
SVTPlayIE,
)
from .swrmediathek import SWRMediathekIE
from .syfy import SyfyIE
from .sztvhu import SztvHuIE
from .tagesschau import TagesschauIE
from .tapely import TapelyIE
from .tass import TassIE
from .teachertube import (
TeacherTubeIE,
TeacherTubeUserIE,
)
from .teachingchannel import TeachingChannelIE
from .teamcoco import TeamcocoIE
from .techtalks import TechTalksIE
from .ted import TEDIE
from .tele13 import Tele13IE
from .telebruxelles import TeleBruxellesIE
from .telecinco import TelecincoIE
from .telegraaf import TelegraafIE
from .telemb import TeleMBIE
from .teletask import TeleTaskIE
from .tenplay import TenPlayIE
from .testurl import TestURLIE
from .tf1 import TF1IE
from .theintercept import TheInterceptIE
from .theonion import TheOnionIE
from .theplatform import (
ThePlatformIE,
ThePlatformFeedIE,
)
from .thesixtyone import TheSixtyOneIE
from .thisamericanlife import ThisAmericanLifeIE
from .thisav import ThisAVIE
from .tinypic import TinyPicIE
from .tlc import TlcDeIE
from .tmz import (
TMZIE,
TMZArticleIE,
)
from .tnaflix import (
TNAFlixIE,
EMPFlixIE,
MovieFapIE,
)
from .toggle import ToggleIE
from .thvideo import (
THVideoIE,
THVideoPlaylistIE
)
from .toutv import TouTvIE
from .toypics import ToypicsUserIE, ToypicsIE
from .traileraddict import TrailerAddictIE
from .trilulilu import TriluliluIE
from .trutube import TruTubeIE
from .tube8 import Tube8IE
from .tubitv import TubiTvIE
from .tudou import TudouIE
from .tumblr import TumblrIE
from .tunein import (
TuneInClipIE,
TuneInStationIE,
TuneInProgramIE,
TuneInTopicIE,
TuneInShortenerIE,
)
from .turbo import TurboIE
from .tutv import TutvIE
from .tv2 import (
TV2IE,
TV2ArticleIE,
)
from .tv4 import TV4IE
from .tvc import (
TVCIE,
TVCArticleIE,
)
from .tvigle import TvigleIE
from .tvland import TVLandIE
from .tvp import TvpIE, TvpSeriesIE
from .tvplay import TVPlayIE
from .tweakers import TweakersIE
from .twentyfourvideo import TwentyFourVideoIE
from .twentymin import TwentyMinutenIE
from .twentytwotracks import (
TwentyTwoTracksIE,
TwentyTwoTracksGenreIE
)
from .twitch import (
TwitchVideoIE,
TwitchChapterIE,
TwitchVodIE,
TwitchProfileIE,
TwitchPastBroadcastsIE,
TwitchBookmarksIE,
TwitchStreamIE,
)
from .twitter import TwitterCardIE, TwitterIE
from .ubu import UbuIE
from .udemy import (
UdemyIE,
UdemyCourseIE
)
from .udn import UDNEmbedIE
from .ultimedia import UltimediaIE
from .unistra import UnistraIE
from .urort import UrortIE
from .ustream import UstreamIE, UstreamChannelIE
from .varzesh3 import Varzesh3IE
from .vbox7 import Vbox7IE
from .veehd import VeeHDIE
from .veoh import VeohIE
from .vessel import VesselIE
from .vesti import VestiIE
from .vevo import VevoIE
from .vgtv import (
BTArticleIE,
BTVestlendingenIE,
VGTVIE,
)
from .vh1 import VH1IE
from .vice import ViceIE
from .viddler import ViddlerIE
from .videodetective import VideoDetectiveIE
from .videofyme import VideofyMeIE
from .videomega import VideoMegaIE
from .videomore import (
VideomoreIE,
VideomoreVideoIE,
VideomoreSeasonIE,
)
from .videopremium import VideoPremiumIE
from .videott import VideoTtIE
from .vidme import VidmeIE
from .vidzi import VidziIE
from .vier import VierIE, VierVideosIE
from .viewster import ViewsterIE
from .viidea import ViideaIE
from .vimeo import (
VimeoIE,
VimeoAlbumIE,
VimeoChannelIE,
VimeoGroupsIE,
VimeoLikesIE,
VimeoReviewIE,
VimeoUserIE,
VimeoWatchLaterIE,
)
from .vimple import VimpleIE
from .vine import (
VineIE,
VineUserIE,
)
from .viki import (
VikiIE,
VikiChannelIE,
)
from .vk import (
VKIE,
VKUserVideosIE,
)
from .vlive import VLiveIE
from .vodlocker import VodlockerIE
from .voicerepublic import VoiceRepublicIE
from .vporn import VpornIE
from .vrt import VRTIE
from .vube import VubeIE
from .vuclip import VuClipIE
from .vulture import VultureIE
from .walla import WallaIE
from .washingtonpost import WashingtonPostIE
from .wat import WatIE
from .wayofthemaster import WayOfTheMasterIE
from .wdr import (
WDRIE,
WDRMobileIE,
WDRMausIE,
)
from .webofstories import (
WebOfStoriesIE,
WebOfStoriesPlaylistIE,
)
from .weibo import WeiboIE
from .wimp import WimpIE
from .wistia import WistiaIE
from .worldstarhiphop import WorldStarHipHopIE
from .wrzuta import WrzutaIE
from .wsj import WSJIE
from .xbef import XBefIE
from .xboxclips import XboxClipsIE
from .xfileshare import XFileShareIE
from .xhamster import (
XHamsterIE,
XHamsterEmbedIE,
)
from .xminus import XMinusIE
from .xnxx import XNXXIE
from .xstream import XstreamIE
from .xtube import XTubeUserIE, XTubeIE
from .xuite import XuiteIE
from .xvideos import XVideosIE
from .xxxymovies import XXXYMoviesIE
from .yahoo import (
YahooIE,
YahooSearchIE,
)
from .yam import YamIE
from .yandexmusic import (
YandexMusicTrackIE,
YandexMusicAlbumIE,
YandexMusicPlaylistIE,
)
from .yesjapan import YesJapanIE
from .yinyuetai import YinYueTaiIE
from .ynet import YnetIE
from .youjizz import YouJizzIE
from .youku import YoukuIE
from .youporn import YouPornIE
from .yourupload import YourUploadIE
from .youtube import (
YoutubeIE,
YoutubeChannelIE,
YoutubeFavouritesIE,
YoutubeHistoryIE,
YoutubePlaylistIE,
YoutubeRecommendedIE,
YoutubeSearchDateIE,
YoutubeSearchIE,
YoutubeSearchURLIE,
YoutubeShowIE,
YoutubeSubscriptionsIE,
YoutubeTruncatedIDIE,
YoutubeTruncatedURLIE,
YoutubeUserIE,
YoutubePlaylistsIE,
YoutubeWatchLaterIE,
)
from .zapiks import ZapiksIE
from .zdf import ZDFIE, ZDFChannelIE
from .zingmp3 import (
ZingMp3SongIE,
ZingMp3AlbumIE,
)
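

# Collect every extractor class imported above (by convention their names
# end in 'IE'), leaving GenericIE out so it can be appended last: it matches
# almost any URL and must only be tried after the specialized extractors.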
_ALL_CLASSES = [
klass
for name, klass in globals().items()
if name.endswith('IE') and name != 'GenericIE'
]
_ALL_CLASSES.append(GenericIE)


def gen_extractors():
""" Return a list of an instance of every supported extractor.
The order does matter; the first extractor matched is the one handling the URL.
"""
    return [klass() for klass in _ALL_CLASSES]


def list_extractors(age_limit):
"""
Return a list of extractors that are suitable for the given age,
sorted by extractor ID.
"""
return sorted(
filter(lambda ie: ie.is_suitable(age_limit), gen_extractors()),
        key=lambda ie: ie.IE_NAME.lower())


def get_info_extractor(ie_name):
    """Return the info extractor class with the given ie_name."""
return globals()[ie_name + 'IE']
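

# A minimal usage sketch (assumed, not part of the original module): this is
# roughly how a caller such as youtube-dl's YoutubeDL consumes the registry.
# The URL below is illustrative only.
#
#     for ie in gen_extractors():
#         if ie.suitable('https://www.youtube.com/watch?v=example'):
#             print(ie.IE_NAME)
#             break
#
#     YoutubeIE = get_info_extractor('Youtube')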
|
MarkRunWu/buck | refs/heads/master | scripts/verify-javadoc.py | 24 | #!/usr/bin/env python
#
# Examines the output from running Javadoc via Ant and checks to see if any
# warnings were emitted. If so, fail the build unless the warning is in the
# whitelist. When run in a CI build, Ant may not be able to reach external
# URLs, so warnings about errors fetching expected URLs should be ignored.

import sys

WARNING_WHITELIST = frozenset(map(
lambda url: ' [javadoc] javadoc: warning - Error fetching URL: ' + url,
[
'http://docs.oracle.com/javase/7/docs/api/package-list',
'https://junit-team.github.io/junit/javadoc/latest/package-list',
    ]) + [' [javadoc] 2 warnings'])


def main(log_file):
"""Exit with a non-zero return code if line is not in the warning whitelist."""
errors = []
with open(log_file) as f:
        for line in f:
line = line.rstrip()
# If there is a warning from `javadoc`, check whether it is in the whitelist.
if 'warning' in line.lower() and line not in WARNING_WHITELIST:
errors.append(line)
    if errors:
print 'Unexpected Javadoc errors (%d):' % len(errors)
for error in errors:
print error
        sys.exit(1)


if __name__ == '__main__':
main(sys.argv[1])
|