repo_name (stringlengths 6-100) | path (stringlengths 4-294) | copies (stringlengths 1-5) | size (stringlengths 4-6) | content (stringlengths 606-896k) | license (stringclasses, 15 values)
---|---|---|---|---|---
chekunkov/scrapy
|
tests/test_proxy_connect.py
|
15
|
3897
|
import json
import os
import time
from threading import Thread
from libmproxy import controller, proxy
from netlib import http_auth
from twisted.internet import defer
from twisted.trial.unittest import TestCase
from scrapy.utils.test import get_testlog, get_crawler
from scrapy.http import Request
from tests.spiders import SimpleSpider, SingleRequestSpider
from tests.mockserver import MockServer
class HTTPSProxy(controller.Master, Thread):
def __init__(self, port):
password_manager = http_auth.PassManSingleUser('scrapy', 'scrapy')
authenticator = http_auth.BasicProxyAuth(password_manager, "mitmproxy")
cert_path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'keys', 'mitmproxy-ca.pem')
server = proxy.ProxyServer(proxy.ProxyConfig(
authenticator=authenticator,
cacert=cert_path),
port)
Thread.__init__(self)
controller.Master.__init__(self, server)
class ProxyConnectTestCase(TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
self._oldenv = os.environ.copy()
self._proxy = HTTPSProxy(8888)
self._proxy.start()
# Wait for the proxy to start.
time.sleep(1.0)
os.environ['http_proxy'] = 'http://scrapy:scrapy@localhost:8888'
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888'
def tearDown(self):
self.mockserver.__exit__(None, None, None)
self._proxy.shutdown()
os.environ = self._oldenv
@defer.inlineCallbacks
def test_https_connect_tunnel(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200)
@defer.inlineCallbacks
def test_https_noconnect(self):
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888?noconnect'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(200)
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888'
@defer.inlineCallbacks
def test_https_connect_tunnel_error(self):
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:99999/status?n=200")
self._assert_got_tunnel_error()
@defer.inlineCallbacks
def test_https_tunnel_auth_error(self):
os.environ['https_proxy'] = 'http://wrong:wronger@localhost:8888'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
# The proxy returns a 407 error code, but it never reaches the client;
# the client just sees a TunnelError.
self._assert_got_tunnel_error()
os.environ['https_proxy'] = 'http://scrapy:scrapy@localhost:8888'
@defer.inlineCallbacks
def test_https_tunnel_without_leak_proxy_authorization_header(self):
request = Request("https://localhost:8999/echo")
crawler = get_crawler(SingleRequestSpider)
yield crawler.crawl(seed=request)
self._assert_got_response_code(200)
echo = json.loads(crawler.spider.meta['responses'][0].body)
self.assertTrue('Proxy-Authorization' not in echo['headers'])
@defer.inlineCallbacks
def test_https_noconnect_auth_error(self):
os.environ['https_proxy'] = 'http://wrong:wronger@localhost:8888?noconnect'
crawler = get_crawler(SimpleSpider)
yield crawler.crawl("https://localhost:8999/status?n=200")
self._assert_got_response_code(407)
def _assert_got_response_code(self, code):
log = get_testlog()
self.assertEqual(log.count('Crawled (%d)' % code), 1)
def _assert_got_tunnel_error(self):
log = get_testlog()
self.assertEqual(log.count('TunnelError'), 1)
|
bsd-3-clause
|
jlegendary/orange
|
Orange/orng/orngDimRed.py
|
6
|
4948
|
#
# Module Orange Dimension Reduction
# ---------------------------------
#
# CVS Status: $Id$
#
# Author: Aleks Jakulin ([email protected])
# (Copyright (C)2004 Aleks Jakulin)
#
# Purpose: Dimension reduction
#
# Bibliography: Tom Minka, "36-350: Data Mining, Fall 2003", Lecture Notes, Carnegie Mellon University.
#
# ChangeLog:
# - 2003/10/28: project initiated
# - 2003/11/20: returning the parameters of the transform
import numpy
import numpy.linalg as LinearAlgebra
# before running PCA, it is helpful to apply the transformation
# operators on individual vectors.
class PCA:
def __init__(self, data, components=1):
(u,d,v) = LinearAlgebra.svd(data)
self.loading = u # transformed data points
self.variance = d # principal components' variance
self.factors = v # the principal basis
d2 = numpy.power(d,2)
s = numpy.sum(d2)
if s > 1e-6:
s = d2/s
else:
s = 1.0
self.R_squared = s # percentage of total variance explained by individual components
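# Illustrative sketch (not part of the original module): typical use is to
# centre the rows first (see Centering below), then inspect the fitted parts.
#   p = PCA(centred_data, components=2)  # centred_data: observations x features
#   p.factors                            # the principal basis
#   p.R_squared                          # variance fraction per component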
def Centering(vector, m = None, inverse=0):
assert(len(numpy.shape(vector))==1) # this must be a vector
if m is None:
m = numpy.average(vector)
if inverse==0:
return (vector-m,m)
else:
return vector+m
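# For example (illustrative): Centering returns the shifted vector together
# with the mean, so the transform can be inverted later.
#   (v, m) = Centering(numpy.array([1.0, 2.0, 3.0]))  # v == [-1., 0., 1.], m == 2.0
#   Centering(v, m, inverse=1)                        # recovers [1., 2., 3.]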
def MaxScaling(vector, param = None):
if param is None:
(v,m) = Centering(vector)
s = max(abs(v))
if s > 1e-6:
s = 1.0/s
else:
(m,s) = param
(v,m_) = Centering(vector,m)
return (v*s,(m,s))
def VarianceScaling(vector,param=None,inverse=0):
if param is None:
(v,m) = Centering(vector)
s = numpy.sqrt(numpy.average(numpy.power(v,2)))
if s > 1e-6:
s = 1.0/s
else:
(m,s) = param
if inverse == 0:
(v,m_) = Centering(vector,m)
else:
v = Centering(vector,m,1)
if inverse == 0:
return (s*v,(m,s))
else:
return s/v
def _BC(vector,lambd):
if lambd != 0.0:
return (numpy.power(vector,lambd)-1)/lambd
else:
return numpy.log(vector)
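# For example (illustrative): _BC is the Box-Cox transform,
#   _BC(v, 1.0) == v - 1          # identity up to a shift
#   _BC(v, 0.0) == numpy.log(v)   # the log limit as lambda -> 0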
class _BCskewness:
def __init__(self,vector):
self.v = vector
def __call__(self,lambd):
nv = _BC(self.v,lambd)
mean = numpy.average(nv)
cv = nv-mean
skewness = numpy.average(numpy.power(cv,3))/numpy.power(numpy.average(numpy.power(cv,2)),1.5)
# kurtosis = numpy.average(numpy.power(cv,4))/numpy.power(numpy.average(numpy.power(cv,2)),2)-3
return skewness**2
# def BoxCoxTransform(vector,lambd=None):
# v = -min(vector)+1+vector
# print "shifting by ",-min(vector)+1
# if lambd==None:
# # find the value of lambda that will minimize skew
# lambd = mathutil.minimum(_BCskewness(v))
# print "best-fitting lambda = ",lambd
# return _BC(v,lambd)
def RankConversion(vector,reverse=0):
assert(len(numpy.shape(vector))==1) # this must be a vector
newv = numpy.zeros(numpy.size(vector),numpy.float)
l = []
for x in xrange(len(vector)):
l.append((vector[x],x))
l.sort()
if reverse:
l.reverse()
pi = -1
pv = 'a'
idx = []
pr = 0
cr = 0
for (v,i) in l:
if v != pv:
r = pr+(cr-pr+1)/2.0
for j in idx:
newv[j] = r
idx = []
pr = cr
pv = v
cr += 1
idx.append(i)
r = pr+(cr-pr+1)/2.0
for j in idx:
newv[j] = r
return newv
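# For example (illustrative): tied values share the average of their ranks,
#   RankConversion(numpy.array([10., 20., 20., 30.]))  # -> [1., 2.5, 2.5, 4.]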
if __name__== "__main__":
v = numpy.array([6, 6, 6, 6, 4, 6, 12, 12, 12, 4, 4, 4, 6, 6, 8, 6, 8, 8, 8, 4, 4, 8, 8, 8, 6, 6, 6, 6, 6, 6, 8, 8, 6, 6, 8, 6, 6, 8, 6, 6, 6, 6, 6, 6, 8, 8, 8, 8, 8, 6, 6, 8, 6, 6, 4, 4, 8, 8, 8, 6, 6, 6, 6, 6, 6, 4, 6, 8, 8, 8, 8, 8, 8, 8, 8, 4, 6, 6, 6, 6, 6, 6, 4, 6, 4, 4, 6, 6, 6, 6, 8, 6, 6, 4, 6, 6, 6, 8, 8, 8, 5, 5, 6, 6, 10, 8, 12, 12, 12, 8, 6, 6, 8, 8, 6, 4, 8, 8, 6, 6, 6, 8, 8, 8, 8, 4, 4, 4, 6, 6, 6, 6, 6, 8, 6, 6, 6, 6, 6, 6, 8, 6, 6, 6, 6, 8, 8, 8, 8, 4, 8, 8, 4, 4, 4, 4, 4, 4, 3, 6, 6, 4, 8, 8, 4, 4, 4, 4, 4, 4, 4, 6, 6, 8, 6, 6, 6, 8, 8, 6, 6, 6, 4, 4, 8, 6, 8, 8, 8, 6, 6, 6, 4, 4, 4, 6, 6, 4, 4, 12, 8, 6, 8, 6, 6, 8, 8, 6, 6, 8, 8, 6, 8, 8, 6, 8, 8, 8, 8, 4, 4, 6, 4, 4, 4, 4, 4, 4, 4, 6, 8, 6, 6, 6, 6, 8, 6, 8, 8, 4, 8, 8, 6, 6, 6, 4, 6, 4, 4, 4, 4, 4, 6, 6, 4, 6, 4, 6, 6, 6, 6, 4, 6, 4, 4, 8, 6, 6, 8, 6, 6, 6, 6, 6, 6, 6, 4, 4, 6, 6, 6, 8, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 6, 4, 4, 4, 6, 4, 6, 6, 6, 4, 4, 4, 4, 4, 4, 4, 4, 8, 4, 4, 4, 4, 8, 6, 4, 6, 6, 4, 4, 4, 4, 4, 4, 4, 4, 5, 4, 5, 6, 4, 5, 5, 5], numpy.float)
print "original:"
print v
print "rank-transformed:"
print RankConversion(v)
print "centered"
print Centering(v)
print "minmax scaled"
print MaxScaling(v)
print "variance scaling"
print VarianceScaling(v)
# print "Box-Cox"
# print BoxCoxTransform(v)
|
gpl-3.0
|
SatoshiNXSimudrone/sl4a-damon-clone
|
python-build/python-libs/gdata/tests/run_data_tests.py
|
87
|
1858
|
#!/usr/bin/python
import sys
import unittest
import module_test_runner
import getopt
import getpass
# Modules whose tests we will run.
import gdata_test
import atom_test
import atom_tests.http_interface_test
import atom_tests.mock_http_test
import atom_tests.token_store_test
import atom_tests.url_test
import atom_tests.core_test
import gdata_tests.apps_test
import gdata_tests.auth_test
import gdata_tests.base_test
import gdata_tests.books_test
import gdata_tests.blogger_test
import gdata_tests.calendar_test
import gdata_tests.client_test
import gdata_tests.codesearch_test
import gdata_tests.contacts_test
import gdata_tests.docs_test
import gdata_tests.health_test
import gdata_tests.photos_test
import gdata_tests.spreadsheet_test
import gdata_tests.youtube_test
import gdata_tests.webmastertools_test
def RunAllTests():
test_runner = module_test_runner.ModuleTestRunner()
test_runner.modules = [gdata_test, atom_test, atom_tests.url_test,
atom_tests.http_interface_test,
atom_tests.mock_http_test,
atom_tests.core_test,
atom_tests.token_store_test,
gdata_tests.client_test,
gdata_tests.apps_test, gdata_tests.auth_test,
gdata_tests.base_test, gdata_tests.books_test,
gdata_tests.calendar_test, gdata_tests.docs_test,
gdata_tests.health_test, gdata_tests.spreadsheet_test,
gdata_tests.photos_test, gdata_tests.codesearch_test,
gdata_tests.contacts_test,
gdata_tests.youtube_test, gdata_tests.blogger_test,
gdata_tests.webmastertools_test]
test_runner.RunAllTests()
if __name__ == '__main__':
RunAllTests()
|
apache-2.0
|
JioEducation/edx-platform
|
lms/djangoapps/lti_provider/tests/test_views.py
|
9
|
7546
|
"""
Tests for the LTI provider views
"""
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import RequestFactory
from mock import patch, MagicMock
from nose.plugins.attrib import attr
from courseware.testutils import RenderXBlockTestMixin
from lti_provider import views, models
from opaque_keys.edx.locator import CourseLocator, BlockUsageLocator
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
LTI_DEFAULT_PARAMS = {
'roles': u'Instructor,urn:lti:instrole:ims/lis/Administrator',
'context_id': u'lti_launch_context_id',
'oauth_version': u'1.0',
'oauth_consumer_key': u'consumer_key',
'oauth_signature': u'OAuth Signature',
'oauth_signature_method': u'HMAC-SHA1',
'oauth_timestamp': u'OAuth Timestamp',
'oauth_nonce': u'OAuth Nonce',
'user_id': u'LTI_User',
}
LTI_OPTIONAL_PARAMS = {
'lis_result_sourcedid': u'result sourcedid',
'lis_outcome_service_url': u'outcome service URL',
'tool_consumer_instance_guid': u'consumer instance guid'
}
COURSE_KEY = CourseLocator(org='some_org', course='some_course', run='some_run')
USAGE_KEY = BlockUsageLocator(course_key=COURSE_KEY, block_type='problem', block_id='block_id')
COURSE_PARAMS = {
'course_key': COURSE_KEY,
'usage_key': USAGE_KEY
}
ALL_PARAMS = dict(LTI_DEFAULT_PARAMS.items() + COURSE_PARAMS.items())
def build_launch_request(authenticated=True):
"""
Helper method to create a new request object for the LTI launch.
"""
request = RequestFactory().post('/')
request.user = UserFactory.create()
request.user.is_authenticated = MagicMock(return_value=authenticated)
request.session = {}
request.POST.update(LTI_DEFAULT_PARAMS)
return request
class LtiTestMixin(object):
"""
Mixin for LTI tests
"""
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': True})
def setUp(self):
super(LtiTestMixin, self).setUp()
# Always accept the OAuth signature
self.mock_verify = MagicMock(return_value=True)
patcher = patch('lti_provider.signature_validator.SignatureValidator.verify', self.mock_verify)
patcher.start()
self.addCleanup(patcher.stop)
self.consumer = models.LtiConsumer(
consumer_name='consumer',
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key'],
consumer_secret='secret'
)
self.consumer.save()
class LtiLaunchTest(LtiTestMixin, TestCase):
"""
Tests for the lti_launch view
"""
@patch('lti_provider.views.render_courseware')
@patch('lti_provider.views.authenticate_lti_user')
def test_valid_launch(self, _authenticate, render):
"""
Verifies that the LTI launch succeeds when passed a valid request.
"""
request = build_launch_request()
views.lti_launch(request, unicode(COURSE_KEY), unicode(USAGE_KEY))
render.assert_called_with(request, USAGE_KEY)
@patch('lti_provider.views.render_courseware')
@patch('lti_provider.views.store_outcome_parameters')
@patch('lti_provider.views.authenticate_lti_user')
def test_outcome_service_registered(self, _authenticate, store_params, _render):
"""
Verifies that the outcome service parameters are stored when the LTI
launch receives a valid request.
"""
request = build_launch_request()
views.lti_launch(
request,
unicode(COURSE_PARAMS['course_key']),
unicode(COURSE_PARAMS['usage_key'])
)
store_params.assert_called_with(ALL_PARAMS, request.user, self.consumer)
def launch_with_missing_parameter(self, missing_param):
"""
Helper method to remove a parameter from the LTI launch and call the view
"""
request = build_launch_request()
del request.POST[missing_param]
return views.lti_launch(request, None, None)
def test_launch_with_missing_parameters(self):
"""
Runs through all required LTI parameters and verifies that the lti_launch
view returns Bad Request if any of them are missing.
"""
for missing_param in views.REQUIRED_PARAMETERS:
response = self.launch_with_missing_parameter(missing_param)
self.assertEqual(
response.status_code, 400,
'Launch should fail when parameter ' + missing_param + ' is missing'
)
def test_launch_with_disabled_feature_flag(self):
"""
Verifies that the LTI launch will fail if the ENABLE_LTI_PROVIDER flag
is not set
"""
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_LTI_PROVIDER': False}):
request = build_launch_request()
response = views.lti_launch(request, None, None)
self.assertEqual(response.status_code, 403)
def test_forbidden_if_signature_fails(self):
"""
Verifies that the view returns Forbidden if the LTI OAuth signature is
incorrect.
"""
self.mock_verify.return_value = False
request = build_launch_request()
response = views.lti_launch(request, None, None)
self.assertEqual(response.status_code, 403)
@patch('lti_provider.views.render_courseware')
def test_lti_consumer_record_supplemented_with_guid(self, _render):
self.mock_verify.return_value = False
request = build_launch_request()
request.POST.update(LTI_OPTIONAL_PARAMS)
with self.assertNumQueries(3):
views.lti_launch(request, None, None)
consumer = models.LtiConsumer.objects.get(
consumer_key=LTI_DEFAULT_PARAMS['oauth_consumer_key']
)
self.assertEqual(consumer.instance_guid, u'consumer instance guid')
@attr('shard_3')
class LtiLaunchTestRender(LtiTestMixin, RenderXBlockTestMixin, ModuleStoreTestCase):
"""
Tests for the rendering returned by lti_launch view.
This class overrides the get_response method, which is used by
the tests defined in RenderXBlockTestMixin.
"""
def get_response(self, url_encoded_params=None):
"""
Overridable method to get the response from the endpoint that is being tested.
"""
lti_launch_url = reverse(
'lti_provider_launch',
kwargs={
'course_id': unicode(self.course.id),
'usage_id': unicode(self.html_block.location)
}
)
if url_encoded_params:
lti_launch_url += '?' + url_encoded_params
return self.client.post(lti_launch_url, data=LTI_DEFAULT_PARAMS)
# The following test methods override the base tests for verifying access
# by unenrolled and unauthenticated students, since there is a discrepancy
# of access rules between the 2 endpoints (LTI and xBlock_render).
# TODO fix this access discrepancy to the same underlying data.
def test_unenrolled_student(self):
"""
Override since LTI allows access to unenrolled students.
"""
self.setup_course()
self.setup_user(admin=False, enroll=False, login=True)
self.verify_response()
def test_unauthenticated(self):
"""
Override since LTI allows access to unauthenticated users.
"""
self.setup_course()
self.setup_user(admin=False, enroll=True, login=False)
self.verify_response()
|
agpl-3.0
|
foospidy/DbDat
|
plugins/mysql/check_privilege_super.py
|
1
|
1062
|
class check_privilege_super():
"""
check_privilege_super:
Lists accounts that have the SUPER privilege. Do not grant it to non-admin users.
"""
# References:
# https://benchmarks.cisecurity.org/downloads/show-single/index.cfm?file=mysql.102
TITLE = 'SUPER Privilege'
CATEGORY = 'Privilege'
TYPE = 'sql'
SQL = "SELECT user, host FROM mysql.user WHERE Super_priv='Y'"
verbose = False
skip = False
result = {}
def do_check(self, *results):
if not self.skip:
output = ''
self.result['level'] = 'GREEN'
for rows in results:
for row in rows:
self.result['level'] = 'RED'
output += row[0] + '\t' + row[1] + '\n'
if 'GREEN' == self.result['level']:
output = 'No users found with SUPER privilege.'
self.result['output'] = output
return self.result
def __init__(self, parent):
print('Performing check: ' + self.TITLE)
|
gpl-2.0
|
spudmind/parlparse
|
members/wikipedia-commons.py
|
1
|
2989
|
#!/usr/bin/env python
# -*- coding: latin-1 -*-
# $Id: bbcconv.py,v 1.4 2005/03/25 23:33:35 theyworkforyou Exp $
# Screen scrape list of links to Lords on Wikipedia, so we can link to the articles.
# The Public Whip, Copyright (C) 2003 Francis Irving and Julian Todd
# This is free software, and you are welcome to redistribute it under
# certain conditions. However, it comes with ABSOLUTELY NO WARRANTY.
# For details see the file LICENSE.html in the top level of the source.
import datetime
import sys
import urllib
import urlparse
import re
# import sets
sys.path.append("../pyscraper")
sys.path.append("../pyscraper/lords")
from resolvemembernames import memberList
# Get region pages
wiki_index_url = "http://en.wikipedia.org/wiki/MPs_elected_in_the_UK_general_election,_2005"
date_parl = {
1997: '1999-01-01',
2001: '2003-01-01',
2005: '2007-08-01',
2010: datetime.date.today().isoformat()
}
wikimembers = {}
# Grab page
for year in (1997, 2001, 2005, 2010):
ur = open('../rawdata/Members_of_the_House_of_Commons_%d' % year)
content = ur.read()
ur.close()
# <tr>
#<td><a href="/wiki/West_Ham_%28UK_Parliament_constituency%29" title="West Ham (UK Parliament constituency)">West Ham</a></td>
#<td><a href="/wiki/Lyn_Brown" title="Lyn Brown">Lyn Brown</a></td>
#<td>Labour</td>
matcher = '<tr>\s+<td><a href="/wiki/[^"]+" [^>]*?title="[^"]+">([^<]+)</a>(?:<br />\s+<small>.*?</small>)?</td>\s+(?:<td style="[^"]*"></td>\s*<td[^>]*><a[^>]*>[^<]*</a></td>\s*<td style="[^"]*"></td>\s*)?<td>(?:Dr |Sir |The Rev\. )?<a href="(/wiki/[^"]+)" [^>]*?title="[^"]+"[^>]*>([^<]+)</a>(?: \(.*?\))?</td>|by-election,[^"]+">([^<]+)</a> [^ ]{1,3} <a href="(/wiki/[^"]+)" title="[^"]+">([^<]+)</a>';
matches = re.findall(matcher, content)
for (cons, url, name, cons2, url2, name2) in matches:
id = None
if cons2:
cons = cons2
name = name2
url = url2
cons = cons.decode('utf-8')
cons = cons.replace('&amp;', '&')
name = name.decode('utf-8')
try:
(id, canonname, canoncons) = memberList.matchfullnamecons(name, cons, date_parl[year])
except Exception, e:
print >>sys.stderr, e
if not id:
continue
pid = memberList.membertoperson(id)
wikimembers[pid] = url
print '''<?xml version="1.0" encoding="ISO-8859-1"?>
<publicwhip>'''
k = wikimembers.keys()
k.sort()
for id in k:
url = urlparse.urljoin(wiki_index_url, wikimembers[id])
print '<personinfo id="%s" wikipedia_url="%s" />' % (id, url)
print '</publicwhip>'
#wikimembers = sets.Set(wikimembers.keys())
#print "len: ", len(wikimembers)
# Check we have everybody -- ha! not likely yet
#allmembers = sets.Set(memberList.currentmpslist())
#symdiff = allmembers.symmetric_difference(wikimembers)
#if len(symdiff) > 0:
# print >>sys.stderr, "Failed to get all MPs, these ones in symmetric difference"
# print >>sys.stderr, symdiff
|
agpl-3.0
|
zeaphoo/cocopot
|
tests/test_response.py
|
2
|
6988
|
import pytest
from cocopot.response import Response, make_response, redirect, jsonify
from cocopot.datastructures import MultiDict
from cocopot.http import parse_date
from cocopot.exceptions import BadRequest
from cocopot.utils import json
import copy
import datetime
def test_basic_response():
r = make_response('text')
assert r.body == 'text'
assert r.status_line == '200 OK'
assert r.status_code == 200
assert r.charset.lower() == 'utf-8'
r = make_response('redirect', 302)
assert r.status_line == '302 Found'
assert r.status_code == 302
r = make_response('', 999)
assert r.status_line == '999 Unknown'
assert r.status_code == 999
with pytest.raises(ValueError):
r = make_response('', 1099)
with pytest.raises(ValueError):
r = make_response('', 99)
r = make_response('', '999 Who knows?') # Illegal, but acceptable three digit code
assert r.status_line == '999 Who knows?'
assert r.status_code == 999
with pytest.raises(ValueError):
r = make_response(None)
with pytest.raises(ValueError):
r = make_response('', '555')
assert r.status_line == '999 Who knows?'
assert r.status_code == 999
r = make_response('', [('Custom-Header', 'custom-value')])
assert r.status_code == 200
assert 'Custom-Header' in r
with pytest.raises(ValueError):
r = make_response(object())
r0 = make_response('text')
r = make_response(r0, 200, [('Custom-Header', 'custom-value')])
assert r.status_code == 200
assert 'Custom-Header' in r
r0 = make_response('text')
r = make_response(r0, '200 OK', {'Custom-Header':'custom-value'})
assert r.status_code == 200
assert 'Custom-Header' in r
assert r.get_header('Custom-Header') == 'custom-value'
assert 'Custom-Header' in dict(r.iter_headers())
assert r.status_line == '200 OK'
r.set_cookie('name1', 'value')
r1 = r.copy()
assert r1.status_line == r.status_line
assert r1.headers == r.headers
assert r1.body == r.body
assert repr(r1) == repr(r)
r = make_response('', 304)
assert r.status_code == 304
assert 'Content-Type' not in dict(r.iter_headers())
r = make_response(BadRequest(''))
assert r.status_code == 400
def test_jsonify():
r = jsonify(username='admin',
email='admin@localhost',
id=42)
assert r.status_code == 200
assert r.headers['Content-Type'] == 'application/json'
assert json.loads(r.body)['username'] == 'admin'
def test_set_cookie():
r = Response()
r.set_cookie('name1', 'value', max_age=5)
r.set_cookie('name2', 'value 2', path='/foo')
r.set_cookie('name4', 'value4', secret=True)
with pytest.raises(TypeError):
r.set_cookie('name3', 3)
cookies = [value for name, value in r.headerlist
if name.title() == 'Set-Cookie']
cookies.sort()
assert cookies[0] == 'name1=value; Max-Age=5'
assert cookies[1] == 'name2="value 2"; Path=/foo'
def test_set_cookie_maxage():
import datetime
r = Response()
r.set_cookie('name1', 'value', max_age=5)
r.set_cookie('name2', 'value', max_age=datetime.timedelta(days=1))
cookies = sorted([value for name, value in r.headerlist
if name.title() == 'Set-Cookie'])
assert cookies[0] == 'name1=value; Max-Age=5'
assert cookies[1] == 'name2=value; Max-Age=86400'
def test_set_cookie_expires():
r = Response()
r.set_cookie('name1', 'value', expires=42)
r.set_cookie('name2', 'value', expires=datetime.datetime(1970,1,1,0,0,43))
cookies = sorted([value for name, value in r.headerlist
if name.title() == 'Set-Cookie'])
assert cookies[0] == 'name1=value; expires=Thu, 01 Jan 1970 00:00:42 GMT'
assert cookies[1] == 'name2=value; expires=Thu, 01 Jan 1970 00:00:43 GMT'
def test_set_cookie_secure():
r = Response()
r.set_cookie('name1', 'value', secure=True)
r.set_cookie('name2', 'value', secure=False)
cookies = sorted([value for name, value in r.headerlist
if name.title() == 'Set-Cookie'])
assert cookies[0].lower() == 'name1=value; secure'
assert cookies[1] == 'name2=value'
def test_set_cookie_httponly():
r = Response()
r.set_cookie('name1', 'value', httponly=True)
r.set_cookie('name2', 'value', httponly=False)
cookies = sorted([value for name, value in r.headerlist
if name.title() == 'Set-Cookie'])
assert cookies[0].lower() == 'name1=value; httponly'
assert cookies[1] == 'name2=value'
def test_delete_cookie():
response = Response()
response.set_cookie('name', 'value')
response.delete_cookie('name')
cookies = [value for name, value in response.headerlist
if name.title() == 'Set-Cookie']
assert 'name=;' in cookies[0] or 'name="";' in cookies[0]
def test_redirect():
r = redirect('http://example.com/foo/new', 302)
assert r.status_line == '302 Found'
assert r.headers['location'] == 'http://example.com/foo/new'
r = redirect('http://example.com/foo/new2', 301)
assert r.status_line == '301 Moved Permanently'
assert r.headers['location'] == 'http://example.com/foo/new2'
def test_set_header():
response = Response()
response['x-test'] = 'foo'
headers = [value for name, value in response.headerlist
if name.title() == 'X-Test']
assert ['foo'] == headers
assert 'foo' == response['x-test']
response['X-Test'] = 'bar'
headers = [value for name, value in response.headerlist
if name.title() == 'X-Test']
assert ['bar'] == headers
assert 'bar' == response['x-test']
def test_append_header():
response = Response()
response.set_header('x-test', 'foo')
headers = [value for name, value in response.headerlist
if name.title() == 'X-Test']
assert ['foo'] == headers
assert 'foo' == response['x-test']
response.add_header('X-Test', 'bar')
headers = [value for name, value in response.headerlist
if name.title() == 'X-Test']
assert ['foo', 'bar'] == headers
assert 'foo' == response['x-test']
def test_delete_header():
response = Response()
response['x-test'] = 'foo'
assert 'foo' == response['x-test']
del response['X-tESt']
with pytest.raises(KeyError):
response['x-test']
def test_non_string_header():
response = Response()
response['x-test'] = 5
assert '5' == response['x-test']
response['x-test'] = None
assert 'None' == response['x-test']
def test_expires_header():
import datetime
response = Response()
now = datetime.datetime.now()
response.expires = now
def seconds(a, b):
td = max(a,b) - min(a,b)
return td.days*24*3600 + td.seconds
assert 0 == seconds(response.expires, now)
now2 = datetime.datetime.utcfromtimestamp(
parse_date(response.headers['Expires']))
assert 0 == seconds(now, now2)
|
mit
|
vmthunder/nova
|
nova/virt/libvirt/dmcrypt.py
|
55
|
2226
|
# Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from nova.virt.libvirt import utils
_dmcrypt_suffix = '-dmcrypt'
def volume_name(base):
"""Returns the suffixed dmcrypt volume name.
This avoids collisions with similarly named device-mapper devices created
for LVM volumes.
"""
return base + _dmcrypt_suffix
def is_encrypted(path):
"""Returns true if the path corresponds to an encrypted disk."""
if path.startswith('/dev/mapper'):
return path.rpartition('/')[2].endswith(_dmcrypt_suffix)
else:
return False
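# For example: is_encrypted('/dev/mapper/vol-dmcrypt') is True, while a plain
# block device path such as '/dev/sdb1' is False.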
def create_volume(target, device, cipher, key_size, key):
"""Sets up a dmcrypt mapping
:param target: device mapper logical device name
:param device: underlying block device
:param cipher: encryption cipher string digestible by cryptsetup
:param key_size: encryption key size
:param key: encryption key as an array of unsigned bytes
"""
cmd = ('cryptsetup',
'create',
target,
device,
'--cipher=' + cipher,
'--key-size=' + str(key_size),
'--key-file=-')
key = ''.join(map(lambda byte: "%02x" % byte, key))
utils.execute(*cmd, process_input=key, run_as_root=True)
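# Illustrative call (hypothetical names; needs root and a real block device):
#   create_volume(volume_name('instance0001_disk'), '/dev/loop0',
#                 'aes-xts-plain64', 512, bytearray(os.urandom(64)))
# runs: cryptsetup create instance0001_disk-dmcrypt /dev/loop0
#       --cipher=aes-xts-plain64 --key-size=512 --key-file=-
# with the key hex-encoded onto stdin.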
def delete_volume(target):
"""Deletes a dmcrypt mapping
:param target: name of the mapped logical device
"""
utils.execute('cryptsetup', 'remove', target, run_as_root=True)
def list_volumes():
"""Function enumerates encrypted volumes."""
return [dmdev for dmdev in os.listdir('/dev/mapper')
if dmdev.endswith('-dmcrypt')]
|
apache-2.0
|
ysekky/GPy
|
GPy/testing/kernel_tests.py
|
3
|
33410
|
# Copyright (c) 2012, 2013 GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
import unittest
from unittest.case import skip
import GPy
from GPy.core.parameterization.param import Param
import numpy as np
from ..util.config import config
verbose = 0
try:
from ..util import linalg_cython
config.set('cython', 'working', 'True')
except ImportError:
config.set('cython', 'working', 'False')
class Kern_check_model(GPy.core.Model):
"""
This is a dummy model class used as a base class for checking that the
gradients of a given kernel are implemented correctly. It enables
checkgrad() to be called independently on a kernel.
"""
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
super(Kern_check_model, self).__init__('kernel_test_model')
if kernel is None:
kernel = GPy.kern.RBF(1)
kernel.randomize(loc=1, scale=0.1)
if X is None:
X = np.random.randn(20, kernel.input_dim)
if dL_dK is None:
if X2 is None:
dL_dK = np.random.rand(X.shape[0], X.shape[0])
else:
dL_dK = np.random.rand(X.shape[0], X2.shape[0])
self.kernel = kernel
self.X = X
self.X2 = X2
self.dL_dK = dL_dK
def is_positive_semi_definite(self):
v = np.linalg.eig(self.kernel.K(self.X))[0]
if any(v.real<=-1e-10):
print(v.real.min())
return False
else:
return True
def log_likelihood(self):
return np.sum(self.dL_dK*self.kernel.K(self.X, self.X2))
class Kern_check_dK_dtheta(Kern_check_model):
"""
This class allows gradient checks for the gradient of a kernel with
respect to parameters.
"""
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
self.link_parameter(self.kernel)
def parameters_changed(self):
return self.kernel.update_gradients_full(self.dL_dK, self.X, self.X2)
class Kern_check_dKdiag_dtheta(Kern_check_model):
"""
This class allows gradient checks of the gradient of the diagonal of a
kernel with respect to the parameters.
"""
def __init__(self, kernel=None, dL_dK=None, X=None):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None)
self.link_parameter(self.kernel)
def log_likelihood(self):
return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()
def parameters_changed(self):
self.kernel.update_gradients_diag(np.diag(self.dL_dK), self.X)
class Kern_check_dK_dX(Kern_check_model):
"""This class allows gradient checks for the gradient of a kernel with respect to X. """
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
self.X = Param('X',X)
self.link_parameter(self.X)
def parameters_changed(self):
self.X.gradient[:] = self.kernel.gradients_X(self.dL_dK, self.X, self.X2)
class Kern_check_dKdiag_dX(Kern_check_dK_dX):
"""This class allows gradient checks for the gradient of a kernel diagonal with respect to X. """
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
Kern_check_dK_dX.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=None)
def log_likelihood(self):
return (np.diag(self.dL_dK)*self.kernel.Kdiag(self.X)).sum()
def parameters_changed(self):
self.X.gradient[:] = self.kernel.gradients_X_diag(self.dL_dK.diagonal(), self.X)
class Kern_check_d2K_dXdX(Kern_check_model):
"""This class allows gradient checks for the secondderivative of a kernel with respect to X. """
def __init__(self, kernel=None, dL_dK=None, X=None, X2=None):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X, X2=X2)
self.X = Param('X',X.copy())
self.link_parameter(self.X)
self.Xc = X.copy()
def log_likelihood(self):
if self.X2 is None:
return self.kernel.gradients_X(self.dL_dK, self.X, self.Xc).sum()
return self.kernel.gradients_X(self.dL_dK, self.X, self.X2).sum()
def parameters_changed(self):
#if self.kernel.name == 'rbf':
# import ipdb;ipdb.set_trace()
if self.X2 is None:
grads = -self.kernel.gradients_XX(self.dL_dK, self.X).sum(1).sum(1)
else:
grads = -self.kernel.gradients_XX(self.dL_dK.T, self.X2, self.X).sum(0).sum(1)
self.X.gradient[:] = grads
class Kern_check_d2Kdiag_dXdX(Kern_check_model):
"""This class allows gradient checks for the second derivative of a kernel with respect to X. """
def __init__(self, kernel=None, dL_dK=None, X=None):
Kern_check_model.__init__(self,kernel=kernel,dL_dK=dL_dK, X=X)
self.X = Param('X',X)
self.link_parameter(self.X)
self.Xc = X.copy()
def log_likelihood(self):
l = 0.
for i in range(self.X.shape[0]):
l += self.kernel.gradients_X(self.dL_dK[[i],[i]], self.X[[i]], self.Xc[[i]]).sum()
return l
def parameters_changed(self):
grads = -self.kernel.gradients_XX_diag(self.dL_dK.diagonal(), self.X)
self.X.gradient[:] = grads.sum(-1)
def check_kernel_gradient_functions(kern, X=None, X2=None, output_ind=None, verbose=False, fixed_X_dims=None):
"""
This function runs on kernels to check the correctness of their
implementation. It checks that the covariance function is positive
semi-definite for a randomly generated data set and that the implemented
gradients agree with numerical estimates.
:param kern: the kernel to be tested.
:type kern: GPy.kern.Kernpart
:param X: X input values to test the covariance function.
:type X: ndarray
:param X2: X2 input values to test the covariance function.
:type X2: ndarray
"""
pass_checks = True
if X is None:
X = np.random.randn(10, kern.input_dim)
if output_ind is not None:
X[:, output_ind] = np.random.randint(kern.output_dim, X.shape[0])
if X2 is None:
X2 = np.random.randn(20, kern.input_dim)
if output_ind is not None:
X2[:, output_ind] = np.random.randint(kern.output_dim, X2.shape[0])
if verbose:
print("Checking covariance function is positive definite.")
result = Kern_check_model(kern, X=X).is_positive_semi_definite()
if result and verbose:
print("Check passed.")
if not result:
print(("Positive definite check failed for " + kern.name + " covariance function."))
pass_checks = False
assert(result)
return False
if verbose:
print("Checking gradients of K(X, X) wrt theta.")
result = Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=verbose)
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
Kern_check_dK_dtheta(kern, X=X, X2=None).checkgrad(verbose=True)
pass_checks = False
assert(result)
return False
if verbose:
print("Checking gradients of K(X, X2) wrt theta.")
try:
result = Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("update_gradients_full, with differing X and X2, not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of K(X, X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
Kern_check_dK_dtheta(kern, X=X, X2=X2).checkgrad(verbose=True)
pass_checks = False
assert(result)
return False
if verbose:
print("Checking gradients of Kdiag(X) wrt theta.")
try:
result = Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("update_gradients_diag not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of Kdiag(X) wrt theta failed for " + kern.name + " covariance function. Gradient values as follows:"))
Kern_check_dKdiag_dtheta(kern, X=X).checkgrad(verbose=True)
pass_checks = False
assert(result)
return False
if verbose:
print("Checking gradients of K(X, X) wrt X.")
try:
testmodel = Kern_check_dK_dX(kern, X=X, X2=None)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of K(X, X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
testmodel.checkgrad(verbose=True)
assert(result)
pass_checks = False
return False
if verbose:
print("Checking gradients of K(X, X2) wrt X.")
try:
testmodel = Kern_check_dK_dX(kern, X=X, X2=X2)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of K(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
testmodel.checkgrad(verbose=True)
assert(result)
pass_checks = False
return False
if verbose:
print("Checking gradients of Kdiag(X) wrt X.")
try:
testmodel = Kern_check_dKdiag_dX(kern, X=X)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of Kdiag(X) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
Kern_check_dKdiag_dX(kern, X=X).checkgrad(verbose=True)
pass_checks = False
assert(result)
return False
if verbose:
print("Checking gradients of dK(X, X2) wrt X2 with full cov in dimensions")
try:
testmodel = Kern_check_d2K_dXdX(kern, X=X, X2=X2)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of dK(X, X2) wrt X failed for " + kern.name + " covariance function. Gradient values as follows:"))
testmodel.checkgrad(verbose=True)
assert(result)
pass_checks = False
return False
if verbose:
print("Checking gradients of dK(X, X) wrt X with full cov in dimensions")
try:
testmodel = Kern_check_d2K_dXdX(kern, X=X, X2=None)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of dK(X, X) wrt X with full cov in dimensions failed for " + kern.name + " covariance function. Gradient values as follows:"))
testmodel.checkgrad(verbose=True)
assert(result)
pass_checks = False
return False
if verbose:
print("Checking gradients of dKdiag(X, X) wrt X with cov in dimensions")
try:
testmodel = Kern_check_d2Kdiag_dXdX(kern, X=X)
if fixed_X_dims is not None:
testmodel.X[:,fixed_X_dims].fix()
result = testmodel.checkgrad(verbose=verbose)
except NotImplementedError:
result=True
if verbose:
print(("gradients_X not implemented for " + kern.name))
if result and verbose:
print("Check passed.")
if not result:
print(("Gradient of dKdiag(X, X) wrt X with cov in dimensions failed for " + kern.name + " covariance function. Gradient values as follows:"))
testmodel.checkgrad(verbose=True)
assert(result)
pass_checks = False
return False
return pass_checks
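# Illustrative use, mirroring the unittest wrappers below (names taken from
# this file): build a kernel, randomize it, and run all checks.
#   k = GPy.kern.RBF(3); k.randomize()
#   assert check_kernel_gradient_functions(k, verbose=True)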
class KernelGradientTestsContinuous(unittest.TestCase):
def setUp(self):
self.N, self.D = 10, 5
self.X = np.random.randn(self.N,self.D+1)
self.X2 = np.random.randn(self.N+10,self.D+1)
continuous_kerns = ['RBF', 'Linear']
self.kernclasses = [getattr(GPy.kern, s) for s in continuous_kerns]
def test_MLP(self):
k = GPy.kern.MLP(self.D,ARD=True)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Matern32(self):
k = GPy.kern.Matern32(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Prod(self):
k = GPy.kern.Matern32(2, active_dims=[2,3]) * GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Prod1(self):
k = GPy.kern.RBF(self.D) * GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Prod2(self):
k = GPy.kern.RBF(2, active_dims=[0,4]) * GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Prod3(self):
k = GPy.kern.RBF(self.D) * GPy.kern.Linear(self.D) * GPy.kern.Bias(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Prod4(self):
k = GPy.kern.RBF(2, active_dims=[0,4]) * GPy.kern.Linear(self.D) * GPy.kern.Matern32(2, active_dims=[0,1])
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Add(self):
k = GPy.kern.Matern32(2, active_dims=[2,3]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
k += GPy.kern.Matern32(2, active_dims=[2,3]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Add_dims(self):
k = GPy.kern.Matern32(2, active_dims=[2,self.D]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
k.randomize()
self.assertRaises(IndexError, k.K, self.X[:, :self.D])
k = GPy.kern.Matern32(2, active_dims=[2,self.D-1]) + GPy.kern.RBF(2, active_dims=[0,4]) + GPy.kern.Linear(self.D)
k.randomize()
# assert it runs:
try:
k.K(self.X)
except AssertionError:
raise AssertionError("k.K(X) should run on self.D-1 dimension")
def test_Matern52(self):
k = GPy.kern.Matern52(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_RBF(self):
k = GPy.kern.RBF(self.D-1, ARD=True)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_OU(self):
k = GPy.kern.OU(self.D-1, ARD=True)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_RatQuad(self):
k = GPy.kern.RatQuad(self.D-1, ARD=True)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_ExpQuad(self):
k = GPy.kern.ExpQuad(self.D-1, ARD=True)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_integral(self):
k = GPy.kern.Integral(1)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_multidimensional_integral_limits(self):
k = GPy.kern.Multidimensional_Integral_Limits(2)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_integral_limits(self):
k = GPy.kern.Integral_Limits(2)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Linear(self):
k = GPy.kern.Linear(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_LinearFull(self):
k = GPy.kern.LinearFull(self.D, self.D-1)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Fixed(self):
cov = np.dot(self.X, self.X.T)
X = np.arange(self.N).reshape(self.N, 1)
k = GPy.kern.Fixed(1, cov)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=X, X2=None, verbose=verbose))
def test_Poly(self):
k = GPy.kern.Poly(self.D, order=5)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_WhiteHeteroscedastic(self):
k = GPy.kern.WhiteHeteroscedastic(self.D, self.X.shape[0])
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_standard_periodic(self):
k = GPy.kern.StdPeriodic(self.D)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_Precomputed(self):
Xall = np.concatenate([self.X, self.X2])
cov = np.dot(Xall, Xall.T)
X = np.arange(self.N).reshape(self.N, 1)
X2 = np.arange(self.N,2*self.N+10).reshape(self.N+10, 1)
k = GPy.kern.Precomputed(1, cov)
k.randomize()
self.assertTrue(check_kernel_gradient_functions(k, X=X, X2=X2, verbose=verbose, fixed_X_dims=[0]))
def test_basis_func_linear_slope(self):
start_stop = np.random.uniform(self.X.min(0), self.X.max(0), (4, self.X.shape[1])).T
start_stop.sort(axis=1)
ks = []
for i in range(start_stop.shape[0]):
start, stop = np.split(start_stop[i], 2)
ks.append(GPy.kern.LinearSlopeBasisFuncKernel(1, start, stop, ARD=i%2==0, active_dims=[i]))
k = GPy.kern.Add(ks)
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_basis_func_changepoint(self):
points = np.random.uniform(self.X.min(0), self.X.max(0), (self.X.shape[1]))
ks = []
for i in range(points.shape[0]):
ks.append(GPy.kern.ChangePointBasisFuncKernel(1, points[i], ARD=i%2==0, active_dims=[i]))
k = GPy.kern.Add(ks)
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_basis_func_poly(self):
ks = []
for i in range(self.X.shape[1]):
ks.append(GPy.kern.PolynomialBasisFuncKernel(1, 5, ARD=i%2==0, active_dims=[i]))
k = GPy.kern.Add(ks)
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
def test_basis_func_domain(self):
start_stop = np.random.uniform(self.X.min(0), self.X.max(0), (4, self.X.shape[1])).T
start_stop.sort(axis=1)
ks = []
for i in range(start_stop.shape[0]):
start, stop = np.split(start_stop[i], 2)
ks.append(GPy.kern.DomainKernel(1, start, stop, ARD=i%2==0, active_dims=[i]))
k = GPy.kern.Add(ks)
self.assertTrue(check_kernel_gradient_functions(k, X=self.X, X2=self.X2, verbose=verbose))
class KernelTestsMiscellaneous(unittest.TestCase):
def setUp(self):
N, D = 100, 10
self.X = np.linspace(-np.pi, +np.pi, N)[:,None] * np.random.uniform(-10,10,D)
self.rbf = GPy.kern.RBF(2, active_dims=np.arange(0,4,2))
self.rbf.randomize()
self.linear = GPy.kern.Linear(2, active_dims=(3,9))
self.linear.randomize()
self.matern = GPy.kern.Matern32(3, active_dims=np.array([1,7,9]))
self.matern.randomize()
self.sumkern = self.rbf + self.linear
self.sumkern += self.matern
#self.sumkern.randomize()
def test_which_parts(self):
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.matern]), self.linear.K(self.X)+self.matern.K(self.X)))
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=[self.linear, self.rbf]), self.linear.K(self.X)+self.rbf.K(self.X)))
self.assertTrue(np.allclose(self.sumkern.K(self.X, which_parts=self.sumkern.parts[0]), self.rbf.K(self.X)))
def test_active_dims(self):
np.testing.assert_array_equal(self.sumkern.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(self.sumkern._all_dims_active, range(10))
tmp = self.linear+self.rbf
np.testing.assert_array_equal(tmp.active_dims, [0,2,3,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf*self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern+self.rbf+self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
tmp = self.matern*self.rbf*self.linear
np.testing.assert_array_equal(tmp.active_dims, [0,1,2,3,7,9])
np.testing.assert_array_equal(tmp._all_dims_active, range(10))
class KernelTestsNonContinuous(unittest.TestCase):
def setUp(self):
N0 = 3
N1 = 9
N2 = 4
N = N0+N1+N2
self.D = 3
self.X = np.random.randn(N, self.D+1)
indices = np.random.random_integers(0, 2, size=N)
self.X[indices==0, -1] = 0
self.X[indices==1, -1] = 1
self.X[indices==2, -1] = 2
#self.X = self.X[self.X[:, -1].argsort(), :]
self.X2 = np.random.randn((N0+N1)*2, self.D+1)
self.X2[:(N0*2), -1] = 0
self.X2[(N0*2):, -1] = 1
def test_IndependentOutputs(self):
k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, active_dims=range(self.D), name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
np.testing.assert_array_equal(kern.active_dims, [-1,0,1,2])
np.testing.assert_array_equal(kern._all_dims_active, [0,1,2,-1])
def test_IndependentGradients(self):
k = GPy.kern.RBF(self.D, active_dims=range(self.D))
kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(self.D, active_dims=range(self.D), name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
def test_Hierarchical(self):
k = [GPy.kern.RBF(2, active_dims=[0,2], name='rbf1'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf2')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
np.testing.assert_array_equal(kern.active_dims, [-1,0,2])
np.testing.assert_array_equal(kern._all_dims_active, [0,1,2,-1])
def test_Hierarchical_gradients(self):
k = [GPy.kern.RBF(2, active_dims=[0,2], name='rbf1'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf2')]
kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
def test_ODE_UY(self):
kern = GPy.kern.ODE_UY(2, active_dims=[0, self.D])
X = self.X[self.X[:,-1]!=2]
X2 = self.X2[self.X2[:,-1]!=2]
self.assertTrue(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
def test_Coregionalize(self):
kern = GPy.kern.Coregionalize(1, output_dim=3, active_dims=[-1])
self.assertTrue(check_kernel_gradient_functions(kern, X=self.X, X2=self.X2, verbose=verbose, fixed_X_dims=-1))
@unittest.skipIf(not config.getboolean('cython', 'working'),"Cython modules have not been built on this machine")
class Coregionalize_cython_test(unittest.TestCase):
"""
Make sure that the coregionalize kernel works the same with and without cython enabled
"""
def setUp(self):
self.k = GPy.kern.Coregionalize(1, output_dim=12)
self.N1, self.N2 = 100, 200
self.X = np.random.randint(0,12,(self.N1,1))
self.X2 = np.random.randint(0,12,(self.N2,1))
def test_sym(self):
dL_dK = np.random.randn(self.N1, self.N1)
GPy.util.config.config.set('cython', 'working', 'True')
K_cython = self.k.K(self.X)
self.k.update_gradients_full(dL_dK, self.X)
grads_cython = self.k.gradient.copy()
GPy.util.config.config.set('cython', 'working', 'False')
K_numpy = self.k.K(self.X)
self.k.update_gradients_full(dL_dK, self.X)
grads_numpy = self.k.gradient.copy()
self.assertTrue(np.allclose(K_numpy, K_cython))
self.assertTrue(np.allclose(grads_numpy, grads_cython))
#reset the cython state for any other tests
GPy.util.config.config.set('cython', 'working', 'true')
def test_nonsym(self):
dL_dK = np.random.randn(self.N1, self.N2)
GPy.util.config.config.set('cython', 'working', 'True')
K_cython = self.k.K(self.X, self.X2)
self.k.gradient = 0.
self.k.update_gradients_full(dL_dK, self.X, self.X2)
grads_cython = self.k.gradient.copy()
GPy.util.config.config.set('cython', 'working', 'False')
K_numpy = self.k.K(self.X, self.X2)
self.k.gradient = 0.
self.k.update_gradients_full(dL_dK, self.X, self.X2)
grads_numpy = self.k.gradient.copy()
self.assertTrue(np.allclose(K_numpy, K_cython))
self.assertTrue(np.allclose(grads_numpy, grads_cython))
#reset the cython state for any other tests
GPy.util.config.config.set('cython', 'working', 'true')
class KernelTestsProductWithZeroValues(unittest.TestCase):
def setUp(self):
self.X = np.array([[0,1],[1,0]])
self.k = GPy.kern.Linear(2) * GPy.kern.Bias(2)
def test_zero_valued_kernel_full(self):
self.k.update_gradients_full(1, self.X)
self.assertFalse(np.isnan(self.k['linear.variances'].gradient),
"Gradient resulted in NaN")
def test_zero_valued_kernel_gradients_X(self):
target = self.k.gradients_X(1, self.X)
self.assertFalse(np.any(np.isnan(target)),
"Gradient resulted in NaN")
class Kernel_Psi_statistics_GradientTests(unittest.TestCase):
def setUp(self):
from GPy.core.parameterization.variational import NormalPosterior
N,M,Q = 100,20,3
X = np.random.randn(N,Q)
X_var = np.random.rand(N,Q)+0.01
self.Z = np.random.randn(M,Q)
self.qX = NormalPosterior(X, X_var)
self.w1 = np.random.randn(N)
self.w2 = np.random.randn(N,M)
self.w3 = np.random.randn(M,M)
self.w3 = self.w3#+self.w3.T
self.w3n = np.random.randn(N,M,M)
self.w3n = self.w3n+np.swapaxes(self.w3n, 1,2)
def test_kernels(self):
from GPy.kern import RBF,Linear,MLP,Bias,White
Q = self.Z.shape[1]
kernels = [RBF(Q,ARD=True), Linear(Q,ARD=True),MLP(Q,ARD=True), RBF(Q,ARD=True)+Linear(Q,ARD=True)+Bias(Q)+White(Q)
,RBF(Q,ARD=True)+Bias(Q)+White(Q), Linear(Q,ARD=True)+Bias(Q)+White(Q)]
for k in kernels:
k.randomize()
self._test_kernel_param(k)
self._test_Z(k)
self._test_qX(k)
self._test_kernel_param(k, psi2n=True)
self._test_Z(k, psi2n=True)
self._test_qX(k, psi2n=True)
def _test_kernel_param(self, kernel, psi2n=False):
def f(p):
kernel.param_array[:] = p
psi0 = kernel.psi0(self.Z, self.qX)
psi1 = kernel.psi1(self.Z, self.qX)
if not psi2n:
psi2 = kernel.psi2(self.Z, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3*psi2).sum()
else:
psi2 = kernel.psi2n(self.Z, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3n*psi2).sum()
def df(p):
kernel.param_array[:] = p
kernel.update_gradients_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z, self.qX)
return kernel.gradient.copy()
from GPy.models import GradientChecker
m = GradientChecker(f, df, kernel.param_array.copy())
m.checkgrad(verbose=1)
self.assertTrue(m.checkgrad())
def _test_Z(self, kernel, psi2n=False):
def f(p):
psi0 = kernel.psi0(p, self.qX)
psi1 = kernel.psi1(p, self.qX)
psi2 = kernel.psi2(p, self.qX)
if not psi2n:
psi2 = kernel.psi2(p, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3*psi2).sum()
else:
psi2 = kernel.psi2n(p, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3n*psi2).sum()
def df(p):
return kernel.gradients_Z_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, p, self.qX)
from GPy.models import GradientChecker
m = GradientChecker(f, df, self.Z.copy())
self.assertTrue(m.checkgrad())
def _test_qX(self, kernel, psi2n=False):
def f(p):
self.qX.param_array[:] = p
self.qX._trigger_params_changed()
psi0 = kernel.psi0(self.Z, self.qX)
psi1 = kernel.psi1(self.Z, self.qX)
if not psi2n:
psi2 = kernel.psi2(self.Z, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3*psi2).sum()
else:
psi2 = kernel.psi2n(self.Z, self.qX)
return (self.w1*psi0).sum() + (self.w2*psi1).sum() + (self.w3n*psi2).sum()
def df(p):
self.qX.param_array[:] = p
self.qX._trigger_params_changed()
grad = kernel.gradients_qX_expectations(self.w1, self.w2, self.w3 if not psi2n else self.w3n, self.Z, self.qX)
self.qX.set_gradients(grad)
return self.qX.gradient.copy()
from GPy.models import GradientChecker
m = GradientChecker(f, df, self.qX.param_array.copy())
self.assertTrue(m.checkgrad())
if __name__ == "__main__":
print("Running unit tests, please be (very) patient...")
unittest.main()
# np.random.seed(0)
# N0 = 3
# N1 = 9
# N2 = 4
# N = N0+N1+N2
# D = 3
# X = np.random.randn(N, D+1)
# indices = np.random.random_integers(0, 2, size=N)
# X[indices==0, -1] = 0
# X[indices==1, -1] = 1
# X[indices==2, -1] = 2
# #X = X[X[:, -1].argsort(), :]
# X2 = np.random.randn((N0+N1)*2, D+1)
# X2[:(N0*2), -1] = 0
# X2[(N0*2):, -1] = 1
# k = [GPy.kern.RBF(1, active_dims=[1], name='rbf1'), GPy.kern.RBF(D, name='rbf012'), GPy.kern.RBF(2, active_dims=[0,2], name='rbf02')]
# kern = GPy.kern.IndependentOutputs(k, -1, name='ind_split')
# assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
# k = GPy.kern.RBF(D)
# kern = GPy.kern.IndependentOutputs(k, -1, 'ind_single')
# assert(check_kernel_gradient_functions(kern, X=X, X2=X2, verbose=verbose, fixed_X_dims=-1))
|
bsd-3-clause
|
iw3hxn/server
|
openerp/report/render/rml2txt/rml2txt.py
|
14
|
17786
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009, P. Christeas, Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys
import StringIO
import copy
from lxml import etree
import base64
import utils
Font_size= 10.0
def verbose(text):
    sys.stderr.write(text+"\n")
class textbox(object):
"""A box containing plain text.
It can have an offset, in chars.
Lines can be either text strings, or textbox'es, recursively.
"""
def __init__(self,x=0, y=0):
self.posx = x
self.posy = y
self.lines = []
self.curline = ''
self.endspace = False
def newline(self):
if isinstance(self.curline, textbox):
self.lines.extend(self.curline.renderlines())
else:
self.lines.append(self.curline)
self.curline = ''
def fline(self):
if isinstance(self.curline, textbox):
self.lines.extend(self.curline.renderlines())
elif len(self.curline):
self.lines.append(self.curline)
self.curline = ''
def appendtxt(self,txt):
"""Append some text to the current line.
Mimic the HTML behaviour, where all whitespace evaluates to
a single space """
if not txt:
return
bs = es = False
if txt[0].isspace():
bs = True
        if txt[-1].isspace():
es = True
if bs and not self.endspace:
self.curline += " "
self.curline += txt.strip().replace("\n"," ").replace("\t"," ")
if es:
self.curline += " "
self.endspace = es
def rendertxt(self,xoffset=0):
        result = "\n" * self.posy
        lineoff = " " * (self.posx + xoffset)
for l in self.lines:
result+= lineoff+ l +"\n"
return result
def renderlines(self,pad=0):
"""Returns a list of lines, from the current object
pad: all lines must be at least pad characters.
"""
        result = []
        lineoff = " " * self.posx
        for l in self.lines:
            lpad = ""
            if pad and len(l) < pad:
                lpad = " " * (pad - len(l))
#elif pad and len(l) > pad ?
result.append(lineoff+ l+lpad)
return result
def haplines(self,arr,offset,cc= ''):
""" Horizontaly append lines
"""
while (len(self.lines) < len(arr)):
self.lines.append("")
for i in range(len(self.lines)):
while (len(self.lines[i]) < offset):
self.lines[i] += " "
for i in range(len(arr)):
self.lines[i] += cc +arr[i]
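# Illustrative use of textbox (not part of the original module):
#   tb = textbox(x=4)
#   tb.appendtxt("Hello\nworld")  # whitespace collapses to single spaces
#   tb.newline()
#   tb.rendertxt()                # -> "    Hello world\n"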
class _flowable(object):
def __init__(self, template, doc,localcontext):
self._tags = {
'1title': self._tag_title,
'1spacer': self._tag_spacer,
'para': self._tag_para,
'font': self._tag_font,
'section': self._tag_section,
'1nextFrame': self._tag_next_frame,
'blockTable': self._tag_table,
'1pageBreak': self._tag_page_break,
'1setNextTemplate': self._tag_next_template,
}
self.template = template
self.doc = doc
self.localcontext = localcontext
self.nitags = []
        self.tb = None
def warn_nitag(self,tag):
if tag not in self.nitags:
verbose("Unknown tag \"%s\", please implement it." % tag)
self.nitags.append(tag)
def _tag_page_break(self, node):
return "\f"
def _tag_next_template(self, node):
return ''
def _tag_next_frame(self, node):
result=self.template.frame_stop()
result+='\n'
result+=self.template.frame_start()
return result
    def _tag_title(self, node):
        # node is an lxml element, so use .tag/etree.tostring() instead of
        # the minidom-era .tagName/.toxml()
        node.tag = 'h1'
        return etree.tostring(node)
def _tag_spacer(self, node):
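        # convert the spacer height (in points) into blank lines, at
        # roughly 35 points of vertical space per line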
length = 1+int(utils.unit_get(node.get('length')))/35
return "\n"*length
def _tag_table(self, node):
self.tb.fline()
saved_tb = self.tb
self.tb = None
sizes = None
if node.get('colWidths'):
sizes = map(lambda x: utils.unit_get(x), node.get('colWidths').split(','))
trs = []
for n in utils._child_get(node,self):
if n.tag == 'tr':
tds = []
for m in utils._child_get(n,self):
if m.tag == 'td':
self.tb = textbox()
self.rec_render_cnodes(m)
tds.append(self.tb)
self.tb = None
if len(tds):
trs.append(tds)
        if not sizes:
            verbose("computing table sizes..")
            # fallback guess: the original code logged the message above but
            # never set sizes, which crashed below whenever colWidths was
            # missing; assume ten characters per column
            ncols = max(len(tds) for tds in trs) if trs else 1
            sizes = [10 * Font_size] * ncols
for tds in trs:
trt = textbox()
off=0
for i in range(len(tds)):
p = int(sizes[i]/Font_size)
trl = tds[i].renderlines(pad=p)
trt.haplines(trl,off)
off += sizes[i]/Font_size
saved_tb.curline = trt
saved_tb.fline()
self.tb = saved_tb
return
def _tag_para(self, node):
#TODO: styles
self.rec_render_cnodes(node)
self.tb.newline()
def _tag_section(self, node):
#TODO: styles
self.rec_render_cnodes(node)
self.tb.newline()
def _tag_font(self, node):
"""We do ignore fonts.."""
self.rec_render_cnodes(node)
def rec_render_cnodes(self,node):
self.tb.appendtxt(utils._process_text(self, node.text or ''))
for n in utils._child_get(node,self):
self.rec_render(n)
self.tb.appendtxt(utils._process_text(self, node.tail or ''))
def rec_render(self,node):
""" Recursive render: fill outarr with text of current node
"""
if node.tag != None:
if node.tag in self._tags:
self._tags[node.tag](node)
else:
self.warn_nitag(node.tag)
def render(self, node):
self.tb= textbox()
#result = self.template.start()
#result += self.template.frame_start()
self.rec_render_cnodes(node)
#result += self.template.frame_stop()
#result += self.template.end()
result = self.tb.rendertxt()
del self.tb
return result
class _rml_tmpl_tag(object):
def __init__(self, *args):
pass
def tag_start(self):
return ''
def tag_end(self):
return False
def tag_stop(self):
return ''
def tag_mergeable(self):
return True
class _rml_tmpl_frame(_rml_tmpl_tag):
def __init__(self, posx, width):
self.width = width
self.posx = posx
def tag_start(self):
return "frame start"
return '<table border="0" width="%d"><tr><td width="%d"> </td><td>' % (self.width+self.posx,self.posx)
def tag_end(self):
return True
def tag_stop(self):
return "frame stop"
return '</td></tr></table><br/>'
def tag_mergeable(self):
return False
    # An awful workaround since I don't really understand the semantics behind merge.
def merge(self, frame):
pass
class _rml_tmpl_draw_string(_rml_tmpl_tag):
def __init__(self, node, style):
self.posx = utils.unit_get(node.get('x'))
self.posy = utils.unit_get(node.get('y'))
aligns = {
'drawString': 'left',
'drawRightString': 'right',
'drawCentredString': 'center'
}
        align = aligns[node.tag]  # lxml elements expose .tag, not .localName
self.pos = [(self.posx, self.posy, align, utils.text_get(node), style.get('td'), style.font_size_get('td'))]
def tag_start(self):
return "draw string \"%s\" @(%d,%d)..\n" %("txt",self.posx,self.posy)
self.pos.sort()
res = '\\table ...'
posx = 0
i = 0
for (x,y,align,txt, style, fs) in self.pos:
if align=="left":
pos2 = len(txt)*fs
res+='<td width="%d"></td><td style="%s" width="%d">%s</td>' % (x - posx, style, pos2, txt)
posx = x+pos2
if align=="right":
res+='<td width="%d" align="right" style="%s">%s</td>' % (x - posx, style, txt)
posx = x
if align=="center":
res+='<td width="%d" align="center" style="%s">%s</td>' % ((x - posx)*2, style, txt)
posx = 2*x-posx
i+=1
res+='\\table end'
return res
def merge(self, ds):
self.pos+=ds.pos
class _rml_tmpl_draw_lines(_rml_tmpl_tag):
def __init__(self, node, style):
coord = [utils.unit_get(x) for x in utils.text_get(node).split(' ')]
self.ok = False
self.posx = coord[0]
self.posy = coord[1]
self.width = coord[2]-coord[0]
self.ok = coord[1]==coord[3]
        self.style = style.get('hr')
def tag_start(self):
return "draw lines..\n"
if self.ok:
return '<table border="0" cellpadding="0" cellspacing="0" width="%d"><tr><td width="%d"></td><td><hr width="100%%" style="margin:0px; %s"></td></tr></table>' % (self.posx+self.width,self.posx,self.style)
else:
return ''
class _rml_stylesheet(object):
def __init__(self, stylesheet, doc):
self.doc = doc
self.attrs = {}
self._tags = {
'fontSize': lambda x: ('font-size',str(utils.unit_get(x))+'px'),
'alignment': lambda x: ('text-align',str(x))
}
result = ''
for ps in stylesheet.findall('paraStyle'):
attr = {}
            # lxml elements expose attributes via keys()/get(), not the
            # minidom .attributes map
            for name in ps.keys():
                attr[name] = ps.get(name)
attrs = []
for a in attr:
if a in self._tags:
attrs.append("%s:%s" % self._tags[a](attr[a]))
if len(attrs):
result += "p."+attr['name']+" {"+'; '.join(attrs)+"}\n"
self.result = result
def render(self):
return ''
class _rml_draw_style(object):
def __init__(self):
self.style = {}
self._styles = {
'fill': lambda x: {'td': {'color':x.get('color')}},
'setFont': lambda x: {'td': {'font-size':x.get('size')+'px'}},
'stroke': lambda x: {'hr': {'color':x.get('color')}},
}
def update(self, node):
        if node.tag in self._styles:
            result = self._styles[node.tag](node)
for key in result:
if key in self.style:
self.style[key].update(result[key])
else:
self.style[key] = result[key]
def font_size_get(self,tag):
size = utils.unit_get(self.style.get('td', {}).get('font-size','16'))
return size
def get(self,tag):
if not tag in self.style:
return ""
return ';'.join(['%s:%s' % (x[0],x[1]) for x in self.style[tag].items()])
class _rml_template(object):
def __init__(self, localcontext, out, node, doc, images=None, path='.', title=None):
self.localcontext = localcontext
self.frame_pos = -1
self.frames = []
self.template_order = []
self.page_template = {}
self.loop = 0
self._tags = {
'drawString': _rml_tmpl_draw_string,
'drawRightString': _rml_tmpl_draw_string,
'drawCentredString': _rml_tmpl_draw_string,
'lines': _rml_tmpl_draw_lines
}
self.style = _rml_draw_style()
for pt in node.findall('pageTemplate'):
frames = {}
id = pt.get('id')
self.template_order.append(id)
for tmpl in pt.findall('frame'):
posy = int(utils.unit_get(tmpl.get('y1'))) #+utils.unit_get(tmpl.get('height')))
posx = int(utils.unit_get(tmpl.get('x1')))
frames[(posy,posx,tmpl.get('id'))] = _rml_tmpl_frame(posx, utils.unit_get(tmpl.get('width')))
for tmpl in node.findall('pageGraphics'):
                for n in tmpl:
                    # lxml has no nodeType; comments and processing
                    # instructions have a non-string .tag, so skip them
                    if isinstance(n.tag, basestring):
                        if n.tag in self._tags:
                            t = self._tags[n.tag](n, self.style)
                            frames[(t.posy, t.posx, n.tag)] = t
                        else:
                            self.style.update(n)
keys = frames.keys()
keys.sort()
keys.reverse()
self.page_template[id] = []
for key in range(len(keys)):
if key>0 and keys[key-1][0] == keys[key][0]:
if type(self.page_template[id][-1]) == type(frames[keys[key]]):
if self.page_template[id][-1].tag_mergeable():
self.page_template[id][-1].merge(frames[keys[key]])
continue
self.page_template[id].append(frames[keys[key]])
self.template = self.template_order[0]
def _get_style(self):
return self.style
def set_next_template(self):
        self.template = self.template_order[
            (self.template_order.index(self.template) + 1) % len(self.template_order)]
self.frame_pos = -1
def set_template(self, name):
self.template = name
self.frame_pos = -1
def frame_start(self):
result = ''
frames = self.page_template[self.template]
ok = True
while ok:
self.frame_pos += 1
if self.frame_pos>=len(frames):
self.frame_pos=0
self.loop=1
ok = False
continue
f = frames[self.frame_pos]
result+=f.tag_start()
ok = not f.tag_end()
if ok:
result+=f.tag_stop()
return result
def frame_stop(self):
frames = self.page_template[self.template]
f = frames[self.frame_pos]
result=f.tag_stop()
return result
def start(self):
return ''
def end(self):
return "template end\n"
result = ''
while not self.loop:
result += self.frame_start()
result += self.frame_stop()
return result
class _rml_doc(object):
def __init__(self, node, localcontext=None, images=None, path='.', title=None):
self.localcontext = {} if localcontext is None else localcontext
self.etree = node
self.filename = self.etree.get('filename')
self.result = ''
def render(self, out):
#el = self.etree.findall('docinit')
#if el:
#self.docinit(el)
#el = self.etree.findall('stylesheet')
#self.styles = _rml_styles(el,self.localcontext)
el = self.etree.findall('template')
self.result =""
if len(el):
pt_obj = _rml_template(self.localcontext, out, el[0], self)
stories = utils._child_get(self.etree, self, 'story')
for story in stories:
if self.result:
self.result += '\f'
f = _flowable(pt_obj,story,self.localcontext)
self.result += f.render(story)
del f
else:
self.result = "<cannot render w/o template>"
self.result += '\n'
        out.write(self.result)
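# parseNode renders to an in-memory buffer and returns the text; parseString
# does the same but can also write the result to a file when 'fout' is given.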
def parseNode(rml, localcontext=None,fout=None, images=None, path='.',title=None):
node = etree.XML(rml)
r = _rml_doc(node, localcontext, images, path, title=title)
fp = StringIO.StringIO()
r.render(fp)
return fp.getvalue()
def parseString(rml, localcontext=None,fout=None, images=None, path='.',title=None):
node = etree.XML(rml)
r = _rml_doc(node, localcontext, images, path, title=title)
if fout:
fp = file(fout,'wb')
r.render(fp)
fp.close()
return fout
else:
fp = StringIO.StringIO()
r.render(fp)
return fp.getvalue()
def trml2pdf_help():
    print 'Usage: rml2txt input.rml >output.txt'
    print 'Render the standard input (RML) and output a TXT file'
sys.exit(0)
if __name__=="__main__":
if len(sys.argv)>1:
if sys.argv[1]=='--help':
trml2pdf_help()
print parseString(file(sys.argv[1], 'r').read()).encode('iso8859-7')
else:
        print 'Usage: trml2txt input.rml >output.txt'
print 'Try \'trml2txt --help\' for more information.'
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
silkyar/570_Big_Little
|
src/arch/x86/isa/insts/x87/control/clear_exceptions.py
|
91
|
2159
|
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
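# The microcode string below is an empty placeholder: FCLEX/FNCLEX (clear the
# x87 exception flags) are listed but not yet implemented.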
microcode = '''
# FCLEX
# FNCLEX
'''
|
bsd-3-clause
|
Ballz0fSteel/Umeko
|
lib/youtube_dl/extractor/worldstarhiphop.py
|
57
|
1344
|
from __future__ import unicode_literals
from .common import InfoExtractor
class WorldStarHipHopIE(InfoExtractor):
_VALID_URL = r'https?://(?:www|m)\.worldstar(?:candy|hiphop)\.com/(?:videos|android)/video\.php\?.*?\bv=(?P<id>[^&]+)'
_TESTS = [{
'url': 'http://www.worldstarhiphop.com/videos/video.php?v=wshh6a7q1ny0G34ZwuIO',
'md5': '9d04de741161603bf7071bbf4e883186',
'info_dict': {
'id': 'wshh6a7q1ny0G34ZwuIO',
'ext': 'mp4',
'title': 'KO Of The Week: MMA Fighter Gets Knocked Out By Swift Head Kick!'
}
}, {
'url': 'http://m.worldstarhiphop.com/android/video.php?v=wshh6a7q1ny0G34ZwuIO',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
entries = self._parse_html5_media_entries(url, webpage, video_id)
if not entries:
return self.url_result(url, 'Generic')
title = self._html_search_regex(
[r'(?s)<div class="content-heading">\s*<h1>(.*?)</h1>',
r'<span[^>]+class="tc-sp-pinned-title">(.*)</span>'],
webpage, 'title')
info = entries[0]
info.update({
'id': video_id,
'title': title,
})
return info
|
gpl-3.0
|
uni-peter-zheng/tp-libvirt
|
libvirt/tests/src/numa/numa_capabilities.py
|
4
|
1976
|
import logging
from virttest import libvirt_xml
from virttest import utils_libvirtd
from virttest import utils_misc
from autotest.client.shared import error
def run(test, params, env):
"""
Test capabilities with host numa node topology
"""
libvirtd = utils_libvirtd.Libvirtd()
libvirtd.start()
try:
new_cap = libvirt_xml.CapabilityXML()
if not libvirtd.is_running():
raise error.TestFail("Libvirtd is not running")
topo = new_cap.cells_topology
logging.debug("topo xml is %s", topo.xmltreefile)
cell_list = topo.get_cell()
numa_info = utils_misc.NumaInfo()
node_list = numa_info.online_nodes
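        # walk each <cell> from the capabilities XML and compare it against
        # the live host NUMA info: distances first, then per-cell CPU topology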
for cell_num in range(len(cell_list)):
# check node distances
node_num = node_list[cell_num]
cell_distance = cell_list[cell_num].sibling
logging.debug("cell %s distance is %s", node_num, cell_distance)
node_distance = numa_info.distances[node_num]
for j in range(len(cell_list)):
if cell_distance[j]['value'] != node_distance[j]:
raise error.TestFail("cell distance value not expected.")
# check node cell cpu
cell_xml = cell_list[cell_num]
cpu_list_from_xml = cell_xml.cpu
node_ = numa_info.nodes[node_num]
cpu_list = node_.cpus
logging.debug("cell %s cpu list is %s", node_num, cpu_list)
cpu_topo_list = []
for cpu_id in cpu_list:
cpu_dict = node_.get_cpu_topology(cpu_id)
cpu_topo_list.append(cpu_dict)
logging.debug("cpu topology list from capabilities xml is %s",
cpu_list_from_xml)
if cpu_list_from_xml != cpu_topo_list:
raise error.TestFail("cpu list %s from capabilities xml not "
"expected.")
finally:
libvirtd.restart()
|
gpl-2.0
|
frankenjoe/pyabp
|
python/gui_init.py
|
1
|
4202
|
import sys
import os
import warnings
import shutil
import glob
import logging
from PyQt5.QtGui import (QIcon, QFont, QStandardItemModel, QPixmap)
from PyQt5.QtCore import (QDate, QDateTime, QRegExp, QSortFilterProxyModel, Qt, QTime, QEvent, QSize)
from PyQt5.QtWidgets import (qApp, QApplication, QCheckBox, QComboBox, QPushButton, QGridLayout, QGroupBox, QHBoxLayout, QFormLayout, QLabel, QLineEdit, QTextEdit, QTreeView, QVBoxLayout, QWidget, QAbstractItemView, QMessageBox, QLayout, QFileDialog, QProgressDialog)
import tools
import define
from player import Player
from playlist import Playlist
from scanner import Scanner
from config import Config
from server import Server
from database import Database
from gui_library import Library
from gui_control import Control
from gui_control_thread import ControlThread
class Init(QWidget):
app = None
logger = None
config = None
database = None
font = None
scanner = None
playlists = []
server = None
player = None
label = None
layout = None
def __init__(self, app, config, logger=None):
super().__init__()
self.app = app
self.logger = logger
self.config = config
# font
font = QFont()
font.setPixelSize(self.config.fontSize)
# label
icon = QLabel()
image = QPixmap(define.ICONFILE)
icon.setPixmap(image)
icon.setAlignment(Qt.AlignCenter)
self.label = QLabel()
# main
self.layout = QVBoxLayout()
self.layout.addWidget(icon)
self.layout.addWidget(self.label)
self.setLayout(self.layout)
# show
self.setWindowFlags(Qt.CustomizeWindowHint)
self.setFixedHeight(image.size().height() + 50)
self.setFixedWidth(600)
self.show()
# database
self.info('open database..')
self.database = Database(logger=self)
try:
self.database.open(define.DBFILE)
except:
            tools.error('could not open database', self.logger)
        # scan
self.info('update library..')
self.scanner = Scanner(self.config, self.database, logger=self)
try:
self.playlists = self.scanner.scan()
except:
tools.error('could not scan playlists', self.logger)
# mpd
self.info('start mpd..')
if self.config.startMpd:
if not tools.islinux():
self.server = Server(logger=self)
self.server.start(define.MPDFILE, conf=os.path.realpath(self.config.confPath))
# player
self.info('start player..')
self.player = Player(self.database, logger=self)
if self.player.connect():
self.player.update()
else:
tools.error('could not connect player', self.logger)
        # redirect logging (the mpd server object only exists when it was
        # started above, so guard on it rather than on the platform)
        if self.server is not None:
            self.server.logger = self.logger
self.player.logger = self.logger
def info(self, message : str):
self.label.setText(message)
self.update()
tools.info(message, self.logger)
self.app.processEvents()
def exception(self, message : str):
self.label.setText(message)
self.update()
tools.error(message, self.logger)
self.app.processEvents()
if __name__ == '__main__':
# logger
logger = logging.getLogger('pyabp')
logFileHandler = logging.FileHandler(define.LOGFILE)
logFormatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
logFileHandler.setFormatter(logFormatter)
logger.addHandler(logFileHandler)
logger.setLevel(logging.DEBUG)
# config
config = tools.readConfig(define.CONFFILE,logger=logger)
# run
app = QApplication(sys.argv)
init = Init(app, config, logger=logger)
sys.exit(app.exec_())
|
gpl-3.0
|
cackharot/fbeazt
|
src/test/test_order.py
|
1
|
3860
|
import os
import unittest
from random import randint
from bson import json_util
from bson.objectid import ObjectId
from foodbeazt import fapp
from foodbeazt import initdb as fdb
from foodbeazt.service.ProductService import ProductService
from foodbeazt.service.StoreService import StoreService
class CreateOrderTestCase(unittest.TestCase):
"""
Testing order api
"""
def setUp(self):
fapp.app.config['TESTING'] = True
fapp.app.config['MONGO_AUTO_START_REQUEST'] = False
self.dbname = 'testFoodbeaztDb'
fapp.app.config['MONGO_DBNAME'] = self.dbname
self.app = fapp.app.test_client()
with fapp.app.app_context():
fdb.drop_db(dbname=self.dbname)
fdb.setup(dbname=self.dbname, sample_data=True, debug=False)
self.price_table_item, self.no_discount_item, self.discount_item = fdb.setup_test_product(
dbname=self.dbname)
def tearDown(self):
with fapp.app.app_context():
fapp.mongo.cx.drop_database(self.dbname)
fapp.mongo.cx.close()
def test_get_invalid_order(self):
"""Test invalid order fetch"""
result = self.app.get('/api/order/' + str(ObjectId()))
self.assertEqual(result.status_code, 404)
def test_get_order(self):
"""Test valid order fetch"""
order_id = self.test_create_order()
result = self.app.get('/api/order/' + str(order_id))
self.assertEqual(result.status_code, 200)
order = json_util.loads(result.data.decode('utf-8'))
self.assertEqual(order.get('status'), 'PENDING')
self.assertEqual(order.get('payment_type'), 'cod')
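        # expected total: the three test products (90, 333, and 100 less the
        # 3.3% discount) plus the 40 delivery charge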
self.assertEqual(order.get('total'), 90.0 + 333.0 + (100.0-(100.0*3.3/100.0)) + 40.0)
def test_create_order(self):
"""Test create order"""
hdrs = {'Content-Type': 'application/json'}
request_data = json_util.dumps(self._get_order_data())
result = self.app.post(
'/api/order/-1', data=request_data, headers=hdrs)
if result.status_code != 200:
print(result.data)
self.assertEqual(result.status_code, 200)
res = json_util.loads(result.data.decode('utf-8'))
self.assertEqual(res.get('status'), 'success')
order = res.get('data')
self.assertEqual(order.get('delivery_charges'), 40)
self.assertEqual(order.get('total'), 90.0 + 333.0 + (100.0-(100.0*3.3/100.0)) + 40.0)
self.assertEqual(order.get('payment_status'), 'success')
self.assertEqual(order.get('status'), 'PENDING')
self.assertEqual(len(order.get('store_delivery_status')), 1)
for s, v in order.get('store_delivery_status').items():
self.assertEqual(v.get('status'), 'PENDING')
self.assertIsNotNone(v.get('sid'))
self.assertIsNotNone(v.get('notified_at'))
return order.get('_id')
def _get_order_data(self):
item1 = {
'name': 'test item 1',
'quantity': 1.0,
'product_id': self.price_table_item,
'price_detail': {'no': 1}
}
item2 = {
'name': 'test item 2',
'quantity': 1.0,
'product_id': self.no_discount_item,
}
item3 = {
'name': 'test item 3',
'quantity': 1.0,
'product_id': self.discount_item,
}
data = {
'items': [item1, item2, item3],
'delivery_details': {
'name': 'some hungry fellow',
'email': '[email protected]',
'phone': str(randint(9000000000, 9999999999)),
'pincode': '605001',
'address': 'some dude address'
},
'payment_type': 'cod'
}
return data
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
django-nonrel/django-nonrel
|
django/core/management/commands/loaddata.py
|
250
|
11042
|
import sys
import os
import gzip
import zipfile
from optparse import make_option
from django.conf import settings
from django.core import serializers
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import connections, router, transaction, DEFAULT_DB_ALIAS
from django.db.models import get_apps
from django.utils.itercompat import product
try:
import bz2
has_bz2 = True
except ImportError:
has_bz2 = False
class Command(BaseCommand):
help = 'Installs the named fixture(s) in the database.'
args = "fixture [fixture ...]"
option_list = BaseCommand.option_list + (
make_option('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
'fixtures into. Defaults to the "default" database.'),
)
def handle(self, *fixture_labels, **options):
using = options.get('database', DEFAULT_DB_ALIAS)
connection = connections[using]
self.style = no_style()
verbosity = int(options.get('verbosity', 1))
show_traceback = options.get('traceback', False)
# commit is a stealth option - it isn't really useful as
# a command line option, but it can be useful when invoking
# loaddata from within another script.
# If commit=True, loaddata will use its own transaction;
# if commit=False, the data load SQL will become part of
# the transaction in place when loaddata was invoked.
commit = options.get('commit', True)
# Keep a count of the installed objects and fixtures
fixture_count = 0
loaded_object_count = 0
fixture_object_count = 0
models = set()
humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database (if
# it isn't already initialized).
cursor = connection.cursor()
# Start transaction management. All fixtures are installed in a
# single transaction to ensure that all references are resolved.
if commit:
transaction.commit_unless_managed(using=using)
transaction.enter_transaction_management(using=using)
transaction.managed(True, using=using)
class SingleZipReader(zipfile.ZipFile):
def __init__(self, *args, **kwargs):
zipfile.ZipFile.__init__(self, *args, **kwargs)
if settings.DEBUG:
assert len(self.namelist()) == 1, "Zip-compressed fixtures must contain only one file."
def read(self):
return zipfile.ZipFile.read(self, self.namelist()[0])
compression_types = {
None: file,
'gz': gzip.GzipFile,
'zip': SingleZipReader
}
if has_bz2:
compression_types['bz2'] = bz2.BZ2File
app_module_paths = []
for app in get_apps():
if hasattr(app, '__path__'):
# It's a 'models/' subpackage
for path in app.__path__:
app_module_paths.append(path)
else:
# It's a models.py module
app_module_paths.append(app.__file__)
app_fixtures = [os.path.join(os.path.dirname(path), 'fixtures') for path in app_module_paths]
for fixture_label in fixture_labels:
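            # a label such as 'mydata.json.gz' splits into the fixture name,
            # an optional serializer format and an optional compression format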
parts = fixture_label.split('.')
if len(parts) > 1 and parts[-1] in compression_types:
compression_formats = [parts[-1]]
parts = parts[:-1]
else:
compression_formats = compression_types.keys()
if len(parts) == 1:
fixture_name = parts[0]
formats = serializers.get_public_serializer_formats()
else:
fixture_name, format = '.'.join(parts[:-1]), parts[-1]
if format in serializers.get_public_serializer_formats():
formats = [format]
else:
formats = []
if formats:
if verbosity >= 2:
self.stdout.write("Loading '%s' fixtures...\n" % fixture_name)
else:
self.stderr.write(
self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format.\n" %
(fixture_name, format)))
if commit:
transaction.rollback(using=using)
transaction.leave_transaction_management(using=using)
return
if os.path.isabs(fixture_name):
fixture_dirs = [fixture_name]
else:
fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']
for fixture_dir in fixture_dirs:
if verbosity >= 2:
self.stdout.write("Checking %s for fixtures...\n" % humanize(fixture_dir))
label_found = False
for combo in product([using, None], formats, compression_formats):
database, format, compression_format = combo
file_name = '.'.join(
p for p in [
fixture_name, database, format, compression_format
]
if p
)
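                    # e.g. fixture 'mydata' with database 'default', format
                    # 'json' and compression 'gz' -> 'mydata.default.json.gz'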
if verbosity >= 3:
self.stdout.write("Trying %s for %s fixture '%s'...\n" % \
(humanize(fixture_dir), file_name, fixture_name))
full_path = os.path.join(fixture_dir, file_name)
open_method = compression_types[compression_format]
try:
fixture = open_method(full_path, 'r')
if label_found:
fixture.close()
self.stderr.write(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting.\n" %
(fixture_name, humanize(fixture_dir))))
if commit:
transaction.rollback(using=using)
transaction.leave_transaction_management(using=using)
return
else:
fixture_count += 1
objects_in_fixture = 0
loaded_objects_in_fixture = 0
if verbosity >= 2:
self.stdout.write("Installing %s fixture '%s' from %s.\n" % \
(format, fixture_name, humanize(fixture_dir)))
try:
objects = serializers.deserialize(format, fixture, using=using)
for obj in objects:
objects_in_fixture += 1
if router.allow_syncdb(using, obj.object.__class__):
loaded_objects_in_fixture += 1
models.add(obj.object.__class__)
obj.save(using=using)
loaded_object_count += loaded_objects_in_fixture
fixture_object_count += objects_in_fixture
label_found = True
except (SystemExit, KeyboardInterrupt):
raise
except Exception:
import traceback
fixture.close()
if commit:
transaction.rollback(using=using)
transaction.leave_transaction_management(using=using)
if show_traceback:
traceback.print_exc()
else:
self.stderr.write(
self.style.ERROR("Problem installing fixture '%s': %s\n" %
(full_path, ''.join(traceback.format_exception(sys.exc_type,
sys.exc_value, sys.exc_traceback)))))
return
fixture.close()
# If the fixture we loaded contains 0 objects, assume that an
# error was encountered during fixture loading.
if objects_in_fixture == 0:
self.stderr.write(
self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)\n" %
(fixture_name)))
if commit:
transaction.rollback(using=using)
transaction.leave_transaction_management(using=using)
return
except Exception, e:
if verbosity >= 2:
self.stdout.write("No %s fixture '%s' in %s.\n" % \
(format, fixture_name, humanize(fixture_dir)))
# If we found even one object in a fixture, we need to reset the
# database sequences.
if loaded_object_count > 0:
sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
if sequence_sql:
if verbosity >= 2:
self.stdout.write("Resetting sequences\n")
for line in sequence_sql:
cursor.execute(line)
if commit:
transaction.commit(using=using)
transaction.leave_transaction_management(using=using)
if fixture_object_count == 0:
if verbosity >= 1:
self.stdout.write("No fixtures found.\n")
else:
if verbosity >= 1:
if fixture_object_count == loaded_object_count:
self.stdout.write("Installed %d object(s) from %d fixture(s)\n" % (
loaded_object_count, fixture_count))
else:
self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)\n" % (
loaded_object_count, fixture_object_count, fixture_count))
# Close the DB connection. This is required as a workaround for an
# edge case in MySQL: if the same connection is used to
# create tables, load data, and query, the query can return
# incorrect results. See Django #7572, MySQL #37735.
if commit:
connection.close()
|
bsd-3-clause
|
OndinaHQ/Tracker
|
markdown/extensions/extra.py
|
130
|
1761
|
#!/usr/bin/env python
"""
Python-Markdown Extra Extension
===============================
A compilation of various Python-Markdown extensions that imitates
[PHP Markdown Extra](http://michelf.com/projects/php-markdown/extra/).
Note that each of the individual extensions still need to be available
on your PYTHONPATH. This extension simply wraps them all up as a
convenience so that only one extension needs to be listed when
initiating Markdown. See the documentation for each individual
extension for specifics about that extension.
In the event that one or more of the supported extensions are not
available for import, Markdown will issue a warning and simply continue
without that extension.
There may be additional extensions that are distributed with
Python-Markdown that are not included here in Extra. Those extensions
are not part of PHP Markdown Extra, and therefore, not part of
Python-Markdown Extra. If you really would like Extra to include
additional extensions, we suggest creating your own clone of Extra
under a differant name. You could also edit the `extensions` global
variable defined below, but be aware that such changes may be lost
when you upgrade to any future version of Python-Markdown.
"""
import markdown
extensions = ['fenced_code',
'footnotes',
'headerid',
'def_list',
'tables',
'abbr',
]
class ExtraExtension(markdown.Extension):
""" Add various extensions to Markdown class."""
def extendMarkdown(self, md, md_globals):
""" Register extension instances. """
md.registerExtensions(extensions, self.config)
def makeExtension(configs={}):
return ExtraExtension(configs=dict(configs))
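# Illustrative usage (not part of the original module): Extra is enabled like
# any other bundled extension, e.g.
#   import markdown
#   html = markdown.markdown(text, ['extra'])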
|
gpl-3.0
|
drawquest/drawquest-web
|
website/canvas/migrations/0101_auto__chg_field_usermoderationlog_moderator__chg_field_commentmoderati.py
|
2
|
16002
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'UserModerationLog.moderator'
db.alter_column('canvas_usermoderationlog', 'moderator_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True))
# Changing field 'CommentModerationLog.moderator'
db.alter_column('canvas_commentmoderationlog', 'moderator_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True))
def backwards(self, orm):
# Changing field 'UserModerationLog.moderator'
db.alter_column('canvas_usermoderationlog', 'moderator_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User']))
# Changing field 'CommentModerationLog.moderator'
db.alter_column('canvas_commentmoderationlog', 'moderator_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User']))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'canvas.bestof': {
'Meta': {'object_name': 'BestOf'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'best_of'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'chosen_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'best_of'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {})
},
'canvas.category': {
'Meta': {'object_name': 'Category'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'founded': ('django.db.models.fields.FloatField', [], {'default': '1298956320'}),
'founder': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'founded_categories'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderators': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'moderated_categories'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.comment': {
'Meta': {'object_name': 'Comment'},
'anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'comments'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'judged': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ot_hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'replies'", 'null': 'True', 'blank': 'True', 'to': "orm['canvas.Comment']"}),
'parent_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'replied_comment': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['canvas.Comment']", 'null': 'True', 'blank': 'True'}),
'reply_content': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'used_in_comments'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'reply_text': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'blank': 'True'}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0', 'db_index': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.commentflag': {
'Meta': {'object_name': 'CommentFlag'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'undone': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'flags'", 'to': "orm['auth.User']"})
},
'canvas.commentmoderationlog': {
'Meta': {'object_name': 'CommentModerationLog'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderated_comments_log'", 'to': "orm['auth.User']"}),
'visibility': ('django.db.models.fields.IntegerField', [], {})
},
'canvas.commentpin': {
'Meta': {'object_name': 'CommentPin'},
'auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'comment': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.commentsticker': {
'Meta': {'object_name': 'CommentSticker'},
'comment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'stickers'", 'to': "orm['canvas.Comment']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'type_id': ('django.db.models.fields.IntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
'canvas.content': {
'Meta': {'object_name': 'Content'},
'alpha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'animated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'default': "'0.0.0.0'", 'max_length': '15'}),
'remix_of': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'remixes'", 'null': 'True', 'to': "orm['canvas.Content']"}),
'remix_text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'source_url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '4000', 'blank': 'True'}),
'stamps_used': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'used_as_stamp'", 'blank': 'True', 'to': "orm['canvas.Content']"}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'url_mapping': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.ContentUrlMapping']", 'null': 'True', 'blank': 'True'}),
'visibility': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'canvas.contenturlmapping': {
'Meta': {'object_name': 'ContentUrlMapping'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'canvas.facebookuser': {
'Meta': {'object_name': 'FacebookUser'},
'email': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'fb_uid': ('django.db.models.fields.BigIntegerField', [], {'unique': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_invited': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'canvas.followcategory': {
'Meta': {'object_name': 'FollowCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'followers'", 'to': "orm['canvas.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'following'", 'to': "orm['auth.User']"})
},
'canvas.invitecode': {
'Meta': {'object_name': 'InviteCode'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invitee': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'invited_from'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"}),
'inviter': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'sent_invites'", 'null': 'True', 'blank': 'True', 'to': "orm['auth.User']"})
},
'canvas.stashcontent': {
'Meta': {'object_name': 'StashContent'},
'content': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['canvas.Content']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'canvas.userinfo': {
'Meta': {'object_name': 'UserInfo'},
'free_invites': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invite_bypass': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'last_activity_check': ('canvas.util.UnixTimestampField', [], {'default': '0'}),
'post_anonymously': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'power_level': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'})
},
'canvas.usermoderationlog': {
'Meta': {'object_name': 'UserModerationLog'},
'action': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'moderator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'note': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('canvas.util.UnixTimestampField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'moderation_log'", 'to': "orm['auth.User']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['canvas']
|
bsd-3-clause
|
tareqalayan/ansible
|
test/integration/targets/connection/plugin/dummy.py
|
56
|
1089
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author:
- John Doe
connection: dummy
short_description: defective connection plugin
description:
- defective connection plugin
version_added: "2.0"
options: {}
"""
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.plugins.connection import ConnectionBase
class Connection(ConnectionBase):
transport = 'dummy'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS)
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
raise AnsibleError('an error with {{ some Jinja }}')
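    # NOTE: this plugin is deliberately defective (see DOCUMENTATION): the
    # method below shadows the 'transport' class attribute defined above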
def transport(self):
pass
def _connect(self):
pass
def exec_command(self, cmd, in_data=None, sudoable=True):
pass
def put_file(self, in_path, out_path):
pass
def fetch_file(self, in_path, out_path):
pass
def close(self):
pass
|
gpl-3.0
|
gunesacar/tor-browser-bundle-testsuite
|
selenium-tests/test_fp_screen_coords.py
|
2
|
1032
|
#!/usr/bin/python
import tbbtest
class Test(tbbtest.TBBTest):
def test_screen_coords(self):
# https://gitweb.torproject.org/torbrowser.git/blob/HEAD:/src/current-patches/firefox/0021-Do-not-expose-physical-screen-info.-via-window-and-w.patch
driver = self.driver
js = driver.execute_script
# check that screenX, screenY are 0
self.assertEqual(True, js("return screenX === 0"))
self.assertEqual(True, js("return screenY === 0"))
# check that mozInnerScreenX, mozInnerScreenY are 0
self.assertEqual(True, js("return mozInnerScreenX === 0"))
self.assertEqual(True, js("return mozInnerScreenY === 0"))
# check that screenLeft, screenTop are 0
self.assertEqual(True, js("return screen.left === 0"))
self.assertEqual(True, js("return screen.top === 0"))
# check that availLeft, availTop are 0
self.assertEqual(True, js("return screen.availLeft === 0"))
self.assertEqual(True, js("return screen.availTop === 0"))
|
cc0-1.0
|
zestrada/nova-cs498cc
|
nova/tests/api/openstack/compute/contrib/test_used_limits.py
|
9
|
4910
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack.compute.contrib import used_limits
from nova.api.openstack.compute import limits
from nova.api.openstack import wsgi
import nova.context
from nova import quota
from nova import test
class FakeRequest(object):
def __init__(self, context, reserved=False):
self.environ = {'nova.context': context}
self.reserved = reserved
self.GET = {'reserved': 1} if reserved else {}
class UsedLimitsTestCase(test.TestCase):
def setUp(self):
"""Run before each test."""
super(UsedLimitsTestCase, self).setUp()
self.controller = used_limits.UsedLimitsController()
self.fake_context = nova.context.RequestContext('fake', 'fake')
def _do_test_used_limits(self, reserved):
fake_req = FakeRequest(self.fake_context, reserved=reserved)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj)
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
}
limits = {}
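        # derive distinct limit/in_use/reserved numbers for each quota from
        # the length of its display name, so every assertion below checks a
        # different value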
for display_name, q in quota_map.iteritems():
limits[q] = {'limit': len(display_name),
'in_use': len(display_name) / 2,
'reserved': len(display_name) / 3}
def stub_get_project_quotas(context, project_id, usages=True):
return limits
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
for used_limit, value in abs_limits.iteritems():
r = limits[quota_map[used_limit]]['reserved'] if reserved else 0
self.assertEqual(value,
limits[quota_map[used_limit]]['in_use'] + r)
def test_used_limits_basic(self):
self._do_test_used_limits(False)
def test_used_limits_with_reserved(self):
self._do_test_used_limits(True)
def test_used_ram_added(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {
"maxTotalRAMSize": 512,
},
},
}
res = wsgi.ResponseObject(obj)
def stub_get_project_quotas(context, project_id, usages=True):
return {'ram': {'limit': 512, 'in_use': 256}}
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
self.assertTrue('totalRAMUsed' in abs_limits)
self.assertEqual(abs_limits['totalRAMUsed'], 256)
def test_no_ram_quota(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj)
def stub_get_project_quotas(context, project_id, usages=True):
return {}
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.controller.index(fake_req, res)
abs_limits = res.obj['limits']['absolute']
self.assertFalse('totalRAMUsed' in abs_limits)
def test_used_limits_xmlns(self):
fake_req = FakeRequest(self.fake_context)
obj = {
"limits": {
"rate": [],
"absolute": {},
},
}
res = wsgi.ResponseObject(obj, xml=limits.LimitsTemplate)
res.preserialize('xml')
def stub_get_project_quotas(context, project_id, usages=True):
return {}
self.stubs.Set(quota.QUOTAS, "get_project_quotas",
stub_get_project_quotas)
self.controller.index(fake_req, res)
response = res.serialize(None, 'xml')
self.assertTrue(used_limits.XMLNS in response.body)
|
apache-2.0
|
Endika/odoo-grupoesoc-addons
|
important_fields_lead/__openerp__.py
|
2
|
1168
|
# -*- encoding: utf-8 -*-
# Odoo, Open Source Management Solution
# Copyright (C) 2014-2015 Grupo ESOC <www.grupoesoc.es>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
{
"name": "Important Fields for Leads",
"version": "2.0",
"category": "Customer Relationship Management",
"summary": "Highlight important fields in leads",
"author": "Grupo ESOC",
"website": "http://www.grupoesoc.es",
"license": "AGPL-3",
"depends": [
"crm",
"important_fields",
],
"data": [
"view/crm_lead.xml",
],
}
|
agpl-3.0
|
vritant/subscription-manager
|
src/subscription_manager/i18n.py
|
3
|
1255
|
# Copyright (c) 2011 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# Localization domain:
APP = 'rhsm'
# Directory where translations are deployed:
DIR = '/usr/share/locale/'
def configure_i18n(with_glade=False):
"""
Configure internationalization for the application. Should only be
called once per invocation. (once for CLI, once for GUI)
"""
import gettext
import locale
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error:
locale.setlocale(locale.LC_ALL, 'C')
gettext.bindtextdomain(APP, DIR)
gettext.textdomain(APP)
# if (with_glade):
# import gtk.glade
# gtk.glade.bindtextdomain(APP, DIR)
# gtk.glade.textdomain(APP)
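# Illustrative call sequence (not part of the original module):
#   from subscription_manager import i18n
#   i18n.configure_i18n()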
|
gpl-2.0
|
antontsv/r2d2
|
sound_manager.py
|
1
|
2110
|
#!/usr/bin/env python
import sys
import os
import time
import random
import alsaaudio
import pyttsx
from subprocess import call
#
# Our sound coordinator
#
class SoundManager ():
SOUND_PLAYER = 'mpg123'
SOUNDS_DIR = os.path.dirname(__file__) + '/sounds'
SOUND_FILE_EXT = 'mp3'
sound_list = []
next_in_queue = []
def __init__(self):
self.requires(self.SOUND_PLAYER)
self.mixer = alsaaudio.Mixer('PCM')
self.speech_engine = pyttsx.init()
ext = ".%s" % self.SOUND_FILE_EXT
for dirpath, dirnames, filenames in os.walk(self.SOUNDS_DIR):
for filename in filenames:
if filename.endswith(ext):
full_path = os.path.join(dirpath, filename)
self.sound_list.append(full_path)
@staticmethod
def requires(utility):
devnull = open(os.devnull, 'w')
if call(['which', utility], stdout=devnull, stderr=devnull) != 0:
print "Sound manager requires '%s' utility" % utility
devnull.close()
sys.exit(1)
else:
devnull.close()
def play(self, filepath):
devnull = open(os.devnull, 'w')
ret = call([self.SOUND_PLAYER, filepath], stdout=devnull, stderr=devnull)
devnull.close()
return ret == 0
def play_random(self):
l = len(self.sound_list)
if l > 0:
return self.play(self.sound_list[random.randint(0,l-1)])
else:
return False
def play_next(self):
if len(self.next_in_queue) <= 0:
l = len(self.sound_list)
if l > 0:
self.next_in_queue = range(0,l)
random.shuffle(self.next_in_queue)
else:
return False
sound_position_id = self.next_in_queue.pop()
return self.play(self.sound_list[sound_position_id])
def say(self, text):
self.speech_engine.say(text)
self.speech_engine.runAndWait()
def mute(self):
self.mixer.setmute(1)
def unmute(self):
self.mixer.setmute(0)
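# A minimal usage sketch, assuming a working ALSA mixer, mpg123 and a
# populated sounds/ directory next to this file (hardware dependent,
# so illustrative only):
if __name__ == '__main__':
    manager = SoundManager()
    manager.say("Sound check")
    if not manager.play_random():
        print "No .%s files found in %s" % (SoundManager.SOUND_FILE_EXT,
                                            SoundManager.SOUNDS_DIR)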
|
mit
|
msmbuilder/msmbuilder
|
msmbuilder/decomposition/kernel_approximation.py
|
9
|
4210
|
# Author: Carlos Xavier Hernandez <[email protected]>
# Contributors: Muneeb Sultan <[email protected]>, Evan Feinberg <[email protected]>
# Copyright (c) 2015, Stanford University and the Authors
# All rights reserved.
from __future__ import absolute_import
import numpy as np
from scipy.linalg import svd
from sklearn import kernel_approximation
from sklearn.metrics.pairwise import pairwise_kernels
from .base import MultiSequenceDecompositionMixin
__all__ = ['Nystroem', 'LandmarkNystroem']
class Nystroem(MultiSequenceDecompositionMixin, kernel_approximation.Nystroem):
__doc__ = kernel_approximation.Nystroem.__doc__
class LandmarkNystroem(Nystroem):
"""Approximate a kernel map using a subset of the training data.
Constructs an approximate feature map for an arbitrary kernel
using a subset of the data as basis.
Read more in the :ref:`User Guide <nystroem_kernel_approx>`.
Parameters
----------
landmarks : ndarray of shape (n_frames, n_features)
        Custom landmark points for the Nystroem approximation
kernel : string or callable, default="rbf"
Kernel map to be approximated. A callable should accept two arguments
and the keyword arguments passed to this object as kernel_params, and
should return a floating point number.
n_components : int
Number of features to construct.
How many data points will be used to construct the mapping.
gamma : float, default=None
        Gamma parameter for the RBF, polynomial, exponential, chi2 and
sigmoid kernels. Interpretation of the default value is left to
the kernel; see the documentation for sklearn.metrics.pairwise.
Ignored by other kernels.
degree : float, default=3
Degree of the polynomial kernel. Ignored by other kernels.
coef0 : float, default=1
Zero coefficient for polynomial and sigmoid kernels.
Ignored by other kernels.
kernel_params : mapping of string to any, optional
Additional parameters (keyword arguments) for kernel function passed
as callable object.
random_state : {int, RandomState}, optional
If int, random_state is the seed used by the random number generator;
if RandomState instance, random_state is the random number generator.
Attributes
----------
components_ : array, shape (n_components, n_features)
Subset of training points used to construct the feature map.
component_indices_ : array, shape (n_components)
Indices of ``components_`` in the training set.
normalization_ : array, shape (n_components, n_components)
Normalization matrix needed for embedding.
Square root of the kernel matrix on ``components_``.
References
----------
.. [1] Williams, C.K.I. and Seeger, M.
"Using the Nystroem method to speed up kernel machines",
Advances in neural information processing systems 2001
.. [2] T. Yang, Y. Li, M. Mahdavi, R. Jin and Z. Zhou
"Nystroem Method vs Random Fourier Features: A Theoretical and Empirical
Comparison",
Advances in Neural Information Processing Systems 2012
See also
--------
Nystroem : Approximate a kernel map using a subset of the training data.
"""
def __init__(self, landmarks=None, **kwargs):
if (landmarks is not None and
not isinstance(landmarks, (int, np.ndarray))):
raise ValueError('landmarks should be an int, ndarray, or None.')
self.landmarks = landmarks
super(LandmarkNystroem, self).__init__(**kwargs)
def fit(self, sequences, y=None):
if self.landmarks is not None:
basis_kernel = pairwise_kernels(self.landmarks, metric=self.kernel,
filter_params=True,
**self._get_kernel_params())
U, S, V = svd(basis_kernel)
S = np.maximum(S, 1e-12)
self.normalization_ = np.dot(U * 1. / np.sqrt(S), V)
self.components_ = self.landmarks
self.component_indices_ = None
return self
        return super(Nystroem, self).fit(sequences, y=y)
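# A minimal usage sketch, assuming msmbuilder is installed; the random
# arrays stand in for real feature trajectories, and fit_transform over
# a list of sequences is assumed from MultiSequenceDecompositionMixin:
if __name__ == '__main__':
    trajectories = [np.random.randn(100, 10) for _ in range(3)]
    model = LandmarkNystroem(landmarks=trajectories[0][:20],
                             n_components=20, random_state=42)
    transformed = model.fit_transform(trajectories)
    print(transformed[0].shape)  # expected: (100, 20)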
|
lgpl-2.1
|
manahl/PythonTrainingExercises
|
Beginners/strings/solution/strings.py
|
1
|
2019
|
"""With this string:
'monty pythons flying circus'
Create a function that returns a sorted string with no duplicate characters
(keep any whitespace):
Example: ' cfghilmnoprstuy'
Create a function that returns the words in reverse order:
Example: ['circus', 'flying', 'pythons', 'monty']
Create a function that returns a list of 4 character strings:
Example: ['mont', 'y py', 'thon', 's fl', 'ying', ' cir', 'cus']
Created on 3 Nov 2015
@author: paulross
"""
import pytest
def no_duplicates(a_string):
# set(a_string) will remove duplicates
# sorted(sequence) will create a sorted list of sequence
# ''.join(sequence) will create a single string out of a sequence of strings
# This can all be done in one line
return ''.join(sorted(set(a_string)))
def reversed_words(a_string):
# a_string.split() will create a list of words
# reversed(sequence) will create a reversed sequence iterator
# list(iterator) will iterate across the sequence and create a list of those objects
# This can all be done in one line
return list(reversed(a_string.split()))
def four_char_strings(a_string):
# The key to this puzzle is to build it up in stages
# Note that:
# range(0,len(a_string),4)
# Gives: [0, 4, 8, 12, 16, 20, 24]
# And a_string[0:4]
# Gives 'mont'
# And a_string[4:8]
# Gives 'y py'
# And so on so a_string[i:i+4] seems useful
# This can all be done in one line
return [a_string[i:i+4] for i in range(0,len(a_string),4)]
def test_no_duplicates():
s = 'monty pythons flying circus'
assert no_duplicates(s) == ' cfghilmnoprstuy'
def test_reversed_words():
s = 'monty pythons flying circus'
assert reversed_words(s) == ['circus', 'flying', 'pythons', 'monty']
def test_four_char_strings():
s = 'monty pythons flying circus'
assert four_char_strings(s) == ['mont', 'y py', 'thon', 's fl', 'ying', ' cir', 'cus']
def main():
return pytest.main(__file__)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
JianyuWang/nova
|
nova/virt/diagnostics.py
|
103
|
7609
|
# Copyright (c) 2014 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from nova import exception
from nova.i18n import _
class CpuDiagnostics(object):
def __init__(self, time=0):
"""Create a new CpuDiagnostics object
:param time: CPU Time in nano seconds (Integer)
"""
self.time = time
class NicDiagnostics(object):
def __init__(self, mac_address='00:00:00:00:00:00',
rx_octets=0, rx_errors=0, rx_drop=0, rx_packets=0,
tx_octets=0, tx_errors=0, tx_drop=0, tx_packets=0):
"""Create a new NicDiagnostics object
:param mac_address: Mac address of the interface (String)
:param rx_octets: Received octets (Integer)
:param rx_errors: Received errors (Integer)
:param rx_drop: Received packets dropped (Integer)
:param rx_packets: Received packets (Integer)
:param tx_octets: Transmitted Octets (Integer)
:param tx_errors: Transmit errors (Integer)
:param tx_drop: Transmit dropped packets (Integer)
:param tx_packets: Transmit packets (Integer)
"""
self.mac_address = mac_address
self.rx_octets = rx_octets
self.rx_errors = rx_errors
self.rx_drop = rx_drop
self.rx_packets = rx_packets
self.tx_octets = tx_octets
self.tx_errors = tx_errors
self.tx_drop = tx_drop
self.tx_packets = tx_packets
class DiskDiagnostics(object):
def __init__(self, id='', read_bytes=0, read_requests=0,
write_bytes=0, write_requests=0, errors_count=0):
"""Create a new DiskDiagnostics object
:param id: Disk ID (String)
:param read_bytes: Disk reads in bytes(Integer)
:param read_requests: Read requests (Integer)
:param write_bytes: Disk writes in bytes (Integer)
:param write_requests: Write requests (Integer)
:param errors_count: Disk errors (Integer)
"""
self.id = id
self.read_bytes = read_bytes
self.read_requests = read_requests
self.write_bytes = write_bytes
self.write_requests = write_requests
self.errors_count = errors_count
class MemoryDiagnostics(object):
def __init__(self, maximum=0, used=0):
"""Create a new MemoryDiagnostics object
:param maximum: Amount of memory provisioned for the VM in MB (Integer)
:param used: Amount of memory used by the VM in MB (Integer)
"""
self.maximum = maximum
self.used = used
class Diagnostics(object):
# Version 1.0: Initial version
version = '1.0'
def __init__(self, state=None, driver=None, hypervisor_os=None,
uptime=0, cpu_details=None, nic_details=None,
disk_details=None, config_drive=False):
"""Create a new diagnostics object
:param state: The current state of the VM. Example values are:
'pending', 'running', 'paused', 'shutdown', 'crashed',
'suspended' and 'building' (String)
:param driver: A string denoting the driver on which the VM is running.
Examples may be: 'libvirt', 'xenapi', 'hyperv' and
'vmwareapi' (String)
:param hypervisor_os: A string denoting the hypervisor OS (String)
:param uptime: The amount of time in seconds that the VM has been
running (Integer)
        :param cpu_details: An array of CpuDiagnostics or None.
        :param nic_details: An array of NicDiagnostics or None.
        :param disk_details: An array of DiskDiagnostics or None.
:param config_drive: Indicates if the config drive is supported on the
instance (Boolean)
"""
self.state = state
self.driver = driver
self.hypervisor_os = hypervisor_os
self.uptime = uptime
self.config_drive = config_drive
if cpu_details:
self._validate_type(cpu_details, CpuDiagnostics, 'cpu_details')
self.cpu_details = cpu_details
else:
self.cpu_details = []
if nic_details:
self._validate_type(nic_details, NicDiagnostics, 'nic_details')
self.nic_details = nic_details
else:
self.nic_details = []
if disk_details:
self._validate_type(disk_details, DiskDiagnostics, 'disk_details')
self.disk_details = disk_details
else:
self.disk_details = []
self.memory_details = MemoryDiagnostics()
def _validate_type(self, input, type, str_input):
if not isinstance(input, list):
reason = _("Invalid type for %s") % str_input
raise exception.InvalidInput(reason=reason)
for i in input:
if not isinstance(i, type):
reason = _("Invalid type for %s entry") % str_input
raise exception.InvalidInput(reason=reason)
def add_cpu(self, time=0):
self.cpu_details.append(CpuDiagnostics(time=time))
def add_nic(self, mac_address='00:00:00:00:00:00',
rx_octets=0, rx_errors=0, rx_drop=0, rx_packets=0,
tx_octets=0, tx_errors=0, tx_drop=0, tx_packets=0):
self.nic_details.append(NicDiagnostics(mac_address=mac_address,
rx_octets=rx_octets,
rx_errors=rx_errors,
rx_drop=rx_drop,
rx_packets=rx_packets,
tx_octets=tx_octets,
tx_errors=tx_errors,
tx_drop=tx_drop,
tx_packets=tx_packets))
def add_disk(self, id='', read_bytes=0, read_requests=0,
write_bytes=0, write_requests=0, errors_count=0):
self.disk_details.append(DiskDiagnostics(id=id,
read_bytes=read_bytes,
read_requests=read_requests,
write_bytes=write_bytes,
write_requests=write_requests,
errors_count=errors_count))
def serialize(self):
s = {}
for k, v in six.iteritems(self.__dict__):
# Treat case of CpuDiagnostics, NicDiagnostics and
# DiskDiagnostics - these are lists
if isinstance(v, list):
l = []
for value in v:
l.append(value.__dict__)
s[k] = l
# Treat case of MemoryDiagnostics
elif isinstance(v, MemoryDiagnostics):
s[k] = v.__dict__
else:
s[k] = v
s['version'] = self.version
return s
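# A minimal usage sketch showing how a virt driver might populate and
# serialize a diagnostics object; all values below are made up:
if __name__ == '__main__':
    diags = Diagnostics(state='running', driver='libvirt',
                        hypervisor_os='ubuntu', uptime=46664,
                        config_drive=True)
    diags.add_cpu(time=17300000000)
    diags.add_nic(mac_address='01:23:45:67:89:ab',
                  rx_octets=2070139, tx_octets=140208)
    diags.add_disk(id='fake-disk-id', read_bytes=262144,
                   write_bytes=5778432)
    diags.memory_details = MemoryDiagnostics(maximum=524288, used=0)
    print(diags.serialize())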
|
apache-2.0
|
M4sse/chromium.src
|
tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/smoke.py
|
9
|
2496
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SmokePage(page_module.Page):
def __init__(self, url, page_set, name=''):
super(SmokePage, self).__init__(url=url, page_set=page_set, name=name)
self.archive_data_file = '../data/chrome_proxy_smoke.json'
class Page1(SmokePage):
"""
Why: Check chrome proxy response headers.
"""
def __init__(self, page_set):
super(Page1, self).__init__(
url='http://aws1.mdw.la/fw/',
page_set=page_set,
name='header validation')
class Page2(SmokePage):
"""
Why: Check data compression
"""
def __init__(self, page_set):
super(Page2, self).__init__(
url='http://aws1.mdw.la/static/',
page_set=page_set,
name='compression: image')
class Page3(SmokePage):
"""
Why: Check bypass
"""
def __init__(self, page_set):
super(Page3, self).__init__(
url='http://aws1.mdw.la/bypass/',
page_set=page_set,
name='bypass')
self.restart_after = True
class Page4(SmokePage):
"""
Why: Check data compression
"""
def __init__(self, page_set):
super(Page4, self).__init__(
url='http://aws1.mdw.la/static/',
page_set=page_set,
name='compression: javascript')
class Page5(SmokePage):
"""
Why: Check data compression
"""
def __init__(self, page_set):
super(Page5, self).__init__(
url='http://aws1.mdw.la/static/',
page_set=page_set,
name='compression: css')
class Page6(SmokePage):
"""
Why: Expect 'malware ahead' page. Use a short navigation timeout because no
response will be received.
"""
def __init__(self, page_set):
super(Page6, self).__init__(
url='http://www.ianfette.org/',
page_set=page_set,
name='safebrowsing')
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self, timeout_in_seconds=5)
class SmokePageSet(page_set_module.PageSet):
""" Chrome proxy test sites """
def __init__(self):
super(SmokePageSet, self).__init__(
archive_data_file='../data/chrome_proxy_smoke.json')
self.AddUserStory(Page1(self))
self.AddUserStory(Page2(self))
self.AddUserStory(Page3(self))
self.AddUserStory(Page4(self))
self.AddUserStory(Page5(self))
self.AddUserStory(Page6(self))
|
bsd-3-clause
|
cogeorg/black_rhino
|
examples/firesales_simple/networkx/algorithms/boundary.py
|
49
|
2604
|
"""
Routines to find the boundary of a set of nodes.
Edge boundaries are edges that have only one end
in the set of nodes.
Node boundaries are nodes outside the set of nodes
that have an edge to a node in the set.
"""
__author__ = """Aric Hagberg ([email protected])\nPieter Swart ([email protected])\nDan Schult ([email protected])"""
# Copyright (C) 2004-2008 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
__all__=['edge_boundary','node_boundary']
def edge_boundary(G, nbunch1, nbunch2=None):
"""Return the edge boundary.
Edge boundaries are edges that have only one end
in the given set of nodes.
Parameters
-----------
G : graph
A networkx graph
nbunch1 : list, container
Interior node set
nbunch2 : list, container
Exterior node set. If None then it is set to all of the
nodes in G not in nbunch1.
Returns
-------
elist : list
List of edges
Notes
------
Nodes in nbunch1 and nbunch2 that are not in G are ignored.
nbunch1 and nbunch2 are usually meant to be disjoint,
but in the interest of speed and generality, that is
not required here.
"""
if nbunch2 is None: # Then nbunch2 is complement of nbunch1
nset1=set((n for n in nbunch1 if n in G))
return [(n1,n2) for n1 in nset1 for n2 in G[n1] \
if n2 not in nset1]
nset2=set(nbunch2)
return [(n1,n2) for n1 in nbunch1 if n1 in G for n2 in G[n1] \
if n2 in nset2]
def node_boundary(G, nbunch1, nbunch2=None):
"""Return the node boundary.
    The node boundary is all nodes in the edge boundary of a given
    set of nodes that are not in the set.
Parameters
-----------
G : graph
A networkx graph
nbunch1 : list, container
Interior node set
nbunch2 : list, container
Exterior node set. If None then it is set to all of the
nodes in G not in nbunch1.
Returns
-------
nlist : list
List of nodes.
Notes
------
Nodes in nbunch1 and nbunch2 that are not in G are ignored.
nbunch1 and nbunch2 are usually meant to be disjoint,
but in the interest of speed and generality, that is
not required here.
"""
nset1=set(n for n in nbunch1 if n in G)
bdy=set()
for n1 in nset1:
bdy.update(G[n1])
bdy -= nset1
if nbunch2 is not None: # else nbunch2 is complement of nbunch1
bdy &= set(nbunch2)
return list(bdy)
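# A minimal usage sketch on the path graph 0-1-2-3, assuming networkx
# is importable for graph construction:
if __name__ == '__main__':
    import networkx as nx
    G = nx.path_graph(4)
    print edge_boundary(G, [0, 1])  # [(1, 2)]
    print node_boundary(G, [0, 1])  # [2]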
|
gpl-3.0
|
glibin/tortik
|
tortik_tests/postprocessor_test.py
|
1
|
1248
|
# -*- coding: utf-8 -*-
import tornado.web
import tornado.ioloop
from tornado.testing import AsyncHTTPTestCase
from tortik.page import RequestHandler
import tornado.curl_httpclient
def first_postprocessor(handler, data, callback):
callback(handler, data.replace('Hello,', 'Good'))
def second_postprocessor(handler, data, callback):
callback(handler, data.replace('Good world', 'Good bye'))
class MainHandler(RequestHandler):
postprocessors = [
first_postprocessor,
second_postprocessor,
]
def get(self):
self.complete('Hello, world!')
class Application(tornado.web.Application):
def __init__(self):
handlers = [
(r'/', MainHandler),
]
settings = dict(
debug=True,
)
tornado.web.Application.__init__(self, handlers, **settings)
class PostprocessorHTTPTestCase(AsyncHTTPTestCase):
def get_app(self):
return Application()
def get_new_ioloop(self):
return tornado.ioloop.IOLoop.instance()
def test_main(self):
self.http_client.fetch(self.get_url('/'), self.stop)
response = self.wait()
self.assertEqual(200, response.code)
self.assertIn(b'Good bye!', response.body)
|
mit
|
AgileInstitute/GtestSpike
|
GtestSpike/gtest-1.7.0/test/gtest_test_utils.py
|
1100
|
10812
|
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for Google C++ Testing Framework."""
__author__ = '[email protected] (Zhanyong Wan)'
import atexit
import os
import shutil
import sys
import tempfile
import unittest
_test_module = unittest
# Suppresses the 'Import not at the top of the file' lint complaint.
# pylint: disable-msg=C6204
try:
import subprocess
_SUBPROCESS_MODULE_AVAILABLE = True
except ImportError:
import popen2
_SUBPROCESS_MODULE_AVAILABLE = False
# pylint: enable-msg=C6204
GTEST_OUTPUT_VAR_NAME = 'GTEST_OUTPUT'
IS_WINDOWS = os.name == 'nt'
IS_CYGWIN = os.name == 'posix' and 'CYGWIN' in os.uname()[0]
# The environment variable for specifying the path to the premature-exit file.
PREMATURE_EXIT_FILE_ENV_VAR = 'TEST_PREMATURE_EXIT_FILE'
environ = os.environ.copy()
def SetEnvVar(env_var, value):
"""Sets/unsets an environment variable to a given value."""
if value is not None:
environ[env_var] = value
elif env_var in environ:
del environ[env_var]
# Here we expose a class from a particular module, depending on the
# environment. The comment suppresses the 'Invalid variable name' lint
# complaint.
TestCase = _test_module.TestCase # pylint: disable-msg=C6409
# Initially maps a flag to its default value. After
# _ParseAndStripGTestFlags() is called, maps a flag to its actual value.
_flag_map = {'source_dir': os.path.dirname(sys.argv[0]),
'build_dir': os.path.dirname(sys.argv[0])}
_gtest_flags_are_parsed = False
def _ParseAndStripGTestFlags(argv):
"""Parses and strips Google Test flags from argv. This is idempotent."""
# Suppresses the lint complaint about a global variable since we need it
# here to maintain module-wide state.
global _gtest_flags_are_parsed # pylint: disable-msg=W0603
if _gtest_flags_are_parsed:
return
_gtest_flags_are_parsed = True
for flag in _flag_map:
# The environment variable overrides the default value.
if flag.upper() in os.environ:
_flag_map[flag] = os.environ[flag.upper()]
# The command line flag overrides the environment variable.
i = 1 # Skips the program name.
while i < len(argv):
prefix = '--' + flag + '='
if argv[i].startswith(prefix):
_flag_map[flag] = argv[i][len(prefix):]
del argv[i]
break
else:
# We don't increment i in case we just found a --gtest_* flag
# and removed it from argv.
i += 1
def GetFlag(flag):
"""Returns the value of the given flag."""
# In case GetFlag() is called before Main(), we always call
# _ParseAndStripGTestFlags() here to make sure the --gtest_* flags
# are parsed.
_ParseAndStripGTestFlags(sys.argv)
return _flag_map[flag]
def GetSourceDir():
"""Returns the absolute path of the directory where the .py files are."""
return os.path.abspath(GetFlag('source_dir'))
def GetBuildDir():
"""Returns the absolute path of the directory where the test binaries are."""
return os.path.abspath(GetFlag('build_dir'))
_temp_dir = None
def _RemoveTempDir():
if _temp_dir:
shutil.rmtree(_temp_dir, ignore_errors=True)
atexit.register(_RemoveTempDir)
def GetTempDir():
"""Returns a directory for temporary files."""
global _temp_dir
if not _temp_dir:
_temp_dir = tempfile.mkdtemp()
return _temp_dir
def GetTestExecutablePath(executable_name, build_dir=None):
"""Returns the absolute path of the test binary given its name.
The function will print a message and abort the program if the resulting file
doesn't exist.
Args:
executable_name: name of the test binary that the test script runs.
build_dir: directory where to look for executables, by default
the result of GetBuildDir().
Returns:
The absolute path of the test binary.
"""
path = os.path.abspath(os.path.join(build_dir or GetBuildDir(),
executable_name))
if (IS_WINDOWS or IS_CYGWIN) and not path.endswith('.exe'):
path += '.exe'
if not os.path.exists(path):
message = (
        'Unable to find the test binary. Please make sure to provide a path\n'
'to the binary via the --build_dir flag or the BUILD_DIR\n'
'environment variable.')
print >> sys.stderr, message
sys.exit(1)
return path
def GetExitStatus(exit_code):
"""Returns the argument to exit(), or -1 if exit() wasn't called.
Args:
exit_code: the result value of os.system(command).
"""
if os.name == 'nt':
# On Windows, os.WEXITSTATUS() doesn't work and os.system() returns
# the argument to exit() directly.
return exit_code
else:
# On Unix, os.WEXITSTATUS() must be used to extract the exit status
# from the result of os.system().
if os.WIFEXITED(exit_code):
return os.WEXITSTATUS(exit_code)
else:
return -1
class Subprocess:
def __init__(self, command, working_dir=None, capture_stderr=True, env=None):
"""Changes into a specified directory, if provided, and executes a command.
Restores the old directory afterwards.
Args:
command: The command to run, in the form of sys.argv.
working_dir: The directory to change into.
capture_stderr: Determines whether to capture stderr in the output member
or to discard it.
env: Dictionary with environment to pass to the subprocess.
Returns:
An object that represents outcome of the executed process. It has the
following attributes:
terminated_by_signal True iff the child process has been terminated
by a signal.
      signal Signal that terminated the child process.
exited True iff the child process exited normally.
exit_code The code with which the child process exited.
output Child process's stdout and stderr output
combined in a string.
"""
    # The subprocess module is the preferable way of running programs
# since it is available and behaves consistently on all platforms,
# including Windows. But it is only available starting in python 2.4.
# In earlier python versions, we revert to the popen2 module, which is
# available in python 2.0 and later but doesn't provide required
# functionality (Popen4) under Windows. This allows us to support Mac
# OS X 10.4 Tiger, which has python 2.3 installed.
if _SUBPROCESS_MODULE_AVAILABLE:
if capture_stderr:
stderr = subprocess.STDOUT
else:
stderr = subprocess.PIPE
p = subprocess.Popen(command,
stdout=subprocess.PIPE, stderr=stderr,
cwd=working_dir, universal_newlines=True, env=env)
      # communicate returns a tuple with the file object for the child's
# output.
self.output = p.communicate()[0]
self._return_code = p.returncode
else:
old_dir = os.getcwd()
def _ReplaceEnvDict(dest, src):
# Changes made by os.environ.clear are not inheritable by child
# processes until Python 2.6. To produce inheritable changes we have
# to delete environment items with the del statement.
for key in dest.keys():
del dest[key]
dest.update(src)
# When 'env' is not None, backup the environment variables and replace
# them with the passed 'env'. When 'env' is None, we simply use the
# current 'os.environ' for compatibility with the subprocess.Popen
# semantics used above.
if env is not None:
old_environ = os.environ.copy()
_ReplaceEnvDict(os.environ, env)
try:
if working_dir is not None:
os.chdir(working_dir)
if capture_stderr:
p = popen2.Popen4(command)
else:
p = popen2.Popen3(command)
p.tochild.close()
self.output = p.fromchild.read()
ret_code = p.wait()
finally:
os.chdir(old_dir)
# Restore the old environment variables
# if they were replaced.
if env is not None:
_ReplaceEnvDict(os.environ, old_environ)
# Converts ret_code to match the semantics of
# subprocess.Popen.returncode.
if os.WIFSIGNALED(ret_code):
self._return_code = -os.WTERMSIG(ret_code)
else: # os.WIFEXITED(ret_code) should return True here.
self._return_code = os.WEXITSTATUS(ret_code)
if self._return_code < 0:
self.terminated_by_signal = True
self.exited = False
self.signal = -self._return_code
else:
self.terminated_by_signal = False
self.exited = True
self.exit_code = self._return_code
def Main():
"""Runs the unit test."""
# We must call _ParseAndStripGTestFlags() before calling
# unittest.main(). Otherwise the latter will be confused by the
# --gtest_* flags.
_ParseAndStripGTestFlags(sys.argv)
# The tested binaries should not be writing XML output files unless the
# script explicitly instructs them to.
# TODO([email protected]): Move this into Subprocess when we implement
# passing environment into it as a parameter.
if GTEST_OUTPUT_VAR_NAME in os.environ:
del os.environ[GTEST_OUTPUT_VAR_NAME]
_test_module.main()
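# A minimal usage sketch of the Subprocess wrapper, assuming a POSIX
# 'echo' binary on the PATH:
if __name__ == '__main__':
  p = Subprocess(['echo', 'hello'])
  print 'exited=%s exit_code=%s output=%r' % (p.exited, p.exit_code, p.output)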
|
mit
|
fenglu-g/incubator-airflow
|
airflow/hooks/presto_hook.py
|
5
|
4772
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from builtins import str
from pyhive import presto
from pyhive.exc import DatabaseError
from requests.auth import HTTPBasicAuth
from airflow.hooks.dbapi_hook import DbApiHook
class PrestoException(Exception):
pass
class PrestoHook(DbApiHook):
"""
Interact with Presto through PyHive!
>>> ph = PrestoHook()
>>> sql = "SELECT count(1) AS num FROM airflow.static_babynames"
>>> ph.get_records(sql)
[[340698]]
"""
conn_name_attr = 'presto_conn_id'
default_conn_name = 'presto_default'
def get_conn(self):
"""Returns a connection object"""
db = self.get_connection(self.presto_conn_id)
reqkwargs = None
if db.password is not None:
reqkwargs = {'auth': HTTPBasicAuth(db.login, db.password)}
return presto.connect(
host=db.host,
port=db.port,
username=db.login,
source=db.extra_dejson.get('source', 'airflow'),
protocol=db.extra_dejson.get('protocol', 'http'),
catalog=db.extra_dejson.get('catalog', 'hive'),
requests_kwargs=reqkwargs,
schema=db.schema)
@staticmethod
def _strip_sql(sql):
return sql.strip().rstrip(';')
@staticmethod
def _get_pretty_exception_message(e):
"""
        Parses a DatabaseError to provide a better error message
"""
if (hasattr(e, 'message') and
'errorName' in e.message and
'message' in e.message):
return ('{name}: {message}'.format(
name=e.message['errorName'],
message=e.message['message']))
else:
return str(e)
def get_records(self, hql, parameters=None):
"""
Get a set of records from Presto
"""
try:
return super(PrestoHook, self).get_records(
self._strip_sql(hql), parameters)
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
def get_first(self, hql, parameters=None):
"""
Returns only the first row, regardless of how many rows the query
returns.
"""
try:
return super(PrestoHook, self).get_first(
self._strip_sql(hql), parameters)
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
def get_pandas_df(self, hql, parameters=None):
"""
Get a pandas dataframe from a sql query.
"""
import pandas
cursor = self.get_cursor()
try:
cursor.execute(self._strip_sql(hql), parameters)
data = cursor.fetchall()
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
column_descriptions = cursor.description
if data:
df = pandas.DataFrame(data)
df.columns = [c[0] for c in column_descriptions]
else:
df = pandas.DataFrame()
return df
def run(self, hql, parameters=None):
"""
Execute the statement against Presto. Can be used to create views.
"""
return super(PrestoHook, self).run(self._strip_sql(hql), parameters)
# TODO Enable commit_every once PyHive supports transaction.
# Unfortunately, PyHive 0.5.1 doesn't support transaction for now,
# whereas Presto 0.132+ does.
def insert_rows(self, table, rows, target_fields=None):
"""
A generic way to insert a set of tuples into a table.
:param table: Name of the target table
:type table: str
:param rows: The rows to insert into the table
:type rows: iterable of tuples
:param target_fields: The names of the columns to fill in the table
:type target_fields: iterable of strings
"""
super(PrestoHook, self).insert_rows(table, rows, target_fields, 0)
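# A minimal usage sketch, assuming an Airflow deployment with a
# reachable 'presto_default' connection; the query is illustrative:
if __name__ == '__main__':
    hook = PrestoHook()
    print(hook.get_pandas_df('SELECT 1 AS answer'))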
|
apache-2.0
|
antoinearnoud/openfisca-france
|
openfisca_france/scripts/parameters/baremes_ipp/convert_ipp_xlsx_to_openfisca_xml.py
|
1
|
5031
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Convertit les barèmes de l'IPP au format XLSX vers le format XML des paramètres d'OpenFisca.
Nécessite l'installation de :
- ssconvert :
- Debian : `apt install gnumeric`
- macOS : `brew install gnumeric`
- xlrd : `pip install xlrd`
"""
import argparse
import glob
import logging
import os
import subprocess
import sys
import tempfile
import urllib
import zipfile
import xls_to_yaml_raw
import yaml_clean_to_xml
import yaml_raw_to_yaml_clean
from merge_ipp_xml_files_with_openfisca_parameters import merge_ipp_xml_files_with_openfisca_parameters
app_name = os.path.splitext(os.path.basename(__file__))[0]
log = logging.getLogger(app_name)
def cmd_exists(cmd):
return subprocess.call("type " + cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
def main():
parser = argparse.ArgumentParser(description = __doc__)
parser.add_argument('--ref-ipp', default = None, help = u"Reference of the repository ipp-tax-and-benefit-tables-xlsx to use")
parser.add_argument('--zip-url', default = None, help = u"URL of the ZIP file to download")
parser.add_argument('--tmp-dir', default = None, help = u"Where to write intermediary files")
parser.add_argument('--xml-dir', default = None, help = u"Where to write XML files")
parser.add_argument('-v', '--verbose', action = 'store_true', default = False, help = u"Increase output verbosity")
parser.add_argument('--merge', action = 'store_true', default = False, help = u"Merge the generated XML with the OpenFisca France parameters")
args = parser.parse_args()
logging.basicConfig(level = logging.DEBUG if args.verbose else logging.INFO)
    if args.zip_url:
        assert not args.ref_ipp, "Arguments --zip-url and --ref-ipp are incompatible."
        zip_url = args.zip_url
else:
ref_ipp = args.ref_ipp or 'master'
zip_url = u'https://framagit.org/french-tax-and-benefit-tables/ipp-tax-and-benefit-tables-xlsx/repository/archive.zip?ref={}'.format(ref_ipp)
if args.xml_dir is not None and not os.path.exists(args.xml_dir):
log.error(u'Directory {!r} does not exist.'.format(args.xml_dir))
return 1
if not cmd_exists('ssconvert'):
log.error(u'Command "ssconvert" must be installed. It is provided by the "gnumeric" spreadsheet. '
u'Under a Debian GNU/Linux distribution, type `sudo apt install gnumeric`. '
u'Under macOS, type `brew install gnumeric`.')
return 1
tmp_dir = tempfile.mkdtemp(prefix='baremes-ipp-') \
if args.tmp_dir is None \
else args.tmp_dir
log.info(u'Temporary directory is {!r}.'.format(tmp_dir))
zip_file_path = os.path.join(tmp_dir, u"xlsx_files.zip")
urllib.urlretrieve(zip_url, zip_file_path)
log.info(u'ZIP file downloaded and saved as {!r}.'.format(zip_file_path))
with zipfile.ZipFile(zip_file_path, "r") as zip_file:
zip_file.extractall(tmp_dir)
# Find the name of the only directory in `xlsx_dir_path`, ending by the git commit ID in SHA-1 format.
xlsx_dir_path = glob.glob(os.path.join(tmp_dir, 'ipp-tax-and-benefit-tables-xlsx-*'))[0]
log.info(u'ZIP file extracted to {!r}.'.format(xlsx_dir_path))
log.info(u'Converting XLSX files to XLS...')
xls_dir_path = os.path.join(tmp_dir, 'xls')
os.mkdir(xls_dir_path)
for xlsx_file_name in os.listdir(xlsx_dir_path):
if not xlsx_file_name.endswith('.xlsx'):
continue
source_path = os.path.join(xlsx_dir_path, xlsx_file_name)
target_path = os.path.join(xls_dir_path, '{}.xls'.format(os.path.splitext(xlsx_file_name)[0]))
subprocess.check_call(['ssconvert', '--export-type=Gnumeric_Excel:excel_biff8', source_path, target_path])
log.info(u'XLS files written to {!r}.'.format(xls_dir_path))
log.info(u'Converting XLS files to YAML raw...')
yaml_raw_dir_path = os.path.join(tmp_dir, 'yaml_raw')
os.mkdir(yaml_raw_dir_path)
xls_to_yaml_raw.transform(xls_dir_path, yaml_raw_dir_path)
log.info(u'YAML raw files written to {!r}.'.format(yaml_raw_dir_path))
log.info(u'Converting YAML raw files to YAML clean...')
yaml_clean_dir_path = os.path.join(tmp_dir, 'yaml_clean')
os.mkdir(yaml_clean_dir_path)
yaml_raw_to_yaml_clean.clean(yaml_raw_dir_path, yaml_clean_dir_path)
log.info(u'YAML clean files written to {!r}.'.format(yaml_clean_dir_path))
log.info(u'Converting YAML clean files to XML...')
if args.xml_dir is None:
xml_dir_path = os.path.join(tmp_dir, 'xml')
os.mkdir(xml_dir_path)
else:
xml_dir_path = args.xml_dir
yaml_clean_to_xml.transform(yaml_clean_dir_path, xml_dir_path)
log.info(u'XML files written to {!r}'.format(xml_dir_path))
if args.merge:
merge_ipp_xml_files_with_openfisca_parameters(xml_dir_path)
if __name__ == "__main__":
sys.exit(main())
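# Typical invocations (illustrative):
#   python convert_ipp_xlsx_to_openfisca_xml.py --xml-dir ./xml --verbose
#   python convert_ipp_xlsx_to_openfisca_xml.py --ref-ipp master --merge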
|
agpl-3.0
|
analyseuc3m/ANALYSE-v1
|
common/lib/xmodule/xmodule/modulestore/store_utilities.py
|
64
|
4611
|
import re
import logging
from collections import namedtuple
import uuid
from xblock.core import XBlock
DETACHED_XBLOCK_TYPES = set(name for name, __ in XBlock.load_tagged_classes("detached"))
def _prefix_only_url_replace_regex(pattern):
"""
Match urls in quotes pulling out the fields from pattern
"""
return re.compile(ur"""
(?x) # flags=re.VERBOSE
(?P<quote>\\?['"]) # the opening quotes
{}
(?P=quote) # the first matching closing quote
""".format(pattern))
def rewrite_nonportable_content_links(source_course_id, dest_course_id, text):
"""
rewrite any non-portable links to (->) relative links:
/c4x/<org>/<course>/asset/<name> -> /static/<name>
/jump_to/i4x://<org>/<course>/<category>/<name> -> /jump_to_id/<id>
"""
    def portable_asset_link_substitution(match):
quote = match.group('quote')
block_id = match.group('block_id')
return quote + '/static/' + block_id + quote
def portable_jump_to_link_substitution(match):
quote = match.group('quote')
rest = match.group('block_id')
return quote + '/jump_to_id/' + rest + quote
# if something blows up, log the error and continue
# create a serialized template for what the id will look like in the source_course but with
# the block_id as a regex pattern
placeholder_id = uuid.uuid4().hex
asset_block_pattern = unicode(source_course_id.make_asset_key('asset', placeholder_id))
asset_block_pattern = asset_block_pattern.replace(placeholder_id, r'(?P<block_id>.*?)')
try:
        text = _prefix_only_url_replace_regex(asset_block_pattern).sub(portable_asset_link_substitution, text)
except Exception as exc: # pylint: disable=broad-except
logging.warning("Error producing regex substitution %r for text = %r.\n\nError msg = %s", asset_block_pattern, text, str(exc))
placeholder_category = 'cat_{}'.format(uuid.uuid4().hex)
usage_block_pattern = unicode(source_course_id.make_usage_key(placeholder_category, placeholder_id))
usage_block_pattern = usage_block_pattern.replace(placeholder_category, r'(?P<category>[^/+@]+)')
usage_block_pattern = usage_block_pattern.replace(placeholder_id, r'(?P<block_id>.*?)')
jump_to_link_base = ur'/courses/{course_key_string}/jump_to/{usage_key_string}'.format(
course_key_string=unicode(source_course_id), usage_key_string=usage_block_pattern
)
try:
text = _prefix_only_url_replace_regex(jump_to_link_base).sub(portable_jump_to_link_substitution, text)
except Exception as exc: # pylint: disable=broad-except
logging.warning("Error producing regex substitution %r for text = %r.\n\nError msg = %s", jump_to_link_base, text, str(exc))
    # Also, there is commonly a set of link URLs used in the format:
    # /courses/<org>/<course>/<name> which will be broken if migrated to a different course_id,
    # so let's rewrite those, but the target will also be non-portable,
#
# Note: we only need to do this if we are changing course-id's
#
if source_course_id != dest_course_id:
try:
generic_courseware_link_base = u'/courses/{}/'.format(unicode(source_course_id))
            text = re.sub(_prefix_only_url_replace_regex(generic_courseware_link_base), portable_asset_link_substitution, text)
except Exception as exc: # pylint: disable=broad-except
logging.warning("Error producing regex substitution %r for text = %r.\n\nError msg = %s", source_course_id, text, str(exc))
return text
def draft_node_constructor(module, url, parent_url, location=None, parent_location=None, index=None):
"""
    Constructs a draft_node namedtuple with defaults.
"""
draft_node = namedtuple('draft_node', ['module', 'location', 'url', 'parent_location', 'parent_url', 'index'])
return draft_node(module, location, url, parent_location, parent_url, index)
def get_draft_subtree_roots(draft_nodes):
"""
Takes a list of draft_nodes, which are namedtuples, each of which identify
itself and its parent.
    If a draft_node is in `draft_nodes`, then we expect all of its children
    to be in `draft_nodes` as well. Since `_import_draft` is recursive,
we only want to import the roots of any draft subtrees contained in
`draft_nodes`.
This generator yields those roots.
"""
urls = [draft_node.url for draft_node in draft_nodes]
for draft_node in draft_nodes:
if draft_node.parent_url not in urls:
yield draft_node
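# A minimal usage sketch of the draft-subtree helpers; the URLs are
# placeholders:
if __name__ == '__main__':
    nodes = [
        draft_node_constructor(module=None, url='block-a',
                               parent_url='published-parent'),
        draft_node_constructor(module=None, url='block-b',
                               parent_url='block-a'),
    ]
    # Only 'block-a' qualifies: its parent is not among the draft nodes.
    print list(get_draft_subtree_roots(nodes))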
|
agpl-3.0
|
vivianli32/TravelConnect
|
flask/lib/python3.4/site-packages/babel/numbers.py
|
80
|
19693
|
# -*- coding: utf-8 -*-
"""
babel.numbers
~~~~~~~~~~~~~
Locale dependent formatting and parsing of numeric data.
The default locale for the functions in this module is determined by the
following environment variables, in that order:
* ``LC_NUMERIC``,
* ``LC_ALL``, and
* ``LANG``
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
# TODO:
# Padding and rounding increments in pattern:
# - http://www.unicode.org/reports/tr35/ (Appendix G.6)
from decimal import Decimal, InvalidOperation
import math
import re
from babel.core import default_locale, Locale
from babel._compat import range_type
LC_NUMERIC = default_locale('LC_NUMERIC')
def get_currency_name(currency, count=None, locale=LC_NUMERIC):
"""Return the name used by the locale for the specified currency.
>>> get_currency_name('USD', locale='en_US')
u'US Dollar'
.. versionadded:: 0.9.4
:param currency: the currency code
:param count: the optional count. If provided the currency name
will be pluralized to that number if possible.
:param locale: the `Locale` object or locale identifier
"""
loc = Locale.parse(locale)
if count is not None:
plural_form = loc.plural_form(count)
plural_names = loc._data['currency_names_plural']
if currency in plural_names:
return plural_names[currency][plural_form]
return loc.currencies.get(currency, currency)
def get_currency_symbol(currency, locale=LC_NUMERIC):
"""Return the symbol used by the locale for the specified currency.
>>> get_currency_symbol('USD', locale='en_US')
u'$'
:param currency: the currency code
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).currency_symbols.get(currency, currency)
def get_decimal_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate decimal fractions.
>>> get_decimal_symbol('en_US')
u'.'
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).number_symbols.get('decimal', u'.')
def get_plus_sign_symbol(locale=LC_NUMERIC):
"""Return the plus sign symbol used by the current locale.
>>> get_plus_sign_symbol('en_US')
u'+'
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).number_symbols.get('plusSign', u'+')
def get_minus_sign_symbol(locale=LC_NUMERIC):
"""Return the plus sign symbol used by the current locale.
>>> get_minus_sign_symbol('en_US')
u'-'
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).number_symbols.get('minusSign', u'-')
def get_exponential_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate mantissa and exponent.
>>> get_exponential_symbol('en_US')
u'E'
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).number_symbols.get('exponential', u'E')
def get_group_symbol(locale=LC_NUMERIC):
"""Return the symbol used by the locale to separate groups of thousands.
>>> get_group_symbol('en_US')
u','
:param locale: the `Locale` object or locale identifier
"""
return Locale.parse(locale).number_symbols.get('group', u',')
def format_number(number, locale=LC_NUMERIC):
u"""Return the given number formatted for a specific locale.
>>> format_number(1099, locale='en_US')
u'1,099'
>>> format_number(1099, locale='de_DE')
u'1.099'
:param number: the number to format
:param locale: the `Locale` object or locale identifier
"""
# Do we really need this one?
return format_decimal(number, locale=locale)
def format_decimal(number, format=None, locale=LC_NUMERIC):
u"""Return the given decimal number formatted for a specific locale.
>>> format_decimal(1.2345, locale='en_US')
u'1.234'
>>> format_decimal(1.2346, locale='en_US')
u'1.235'
>>> format_decimal(-1.2346, locale='en_US')
u'-1.235'
>>> format_decimal(1.2345, locale='sv_SE')
u'1,234'
>>> format_decimal(1.2345, locale='de')
u'1,234'
The appropriate thousands grouping and the decimal separator are used for
each locale:
>>> format_decimal(12345.5, locale='en_US')
u'12,345.5'
:param number: the number to format
:param format:
:param locale: the `Locale` object or locale identifier
"""
locale = Locale.parse(locale)
if not format:
format = locale.decimal_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
def format_currency(number, currency, format=None, locale=LC_NUMERIC):
u"""Return formatted currency value.
>>> format_currency(1099.98, 'USD', locale='en_US')
u'$1,099.98'
>>> format_currency(1099.98, 'USD', locale='es_CO')
u'1.099,98\\xa0US$'
>>> format_currency(1099.98, 'EUR', locale='de_DE')
u'1.099,98\\xa0\\u20ac'
The pattern can also be specified explicitly. The currency is
placed with the '¤' sign. As the sign gets repeated the format
expands (¤ being the symbol, ¤¤ is the currency abbreviation and
¤¤¤ is the full name of the currency):
>>> format_currency(1099.98, 'EUR', u'\xa4\xa4 #,##0.00', locale='en_US')
u'EUR 1,099.98'
>>> format_currency(1099.98, 'EUR', u'#,##0.00 \xa4\xa4\xa4', locale='en_US')
u'1,099.98 euros'
:param number: the number to format
:param currency: the currency code
:param locale: the `Locale` object or locale identifier
"""
locale = Locale.parse(locale)
if not format:
format = locale.currency_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale, currency=currency)
def format_percent(number, format=None, locale=LC_NUMERIC):
"""Return formatted percent value for a specific locale.
>>> format_percent(0.34, locale='en_US')
u'34%'
>>> format_percent(25.1234, locale='en_US')
u'2,512%'
>>> format_percent(25.1234, locale='sv_SE')
u'2\\xa0512\\xa0%'
The format pattern can also be specified explicitly:
>>> format_percent(25.1234, u'#,##0\u2030', locale='en_US')
u'25,123\u2030'
:param number: the percent number to format
:param format:
:param locale: the `Locale` object or locale identifier
"""
locale = Locale.parse(locale)
if not format:
format = locale.percent_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
def format_scientific(number, format=None, locale=LC_NUMERIC):
"""Return value formatted in scientific notation for a specific locale.
>>> format_scientific(10000, locale='en_US')
u'1E4'
The format pattern can also be specified explicitly:
>>> format_scientific(1234567, u'##0E00', locale='en_US')
u'1.23E06'
:param number: the number to format
:param format:
:param locale: the `Locale` object or locale identifier
"""
locale = Locale.parse(locale)
if not format:
format = locale.scientific_formats.get(format)
pattern = parse_pattern(format)
return pattern.apply(number, locale)
class NumberFormatError(ValueError):
"""Exception raised when a string cannot be parsed into a number."""
def parse_number(string, locale=LC_NUMERIC):
"""Parse localized number string into an integer.
>>> parse_number('1,099', locale='en_US')
1099
>>> parse_number('1.099', locale='de_DE')
1099
When the given string cannot be parsed, an exception is raised:
>>> parse_number('1.099,98', locale='de')
Traceback (most recent call last):
...
NumberFormatError: '1.099,98' is not a valid number
:param string: the string to parse
:param locale: the `Locale` object or locale identifier
:return: the parsed number
:raise `NumberFormatError`: if the string can not be converted to a number
"""
try:
return int(string.replace(get_group_symbol(locale), ''))
except ValueError:
raise NumberFormatError('%r is not a valid number' % string)
def parse_decimal(string, locale=LC_NUMERIC):
"""Parse localized decimal string into a decimal.
>>> parse_decimal('1,099.98', locale='en_US')
Decimal('1099.98')
>>> parse_decimal('1.099,98', locale='de')
Decimal('1099.98')
When the given string cannot be parsed, an exception is raised:
>>> parse_decimal('2,109,998', locale='de')
Traceback (most recent call last):
...
NumberFormatError: '2,109,998' is not a valid decimal number
:param string: the string to parse
:param locale: the `Locale` object or locale identifier
:raise NumberFormatError: if the string can not be converted to a
decimal number
"""
locale = Locale.parse(locale)
try:
return Decimal(string.replace(get_group_symbol(locale), '')
.replace(get_decimal_symbol(locale), '.'))
except InvalidOperation:
raise NumberFormatError('%r is not a valid decimal number' % string)
PREFIX_END = r'[^0-9@#.,]'
NUMBER_TOKEN = r'[0-9@#.\-,E+]'
PREFIX_PATTERN = r"(?P<prefix>(?:'[^']*'|%s)*)" % PREFIX_END
NUMBER_PATTERN = r"(?P<number>%s+)" % NUMBER_TOKEN
SUFFIX_PATTERN = r"(?P<suffix>.*)"
number_re = re.compile(r"%s%s%s" % (PREFIX_PATTERN, NUMBER_PATTERN,
SUFFIX_PATTERN))
def split_number(value):
"""Convert a number into a (intasstring, fractionasstring) tuple"""
if isinstance(value, Decimal):
# NB can't just do text = str(value) as str repr of Decimal may be
# in scientific notation, e.g. for small numbers.
sign, digits, exp = value.as_tuple()
# build list of digits in reverse order, then reverse+join
# as per http://docs.python.org/library/decimal.html#recipes
int_part = []
frac_part = []
digits = list(map(str, digits))
# get figures after decimal point
for i in range(-exp):
# add digit if available, else 0
if digits:
frac_part.append(digits.pop())
else:
frac_part.append('0')
# add in some zeroes...
for i in range(exp):
int_part.append('0')
# and the rest
while digits:
int_part.append(digits.pop())
# if < 1, int_part must be set to '0'
if len(int_part) == 0:
int_part = '0',
if sign:
int_part.append('-')
return ''.join(reversed(int_part)), ''.join(reversed(frac_part))
text = ('%.9f' % value).rstrip('0')
if '.' in text:
a, b = text.split('.', 1)
if b == '0':
b = ''
else:
a, b = text, ''
return a, b
def bankersround(value, ndigits=0):
"""Round a number to a given precision.
Works like round() except that the round-half-even (banker's rounding)
algorithm is used instead of round-half-up.
>>> bankersround(5.5, 0)
6.0
>>> bankersround(6.5, 0)
6.0
>>> bankersround(-6.5, 0)
-6.0
>>> bankersround(1234.0, -2)
1200.0
"""
sign = int(value < 0) and -1 or 1
value = abs(value)
a, b = split_number(value)
digits = a + b
add = 0
i = len(a) + ndigits
if i < 0 or i >= len(digits):
pass
elif digits[i] > '5':
add = 1
elif digits[i] == '5' and digits[i-1] in '13579':
add = 1
elif digits[i] == '5': # previous digit is even
# We round up unless all following digits are zero.
for j in range_type(i + 1, len(digits)):
if digits[j] != '0':
add = 1
break
scale = 10**ndigits
if isinstance(value, Decimal):
return Decimal(int(value * scale + add)) / scale * sign
else:
return float(int(value * scale + add)) / scale * sign
def parse_grouping(p):
"""Parse primary and secondary digit grouping
>>> parse_grouping('##')
(1000, 1000)
>>> parse_grouping('#,###')
(3, 3)
>>> parse_grouping('#,####,###')
(3, 4)
"""
width = len(p)
g1 = p.rfind(',')
if g1 == -1:
return 1000, 1000
g1 = width - g1 - 1
g2 = p[:-g1 - 1].rfind(',')
if g2 == -1:
return g1, g1
g2 = width - g1 - g2 - 2
return g1, g2
def parse_pattern(pattern):
"""Parse number format patterns"""
if isinstance(pattern, NumberPattern):
return pattern
def _match_number(pattern):
rv = number_re.search(pattern)
if rv is None:
raise ValueError('Invalid number pattern %r' % pattern)
return rv.groups()
# Do we have a negative subpattern?
if ';' in pattern:
pattern, neg_pattern = pattern.split(';', 1)
pos_prefix, number, pos_suffix = _match_number(pattern)
neg_prefix, _, neg_suffix = _match_number(neg_pattern)
else:
pos_prefix, number, pos_suffix = _match_number(pattern)
neg_prefix = '-' + pos_prefix
neg_suffix = pos_suffix
if 'E' in number:
number, exp = number.split('E', 1)
else:
exp = None
if '@' in number:
if '.' in number and '0' in number:
raise ValueError('Significant digit patterns can not contain '
'"@" or "0"')
if '.' in number:
integer, fraction = number.rsplit('.', 1)
else:
integer = number
fraction = ''
def parse_precision(p):
"""Calculate the min and max allowed digits"""
min = max = 0
for c in p:
if c in '@0':
min += 1
max += 1
elif c == '#':
max += 1
elif c == ',':
continue
else:
break
return min, max
int_prec = parse_precision(integer)
frac_prec = parse_precision(fraction)
if exp:
frac_prec = parse_precision(integer+fraction)
exp_plus = exp.startswith('+')
exp = exp.lstrip('+')
exp_prec = parse_precision(exp)
else:
exp_plus = None
exp_prec = None
grouping = parse_grouping(integer)
return NumberPattern(pattern, (pos_prefix, neg_prefix),
(pos_suffix, neg_suffix), grouping,
int_prec, frac_prec,
exp_prec, exp_plus)
class NumberPattern(object):
def __init__(self, pattern, prefix, suffix, grouping,
int_prec, frac_prec, exp_prec, exp_plus):
self.pattern = pattern
self.prefix = prefix
self.suffix = suffix
self.grouping = grouping
self.int_prec = int_prec
self.frac_prec = frac_prec
self.exp_prec = exp_prec
self.exp_plus = exp_plus
if '%' in ''.join(self.prefix + self.suffix):
self.scale = 100
elif u'‰' in ''.join(self.prefix + self.suffix):
self.scale = 1000
else:
self.scale = 1
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.pattern)
def apply(self, value, locale, currency=None):
if isinstance(value, float):
value = Decimal(str(value))
value *= self.scale
is_negative = int(value < 0)
if self.exp_prec: # Scientific notation
value = abs(value)
if value:
exp = int(math.floor(math.log(value, 10)))
else:
exp = 0
# Minimum number of integer digits
if self.int_prec[0] == self.int_prec[1]:
exp -= self.int_prec[0] - 1
# Exponent grouping
elif self.int_prec[1]:
exp = int(exp / self.int_prec[1]) * self.int_prec[1]
if not isinstance(value, Decimal):
value = float(value)
if exp < 0:
value = value * 10**(-exp)
else:
value = value / 10**exp
exp_sign = ''
if exp < 0:
exp_sign = get_minus_sign_symbol(locale)
elif self.exp_plus:
exp_sign = get_plus_sign_symbol(locale)
exp = abs(exp)
number = u'%s%s%s%s' % \
(self._format_sigdig(value, self.frac_prec[0],
self.frac_prec[1]),
get_exponential_symbol(locale), exp_sign,
self._format_int(str(exp), self.exp_prec[0],
self.exp_prec[1], locale))
        elif '@' in self.pattern: # Is it a significant digits pattern?
text = self._format_sigdig(abs(value),
self.int_prec[0],
self.int_prec[1])
if '.' in text:
a, b = text.split('.')
a = self._format_int(a, 0, 1000, locale)
if b:
b = get_decimal_symbol(locale) + b
number = a + b
else:
number = self._format_int(text, 0, 1000, locale)
else: # A normal number pattern
a, b = split_number(bankersround(abs(value),
self.frac_prec[1]))
b = b or '0'
a = self._format_int(a, self.int_prec[0],
self.int_prec[1], locale)
b = self._format_frac(b, locale)
number = a + b
retval = u'%s%s%s' % (self.prefix[is_negative], number,
self.suffix[is_negative])
if u'¤' in retval:
retval = retval.replace(u'¤¤¤',
get_currency_name(currency, value, locale))
retval = retval.replace(u'¤¤', currency.upper())
retval = retval.replace(u'¤', get_currency_symbol(currency, locale))
return retval
def _format_sigdig(self, value, min, max):
"""Convert value to a string.
The resulting string will contain between (min, max) number of
significant digits.
"""
a, b = split_number(value)
ndecimals = len(a)
if a == '0' and b != '':
ndecimals = 0
while b.startswith('0'):
b = b[1:]
ndecimals -= 1
a, b = split_number(bankersround(value, max - ndecimals))
digits = len((a + b).lstrip('0'))
if not digits:
digits = 1
        # Figure out if we need to add any trailing '0's
if len(a) >= max and a != '0':
return a
if digits < min:
b += ('0' * (min - digits))
if b:
return '%s.%s' % (a, b)
return a
def _format_int(self, value, min, max, locale):
width = len(value)
if width < min:
value = '0' * (min - width) + value
gsize = self.grouping[0]
ret = ''
symbol = get_group_symbol(locale)
while len(value) > gsize:
ret = symbol + value[-gsize:] + ret
value = value[:-gsize]
gsize = self.grouping[1]
return value + ret
def _format_frac(self, value, locale):
min, max = self.frac_prec
if len(value) < min:
value += ('0' * (min - len(value)))
if max == 0 or (min == 0 and int(value) == 0):
return ''
width = len(value)
while len(value) > min and value[-1] == '0':
value = value[:-1]
return get_decimal_symbol(locale) + value
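# A minimal usage sketch, assuming babel's bundled locale data is
# available; the expected outputs mirror the doctests above:
if __name__ == '__main__':
    print(format_decimal(12345.5, locale='en_US'))          # 12,345.5
    print(format_currency(1099.98, 'USD', locale='en_US'))  # $1,099.98
    print(parse_decimal('1.099,98', locale='de'))           # Decimal('1099.98')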
|
mit
|
geekboxzone/lollipop_external_chromium_org_third_party_WebKit
|
Tools/Scripts/webkitpy/layout_tests/controllers/layout_test_runner_unittest.py
|
31
|
15091
|
# Copyright (C) 2012 Google Inc. All rights reserved.
# Copyright (C) 2010 Gabor Rapcsanyi ([email protected]), University of Szeged
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
from webkitpy.common.host_mock import MockHost
from webkitpy.common.system.systemhost_mock import MockSystemHost
from webkitpy.layout_tests import run_webkit_tests
from webkitpy.layout_tests.controllers.layout_test_runner import LayoutTestRunner, Sharder, TestRunInterruptedException
from webkitpy.layout_tests.models import test_expectations
from webkitpy.layout_tests.models import test_failures
from webkitpy.layout_tests.models.test_run_results import TestRunResults
from webkitpy.layout_tests.models.test_input import TestInput
from webkitpy.layout_tests.models.test_results import TestResult
from webkitpy.layout_tests.port.test import TestPort
TestExpectations = test_expectations.TestExpectations
class FakePrinter(object):
num_completed = 0
num_tests = 0
def print_expected(self, run_results, get_tests_with_result_type):
pass
def print_workers_and_shards(self, num_workers, num_shards, num_locked_shards):
pass
def print_started_test(self, test_name):
pass
def print_finished_test(self, result, expected, exp_str, got_str):
pass
def write(self, msg):
pass
def write_update(self, msg):
pass
def flush(self):
pass
class LockCheckingRunner(LayoutTestRunner):
def __init__(self, port, options, printer, tester, http_lock):
super(LockCheckingRunner, self).__init__(options, port, printer, port.results_directory(), lambda test_name: False)
self._finished_list_called = False
self._tester = tester
self._should_have_http_lock = http_lock
def handle_finished_list(self, source, list_name, num_tests, elapsed_time):
if not self._finished_list_called:
self._tester.assertEqual(list_name, 'locked_tests')
self._tester.assertTrue(self._remaining_locked_shards)
self._tester.assertTrue(self._has_http_lock is self._should_have_http_lock)
super(LockCheckingRunner, self).handle_finished_list(source, list_name, num_tests, elapsed_time)
if not self._finished_list_called:
self._tester.assertEqual(self._remaining_locked_shards, [])
self._tester.assertFalse(self._has_http_lock)
self._finished_list_called = True
class LayoutTestRunnerTests(unittest.TestCase):
def _runner(self, port=None):
# FIXME: we shouldn't have to use run_webkit_tests.py to get the options we need.
options = run_webkit_tests.parse_args(['--platform', 'test-mac-snowleopard'])[0]
options.child_processes = '1'
host = MockHost()
port = port or host.port_factory.get(options.platform, options=options)
return LockCheckingRunner(port, options, FakePrinter(), self, True)
def _run_tests(self, runner, tests):
test_inputs = [TestInput(test, 6000) for test in tests]
expectations = TestExpectations(runner._port, tests)
runner.run_tests(expectations, test_inputs, set(), num_workers=1, retrying=False)
def test_interrupt_if_at_failure_limits(self):
runner = self._runner()
runner._options.exit_after_n_failures = None
        runner._options.exit_after_n_crashes_or_timeouts = None
test_names = ['passes/text.html', 'passes/image.html']
runner._test_inputs = [TestInput(test_name, 6000) for test_name in test_names]
run_results = TestRunResults(TestExpectations(runner._port, test_names), len(test_names))
run_results.unexpected_failures = 100
run_results.unexpected_crashes = 50
run_results.unexpected_timeouts = 50
# No exception when the exit_after* options are None.
runner._interrupt_if_at_failure_limits(run_results)
# No exception when we haven't hit the limit yet.
runner._options.exit_after_n_failures = 101
runner._options.exit_after_n_crashes_or_timeouts = 101
runner._interrupt_if_at_failure_limits(run_results)
# Interrupt if we've exceeded either limit:
runner._options.exit_after_n_crashes_or_timeouts = 10
self.assertRaises(TestRunInterruptedException, runner._interrupt_if_at_failure_limits, run_results)
self.assertEqual(run_results.results_by_name['passes/text.html'].type, test_expectations.SKIP)
self.assertEqual(run_results.results_by_name['passes/image.html'].type, test_expectations.SKIP)
runner._options.exit_after_n_crashes_or_timeouts = None
runner._options.exit_after_n_failures = 10
        self.assertRaises(TestRunInterruptedException, runner._interrupt_if_at_failure_limits, run_results)
def test_update_summary_with_result(self):
# Reftests expected to be image mismatch should be respected when pixel_tests=False.
runner = self._runner()
runner._options.pixel_tests = False
test = 'failures/expected/reftest.html'
expectations = TestExpectations(runner._port, tests=[test])
runner._expectations = expectations
run_results = TestRunResults(expectations, 1)
result = TestResult(test_name=test, failures=[test_failures.FailureReftestMismatchDidNotOccur()], reftest_type=['!='])
runner._update_summary_with_result(run_results, result)
self.assertEqual(1, run_results.expected)
self.assertEqual(0, run_results.unexpected)
run_results = TestRunResults(expectations, 1)
result = TestResult(test_name=test, failures=[], reftest_type=['=='])
runner._update_summary_with_result(run_results, result)
self.assertEqual(0, run_results.expected)
self.assertEqual(1, run_results.unexpected)
class SharderTests(unittest.TestCase):
test_list = [
"http/tests/websocket/tests/unicode.htm",
"animations/keyframes.html",
"http/tests/security/view-source-no-refresh.html",
"http/tests/websocket/tests/websocket-protocol-ignored.html",
"fast/css/display-none-inline-style-change-crash.html",
"http/tests/xmlhttprequest/supported-xml-content-types.html",
"dom/html/level2/html/HTMLAnchorElement03.html",
"ietestcenter/Javascript/11.1.5_4-4-c-1.html",
"dom/html/level2/html/HTMLAnchorElement06.html",
"perf/object-keys.html",
"virtual/threaded/dir/test.html",
"virtual/threaded/fast/foo/test.html",
]
def get_test_input(self, test_file):
return TestInput(test_file, requires_lock=(test_file.startswith('http') or test_file.startswith('perf')))
def get_shards(self, num_workers, fully_parallel, run_singly, test_list=None, max_locked_shards=1):
port = TestPort(MockSystemHost())
self.sharder = Sharder(port.split_test, max_locked_shards)
test_list = test_list or self.test_list
return self.sharder.shard_tests([self.get_test_input(test) for test in test_list],
num_workers, fully_parallel, run_singly)
def assert_shards(self, actual_shards, expected_shard_names):
self.assertEqual(len(actual_shards), len(expected_shard_names))
for i, shard in enumerate(actual_shards):
expected_shard_name, expected_test_names = expected_shard_names[i]
self.assertEqual(shard.name, expected_shard_name)
self.assertEqual([test_input.test_name for test_input in shard.test_inputs],
expected_test_names)
def test_shard_by_dir(self):
locked, unlocked = self.get_shards(num_workers=2, fully_parallel=False, run_singly=False)
# Note that although there are tests in multiple dirs that need locks,
# they are crammed into a single shard in order to reduce the # of
# workers hitting the server at once.
self.assert_shards(locked,
[('locked_shard_1',
['http/tests/security/view-source-no-refresh.html',
'http/tests/websocket/tests/unicode.htm',
'http/tests/websocket/tests/websocket-protocol-ignored.html',
'http/tests/xmlhttprequest/supported-xml-content-types.html',
'perf/object-keys.html'])])
self.assert_shards(unlocked,
[('virtual/threaded/dir', ['virtual/threaded/dir/test.html']),
('virtual/threaded/fast/foo', ['virtual/threaded/fast/foo/test.html']),
('animations', ['animations/keyframes.html']),
('dom/html/level2/html', ['dom/html/level2/html/HTMLAnchorElement03.html',
'dom/html/level2/html/HTMLAnchorElement06.html']),
('fast/css', ['fast/css/display-none-inline-style-change-crash.html']),
('ietestcenter/Javascript', ['ietestcenter/Javascript/11.1.5_4-4-c-1.html'])])
def test_shard_every_file(self):
locked, unlocked = self.get_shards(num_workers=2, fully_parallel=True, max_locked_shards=2, run_singly=False)
self.assert_shards(locked,
[('locked_shard_1',
['http/tests/websocket/tests/unicode.htm',
'http/tests/security/view-source-no-refresh.html',
'http/tests/websocket/tests/websocket-protocol-ignored.html']),
('locked_shard_2',
['http/tests/xmlhttprequest/supported-xml-content-types.html',
                             'perf/object-keys.html'])])
self.assert_shards(unlocked,
[('virtual/threaded/dir', ['virtual/threaded/dir/test.html']),
('virtual/threaded/fast/foo', ['virtual/threaded/fast/foo/test.html']),
('.', ['animations/keyframes.html']),
('.', ['fast/css/display-none-inline-style-change-crash.html']),
('.', ['dom/html/level2/html/HTMLAnchorElement03.html']),
('.', ['ietestcenter/Javascript/11.1.5_4-4-c-1.html']),
('.', ['dom/html/level2/html/HTMLAnchorElement06.html'])])
def test_shard_in_two(self):
locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False, run_singly=False)
self.assert_shards(locked,
[('locked_tests',
['http/tests/websocket/tests/unicode.htm',
'http/tests/security/view-source-no-refresh.html',
'http/tests/websocket/tests/websocket-protocol-ignored.html',
'http/tests/xmlhttprequest/supported-xml-content-types.html',
'perf/object-keys.html'])])
self.assert_shards(unlocked,
[('unlocked_tests',
['animations/keyframes.html',
'fast/css/display-none-inline-style-change-crash.html',
'dom/html/level2/html/HTMLAnchorElement03.html',
'ietestcenter/Javascript/11.1.5_4-4-c-1.html',
'dom/html/level2/html/HTMLAnchorElement06.html',
'virtual/threaded/dir/test.html',
'virtual/threaded/fast/foo/test.html'])])
def test_shard_in_two_has_no_locked_shards(self):
locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False, run_singly=False,
test_list=['animations/keyframe.html'])
self.assertEqual(len(locked), 0)
self.assertEqual(len(unlocked), 1)
def test_shard_in_two_has_no_unlocked_shards(self):
locked, unlocked = self.get_shards(num_workers=1, fully_parallel=False, run_singly=False,
test_list=['http/tests/websocket/tests/unicode.htm'])
self.assertEqual(len(locked), 1)
self.assertEqual(len(unlocked), 0)
def test_multiple_locked_shards(self):
locked, unlocked = self.get_shards(num_workers=4, fully_parallel=False, max_locked_shards=2, run_singly=False)
self.assert_shards(locked,
[('locked_shard_1',
['http/tests/security/view-source-no-refresh.html',
'http/tests/websocket/tests/unicode.htm',
'http/tests/websocket/tests/websocket-protocol-ignored.html']),
('locked_shard_2',
['http/tests/xmlhttprequest/supported-xml-content-types.html',
'perf/object-keys.html'])])
locked, unlocked = self.get_shards(num_workers=4, fully_parallel=False, run_singly=False)
self.assert_shards(locked,
[('locked_shard_1',
['http/tests/security/view-source-no-refresh.html',
'http/tests/websocket/tests/unicode.htm',
'http/tests/websocket/tests/websocket-protocol-ignored.html',
'http/tests/xmlhttprequest/supported-xml-content-types.html',
'perf/object-keys.html'])])
def test_virtual_shards(self):
# With run_singly=False, we try to keep all of the tests in a virtual suite together even
# when fully_parallel=True, so that we don't restart every time the command line args change.
locked, unlocked = self.get_shards(num_workers=2, fully_parallel=True, max_locked_shards=2, run_singly=False,
test_list=['virtual/foo/bar1.html', 'virtual/foo/bar2.html'])
self.assert_shards(unlocked,
[('virtual/foo', ['virtual/foo/bar1.html', 'virtual/foo/bar2.html'])])
# But, with run_singly=True, we have to restart every time anyway, so we want full parallelism.
locked, unlocked = self.get_shards(num_workers=2, fully_parallel=True, max_locked_shards=2, run_singly=True,
test_list=['virtual/foo/bar1.html', 'virtual/foo/bar2.html'])
self.assert_shards(unlocked,
[('.', ['virtual/foo/bar1.html']),
('.', ['virtual/foo/bar2.html'])])
|
bsd-3-clause
|
jrgdiz/iria
|
main.py
|
1
|
2878
|
#!/usr/bin/env python3
# Based on echobot2.py
import logging
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
from telegram.error import (TelegramError, Unauthorized, BadRequest,
                            TimedOut, ChatMigrated, NetworkError)
from modules.gifsapm import GifsApmHandler
from modules.estraviz import EstravizHandler
from modules.dice import DiceHandler
########################
# LOGGING
########################
logging.basicConfig(format='[%(asctime)s] [%(name)s] [%(levelname)s] %(message)s',
level=logging.INFO,
filename='iria.log')
logger = logging.getLogger(__name__)
########################
# DEFAULT HANDLERS
########################
def start(bot, update):
    update.message.reply_text('Hello!')
def help(bot, update):
update.message.reply_text(
"""
        Try:
        /e(straviz) word-to-look-up
        /g(ifsapm) gif-tag
        /d(ados) dice-roll
""")
def echo(bot, update):
update.message.reply_text(update.message.text, parse_mode="Markdown")
def error(bot, update, error):
    logger.warn('Message "%s" caused error "%s"' % (update, error))
try:
raise error
except Unauthorized:
# remove update.message.chat_id from conversation list
logger.warn('Unauthorized: %s' % update.message.chat_id)
except BadRequest:
logger.warn('Bad Request')
# handle malformed requests - read more below!
except TimedOut:
logger.warn('Timed Out')
# handle slow connection problems
except NetworkError:
logger.warn('Network Error')
# handle other connection problems
except ChatMigrated as e:
# the chat_id of a group has changed, use e.new_chat_id instead
logger.warn('Chat Migrated: %s' % error.new_chat_id)
except TelegramError:
# handle all other telegram related errors
logger.warn('Other Telegram Error')
########################
# BOT START
########################
# Open secret token
with open("./secret/bot_token", mode='r', encoding="utf-8") as f:
tg_token = f.readline().strip()
# Create the EventHandler and pass it your bot's token.
updater = Updater(tg_token)
# Get the dispatcher to register handlers
dp = updater.dispatcher
# default handlers
dp.add_handler(CommandHandler("start", start))
dp.add_handler(CommandHandler("help", help))
dp.add_handler(MessageHandler(Filters.text, echo)) # on noncommand
dp.add_error_handler(error) # log all errors
#################################
# custom modules are defined here
#################################
EstravizHandler().register(dp, logger)
GifsApmHandler().register(dp, logger)
DiceHandler().register(dp, logger)
# Start the Bot
updater.start_polling()
# Run the bot until you press Ctrl-C or the process receives SIGINT,
# SIGTERM or SIGABRT. This should be used most of the time, since
# start_polling() is non-blocking and will stop the bot gracefully.
updater.idle()
|
mit
|
marshall007/rethinkdb
|
test/rql_test/connections/http_support/jinja2/nodes.py
|
623
|
28875
|
# -*- coding: utf-8 -*-
"""
jinja2.nodes
~~~~~~~~~~~~
This module implements additional nodes derived from the ast base node.
It also provides some node tree helper functions like `in_lineno` and
`get_nodes` used by the parser and translator in order to normalize
python and jinja nodes.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import operator
from collections import deque
from jinja2.utils import Markup
from jinja2._compat import next, izip, with_metaclass, text_type, \
method_type, function_type
#: the types we support for context functions
_context_function_types = (function_type, method_type)
_binop_to_func = {
'*': operator.mul,
'/': operator.truediv,
'//': operator.floordiv,
'**': operator.pow,
'%': operator.mod,
'+': operator.add,
'-': operator.sub
}
_uaop_to_func = {
'not': operator.not_,
'+': operator.pos,
'-': operator.neg
}
_cmpop_to_func = {
'eq': operator.eq,
'ne': operator.ne,
'gt': operator.gt,
'gteq': operator.ge,
'lt': operator.lt,
'lteq': operator.le,
'in': lambda a, b: a in b,
'notin': lambda a, b: a not in b
}
class Impossible(Exception):
"""Raised if the node could not perform a requested action."""
class NodeType(type):
"""A metaclass for nodes that handles the field and attribute
inheritance. fields and attributes from the parent class are
automatically forwarded to the child."""
def __new__(cls, name, bases, d):
for attr in 'fields', 'attributes':
storage = []
storage.extend(getattr(bases[0], attr, ()))
storage.extend(d.get(attr, ()))
assert len(bases) == 1, 'multiple inheritance not allowed'
assert len(storage) == len(set(storage)), 'layout conflict'
d[attr] = tuple(storage)
d.setdefault('abstract', False)
return type.__new__(cls, name, bases, d)
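# Illustrative note: because NodeType concatenates the parent's fields with
# the child's, a hypothetical subclass of Name declaring fields = ('extra',)
# would end up with fields == ('name', 'ctx', 'extra').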
class EvalContext(object):
"""Holds evaluation time information. Custom attributes can be attached
to it in extensions.
"""
def __init__(self, environment, template_name=None):
self.environment = environment
if callable(environment.autoescape):
self.autoescape = environment.autoescape(template_name)
else:
self.autoescape = environment.autoescape
self.volatile = False
def save(self):
return self.__dict__.copy()
def revert(self, old):
self.__dict__.clear()
self.__dict__.update(old)
def get_eval_context(node, ctx):
if ctx is None:
if node.environment is None:
raise RuntimeError('if no eval context is passed, the '
'node must have an attached '
'environment.')
return EvalContext(node.environment)
return ctx
class Node(with_metaclass(NodeType, object)):
"""Baseclass for all Jinja2 nodes. There are a number of nodes available
of different types. There are four major types:
- :class:`Stmt`: statements
- :class:`Expr`: expressions
- :class:`Helper`: helper nodes
- :class:`Template`: the outermost wrapper node
All nodes have fields and attributes. Fields may be other nodes, lists,
or arbitrary values. Fields are passed to the constructor as regular
positional arguments, attributes as keyword arguments. Each node has
two attributes: `lineno` (the line number of the node) and `environment`.
The `environment` attribute is set at the end of the parsing process for
all nodes automatically.
"""
fields = ()
attributes = ('lineno', 'environment')
abstract = True
def __init__(self, *fields, **attributes):
if self.abstract:
            raise TypeError('abstract nodes are not instantiable')
if fields:
if len(fields) != len(self.fields):
if not self.fields:
raise TypeError('%r takes 0 arguments' %
self.__class__.__name__)
raise TypeError('%r takes 0 or %d argument%s' % (
self.__class__.__name__,
len(self.fields),
len(self.fields) != 1 and 's' or ''
))
for name, arg in izip(self.fields, fields):
setattr(self, name, arg)
for attr in self.attributes:
setattr(self, attr, attributes.pop(attr, None))
if attributes:
raise TypeError('unknown attribute %r' %
next(iter(attributes)))
def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples. By default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (exclude is only is None) or \
(exclude is not None and name not in exclude) or \
(only is not None and name in only):
try:
yield name, getattr(self, name)
except AttributeError:
pass
def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values if they are nodes. If the value
        of a field is a list, all the nodes in that list are returned.
"""
for field, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item
def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result
def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result
def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self
def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if 'lineno' in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self
def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self
def __eq__(self, other):
return type(self) is type(other) and \
tuple(self.iter_fields()) == tuple(other.iter_fields())
def __ne__(self, other):
return not self.__eq__(other)
# Restore Python 2 hashing behavior on Python 3
__hash__ = object.__hash__
def __repr__(self):
return '%s(%s)' % (
self.__class__.__name__,
', '.join('%s=%r' % (arg, getattr(self, arg, None)) for
arg in self.fields)
)
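# Construction sketch (illustrative): fields are passed positionally and
# attributes as keywords, e.g.
#
#     n = Name('foo', 'store', lineno=1)
#     list(n.iter_fields())   # -> [('name', 'foo'), ('ctx', 'store')]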
class Stmt(Node):
"""Base node for all statements."""
abstract = True
class Helper(Node):
"""Nodes that exist in a specific context only."""
abstract = True
class Template(Node):
"""Node that represents a template. This must be the outermost node that
is passed to the compiler.
"""
fields = ('body',)
class Output(Stmt):
"""A node that holds multiple expressions which are then printed out.
This is used both for the `print` statement and the regular template data.
"""
fields = ('nodes',)
class Extends(Stmt):
"""Represents an extends statement."""
fields = ('template',)
class For(Stmt):
"""The for loop. `target` is the target for the iteration (usually a
:class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
of nodes that are used as loop-body, and `else_` a list of nodes for the
`else` block. If no else node exists it has to be an empty list.
For filtered nodes an expression can be stored as `test`, otherwise `None`.
"""
fields = ('target', 'iter', 'body', 'else_', 'test', 'recursive')
class If(Stmt):
"""If `test` is true, `body` is rendered, else `else_`."""
fields = ('test', 'body', 'else_')
class Macro(Stmt):
"""A macro definition. `name` is the name of the macro, `args` a list of
arguments and `defaults` a list of defaults if there are any. `body` is
a list of nodes for the macro body.
"""
fields = ('name', 'args', 'defaults', 'body')
class CallBlock(Stmt):
"""Like a macro without a name but a call instead. `call` is called with
the unnamed macro as `caller` argument this node holds.
"""
fields = ('call', 'args', 'defaults', 'body')
class FilterBlock(Stmt):
"""Node for filter sections."""
fields = ('body', 'filter')
class Block(Stmt):
"""A node that represents a block."""
fields = ('name', 'body', 'scoped')
class Include(Stmt):
"""A node that represents the include tag."""
fields = ('template', 'with_context', 'ignore_missing')
class Import(Stmt):
"""A node that represents the import tag."""
fields = ('template', 'target', 'with_context')
class FromImport(Stmt):
"""A node that represents the from import tag. It's important to not
pass unsafe names to the name attribute. The compiler translates the
attribute lookups directly into getattr calls and does *not* use the
subscript callback of the interface. As exported variables may not
start with double underscores (which the parser asserts) this is not a
problem for regular Jinja code, but if this node is used in an extension
extra care must be taken.
The list of names may contain tuples if aliases are wanted.
"""
fields = ('template', 'names', 'with_context')
class ExprStmt(Stmt):
"""A statement that evaluates an expression and discards the result."""
fields = ('node',)
class Assign(Stmt):
"""Assigns an expression to a target."""
fields = ('target', 'node')
class Expr(Node):
"""Baseclass for all expressions."""
abstract = True
def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible()
def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False
class BinExpr(Expr):
"""Baseclass for all binary expressions."""
fields = ('left', 'right')
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_binops:
raise Impossible()
f = _binop_to_func[self.operator]
try:
return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
except Exception:
raise Impossible()
class UnaryExpr(Expr):
"""Baseclass for all unary expressions."""
fields = ('node',)
operator = None
abstract = True
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
# intercepted operators cannot be folded at compile time
if self.environment.sandboxed and \
self.operator in self.environment.intercepted_unops:
raise Impossible()
f = _uaop_to_func[self.operator]
try:
return f(self.node.as_const(eval_ctx))
except Exception:
raise Impossible()
class Name(Expr):
"""Looks up a name or stores a value in a name.
The `ctx` of the node can be one of the following values:
- `store`: store a value in the name
- `load`: load that name
- `param`: like `store` but if the name was defined as function parameter.
"""
fields = ('name', 'ctx')
def can_assign(self):
return self.name not in ('true', 'false', 'none',
'True', 'False', 'None')
class Literal(Expr):
"""Baseclass for literals."""
abstract = True
class Const(Literal):
"""All constant values. The parser will return this node for simple
constants such as ``42`` or ``"foo"`` but it can be used to store more
    complex values such as lists too. Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true) can be
    stored.
"""
fields = ('value',)
def as_const(self, eval_ctx=None):
return self.value
@classmethod
def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment)
class TemplateData(Literal):
"""A constant template string."""
fields = ('data',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
if eval_ctx.autoescape:
return Markup(self.data)
return self.data
class Tuple(Literal):
"""For loop unpacking and some other things like multiple arguments
for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
is used for loading the names or storing.
"""
fields = ('items', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return tuple(x.as_const(eval_ctx) for x in self.items)
def can_assign(self):
for item in self.items:
if not item.can_assign():
return False
return True
class List(Literal):
"""Any list literal such as ``[1, 2, 3]``"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return [x.as_const(eval_ctx) for x in self.items]
class Dict(Literal):
"""Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
:class:`Pair` nodes.
"""
fields = ('items',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return dict(x.as_const(eval_ctx) for x in self.items)
class Pair(Helper):
"""A key, value pair for dicts."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)
class Keyword(Helper):
"""A key, value pair for keyword arguments where key is a string."""
fields = ('key', 'value')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
"""A conditional expression (inline if expression). (``{{
foo if bar else baz }}``)
"""
fields = ('test', 'expr1', 'expr2')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.test.as_const(eval_ctx):
return self.expr1.as_const(eval_ctx)
# if we evaluate to an undefined object, we better do that at runtime
if self.expr2 is None:
raise Impossible()
return self.expr2.as_const(eval_ctx)
class Filter(Expr):
"""This node applies a filter on an expression. `name` is the name of
the filter, the rest of the fields are the same as for :class:`Call`.
If the `node` of a filter is `None` the contents of the last buffer are
filtered. Buffers are created by macros and filter blocks.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile or self.node is None:
raise Impossible()
# we have to be careful here because we call filter_ below.
# if this variable would be called filter, 2to3 would wrap the
        # call in a list because it is assuming we are talking about the
# builtin filter function here which no longer returns a list in
# python 3. because of that, do not rename filter_ to filter!
filter_ = self.environment.filters.get(self.name)
if filter_ is None or getattr(filter_, 'contextfilter', False):
raise Impossible()
obj = self.node.as_const(eval_ctx)
args = [x.as_const(eval_ctx) for x in self.args]
if getattr(filter_, 'evalcontextfilter', False):
args.insert(0, eval_ctx)
elif getattr(filter_, 'environmentfilter', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return filter_(obj, *args, **kwargs)
except Exception:
raise Impossible()
class Test(Expr):
"""Applies a test on an expression. `name` is the name of the test, the
rest of the fields are the same as for :class:`Call`.
"""
fields = ('node', 'name', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
class Call(Expr):
"""Calls an expression. `args` is a list of arguments, `kwargs` a list
of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
and `dyn_kwargs` has to be either `None` or a node that is used as
node for dynamic positional (``*args``) or keyword (``**kwargs``)
arguments.
"""
fields = ('node', 'args', 'kwargs', 'dyn_args', 'dyn_kwargs')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible()
class Getitem(Expr):
"""Get an attribute or item from an expression and prefer the item."""
fields = ('node', 'arg', 'ctx')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if self.ctx != 'load':
raise Impossible()
try:
return self.environment.getitem(self.node.as_const(eval_ctx),
self.arg.as_const(eval_ctx))
except Exception:
raise Impossible()
def can_assign(self):
return False
class Getattr(Expr):
"""Get an attribute or item from an expression that is a ascii-only
bytestring and prefer the attribute.
"""
fields = ('node', 'attr', 'ctx')
def as_const(self, eval_ctx=None):
if self.ctx != 'load':
raise Impossible()
try:
eval_ctx = get_eval_context(self, eval_ctx)
return self.environment.getattr(self.node.as_const(eval_ctx),
self.attr)
except Exception:
raise Impossible()
def can_assign(self):
return False
class Slice(Expr):
"""Represents a slice object. This must only be used as argument for
:class:`Subscript`.
"""
fields = ('start', 'stop', 'step')
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
def const(obj):
if obj is None:
return None
return obj.as_const(eval_ctx)
return slice(const(self.start), const(self.stop), const(self.step))
class Concat(Expr):
"""Concatenates the list of expressions provided after converting them to
unicode.
"""
fields = ('nodes',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return ''.join(text_type(x.as_const(eval_ctx)) for x in self.nodes)
class Compare(Expr):
"""Compares an expression with some other expressions. `ops` must be a
list of :class:`Operand`\s.
"""
fields = ('expr', 'ops')
    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        result = value = self.expr.as_const(eval_ctx)
        try:
            for op in self.ops:
                new_value = op.expr.as_const(eval_ctx)
                result = _cmpop_to_func[op.op](value, new_value)
                # Chained comparisons short-circuit like Python's: once one
                # link evaluates false, the whole chain is false.
                if not result:
                    return False
                value = new_value
        except Exception:
            raise Impossible()
        return result
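# Folding sketch (illustrative; the node needs an attached jinja2
# Environment to build an eval context):
#
#     env = Environment()
#     node = Compare(Const(1), [Operand('lt', Const(2)),
#                               Operand('lt', Const(3))],
#                    environment=env)
#     node.as_const()   # -> True, mirroring Python's chained 1 < 2 < 3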
class Operand(Helper):
"""Holds an operator and an expression."""
fields = ('op', 'expr')
if __debug__:
Operand.__doc__ += '\nThe following operators are available: ' + \
', '.join(sorted('``%s``' % x for x in set(_binop_to_func) |
set(_uaop_to_func) | set(_cmpop_to_func)))
class Mul(BinExpr):
"""Multiplies the left with the right node."""
operator = '*'
class Div(BinExpr):
"""Divides the left by the right node."""
operator = '/'
class FloorDiv(BinExpr):
"""Divides the left by the right node and truncates conver the
result into an integer by truncating.
"""
operator = '//'
class Add(BinExpr):
"""Add the left to the right node."""
operator = '+'
class Sub(BinExpr):
"""Substract the right from the left node."""
operator = '-'
class Mod(BinExpr):
"""Left modulo right."""
operator = '%'
class Pow(BinExpr):
"""Left to the power of right."""
operator = '**'
class And(BinExpr):
"""Short circuited AND."""
operator = 'and'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)
class Or(BinExpr):
"""Short circuited OR."""
operator = 'or'
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)
class Not(UnaryExpr):
"""Negate the expression."""
operator = 'not'
class Neg(UnaryExpr):
"""Make the expression negative."""
operator = '-'
class Pos(UnaryExpr):
"""Make the expression positive (noop for most expressions)"""
operator = '+'
# Helpers for extensions
class EnvironmentAttribute(Expr):
"""Loads an attribute from the environment object. This is useful for
extensions that want to call a callback stored on the environment.
"""
fields = ('name',)
class ExtensionAttribute(Expr):
"""Returns the attribute of an extension bound to the environment.
The identifier is the identifier of the :class:`Extension`.
This node is usually constructed by calling the
:meth:`~jinja2.ext.Extension.attr` method on an extension.
"""
fields = ('identifier', 'name')
class ImportedName(Expr):
"""If created with an import name the import name is returned on node
access. For example ``ImportedName('cgi.escape')`` returns the `escape`
function from the cgi module on evaluation. Imports are optimized by the
compiler so there is no need to assign them to local variables.
"""
fields = ('importname',)
class InternalName(Expr):
"""An internal name in the compiler. You cannot create these nodes
yourself but the parser provides a
:meth:`~jinja2.parser.Parser.free_identifier` method that creates
a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
"""
fields = ('name',)
def __init__(self):
raise TypeError('Can\'t create internal names. Use the '
'`free_identifier` method on a parser.')
class MarkSafe(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`)."""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
"""Mark the wrapped expression as safe (wrap it as `Markup`) but
only if autoescaping is active.
.. versionadded:: 2.5
"""
fields = ('expr',)
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
expr = self.expr.as_const(eval_ctx)
if eval_ctx.autoescape:
return Markup(expr)
return expr
class ContextReference(Expr):
"""Returns the current template context. It can be used like a
:class:`Name` node, with a ``'load'`` ctx and will return the
current :class:`~jinja2.runtime.Context` object.
    Here is an example that assigns the current template name to a
variable named `foo`::
Assign(Name('foo', ctx='store'),
Getattr(ContextReference(), 'name'))
"""
class Continue(Stmt):
"""Continue a loop."""
class Break(Stmt):
"""Break a loop."""
class Scope(Stmt):
"""An artificial scope."""
fields = ('body',)
class EvalContextModifier(Stmt):
"""Modifies the eval context. For each option that should be modified,
a :class:`Keyword` has to be added to the :attr:`options` list.
Example to change the `autoescape` setting::
EvalContextModifier(options=[Keyword('autoescape', Const(True))])
"""
fields = ('options',)
class ScopedEvalContextModifier(EvalContextModifier):
"""Modifies the eval context and reverts it later. Works exactly like
:class:`EvalContextModifier` but will only modify the
:class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
"""
fields = ('body',)
# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
raise TypeError('can\'t create custom node types')
NodeType.__new__ = staticmethod(_failing_new); del _failing_new
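# Illustrative note: from here on, defining a new subclass of Node (e.g.
# ``class Custom(Expr): pass``) raises TypeError, so extensions must build
# trees from the node classes defined above.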
|
agpl-3.0
|
horczech/coala-bears
|
bears/python/requirements/PySafetyBear.py
|
18
|
3252
|
from collections import namedtuple
import pkg_resources
import re
from safety import safety
from coalib.bears.LocalBear import LocalBear
from dependency_management.requirements.PipRequirement import PipRequirement
from coalib.results.Result import Result
from coalib.results.SourceRange import SourceRange
from coalib.settings.Setting import typed_list
# the safety module expects an object that looks like this
# (not importing it from there because it's in a private-ish location)
Package = namedtuple('Package', ('key', 'version'))
class PySafetyBear(LocalBear):
"""
Checks if any of your Python dependencies have known security issues.
Data is taken from pyup.io's vulnerability database hosted at
https://github.com/pyupio/safety.
"""
LANGUAGES = {
'Python Requirements',
'Python 2 Requirements',
'Python 3 Requirements',
}
AUTHORS = {'Bence Nagy'}
REQUIREMENTS = {PipRequirement('safety', '0.5.1')}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL'
CAN_DETECT = {'Security'}
def run(self, filename, file):
"""
Checks for vulnerable package versions in requirements files.
"""
packages = list(
Package(key=req.key, version=req.specs[0][1])
for req in self.try_parse_requirements(file)
if len(req.specs) == 1 and req.specs[0][0] == '=='
)
if not packages:
return
for vulnerability in safety.check(packages=packages):
if vulnerability.is_cve:
message_template = (
'{vuln.name}{vuln.spec} is vulnerable to {vuln.cve_id} '
'and your project is using {vuln.version}.'
)
else:
message_template = (
'{vuln.name}{vuln.spec} is vulnerable and your project is '
'using {vuln.version}.'
)
# StopIteration should not ever happen so skipping its branch
line_number, line = next( # pragma: no branch
(index, line) for index, line in enumerate(file, start=1)
if vulnerability.name in line
)
version_spec_match = re.search(r'[=<>]+(\S+?)(?:$|\s|#)', line)
source_range = SourceRange.from_values(
filename,
line_number,
version_spec_match.start(1) + 1,
line_number,
version_spec_match.end(1) + 1,
)
yield Result(
self,
message_template.format(vuln=vulnerability),
additional_info=vulnerability.description,
affected_code=(source_range, ),
)
@staticmethod
def try_parse_requirements(lines: typed_list(str)):
"""
Yields all package requirements parseable from the given lines.
:param lines: An iterable of lines from a requirements file.
"""
for line in lines:
try:
yield from pkg_resources.parse_requirements(line)
except pkg_resources.RequirementParseError:
# unsupported requirement specification
pass
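# Usage sketch (illustrative): for a requirements file containing the line
#
#     requests==2.5.3
#
# the bear builds Package(key='requests', version='2.5.3'), passes it to
# safety.check(), and, if that version appears in the vulnerability
# database, yields a Result whose SourceRange points at the '2.5.3'
# version spec on that line.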
|
agpl-3.0
|
junhuac/MQUIC
|
depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/boto/tests/integration/gs/test_resumable_uploads.py
|
101
|
25789
|
# Copyright 2010 Google Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Tests of Google Cloud Storage resumable uploads.
"""
import StringIO
import errno
import random
import os
import time
import boto
from boto import storage_uri
from boto.gs.resumable_upload_handler import ResumableUploadHandler
from boto.exception import InvalidUriError
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
from cb_test_harness import CallbackTestHarness
from tests.integration.gs.testcase import GSTestCase
SMALL_KEY_SIZE = 2 * 1024 # 2 KB.
LARGE_KEY_SIZE = 500 * 1024 # 500 KB.
LARGEST_KEY_SIZE = 1024 * 1024 # 1 MB.
class ResumableUploadTests(GSTestCase):
"""Resumable upload test suite."""
def build_input_file(self, size):
buf = []
# I manually construct the random data here instead of calling
# os.urandom() because I want to constrain the range of data (in
        # this case to '0'..'9') so the test
# code can easily overwrite part of the StringIO file with
# known-to-be-different values.
for i in range(size):
buf.append(str(random.randint(0, 9)))
file_as_string = ''.join(buf)
return (file_as_string, StringIO.StringIO(file_as_string))
def make_small_file(self):
return self.build_input_file(SMALL_KEY_SIZE)
def make_large_file(self):
return self.build_input_file(LARGE_KEY_SIZE)
def make_tracker_file(self, tmpdir=None):
if not tmpdir:
tmpdir = self._MakeTempDir()
tracker_file = os.path.join(tmpdir, 'tracker')
return tracker_file
def test_non_resumable_upload(self):
"""
Tests that non-resumable uploads work
"""
small_src_file_as_string, small_src_file = self.make_small_file()
        # Seek to end in case it's the first test.
small_src_file.seek(0, os.SEEK_END)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(small_src_file)
self.fail("should fail as need to rewind the filepointer")
except AttributeError:
pass
# Now try calling with a proper rewind.
dst_key.set_contents_from_file(small_src_file, rewind=True)
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_upload_without_persistent_tracker(self):
"""
Tests a single resumable upload, with no tracker URI persistence
"""
res_upload_handler = ResumableUploadHandler()
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, res_upload_handler=res_upload_handler)
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_failed_upload_with_persistent_tracker(self):
"""
Tests that failed resumable upload leaves a correct tracker URI file
"""
harness = CallbackTestHarness()
tracker_file_name = self.make_tracker_file()
res_upload_handler = ResumableUploadHandler(
tracker_file_name=tracker_file_name, num_retries=0)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
# We'll get a ResumableUploadException at this point because
# of CallbackTestHarness (above). Check that the tracker file was
# created correctly.
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
self.assertTrue(os.path.exists(tracker_file_name))
f = open(tracker_file_name)
uri_from_file = f.readline().strip()
f.close()
self.assertEqual(uri_from_file,
res_upload_handler.get_tracker_uri())
def test_retryable_exception_recovery(self):
"""
Tests handling of a retryable exception
"""
# Test one of the RETRYABLE_EXCEPTIONS.
exception = ResumableUploadHandler.RETRYABLE_EXCEPTIONS[0]
harness = CallbackTestHarness(exception=exception)
res_upload_handler = ResumableUploadHandler(num_retries=1)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
# Ensure uploaded object has correct content.
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_broken_pipe_recovery(self):
"""
Tests handling of a Broken Pipe (which interacts with an httplib bug)
"""
exception = IOError(errno.EPIPE, "Broken pipe")
harness = CallbackTestHarness(exception=exception)
res_upload_handler = ResumableUploadHandler(num_retries=1)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
# Ensure uploaded object has correct content.
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_non_retryable_exception_handling(self):
"""
Tests a resumable upload that fails with a non-retryable exception
"""
harness = CallbackTestHarness(
exception=OSError(errno.EACCES, 'Permission denied'))
res_upload_handler = ResumableUploadHandler(num_retries=1)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected OSError')
except OSError, e:
# Ensure the error was re-raised.
self.assertEqual(e.errno, 13)
def test_failed_and_restarted_upload_with_persistent_tracker(self):
"""
Tests resumable upload that fails once and then completes, with tracker
file
"""
harness = CallbackTestHarness()
tracker_file_name = self.make_tracker_file()
res_upload_handler = ResumableUploadHandler(
tracker_file_name=tracker_file_name, num_retries=1)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
# Ensure uploaded object has correct content.
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
# Ensure tracker file deleted.
self.assertFalse(os.path.exists(tracker_file_name))
def test_multiple_in_process_failures_then_succeed(self):
"""
Tests resumable upload that fails twice in one process, then completes
"""
res_upload_handler = ResumableUploadHandler(num_retries=3)
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, res_upload_handler=res_upload_handler)
# Ensure uploaded object has correct content.
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_multiple_in_process_failures_then_succeed_with_tracker_file(self):
"""
Tests resumable upload that fails completely in one process,
then when restarted completes, using a tracker file
"""
# Set up test harness that causes more failures than a single
# ResumableUploadHandler instance will handle, writing enough data
# before the first failure that some of it survives that process run.
harness = CallbackTestHarness(
fail_after_n_bytes=LARGE_KEY_SIZE/2, num_times_to_fail=2)
tracker_file_name = self.make_tracker_file()
res_upload_handler = ResumableUploadHandler(
tracker_file_name=tracker_file_name, num_retries=1)
larger_src_file_as_string, larger_src_file = self.make_large_file()
larger_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
self.assertEqual(e.disposition,
ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
self.assertTrue(os.path.exists(tracker_file_name))
# Try it one more time; this time should succeed.
larger_src_file.seek(0)
dst_key.set_contents_from_file(
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.assertEqual(LARGE_KEY_SIZE, dst_key.size)
self.assertEqual(larger_src_file_as_string,
dst_key.get_contents_as_string())
self.assertFalse(os.path.exists(tracker_file_name))
# Ensure some of the file was uploaded both before and after failure.
self.assertTrue(len(harness.transferred_seq_before_first_failure) > 1
and
len(harness.transferred_seq_after_first_failure) > 1)
    def test_upload_with_initial_partial_upload_before_failure(self):
"""
Tests resumable upload that successfully uploads some content
before it fails, then restarts and completes
"""
# Set up harness to fail upload after several hundred KB so upload
# server will have saved something before we retry.
harness = CallbackTestHarness(
fail_after_n_bytes=LARGE_KEY_SIZE/2)
res_upload_handler = ResumableUploadHandler(num_retries=1)
larger_src_file_as_string, larger_src_file = self.make_large_file()
larger_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
# Ensure uploaded object has correct content.
self.assertEqual(LARGE_KEY_SIZE, dst_key.size)
self.assertEqual(larger_src_file_as_string,
dst_key.get_contents_as_string())
# Ensure some of the file was uploaded both before and after failure.
self.assertTrue(len(harness.transferred_seq_before_first_failure) > 1
and
len(harness.transferred_seq_after_first_failure) > 1)
def test_empty_file_upload(self):
"""
Tests uploading an empty file (exercises boundary conditions).
"""
res_upload_handler = ResumableUploadHandler()
empty_src_file = StringIO.StringIO('')
empty_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
empty_src_file, res_upload_handler=res_upload_handler)
self.assertEqual(0, dst_key.size)
def test_upload_retains_metadata(self):
"""
Tests that resumable upload correctly sets passed metadata
"""
res_upload_handler = ResumableUploadHandler()
headers = {'Content-Type' : 'text/plain', 'x-goog-meta-abc' : 'my meta',
'x-goog-acl' : 'public-read'}
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, headers=headers,
res_upload_handler=res_upload_handler)
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
dst_key.open_read()
self.assertEqual('text/plain', dst_key.content_type)
self.assertTrue('abc' in dst_key.metadata)
self.assertEqual('my meta', str(dst_key.metadata['abc']))
acl = dst_key.get_acl()
for entry in acl.entries.entry_list:
if str(entry.scope) == '<AllUsers>':
                self.assertEqual('READ', str(entry.permission))
return
self.fail('No <AllUsers> scope found')
def test_upload_with_file_size_change_between_starts(self):
"""
Tests resumable upload on a file that changes sizes between initial
upload start and restart
"""
harness = CallbackTestHarness(
fail_after_n_bytes=LARGE_KEY_SIZE/2)
tracker_file_name = self.make_tracker_file()
# Set up first process' ResumableUploadHandler not to do any
# retries (initial upload request will establish expected size to
# upload server).
res_upload_handler = ResumableUploadHandler(
tracker_file_name=tracker_file_name, num_retries=0)
larger_src_file_as_string, larger_src_file = self.make_large_file()
larger_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
larger_src_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
# First abort (from harness-forced failure) should be
# ABORT_CUR_PROCESS.
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT_CUR_PROCESS)
# Ensure a tracker file survived.
self.assertTrue(os.path.exists(tracker_file_name))
# Try it again, this time with different size source file.
# Wait 1 second between retry attempts, to give upload server a
# chance to save state so it can respond to changed file size with
# 500 response in the next attempt.
time.sleep(1)
try:
largest_src_file = self.build_input_file(LARGEST_KEY_SIZE)[1]
largest_src_file.seek(0)
dst_key.set_contents_from_file(
largest_src_file, res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
# This abort should be a hard abort (file size changing during
# transfer).
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(e.message.find('file size changed'), -1, e.message)
def test_upload_with_file_size_change_during_upload(self):
"""
Tests resumable upload on a file that changes sizes while upload
in progress
"""
# Create a file we can change during the upload.
test_file_size = 500 * 1024 # 500 KB.
test_file = self.build_input_file(test_file_size)[1]
harness = CallbackTestHarness(fp_to_change=test_file,
fp_change_pos=test_file_size)
res_upload_handler = ResumableUploadHandler(num_retries=1)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
test_file, cb=harness.call,
res_upload_handler=res_upload_handler)
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('File changed during upload'), -1)
def test_upload_with_file_content_change_during_upload(self):
"""
Tests resumable upload on a file that changes one byte of content
(so, size stays the same) while upload in progress.
"""
def Execute():
res_upload_handler = ResumableUploadHandler(num_retries=1)
dst_key = self._MakeKey(set_contents=False)
bucket_uri = storage_uri('gs://' + dst_key.bucket.name)
dst_key_uri = bucket_uri.clone_replace_name(dst_key.name)
try:
dst_key.set_contents_from_file(
test_file, cb=harness.call,
res_upload_handler=res_upload_handler)
return False
except ResumableUploadException, e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
# Ensure the file size didn't change.
test_file.seek(0, os.SEEK_END)
self.assertEqual(test_file_size, test_file.tell())
self.assertNotEqual(
e.message.find('md5 signature doesn\'t match etag'), -1)
# Ensure the bad data wasn't left around.
try:
dst_key_uri.get_key()
self.fail('Did not get expected InvalidUriError')
except InvalidUriError, e:
pass
return True
test_file_size = 500 * 1024 # 500 KB
# The sizes of all the blocks written, except the final block, must be a
# multiple of 256K bytes. We need to trigger a failure after the first
# 256K bytes have been uploaded so that at least one block of data is
# written on the server.
# See https://developers.google.com/storage/docs/concepts-techniques#resumable
# for more information about chunking of uploads.
n_bytes = 300 * 1024 # 300 KB
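        # Sanity check on the arithmetic: 300 KB is past the first 256 KB
        # block boundary but short of the 500 KB file, so at the forced
        # failure the server should have committed exactly one full 256 KB
        # block, with the remainder still uncommitted.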
delay = 0
# First, try the test without a delay. If that fails, try it with a
# 15-second delay. The first attempt may fail to recognize that the
# server has a block if the server hasn't yet committed that block
# when we resume the transfer. This would cause a restarted upload
# instead of a resumed upload.
for attempt in range(2):
test_file = self.build_input_file(test_file_size)[1]
harness = CallbackTestHarness(
fail_after_n_bytes=n_bytes,
fp_to_change=test_file,
                # Write to byte 1, as the CallbackTestHarness writes
                # 3 bytes. This will result in the data on the server
                # being different from the local file.
fp_change_pos=1,
delay_after_change=delay)
if Execute():
break
if (attempt == 0 and
0 in harness.transferred_seq_after_first_failure):
# We can confirm the upload was restarted instead of resumed
# by determining if there is an entry of 0 in the
# transferred_seq_after_first_failure list.
# In that case, try again with a 15 second delay.
delay = 15
continue
self.fail('Did not get expected ResumableUploadException')
def test_upload_with_content_length_header_set(self):
"""
Tests resumable upload on a file when the user supplies a
Content-Length header. This is used by gsutil, for example,
to set the content length when gzipping a file.
"""
res_upload_handler = ResumableUploadHandler()
small_src_file_as_string, small_src_file = self.make_small_file()
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
try:
dst_key.set_contents_from_file(
small_src_file, res_upload_handler=res_upload_handler,
headers={'Content-Length' : SMALL_KEY_SIZE})
self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Attempt to specify Content-Length header'), -1)
def test_upload_with_syntactically_invalid_tracker_uri(self):
"""
Tests resumable upload with a syntactically invalid tracker URI
"""
tmp_dir = self._MakeTempDir()
syntactically_invalid_tracker_file_name = os.path.join(tmp_dir,
'synt_invalid_uri_tracker')
with open(syntactically_invalid_tracker_file_name, 'w') as f:
f.write('ftp://example.com')
res_upload_handler = ResumableUploadHandler(
tracker_file_name=syntactically_invalid_tracker_file_name)
small_src_file_as_string, small_src_file = self.make_small_file()
        # An error should be printed about the invalid URI, but then the
        # upload should run successfully.
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, res_upload_handler=res_upload_handler)
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
def test_upload_with_invalid_upload_id_in_tracker_file(self):
"""
Tests resumable upload with invalid upload ID
"""
invalid_upload_id = ('http://pub.storage.googleapis.com/?upload_id='
'AyzB2Uo74W4EYxyi5dp_-r68jz8rtbvshsv4TX7srJVkJ57CxTY5Dw2')
tmpdir = self._MakeTempDir()
invalid_upload_id_tracker_file_name = os.path.join(tmpdir,
'invalid_upload_id_tracker')
with open(invalid_upload_id_tracker_file_name, 'w') as f:
f.write(invalid_upload_id)
res_upload_handler = ResumableUploadHandler(
tracker_file_name=invalid_upload_id_tracker_file_name)
small_src_file_as_string, small_src_file = self.make_small_file()
        # An error should occur, but then the tracker URI should be
        # regenerated and the upload should succeed.
small_src_file.seek(0)
dst_key = self._MakeKey(set_contents=False)
dst_key.set_contents_from_file(
small_src_file, res_upload_handler=res_upload_handler)
self.assertEqual(SMALL_KEY_SIZE, dst_key.size)
self.assertEqual(small_src_file_as_string,
dst_key.get_contents_as_string())
self.assertNotEqual(invalid_upload_id,
res_upload_handler.get_tracker_uri())
def test_upload_with_unwritable_tracker_file(self):
"""
Tests resumable upload with an unwritable tracker file
"""
# Make dir where tracker_file lives temporarily unwritable.
tmp_dir = self._MakeTempDir()
tracker_file_name = self.make_tracker_file(tmp_dir)
save_mod = os.stat(tmp_dir).st_mode
        try:
            os.chmod(tmp_dir, 0)
            res_upload_handler = ResumableUploadHandler(
                tracker_file_name=tracker_file_name)
            self.fail('Did not get expected ResumableUploadException')
except ResumableUploadException, e:
self.assertEqual(e.disposition, ResumableTransferDisposition.ABORT)
self.assertNotEqual(
e.message.find('Couldn\'t write URI tracker file'), -1)
finally:
# Restore original protection of dir where tracker_file lives.
os.chmod(tmp_dir, save_mod)
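
# A minimal usage sketch (not part of the test suite), assuming a boto Key
# object `key` and an open local file `fp` -- both hypothetical names:
#
#   handler = ResumableUploadHandler(
#       tracker_file_name='/tmp/my_upload.tracker', num_retries=5)
#   key.set_contents_from_file(fp, res_upload_handler=handler)
#
# If the process dies mid-transfer, re-running the same call with the same
# tracker file should resume from the saved tracker URI rather than
# restarting the upload from scratch.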
|
mit
|
binhqnguyen/lena-local
|
src/antenna/bindings/modulegen__gcc_LP64.py
|
48
|
75162
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
def handle_error(self, wrapper, exception, traceback_):
warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
return True
pybindgen.settings.error_handler = ErrorHandler()
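# Note: returning True from handle_error is taken by pybindgen to mean the
# error was handled, so the offending wrapper is skipped and code generation
# continues instead of aborting the whole module.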
import sys
def module_init():
root_module = Module('ns.antenna', cpp_namespace='::ns3')
return root_module
def register_types(module):
root_module = module.get_root()
## angles.h (module 'antenna'): ns3::Angles [struct]
module.add_class('Angles')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
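    # Note: the memory_policy above ties Python object lifetime to ns-3's
    # intrusive reference counting, so the generated wrappers call
    # Ref()/Unref() rather than deleting the C++ object themselves.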
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## vector.h (module 'core'): ns3::Vector2D [class]
module.add_class('Vector2D', import_from_module='ns.core')
## vector.h (module 'core'): ns3::Vector3D [class]
module.add_class('Vector3D', import_from_module='ns.core')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## antenna-model.h (module 'antenna'): ns3::AntennaModel [class]
module.add_class('AntennaModel', parent=root_module['ns3::Object'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel [class]
module.add_class('CosineAntennaModel', parent=root_module['ns3::AntennaModel'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel [class]
module.add_class('IsotropicAntennaModel', parent=root_module['ns3::AntennaModel'])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel [class]
module.add_class('ParabolicAntennaModel', parent=root_module['ns3::AntennaModel'])
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector2DChecker [class]
module.add_class('Vector2DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector2DValue [class]
module.add_class('Vector2DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## vector.h (module 'core'): ns3::Vector3DChecker [class]
module.add_class('Vector3DChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## vector.h (module 'core'): ns3::Vector3DValue [class]
module.add_class('Vector3DValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
typehandlers.add_type_alias('ns3::Vector3DValue', 'ns3::VectorValue')
typehandlers.add_type_alias('ns3::Vector3DValue*', 'ns3::VectorValue*')
typehandlers.add_type_alias('ns3::Vector3DValue&', 'ns3::VectorValue&')
module.add_typedef(root_module['ns3::Vector3DValue'], 'VectorValue')
typehandlers.add_type_alias('ns3::Vector3D', 'ns3::Vector')
typehandlers.add_type_alias('ns3::Vector3D*', 'ns3::Vector*')
typehandlers.add_type_alias('ns3::Vector3D&', 'ns3::Vector&')
module.add_typedef(root_module['ns3::Vector3D'], 'Vector')
typehandlers.add_type_alias('ns3::Vector3DChecker', 'ns3::VectorChecker')
typehandlers.add_type_alias('ns3::Vector3DChecker*', 'ns3::VectorChecker*')
typehandlers.add_type_alias('ns3::Vector3DChecker&', 'ns3::VectorChecker&')
module.add_typedef(root_module['ns3::Vector3DChecker'], 'VectorChecker')
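    # Note: the alias/typedef pairs above let the generated bindings accept
    # the C++ typedefs (ns3::Vector, ns3::VectorValue, ns3::VectorChecker)
    # wherever the underlying 3D types appear in wrapped signatures.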
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
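    # No types from the ns3::FatalImpl namespace are registered here; the
    # nested module created above is left empty.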
def register_methods(root_module):
register_Ns3Angles_methods(root_module, root_module['ns3::Angles'])
register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
register_Ns3Vector2D_methods(root_module, root_module['ns3::Vector2D'])
register_Ns3Vector3D_methods(root_module, root_module['ns3::Vector3D'])
register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
register_Ns3Object_methods(root_module, root_module['ns3::Object'])
register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
register_Ns3AntennaModel_methods(root_module, root_module['ns3::AntennaModel'])
register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
register_Ns3CosineAntennaModel_methods(root_module, root_module['ns3::CosineAntennaModel'])
register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
register_Ns3IsotropicAntennaModel_methods(root_module, root_module['ns3::IsotropicAntennaModel'])
register_Ns3ParabolicAntennaModel_methods(root_module, root_module['ns3::ParabolicAntennaModel'])
register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
register_Ns3Vector2DChecker_methods(root_module, root_module['ns3::Vector2DChecker'])
register_Ns3Vector2DValue_methods(root_module, root_module['ns3::Vector2DValue'])
register_Ns3Vector3DChecker_methods(root_module, root_module['ns3::Vector3DChecker'])
register_Ns3Vector3DValue_methods(root_module, root_module['ns3::Vector3DValue'])
return
def register_Ns3Angles_methods(root_module, cls):
cls.add_output_stream_operator()
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Angles const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Angles const &', 'arg0')])
## angles.h (module 'antenna'): ns3::Angles::Angles() [constructor]
cls.add_constructor([])
## angles.h (module 'antenna'): ns3::Angles::Angles(double phi, double theta) [constructor]
cls.add_constructor([param('double', 'phi'), param('double', 'theta')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v) [constructor]
cls.add_constructor([param('ns3::Vector', 'v')])
## angles.h (module 'antenna'): ns3::Angles::Angles(ns3::Vector v, ns3::Vector o) [constructor]
cls.add_constructor([param('ns3::Vector', 'v'), param('ns3::Vector', 'o')])
## angles.h (module 'antenna'): ns3::Angles::phi [variable]
cls.add_instance_attribute('phi', 'double', is_const=False)
## angles.h (module 'antenna'): ns3::Angles::theta [variable]
cls.add_instance_attribute('theta', 'double', is_const=False)
return
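
# Hypothetical usage sketch of the generated bindings (module and member
# names taken from the registrations above):
#
#   import ns.antenna
#   a = ns.antenna.Angles(0.5, 1.0)   # Angles(double phi, double theta)
#   print a.phi, a.theta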
def register_Ns3AttributeConstructionList_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
cls.add_method('Add',
'void',
[param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
cls.add_method('Begin',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
cls.add_method('End',
'std::_List_const_iterator< ns3::AttributeConstructionList::Item >',
[],
is_const=True)
## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('Find',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True)
return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
cls.add_constructor([])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
return
def register_Ns3CallbackBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
cls.add_method('GetImpl',
'ns3::Ptr< ns3::CallbackImplBase >',
[],
is_const=True)
## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
visibility='protected')
## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
cls.add_method('Demangle',
'std::string',
[param('std::string const &', 'mangled')],
is_static=True, visibility='protected')
return
def register_Ns3ObjectBase_methods(root_module, cls):
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
cls.add_constructor([])
## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
cls.add_method('GetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
is_const=True)
## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
cls.add_method('GetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
is_const=True)
## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttribute',
'void',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
cls.add_method('SetAttributeFailSafe',
'bool',
[param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceConnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnect',
'bool',
[param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
cls.add_method('TraceDisconnectWithoutContext',
'bool',
[param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
cls.add_method('ConstructSelf',
'void',
[param('ns3::AttributeConstructionList const &', 'attributes')],
visibility='protected')
## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
cls.add_method('NotifyConstructionCompleted',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectDeleter_methods(root_module, cls):
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter() [constructor]
cls.add_constructor([])
## object.h (module 'core'): ns3::ObjectDeleter::ObjectDeleter(ns3::ObjectDeleter const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
## object.h (module 'core'): static void ns3::ObjectDeleter::Delete(ns3::Object * object) [member function]
cls.add_method('Delete',
'void',
[param('ns3::Object *', 'object')],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::SimpleRefCount(ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter>::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TypeId_methods(root_module, cls):
cls.add_binary_comparison_operator('<')
cls.add_binary_comparison_operator('!=')
cls.add_output_stream_operator()
cls.add_binary_comparison_operator('==')
## type-id.h (module 'core'): ns3::TypeId::TypeId(char const * name) [constructor]
cls.add_constructor([param('char const *', 'name')])
## type-id.h (module 'core'): ns3::TypeId::TypeId() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TypeId(ns3::TypeId const & o) [copy constructor]
cls.add_constructor([param('ns3::TypeId const &', 'o')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddAttribute(std::string name, std::string help, uint32_t flags, ns3::AttributeValue const & initialValue, ns3::Ptr<ns3::AttributeAccessor const> accessor, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('AddAttribute',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::AddTraceSource(std::string name, std::string help, ns3::Ptr<ns3::TraceSourceAccessor const> accessor) [member function]
cls.add_method('AddTraceSource',
'ns3::TypeId',
[param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation ns3::TypeId::GetAttribute(uint32_t i) const [member function]
cls.add_method('GetAttribute',
'ns3::TypeId::AttributeInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetAttributeFullName(uint32_t i) const [member function]
cls.add_method('GetAttributeFullName',
'std::string',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetAttributeN() const [member function]
cls.add_method('GetAttributeN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): ns3::Callback<ns3::ObjectBase*,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::TypeId::GetConstructor() const [member function]
cls.add_method('GetConstructor',
'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetGroupName() const [member function]
cls.add_method('GetGroupName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeId::GetName() const [member function]
cls.add_method('GetName',
'std::string',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::GetParent() const [member function]
cls.add_method('GetParent',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::GetRegistered(uint32_t i) [member function]
cls.add_method('GetRegistered',
'ns3::TypeId',
[param('uint32_t', 'i')],
is_static=True)
## type-id.h (module 'core'): static uint32_t ns3::TypeId::GetRegisteredN() [member function]
cls.add_method('GetRegisteredN',
'uint32_t',
[],
is_static=True)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation ns3::TypeId::GetTraceSource(uint32_t i) const [member function]
cls.add_method('GetTraceSource',
'ns3::TypeId::TraceSourceInformation',
[param('uint32_t', 'i')],
is_const=True)
## type-id.h (module 'core'): uint32_t ns3::TypeId::GetTraceSourceN() const [member function]
cls.add_method('GetTraceSourceN',
'uint32_t',
[],
is_const=True)
## type-id.h (module 'core'): uint16_t ns3::TypeId::GetUid() const [member function]
cls.add_method('GetUid',
'uint16_t',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasConstructor() const [member function]
cls.add_method('HasConstructor',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::HasParent() const [member function]
cls.add_method('HasParent',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::HideFromDocumentation() [member function]
cls.add_method('HideFromDocumentation',
'ns3::TypeId',
[])
## type-id.h (module 'core'): bool ns3::TypeId::IsChildOf(ns3::TypeId other) const [member function]
cls.add_method('IsChildOf',
'bool',
[param('ns3::TypeId', 'other')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::LookupAttributeByName(std::string name, ns3::TypeId::AttributeInformation * info) const [member function]
cls.add_method('LookupAttributeByName',
'bool',
[param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)],
is_const=True)
## type-id.h (module 'core'): static ns3::TypeId ns3::TypeId::LookupByName(std::string name) [member function]
cls.add_method('LookupByName',
'ns3::TypeId',
[param('std::string', 'name')],
is_static=True)
## type-id.h (module 'core'): ns3::Ptr<ns3::TraceSourceAccessor const> ns3::TypeId::LookupTraceSourceByName(std::string name) const [member function]
cls.add_method('LookupTraceSourceByName',
'ns3::Ptr< ns3::TraceSourceAccessor const >',
[param('std::string', 'name')],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::MustHideFromDocumentation() const [member function]
cls.add_method('MustHideFromDocumentation',
'bool',
[],
is_const=True)
## type-id.h (module 'core'): bool ns3::TypeId::SetAttributeInitialValue(uint32_t i, ns3::Ptr<ns3::AttributeValue const> initialValue) [member function]
cls.add_method('SetAttributeInitialValue',
'bool',
[param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetGroupName(std::string groupName) [member function]
cls.add_method('SetGroupName',
'ns3::TypeId',
[param('std::string', 'groupName')])
## type-id.h (module 'core'): ns3::TypeId ns3::TypeId::SetParent(ns3::TypeId tid) [member function]
cls.add_method('SetParent',
'ns3::TypeId',
[param('ns3::TypeId', 'tid')])
## type-id.h (module 'core'): void ns3::TypeId::SetUid(uint16_t tid) [member function]
cls.add_method('SetUid',
'void',
[param('uint16_t', 'tid')])
return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::TraceSourceInformation(ns3::TypeId::TraceSourceInformation const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::accessor [variable]
cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::help [variable]
cls.add_instance_attribute('help', 'std::string', is_const=False)
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation::name [variable]
cls.add_instance_attribute('name', 'std::string', is_const=False)
return
def register_Ns3Vector2D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector2D::Vector2D(ns3::Vector2D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D(double _x, double _y) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y')])
## vector.h (module 'core'): ns3::Vector2D::Vector2D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector2D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
return
def register_Ns3Vector3D_methods(root_module, cls):
cls.add_output_stream_operator()
## vector.h (module 'core'): ns3::Vector3D::Vector3D(ns3::Vector3D const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D(double _x, double _y, double _z) [constructor]
cls.add_constructor([param('double', '_x'), param('double', '_y'), param('double', '_z')])
## vector.h (module 'core'): ns3::Vector3D::Vector3D() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3D::x [variable]
cls.add_instance_attribute('x', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::y [variable]
cls.add_instance_attribute('y', 'double', is_const=False)
## vector.h (module 'core'): ns3::Vector3D::z [variable]
cls.add_instance_attribute('z', 'double', is_const=False)
return
def register_Ns3Empty_methods(root_module, cls):
## empty.h (module 'core'): ns3::empty::empty() [constructor]
cls.add_constructor([])
## empty.h (module 'core'): ns3::empty::empty(ns3::empty const & arg0) [copy constructor]
cls.add_constructor([param('ns3::empty const &', 'arg0')])
return
def register_Ns3Object_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::Object() [constructor]
cls.add_constructor([])
## object.h (module 'core'): void ns3::Object::AggregateObject(ns3::Ptr<ns3::Object> other) [member function]
cls.add_method('AggregateObject',
'void',
[param('ns3::Ptr< ns3::Object >', 'other')])
## object.h (module 'core'): void ns3::Object::Dispose() [member function]
cls.add_method('Dispose',
'void',
[])
## object.h (module 'core'): ns3::Object::AggregateIterator ns3::Object::GetAggregateIterator() const [member function]
cls.add_method('GetAggregateIterator',
'ns3::Object::AggregateIterator',
[],
is_const=True)
## object.h (module 'core'): ns3::TypeId ns3::Object::GetInstanceTypeId() const [member function]
cls.add_method('GetInstanceTypeId',
'ns3::TypeId',
[],
is_const=True, is_virtual=True)
## object.h (module 'core'): static ns3::TypeId ns3::Object::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## object.h (module 'core'): void ns3::Object::Start() [member function]
cls.add_method('Start',
'void',
[])
## object.h (module 'core'): ns3::Object::Object(ns3::Object const & o) [copy constructor]
cls.add_constructor([param('ns3::Object const &', 'o')],
visibility='protected')
## object.h (module 'core'): void ns3::Object::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::DoStart() [member function]
cls.add_method('DoStart',
'void',
[],
visibility='protected', is_virtual=True)
## object.h (module 'core'): void ns3::Object::NotifyNewAggregate() [member function]
cls.add_method('NotifyNewAggregate',
'void',
[],
visibility='protected', is_virtual=True)
return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator(ns3::Object::AggregateIterator const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
## object.h (module 'core'): ns3::Object::AggregateIterator::AggregateIterator() [constructor]
cls.add_constructor([])
## object.h (module 'core'): bool ns3::Object::AggregateIterator::HasNext() const [member function]
cls.add_method('HasNext',
'bool',
[],
is_const=True)
## object.h (module 'core'): ns3::Ptr<ns3::Object const> ns3::Object::AggregateIterator::Next() [member function]
cls.add_method('Next',
'ns3::Ptr< ns3::Object const >',
[])
return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::SimpleRefCount(ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::SimpleRefCount(ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount() [constructor]
cls.add_constructor([])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::SimpleRefCount(ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > const & o) [copy constructor]
cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
## simple-ref-count.h (module 'core'): static void ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >::Cleanup() [member function]
cls.add_method('Cleanup',
'void',
[],
is_static=True)
return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor(ns3::TraceSourceAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor::TraceSourceAccessor() [constructor]
cls.add_constructor([])
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Connect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Connect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::ConnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('ConnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::Disconnect(ns3::ObjectBase * obj, std::string context, ns3::CallbackBase const & cb) const [member function]
cls.add_method('Disconnect',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## trace-source-accessor.h (module 'core'): bool ns3::TraceSourceAccessor::DisconnectWithoutContext(ns3::ObjectBase * obj, ns3::CallbackBase const & cb) const [member function]
cls.add_method('DisconnectWithoutContext',
'bool',
[param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AntennaModel_methods(root_module, cls):
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel(ns3::AntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AntennaModel const &', 'arg0')])
## antenna-model.h (module 'antenna'): ns3::AntennaModel::AntennaModel() [constructor]
cls.add_constructor([])
## antenna-model.h (module 'antenna'): double ns3::AntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_pure_virtual=True, is_virtual=True)
## antenna-model.h (module 'antenna'): static ns3::TypeId ns3::AntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3AttributeAccessor_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
cls.add_method('Get',
'bool',
[param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
cls.add_method('HasGetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
cls.add_method('HasSetter',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
cls.add_method('Set',
'bool',
[param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeChecker_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
cls.add_method('Check',
'bool',
[param('ns3::AttributeValue const &', 'value')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
cls.add_method('Copy',
'bool',
[param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
cls.add_method('Create',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
cls.add_method('CreateValidValue',
'ns3::Ptr< ns3::AttributeValue >',
[param('ns3::AttributeValue const &', 'value')],
is_const=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
cls.add_method('GetUnderlyingTypeInformation',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
cls.add_method('GetValueTypeName',
'std::string',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
cls.add_method('HasUnderlyingTypeInformation',
'bool',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3AttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_pure_virtual=True, is_const=True, is_virtual=True)
## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_virtual=True)
## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackChecker_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackChecker::CallbackChecker(ns3::CallbackChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
return
def register_Ns3CallbackImplBase_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<ns3::CallbackImplBase const> other) const [member function]
cls.add_method('IsEqual',
'bool',
[param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
is_pure_virtual=True, is_const=True, is_virtual=True)
return
def register_Ns3CallbackValue_methods(root_module, cls):
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
cls.add_constructor([])
## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
cls.add_method('Set',
'void',
[param('ns3::CallbackBase', 'base')])
return
def register_Ns3CosineAntennaModel_methods(root_module, cls):
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel() [constructor]
cls.add_constructor([])
## cosine-antenna-model.h (module 'antenna'): ns3::CosineAntennaModel::CosineAntennaModel(ns3::CosineAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::CosineAntennaModel const &', 'arg0')])
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## cosine-antenna-model.h (module 'antenna'): double ns3::CosineAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## cosine-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::CosineAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## cosine-antenna-model.h (module 'antenna'): void ns3::CosineAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
cls.add_constructor([])
## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, visibility='private', is_virtual=True)
## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
visibility='private', is_virtual=True)
## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, visibility='private', is_virtual=True)
return
def register_Ns3IsotropicAntennaModel_methods(root_module, cls):
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel(ns3::IsotropicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::IsotropicAntennaModel const &', 'arg0')])
## isotropic-antenna-model.h (module 'antenna'): ns3::IsotropicAntennaModel::IsotropicAntennaModel() [constructor]
cls.add_constructor([])
## isotropic-antenna-model.h (module 'antenna'): double ns3::IsotropicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## isotropic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::IsotropicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
return
def register_Ns3ParabolicAntennaModel_methods(root_module, cls):
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel() [constructor]
cls.add_constructor([])
## parabolic-antenna-model.h (module 'antenna'): ns3::ParabolicAntennaModel::ParabolicAntennaModel(ns3::ParabolicAntennaModel const & arg0) [copy constructor]
cls.add_constructor([param('ns3::ParabolicAntennaModel const &', 'arg0')])
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetBeamwidth() const [member function]
cls.add_method('GetBeamwidth',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetGainDb(ns3::Angles a) [member function]
cls.add_method('GetGainDb',
'double',
[param('ns3::Angles', 'a')],
is_virtual=True)
## parabolic-antenna-model.h (module 'antenna'): double ns3::ParabolicAntennaModel::GetOrientation() const [member function]
cls.add_method('GetOrientation',
'double',
[],
is_const=True)
## parabolic-antenna-model.h (module 'antenna'): static ns3::TypeId ns3::ParabolicAntennaModel::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetBeamwidth(double beamwidthDegrees) [member function]
cls.add_method('SetBeamwidth',
'void',
[param('double', 'beamwidthDegrees')])
## parabolic-antenna-model.h (module 'antenna'): void ns3::ParabolicAntennaModel::SetOrientation(double orientationDegrees) [member function]
cls.add_method('SetOrientation',
'void',
[param('double', 'orientationDegrees')])
return
def register_Ns3TypeIdChecker_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdChecker::TypeIdChecker(ns3::TypeIdChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdChecker const &', 'arg0')])
return
def register_Ns3TypeIdValue_methods(root_module, cls):
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue() [constructor]
cls.add_constructor([])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeIdValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::TypeIdValue const &', 'arg0')])
## type-id.h (module 'core'): ns3::TypeIdValue::TypeIdValue(ns3::TypeId const & value) [constructor]
cls.add_constructor([param('ns3::TypeId const &', 'value')])
## type-id.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::TypeIdValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): bool ns3::TypeIdValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## type-id.h (module 'core'): ns3::TypeId ns3::TypeIdValue::Get() const [member function]
cls.add_method('Get',
'ns3::TypeId',
[],
is_const=True)
## type-id.h (module 'core'): std::string ns3::TypeIdValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## type-id.h (module 'core'): void ns3::TypeIdValue::Set(ns3::TypeId const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::TypeId const &', 'value')])
return
def register_Ns3Vector2DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DChecker::Vector2DChecker(ns3::Vector2DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DChecker const &', 'arg0')])
return
def register_Ns3Vector2DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector2DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector2DValue::Vector2DValue(ns3::Vector2D const & value) [constructor]
cls.add_constructor([param('ns3::Vector2D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector2DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector2DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector2D ns3::Vector2DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector2D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector2DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector2DValue::Set(ns3::Vector2D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector2D const &', 'value')])
return
def register_Ns3Vector3DChecker_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DChecker::Vector3DChecker(ns3::Vector3DChecker const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DChecker const &', 'arg0')])
return
def register_Ns3Vector3DValue_methods(root_module, cls):
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue() [constructor]
cls.add_constructor([])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3DValue const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Vector3DValue const &', 'arg0')])
## vector.h (module 'core'): ns3::Vector3DValue::Vector3DValue(ns3::Vector3D const & value) [constructor]
cls.add_constructor([param('ns3::Vector3D const &', 'value')])
## vector.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::Vector3DValue::Copy() const [member function]
cls.add_method('Copy',
'ns3::Ptr< ns3::AttributeValue >',
[],
is_const=True, is_virtual=True)
## vector.h (module 'core'): bool ns3::Vector3DValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
cls.add_method('DeserializeFromString',
'bool',
[param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_virtual=True)
## vector.h (module 'core'): ns3::Vector3D ns3::Vector3DValue::Get() const [member function]
cls.add_method('Get',
'ns3::Vector3D',
[],
is_const=True)
## vector.h (module 'core'): std::string ns3::Vector3DValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
cls.add_method('SerializeToString',
'std::string',
[param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
is_const=True, is_virtual=True)
## vector.h (module 'core'): void ns3::Vector3DValue::Set(ns3::Vector3D const & value) [member function]
cls.add_method('Set',
'void',
[param('ns3::Vector3D const &', 'value')])
return
def register_functions(root_module):
module = root_module
## angles.h (module 'antenna'): extern double ns3::DegreesToRadians(double degrees) [free function]
module.add_function('DegreesToRadians',
'double',
[param('double', 'degrees')])
## angles.h (module 'antenna'): extern double ns3::RadiansToDegrees(double radians) [free function]
module.add_function('RadiansToDegrees',
'double',
[param('double', 'radians')])
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
return
def register_functions_ns3_FatalImpl(module, root_module):
return
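# --- Hedged aside (not part of the generated bindings): a minimal sketch of
# the pybindgen pattern this whole file follows. The module name 'example',
# the header 'example.h' and the function 'Square' are hypothetical; the real
# bindings are assembled by module_init() and the register_* functions above,
# then emitted by main() below.
def _pybindgen_sketch():
    import sys
    from pybindgen import Module, FileCodeSink, param
    mod = Module('example')                      # hypothetical module name
    mod.add_include('"example.h"')               # header declaring Square()
    mod.add_function('Square', 'double',
                     [param('double', 'x')])     # wraps: double Square(double)
    mod.generate(FileCodeSink(sys.stdout))       # emit the C wrapper source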
def main():
out = FileCodeSink(sys.stdout)
root_module = module_init()
register_types(root_module)
register_methods(root_module)
register_functions(root_module)
root_module.generate(out)
if __name__ == '__main__':
main()
|
gpl-2.0
|
ylatuya/Flumotion
|
flumotion/common/i18n.py
|
4
|
9380
|
# -*- Mode: Python; test-case-name: flumotion.test.test_i18n.py -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007,2008 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""internationalization helpers
"""
import os
import gettext
from twisted.spread import pb
from flumotion.common import log
from flumotion.configure import configure
__version__ = "$Rev: 6693 $"
# Taken from twisted.python.util; modified so that if compareAttributes
# grows, but we get a message from a remote side that doesn't have one
# of the new attributes, that we don't raise an exception
class FancyEqMixin:
compareAttributes = ()
def __eq__(self, other):
if not self.compareAttributes:
return self is other
#XXX Maybe get rid of this, and rather use hasattr()s
if not isinstance(other, self.__class__):
return False
for attr in self.compareAttributes:
if hasattr(self, attr):
if not hasattr(other, attr):
return False
elif not getattr(self, attr) == getattr(other, attr):
return False
elif hasattr(other, attr):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
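# Hedged illustration (the Msg class below is hypothetical, not part of
# flumotion): FancyEqMixin compares attribute by attribute, ignoring an
# attribute that is missing on *both* sides -- which is exactly what keeps
# old and new versions of a Copyable comparable.
def _fancy_eq_sketch():
    class Msg(FancyEqMixin):
        compareAttributes = ("a", "b")
    m1, m2 = Msg(), Msg()
    m1.a = m2.a = 1
    assert m1 == m2         # "b" missing on both sides -> ignored
    m2.b = 2
    assert m1 != m2         # "b" present on only one side -> unequal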
def N_(format):
"""
Mark a singular string for translation, without translating it.
"""
return format
def ngettext(singular, plural, count):
"""
Mark a plural string for translation, without translating it.
"""
return (singular, plural, count)
def gettexter(domain=configure.PACKAGE):
"""
    Return a function that takes a format string or tuple plus additional
    format args, and creates a L{Translatable} from it.
Example::
T_ = gettexter('flumotion')
t = T_(N_("Could not find '%s'."), file)
@param domain: the gettext domain to create translatables for.
"""
def create(format, *args):
if isinstance(format, str):
return TranslatableSingular(domain, format, *args)
else:
return TranslatablePlural(domain, format, *args)
return lambda *args: create(*args)
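# Hedged usage sketch: the returned T_ dispatches on the type of its first
# argument, so a plain string yields a TranslatableSingular and an
# ngettext() tuple yields a TranslatablePlural (both defined below; this
# function is only illustrative and is never called at import time).
def _gettexter_sketch():
    T_ = gettexter('flumotion')
    t1 = T_(N_("Could not find '%s'."), 'foo.xml')
    t2 = T_(ngettext("%d file", "%d files", 3), 3)
    assert isinstance(t1, TranslatableSingular)
    assert isinstance(t2, TranslatablePlural)
    assert t1.untranslated() == "Could not find 'foo.xml'."
    assert t2.untranslated() == "3 file"  # untranslated() uses the singular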
class Translatable(pb.Copyable, pb.RemoteCopy):
"""
I represent a serializable translatable gettext msg.
"""
domain = None
# NOTE: subclassing FancyEqMixin allows us to compare two
# RemoteCopy instances gotten from the same Copyable; this allows
# state _append and _remove to work correctly
# Take note, however, that this also means that two RemoteCopy objects
# of two different Copyable objects, but with the same args, will
# also compare equal.
# For our purposes, this is fine.
class TranslatableSingular(Translatable, FancyEqMixin):
"""
I represent a translatable gettext msg in the singular form.
"""
compareAttributes = ["domain", "format", "args"]
def __init__(self, domain, format, *args):
"""
@param domain: the text domain for translations of this message
@param format: a format string
@param args: any arguments to the format string
"""
self.domain = domain
self.format = format
self.args = args
def untranslated(self):
if self.args:
result = self.format % self.args
else:
result = self.format
return result
pb.setUnjellyableForClass(TranslatableSingular, TranslatableSingular)
class TranslatablePlural(Translatable, FancyEqMixin):
"""
I represent a translatable gettext msg in the plural form.
"""
compareAttributes = ["domain", "singular", "plural", "count", "args"]
def __init__(self, domain, format, *args):
"""
@param domain: the text domain for translations of this message
@param format: a (singular, plural, count) tuple
@param args: any arguments to the format string
"""
singular, plural, count = format
self.domain = domain
self.singular = singular
self.plural = plural
self.count = count
self.args = args
def untranslated(self):
if self.args:
result = self.singular % self.args
else:
result = self.singular
return result
pb.setUnjellyableForClass(TranslatablePlural, TranslatablePlural)
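# Registering both classes with pb.setUnjellyableForClass() (above) is what
# lets a Translatable travel over Twisted Perspective Broker: the receiving
# side rebuilds it as the same class, where untranslated() or a Translator
# can then be applied.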
class Translator(log.Loggable):
"""
I translate translatables and messages.
I need to be told where locale directories can be found for all domains
I need to translate for.
"""
logCategory = "translator"
def __init__(self):
self._localedirs = {} # domain name -> list of locale dirs
def addLocaleDir(self, domain, dir):
"""
Add a locale directory for the given text domain.
"""
        if domain not in self._localedirs:
            self._localedirs[domain] = []
        if dir not in self._localedirs[domain]:
self.debug('Adding localedir %s for domain %s' % (dir, domain))
self._localedirs[domain].append(dir)
def translateTranslatable(self, translatable, lang=None):
"""
Translate a translatable object, in the given language.
@param lang: language code (or the current locale if None)
"""
# gettext.translation objects are rumoured to be cached (API docs)
domain = translatable.domain
t = None
if domain in self._localedirs.keys():
# FIXME: possibly trap IOError and handle nicely ?
for localedir in self._localedirs[domain]:
try:
                    # gettext.translation() expects a list of language codes,
                    # not a bare string, so wrap lang when it is given
                    t = gettext.translation(domain, localedir,
                                            languages=[lang] if lang else None)
except IOError:
pass
else:
self.debug('no locales for domain %s' % domain)
format = None
if not t:
# if no translation object found, fall back to C
self.debug('no translation found, falling back to C')
if isinstance(translatable, TranslatableSingular):
format = translatable.format
elif isinstance(translatable, TranslatablePlural):
if translatable.count == 1:
format = translatable.singular
else:
format = translatable.plural
else:
raise NotImplementedError('Cannot translate translatable %r' %
translatable)
else:
# translation object found, translate
if isinstance(translatable, TranslatableSingular):
format = t.gettext(translatable.format)
elif isinstance(translatable, TranslatablePlural):
format = t.ngettext(translatable.singular, translatable.plural,
translatable.count)
else:
raise NotImplementedError('Cannot translate translatable %r' %
translatable)
if translatable.args:
return format % translatable.args
else:
return format
def translate(self, message, lang=None):
"""
Translate a message, in the given language.
"""
strings = []
for t in message.translatables:
strings.append(self.translateTranslatable(t, lang))
return "".join(strings)
def getLL():
"""
Return the (at most) two-letter language code set for message translation.
"""
    # LANGUAGE is a GNU extension; it can be colon-separated, but we ignore
    # the advanced stuff. If that's not present, just use LANG, as normal.
language = os.environ.get('LANGUAGE', None)
    if language is not None:
        LL = language[:2]
else:
lang = os.environ.get('LANG', 'en')
LL = lang[:2]
return LL
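# For example: with LANGUAGE='nl_BE:fr' set, getLL() returns 'nl'; with only
# LANG='en_US.UTF-8' set, it returns 'en'; with neither, it defaults to 'en'.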
def installGettext():
"""
Sets up gettext so that the program gets translated.
Use this in any Flumotion end-user application that needs translations.
"""
import locale
localedir = os.path.join(configure.localedatadir, 'locale')
log.debug("locale", "Loading locales from %s" % localedir)
gettext.bindtextdomain(configure.PACKAGE, localedir)
gettext.textdomain(configure.PACKAGE)
    # Some platforms, such as win32, lack locale.bindtextdomain/textdomain.
# bindtextdomain/textdomain are undocumented functions only available
# in the posix _locale module. We use them to avoid having to import
# gtk.glade here and thus import gtk/create a connection to X.
if hasattr(locale, 'bindtextdomain'):
locale.bindtextdomain(configure.PACKAGE, localedir)
if hasattr(locale, 'textdomain'):
locale.textdomain(configure.PACKAGE)
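# Typical end-user startup (hedged sketch; the application code below is
# hypothetical):
#
#     from flumotion.common import i18n
#     i18n.installGettext()               # binds the configured text domain
#     from gettext import gettext as _
#     print(_("some translatable UI string"))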
|
gpl-2.0
|
haad/ansible
|
test/units/modules/network/nuage/test_nuage_vspk.py
|
50
|
49284
|
# -*- coding: utf-8 -*-
# (c) 2017, Nokia
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
from nose.plugins.skip import SkipTest
if not (sys.version_info[0] == 2 and sys.version_info[1] >= 7):
    raise SkipTest('Nuage Ansible modules require Python 2.7')
try:
from vspk import v5_0 as vsdk
from bambou.exceptions import BambouHTTPError
from ansible.modules.network.nuage import nuage_vspk
except ImportError:
    raise SkipTest('Nuage Ansible modules require the vspk and bambou python libraries')
from ansible.compat.tests.mock import patch
from units.modules.utils import set_module_args, AnsibleExitJson, AnsibleFailJson
from .nuage_module import MockNuageConnection, TestNuageModule
_LOOP_COUNTER = 0
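# _LOOP_COUNTER is shared state for the job_fetch mocks below: the first two
# fetches report the job as RUNNING, after which the status flips to SUCCESS
# (or ERROR in the failure test), letting the wait_for_job command's polling
# loop terminate.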
class TestNuageVSPKModule(TestNuageModule):
def setUp(self):
super(TestNuageVSPKModule, self).setUp()
self.patches = []
def enterprises_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
if 'unknown' in filter:
return []
result = [vsdk.NUEnterprise(id='enterprise-id', name='test-enterprise')]
if filter == '' or filter == 'name == "test%"':
result.append(vsdk.NUEnterprise(id='enterprise-id-2', name='test-enterprise-2'))
return result
self.enterprises_get_mock = patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get', new=enterprises_get)
self.enterprises_get_mock.start()
self.patches.append(self.enterprises_get_mock)
def enterprises_get_first(self, filter=None, order_by=None, group_by=None, query_parameters=None, commit=False, callback=None, **kwargs):
group_by = [] if group_by is None else group_by
if filter == 'name == "test-enterprise-create"' or 'unknown' in filter:
return None
return vsdk.NUEnterprise(id='enterprise-id', name='test-enterprise')
self.enterprises_get_first_mock = patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get_first', new=enterprises_get_first)
self.enterprises_get_first_mock.start()
self.patches.append(self.enterprises_get_first_mock)
def enterprise_delete(self, response_choice=1, callback=None, **kwargs):
pass
self.enterprise_delete_mock = patch('vspk.v5_0.NUEnterprise.delete', new=enterprise_delete)
self.enterprise_delete_mock.start()
self.patches.append(self.enterprise_delete_mock)
def enterprise_fetch(self, callback=None, **kwargs):
self.id = 'enterprise-id'
self.name = 'test-enterprise'
self.enterprise_fetch_mock = patch('vspk.v5_0.NUEnterprise.fetch', new=enterprise_fetch)
self.enterprise_fetch_mock.start()
self.patches.append(self.enterprise_fetch_mock)
def enterprise_save(self, response_choice=None, callback=None, **kwargs):
self.id = 'enterprise-id'
self.name = 'test-enterprise-update'
self.enterprise_save_mock = patch('vspk.v5_0.NUEnterprise.save', new=enterprise_save)
self.enterprise_save_mock.start()
self.patches.append(self.enterprise_save_mock)
def enterprise_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
nurest_object.id = 'user-id-create'
return nurest_object
self.enterprise_create_child_mock = patch('vspk.v5_0.NUEnterprise.create_child', new=enterprise_create_child)
self.enterprise_create_child_mock.start()
self.patches.append(self.enterprise_create_child_mock)
def me_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
nurest_object.id = 'enterprise-id-create'
return nurest_object
self.me_create_child_mock = patch('vspk.v5_0.NUMe.create_child', new=me_create_child)
self.me_create_child_mock.start()
self.patches.append(self.me_create_child_mock)
def user_fetch(self, callback=None, **kwargs):
self.id = 'user-id'
self.first_name = 'John'
self.last_name = 'Doe'
self.email = 'john.doe@localhost'
self.user_name = 'johndoe'
self.password = ''
self.user_fetch_mock = patch('vspk.v5_0.NUUser.fetch', new=user_fetch)
self.user_fetch_mock.start()
self.patches.append(self.user_fetch_mock)
def user_save(self, response_choice=None, callback=None, **kwargs):
self.id = 'user-id'
self.first_name = 'John'
self.last_name = 'Doe'
self.email = 'john.doe@localhost'
self.user_name = 'johndoe'
self.password = ''
self.user_save_mock = patch('vspk.v5_0.NUUser.save', new=user_save)
self.user_save_mock.start()
self.patches.append(self.user_save_mock)
def groups_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
self.groups_get_mock = patch('vspk.v5_0.fetchers.NUGroupsFetcher.get', new=groups_get)
self.groups_get_mock.start()
self.patches.append(self.groups_get_mock)
def group_fetch(self, callback=None, **kwargs):
self.id = 'group-id'
self.name = 'group'
self.group_fetch_mock = patch('vspk.v5_0.NUGroup.fetch', new=group_fetch)
self.group_fetch_mock.start()
self.patches.append(self.group_fetch_mock)
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
self.id = 'group-id'
self.name = 'group'
self.group_assign_mock = patch('vspk.v5_0.NUGroup.assign', new=group_assign)
self.group_assign_mock.start()
self.patches.append(self.group_assign_mock)
def job_fetch(self, callback=None, **kwargs):
global _LOOP_COUNTER
self.id = 'job-id'
self.command = 'EXPORT'
self.status = 'RUNNING'
if _LOOP_COUNTER > 1:
self.status = 'SUCCESS'
_LOOP_COUNTER += 1
self.job_fetch_mock = patch('vspk.v5_0.NUJob.fetch', new=job_fetch)
self.job_fetch_mock.start()
self.patches.append(self.job_fetch_mock)
def tearDown(self):
super(TestNuageVSPKModule, self).tearDown()
for mock in self.patches:
mock.stop()
def test_certificate_auth(self):
set_module_args(
args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise'
},
'auth': {
'api_username': 'csproot',
'api_certificate': '/dummy/location/certificate.pem',
'api_key': '/dummy/location/key.pem',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_by_property(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'properties': {
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_by_filter(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'match_filter': 'name == "test%"'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 2)
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
self.assertEqual(result['entities'][1]['name'], 'test-enterprise-2')
def test_command_find_by_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_all(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 2)
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
self.assertEqual(result['entities'][1]['name'], 'test-enterprise-2')
def test_command_change_password(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'enterprise-id',
'parent_type': 'Enterprise',
'command': 'change_password',
'properties': {
'password': 'test'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(result['id'], 'user-id')
self.assertEqual(result['entities'][0]['firstName'], 'John')
self.assertEqual(result['entities'][0]['lastName'], 'Doe')
self.assertEqual(result['entities'][0]['email'], 'john.doe@localhost')
self.assertEqual(result['entities'][0]['userName'], 'johndoe')
self.assertEqual(result['entities'][0]['password'], '')
def test_command_wait_for_job(self):
set_module_args(args={
'id': 'job-id',
'type': 'Job',
'command': 'wait_for_job',
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(result['id'], 'job-id')
self.assertEqual(result['entities'][0]['command'], 'EXPORT')
self.assertEqual(result['entities'][0]['status'], 'SUCCESS')
def test_command_get_csp_enterprise(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'get_csp_enterprise'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_existing(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'id': 'enterprise-id',
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_existing_filter(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'match_filter': 'name == "test-enterprise"'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_create(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id-create')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise-create')
def test_state_present_update(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-update'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise-update')
def test_state_present_member_existing(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id'), vsdk.NUUser(id='user-id-2')]
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
def test_state_present_member_missing(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'user-id')
def test_state_present_children_update(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise'
},
'children': [
{
'id': 'user-id',
'type': 'User',
'match_filter': 'userName == "johndoe"',
'properties': {
'user_name': 'johndoe-changed'
}
}
]
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 2)
def test_state_present_children_create(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'type': 'User',
'properties': {
'user_name': 'johndoe-new'
}
}
]
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
self.assertEqual(len(result['entities']), 2)
def test_state_present_children_member_missing(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'unkown-test-enterprise'
},
'children': [
{
'type': 'Group',
'properties': {
'name': 'unknown-group'
},
'children': [
{
'id': 'user-id',
'type': 'User'
}
]
}
]
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
self.assertEqual(len(result['entities']), 3)
def test_state_absent(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'absent',
'properties': {
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
def test_state_absent_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'absent'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id')]
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
def test_exception_session(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
def failed_session_start(self):
raise BambouHTTPError(MockNuageConnection(status_code='401', reason='Unauthorized', errors={}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUVSDSession.start', new=failed_session_start):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to connect to the API URL with given username, password and enterprise: [HTTP 401(Unauthorized)] {}')
def test_exception_find_parent(self):
set_module_args(args={
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'command': 'find'
})
def group_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUGroup.fetch', group_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified parent: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_find_entities_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified entity by ID: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
    def test_exception_find_entities_property(self):
set_module_args(args={
'type': 'Enterprise',
'match_filter': 'name == "enterprise-id"',
'command': 'find'
})
def enterprises_failed_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get', enterprises_failed_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find matching entries')
def test_exception_find_entity_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified entity by ID: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_find_entity_property(self):
set_module_args(args={
'type': 'Enterprise',
'match_filter': 'name == "enterprise-id"',
'state': 'absent'
})
def enterprises_failed_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get_first', enterprises_failed_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
def test_exception_get_csp_enterprise(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'get_csp_enterprise'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to fetch CSP enterprise: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_assign_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to assign member'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
with patch('vspk.v5_0.NUGroup.assign', new=group_assign):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to assign entity as a member: [HTTP 500(Server exception)] {'description': 'Unable to assign member'}")
def test_exception_unassign_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'absent'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id'), vsdk.NUUser(id='user-id-2')]
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to remove member'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
with patch('vspk.v5_0.NUGroup.assign', new=group_assign):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to remove entity as a member: [HTTP 500(Server exception)] {'description': 'Unable to remove member'}")
def test_exception_create_entity(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
}
})
def me_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to create entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUMe.create_child', me_create_child):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to create entity: [HTTP 500(Server exception)] {'description': 'Unable to create entity'}")
def test_exception_save_entity(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'new-enterprise-name'
}
})
def enterprise_save(self, response_choice=None, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to save entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.save', enterprise_save):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to update entity: [HTTP 500(Server exception)] {'description': 'Unable to save entity'}")
def test_exception_delete_entity(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'absent'
})
def enterprise_delete(self, response_choice=1, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to delete entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.delete', enterprise_delete):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to delete entity: [HTTP 500(Server exception)] {'description': 'Unable to delete entity'}")
def test_exception_wait_for_job(self):
set_module_args(args={
'id': 'job-id',
'type': 'Job',
'command': 'wait_for_job'
})
def job_fetch(self, callback=None, **kwargs):
global _LOOP_COUNTER
self.id = 'job-id'
self.command = 'EXPORT'
self.status = 'RUNNING'
if _LOOP_COUNTER > 1:
self.status = 'ERROR'
_LOOP_COUNTER += 1
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUJob.fetch', new=job_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Job ended in an error")
def test_fail_auth(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Missing api_password or api_certificate and api_key parameter in auth')
def test_fail_version(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v1_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'vspk is required for this module, or the API version specified does not exist.')
def test_fail_type(self):
set_module_args(args={
'type': 'Unknown',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised type specified')
def test_fail_parent_type(self):
set_module_args(args={
'type': 'User',
'parent_id': 'unkown-id',
'parent_type': 'Unknown',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised parent type specified')
def test_fail_parent_child(self):
set_module_args(args={
'type': 'Enterprise',
'parent_id': 'user-id',
'parent_type': 'User',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Specified parent is not a valid parent for the specified type')
def test_fail_no_parent(self):
set_module_args(args={
'type': 'Group',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'No parent specified and root object is not a parent for the type')
def test_fail_present_member(self):
set_module_args(args={
'type': 'User',
'match_filter': 'name == "test-user"',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return None
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get_first', users_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Trying to assign an entity that does not exist', result)
def test_fail_change_password(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'command': 'change_password',
'properties': {}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'command is change_password but the following are missing: password property')
def test_fail_change_password_non_user(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'command': 'change_password',
'properties': {
'password': 'new-password'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Entity does not have a password property')
def test_fail_command_find(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'properties': {
'id': 'unknown-enterprise-id',
'name': 'unkown-enterprise'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find matching entries')
def test_fail_children_type(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'properties': {
'user_name': 'johndoe-new'
}
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Child type unspecified')
def test_fail_children_mandatory(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'type': 'User'
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Child ID or properties unspecified')
def test_fail_children_unknown(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'id': 'unkown-id',
'type': 'Unkown'
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised child type specified')
def test_fail_children_parent(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'state': 'present',
'children': [
{
'type': 'User',
'properties': {
'name': 'test-user'
}
}
]
})
def users_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return None
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get_first', users_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Trying to assign a child that does not exist')
def test_fail_children_fetcher(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'state': 'present',
'children': [
{
'type': 'Enterprise',
'properties': {
'name': 'test-enterprise'
}
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find a fetcher for child, and no ID specified.')
def test_fail_has_changed(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'state': 'present',
'properties': {
'user_name': 'changed-user',
'fake': 'invalid-property',
'password': 'hidden-property'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Property fake is not valid for this type of entity')
def test_input_auth_username(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_username')
def test_input_auth_enterprise(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_enterprise')
def test_input_auth_url(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_url')
def test_input_auth_version(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_version')
def test_input_exclusive(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are mutually exclusive: ['command', 'state']")
def test_input_require_both_parent_id(self):
set_module_args(args={
'type': 'User',
'command': 'find',
'parent_type': 'Enterprise'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are required together: ['parent_id', 'parent_type']")
def test_input_require_both_parent_type(self):
set_module_args(args={
'type': 'User',
'command': 'find',
'parent_id': 'enterprise-id'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are required together: ['parent_id', 'parent_type']")
def test_input_require_on_off(self):
set_module_args(args={
'type': 'Enterprise'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "one of the following is required: command,state")
def test_input_require_if_present(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "state is present but the following are missing: id,properties,match_filter")
def test_input_require_if_absent(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'absent',
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "state is absent but the following are missing: id,properties,match_filter")
def test_input_require_if_change_password_id(self):
set_module_args(args={
'type': 'User',
'command': 'change_password',
'properties': {
'password': 'dummy-password'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is change_password but the following are missing: id")
def test_input_require_if_change_password_properties(self):
set_module_args(args={
'type': 'User',
'command': 'change_password',
'id': 'user-id'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is change_password but the following are missing: properties")
def test_input_require_if_wait_for_job_id(self):
set_module_args(args={
'type': 'Job',
'command': 'wait_for_job'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is wait_for_job but the following are missing: id")
|
gpl-3.0
|
vtSpot/linux
|
scripts/gdb/linux/tasks.py
|
630
|
2892
|
#
# gdb helper commands and functions for Linux kernel debugging
#
# task & thread tools
#
# Copyright (c) Siemens AG, 2011-2013
#
# Authors:
# Jan Kiszka <[email protected]>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
from linux import utils
task_type = utils.CachedType("struct task_struct")
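# A minimal sketch of what CachedType provides here (an assumption about
# linux.utils, which is not shown in this file): it defers the gdb type
# lookup and caches it, and .get_type() returns the gdb.Type for the struct.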
def task_lists():
task_ptr_type = task_type.get_type().pointer()
init_task = gdb.parse_and_eval("init_task").address
t = g = init_task
while True:
while True:
yield t
t = utils.container_of(t['thread_group']['next'],
task_ptr_type, "thread_group")
if t == g:
break
t = g = utils.container_of(g['tasks']['next'],
task_ptr_type, "tasks")
if t == init_task:
return
def get_task_by_pid(pid):
for task in task_lists():
if int(task['pid']) == pid:
return task
return None
class LxTaskByPidFunc(gdb.Function):
"""Find Linux task by PID and return the task_struct variable.
$lx_task_by_pid(PID): Given PID, iterate over all tasks of the target and
return that task_struct variable which PID matches."""
def __init__(self):
super(LxTaskByPidFunc, self).__init__("lx_task_by_pid")
def invoke(self, pid):
task = get_task_by_pid(pid)
if task:
return task.dereference()
else:
raise gdb.GdbError("No task of PID " + str(pid))
LxTaskByPidFunc()
class LxPs(gdb.Command):
"""Dump Linux tasks."""
def __init__(self):
super(LxPs, self).__init__("lx-ps", gdb.COMMAND_DATA)
def invoke(self, arg, from_tty):
for task in task_lists():
gdb.write("{address} {pid} {comm}\n".format(
address=task,
pid=task["pid"],
comm=task["comm"].string()))
LxPs()
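# Illustrative gdb session using the helpers registered above (a sketch;
# addresses and task names depend entirely on the target kernel):
#   (gdb) lx-ps
#   0xffffffff81c1a4c0 0 swapper/0
#   ...
#   (gdb) p $lx_task_by_pid(1)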
thread_info_type = utils.CachedType("struct thread_info")
ia64_task_size = None
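# On ia64 the thread_info is not reachable via task->stack; it is laid out
# directly after the task_struct, so get_thread_info() computes its address
# as task.address + sizeof(struct task_struct), caching that size globally.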
def get_thread_info(task):
thread_info_ptr_type = thread_info_type.get_type().pointer()
if utils.is_target_arch("ia64"):
global ia64_task_size
if ia64_task_size is None:
ia64_task_size = gdb.parse_and_eval("sizeof(struct task_struct)")
thread_info_addr = task.address + ia64_task_size
thread_info = thread_info_addr.cast(thread_info_ptr_type)
else:
thread_info = task['stack'].cast(thread_info_ptr_type)
return thread_info.dereference()
class LxThreadInfoFunc (gdb.Function):
"""Calculate Linux thread_info from task variable.
$lx_thread_info(TASK): Given TASK, return the corresponding thread_info
variable."""
def __init__(self):
super(LxThreadInfoFunc, self).__init__("lx_thread_info")
def invoke(self, task):
return get_thread_info(task)
LxThreadInfoFunc()
|
gpl-2.0
|
NinjaMSP/crossbar
|
crossbar/controller/test/test_cleanup.py
|
1
|
3048
|
#####################################################################################
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Unless a separate license agreement exists between you and Crossbar.io GmbH (e.g.
# you have purchased a commercial license), the license terms below apply.
#
# Should you enter into a separate license agreement after having received a copy of
# this software, then the terms of such license agreement replace the terms below at
# the time at which such license agreement becomes effective.
#
# In case a separate license agreement ends, and such agreement ends without being
# replaced by another separate license agreement, the license terms below apply
# from the time at which said agreement ends.
#
# LICENSE TERMS
#
# This program is free software: you can redistribute it and/or modify it under the
# terms of the GNU Affero General Public License, version 3, as published by the
# Free Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See the GNU Affero General Public License Version 3 for more details.
#
# You should have received a copy of the GNU Affero General Public license along
# with this program. If not, see <http://www.gnu.org/licenses/agpl-3.0.en.html>.
#
#####################################################################################
from __future__ import absolute_import
from mock import MagicMock
from twisted.trial import unittest
from twisted.internet.defer import Deferred
from twisted.internet import task
# The WebSocket protocol gets used below, and it calls
# txaio.make_logger(). If we don't explicitly select the network
# framework first, txaio raises the exception
# "To use txaio, you must first select a framework".
import txaio
txaio.use_twisted() # noqa
from crossbar.controller.process import NodeControllerSession
class CleanupHandler(unittest.TestCase):
def setUp(self):
self.transport = MagicMock()
self.worker = MagicMock()
self.worker.proto.transport = self.transport
self.worker.pid = '42'
self.worker.ready = Deferred()
self.worker.exit = Deferred()
def test_kill_after_term(self):
reactor = task.Clock()
NodeControllerSession._cleanup_worker(reactor, self.worker)
# should have sent TERM now
calls = self.worker.proto.transport.method_calls
self.assertTrue(calls[0][0] == "signalProcess")
self.assertTrue(calls[0][1] == ('TERM',))
# Skip ahead until our KILL. We advance in one-second steps because
# the Clock appears to run only one timed-out call per advance;
# a single advance(30) fires timeout() only twice.
for x in range(30):
reactor.advance(1)
calls = self.worker.proto.transport.method_calls
self.assertTrue(calls[1][0] == "signalProcess")
self.assertTrue(calls[1][1] == ("KILL",))
|
agpl-3.0
|
mancoast/CPythonPyc_test
|
fail/334_test_threading_local.py
|
167
|
6339
|
import unittest
from doctest import DocTestSuite
from test import support
import weakref
import gc
import sys
# Modules under test
_thread = support.import_module('_thread')
threading = support.import_module('threading')
import _threading_local
class Weak(object):
pass
def target(local, weaklist):
weak = Weak()
local.weak = weak
weaklist.append(weakref.ref(weak))
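# Each worker thread stores a fresh Weak instance on the shared local and
# keeps only a weakref to it; once a thread has exited, its per-thread
# value should become collectable, which _local_refs() verifies below.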
class BaseLocalTest:
def test_local_refs(self):
self._local_refs(20)
self._local_refs(50)
self._local_refs(100)
def _local_refs(self, n):
local = self._local()
weaklist = []
for i in range(n):
t = threading.Thread(target=target, args=(local, weaklist))
t.start()
t.join()
del t
gc.collect()
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n))
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
gc.collect()
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))
def test_derived(self):
# Issue 3088: if there is a thread switch inside the __init__
# of a threading.local derived class, the per-thread dictionary
# is created but not correctly set on the object.
# The first member set may be bogus.
import time
class Local(self._local):
def __init__(self):
time.sleep(0.01)
local = Local()
def f(i):
local.x = i
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads = []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
class Local(self._local):
pass
locals = None
passed = False
e1 = threading.Event()
e2 = threading.Event()
def f():
nonlocal passed
# 1) Involve Local in a cycle
cycle = [Local()]
cycle.append(cycle)
cycle[0].foo = 'bar'
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
gc.collect()
e1.set()
e2.wait()
# 4) New Locals should be empty
passed = all(not hasattr(local, 'foo') for local in locals)
t = threading.Thread(target=f)
t.start()
e1.wait()
# 3) New Locals should recycle the original's address. Creating
# them in the thread overwrites the thread state and avoids the
# bug
locals = [Local() for i in range(10)]
e2.set()
t.join()
self.assertTrue(passed)
def test_arguments(self):
# Issue 1522237
class MyLocal(self._local):
def __init__(self, *args, **kwargs):
pass
MyLocal(a=1)
MyLocal(1)
self.assertRaises(TypeError, self._local, a=1)
self.assertRaises(TypeError, self._local, 1)
def _test_one_class(self, c):
self._failed = "No error message set or cleared."
obj = c()
e1 = threading.Event()
e2 = threading.Event()
def f1():
obj.x = 'foo'
obj.y = 'bar'
del obj.y
e1.set()
e2.wait()
def f2():
try:
foo = obj.x
except AttributeError:
# This is expected -- we haven't set obj.x in this thread yet!
self._failed = "" # passed
else:
self._failed = ('Incorrectly got value %r from class %r\n' %
(foo, c))
sys.stderr.write(self._failed)
t1 = threading.Thread(target=f1)
t1.start()
e1.wait()
t2 = threading.Thread(target=f2)
t2.start()
t2.join()
# The test is done; just let t1 know it can exit, and wait for it.
e2.set()
t1.join()
self.assertFalse(self._failed, self._failed)
def test_threading_local(self):
self._test_one_class(self._local)
def test_threading_local_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_one_class(LocalSubclass)
def _test_dict_attribute(self, cls):
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
with self.assertRaises(AttributeError):
obj.__dict__ = {}
with self.assertRaises(AttributeError):
del obj.__dict__
def test_dict_attribute(self):
self._test_dict_attribute(self._local)
def test_dict_attribute_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_dict_attribute(LocalSubclass)
def test_cycle_collection(self):
class X:
pass
x = X()
x.local = self._local()
x.local.x = x
wr = weakref.ref(x)
del x
gc.collect()
self.assertIs(wr(), None)
class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
_local = _thread._local
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
_local = _threading_local.local
def test_main():
suite = unittest.TestSuite()
suite.addTest(DocTestSuite('_threading_local'))
suite.addTest(unittest.makeSuite(ThreadLocalTest))
suite.addTest(unittest.makeSuite(PyThreadingLocalTest))
local_orig = _threading_local.local
def setUp(test):
_threading_local.local = _thread._local
def tearDown(test):
_threading_local.local = local_orig
suite.addTest(DocTestSuite('_threading_local',
setUp=setUp, tearDown=tearDown)
)
support.run_unittest(suite)
if __name__ == '__main__':
test_main()
|
gpl-3.0
|
printedheart/micropsi2
|
micropsi_server/tests/test_json_api.py
|
2
|
48914
|
import pytest
import json
import re
def assert_success(response):
assert response.json_body['status'] == 'success'
assert 'data' in response.json_body
def assert_failure(response):
assert response.json_body['status'] == 'error'
assert 'data' in response.json_body
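# The RPC endpoints exercised below wrap every result in an envelope of
# roughly this shape (inferred from the two helpers above, not from an
# API spec):
#   {"status": "success" | "error", "data": <payload>}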
def test_generate_uid(app):
response = app.get_json('/rpc/generate_uid()')
assert_success(response)
assert re.match('[a-f0-9]+', response.json_body['data']) is not None
def test_select_nodenet(app, test_nodenet):
app.set_auth()
response = app.get_json('/rpc/select_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
data = response.json_body['data']
assert data == test_nodenet
def test_load_nodenet(app, test_nodenet):
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
data = response.json_body['data']
assert 'nodetypes' in data
assert 'nodes' in data
assert 'links' in data
assert data['uid'] == test_nodenet
def test_new_nodenet(app, engine):
app.set_auth()
response = app.post_json('/rpc/new_nodenet', params={
'name': 'FooBarTestNet',
'engine': engine
})
assert_success(response)
uid = response.json_body['data']
assert uid is not None
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % uid)
assert_success(response)
assert response.json_body['data']['name'] == 'FooBarTestNet'
assert response.json_body['data']['nodes'] == {}
def test_get_available_nodenets(app, test_nodenet):
response = app.get_json('/rpc/get_available_nodenets(user_id="Pytest User")')
assert_success(response)
assert test_nodenet in response.json_body['data']
def test_delete_nodenet(app, test_nodenet):
response = app.get_json('/rpc/delete_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
response = app.get_json('/rpc/get_available_nodenets(user_id="Pytest User")')
assert test_nodenet not in response.json_body['data']
def test_set_nodenet_properties(app, test_nodenet, test_world):
app.set_auth()
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, nodenet_name="new_name", worldadapter="Braitenberg", world_uid=test_world))
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
data = response.json_body['data']
assert data['name'] == 'new_name'
assert data['worldadapter'] == 'Braitenberg'
def test_set_node_state(app, test_nodenet, nodetype_def, nodefunc_def):
app.set_auth()
# create a native module:
with open(nodetype_def, 'w') as fp:
fp.write('{"Testnode": {\
"name": "Testnode",\
"slottypes": ["gen", "foo", "bar"],\
"nodefunction_name": "testnodefunc",\
"gatetypes": ["gen", "foo", "bar"],\
"symbol": "t"}}')
with open(nodefunc_def, 'w') as fp:
fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17")
response = app.get_json('/rpc/reload_native_modules()')
assert_success(response)
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Testnode',
'position': [23, 23],
'nodespace': None,
'name': 'Testnode'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/set_node_state', params={
'nodenet_uid': test_nodenet,
'node_uid': uid,
'state': {'foo': 'bar'}
})
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['nodes'][uid]['state'] == {'foo': 'bar'}
def test_set_node_activation(app, test_nodenet, node):
response = app.post_json('/rpc/set_node_activation', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'activation': '0.734'
})
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
sheaves = response.json_body['data']['nodes'][node]['sheaves']
assert float("%.3f" % sheaves['default']['activation']) == 0.734
def test_start_simulation(app, test_nodenet):
app.set_auth()
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
response = app.post_json('/rpc/start_simulation', params=dict(nodenet_uid=test_nodenet))
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['is_active']
def test_start_simulation_with_condition(app, test_nodenet):
import time
app.set_auth()
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
response = app.post_json('/rpc/set_runner_condition', params={
'nodenet_uid': test_nodenet,
'steps': '2'
})
assert_success(response)
assert response.json_body['data']['step'] == 2
response = app.post_json('/rpc/start_simulation', params=dict(nodenet_uid=test_nodenet))
assert_success(response)
time.sleep(1)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert not response.json_body['data']['is_active']
assert response.json_body['data']['current_step'] == 2
response = app.post_json('/rpc/remove_runner_condition', params=dict(nodenet_uid=test_nodenet))
assert_success(response)
def test_get_runner_properties(app):
app.set_auth()
response = app.get_json('/rpc/get_runner_properties()')
assert_success(response)
assert 'timestep' in response.json_body['data']
assert 'factor' in response.json_body['data']
def test_set_runner_properties(app):
app.set_auth()
response = app.post_json('/rpc/set_runner_properties', params=dict(timestep=123, factor=1))
assert_success(response)
response = app.get_json('/rpc/get_runner_properties()')
assert_success(response)
assert response.json_body['data']['timestep'] == 123
assert response.json_body['data']['factor'] == 1
def test_get_is_simulation_running(app, test_nodenet):
response = app.get_json('/rpc/get_is_simulation_running(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert not response.json_body['data']
def test_stop_simulation(app, test_nodenet):
app.set_auth()
response = app.post_json('/rpc/start_simulation', params=dict(nodenet_uid=test_nodenet))
assert_success(response)
response = app.get_json('/rpc/get_is_simulation_running(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert response.json_body['data']
response = app.post_json('/rpc/stop_simulation', params=dict(nodenet_uid=test_nodenet))
assert_success(response)
response = app.get_json('/rpc/get_is_simulation_running(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert not response.json_body['data']
def test_step_simulation(app, test_nodenet):
app.set_auth()
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['current_step'] == 0
response = app.get_json('/rpc/step_simulation(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert response.json_body['data'] == 1
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['current_step'] == 1
def test_get_current_state(app, test_nodenet, test_world, node):
from time import sleep
app.set_auth()
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['current_step'] == 0
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, nodenet_name="new_name", worldadapter="Braitenberg", world_uid=test_world))
response = app.post_json('/rpc/add_gate_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate': 'sub',
})
monitor_uid = response.json_body['data']
response = app.get_json('/rpc/step_simulation(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
response = app.get_json('/rpc/start_simulation(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
sleep(1)
response = app.post_json('/rpc/get_current_state', params={
'nodenet_uid': test_nodenet,
'nodenet': {
'nodespace': None,
'step': -1,
},
'monitors': {
'logger': ['system', 'world', 'nodenet'],
'after': 0
},
'world': {
'step': -1
}
})
data = response.json_body['data']
assert data['current_nodenet_step'] > 0
assert data['current_world_step'] > 0
assert data['simulation_running']
assert 'nodenet' in data
assert data['nodenet']['current_step'] > 0
assert data['nodenet']['is_active']
assert 'servertime' in data['monitors']['logs']
assert 'logs' in data['monitors']['logs']
assert len(data['monitors']['monitors'][monitor_uid]['values']) == data['nodenet']['current_step']
assert test_nodenet in data['world']['agents']
assert data['world']['current_step'] > 0
def test_revert_nodenet(app, test_nodenet, test_world):
app.set_auth()
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, nodenet_name="new_name", worldadapter="Braitenberg", world_uid=test_world))
assert_success(response)
response = app.get_json('/rpc/revert_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
data = response.json_body['data']
assert data['name'] == 'Testnet'
assert data['worldadapter'] is None
def test_save_nodenet(app, test_nodenet, test_world):
app.set_auth()
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, nodenet_name="new_name", worldadapter="Braitenberg", world_uid=test_world))
assert_success(response)
response = app.get_json('/rpc/save_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
response = app.get_json('/rpc/revert_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
data = response.json_body['data']
assert data['name'] == 'new_name'
assert data['worldadapter'] == 'Braitenberg'
# now delete the nodenet, to get default state back.
app.get_json('/rpc/delete_nodenet(nodenet_uid="%s")' % test_nodenet)
def test_export_nodenet(app, test_nodenet, node):
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
data = json.loads(response.json_body['data'])
assert data['name'] == 'Testnet'
assert data['nodes'][node]['type'] == 'Pipe'
def test_import_nodenet(app, test_nodenet, node):
app.set_auth()
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
del data['uid']
response = app.post_json('/rpc/import_nodenet', params={
'nodenet_data': json.dumps(data)
})
assert_success(response)
uid = response.json_body['data']
assert uid is not None
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % uid)
assert list(response.json_body['data']['nodes'].keys()) == [node]
assert response.json_body['data']['name'] == 'Testnet'
response = app.get_json('/rpc/delete_nodenet(nodenet_uid="%s")' % uid)
def test_merge_nodenet(app, test_nodenet, engine, node):
app.set_auth()
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
response = app.post_json('/rpc/new_nodenet', params={
'name': 'ImporterNet',
'engine': engine,
'worldadapter': 'Braitenberg',
'owner': 'Pytest User'
})
uid = response.json_body['data']
data['uid'] = uid
response = app.post_json('/rpc/merge_nodenet', params={
'nodenet_uid': uid,
'nodenet_data': json.dumps(data)
})
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % uid)
assert len(list(response.json_body['data']['nodes'].keys())) == 1
assert response.json_body['data']['name'] == 'ImporterNet'
response = app.get_json('/rpc/delete_nodenet(nodenet_uid="%s")' % uid)
###################################################
##
##
## WORLD
##
##
###################################################
def test_get_available_worlds(app, test_world):
response = app.get_json('/rpc/get_available_worlds()')
assert_success(response)
assert test_world in response.json_body['data']
def test_get_available_worlds_for_user(app, test_world):
response = app.get_json('/rpc/get_available_worlds(user_id="Pytest User")')
assert_success(response)
assert test_world in response.json_body['data']
# TODO: get_nodenet_properties is missing.
def test_get_world_properties(app, test_world):
response = app.get_json('/rpc/get_world_properties(world_uid="%s")' % test_world)
assert_success(response)
data = response.json_body['data']
assert data['uid'] == test_world
assert data['name'] == "World of Pain"
assert 'available_worldadapters' in data
assert 'available_worldobjects' in data
def test_get_worldadapters(app, test_world):
response = app.get_json('/rpc/get_worldadapters(world_uid="%s")' % test_world)
assert_success(response)
assert 'Braitenberg' in response.json_body['data']
def test_get_world_objects(app, test_world):
response = app.get_json('/rpc/get_world_objects(world_uid="%s")' % test_world)
assert_success(response)
assert response.json_body['data'] == {}
def test_add_worldobject(app, test_world):
response = app.post_json('/rpc/add_worldobject', params={
'world_uid': test_world,
'type': 'Braintree',
'position': [10, 10],
'name': 'Testtree'
})
assert_success(response)
uid = response.json_body['data']
assert uid is not None
response = app.get_json('/rpc/get_world_objects(world_uid="%s")' % test_world)
assert uid in response.json_body['data']
def test_delete_worldobject(app, test_world):
response = app.post_json('/rpc/add_worldobject', params={
'world_uid': test_world,
'type': 'Braintree',
'position': [10, 10],
'name': 'Testtree'
})
uid = response.json_body['data']
response = app.post_json('/rpc/delete_worldobject', params={
'world_uid': test_world,
'object_uid': uid
})
assert_success(response)
response = app.get_json('/rpc/get_world_objects(world_uid="%s")' % test_world)
assert uid not in response.json_body['data']
def test_set_worldobject_properties(app, test_world):
response = app.post_json('/rpc/add_worldobject', params={
'world_uid': test_world,
'type': 'Braintree',
'position': [10, 10],
'name': 'Testtree'
})
uid = response.json_body['data']
response = app.post_json('/rpc/set_worldobject_properties', params={
'world_uid': test_world,
'uid': uid,
'position': [20, 20],
'orientation': 27,
'name': 'edited'
})
assert_success(response)
response = app.get_json('/rpc/get_world_objects(world_uid="%s")' % test_world)
data = response.json_body['data']
assert data[uid]['position'] == [20, 20]
assert data[uid]['orientation'] == 27
assert data[uid]['name'] == 'edited'
def test_get_world_view(app, test_world):
response = app.get_json('/rpc/get_world_view(world_uid="%s", step=0)' % test_world)
assert_success(response)
assert 'agents' in response.json_body['data']
assert 'objects' in response.json_body['data']
assert response.json_body['data']['current_step'] == 0
assert 'step' not in response.json_body['data']
def test_set_worldagent_properties(app, test_world, test_nodenet):
# create agent.
app.set_auth()
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, worldadapter="Braitenberg", world_uid=test_world))
response = app.post_json('/rpc/set_worldagent_properties', params={
'world_uid': test_world,
'uid': test_nodenet,
'position': [23, 23],
'orientation': 37,
'name': 'Sepp'
})
assert_success(response)
response = app.get_json('/rpc/get_world_view(world_uid="%s", step=0)' % test_world)
data = response.json_body['data']['agents'][test_nodenet]
assert data['position'] == [23, 23]
assert data['orientation'] == 37
assert data['name'] == 'Sepp'
def test_new_world(app):
app.set_auth()
response = app.post_json('/rpc/new_world', params={
'world_name': 'FooBarTestWorld',
'world_type': 'Island'
})
assert_success(response)
uid = response.json_body['data']
response = app.get_json('/rpc/get_available_worlds(user_id="Pytest User")')
assert uid in response.json_body['data']
def test_get_available_world_types(app):
response = app.get_json('/rpc/get_available_world_types()')
assert_success(response)
assert 'Island' in response.json_body['data']
def test_delete_world(app, test_world):
response = app.get_json('/rpc/delete_world(world_uid="%s")' % test_world)
assert_success(response)
response = app.get_json('/rpc/get_available_worlds(user_id="Pytest User")')
assert test_world not in response.json_body['data']
def test_set_world_properties(app, test_world):
app.set_auth()
response = app.post_json('/rpc/set_world_properties', params={
'world_uid': test_world,
'world_name': 'asdf',
'owner': 'Pytest User'
})
assert_success(response)
response = app.get_json('/rpc/get_world_properties(world_uid="%s")' % test_world)
assert response.json_body['data']['name'] == "asdf"
def test_revert_world(app, test_world):
app.set_auth()
response = app.post_json('/rpc/add_worldobject', params={
'world_uid': test_world,
'type': 'Braintree',
'position': [10, 10],
'name': 'Testtree'
})
response = app.get_json('/rpc/revert_world(world_uid="%s")' % test_world)
assert_success(response)
response = app.get_json('/rpc/get_world_view(world_uid="%s",step=0)' % test_world)
data = response.json_body['data']
assert data['objects'] == {}
def test_save_world(app, test_world):
app.set_auth()
response = app.post_json('/rpc/add_worldobject', params={
'world_uid': test_world,
'type': 'Braintree',
'position': [10, 10],
'name': 'Testtree'
})
uid = response.json_body['data']
response = app.get_json('/rpc/save_world(world_uid="%s")' % test_world)
assert_success(response)
response = app.get_json('/rpc/revert_world(world_uid="%s")' % test_world)
response = app.get_json('/rpc/get_world_view(world_uid="%s",step=0)' % test_world)
data = response.json_body['data']
assert uid in data['objects']
# delete the world, to get the default state back
app.get_json('/rpc/delete_world(world_uid="%s")' % test_world)
def test_export_world(app, test_world):
response = app.get_json('/rpc/export_world(world_uid="%s")' % test_world)
assert_success(response)
export_data = json.loads(response.json_body['data'])
assert export_data['uid'] == test_world
assert export_data['name'] == 'World of Pain'
assert export_data['objects'] == {}
assert export_data['agents'] == {}
assert export_data['owner'] == 'Pytest User'
assert export_data['current_step'] == 0
assert export_data['world_type'] == 'Island'
def test_import_world(app, test_world):
response = app.get_json('/rpc/export_world(world_uid="%s")' % test_world)
data = json.loads(response.json_body['data'])
del data['uid']
data['name'] = 'Copied Pain'
response = app.post_json('/rpc/import_world', params={
'worlddata': json.dumps(data)
})
assert_success(response)
uid = response.json_body['data']
response = app.get_json('/rpc/export_world(world_uid="%s")' % uid)
data = json.loads(response.json_body['data'])
assert data['owner'] == 'Pytest User'
assert data['name'] == 'Copied Pain'
assert data['objects'] == {}
assert data['agents'] == {}
assert uid != test_world
###################################################
##
##
## MONITORS
##
##
###################################################
def test_export_monitor_data_all(app, test_nodenet):
response = app.get_json('/rpc/export_monitor_data(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert response.json_body['data'] == {}
def test_add_gate_monitor(app, test_nodenet, node):
response = app.post_json('/rpc/add_gate_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate': 'sub',
'sheaf': 'default'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/export_monitor_data', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert response.json_body['data']['node_uid'] == node
assert response.json_body['data']['target'] == 'sub'
assert response.json_body['data']['type'] == 'gate'
assert response.json_body['data']['sheaf'] == 'default'
assert response.json_body['data']['values'] == {}
def test_add_slot_monitor(app, test_nodenet, node):
response = app.post_json('/rpc/add_slot_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'slot': 'gen',
'name': 'Foobar'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/export_monitor_data', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert response.json_body['data']['name'] == 'Foobar'
assert response.json_body['data']['node_uid'] == node
assert response.json_body['data']['target'] == 'gen'
assert response.json_body['data']['type'] == 'slot'
assert response.json_body['data']['sheaf'] == 'default'
assert response.json_body['data']['values'] == {}
def test_add_link_monitor(app, test_nodenet, node):
response = app.post_json('/rpc/add_link_monitor', params={
'nodenet_uid': test_nodenet,
'source_node_uid': node,
'gate_type': 'gen',
'target_node_uid': node,
'slot_type': 'gen',
'property': 'weight',
'name': 'LinkWeight'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/export_monitor_data', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert response.json_body['data']['name'] == 'LinkWeight'
assert response.json_body['data']['source_node_uid'] == node
assert response.json_body['data']['gate_type'] == 'gen'
assert response.json_body['data']['target_node_uid'] == node
assert response.json_body['data']['slot_type'] == 'gen'
assert response.json_body['data']['property'] == 'weight'
def test_add_custom_monitor(app, test_nodenet):
response = app.post_json('/rpc/add_custom_monitor', params={
'nodenet_uid': test_nodenet,
'function': 'return len(netapi.get_nodes())',
'name': 'nodecount'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/export_monitor_data', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert response.json_body['data']['name'] == 'nodecount'
def test_remove_monitor(app, test_nodenet, node):
response = app.post_json('/rpc/add_slot_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'slot': 'gen'
})
uid = response.json_body['data']
response = app.post_json('/rpc/remove_monitor', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert_success(response)
response = app.post_json('/rpc/export_monitor_data', params={
'nodenet_uid': test_nodenet
})
assert uid not in response.json_body['data']
def test_clear_monitor(app, test_nodenet, node):
response = app.post_json('/rpc/add_slot_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'slot': 'gen'
})
uid = response.json_body['data']
response = app.post_json('/rpc/clear_monitor', params={
'nodenet_uid': test_nodenet,
'monitor_uid': uid
})
assert_success(response)
def test_get_monitor_data(app, test_nodenet, node):
response = app.post_json('/rpc/add_gate_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate': 'sub'
})
uid = response.json_body['data']
response = app.post_json('/rpc/get_monitor_data', params={
'nodenet_uid': test_nodenet,
'step': 0
})
assert_success(response)
assert uid in response.json_body['data']['monitors']
###################################################
##
##
## NODENET
##
##
###################################################
def test_get_nodespace_list(app, test_nodenet, node):
response = app.get_json('/rpc/get_nodespace_list(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
rootid = list(response.json_body['data'].keys())[0]
assert response.json_body['data'][rootid]['name'] == 'Root'
assert response.json_body['data'][rootid]['parent'] is None
assert node in response.json_body['data'][rootid]['nodes']
def test_get_nodespace(app, test_nodenet, node):
response = app.post_json('/rpc/get_nodespace', params={
'nodenet_uid': test_nodenet,
'nodespace': None,
'include_links': True,
'step': -1,
})
assert_success(response)
assert node in response.json_body['data']['nodes']
def test_get_node(app, test_nodenet, node):
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, node))
assert_success(response)
assert response.json_body['data']['type'] == 'Pipe'
def test_add_node(app, test_nodenet):
app.set_auth()
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Register',
'position': [23, 42],
'nodespace': None,
'name': 'N2'
})
assert_success(response)
uid = response.json_body['data']
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, uid))
assert response.json_body['data']['name'] == 'N2'
def test_add_nodespace(app, test_nodenet):
app.set_auth()
response = app.post_json('/rpc/add_nodespace', params={
'nodenet_uid': test_nodenet,
'position': [23, 42],
'nodespace': None,
'name': 'nodespace'
})
assert_success(response)
uid = response.json_body['data']
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % (test_nodenet))
assert uid in response.json_body['data']['nodespaces']
assert uid not in response.json_body['data']['nodes']
def test_clone_nodes(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/clone_nodes', params={
'nodenet_uid': test_nodenet,
'node_uids': [node],
'clone_mode': 'all',
'nodespace': None,
'offset': [23, 23]
})
assert_success(response)
node = response.json_body['data']['nodes'][0]
link = response.json_body['data']['links'][0]
assert node['name'] == 'N1_copy'
assert node['position'] == [33, 33]
assert link['source_node_uid'] == node['uid']
assert link['target_node_uid'] == node['uid']
def test_set_node_position(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/set_node_position', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'position': [42, 23]
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, node))
assert response.json_body['data']['position'] == [42, 23]
def test_set_node_name(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/set_node_name', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'name': 'changed'
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, node))
assert response.json_body['data']['name'] == 'changed'
def test_delete_node(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/delete_node', params={
'nodenet_uid': test_nodenet,
'node_uid': node
})
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response.json_body['data']['nodes'] == {}
def test_delete_nodespace(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/add_nodespace', params={
'nodenet_uid': test_nodenet,
'position': [23, 42],
'nodespace': None,
'name': 'nodespace'
})
uid = response.json_body['data']
response = app.post_json('/rpc/delete_nodespace', params={
'nodenet_uid': test_nodenet,
'nodespace_uid': uid
})
assert_success(response)
response = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert uid not in response.json_body['data']['nodespaces']
def test_align_nodes(app, test_nodenet):
app.set_auth()
# TODO: Why does autoalign only move a node if it has no links?
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Register',
'position': [5, 5],
'nodespace': None,
'name': 'N2'
})
uid = response.json_body['data']
response = app.post_json('/rpc/align_nodes', params={
'nodenet_uid': test_nodenet,
'nodespace': None
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, uid))
assert response.json_body['data']['position'] != [5, 5]
def test_get_available_node_types(app, test_nodenet):
response = app.get_json('/rpc/get_available_node_types(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert 'Pipe' in response.json_body['data']
assert 'Register' in response.json_body['data']
assert 'Sensor' in response.json_body['data']
def test_get_available_native_module_types(app, test_nodenet):
response = app.get_json('/rpc/get_available_native_module_types(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert response.json_body['data'] == {}
def test_set_node_parameters(app, test_nodenet):
app.set_auth()
# add activator
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Activator',
'nodespace': None,
'position': [23, 42],
})
uid = response.json_body['data']
response = app.post_json('/rpc/set_node_parameters', params={
'nodenet_uid': test_nodenet,
'node_uid': uid,
'parameters': {'type': 'sub'}
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, uid))
assert response.json_body['data']['parameters']['type'] == 'sub'
def test_get_gatefunction(app, test_nodenet, node):
response = app.post_json('/rpc/get_gatefunction', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate_type': 'gen'
})
assert_success(response)
assert response.json_body['data'] == 'identity'
def test_set_gatefunction(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/set_gatefunction', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate_type': 'gen',
'gatefunction': 'sigmoid'
})
assert_success(response)
response = app.post_json('/rpc/get_gatefunction', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate_type': 'gen',
})
assert response.json_body['data'] == 'sigmoid'
def test_get_available_gatefunctions(app, test_nodenet):
response = app.post_json('/rpc/get_available_gatefunctions', params={'nodenet_uid': test_nodenet})
funcs = response.json_body['data']
assert 'sigmoid' in funcs
assert 'identity' in funcs
assert 'absolute' in funcs
def test_set_gate_parameters(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/set_gate_parameters', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate_type': 'gen',
'parameters': {'minimum': -2}
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, node))
assert response.json_body['data']['gate_parameters']['gen']['minimum'] == -2
def test_get_available_datasources(app, test_nodenet, test_world):
app.set_auth()
# set worldadapter
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, world_uid=test_world, worldadapter="Braitenberg"))
response = app.get_json('/rpc/get_available_datasources(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert 'brightness_l' in response.json_body['data']
assert 'brightness_r' in response.json_body['data']
def test_get_available_datatargets(app, test_nodenet, test_world):
app.set_auth()
response = app.post_json('/rpc/set_nodenet_properties', params=dict(nodenet_uid=test_nodenet, world_uid=test_world, worldadapter="Braitenberg"))
response = app.get_json('/rpc/get_available_datatargets(nodenet_uid="%s")' % test_nodenet)
assert_success(response)
assert 'engine_l' in response.json_body['data']
assert 'engine_r' in response.json_body['data']
def test_bind_datasource_to_sensor(app, test_nodenet):
app.set_auth()
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Sensor',
'position': [23, 42],
'nodespace': None,
})
uid = response.json_body['data']
response = app.post_json('/rpc/bind_datasource_to_sensor', params={
'nodenet_uid': test_nodenet,
'sensor_uid': uid,
'datasource': 'brightness_l'
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, uid))
assert response.json_body['data']['parameters']['datasource'] == 'brightness_l'
def test_bind_datatarget_to_actor(app, test_nodenet):
app.set_auth()
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Actor',
'position': [23, 42],
'nodespace': None,
})
uid = response.json_body['data']
response = app.post_json('/rpc/bind_datatarget_to_actor', params={
'nodenet_uid': test_nodenet,
'actor_uid': uid,
'datatarget': 'engine_l'
})
assert_success(response)
response = app.get_json('/rpc/get_node(nodenet_uid="%s",node_uid="%s")' % (test_nodenet, uid))
assert response.json_body['data']['parameters']['datatarget'] == 'engine_l'
def test_add_link(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/add_link', params={
'nodenet_uid': test_nodenet,
'source_node_uid': node,
'gate_type': 'sub',
'target_node_uid': node,
'slot_type': 'gen',
'weight': 0.7
})
assert_success(response)
uid = response.json_body['data']
assert uid is not None
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
assert uid in data['links']
def test_set_link_weight(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/set_link_weight', params={
'nodenet_uid': test_nodenet,
'source_node_uid': node,
'gate_type': "gen",
'target_node_uid': node,
'slot_type': "gen",
'weight': 0.345
})
assert_success(response)
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
for link in data['links'].values():
assert float("%.3f" % link['weight']) == 0.345
def test_get_links_for_nodes(app, test_nodenet, node):
response = app.post_json('/rpc/get_links_for_nodes', params={
'nodenet_uid': test_nodenet,
'node_uids': [node]
})
assert_success(response)
link = list(response.json_body['data']['links'].values())[0]
assert link['source_node_uid'] == node
def test_delete_link(app, test_nodenet, node):
app.set_auth()
response = app.post_json('/rpc/delete_link', params={
'nodenet_uid': test_nodenet,
'source_node_uid': node,
'gate_type': "gen",
'target_node_uid': node,
'slot_type': "gen"
})
assert_success(response)
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
assert data['links'] == {}
def test_reload_native_modules(app, test_nodenet, nodetype_def, nodefunc_def):
app.set_auth()
# create a native module:
with open(nodetype_def, 'w') as fp:
fp.write('{"Testnode": {\
"name": "Testnode",\
"slottypes": ["gen", "foo", "bar"],\
"nodefunction_name": "testnodefunc",\
"gatetypes": ["gen", "foo", "bar"],\
"symbol": "t"}}')
with open(nodefunc_def, 'w') as fp:
fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17")
response = app.get_json('/rpc/reload_native_modules()')
assert_success(response)
response = app.get_json('/rpc/get_available_node_types(nodenet_uid="%s")' % test_nodenet)
data = response.json_body['data']['Testnode']
assert data['nodefunction_name'] == "testnodefunc"
assert data['gatetypes'] == ['gen', 'foo', 'bar']
assert data['slottypes'] == ['gen', 'foo', 'bar']
assert data['name'] == 'Testnode'
def test_user_prompt_response(app, test_nodenet, nodetype_def, nodefunc_def):
app.set_auth()
# create a native module:
with open(nodetype_def, 'w') as fp:
fp.write('{"Testnode": {\
"name": "Testnode",\
"slottypes": ["gen", "foo", "bar"],\
"nodefunction_name": "testnodefunc",\
"gatetypes": ["gen", "foo", "bar"],\
"symbol": "t"}}')
with open(nodefunc_def, 'w') as fp:
fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17")
response = app.get_json('/rpc/reload_native_modules()')
assert_success(response)
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Testnode',
'position': [23, 23],
'nodespace': None,
'name': 'Testnode'
})
assert_success(response)
uid = response.json_body['data']
response = app.post_json('/rpc/user_prompt_response', {
'nodenet_uid': test_nodenet,
'node_uid': uid,
'values': {'foo': 'bar'},
'resume_nodenet': True
})
assert_success(response)
response = app.get_json('/rpc/export_nodenet(nodenet_uid="%s")' % test_nodenet)
data = json.loads(response.json_body['data'])
assert data['nodes'][uid]['parameters']['foo'] == 'bar'
assert data['is_active']
def test_set_logging_levels(app):
response = app.post_json('/rpc/set_logging_levels', params={
'system': 'INFO',
'world': 'DEBUG',
'nodenet': 'CRITICAL'
})
assert_success(response)
import logging
assert logging.getLogger('nodenet').getEffectiveLevel() == 50
assert logging.getLogger('world').getEffectiveLevel() == 10
assert logging.getLogger('system').getEffectiveLevel() == 20
def test_get_logger_messages(app, test_nodenet):
response = app.get_json('/rpc/get_logger_messages(logger=["system"])')
assert_success(response)
assert 'servertime' in response.json_body['data']
assert response.json_body['data']['logs'] == []
def test_get_monitoring_info(app, test_nodenet):
response = app.get_json('/rpc/get_monitoring_info(nodenet_uid="%s",logger=["system,world"])' % test_nodenet)
assert_success(response)
assert 'logs' in response.json_body['data']
assert 'current_step' in response.json_body['data']
assert response.json_body['data']['monitors'] == {}
assert 'servertime' in response.json_body['data']['logs']
assert response.json_body['data']['logs']['logs'] == []
def test_400(app):
app.set_auth()
response = app.get_json('/rpc/save_nodenet("foobar")', expect_errors=True)
assert_failure(response)
assert "Malformed arguments" in response.json_body['data']
def test_401(app, test_nodenet):
app.unset_auth()
response = app.get_json('/rpc/delete_nodenet(nodenet_uid="%s")' % test_nodenet, expect_errors=True)
assert_failure(response)
assert 'Insufficient permissions' in response.json_body['data']
def test_404(app):
response = app.get_json('/rpc/notthere(foo="bar")', expect_errors=True)
assert_failure(response)
assert response.json_body['data'] == "Function not found"
def test_405(app, test_nodenet):
response = app.get_json('/rpc/get_available_nodenets', params={'nodenet_uid': test_nodenet}, expect_errors=True)
assert_failure(response)
assert response.json_body['data'] == "Method not allowed"
def test_500(app):
response = app.get_json('/rpc/generate_uid(foo="bar")', expect_errors=True)
assert_failure(response)
assert "unexpected keyword argument" in response.json_body['data']
assert response.json_body['traceback'] is not None
def test_get_recipes(app, test_nodenet, recipes_def):
app.set_auth()
with open(recipes_def, 'w') as fp:
fp.write("""
def foobar(netapi, quatsch=23):
return quatsch
""")
response = app.get_json('/rpc/reload_native_modules()')
response = app.get_json('/rpc/get_available_recipes()')
data = response.json_body['data']
assert 'foobar' in data
assert len(data['foobar']['parameters']) == 1
assert data['foobar']['parameters'][0]['name'] == 'quatsch'
assert data['foobar']['parameters'][0]['default'] == 23
def test_run_recipes(app, test_nodenet, recipes_def):
app.set_auth()
with open(recipes_def, 'w') as fp:
fp.write("""
def foobar(netapi, quatsch=23):
return quatsch
""")
response = app.get_json('/rpc/reload_native_modules()')
response = app.post_json('/rpc/run_recipe', {
'nodenet_uid': test_nodenet,
'name': 'foobar',
'parameters': {
'quatsch': ''
}
})
data = response.json_body['data']
assert data == 23
def test_nodenet_data_structure(app, test_nodenet, nodetype_def, nodefunc_def, node):
app.set_auth()
with open(nodetype_def, 'w') as fp:
fp.write('{"Testnode": {\
"name": "Testnode",\
"slottypes": ["gen", "foo", "bar"],\
"nodefunction_name": "testnodefunc",\
"gatetypes": ["gen", "foo", "bar"],\
"symbol": "t"}}')
with open(nodefunc_def, 'w') as fp:
fp.write("def testnodefunc(netapi, node=None, **prams):\r\n return 17")
response = app.get_json('/rpc/reload_native_modules()')
response = app.post_json('/rpc/add_nodespace', params={
'nodenet_uid': test_nodenet,
'position': [23, 23],
'nodespace': None,
'name': 'Test-Node-Space'
})
nodespace_uid = response.json_body['data']
response = app.post_json('/rpc/add_node', params={
'nodenet_uid': test_nodenet,
'type': 'Pipe',
'position': [42, 42],
'nodespace': nodespace_uid,
'name': 'N2'
})
n2_uid = response.json_body['data']
response = app.post_json('/rpc/add_gate_monitor', params={
'nodenet_uid': test_nodenet,
'node_uid': node,
'gate': 'gen',
'name': 'Testmonitor',
'color': '#332211'
})
monitor_uid = response.json_body['data']
response_1 = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
response = app.get_json('/rpc/save_nodenet(nodenet_uid="%s")' % test_nodenet)
response = app.get_json('/rpc/revert_nodenet(nodenet_uid="%s")' % test_nodenet)
response_2 = app.get_json('/rpc/load_nodenet(nodenet_uid="%s")' % test_nodenet)
assert response_1.json_body['data'] == response_2.json_body['data']
data = response_2.json_body['data']
# Monitors
response = app.get_json('/rpc/export_monitor_data(nodenet_uid="%s", monitor_uid="%s")' % (test_nodenet, monitor_uid))
monitor_data = response.json_body['data']
assert data['monitors'][monitor_uid]['name'] == 'Testmonitor'
assert data['monitors'][monitor_uid]['node_uid'] == node
assert data['monitors'][monitor_uid]['target'] == 'gen'
assert data['monitors'][monitor_uid]['type'] == 'gate'
assert data['monitors'][monitor_uid]['uid'] == monitor_uid
assert data['monitors'][monitor_uid]['values'] == {}
assert data['monitors'][monitor_uid]['color'] == '#332211'
assert data['monitors'][monitor_uid] == monitor_data
# Nodes
response = app.get_json('/rpc/get_node(nodenet_uid="%s", node_uid="%s")' % (test_nodenet, node))
node_data = response.json_body['data']
assert node in data['nodes']
assert n2_uid not in data['nodes']
assert nodespace_uid not in data['nodes']
# gates
for key in ['gen', 'por', 'ret', 'sub', 'sur', 'cat', 'exp']:
assert data['nodes'][node]['gate_activations'][key]['default']['activation'] == 0
assert key not in data['nodes'][node]['gate_parameters']
assert data['nodes'][node]['gate_functions'][key] == 'identity'
assert data['nodes'][node]['parameters']['expectation'] == 1
assert data['nodes'][node]['parameters']['wait'] == 10
assert data['nodes'][node]['position'] == [10, 10]
assert data['nodes'][node]['type'] == "Pipe"
assert data['nodes'][node] == node_data
# Links
for link in data['links'].values():
assert link['weight'] == 1
assert link['certainty'] == 1
assert link['source_node_uid'] == node
assert link['target_node_uid'] == node
assert link['source_gate_name'] == 'gen'
assert link['target_slot_name'] == 'gen'
# Nodespaces
# assert data['nodespaces'][nodespace_uid]['index'] == 3
assert data['nodespaces'][nodespace_uid]['name'] == 'Test-Node-Space'
# assert data['nodespaces'][nodespace_uid]['parent_nodespace'] == 'Root'
assert data['nodespaces'][nodespace_uid]['position'] == [23, 23]
# Nodetypes
response = app.get_json('/rpc/get_available_node_types(nodenet_uid="%s")' % test_nodenet)
node_type_data = response.json_body['data']
for key in ['Comment', 'Nodespace']:
assert 'gatetypes' not in data['nodetypes'][key]
assert 'slottypes' not in data['nodetypes'][key]
for key in ['Pipe', 'Register', 'Actor']:
assert 'gatetypes' in data['nodetypes'][key]
assert 'slottypes' in data['nodetypes'][key]
assert 'slottypes' in data['nodetypes']['Activator']
assert 'gatetypes' not in data['nodetypes']['Activator']
assert 'slottypes' not in data['nodetypes']['Sensor']
assert 'gatetypes' in data['nodetypes']['Sensor']
assert data['nodetypes'] == node_type_data
# Native Modules
response = app.get_json('/rpc/get_available_native_module_types(nodenet_uid="%s")' % test_nodenet)
native_module_data = response.json_body['data']
assert data['native_modules']['Testnode']['gatetypes'] == ['gen', 'foo', 'bar']
assert data['native_modules']['Testnode']['name'] == 'Testnode'
assert data['native_modules']['Testnode']['nodefunction_name'] == 'testnodefunc'
assert data['native_modules']['Testnode']['slottypes'] == ['gen', 'foo', 'bar']
assert data['native_modules']['Testnode']['symbol'] == 't'
assert data['native_modules'] == native_module_data
# Nodenet
assert data['current_step'] == 0 # TODO:
assert 'step' not in data # current_step && step?
assert data['version'] == 1
assert data['world'] is None
assert data['worldadapter'] is None
|
mit
|
grounduphq/stexchange
|
stexchange/stexchange/migrations/0002_auto__add_field_member_member_number.py
|
1
|
4201
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Member.member_number'
db.add_column(u'stexchange_member', 'member_number',
self.gf('django.db.models.fields.PositiveIntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Member.member_number'
db.delete_column(u'stexchange_member', 'member_number')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'stexchange.member': {
'Meta': {'object_name': 'Member'},
'balance': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'member_number': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['stexchange']
|
mit
|
lombritz/odoo
|
addons/project_issue/report/project_issue_report.py
|
303
|
4652
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp import tools
from openerp.addons.crm import crm
class project_issue_report(osv.osv):
_name = "project.issue.report"
_auto = False
_columns = {
'section_id':fields.many2one('crm.case.section', 'Sale Team', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'opening_date': fields.datetime('Date of Opening', readonly=True),
'create_date': fields.datetime('Create Date', readonly=True),
'date_closed': fields.datetime('Date of Closing', readonly=True),
'date_last_stage_update': fields.datetime('Last Stage Update', readonly=True),
'stage_id': fields.many2one('project.task.type', 'Stage'),
'nbr': fields.integer('# of Issues', readonly=True), # TDE FIXME master: rename into nbr_issues
'working_hours_open': fields.float('Avg. Working Hours to Open', readonly=True, group_operator="avg"),
'working_hours_close': fields.float('Avg. Working Hours to Close', readonly=True, group_operator="avg"),
'delay_open': fields.float('Avg. Delay to Open', digits=(16,2), readonly=True, group_operator="avg",
help="Number of Days to open the project issue."),
'delay_close': fields.float('Avg. Delay to Close', digits=(16,2), readonly=True, group_operator="avg",
help="Number of Days to close the project issue"),
'priority': fields.selection([('0','Low'), ('1','Normal'), ('2','High')], 'Priority'),
'project_id':fields.many2one('project.project', 'Project',readonly=True),
'version_id': fields.many2one('project.issue.version', 'Version'),
'user_id' : fields.many2one('res.users', 'Assigned to',readonly=True),
'partner_id': fields.many2one('res.partner','Contact'),
'channel': fields.char('Channel', readonly=True, help="Communication Channel."),
'task_id': fields.many2one('project.task', 'Task'),
'email': fields.integer('# Emails', size=128, readonly=True),
'reviewer_id': fields.many2one('res.users', 'Reviewer', readonly=True),
}
def init(self, cr):
tools.drop_view_if_exists(cr, 'project_issue_report')
cr.execute("""
CREATE OR REPLACE VIEW project_issue_report AS (
SELECT
c.id as id,
c.date_open as opening_date,
c.create_date as create_date,
c.date_last_stage_update as date_last_stage_update,
c.user_id,
c.working_hours_open,
c.working_hours_close,
c.section_id,
c.stage_id,
date(c.date_closed) as date_closed,
c.company_id as company_id,
c.priority as priority,
c.project_id as project_id,
c.version_id as version_id,
1 as nbr,
c.partner_id,
c.channel,
c.task_id,
c.day_open as delay_open,
c.day_close as delay_close,
(SELECT count(id) FROM mail_message WHERE model='project.issue' AND res_id=c.id) AS email,
t.reviewer_id
FROM
project_issue c
LEFT JOIN project_task t on c.task_id = t.id
WHERE c.active= 'true'
)""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
doduytrung/odoo-8.0
|
addons/account/report/account_general_journal.py
|
381
|
7669
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
from common_report_header import common_report_header
class journal_print(report_sxw.rml_parse, common_report_header):
def __init__(self, cr, uid, name, context=None):
if context is None:
context = {}
super(journal_print, self).__init__(cr, uid, name, context=context)
self.period_ids = []
self.journal_ids = []
self.localcontext.update( {
'time': time,
'lines': self.lines,
'periods': self.periods,
'sum_debit_period': self._sum_debit_period,
'sum_credit_period': self._sum_credit_period,
'sum_debit': self._sum_debit,
'sum_credit': self._sum_credit,
'get_fiscalyear': self._get_fiscalyear,
'get_account': self._get_account,
'get_start_period': self.get_start_period,
'get_end_period': self.get_end_period,
'get_sortby': self._get_sortby,
'get_filter': self._get_filter,
'get_journal': self._get_journal,
'get_start_date':self._get_start_date,
'get_end_date':self._get_end_date,
'display_currency':self._display_currency,
'get_target_move': self._get_target_move,
})
def set_context(self, objects, data, ids, report_type=None):
obj_move = self.pool.get('account.move.line')
new_ids = ids
self.query_get_clause = ''
self.target_move = data['form'].get('target_move', 'all')
if (data['model'] == 'ir.ui.menu'):
new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or []
self.query_get_clause = 'AND '
self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
if new_ids:
self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
res = self.cr.fetchall()
self.period_ids, self.journal_ids = zip(*res)
return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)
# returns a list of period objs
def periods(self, journal_period_objs):
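        # Deduplicate journal periods by period_id, preserving input order:
        # 'dic' records the period_ids already seen, so filter_unique keeps
        # only the first journal_period carrying each period.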
dic = {}
def filter_unique(o):
key = o.period_id.id
res = key in dic
if not res:
dic[key] = True
return not res
filtered_objs = filter(filter_unique, journal_period_objs)
return map(lambda x: x.period_id, filtered_objs)
def lines(self, period_id):
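        # Aggregate debit/credit totals per journal (and per currency) for
        # the given period, restricted to the selected journals/move states.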
if not self.journal_ids:
return []
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT j.code, j.name, l.amount_currency,c.symbol AS currency_code,l.currency_id, '
'SUM(l.debit) AS debit, SUM(l.credit) AS credit '
'FROM account_move_line l '
'LEFT JOIN account_move am ON (l.move_id=am.id) '
'LEFT JOIN account_journal j ON (l.journal_id=j.id) '
'LEFT JOIN res_currency c on (l.currency_id=c.id)'
'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' '
'GROUP BY j.id, j.code, j.name, l.amount_currency, c.symbol, l.currency_id ',
(tuple(move_state), period_id, tuple(self.journal_ids)))
return self.cr.dictfetchall()
def _set_get_account_currency_code(self, account_id):
self.cr.execute("SELECT c.symbol AS code "\
"FROM res_currency c, account_account AS ac "\
"WHERE ac.id = %s AND ac.currency_id = c.id" % (account_id))
result = self.cr.fetchone()
if result:
self.account_currency = result[0]
else:
self.account_currency = False
def _get_account(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
return super(journal_print, self)._get_account(data)
def _get_fiscalyear(self, data):
if data['model'] == 'account.journal.period':
return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).fiscalyear_id.name
return super(journal_print, self)._get_fiscalyear(data)
def _display_currency(self, data):
if data['model'] == 'account.journal.period':
return True
return data['form']['amount_currency']
    def _sum_debit_period(self, period_id, journal_id=None):
if journal_id:
journals = [journal_id]
else:
journals = self.journal_ids
if not journals:
return 0.0
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
self.cr.execute('SELECT SUM(l.debit) FROM account_move_line l '
'LEFT JOIN account_move am ON (l.move_id=am.id) '
'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' ' \
'AND l.state<>\'draft\'',
(tuple(move_state), period_id, tuple(journals)))
return self.cr.fetchone()[0] or 0.0
def _sum_credit_period(self, period_id, journal_id=None):
if journal_id:
journals = [journal_id]
else:
journals = self.journal_ids
move_state = ['draft','posted']
if self.target_move == 'posted':
move_state = ['posted']
if not journals:
return 0.0
self.cr.execute('SELECT SUM(l.credit) FROM account_move_line l '
'LEFT JOIN account_move am ON (l.move_id=am.id) '
'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s '+ self.query_get_clause + ' ' \
'AND l.state<>\'draft\'',
(tuple(move_state), period_id, tuple(journals)))
return self.cr.fetchone()[0] or 0.0
class report_generaljournal(osv.AbstractModel):
_name = 'report.account.report_generaljournal'
_inherit = 'report.abstract_report'
_template = 'account.report_generaljournal'
_wrapped_report_class = journal_print
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
harshvardhanmalpani/googlepersonfinder
|
app/importer.py
|
7
|
14996
|
#!/usr/bin/python2.7
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for importing records in batches, with error detection.
This module converts Python dictionaries into datastore entities.
The values of all dictionary fields are Unicode strings."""
__author__ = '[email protected] (Ka-Ping Yee) and many other Googlers'
import datetime
import logging
import re
import sys
from google.appengine.api import datastore_errors
import subscribe
from model import *
from utils import validate_sex, validate_status, validate_approximate_date, \
validate_age, get_utcnow, get_full_name
DEFAULT_PUT_RETRIES = 3
MAX_PUT_BATCH = 100
def utf8_decoder(dict_reader):
"""Yields a dictionary where all string values are converted to Unicode.
Args:
dict_reader: An iterable that yields dictionaries with string values
Yields:
A dictionary with all string values converted to Unicode.
"""
for record in dict_reader:
for key in record:
value = record[key]
if isinstance(value, str):
record[key] = value.decode('utf-8')
yield record
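# A minimal usage sketch (the file name and consumer are assumptions):
#
#   import csv
#   with open('records.csv') as f:
#       for record in utf8_decoder(csv.DictReader(f)):
#           process(record)  # hypothetical consumer; string values are now unicode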
def put_batch(batch, retries=DEFAULT_PUT_RETRIES):
for attempt in range(retries):
try:
db.put(batch)
logging.info('Imported records: %d' % len(batch))
return len(batch)
        except Exception:
            exc_type, exc_value, _ = sys.exc_info()
            logging.warn('Retrying batch: %s' % exc_value)
return 0
date_re = re.compile(r'^(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d):(\d\d):(\d\d)Z$')
def strip(string_or_none):
if not string_or_none:
return ''
return string_or_none.strip() or ''
def validate_datetime(datetime_or_datestring):
if isinstance(datetime_or_datestring, datetime.datetime):
return datetime_or_datestring
if not datetime_or_datestring:
return None # A missing value is okay.
match = date_re.match(datetime_or_datestring)
if match:
return datetime.datetime(*map(int, match.groups()))
raise ValueError('Bad datetime: %r' % datetime_or_datestring)
def validate_boolean(string):
if not string:
return None # A missing value is okay.
return (isinstance(string, basestring) and
string.strip().lower() in ['true', 'yes', 'y', '1'])
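# Illustrative behaviour of the validators above (inputs are assumptions):
#   validate_datetime('2010-01-16T03:04:05Z') -> datetime.datetime(2010, 1, 16, 3, 4, 5)
#   validate_boolean('Yes') -> True, validate_boolean('0') -> False,
#   validate_boolean('') -> None (a missing value is okay)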
def create_person(repo, fields):
"""Creates a Person entity in the given repository with the given field
values. If 'fields' contains a 'person_record_id', calling put() on the
resulting entity will overwrite any existing (original or clone) record
with the same person_record_id. Otherwise, a new original person record is
created in the given repository."""
person_fields = dict(
entry_date=get_utcnow(),
expiry_date=validate_datetime(fields.get('expiry_date')),
author_name=strip(fields.get('author_name')),
author_email=strip(fields.get('author_email')),
author_phone=strip(fields.get('author_phone')),
source_name=strip(fields.get('source_name')),
source_url=strip(fields.get('source_url')),
source_date=validate_datetime(fields.get('source_date')),
full_name=strip(fields.get('full_name')),
given_name=strip(fields.get('given_name')),
family_name=strip(fields.get('family_name')),
alternate_names=strip(fields.get('alternate_names')),
description=strip(fields.get('description')),
sex=validate_sex(fields.get('sex')),
date_of_birth=validate_approximate_date(fields.get('date_of_birth')),
age=validate_age(fields.get('age')),
home_street=strip(fields.get('home_street')),
home_neighborhood=strip(fields.get('home_neighborhood')),
home_city=strip(fields.get('home_city')),
home_state=strip(fields.get('home_state')),
home_postal_code=strip(fields.get('home_postal_code')),
home_country=strip(fields.get('home_country')),
photo_url=strip(fields.get('photo_url')),
profile_urls=strip(fields.get('profile_urls')),
)
    # For PFIF 1.3 or older, full_name was an optional field; if it is
    # empty, populate it from given_name and family_name.
if not person_fields['full_name'].strip():
person_fields['full_name'] = get_full_name(
person_fields['given_name'],
person_fields['family_name'],
config.Configuration(repo))
record_id = strip(fields.get('person_record_id'))
if record_id: # create a record that might overwrite an existing one
if is_clone(repo, record_id):
return Person.create_clone(repo, record_id, **person_fields)
else:
return Person.create_original_with_record_id(
repo, record_id, **person_fields)
else: # create a new original record
return Person.create_original(repo, **person_fields)
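# Hypothetical call, assuming a 'haiti' repository and PFIF-style field names:
#   person = create_person('haiti', {
#       'full_name': u'Jane Doe',
#       'source_date': u'2010-01-16T00:00:00Z',
#   })
#   person.put()  # writes (or overwrites) the record in the datastore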
def create_note(repo, fields):
"""Creates a Note entity in the given repository with the given field
values. If 'fields' contains a 'note_record_id', calling put() on the
resulting entity will overwrite any existing (original or clone) record
with the same note_record_id. Otherwise, a new original note record is
created in the given repository."""
assert strip(fields.get('person_record_id')), 'person_record_id is required'
assert strip(fields.get('source_date')), 'source_date is required'
note_fields = dict(
person_record_id=strip(fields['person_record_id']),
linked_person_record_id=strip(fields.get('linked_person_record_id')),
author_name=strip(fields.get('author_name')),
author_email=strip(fields.get('author_email')),
author_phone=strip(fields.get('author_phone')),
source_date=validate_datetime(fields.get('source_date')),
status=validate_status(fields.get('status')),
author_made_contact=validate_boolean(fields.get('author_made_contact')),
email_of_found_person=strip(fields.get('email_of_found_person')),
phone_of_found_person=strip(fields.get('phone_of_found_person')),
last_known_location=strip(fields.get('last_known_location')),
text=fields.get('text'),
photo_url=fields.get('photo_url'),
entry_date=get_utcnow(),
)
record_id = strip(fields.get('note_record_id'))
if record_id: # create a record that might overwrite an existing one
if is_clone(repo, record_id):
return Note.create_clone(repo, record_id, **note_fields)
else:
return Note.create_original_with_record_id(
repo, record_id, **note_fields)
else: # create a new original record
return Note.create_original(repo, **note_fields)
def filter_new_notes(entities, repo):
"""Filter the notes which are new."""
notes = []
for entity in entities:
        # Send an email notification for new notes only
if isinstance(entity, Note):
if not Note.get(repo, entity.get_note_record_id()):
notes.append(entity)
return notes
def send_notifications(handler, persons, notes):
"""For each note, send a notification to subscriber.
Args:
notes: List of notes for which to send notification.
persons: Dictionary of persons impacted by the notes,
indexed by person_record_id.
handler: Handler used to send email notification.
"""
for note in notes:
person = persons[note.person_record_id]
subscribe.send_notifications(handler, person, [note])
def notes_match(a, b):
fields = ['person_record_id', 'author_name', 'author_email', 'author_phone',
'source_date', 'status', 'author_made_contact',
'email_of_found_person', 'phone_of_found_person',
'last_known_location', 'text', 'photo_url']
return [getattr(a, f) for f in fields] == [getattr(b, f) for f in fields]
def import_records(repo, domain, converter, records,
mark_notes_reviewed=False,
believed_dead_permission=False,
handler=None,
omit_duplicate_notes=False):
"""Convert and import a list of entries into a respository.
Args:
repo: Identifies the repository in which to store the records.
domain: Accept only records that have this original domain. Only one
original domain may be imported at a time.
converter: A function to transform a dictionary of fields to a
datastore entity. This function may throw an exception if there
is anything wrong with the input fields and import_records will
skip the bad record. The key_name of the resulting datastore
entity must begin with domain + '/', or the record will be skipped.
records: A list of dictionaries representing the entries.
mark_notes_reviewed: If true, mark the new notes as reviewed.
believed_dead_permission: If true, allow importing notes with status
as 'believed_dead'; otherwise skip the note and return an error.
handler: Handler to use to send e-mail notification for notes. If this
is None, then we do not send e-mail.
omit_duplicate_notes: If true, skip any Notes that are identical to
existing Notes on the same Person.
Returns:
The number of passed-in records that were written (not counting other
Person records that were updated because they have new Notes), a list
of (error_message, record) pairs for the skipped records, and the
number of records processed in total.
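    Example (illustrative only; the repo, domain and records are assumptions):
        written, skipped, total = import_records(
            'haiti', 'example.org', create_person, records)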
"""
persons = {} # Person entities to write
notes = {} # Note entities to write
skipped = [] # entities skipped due to an error
total = 0 # total number of entities for which conversion was attempted
for fields in records:
total += 1
try:
entity = converter(repo, fields)
except (KeyError, ValueError, AssertionError,
datastore_errors.BadValueError), e:
skipped.append((e.__class__.__name__ + ': ' + str(e), fields))
continue
if entity.original_domain != domain:
skipped.append(
('Not in authorized domain: %r' % entity.record_id, fields))
continue
if isinstance(entity, Person):
entity.update_index(['old', 'new'])
persons[entity.record_id] = entity
if isinstance(entity, Note):
# Check whether reporting 'believed_dead' in note is permitted.
if (not believed_dead_permission and \
entity.status == 'believed_dead'):
skipped.append(
('Not authorized to post notes with ' \
'the status \"believed_dead\"',
fields))
continue
# Check whether commenting is already disabled by record author.
existing_person = Person.get(repo, entity.person_record_id)
if existing_person and existing_person.notes_disabled:
skipped.append(
('The author has disabled new commenting on this record',
fields))
continue
# Check whether the note is a duplicate.
if omit_duplicate_notes:
other_notes = Note.get_by_person_record_id(
repo, entity.person_record_id, filter_expired=False)
if any(notes_match(entity, note) for note in other_notes):
skipped.append(
('This is a duplicate of an existing note', fields))
continue
entity.reviewed = mark_notes_reviewed
notes[entity.record_id] = entity
# We keep two dictionaries 'persons' and 'extra_persons', with disjoint
# key sets: Person entities for the records passed in to import_records()
# go in 'persons', and any other Person entities affected by the import go
# in 'extra_persons'. The two dictionaries are kept separate in order to
# produce a count of records written that only counts 'persons'.
extra_persons = {} # updated Persons other than those being imported
# For each Note, update the latest_* fields on the associated Person.
# We do these updates in dictionaries keyed by person_record_id so that
# multiple updates for one person_record_id will mutate the same object.
for note in notes.values():
if note.person_record_id in persons:
# This Note belongs to a Person that is being imported.
person = persons[note.person_record_id]
elif note.person_record_id in extra_persons:
# This Note belongs to some other Person that is not part of this
# import and is already being updated due to another Note.
person = extra_persons[note.person_record_id]
else:
# This Note belongs to some other Person that is not part of this
# import and this is the first such Note in this import.
person = Person.get(repo, note.person_record_id)
if not person:
continue
extra_persons[note.person_record_id] = person
person.update_from_note(note)
# TODO(kpy): Don't overwrite existing Persons with newer source_dates.
# Now store the imported Persons and Notes, and count them.
entities = persons.values() + notes.values()
all_persons = dict(persons, **extra_persons)
written = 0
while entities:
# The presence of a handler indicates we should notify subscribers
# for any new notes being written. We do not notify on
# "re-imported" existing notes to avoid spamming subscribers.
new_notes = []
if handler:
new_notes = filter_new_notes(entities[:MAX_PUT_BATCH], repo)
written_batch = put_batch(entities[:MAX_PUT_BATCH])
written += written_batch
# If we have new_notes and results did not fail then send notifications.
if new_notes and written_batch:
send_notifications(handler, all_persons, new_notes)
entities[:MAX_PUT_BATCH] = []
# Also store the other updated Persons, but don't count them.
entities = extra_persons.values()
while entities:
put_batch(entities[:MAX_PUT_BATCH])
entities[:MAX_PUT_BATCH] = []
return written, skipped, total
|
apache-2.0
|
Turupawn/website
|
games/tests/test_api.py
|
1
|
3519
|
import json
from django.test import TestCase
from django.urls import reverse
from . import factories
class TestGameApi(TestCase):
def setUp(self):
self.num_games = 10
self.games = []
for n in range(self.num_games):
self.games.append(
factories.GameFactory(name='game_%d' % n, slug='game-%d' % n)
)
def test_can_get_games(self):
game_list_url = reverse('api_game_list')
response = self.client.get(game_list_url)
self.assertEqual(response.status_code, 200)
games = json.loads(response.content)
self.assertEqual(len(games['results']), self.num_games)
def test_can_get_subset_of_games(self):
game_slugs = {'games': ['game-1', 'game-2', 'game-4']}
game_list_url = reverse('api_game_list')
response = self.client.get(game_list_url, data=game_slugs,
extra={"Content-Type": "application/json"})
self.assertEqual(response.status_code, 200)
games = json.loads(response.content)
self.assertEqual(len(games['results']), len(game_slugs['games']))
def test_can_post_subset_of_games(self):
game_slugs = {'games': ['game-1', 'game-2', 'game-4']}
game_list_url = reverse('api_game_list')
response = self.client.post(
game_list_url,
data=json.dumps(game_slugs),
content_type='application/json'
)
self.assertEqual(response.status_code, 200)
games = json.loads(response.content)
self.assertEqual(len(games['results']), len(game_slugs['games']))
def test_can_query_game_details(self):
response = self.client.get(reverse('api_game_detail',
kwargs={'slug': 'game-1'}))
self.assertEqual(response.status_code, 200)
class TestGameLibraryApi(TestCase):
def setUp(self):
game = factories.GameFactory
games = [game() for i in range(5)]
self.library = factories.GameLibraryFactory(games=games)
other_games = [game(name="Metroid"), game(name="Mario")]
self.other_library = factories.GameLibraryFactory(games=other_games)
def test_anonymous_requests_are_rejected(self):
user = self.library.user
library_url = reverse('api_game_library',
kwargs={'username': user.username})
response = self.client.get(library_url)
self.assertEqual(response.status_code, 401)
def test_can_get_library(self):
user = self.library.user
self.client.login(username=user.username, password='password')
library_url = reverse('api_game_library',
kwargs={'username': user.username})
response = self.client.get(library_url)
self.assertEqual(response.status_code, 200)
class TestInstallerApi(TestCase):
def setUp(self):
self.slug = 'strider'
self.game = factories.GameFactory(name=self.slug)
factories.RunnerFactory(name="Linux", slug='linux')
platform = factories.PlatformFactory()
platform.default_installer = {"game": {"rom": "foo"}, "runner": "linux"}
platform.save()
self.game.platforms.add(platform)
def test_can_get_installer_list_for_a_game(self):
self.assertTrue(self.game.platforms.count())
response = self.client.get(reverse('api_game_installer_list', kwargs={'slug': self.slug}))
self.assertEqual(response.status_code, 200)
|
agpl-3.0
|
dsqmoore/0install
|
tests/testrecipe.py
|
9
|
2325
|
#!/usr/bin/env python
from __future__ import with_statement
import unittest
import sys
import os
import tempfile
import shutil
from basetest import BaseTest
sys.path.insert(0, '..')
from zeroinstall import SafeException
from zeroinstall.injector.fetch import StepRunner
from zeroinstall.injector.model import RenameStep
class TestRecipe(BaseTest):
def setUp(self):
super(TestRecipe, self).setUp()
self.basedir = tempfile.mkdtemp()
self.join = lambda *a: os.path.join(self.basedir, *a)
os.makedirs(self.join("dir1"))
os.makedirs(self.join("level1", "level2"))
with open(self.join("level1", "level2", "level3"), 'w') as f:
f.write("level3 contents")
with open(self.join("rootfile"), 'w') as f:
f.write("rootfile contents")
def tearDown(self):
shutil.rmtree(self.basedir)
super(TestRecipe, self).tearDown()
def _apply_step(self, step, **k):
if not 'impl_hint' in k: k['impl_hint'] = None
cls = StepRunner.class_for(step)
runner = cls(step, **k)
# NOTE: runner.prepare() is not performed in these tests,
# as they test local operations only that require no preparation
runner.apply(self.basedir)
def _assert_denies_escape(self, step):
try:
self._apply_step(step)
assert False
except SafeException as e:
if not 'is not within the base directory' in str(e): raise e
def testRenameDisallowsEscapingArchiveDirViaSrcSymlink(self):
os.symlink("/usr/bin", self.join("bin"))
self._assert_denies_escape(RenameStep(source="bin/gpg", dest="gpg"))
def testRenameDisallowsEscapingArchiveDirViaDestSymlink(self):
os.symlink("/tmp", self.join("tmp"))
self._assert_denies_escape(RenameStep(source="rootfile", dest="tmp/surprise"))
def testRenameDisallowsEscapingArchiveDirViaSrcRelativePath(self):
self._assert_denies_escape(RenameStep(source="../somefile", dest="somefile"))
def testRenameDisallowsEscapingArchiveDirViaDestRelativePath(self):
self._assert_denies_escape(RenameStep(source="rootfile", dest="../somefile"))
def testRenameDisallowsEscapingArchiveDirViaSrcAbsolutePath(self):
self._assert_denies_escape(RenameStep(source="/usr/bin/gpg", dest="gpg"))
def testRenameDisallowsEscapingArchiveDirViaDestAbsolutePath(self):
self._assert_denies_escape(RenameStep(source="rootfile", dest="/tmp/rootfile"))
if __name__ == '__main__':
unittest.main()
|
lgpl-2.1
|
unindented/streamcode
|
client/static/jsrepl/extern/python/closured/lib/python2.7/_LWPCookieJar.py
|
267
|
6553
|
"""Load / save to libwww-perl (LWP) format files.
Actually, the format is slightly extended from that used by LWP's
(libwww-perl's) HTTP::Cookies, to avoid losing some RFC 2965 information
not recorded by LWP.
It uses the version string "2.0", though really there isn't an LWP Cookies
2.0 format. This indicates that there is extra information in here
(domain_dot and port_spec) while still being compatible with
libwww-perl, I hope.
"""
import time, re
from cookielib import (_warn_unhandled_exception, FileCookieJar, LoadError,
Cookie, MISSING_FILENAME_TEXT,
join_header_words, split_header_words,
iso2time, time2isoz)
def lwp_cookie_str(cookie):
"""Return string representation of Cookie in an the LWP cookie file format.
Actually, the format is extended a bit -- see module docstring.
"""
h = [(cookie.name, cookie.value),
("path", cookie.path),
("domain", cookie.domain)]
if cookie.port is not None: h.append(("port", cookie.port))
if cookie.path_specified: h.append(("path_spec", None))
if cookie.port_specified: h.append(("port_spec", None))
if cookie.domain_initial_dot: h.append(("domain_dot", None))
if cookie.secure: h.append(("secure", None))
if cookie.expires: h.append(("expires",
time2isoz(float(cookie.expires))))
if cookie.discard: h.append(("discard", None))
if cookie.comment: h.append(("comment", cookie.comment))
if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
keys = cookie._rest.keys()
keys.sort()
for k in keys:
h.append((k, str(cookie._rest[k])))
h.append(("version", str(cookie.version)))
return join_header_words([h])
class LWPCookieJar(FileCookieJar):
"""
    The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
    "Set-Cookie3" is the format used by the libwww-perl library, not known
to be compatible with any browser, but which is easy to read and
doesn't lose information about RFC 2965 cookies.
Additional methods
    as_lwp_str(ignore_discard=True, ignore_expires=True)
"""
def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
"""Return cookies as a string of "\n"-separated "Set-Cookie3" headers.
ignore_discard and ignore_expires: see docstring for FileCookieJar.save
"""
now = time.time()
r = []
for cookie in self:
if not ignore_discard and cookie.discard:
continue
if not ignore_expires and cookie.is_expired(now):
continue
r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
return "\n".join(r+[""])
def save(self, filename=None, ignore_discard=False, ignore_expires=False):
if filename is None:
if self.filename is not None: filename = self.filename
else: raise ValueError(MISSING_FILENAME_TEXT)
f = open(filename, "w")
try:
# There really isn't an LWP Cookies 2.0 format, but this indicates
# that there is extra information in here (domain_dot and
# port_spec) while still being compatible with libwww-perl, I hope.
f.write("#LWP-Cookies-2.0\n")
f.write(self.as_lwp_str(ignore_discard, ignore_expires))
finally:
f.close()
def _really_load(self, f, filename, ignore_discard, ignore_expires):
magic = f.readline()
if not re.search(self.magic_re, magic):
msg = ("%r does not look like a Set-Cookie3 (LWP) format "
"file" % filename)
raise LoadError(msg)
now = time.time()
header = "Set-Cookie3:"
boolean_attrs = ("port_spec", "path_spec", "domain_dot",
"secure", "discard")
value_attrs = ("version",
"port", "path", "domain",
"expires",
"comment", "commenturl")
try:
while 1:
line = f.readline()
if line == "": break
if not line.startswith(header):
continue
line = line[len(header):].strip()
for data in split_header_words([line]):
name, value = data[0]
standard = {}
rest = {}
for k in boolean_attrs:
standard[k] = False
for k, v in data[1:]:
if k is not None:
lc = k.lower()
else:
lc = None
# don't lose case distinction for unknown fields
if (lc in value_attrs) or (lc in boolean_attrs):
k = lc
if k in boolean_attrs:
if v is None: v = True
standard[k] = v
elif k in value_attrs:
standard[k] = v
else:
rest[k] = v
h = standard.get
expires = h("expires")
discard = h("discard")
if expires is not None:
expires = iso2time(expires)
if expires is None:
discard = True
domain = h("domain")
domain_specified = domain.startswith(".")
c = Cookie(h("version"), name, value,
h("port"), h("port_spec"),
domain, domain_specified, h("domain_dot"),
h("path"), h("path_spec"),
h("secure"),
expires,
discard,
h("comment"),
h("commenturl"),
rest)
if not ignore_discard and c.discard:
continue
if not ignore_expires and c.is_expired(now):
continue
self.set_cookie(c)
except IOError:
raise
except Exception:
_warn_unhandled_exception()
raise LoadError("invalid Set-Cookie3 format file %r: %r" %
(filename, line))
|
mit
|
jzoldak/edx-platform
|
lms/djangoapps/branding/__init__.py
|
21
|
2946
|
"""
EdX Branding package.
Provides a way to retrieve "branded" parts of the site.
This module provides functions to retrieve basic branded parts
such as the site visible courses, university name and logo.
"""
from xmodule.modulestore.django import modulestore
from xmodule.course_module import CourseDescriptor
from django.conf import settings
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from openedx.core.djangoapps.content.course_overviews.models import CourseOverview
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
def get_visible_courses(org=None, filter_=None):
"""
Return the set of CourseOverviews that should be visible in this branded
instance.
Arguments:
org (string): Optional parameter that allows case-insensitive
filtering by organization.
filter_ (dict): Optional parameter that allows custom filtering by
fields on the course.
"""
current_site_org = configuration_helpers.get_value('course_org_filter')
if org and current_site_org:
# Return an empty result if the org passed by the caller does not match the designated site org.
courses = CourseOverview.get_all_courses(
org=org,
filter_=filter_,
) if org == current_site_org else []
else:
        # We reach this point when org and current_site_org are not both
        # defined; if both were defined, the first branch of the conditional
        # above would have performed the equality check instead.
target_org = org or current_site_org
courses = CourseOverview.get_all_courses(org=target_org, filter_=filter_)
courses = sorted(courses, key=lambda course: course.number)
    # If the site defines a course org filter, the filtering above suffices.
if current_site_org:
return courses
# See if we have filtered course listings in this domain
filtered_visible_ids = None
    # This is the legacy format; it also handles the dev case, which should not be filtered.
subdomain = configuration_helpers.get_value('subdomain', 'default')
if hasattr(settings, 'COURSE_LISTINGS') and subdomain in settings.COURSE_LISTINGS and not settings.DEBUG:
filtered_visible_ids = frozenset(
[SlashSeparatedCourseKey.from_deprecated_string(c) for c in settings.COURSE_LISTINGS[subdomain]]
)
if filtered_visible_ids:
return [course for course in courses if course.id in filtered_visible_ids]
else:
# Filter out any courses based on current org, to avoid leaking these.
orgs = configuration_helpers.get_all_orgs()
return [course for course in courses if course.location.org not in orgs]
def get_university_for_request():
"""
Return the university name specified for the domain, or None
if no university was specified
"""
return configuration_helpers.get_value('university')
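# Minimal usage sketch (the org value is illustrative):
#   courses = get_visible_courses(org='edX')
#   course_numbers = [course.number for course in courses]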
|
agpl-3.0
|
iychoi/syndicate
|
python/syndicate/ag/fs_driver_common/metadata.py
|
2
|
7583
|
#!/usr/bin/env python
"""
Copyright 2014 The Trustees of Princeton University
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import sys
import time
import threading
def get_current_time():
return int(round(time.time() * 1000))
"""
Interface class to dataset_tracker
"""
class dataset_directory(object):
def __init__(self, path=None,
entries=[],
last_visit_time=0,
handler=None):
self.path = path
self.entries = {}
for entry in entries:
self.entries[entry.name] = entry
if last_visit_time:
self.last_visit_time = last_visit_time
else:
self.last_visit_time = get_current_time()
self.lock = threading.RLock()
self.handler = handler
def __enter__(self):
self._lock()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._unlock()
def _lock(self):
self.lock.acquire()
def _unlock(self):
self.lock.release()
def getEntry(self, name):
with self.lock:
entry = self.entries.get(name)
return entry
def getEntries(self):
with self.lock:
entry = self.entries.values()
return entry
def updateFully(self, entries=[], last_visit_time=0):
with self.lock:
# find removed/added/updated entries
new_entries = {}
for entry in entries:
new_entries[entry.name] = entry
set_prev = set(self.entries.keys())
set_new = set(new_entries.keys())
set_intersection = set_prev & set_new
unchanged_entries = []
updated_entries = []
removed_entries = []
added_entries = []
# check update and unchanged
for key in set_intersection:
e_old = self.entries[key]
e_new = new_entries[key]
if e_old == e_new:
# unchanged
unchanged_entries.append(e_old)
else:
# changed
updated_entries.append(e_new)
# check removed
for key in set_prev:
if key not in set_intersection:
# removed
e_old = self.entries[key]
removed_entries.append(e_old)
# check added
for key in set_new:
if key not in set_intersection:
# added
e_new = new_entries[key]
added_entries.append(e_new)
# apply to existing dictionary
for entry in removed_entries:
del self.entries[entry.name]
for entry in updated_entries:
self.entries[entry.name] = entry
for entry in added_entries:
self.entries[entry.name] = entry
if last_visit_time:
self.last_visit_time = last_visit_time
else:
self.last_visit_time = get_current_time()
if self.handler:
self.handler(updated_entries, added_entries, removed_entries)
def removeAllEntries(self):
with self.lock:
removed_entries = self.entries.values()
self.entries.clear()
if self.handler:
self.handler([], [], removed_entries)
def __repr__(self):
return "<dataset_directory %s %d>" % (self.path, self.last_visit_time)
def __eq__(self, other):
return self.__dict__ == other.__dict__
class dataset_tracker(object):
def __init__(self, root_path="/",
update_event_handler=None, request_for_update_handler=None):
self.root_path = root_path
self.directories = {}
self.lock = threading.RLock()
self.update_event_handler = update_event_handler
self.request_for_update_handler = request_for_update_handler
def __enter__(self):
self._lock()
return self
def __exit__(self, exc_type, exc_value, traceback):
self._unlock()
def _lock(self):
self.lock.acquire()
def _unlock(self):
self.lock.release()
def getRootPath(self):
return self.root_path
def _onRequestUpdate(self, directory):
if self.request_for_update_handler:
self.request_for_update_handler(directory)
def _onDirectoryUpdate(self, updated_entries, added_entries, removed_entries):
# if directory is updated
if removed_entries:
for removed_entry in removed_entries:
if removed_entry.directory:
with self.lock:
e_directory = self.directories.get(removed_entry.path)
if e_directory:
e_directory.removeAllEntries()
        if updated_entries or added_entries or removed_entries:
if self.update_event_handler:
self.update_event_handler(updated_entries, added_entries, removed_entries)
if added_entries:
for added_entry in added_entries:
if added_entry.directory:
self._onRequestUpdate(added_entry)
if updated_entries:
for updated_entry in updated_entries:
if updated_entry.directory:
self._onRequestUpdate(updated_entry)
def updateDirectory(self, path=None, entries=[]):
with self.lock:
e_directory = self.directories.get(path)
if e_directory:
e_directory.updateFully(entries)
else:
self.directories[path] = dataset_directory(path=path, entries=entries, handler=self._onDirectoryUpdate)
self._onDirectoryUpdate([], entries, [])
def getDirectory(self, path):
with self.lock:
directory = self.directories.get(path)
return directory
def _walk(self, directory):
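        # Depth-first traversal: gather entry paths in this directory, then
        # recurse into any subdirectories the tracker already knows about.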
entries = []
if directory:
directory._lock()
dirs = []
for entry in directory.getEntries():
if entry.directory:
dirs.append(entry.path)
entries.append(entry.path)
for path in dirs:
sub_dir = self.directories.get(path)
if sub_dir:
for sub_entry in self._walk(sub_dir):
entries.append(sub_entry)
directory._unlock()
return entries
def walk(self):
entries = []
with self.lock:
root_dir = self.directories.get(self.root_path)
if root_dir:
for entry in self._walk(root_dir):
entries.append(entry)
return entries
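# A minimal usage sketch (paths, entries and handler names are assumptions):
#   tracker = dataset_tracker(root_path="/",
#                             update_event_handler=on_update,           # hypothetical
#                             request_for_update_handler=on_request)    # hypothetical
#   tracker.updateDirectory(path="/", entries=root_entries)             # hypothetical entries
#   print tracker.walk()  # Python 2 print, matching the module above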
|
apache-2.0
|
DemocracyClub/yournextrepresentative
|
ynr/apps/people/tests/test_age.py
|
1
|
1996
|
from datetime import date
from django.test import TestCase
from mock import patch
from people.models import Person
@patch("people.models.date")
class TestAgeCalculation(TestCase):
def test_age_full_obvious(self, mock_date):
mock_date.today.return_value = date(1977, 9, 3)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1976-09-01")
self.assertEqual(p.age, "1")
def test_age_full_early_in_year(self, mock_date):
mock_date.today.return_value = date(1977, 2, 28)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1976-09-01")
self.assertEqual(p.age, "0")
def test_age_month_obvious(self, mock_date):
mock_date.today.return_value = date(1977, 10, 3)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1976-09")
self.assertEqual(p.age, "1")
def test_age_month_early_in_year(self, mock_date):
mock_date.today.return_value = date(1977, 8, 15)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1976-09")
self.assertEqual(p.age, "0")
def test_age_month_ambiguous(self, mock_date):
mock_date.today.return_value = date(1977, 9, 10)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1976-09")
self.assertEqual(p.age, "0 or 1")
def test_age_year_ambiguous(self, mock_date):
mock_date.today.return_value = date(1977, 9, 10)
mock_date.side_effect = lambda *args, **kwargs: date(*args, **kwargs)
p = Person.objects.create(name="Test Person", birth_date="1975")
self.assertEqual(p.age, "1 or 2")
|
agpl-3.0
|
OpenData-NC/open-data-nc
|
opendata/catalog/migrations/0007_auto__del_field_urlimage_url.py
|
2
|
12816
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'UrlImage.url'
db.delete_column(u'catalog_urlimage', 'url_id')
def backwards(self, orm):
# Adding field 'UrlImage.url'
db.add_column(u'catalog_urlimage', 'url',
self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['catalog.Url']),
keep_default=False)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'catalog.category': {
'Meta': {'object_name': 'Category'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '150'})
},
u'catalog.city': {
'Meta': {'object_name': 'City'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'catalog.coordsystem': {
'EPSG_code': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'Meta': {'ordering': "['EPSG_code']", 'object_name': 'CoordSystem'},
'description': ('django.db.models.fields.TextField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalog.county': {
'Meta': {'object_name': 'County'},
'cities': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'counties'", 'symmetrical': 'False', 'to': u"orm['catalog.City']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'catalog.datatype': {
'Meta': {'object_name': 'DataType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'catalog.department': {
'Meta': {'object_name': 'Department'},
'divisions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['catalog.Division']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'state'", 'max_length': '40'})
},
u'catalog.division': {
'Meta': {'object_name': 'Division'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalog.resource': {
'Meta': {'object_name': 'Resource'},
'agency_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'area_of_interest': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'resources'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['catalog.Category']"}),
'cities': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['catalog.City']", 'null': 'True', 'blank': 'True'}),
'contact_email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'contact_phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'contact_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'coord_sys': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['catalog.CoordSystem']", 'null': 'True', 'blank': 'True'}),
'counties': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['catalog.County']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_by'", 'to': u"orm['auth.User']"}),
'csw_anytext': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'csw_mdsource': ('django.db.models.fields.CharField', [], {'default': "'local'", 'max_length': '100'}),
'csw_schema': ('django.db.models.fields.CharField', [], {'default': "'http://www.opengis.net/cat/csw/2.0.2'", 'max_length': '200'}),
'csw_typename': ('django.db.models.fields.CharField', [], {'default': "'csw:Record'", 'max_length': '200'}),
'csw_xml': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'data_formats': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'data_types': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['catalog.DataType']", 'null': 'True', 'blank': 'True'}),
'department': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Department']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'division': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Division']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_published': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'keywords': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '255', 'blank': 'True'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'last_updated_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'updated_by'", 'to': u"orm['auth.User']"}),
'metadata_contact': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'metadata_notes': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'proj_coord_sys': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'rating_score': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'rating_votes': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'blank': 'True'}),
'release_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'time_period': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'update_frequency': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'updates': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.UpdateFrequency']", 'null': 'True', 'blank': 'True'}),
'usage': ('django.db.models.fields.TextField', [], {}),
'wkt_geometry': ('django.db.models.fields.TextField', [], {'blank': 'True'})
},
u'catalog.updatefrequency': {
'Meta': {'ordering': "['update_frequency']", 'object_name': 'UpdateFrequency'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'update_frequency': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'catalog.url': {
'Meta': {'object_name': 'Url'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Resource']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url_label': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'url_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.UrlType']"})
},
u'catalog.urlimage': {
'Meta': {'object_name': 'UrlImage'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'resource': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['catalog.Resource']"}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'source_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'catalog.urltype': {
'Meta': {'ordering': "['url_type']", 'object_name': 'UrlType'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'url_type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['catalog']
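    # Usage sketch (hypothetical invocation; South applies this frozen migration
    # with its standard management command for the app named in complete_apps):
    #   python manage.py migrate catalog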
|
mit
|
daoluan/decode-Django
|
Django-1.5.1/django/contrib/gis/gdal/tests/test_geom.py
|
104
|
20864
|
import json
from binascii import b2a_hex
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
from django.contrib.gis.gdal import (OGRGeometry, OGRGeomType, OGRException,
OGRIndexError, SpatialReference, CoordTransform, GDAL_VERSION)
from django.contrib.gis.geometry.test_data import TestDataMixin
from django.utils.six.moves import xrange
from django.utils import unittest
class OGRGeomTest(unittest.TestCase, TestDataMixin):
"This tests the OGR Geometry."
def test00a_geomtype(self):
"Testing OGRGeomType object."
# OGRGeomType should initialize on all these inputs.
try:
g = OGRGeomType(1)
g = OGRGeomType(7)
g = OGRGeomType('point')
g = OGRGeomType('GeometrycollectioN')
g = OGRGeomType('LINearrING')
g = OGRGeomType('Unknown')
        except Exception:
self.fail('Could not create an OGRGeomType object!')
        # Should raise OGRException on these inputs.
self.assertRaises(OGRException, OGRGeomType, 23)
self.assertRaises(OGRException, OGRGeomType, 'fooD')
self.assertRaises(OGRException, OGRGeomType, 9)
# Equivalence can take strings, ints, and other OGRGeomTypes
self.assertEqual(True, OGRGeomType(1) == OGRGeomType(1))
self.assertEqual(True, OGRGeomType(7) == 'GeometryCollection')
self.assertEqual(True, OGRGeomType('point') == 'POINT')
self.assertEqual(False, OGRGeomType('point') == 2)
self.assertEqual(True, OGRGeomType('unknown') == 0)
self.assertEqual(True, OGRGeomType(6) == 'MULtiPolyGON')
self.assertEqual(False, OGRGeomType(1) != OGRGeomType('point'))
self.assertEqual(True, OGRGeomType('POINT') != OGRGeomType(6))
# Testing the Django field name equivalent property.
self.assertEqual('PointField', OGRGeomType('Point').django)
self.assertEqual('GeometryField', OGRGeomType('Unknown').django)
self.assertEqual(None, OGRGeomType('none').django)
# 'Geometry' initialization implies an unknown geometry type.
gt = OGRGeomType('Geometry')
self.assertEqual(0, gt.num)
self.assertEqual('Unknown', gt.name)
def test00b_geomtype_25d(self):
"Testing OGRGeomType object with 25D types."
wkb25bit = OGRGeomType.wkb25bit
self.assertTrue(OGRGeomType(wkb25bit + 1) == 'Point25D')
self.assertTrue(OGRGeomType('MultiLineString25D') == (5 + wkb25bit))
self.assertEqual('GeometryCollectionField', OGRGeomType('GeometryCollection25D').django)
def test01a_wkt(self):
"Testing WKT output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
self.assertEqual(g.wkt, geom.wkt)
def test01a_ewkt(self):
"Testing EWKT input/output."
for ewkt_val in ('POINT (1 2 3)', 'LINEARRING (0 0,1 1,2 1,0 0)'):
# First with ewkt output when no SRID in EWKT
self.assertEqual(ewkt_val, OGRGeometry(ewkt_val).ewkt)
            # Now test with an SRID specified.
ewkt_val = 'SRID=4326;%s' % ewkt_val
geom = OGRGeometry(ewkt_val)
self.assertEqual(ewkt_val, geom.ewkt)
self.assertEqual(4326, geom.srs.srid)
def test01b_gml(self):
"Testing GML output."
for g in self.geometries.wkt_out:
geom = OGRGeometry(g.wkt)
exp_gml = g.gml
if GDAL_VERSION >= (1, 8):
# In GDAL 1.8, the non-conformant GML tag <gml:GeometryCollection> was
# replaced with <gml:MultiGeometry>.
exp_gml = exp_gml.replace('GeometryCollection', 'MultiGeometry')
self.assertEqual(exp_gml, geom.gml)
def test01c_hex(self):
"Testing HEX input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
self.assertEqual(g.hex.encode(), geom1.hex)
# Constructing w/HEX
geom2 = OGRGeometry(g.hex)
self.assertEqual(geom1, geom2)
def test01d_wkb(self):
"Testing WKB input/output."
for g in self.geometries.hex_wkt:
geom1 = OGRGeometry(g.wkt)
wkb = geom1.wkb
self.assertEqual(b2a_hex(wkb).upper(), g.hex.encode())
# Constructing w/WKB.
geom2 = OGRGeometry(wkb)
self.assertEqual(geom1, geom2)
def test01e_json(self):
"Testing GeoJSON input/output."
for g in self.geometries.json_geoms:
geom = OGRGeometry(g.wkt)
if not hasattr(g, 'not_equal'):
# Loading jsons to prevent decimal differences
self.assertEqual(json.loads(g.json), json.loads(geom.json))
self.assertEqual(json.loads(g.json), json.loads(geom.geojson))
self.assertEqual(OGRGeometry(g.wkt), OGRGeometry(geom.json))
def test02_points(self):
"Testing Point objects."
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.points:
if not hasattr(p, 'z'): # No 3D
pnt = OGRGeometry(p.wkt)
self.assertEqual(1, pnt.geom_type)
self.assertEqual('POINT', pnt.geom_name)
self.assertEqual(p.x, pnt.x)
self.assertEqual(p.y, pnt.y)
self.assertEqual((p.x, p.y), pnt.tuple)
def test03_multipoints(self):
"Testing MultiPoint objects."
for mp in self.geometries.multipoints:
mgeom1 = OGRGeometry(mp.wkt) # First one from WKT
self.assertEqual(4, mgeom1.geom_type)
self.assertEqual('MULTIPOINT', mgeom1.geom_name)
mgeom2 = OGRGeometry('MULTIPOINT') # Creating empty multipoint
mgeom3 = OGRGeometry('MULTIPOINT')
for g in mgeom1:
mgeom2.add(g) # adding each point from the multipoints
mgeom3.add(g.wkt) # should take WKT as well
            self.assertEqual(mgeom1, mgeom2) # they should be equal
self.assertEqual(mgeom1, mgeom3)
self.assertEqual(mp.coords, mgeom2.coords)
self.assertEqual(mp.n_p, mgeom2.point_count)
def test04_linestring(self):
"Testing LineString objects."
prev = OGRGeometry('POINT(0 0)')
for ls in self.geometries.linestrings:
linestr = OGRGeometry(ls.wkt)
self.assertEqual(2, linestr.geom_type)
self.assertEqual('LINESTRING', linestr.geom_name)
self.assertEqual(ls.n_p, linestr.point_count)
self.assertEqual(ls.coords, linestr.tuple)
self.assertEqual(True, linestr == OGRGeometry(ls.wkt))
self.assertEqual(True, linestr != prev)
self.assertRaises(OGRIndexError, linestr.__getitem__, len(linestr))
prev = linestr
# Testing the x, y properties.
x = [tmpx for tmpx, tmpy in ls.coords]
y = [tmpy for tmpx, tmpy in ls.coords]
self.assertEqual(x, linestr.x)
self.assertEqual(y, linestr.y)
def test05_multilinestring(self):
"Testing MultiLineString objects."
prev = OGRGeometry('POINT(0 0)')
for mls in self.geometries.multilinestrings:
mlinestr = OGRGeometry(mls.wkt)
self.assertEqual(5, mlinestr.geom_type)
self.assertEqual('MULTILINESTRING', mlinestr.geom_name)
self.assertEqual(mls.n_p, mlinestr.point_count)
self.assertEqual(mls.coords, mlinestr.tuple)
self.assertEqual(True, mlinestr == OGRGeometry(mls.wkt))
self.assertEqual(True, mlinestr != prev)
prev = mlinestr
for ls in mlinestr:
self.assertEqual(2, ls.geom_type)
self.assertEqual('LINESTRING', ls.geom_name)
self.assertRaises(OGRIndexError, mlinestr.__getitem__, len(mlinestr))
def test06_linearring(self):
"Testing LinearRing objects."
prev = OGRGeometry('POINT(0 0)')
for rr in self.geometries.linearrings:
lr = OGRGeometry(rr.wkt)
#self.assertEqual(101, lr.geom_type.num)
self.assertEqual('LINEARRING', lr.geom_name)
self.assertEqual(rr.n_p, len(lr))
self.assertEqual(True, lr == OGRGeometry(rr.wkt))
self.assertEqual(True, lr != prev)
prev = lr
def test07a_polygons(self):
"Testing Polygon objects."
# Testing `from_bbox` class method
bbox = (-180,-90,180,90)
        p = OGRGeometry.from_bbox(bbox)
self.assertEqual(bbox, p.extent)
prev = OGRGeometry('POINT(0 0)')
for p in self.geometries.polygons:
poly = OGRGeometry(p.wkt)
self.assertEqual(3, poly.geom_type)
self.assertEqual('POLYGON', poly.geom_name)
self.assertEqual(p.n_p, poly.point_count)
self.assertEqual(p.n_i + 1, len(poly))
# Testing area & centroid.
self.assertAlmostEqual(p.area, poly.area, 9)
x, y = poly.centroid.tuple
self.assertAlmostEqual(p.centroid[0], x, 9)
self.assertAlmostEqual(p.centroid[1], y, 9)
# Testing equivalence
self.assertEqual(True, poly == OGRGeometry(p.wkt))
self.assertEqual(True, poly != prev)
if p.ext_ring_cs:
ring = poly[0]
self.assertEqual(p.ext_ring_cs, ring.tuple)
self.assertEqual(p.ext_ring_cs, poly[0].tuple)
self.assertEqual(len(p.ext_ring_cs), ring.point_count)
for r in poly:
self.assertEqual('LINEARRING', r.geom_name)
def test07b_closepolygons(self):
"Testing closing Polygon objects."
# Both rings in this geometry are not closed.
poly = OGRGeometry('POLYGON((0 0, 5 0, 5 5, 0 5), (1 1, 2 1, 2 2, 2 1))')
self.assertEqual(8, poly.point_count)
with self.assertRaises(OGRException):
_ = poly.centroid
poly.close_rings()
self.assertEqual(10, poly.point_count) # Two closing points should've been added
self.assertEqual(OGRGeometry('POINT(2.5 2.5)'), poly.centroid)
def test08_multipolygons(self):
"Testing MultiPolygon objects."
prev = OGRGeometry('POINT(0 0)')
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
self.assertEqual(6, mpoly.geom_type)
self.assertEqual('MULTIPOLYGON', mpoly.geom_name)
if mp.valid:
self.assertEqual(mp.n_p, mpoly.point_count)
self.assertEqual(mp.num_geom, len(mpoly))
self.assertRaises(OGRIndexError, mpoly.__getitem__, len(mpoly))
for p in mpoly:
self.assertEqual('POLYGON', p.geom_name)
self.assertEqual(3, p.geom_type)
self.assertEqual(mpoly.wkt, OGRGeometry(mp.wkt).wkt)
def test09a_srs(self):
"Testing OGR Geometries with Spatial Reference objects."
for mp in self.geometries.multipolygons:
# Creating a geometry w/spatial reference
sr = SpatialReference('WGS84')
mpoly = OGRGeometry(mp.wkt, sr)
self.assertEqual(sr.wkt, mpoly.srs.wkt)
# Ensuring that SRS is propagated to clones.
klone = mpoly.clone()
self.assertEqual(sr.wkt, klone.srs.wkt)
            # Ensuring all child geometries (polygons and their rings)
            # return the assigned spatial reference as well.
for poly in mpoly:
self.assertEqual(sr.wkt, poly.srs.wkt)
for ring in poly:
self.assertEqual(sr.wkt, ring.srs.wkt)
            # Ensuring SRS propagates in topological ops.
a = OGRGeometry(self.geometries.topology_geoms[0].wkt_a, sr)
b = OGRGeometry(self.geometries.topology_geoms[0].wkt_b, sr)
diff = a.difference(b)
union = a.union(b)
self.assertEqual(sr.wkt, diff.srs.wkt)
self.assertEqual(sr.srid, union.srs.srid)
# Instantiating w/an integer SRID
mpoly = OGRGeometry(mp.wkt, 4326)
self.assertEqual(4326, mpoly.srid)
mpoly.srs = SpatialReference(4269)
self.assertEqual(4269, mpoly.srid)
self.assertEqual('NAD83', mpoly.srs.name)
            # Iterating through the multipolygon after the spatial reference
            # has been re-assigned.
for poly in mpoly:
self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)
poly.srs = 32140
for ring in poly:
# Changing each ring in the polygon
self.assertEqual(32140, ring.srs.srid)
self.assertEqual('NAD83 / Texas South Central', ring.srs.name)
ring.srs = str(SpatialReference(4326)) # back to WGS84
self.assertEqual(4326, ring.srs.srid)
# Using the `srid` property.
ring.srid = 4322
self.assertEqual('WGS 72', ring.srs.name)
self.assertEqual(4322, ring.srid)
def test09b_srs_transform(self):
"Testing transform()."
orig = OGRGeometry('POINT (-104.609 38.255)', 4326)
trans = OGRGeometry('POINT (992385.4472045 481455.4944650)', 2774)
        # Using an srid, a SpatialReference object, and a CoordTransform object
        # for transformations.
t1, t2, t3 = orig.clone(), orig.clone(), orig.clone()
t1.transform(trans.srid)
t2.transform(SpatialReference('EPSG:2774'))
ct = CoordTransform(SpatialReference('WGS84'), SpatialReference(2774))
t3.transform(ct)
# Testing use of the `clone` keyword.
k1 = orig.clone()
k2 = k1.transform(trans.srid, clone=True)
self.assertEqual(k1, orig)
self.assertNotEqual(k1, k2)
prec = 3
for p in (t1, t2, t3, k2):
self.assertAlmostEqual(trans.x, p.x, prec)
self.assertAlmostEqual(trans.y, p.y, prec)
def test09c_transform_dim(self):
"Testing coordinate dimension is the same on transformed geometries."
ls_orig = OGRGeometry('LINESTRING(-104.609 38.255)', 4326)
ls_trans = OGRGeometry('LINESTRING(992385.4472045 481455.4944650)', 2774)
prec = 3
ls_orig.transform(ls_trans.srs)
# Making sure the coordinate dimension is still 2D.
self.assertEqual(2, ls_orig.coord_dim)
self.assertAlmostEqual(ls_trans.x[0], ls_orig.x[0], prec)
self.assertAlmostEqual(ls_trans.y[0], ls_orig.y[0], prec)
def test10_difference(self):
"Testing difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.diff_geoms[i].wkt)
d2 = a.difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a - b) # __sub__ is difference operator
a -= b # testing __isub__
self.assertEqual(d1, a)
def test11_intersection(self):
"Testing intersects() and intersection()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
i1 = OGRGeometry(self.geometries.intersect_geoms[i].wkt)
self.assertEqual(True, a.intersects(b))
i2 = a.intersection(b)
self.assertEqual(i1, i2)
self.assertEqual(i1, a & b) # __and__ is intersection operator
a &= b # testing __iand__
self.assertEqual(i1, a)
def test12_symdifference(self):
"Testing sym_difference()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
d1 = OGRGeometry(self.geometries.sdiff_geoms[i].wkt)
d2 = a.sym_difference(b)
self.assertEqual(d1, d2)
self.assertEqual(d1, a ^ b) # __xor__ is symmetric difference operator
a ^= b # testing __ixor__
self.assertEqual(d1, a)
def test13_union(self):
"Testing union()."
for i in xrange(len(self.geometries.topology_geoms)):
a = OGRGeometry(self.geometries.topology_geoms[i].wkt_a)
b = OGRGeometry(self.geometries.topology_geoms[i].wkt_b)
u1 = OGRGeometry(self.geometries.union_geoms[i].wkt)
u2 = a.union(b)
self.assertEqual(u1, u2)
self.assertEqual(u1, a | b) # __or__ is union operator
a |= b # testing __ior__
self.assertEqual(u1, a)
def test14_add(self):
"Testing GeometryCollection.add()."
# Can't insert a Point into a MultiPolygon.
mp = OGRGeometry('MultiPolygon')
pnt = OGRGeometry('POINT(5 23)')
self.assertRaises(OGRException, mp.add, pnt)
        # GeometryCollection.add may take an OGRGeometry (if another collection
        # of the same type, all child geoms will be added individually) or WKT.
for mp in self.geometries.multipolygons:
mpoly = OGRGeometry(mp.wkt)
mp1 = OGRGeometry('MultiPolygon')
mp2 = OGRGeometry('MultiPolygon')
mp3 = OGRGeometry('MultiPolygon')
for poly in mpoly:
mp1.add(poly) # Adding a geometry at a time
mp2.add(poly.wkt) # Adding WKT
mp3.add(mpoly) # Adding a MultiPolygon's entire contents at once.
            for tmp in (mp1, mp2, mp3):
                self.assertEqual(mpoly, tmp)
def test15_extent(self):
"Testing `extent` property."
# The xmin, ymin, xmax, ymax of the MultiPoint should be returned.
mp = OGRGeometry('MULTIPOINT(5 23, 0 0, 10 50)')
self.assertEqual((0.0, 0.0, 10.0, 50.0), mp.extent)
# Testing on the 'real world' Polygon.
poly = OGRGeometry(self.geometries.polygons[3].wkt)
ring = poly.shell
x, y = ring.x, ring.y
xmin, ymin = min(x), min(y)
xmax, ymax = max(x), max(y)
self.assertEqual((xmin, ymin, xmax, ymax), poly.extent)
def test16_25D(self):
"Testing 2.5D geometries."
pnt_25d = OGRGeometry('POINT(1 2 3)')
self.assertEqual('Point25D', pnt_25d.geom_type.name)
self.assertEqual(3.0, pnt_25d.z)
self.assertEqual(3, pnt_25d.coord_dim)
ls_25d = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)')
self.assertEqual('LineString25D', ls_25d.geom_type.name)
self.assertEqual([1.0, 2.0, 3.0], ls_25d.z)
self.assertEqual(3, ls_25d.coord_dim)
def test17_pickle(self):
"Testing pickle support."
g1 = OGRGeometry('LINESTRING(1 1 1,2 2 2,3 3 3)', 'WGS84')
g2 = pickle.loads(pickle.dumps(g1))
self.assertEqual(g1, g2)
self.assertEqual(4326, g2.srs.srid)
self.assertEqual(g1.srs.wkt, g2.srs.wkt)
def test18_ogrgeometry_transform_workaround(self):
"Testing coordinate dimensions on geometries after transformation."
# A bug in GDAL versions prior to 1.7 changes the coordinate
# dimension of a geometry after it has been transformed.
# This test ensures that the bug workarounds employed within
# `OGRGeometry.transform` indeed work.
wkt_2d = "MULTILINESTRING ((0 0,1 1,2 2))"
wkt_3d = "MULTILINESTRING ((0 0 0,1 1 1,2 2 2))"
srid = 4326
# For both the 2D and 3D MultiLineString, ensure _both_ the dimension
# of the collection and the component LineString have the expected
# coordinate dimension after transform.
geom = OGRGeometry(wkt_2d, srid)
geom.transform(srid)
self.assertEqual(2, geom.coord_dim)
self.assertEqual(2, geom[0].coord_dim)
self.assertEqual(wkt_2d, geom.wkt)
geom = OGRGeometry(wkt_3d, srid)
geom.transform(srid)
self.assertEqual(3, geom.coord_dim)
self.assertEqual(3, geom[0].coord_dim)
self.assertEqual(wkt_3d, geom.wkt)
def test19_equivalence_regression(self):
"Testing equivalence methods with non-OGRGeometry instances."
self.assertNotEqual(None, OGRGeometry('POINT(0 0)'))
self.assertEqual(False, OGRGeometry('LINESTRING(0 0, 1 1)') == 3)
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(OGRGeomTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
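# Minimal standalone-run sketch (assumes GDAL is available; simply invokes the
# suite()/run() helpers defined above):
if __name__ == '__main__':
    run()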
|
gpl-2.0
|
aweinstock314/servo
|
tests/wpt/web-platform-tests/webdriver/element_state/visibility_test.py
|
58
|
14831
|
import os
import sys
import unittest
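# Make the webdriver test root (two directories up) importable so base_test resolves.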
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
from webdriver import exceptions
class NaturalNonVisibleElementsTest(base_test.WebDriverBaseTest):
def test_0x0_pixel_element_is_not_visible(self):
self.driver.get(self.webserver.where_is("element_state/res/0x0-pixels.html"))
el = self.driver.find_element_by_css("div")
self.assertFalse(el.is_displayed())
def test_0x0_pixel_text_node_is_visible(self):
self.driver.get(self.webserver.where_is("element_state/res/0x0-pixels-text-node.html"))
el = self.driver.find_element_by_css("p")
self.assertTrue(el.is_displayed())
def test_1x1_pixel_element(self):
self.driver.get(self.webserver.where_is("element_state/res/1x1-pixels.html"))
el = self.driver.find_element_by_css("p")
self.assertTrue(el.is_displayed())
    def test_zero_sized_element_is_shown_if_descendant_has_size(self):
        self.driver.get(self.webserver.where_is("element_state/res/zero-sized-element-with-sizable-decendant.html"))
parent = self.driver.find_element_by_css("#parent")
child = self.driver.find_element_by_css("#child")
self.assertTrue(parent.is_displayed())
self.assertTrue(child.is_displayed())
def test_input_type_hidden_is_never_visible(self):
self.driver.get(self.webserver.where_is("element_state/res/input-type-hidden.html"))
input = self.driver.find_element_by_css("input")
self.assertFalse(input.is_displayed())
def test_input_morphs_into_hidden(self):
self.driver.get(self.webserver.where_is("element_state/res/input-morphs-into-hidden.html"))
input = self.driver.find_element_by_css("input")
self.assertFalse(input.is_displayed())
def test_parent_node_visible_when_all_children_are_absolutely_positioned_and_overflow_is_hidden(self):
pass
def test_parent_of_absolutely_positioned_elements_visible_where_ancestor_overflow_is_hidden(self):
"""When a parent's ancestor hides any overflow, absolutely positioned child elements are
        still visible. The parent container is also considered visible by
        webdriver because it is interactable."""
self.driver.get(self.webserver.where_is("element_state/res/absolute-children-ancestor-hidden-overflow.html"))
children = self.driver.find_elements_by_css(".child")
assert all(child.is_displayed() for child in children)
parent = self.driver.find_element_by_css("#parent")
assert parent.is_displayed()
def test_element_hidden_by_overflow_x_is_not_visible(self):
# TODO(andreastt): This test should probably be split in three. Also it's making two
# assertions.
pages = ["element_state/res/x-hidden-y-hidden.html",
"element_state/res/x-hidden-y-scroll.html",
"element_state/res/x-hidden-y-auto.html"]
for page in pages:
self.driver.get(self.webserver.where_is(page))
right = self.driver.find_element_by_css("#right")
bottom_right = self.driver.find_element_by_css("#bottom-right")
self.assertFalse(right.is_displayed())
self.assertFalse(bottom_right.is_displayed())
def test_element_hidden_by_overflow_y_is_not_visible(self):
# TODO(andreastt): This test should probably be split in three. Also it's making two
# assertions.
pages = ["element_state/res/x-hidden-y-hidden.html",
"element_state/res/x-scroll-y-hidden.html",
"element_state/res/x-auto-y-hidden.html"]
for page in pages:
self.driver.get(self.webserver.where_is(page))
bottom = self.driver.find_element_by_css("#bottom")
bottom_right = self.driver.find_element_by_css("#bottom-right")
self.assertFalse(bottom.is_displayed())
self.assertFalse(bottom_right.is_displayed())
def test_element_scrollable_by_overflow_x_is_visible(self):
pass
def test_element_scrollable_by_overflow_y_is_visible(self):
pass
def test_element_scrollable_by_overflow_x_and_y_is_visible(self):
pass
def test_element_outside_viewport(self):
self.driver.get(self.webserver.where_is("element_state/res/element-outside-viewport.html"))
hidden = self.driver.find_element_by_css("div")
self.assertFalse(hidden.is_displayed())
def test_element_dynamically_moved_outside_viewport(self):
self.driver.get(self.webserver.where_is("element_state/res/element-dynamically-moved-outside-viewport.html"))
hidden = self.driver.find_element_by_css("div")
self.assertFalse(hidden.is_displayed())
def test_element_hidden_by_other_element(self):
self.driver.get(self.webserver.where_is("element_state/res/element-hidden-by-other-element.html"))
overlay = self.driver.find_element_by_css("#overlay")
hidden = self.driver.find_element_by_css("#hidden")
self.assertTrue(overlay.is_displayed())
self.assertFalse(hidden.is_displayed())
def test_element_partially_hidden_by_other_element(self):
self.driver.get(self.webserver.where_is("element_state/res/element-partially-hidden-by-other-element.html"))
partial = self.driver.find_element_by_css("#partial")
self.assertTrue(partial.is_displayed())
def test_element_hidden_by_z_index(self):
self.driver.get(self.webserver.where_is("element_state/res/element-hidden-by-z-index.html"))
overlay = self.driver.find_element_by_css("#overlay")
hidden = self.driver.find_element_by_css("#hidden")
self.assertTrue(overlay.is_displayed())
self.assertFalse(hidden.is_displayed())
def test_element_moved_outside_viewport_by_transform(self):
self.driver.get(self.webserver.where_is("element_state/res/element-moved-outside-viewport-by-transform.html"))
el = self.driver.find_element_by_css("div")
self.assertFalse(el.is_displayed())
def test_element_moved_behind_other_element_by_transform(self):
self.driver.get(self.webserver.where_is("element_state/res/element-moved-behind-other-element-by-transform.html"))
overlay = self.driver.find_element_by_css("#overlay")
hidden = self.driver.find_element_by_css("#hidden")
self.assertTrue(overlay.is_displayed())
self.assertFalse(hidden.is_displayed())
def test_text_with_same_color_as_background(self):
self.driver.get(self.webserver.where_is("element_state/res/text-with-same-color-as-background.html"))
p = self.driver.find_element_by_css("p")
self.assertFalse(p.is_displayed())
def test_text_with_same_color_as_parent_background(self):
self.driver.get(self.webserver.where_is("element_state/res/text-with-same-color-as-parent-background.html"))
p = self.driver.find_element_by_css("p")
self.assertFalse(p.is_displayed())
def test_text_with_matching_color_and_background(self):
self.driver.get(self.webserver.where_is("element_state/res/text-with-matching-color-and-background.html"))
p = self.driver.find_element_by_css("p")
self.assertTrue(p.is_displayed())
def test_element_with_same_color_as_background(self):
self.driver.get(self.webserver.where_is("element_state/res/element-with-same-color-as-background.html"))
el = self.driver.find_element_by_css("div")
self.assertFalse(el.is_displayed())
def test_element_with_same_color_as_parent_background(self):
self.driver.get(self.webserver.where_is("element_state/res/element-with-same-color-as-parent-background.html"))
hidden = self.driver.find_element_by_css("#hidden")
self.assertFalse(hidden.is_displayed())
class BodyElementIsAlwaysDisplayedTest(base_test.WebDriverBaseTest):
def assert_body_is_displayed_on(self, page):
self.driver.get(self.webserver.where_is(page))
body = self.driver.find_element_by_css("body")
assert body.is_displayed()
def test_implicit(self):
self.assert_body_is_displayed_on("element_state/res/body_implicit.html")
def test_empty(self):
self.assert_body_is_displayed_on("element_state/res/body_empty.html")
def test_visibility_hidden(self):
self.assert_body_is_displayed_on("element_state/res/body_visibility_hidden.html")
def test_overflow_hidden(self):
self.assert_body_is_displayed_on("element_state/res/body_overflow_hidden.html")
class DisplayTest(base_test.WebDriverBaseTest):
def test_display_block(self):
self.driver.get(self.webserver.where_is("element_state/res/display-block.html"))
el = self.driver.find_element_by_css("p")
self.assertTrue(el.is_displayed())
def test_display_none(self):
self.driver.get(self.webserver.where_is("element_state/res/display-none.html"))
el = self.driver.find_element_by_css("p")
self.assertFalse(el.is_displayed())
def test_display_none_hides_child_node(self):
self.driver.get(self.webserver.where_is("element_state/res/display-none-child.html"))
parent = self.driver.find_element_by_css("#parent")
child = self.driver.find_element_by_css("#child")
self.assertFalse(parent.is_displayed())
self.assertFalse(child.is_displayed())
def test_display_none_hides_child_node_link(self):
self.driver.get(self.webserver.where_is("element_state/res/display-none-child-link.html"))
child = self.driver.find_element_by_css("#child")
self.assertFalse(child.is_displayed())
def test_display_none_hides_child_node_paragraph(self):
self.driver.get(self.webserver.where_is("element_state/res/display-none-child-paragraph.html"))
child = self.driver.find_element_by_css("#child")
self.assertFalse(child.is_displayed())
    def test_display_none_on_parent_takes_precedence(self):
        self.driver.get(self.webserver.where_is("element_state/res/display-none-parent-presedence.html"))
child = self.driver.find_element_by_css("#child")
self.assertFalse(child.is_displayed())
    def test_display_none_on_parent_takes_precedence_over_visibility_visible(self):
        self.driver.get(self.webserver.where_is("element_state/res/display-none-parent-presedence-visibility.html"))
child = self.driver.find_element_by_css("#child")
self.assertFalse(child.is_displayed())
def test_display_none_hidden_dynamically(self):
self.driver.get(self.webserver.where_is("element_state/res/display-none-dynamic.html"))
hidden = self.driver.find_element_by_css("#hidden")
self.assertFalse(hidden.is_displayed())
class VisibilityTest(base_test.WebDriverBaseTest):
def test_element_state_hidden(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-hidden.html"))
el = self.driver.find_element_by_css("p")
self.assertFalse(el.is_displayed())
def test_element_state_visible(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-visible.html"))
el = self.driver.find_element_by_css("p")
self.assertTrue(el.is_displayed())
def test_visibility_hidden_hides_child_node(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-child.html"))
parent = self.driver.find_element_by_css("#parent")
child = self.driver.find_element_by_css("#child")
self.assertFalse(parent.is_displayed())
self.assertFalse(child.is_displayed())
def test_visibility_hidden_hides_child_node_link(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-child-link.html"))
parent = self.driver.find_element_by_css("#parent")
child = self.driver.find_element_by_css("#child")
self.assertFalse(parent.is_displayed())
self.assertFalse(child.is_displayed())
def test_visibility_hidden_hides_child_node_paragraph(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-child-paragraph.html"))
parent = self.driver.find_element_by_css("#parent")
child = self.driver.find_element_by_css("#child")
self.assertFalse(parent.is_displayed())
self.assertFalse(child.is_displayed())
def test_visibility_hidden_on_child_takes_precedence(self):
self.driver.get(self.webserver.where_is("element_state/res/visibility-child-presedence.html"))
child = self.driver.find_element_by_css("#child")
self.assertTrue(child.is_displayed())
def test_visibility_hidden_on_parent_takes_precedence_over_display_block(self):
pass
def test_visibility_hidden_set_dynamically(self):
pass
def test_should_show_element_not_visible_with_hidden_attribute(self):
self.driver.get(self.webserver.where_is("element_state/res/hidden.html"))
singleHidden = self.driver.find_element_by_css('#singleHidden')
self.assertFalse(singleHidden.is_displayed())
def test_should_show_element_not_visible_when_parent_element_has_hidden_attribute(self):
self.driver.get(self.webserver.where_is("element_state/res/hidden.html"))
child = self.driver.find_element_by_css('#child')
self.assertFalse(child.is_displayed())
class VisibilityInteractionTest(base_test.WebDriverBaseTest):
def test_input_hidden_is_unclickable(self):
self.driver.get(self.webserver.where_is("element_state/res/input-type-hidden-unclickable.html"))
input = self.driver.find_element_by_css("input")
with self.assertRaises(exceptions.ElementNotVisibleException):
input.click()
def test_hidden_input_checkbox_is_untogglable(self):
self.driver.get(self.webserver.where_is("element_state/res/hidden-input-type-checkbox-untogglable.html"))
checkbox = self.driver.find_element_by_css("input")
with self.assertRaises(exceptions.ElementNotVisibleException):
checkbox.click()
def test_typing_in_hidden_input_is_impossible(self):
self.driver.get(self.webserver.where_is("element_state/res/hidden-input-type-text-writing.html"))
textfield = self.driver.find_element_by_css("input")
with self.assertRaises(exceptions.ElementNotVisibleException):
textfield.send_keys("Koha is a popular Indian cheese")
class OpacityTest(base_test.WebDriverBaseTest):
pass
if __name__ == "__main__":
unittest.main()
|
mpl-2.0
|
iFighting/flask
|
docs/flaskext.py
|
2228
|
4875
|
# flasky extensions. flasky pygments style based on tango style
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
class FlaskyStyle(Style):
background_color = "#f8f8f8"
default_style = ""
styles = {
# No corresponding class for the following:
#Text: "", # class: ''
Whitespace: "underline #f8f8f8", # class: 'w'
Error: "#a40000 border:#ef2929", # class: 'err'
        Other: "#000000", # class: 'x'
Comment: "italic #8f5902", # class: 'c'
Comment.Preproc: "noitalic", # class: 'cp'
Keyword: "bold #004461", # class: 'k'
Keyword.Constant: "bold #004461", # class: 'kc'
Keyword.Declaration: "bold #004461", # class: 'kd'
Keyword.Namespace: "bold #004461", # class: 'kn'
Keyword.Pseudo: "bold #004461", # class: 'kp'
Keyword.Reserved: "bold #004461", # class: 'kr'
Keyword.Type: "bold #004461", # class: 'kt'
Operator: "#582800", # class: 'o'
Operator.Word: "bold #004461", # class: 'ow' - like keywords
Punctuation: "bold #000000", # class: 'p'
# because special names such as Name.Class, Name.Function, etc.
# are not recognized as such later in the parsing, we choose them
# to look the same as ordinary variables.
Name: "#000000", # class: 'n'
Name.Attribute: "#c4a000", # class: 'na' - to be revised
Name.Builtin: "#004461", # class: 'nb'
Name.Builtin.Pseudo: "#3465a4", # class: 'bp'
Name.Class: "#000000", # class: 'nc' - to be revised
Name.Constant: "#000000", # class: 'no' - to be revised
Name.Decorator: "#888", # class: 'nd' - to be revised
Name.Entity: "#ce5c00", # class: 'ni'
Name.Exception: "bold #cc0000", # class: 'ne'
Name.Function: "#000000", # class: 'nf'
Name.Property: "#000000", # class: 'py'
Name.Label: "#f57900", # class: 'nl'
Name.Namespace: "#000000", # class: 'nn' - to be revised
Name.Other: "#000000", # class: 'nx'
Name.Tag: "bold #004461", # class: 'nt' - like a keyword
Name.Variable: "#000000", # class: 'nv' - to be revised
Name.Variable.Class: "#000000", # class: 'vc' - to be revised
Name.Variable.Global: "#000000", # class: 'vg' - to be revised
Name.Variable.Instance: "#000000", # class: 'vi' - to be revised
Number: "#990000", # class: 'm'
Literal: "#000000", # class: 'l'
Literal.Date: "#000000", # class: 'ld'
String: "#4e9a06", # class: 's'
String.Backtick: "#4e9a06", # class: 'sb'
String.Char: "#4e9a06", # class: 'sc'
String.Doc: "italic #8f5902", # class: 'sd' - like a comment
String.Double: "#4e9a06", # class: 's2'
String.Escape: "#4e9a06", # class: 'se'
String.Heredoc: "#4e9a06", # class: 'sh'
String.Interpol: "#4e9a06", # class: 'si'
String.Other: "#4e9a06", # class: 'sx'
String.Regex: "#4e9a06", # class: 'sr'
String.Single: "#4e9a06", # class: 's1'
String.Symbol: "#4e9a06", # class: 'ss'
Generic: "#000000", # class: 'g'
Generic.Deleted: "#a40000", # class: 'gd'
Generic.Emph: "italic #000000", # class: 'ge'
Generic.Error: "#ef2929", # class: 'gr'
Generic.Heading: "bold #000080", # class: 'gh'
Generic.Inserted: "#00A000", # class: 'gi'
Generic.Output: "#888", # class: 'go'
Generic.Prompt: "#745334", # class: 'gp'
Generic.Strong: "bold #000000", # class: 'gs'
Generic.Subheading: "bold #800080", # class: 'gu'
Generic.Traceback: "bold #a40000", # class: 'gt'
}
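# Usage sketch: a Sphinx conf.py that can import this module would reference
# the style by dotted path (assumes this file is importable as "flaskext"):
#   pygments_style = 'flaskext.FlaskyStyle'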
|
bsd-3-clause
|
baoboa/pizza-lammps
|
src/DEFAULTS.py
|
4
|
3366
|
# Pizza.py toolkit, www.cs.sandia.gov/~sjplimp/pizza.html
# Steve Plimpton, [email protected], Sandia National Laboratories
#
# Copyright (2005) Sandia Corporation. Under the terms of Contract
# DE-AC04-94AL85000 with Sandia Corporation, the U.S. Government retains
# certain rights in this software. This software is distributed under
# the GNU General Public License.
# --------------
# --------------
# 3 variables used by Pizza.py
# to use TOOLS or SCRIPTS, edit and uncomment the line
# to use EXCLUDE, add to the existing list
# TOOLS = list of extra directories that contain Pizza.py tools
# Pizza.py will load all *.py files as tools from TOOLS dirs, then pizza/src
# this ordering means your tool can override a Pizza.py tool of the same name
# SCRIPTS = list of extra directories that contain Pizza.py scripts
# Pizza.py will look in working dir, SCRIPTS dirs, then pizza/scripts
# this ordering means your script can override a Pizza.py script of the same name
# EXCLUDE = Python files to NOT load as tools when Pizza.py starts
# typically done for auxiliary Python files that are not tools
# any non-tool Python files from your TOOLS dirs should be added to list
#PIZZA_TOOLS = ["~/mystuff/new_pizza_tools"]
#PIZZA_SCRIPTS = ["~/mystuff/new_pizza_scripts"]
PIZZA_EXCLUDE = ["pizza", "DEFAULTS", "vizinfo"]
# --------------
# --------------
# Pathname for programs executed by various Pizza.py tools
# if you don't use a tool, its settings can be ignored
# the default values are program names with no path
# to use a default value, the executable must therefore be in your path
# to change a default, uncomment and edit the PIZZA variable line
# --------------
# ImageMagick programs to manipulate image files
# DISPLAY = program to view GIF, PNG, SVG files
# tools that use it: rasmol, raster, svg
# CONVERT = program to convert one image format to another
# MONTAGE = program to stitch 2 images together
# tools that use it: image
#PIZZA_DISPLAY = "/usr/bin/display"
#PIZZA_CONVERT = "/usr/bin/convert"
#PIZZA_MONTAGE = "/usr/bin/montage"
# --------------
# GNUPLOT = the GnuPlot plotting package
# GNUTERM = terminal setting used by GnuPlot
# tools that use it: gnu
#PIZZA_GNUPLOT = "gnuplot"
#PIZZA_GNUTERM = "x11"
#PIZZA_GNUTERM = "aqua" # for Macs with Aquaterm installed
# --------------
# GUNZIP = program to uncompress gzipped files
# tools that use it: data dump log
#PIZZA_GUNZIP = "gunzip"
# --------------
# LABEL3D = program to put a label on a Raster3D image
# RENDER = the Raster3D visualization rendering engine
# tools that use it: raster
#PIZZA_LABEL3D = "label3d"
#PIZZA_RENDER = "render"
# --------------
# MATLAB = the MatLab numerical analysis and plotting package
# tools that use it: matlab
#PIZZA_MATLAB = "matlab -nosplash -nodesktop -nojvm"
# --------------
# RASMOL = the RasMol visualization package
# tools that use it: rasmol
#PIZZA_RASMOL = "rasmol"
# --------------
# VMD = the VMD visualization package
# tools that use it: vmd
#PIZZA_VMDNAME = "vmd" # good settings for a Linux box
#PIZZA_VMDDIR = "/usr/local/lib/vmd"
#PIZZA_VMDDEV = "win"
#PIZZA_VMDARCH = "LINUX"
#PIZZA_VMDNAME = "vmd" # good settings for a Mac
#PIZZA_VMDDIR = "/Applications/VMD\ 1.8.7.app/Contents/vmd"
#PIZZA_VMDDEV = "win"
#PIZZA_VMDARCH = "MACOSXX86"
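# --------------
# Example of overriding a default (hypothetical path; adjust for your system)
#PIZZA_DISPLAY = "/opt/ImageMagick/bin/display"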
|
gpl-2.0
|
SachaMPS/django-cms
|
cms/page_rendering.py
|
1
|
2562
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.urlresolvers import resolve, Resolver404
from django.http import Http404
from django.template import RequestContext
from django.template.response import TemplateResponse
from cms import __version__
from cms.cache.page import set_page_cache
from cms.models import Page
from cms.utils import get_template_from_request
def render_page(request, page, current_language, slug):
"""
Renders a page
"""
template_name = get_template_from_request(request, page, no_current_page=True)
# fill the context
context = RequestContext(request)
context['lang'] = current_language
context['current_page'] = page
context['has_change_permissions'] = page.has_change_permission(request)
context['has_view_permissions'] = page.has_view_permission(request)
if not context['has_view_permissions']:
return _handle_no_page(request, slug)
response = TemplateResponse(request, template_name, context)
response.add_post_render_callback(set_page_cache)
# Add headers for X Frame Options - this really should be changed upon moving to class based views
xframe_options = page.get_xframe_options()
# xframe_options can be None if there's no xframe information on the page
# (eg. a top-level page which has xframe options set to "inherit")
if xframe_options == Page.X_FRAME_OPTIONS_INHERIT or xframe_options is None:
# This is when we defer to django's own clickjacking handling
return response
    # We want to prevent Django from setting this in its middleware
response.xframe_options_exempt = True
if xframe_options == Page.X_FRAME_OPTIONS_ALLOW:
        # Do nothing: ALLOW means no header is set.
return response
elif xframe_options == Page.X_FRAME_OPTIONS_SAMEORIGIN:
response['X-Frame-Options'] = 'SAMEORIGIN'
elif xframe_options == Page.X_FRAME_OPTIONS_DENY:
response['X-Frame-Options'] = 'DENY'
return response
def _handle_no_page(request, slug):
context = RequestContext(request)
context['cms_version'] = __version__
if not slug and settings.DEBUG:
return TemplateResponse(request, "cms/welcome.html", context)
try:
        # add a $ to the end of the url so it no longer matches the cms catch-all
resolve('%s$' % request.path)
except Resolver404 as e:
# raise a django http 404 page
exc = Http404(dict(path=request.path, tried=e.args[0]['tried']))
raise exc
raise Http404('CMS Page not found: %s' % request.path)
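# Usage sketch (illustrative only; assumes a resolved Page and request from a
# CMS view, with arguments as in render_page above):
#   response = render_page(request, page, current_language='en', slug='home')
#   response['X-Frame-Options']  # 'SAMEORIGIN' or 'DENY' when set on the page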
|
bsd-3-clause
|
rclmenezes/sqlalchemy
|
test/orm/test_eager_relations.py
|
1
|
109760
|
"""tests of joined-eager loaded attributes"""
from sqlalchemy.testing import eq_, is_, is_not_
import sqlalchemy as sa
from sqlalchemy import testing
from sqlalchemy.orm import joinedload, deferred, undefer, \
joinedload_all, backref, eagerload, Session, immediateload
from sqlalchemy import Integer, String, Date, ForeignKey, and_, select, \
func
from sqlalchemy.testing.schema import Table, Column
from sqlalchemy.orm import mapper, relationship, create_session, \
lazyload, aliased, column_property
from sqlalchemy.sql import operators
from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy.testing.assertsql import CompiledSQL
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.util import OrderedDict as odict
import datetime
class EagerTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
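    # Fixture rows are inserted once for the whole class; None disables per-test deletes.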
run_inserts = 'once'
run_deletes = None
def test_basic(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=Address.id)
})
sess = create_session()
q = sess.query(User)
eq_([User(id=7, addresses=[Address(id=1, email_address='[email protected]')])],
q.filter(User.id==7).all())
eq_(self.static.user_address_result, q.order_by(User.id).all())
def test_late_compile(self):
User, Address, addresses, users = (self.classes.User,
self.classes.Address,
self.tables.addresses,
self.tables.users)
m = mapper(User, users)
sess = create_session()
sess.query(User).all()
m.add_property("addresses", relationship(mapper(Address, addresses)))
sess.expunge_all()
def go():
eq_(
[User(id=7, addresses=[Address(id=1, email_address='[email protected]')])],
sess.query(User).options(joinedload('addresses')).filter(User.id==7).all()
)
self.assert_sql_count(testing.db, go, 1)
def test_no_orphan(self):
"""An eagerly loaded child object is not marked as an orphan"""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'addresses':relationship(Address, cascade="all,delete-orphan", lazy='joined')
})
mapper(Address, addresses)
sess = create_session()
user = sess.query(User).get(7)
assert getattr(User, 'addresses').\
hasparent(sa.orm.attributes.instance_state(user.addresses[0]), optimistic=True)
assert not sa.orm.class_mapper(Address).\
_is_orphan(sa.orm.attributes.instance_state(user.addresses[0]))
def test_orderby(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(mapper(Address, addresses),
lazy='joined', order_by=addresses.c.email_address),
})
q = create_session().query(User)
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=3, email_address='[email protected]'),
Address(id=4, email_address='[email protected]'),
Address(id=2, email_address='[email protected]')
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], q.order_by(User.id).all())
def test_orderby_multi(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties = {
'addresses':relationship(mapper(Address, addresses),
lazy='joined',
order_by=[addresses.c.email_address, addresses.c.id]),
})
q = create_session().query(User)
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=3, email_address='[email protected]'),
Address(id=4, email_address='[email protected]'),
Address(id=2, email_address='[email protected]')
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], q.order_by(User.id).all())
def test_orderby_related(self):
"""A regular mapper select on a single table can
order by a relationship to a second table"""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
))
q = create_session().query(User)
l = q.filter(User.id==Address.user_id).order_by(Address.email_address).all()
eq_([
User(id=8, addresses=[
Address(id=2, email_address='[email protected]'),
Address(id=3, email_address='[email protected]'),
Address(id=4, email_address='[email protected]'),
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=7, addresses=[
Address(id=1)
]),
], l)
def test_orderby_desc(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='joined',
order_by=[sa.desc(addresses.c.email_address)]),
))
sess = create_session()
eq_([
User(id=7, addresses=[
Address(id=1)
]),
User(id=8, addresses=[
Address(id=2, email_address='[email protected]'),
Address(id=4, email_address='[email protected]'),
Address(id=3, email_address='[email protected]'),
]),
User(id=9, addresses=[
Address(id=5)
]),
User(id=10, addresses=[])
], sess.query(User).order_by(User.id).all())
def test_deferred_fk_col(self):
users, Dingaling, User, dingalings, Address, addresses = (self.tables.users,
self.classes.Dingaling,
self.classes.User,
self.tables.dingalings,
self.classes.Address,
self.tables.addresses)
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
'user':relationship(User, lazy='joined')
})
mapper(User, users)
sess = create_session()
for q in [
sess.query(Address).filter(Address.id.in_([1, 4, 5])).order_by(Address.id),
sess.query(Address).filter(Address.id.in_([1, 4, 5])).order_by(Address.id).limit(3)
]:
sess.expunge_all()
eq_(q.all(),
[Address(id=1, user=User(id=7)),
Address(id=4, user=User(id=8)),
Address(id=5, user=User(id=9))]
)
sess.expunge_all()
a = sess.query(Address).filter(Address.id==1).all()[0]
def go():
eq_(a.user_id, 7)
# assert that the eager loader added 'user_id' to the row and deferred
# loading of that col was disabled
self.assert_sql_count(testing.db, go, 0)
sess.expunge_all()
a = sess.query(Address).filter(Address.id==1).first()
def go():
eq_(a.user_id, 7)
# assert that the eager loader added 'user_id' to the row and deferred
# loading of that col was disabled
self.assert_sql_count(testing.db, go, 0)
# do the mapping in reverse
# (we would have just used an "addresses" backref but the test
# fixtures then require the whole backref to be set up, lazy loaders
# trigger, etc.)
sa.orm.clear_mappers()
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
})
mapper(User, users, properties={
'addresses':relationship(Address, lazy='joined')})
for q in [
sess.query(User).filter(User.id==7),
sess.query(User).filter(User.id==7).limit(1)
]:
sess.expunge_all()
eq_(q.all(),
[User(id=7, addresses=[Address(id=1)])]
)
sess.expunge_all()
u = sess.query(User).get(7)
def go():
eq_(u.addresses[0].user_id, 7)
            # assert that the eager loader didn't have to affect 'user_id' here
            # and that it's still deferred
self.assert_sql_count(testing.db, go, 1)
sa.orm.clear_mappers()
mapper(User, users, properties={
'addresses':relationship(Address, lazy='joined',
order_by=addresses.c.id)})
mapper(Address, addresses, properties={
'user_id':deferred(addresses.c.user_id),
'dingalings':relationship(Dingaling, lazy='joined')})
mapper(Dingaling, dingalings, properties={
'address_id':deferred(dingalings.c.address_id)})
sess.expunge_all()
def go():
u = sess.query(User).get(8)
eq_(User(id=8,
addresses=[Address(id=2, dingalings=[Dingaling(id=1)]),
Address(id=3),
Address(id=4)]),
u)
self.assert_sql_count(testing.db, go, 1)
def test_options_pathing(self):
users, Keyword, orders, items, order_items, Order, Item, User, keywords, item_keywords = (self.tables.users,
self.classes.Keyword,
self.tables.orders,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.keywords,
self.tables.item_keywords)
mapper(User, users, properties={
'orders':relationship(Order, order_by=orders.c.id), # o2m, m2o
})
mapper(Order, orders, properties={
'items':relationship(Item,
secondary=order_items, order_by=items.c.id), #m2m
})
mapper(Item, items, properties={
'keywords':relationship(Keyword,
secondary=item_keywords,
order_by=keywords.c.id) #m2m
})
mapper(Keyword, keywords)
for opt, count in [
((
joinedload(User.orders, Order.items),
), 10),
((joinedload("orders.items"), ), 10),
((
joinedload(User.orders, ),
joinedload(User.orders, Order.items),
joinedload(User.orders, Order.items, Item.keywords),
), 1),
((
joinedload(User.orders, Order.items, Item.keywords),
), 10),
((
joinedload(User.orders, Order.items),
joinedload(User.orders, Order.items, Item.keywords),
), 5),
]:
sess = create_session()
def go():
eq_(
sess.query(User).options(*opt).order_by(User.id).all(),
self.static.user_item_keyword_result
)
self.assert_sql_count(testing.db, go, count)
def test_disable_dynamic(self):
"""test no joined option on a dynamic."""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users, properties={
'addresses':relationship(Address, lazy="dynamic")
})
mapper(Address, addresses)
sess = create_session()
assert_raises_message(
sa.exc.InvalidRequestError,
"User.addresses' does not support object population - eager loading cannot be applied.",
sess.query(User).options(joinedload(User.addresses)).first,
)
def test_many_to_many(self):
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
keywords = relationship(Keyword, secondary=item_keywords,
lazy='joined', order_by=keywords.c.id)))
q = create_session().query(Item).order_by(Item.id)
def go():
eq_(self.static.item_keyword_result, q.all())
self.assert_sql_count(testing.db, go, 1)
def go():
eq_(self.static.item_keyword_result[0:2],
q.join('keywords').filter(Keyword.name == 'red').all())
self.assert_sql_count(testing.db, go, 1)
def go():
eq_(self.static.item_keyword_result[0:2],
(q.join('keywords', aliased=True).
filter(Keyword.name == 'red')).all())
self.assert_sql_count(testing.db, go, 1)
def test_eager_option(self):
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
keywords = relationship(Keyword, secondary=item_keywords, lazy='select',
order_by=keywords.c.id)))
q = create_session().query(Item)
def go():
eq_(self.static.item_keyword_result[0:2],
(q.options(joinedload('keywords')).
join('keywords').filter(keywords.c.name == 'red')).order_by(Item.id).all())
self.assert_sql_count(testing.db, go, 1)
def test_cyclical(self):
"""A circular eager relationship breaks the cycle with a lazy loader"""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='joined',
backref=sa.orm.backref('user', lazy='joined'),
order_by=Address.id)
))
eq_(sa.orm.class_mapper(User).get_property('addresses').lazy, 'joined')
eq_(sa.orm.class_mapper(Address).get_property('user').lazy, 'joined')
sess = create_session()
eq_(self.static.user_address_result, sess.query(User).order_by(User.id).all())
def test_double(self):
"""Eager loading with two relationships simultaneously,
from the same table, using aliases."""
users, orders, User, Address, Order, addresses = (self.tables.users,
self.tables.orders,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.addresses)
openorders = sa.alias(orders, 'openorders')
closedorders = sa.alias(orders, 'closedorders')
mapper(Address, addresses)
mapper(Order, orders)
open_mapper = mapper(Order, openorders, non_primary=True)
closed_mapper = mapper(Order, closedorders, non_primary=True)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
open_orders = relationship(
open_mapper,
primaryjoin=sa.and_(openorders.c.isopen == 1,
users.c.id==openorders.c.user_id),
lazy='joined', order_by=openorders.c.id),
closed_orders = relationship(
closed_mapper,
primaryjoin=sa.and_(closedorders.c.isopen == 0,
users.c.id==closedorders.c.user_id),
lazy='joined', order_by=closedorders.c.id)))
q = create_session().query(User).order_by(User.id)
def go():
eq_([
User(
id=7,
addresses=[Address(id=1)],
open_orders = [Order(id=3)],
closed_orders = [Order(id=1), Order(id=5)]
),
User(
id=8,
addresses=[Address(id=2), Address(id=3), Address(id=4)],
open_orders = [],
closed_orders = []
),
User(
id=9,
addresses=[Address(id=5)],
open_orders = [Order(id=4)],
closed_orders = [Order(id=2)]
),
User(id=10)
], q.all())
self.assert_sql_count(testing.db, go, 1)
def test_double_same_mappers(self):
"""Eager loading with two relationships simulatneously,
from the same table, using aliases."""
addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.users)
mapper(Address, addresses)
mapper(Order, orders, properties={
'items': relationship(Item, secondary=order_items, lazy='joined',
order_by=items.c.id)})
mapper(Item, items)
mapper(User, users, properties=dict(
addresses=relationship(Address, lazy='joined', order_by=addresses.c.id),
open_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 1,
users.c.id==orders.c.user_id),
lazy='joined', order_by=orders.c.id),
closed_orders=relationship(
Order,
primaryjoin=sa.and_(orders.c.isopen == 0,
users.c.id==orders.c.user_id),
lazy='joined', order_by=orders.c.id)))
q = create_session().query(User).order_by(User.id)
def go():
eq_([
User(id=7,
addresses=[
Address(id=1)],
open_orders=[Order(id=3,
items=[
Item(id=3),
Item(id=4),
Item(id=5)])],
closed_orders=[Order(id=1,
items=[
Item(id=1),
Item(id=2),
Item(id=3)]),
Order(id=5,
items=[
Item(id=5)])]),
User(id=8,
addresses=[
Address(id=2),
Address(id=3),
Address(id=4)],
open_orders = [],
closed_orders = []),
User(id=9,
addresses=[
Address(id=5)],
open_orders=[
Order(id=4,
items=[
Item(id=1),
Item(id=5)])],
closed_orders=[
Order(id=2,
items=[
Item(id=1),
Item(id=2),
Item(id=3)])]),
User(id=10)
], q.all())
self.assert_sql_count(testing.db, go, 1)
def test_no_false_hits(self):
"""Eager loaders don't interpret main table columns as
part of their eager load."""
addresses, orders, User, Address, Order, users = (self.tables.addresses,
self.tables.orders,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.users)
mapper(User, users, properties={
'addresses':relationship(Address, lazy='joined'),
'orders':relationship(Order, lazy='joined')
})
mapper(Address, addresses)
mapper(Order, orders)
allusers = create_session().query(User).all()
# using a textual select, the columns will be 'id' and 'name'. the
# eager loaders have aliases which should not hit on those columns,
# they should be required to locate only their aliased/fully table
# qualified column name.
noeagers = create_session().query(User).\
from_statement("select * from users").all()
assert 'orders' not in noeagers[0].__dict__
assert 'addresses' not in noeagers[0].__dict__
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit(self):
"""Limit operations combined with lazy-load relationships."""
users, items, order_items, orders, Item, User, Address, Order, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.addresses)
mapper(Item, items)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, lazy='joined',
order_by=items.c.id)
})
mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id),
'orders':relationship(Order, lazy='select', order_by=orders.c.id)
})
sess = create_session()
q = sess.query(User)
l = q.order_by(User.id).limit(2).offset(1).all()
eq_(self.static.user_all_result[1:3], l)
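# why this works in one round trip: with LIMIT/OFFSET plus an eagerly
# joined collection, the limit must apply to *user* rows, not to the
# joined rows (which are multiplied per address).  the ORM therefore
# wraps the limited SELECT in a subquery and joins the eager table to
# that wrapper, approximately:
#
#     SELECT ... FROM
#         (SELECT users.id ..., users.name ... FROM users
#          ORDER BY users.id LIMIT 2 OFFSET 1) AS anon_1
#     LEFT OUTER JOIN addresses AS addresses_1
#         ON anon_1.users_id = addresses_1.user_id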
def test_distinct(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
# this is an involved 3x union of the users table to get a lot of rows.
# then see if the "distinct" works its way out. you actually get the same
# result with or without the distinct, just via fewer or more rows.
u2 = users.alias('u2')
s = sa.union_all(u2.select(use_labels=True), u2.select(use_labels=True), u2.select(use_labels=True)).alias('u')
mapper(User, users, properties={
'addresses':relationship(mapper(Address, addresses), lazy='joined', order_by=addresses.c.id),
})
sess = create_session()
q = sess.query(User)
def go():
l = q.filter(s.c.u2_id==User.id).distinct().order_by(User.id).all()
eq_(self.static.user_address_result, l)
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit_2(self):
keywords, items, item_keywords, Keyword, Item = (self.tables.keywords,
self.tables.items,
self.tables.item_keywords,
self.classes.Keyword,
self.classes.Item)
mapper(Keyword, keywords)
mapper(Item, items, properties = dict(
keywords = relationship(Keyword, secondary=item_keywords, lazy='joined', order_by=[keywords.c.id]),
))
sess = create_session()
q = sess.query(Item)
l = q.filter((Item.description=='item 2') |
(Item.description=='item 5') |
(Item.description=='item 3')).\
order_by(Item.id).limit(2).all()
eq_(self.static.item_keyword_result[1:3], l)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_limit_3(self):
"""test that the ORDER BY is propagated from the inner
select to the outer select, when using the
'wrapped' select statement resulting from the combination of
eager loading and limit/offset clauses."""
addresses, items, order_items, orders, Item, User, Address, Order, users = (self.tables.addresses,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.classes.Order,
self.tables.users)
mapper(Item, items)
mapper(Order, orders, properties = dict(
items = relationship(Item, secondary=order_items, lazy='joined')
))
mapper(Address, addresses)
mapper(User, users, properties = dict(
addresses = relationship(Address, lazy='joined', order_by=addresses.c.id),
orders = relationship(Order, lazy='joined', order_by=orders.c.id),
))
sess = create_session()
q = sess.query(User)
if not testing.against('maxdb', 'mssql'):
l = q.join('orders').order_by(Order.user_id.desc()).limit(2).offset(1)
eq_([
User(id=9,
orders=[Order(id=2), Order(id=4)],
addresses=[Address(id=5)]
),
User(id=7,
orders=[Order(id=1), Order(id=3), Order(id=5)],
addresses=[Address(id=1)]
)
], l.all())
l = q.join('addresses').order_by(Address.email_address.desc()).limit(1).offset(0)
eq_([
User(id=7,
orders=[Order(id=1), Order(id=3), Order(id=5)],
addresses=[Address(id=1)]
)
], l.all())
def test_limit_4(self):
User, Order, addresses, users, orders = (self.classes.User,
self.classes.Order,
self.tables.addresses,
self.tables.users,
self.tables.orders)
# tests the LIMIT/OFFSET aliasing on a mapper
# against a select. original issue from ticket #904
sel = sa.select([users, addresses.c.email_address],
users.c.id==addresses.c.user_id).alias('useralias')
mapper(User, sel, properties={
'orders':relationship(Order, primaryjoin=sel.c.id==orders.c.user_id,
lazy='joined', order_by=orders.c.id)
})
mapper(Order, orders)
sess = create_session()
eq_(sess.query(User).first(),
User(name=u'jack',orders=[
Order(address_id=1,description=u'order 1',isopen=0,user_id=7,id=1),
Order(address_id=1,description=u'order 3',isopen=1,user_id=7,id=3),
Order(address_id=None,description=u'order 5',isopen=0,user_id=7,id=5)],
email_address=u'[email protected]',id=7)
)
def test_useget_cancels_eager(self):
"""test that a one to many lazyload cancels the unnecessary
eager many-to-one join on the other side."""
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(User, users)
mapper(Address, addresses, properties={
'user':relationship(User, lazy='joined', backref='addresses')
})
sess = create_session()
u1 = sess.query(User).filter(User.id==8).one()
def go():
eq_(u1.addresses[0].user, u1)
self.assert_sql_execution(testing.db, go,
CompiledSQL(
"SELECT addresses.id AS addresses_id, addresses.user_id AS "
"addresses_user_id, addresses.email_address AS "
"addresses_email_address FROM addresses WHERE :param_1 = "
"addresses.user_id",
{'param_1': 8})
)
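# rationale for the compiled statement asserted above: Address.user is
# configured lazy='joined', but the lazy load of u1.addresses knows its
# rows will point back to u1 via the backref, so the many-to-one side
# is satisfied from the identity map and the otherwise-configured JOIN
# to users is dropped -- the SELECT touches only the addresses table,
# and afterwards u1.addresses[0].user is u1 without any further SQL.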
def test_manytoone_limit(self):
"""test that the subquery wrapping only occurs with
limit/offset and m2m or o2m joins present."""
users, items, order_items, Order, Item, User, Address, orders, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.tables.orders,
self.tables.addresses)
mapper(User, users, properties=odict(
orders=relationship(Order, backref='user')
))
mapper(Order, orders, properties=odict([
('items', relationship(Item, secondary=order_items, backref='orders')),
('address', relationship(Address))
]))
mapper(Address, addresses)
mapper(Item, items)
sess = create_session()
self.assert_compile(
sess.query(User).options(joinedload(User.orders)).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"orders_1.id AS orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN orders AS orders_1 ON anon_1.users_id = orders_1.user_id",
{'param_1':10},
use_default_dialect=True
)
self.assert_compile(
sess.query(Order).options(joinedload(Order.user)).limit(10),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS orders_isopen, "
"users_1.id AS users_1_id, users_1.name AS users_1_name FROM orders LEFT OUTER JOIN users AS "
"users_1 ON users_1.id = orders.user_id LIMIT :param_1",
{'param_1':10},
use_default_dialect=True
)
self.assert_compile(
sess.query(Order).options(joinedload(Order.user, innerjoin=True)).limit(10),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS orders_isopen, "
"users_1.id AS users_1_id, users_1.name AS users_1_name FROM orders JOIN users AS "
"users_1 ON users_1.id = orders.user_id LIMIT :param_1",
{'param_1':10},
use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload_all("orders.address")).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen FROM "
"(SELECT users.id AS users_id, users.name AS users_name FROM users LIMIT :param_1) AS anon_1 "
"LEFT OUTER JOIN orders AS orders_1 ON anon_1.users_id = orders_1.user_id LEFT OUTER JOIN "
"addresses AS addresses_1 ON addresses_1.id = orders_1.address_id",
{'param_1':10},
use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload_all("orders.items"), joinedload("orders.address")),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS items_1_id, "
"items_1.description AS items_1_description, addresses_1.id AS addresses_1_id, "
"addresses_1.user_id AS addresses_1_user_id, addresses_1.email_address AS "
"addresses_1_email_address, orders_1.id AS orders_1_id, orders_1.user_id AS "
"orders_1_user_id, orders_1.address_id AS orders_1_address_id, orders_1.description "
"AS orders_1_description, orders_1.isopen AS orders_1_isopen FROM users LEFT OUTER JOIN "
"orders AS orders_1 ON users.id = orders_1.user_id LEFT OUTER JOIN order_items AS "
"order_items_1 ON orders_1.id = order_items_1.order_id LEFT OUTER JOIN items AS "
"items_1 ON items_1.id = order_items_1.item_id LEFT OUTER JOIN addresses AS "
"addresses_1 ON addresses_1.id = orders_1.address_id"
,use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload("orders"), joinedload("orders.address", innerjoin=True)).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN orders AS orders_1 ON anon_1.users_id = "
"orders_1.user_id LEFT OUTER JOIN addresses AS addresses_1 ON addresses_1.id = orders_1.address_id",
{'param_1':10},
use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload("orders", innerjoin=True),
joinedload("orders.address", innerjoin=True)).limit(10),
"SELECT anon_1.users_id AS anon_1_users_id, anon_1.users_name AS anon_1_users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM (SELECT users.id AS users_id, users.name AS users_name "
"FROM users "
"LIMIT :param_1) AS anon_1 JOIN orders AS orders_1 ON anon_1.users_id = "
"orders_1.user_id JOIN addresses AS addresses_1 ON addresses_1.id = orders_1.address_id",
{'param_1':10},
use_default_dialect=True
)
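# summarizing the compiled forms above: eager loads that can multiply
# parent rows (o2m/m2m collections) force the LIMITed query into an
# anon_1 subquery, while many-to-one joins cannot multiply rows and
# are appended directly, leaving "LIMIT :param_1" at the top level.
# mixing the two ("orders" plus "orders.address") wraps once and
# chains both joins off the subquery, as the last assertions show.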
def test_one_to_many_scalar(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(User, users, properties = dict(
address = relationship(mapper(Address, addresses),
lazy='joined', uselist=False)
))
q = create_session().query(User)
def go():
l = q.filter(users.c.id == 7).all()
eq_([User(id=7, address=Address(id=1))], l)
self.assert_sql_count(testing.db, go, 1)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_many_to_one(self):
users, Address, addresses, User = (self.tables.users,
self.classes.Address,
self.tables.addresses,
self.classes.User)
mapper(Address, addresses, properties = dict(
user = relationship(mapper(User, users), lazy='joined')
))
sess = create_session()
q = sess.query(Address)
def go():
a = q.filter(addresses.c.id==1).one()
is_not_(a.user, None)
u1 = sess.query(User).get(7)
is_(a.user, u1)
self.assert_sql_count(testing.db, go, 1)
def test_many_to_one_null(self):
"""test that a many-to-one eager load which loads None does
not later trigger a lazy load.
"""
Order, Address, addresses, orders = (self.classes.Order,
self.classes.Address,
self.tables.addresses,
self.tables.orders)
# use a primaryjoin intended to defeat SA's usage of
# query.get() for a many-to-one lazyload
mapper(Order, orders, properties = dict(
address = relationship(mapper(Address, addresses),
primaryjoin=and_(
addresses.c.id==orders.c.address_id,
addresses.c.email_address != None
),
lazy='joined')
))
sess = create_session()
def go():
o1 = sess.query(Order).options(lazyload('address')).filter(Order.id==5).one()
eq_(o1.address, None)
self.assert_sql_count(testing.db, go, 2)
sess.expunge_all()
def go():
o1 = sess.query(Order).filter(Order.id==5).one()
eq_(o1.address, None)
self.assert_sql_count(testing.db, go, 1)
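# the payoff in the second go(): the joined eager load writes None
# into o1.address as committed attribute state, so reading the
# attribute later is a plain dict access rather than a lazy SELECT:
#
#     o1 = sess.query(Order).filter(Order.id == 5).one()  # 1 SELECT
#     o1.address is None  # True, with no second statement emitted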
def test_one_and_many(self):
"""tests eager load for a parent object with a child object that
contains a many-to-many relationship to a third object."""
users, items, order_items, orders, Item, User, Order = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Order)
mapper(User, users, properties={
'orders':relationship(Order, lazy='joined', order_by=orders.c.id)
})
mapper(Item, items)
mapper(Order, orders, properties = dict(
items = relationship(Item, secondary=order_items, lazy='joined', order_by=items.c.id)
))
q = create_session().query(User)
l = q.filter("users.id in (7, 8, 9)").order_by("users.id")
def go():
eq_(self.static.user_order_result[0:3], l.all())
self.assert_sql_count(testing.db, go, 1)
def test_double_with_aggregate(self):
User, users, orders, Order = (self.classes.User,
self.tables.users,
self.tables.orders,
self.classes.Order)
max_orders_by_user = sa.select([sa.func.max(orders.c.id).label('order_id')],
group_by=[orders.c.user_id]
).alias('max_orders_by_user')
max_orders = orders.select(orders.c.id==max_orders_by_user.c.order_id).\
alias('max_orders')
mapper(Order, orders)
mapper(User, users, properties={
'orders':relationship(Order, backref='user', lazy='joined',
order_by=orders.c.id),
'max_order':relationship(
mapper(Order, max_orders, non_primary=True),
lazy='joined', uselist=False)
})
q = create_session().query(User)
def go():
eq_([
User(id=7, orders=[
Order(id=1),
Order(id=3),
Order(id=5),
],
max_order=Order(id=5)
),
User(id=8, orders=[]),
User(id=9, orders=[Order(id=2),Order(id=4)],
max_order=Order(id=4)
),
User(id=10),
], q.order_by(User.id).all())
self.assert_sql_count(testing.db, go, 1)
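# 'max_order' above demonstrates a joined eager load against a
# non_primary mapper whose selectable is a derived "max order id per
# user" query; the eager loader treats that selectable like any other
# target and LEFT OUTER JOINs it, still within the single SELECT
# counted by the assertion.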
def test_uselist_false_warning(self):
"""test that multiple rows received by a
uselist=False raises a warning."""
User, users, orders, Order = (self.classes.User,
self.tables.users,
self.tables.orders,
self.classes.Order)
mapper(User, users, properties={
'order':relationship(Order, uselist=False)
})
mapper(Order, orders)
s = create_session()
assert_raises(sa.exc.SAWarning,
s.query(User).options(joinedload(User.order)).all)
def test_wide(self):
users, items, order_items, Order, Item, User, Address, orders, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.tables.orders,
self.tables.addresses)
mapper(Order, orders, properties={'items':relationship(Item, secondary=order_items, lazy='joined',
order_by=items.c.id)})
mapper(Item, items)
mapper(User, users, properties = dict(
addresses = relationship(mapper(Address, addresses), lazy = False, order_by=addresses.c.id),
orders = relationship(Order, lazy = False, order_by=orders.c.id),
))
q = create_session().query(User)
l = q.all()
eq_(self.static.user_all_result, q.order_by(User.id).all())
def test_against_select(self):
"""test eager loading of a mapper which is against a select"""
users, items, order_items, orders, Item, User, Order = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.tables.orders,
self.classes.Item,
self.classes.User,
self.classes.Order)
s = sa.select([orders], orders.c.isopen==1).alias('openorders')
mapper(Order, s, properties={
'user':relationship(User, lazy='joined')
})
mapper(User, users)
mapper(Item, items)
q = create_session().query(Order)
eq_([
Order(id=3, user=User(id=7)),
Order(id=4, user=User(id=9))
], q.all())
q = q.select_from(s.join(order_items).join(items)).filter(~Item.id.in_([1, 2, 5]))
eq_([
Order(id=3, user=User(id=7)),
], q.all())
def test_aliasing(self):
"""test that eager loading uses aliases to insulate the eager
load from regular criterion against those tables."""
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(User, users, properties = dict(
addresses = relationship(mapper(Address, addresses),
lazy='joined', order_by=addresses.c.id)
))
q = create_session().query(User)
l = q.filter(addresses.c.email_address == '[email protected]').filter(
Address.user_id==User.id).order_by(User.id)
eq_(self.static.user_address_result[1:2], l.all())
def test_inner_join(self):
Address, addresses, users, User = (self.classes.Address,
self.tables.addresses,
self.tables.users,
self.classes.User)
mapper(User, users, properties = dict(
addresses = relationship(mapper(Address, addresses), lazy='joined',
innerjoin=True, order_by=addresses.c.id)
))
sess = create_session()
eq_(
[User(id=7, addresses=[ Address(id=1) ]),
User(id=8,
addresses=[ Address(id=2, email_address='[email protected]'),
Address(id=3, email_address='[email protected]'),
Address(id=4, email_address='[email protected]'), ]),
User(id=9, addresses=[ Address(id=5) ])]
,sess.query(User).all()
)
self.assert_compile(sess.query(User),
"SELECT users.id AS users_id, users.name AS users_name, "
"addresses_1.id AS addresses_1_id, addresses_1.user_id AS addresses_1_user_id, "
"addresses_1.email_address AS addresses_1_email_address FROM users JOIN "
"addresses AS addresses_1 ON users.id = addresses_1.user_id ORDER BY addresses_1.id"
, use_default_dialect=True)
def test_inner_join_chaining_options(self):
users, items, order_items, Order, Item, User, orders = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.orders)
mapper(User, users, properties = dict(
orders =relationship(Order, innerjoin=True,
lazy=False)
))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy=False,
innerjoin=True)
))
mapper(Item, items)
sess = create_session()
self.assert_compile(
sess.query(User),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
"items_1_id, items_1.description AS items_1_description, orders_1.id AS "
"orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, "
"orders_1.isopen AS orders_1_isopen FROM users JOIN orders AS orders_1 ON "
"users.id = orders_1.user_id JOIN order_items AS order_items_1 ON orders_1.id = "
"order_items_1.order_id JOIN items AS items_1 ON items_1.id = "
"order_items_1.item_id",
use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload(User.orders, innerjoin=False)),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
"items_1_id, items_1.description AS items_1_description, orders_1.id AS "
"orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, "
"orders_1.isopen AS orders_1_isopen FROM users LEFT OUTER JOIN orders AS orders_1 ON "
"users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
"order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
"order_items_1.item_id",
use_default_dialect=True
)
self.assert_compile(
sess.query(User).options(joinedload(User.orders, Order.items, innerjoin=False)),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
"items_1_id, items_1.description AS items_1_description, orders_1.id AS "
"orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, "
"orders_1.isopen AS orders_1_isopen FROM users JOIN orders AS orders_1 ON "
"users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
"order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
"order_items_1.item_id",
use_default_dialect=True
)
def test_inner_join_chaining_fixed(self):
users, items, order_items, Order, Item, User, orders = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.orders)
mapper(User, users, properties = dict(
orders =relationship(Order, lazy=False)
))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, lazy=False,
innerjoin=True)
))
mapper(Item, items)
sess = create_session()
# joining from User, it's all LEFT OUTER JOINs
self.assert_compile(
sess.query(User),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS "
"items_1_id, items_1.description AS items_1_description, orders_1.id AS "
"orders_1_id, orders_1.user_id AS orders_1_user_id, orders_1.address_id AS "
"orders_1_address_id, orders_1.description AS orders_1_description, "
"orders_1.isopen AS orders_1_isopen FROM users LEFT OUTER JOIN orders AS orders_1 ON "
"users.id = orders_1.user_id LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = "
"order_items_1.order_id LEFT OUTER JOIN items AS items_1 ON items_1.id = "
"order_items_1.item_id",
use_default_dialect=True
)
# joining just from Order, innerjoin=True can be respected
self.assert_compile(
sess.query(Order),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, "
"orders.address_id AS orders_address_id, orders.description AS "
"orders_description, orders.isopen AS orders_isopen, items_1.id "
"AS items_1_id, items_1.description AS items_1_description FROM "
"orders JOIN order_items AS order_items_1 ON orders.id = "
"order_items_1.order_id JOIN items AS items_1 ON items_1.id = "
"order_items_1.item_id",
use_default_dialect=True
)
def test_inner_join_options(self):
users, items, order_items, Order, Item, User, orders = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.tables.orders)
mapper(User, users, properties = dict(
orders =relationship(Order, backref=backref('user', innerjoin=True), order_by=orders.c.id)
))
mapper(Order, orders, properties=dict(
items=relationship(Item, secondary=order_items, order_by=items.c.id)
))
mapper(Item, items)
sess = create_session()
self.assert_compile(sess.query(User).options(joinedload(User.orders, innerjoin=True)),
"SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM users JOIN orders AS orders_1 ON users.id = orders_1.user_id ORDER BY orders_1.id"
, use_default_dialect=True)
self.assert_compile(sess.query(User).options(joinedload_all(User.orders, Order.items, innerjoin=True)),
"SELECT users.id AS users_id, users.name AS users_name, items_1.id AS items_1_id, "
"items_1.description AS items_1_description, orders_1.id AS orders_1_id, "
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen "
"FROM users JOIN orders AS orders_1 ON users.id = orders_1.user_id JOIN order_items AS "
"order_items_1 ON orders_1.id = order_items_1.order_id JOIN items AS items_1 ON "
"items_1.id = order_items_1.item_id ORDER BY orders_1.id, items_1.id"
, use_default_dialect=True)
def go():
eq_(
sess.query(User).options(
joinedload(User.orders, innerjoin=True),
joinedload(User.orders, Order.items, innerjoin=True)).
order_by(User.id).all(),
[User(id=7,
orders=[
Order(id=1, items=[ Item(id=1), Item(id=2), Item(id=3)]),
Order(id=3, items=[ Item(id=3), Item(id=4), Item(id=5)]),
Order(id=5, items=[Item(id=5)])]),
User(id=9, orders=[
Order(id=2, items=[ Item(id=1), Item(id=2), Item(id=3)]),
Order(id=4, items=[ Item(id=1), Item(id=5)])])
]
)
self.assert_sql_count(testing.db, go, 1)
# test that default innerjoin setting is used for options
self.assert_compile(
sess.query(Order).options(joinedload(Order.user)).filter(Order.description == 'foo'),
"SELECT orders.id AS orders_id, orders.user_id AS orders_user_id, orders.address_id AS "
"orders_address_id, orders.description AS orders_description, orders.isopen AS "
"orders_isopen, users_1.id AS users_1_id, users_1.name AS users_1_name "
"FROM orders JOIN users AS users_1 ON users_1.id = orders.user_id "
"WHERE orders.description = :description_1",
use_default_dialect=True
)
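# the final assertion relies on the backref configured at the top of
# this test: backref('user', innerjoin=True) makes innerjoin=True the
# default for Order.user, so a bare joinedload(Order.user) option
# renders "JOIN users" rather than "LEFT OUTER JOIN users" without
# restating the flag:
#
#     sess.query(Order).options(joinedload(Order.user))
#     # -> ... FROM orders JOIN users AS users_1 ON ...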
class SubqueryAliasingTest(fixtures.MappedTest, testing.AssertsCompiledSQL):
"""test #2188"""
__dialect__ = 'default'
@classmethod
def define_tables(cls, metadata):
Table('a', metadata,
Column('id', Integer, primary_key=True)
)
Table('b', metadata,
Column('id', Integer, primary_key=True),
Column('a_id', Integer, ForeignKey('a.id')),
Column('value', Integer),
)
@classmethod
def setup_classes(cls):
class A(cls.Comparable):
pass
class B(cls.Comparable):
pass
def _fixture(self, props):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
mapper(A,a_table, properties=props)
mapper(B,b_table,properties = {
'a':relationship(A, backref="bs")
})
def test_column_property(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id==a_table.c.id)
self._fixture({
'summation':column_property(cp)
})
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(A.summation).
limit(50),
"SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
"AS anon_1_a_id, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT "
"(SELECT sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"AS anon_2, a.id AS a_id FROM a ORDER BY (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 ON "
"anon_1.a_id = b_1.a_id ORDER BY anon_1.anon_2"
)
def test_column_property_desc(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id==a_table.c.id)
self._fixture({
'summation':column_property(cp)
})
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(A.summation.desc()).
limit(50),
"SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
"AS anon_1_a_id, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT "
"(SELECT sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"AS anon_2, a.id AS a_id FROM a ORDER BY (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) DESC "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 ON "
"anon_1.a_id = b_1.a_id ORDER BY anon_1.anon_2 DESC"
)
def test_column_property_correlated(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id==a_table.c.id).\
correlate(a_table)
self._fixture({
'summation':column_property(cp)
})
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(A.summation).
limit(50),
"SELECT anon_1.anon_2 AS anon_1_anon_2, anon_1.a_id "
"AS anon_1_a_id, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT "
"(SELECT sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"AS anon_2, a.id AS a_id FROM a ORDER BY (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 ON "
"anon_1.a_id = b_1.a_id ORDER BY anon_1.anon_2"
)
def test_standalone_subquery_unlabeled(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
self._fixture({})
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id == a_table.c.id).\
correlate(a_table).as_scalar()
# up until 0.8, this was ordering by a new subquery.
# the removal of a separate _make_proxy() from ScalarSelect
# fixed that.
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(cp).
limit(50),
"SELECT anon_1.a_id AS anon_1_a_id, anon_1.anon_2 "
"AS anon_1_anon_2, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT a.id "
"AS a_id, (SELECT sum(b.value) AS sum_1 FROM b WHERE "
"b.a_id = a.id) AS anon_2 FROM a ORDER BY (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 "
"ON anon_1.a_id = b_1.a_id ORDER BY anon_1.anon_2"
)
def test_standalone_subquery_labeled(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
self._fixture({})
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id==a_table.c.id).\
correlate(a_table).as_scalar().label('foo')
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(cp).
limit(50),
"SELECT anon_1.a_id AS anon_1_a_id, anon_1.foo "
"AS anon_1_foo, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT a.id "
"AS a_id, (SELECT sum(b.value) AS sum_1 FROM b WHERE "
"b.a_id = a.id) AS foo FROM a ORDER BY (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 "
"ON anon_1.a_id = b_1.a_id ORDER BY "
"anon_1.foo"
)
def test_standalone_negated(self):
A, B = self.classes.A, self.classes.B
b_table, a_table = self.tables.b, self.tables.a
self._fixture({})
cp = select([func.sum(b_table.c.value)]).\
where(b_table.c.a_id==a_table.c.id).\
correlate(a_table).\
as_scalar()
# test a different unary operator
self.assert_compile(
create_session().query(A).options(joinedload_all('bs')).
order_by(~cp).
limit(50),
"SELECT anon_1.a_id AS anon_1_a_id, anon_1.anon_2 "
"AS anon_1_anon_2, b_1.id AS b_1_id, b_1.a_id AS "
"b_1_a_id, b_1.value AS b_1_value FROM (SELECT a.id "
"AS a_id, NOT (SELECT sum(b.value) AS sum_1 FROM b "
"WHERE b.a_id = a.id) FROM a ORDER BY NOT (SELECT "
"sum(b.value) AS sum_1 FROM b WHERE b.a_id = a.id) "
"LIMIT :param_1) AS anon_1 LEFT OUTER JOIN b AS b_1 "
"ON anon_1.a_id = b_1.a_id ORDER BY anon_1.anon_2"
)
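# common thread of the assertions in this class (#2188): when the
# query is wrapped for LIMIT, an ORDER BY based on a correlated
# subquery must be re-expressed against the wrapping alias -- the
# inner select orders by the raw subquery while the outer select
# orders by its exported label (anon_1.anon_2, or anon_1.foo when an
# explicit label is present).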
class LoadOnExistingTest(_fixtures.FixtureTest):
"""test that loaders from a base Query fully populate."""
run_inserts = 'once'
run_deletes = None
def _collection_to_scalar_fixture(self):
User, Address, Dingaling = self.classes.User, \
self.classes.Address, self.classes.Dingaling
mapper(User, self.tables.users, properties={
'addresses':relationship(Address),
})
mapper(Address, self.tables.addresses, properties={
'dingaling':relationship(Dingaling)
})
mapper(Dingaling, self.tables.dingalings)
sess = Session(autoflush=False)
return User, Address, Dingaling, sess
def _collection_to_collection_fixture(self):
User, Order, Item = self.classes.User, \
self.classes.Order, self.classes.Item
mapper(User, self.tables.users, properties={
'orders':relationship(Order),
})
mapper(Order, self.tables.orders, properties={
'items':relationship(Item, secondary=self.tables.order_items),
})
mapper(Item, self.tables.items)
sess = Session(autoflush=False)
return User, Order, Item, sess
def _eager_config_fixture(self):
User, Address = self.classes.User, self.classes.Address
mapper(User, self.tables.users, properties={
'addresses':relationship(Address, lazy="joined"),
})
mapper(Address, self.tables.addresses)
sess = Session(autoflush=False)
return User, Address, sess
def test_no_query_on_refresh(self):
User, Address, sess = self._eager_config_fixture()
u1 = sess.query(User).get(8)
assert 'addresses' in u1.__dict__
sess.expire(u1)
def go():
eq_(u1.id, 8)
self.assert_sql_count(testing.db, go, 1)
assert 'addresses' not in u1.__dict__
def test_loads_second_level_collection_to_scalar(self):
User, Address, Dingaling, sess = self._collection_to_scalar_fixture()
u1 = sess.query(User).get(8)
a1 = Address()
u1.addresses.append(a1)
a2 = u1.addresses[0]
a2.email_address = 'foo'
sess.query(User).options(joinedload_all("addresses.dingaling")).\
filter_by(id=8).all()
assert u1.addresses[-1] is a1
for a in u1.addresses:
if a is not a1:
assert 'dingaling' in a.__dict__
else:
assert 'dingaling' not in a.__dict__
if a is a2:
eq_(a2.email_address, 'foo')
def test_loads_second_level_collection_to_collection(self):
User, Order, Item, sess = self._collection_to_collection_fixture()
u1 = sess.query(User).get(7)
u1.orders
o1 = Order()
u1.orders.append(o1)
sess.query(User).options(joinedload_all("orders.items")).\
filter_by(id=7).all()
for o in u1.orders:
if o is not o1:
assert 'items' in o.__dict__
else:
assert 'items' not in o.__dict__
def test_load_two_levels_collection_to_scalar(self):
User, Address, Dingaling, sess = self._collection_to_scalar_fixture()
u1 = sess.query(User).filter_by(id=8).options(joinedload("addresses")).one()
sess.query(User).filter_by(id=8).options(joinedload_all("addresses.dingaling")).first()
assert 'dingaling' in u1.addresses[0].__dict__
def test_load_two_levels_collection_to_collection(self):
User, Order, Item, sess = self._collection_to_collection_fixture()
u1 = sess.query(User).filter_by(id=7).options(joinedload("orders")).one()
sess.query(User).filter_by(id=7).options(joinedload_all("orders.items")).first()
assert 'items' in u1.orders[0].__dict__
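# in both "two levels" tests the second query re-selects rows for an
# object already present in the session; the joined eager loaders
# still run against those existing instances, populating the deeper
# 'dingaling' / 'items' attributes in place.  sketch, using the same
# fixtures:
#
#     u1 = sess.query(User).options(joinedload("addresses")).\
#         filter_by(id=8).one()
#     sess.query(User).options(joinedload_all("addresses.dingaling")).\
#         filter_by(id=8).first()  # deepens u1's loaded graph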
class AddEntityTest(_fixtures.FixtureTest):
run_inserts = 'once'
run_deletes = None
def _assert_result(self):
Item, Address, Order, User = (self.classes.Item,
self.classes.Address,
self.classes.Order,
self.classes.User)
return [
(
User(id=7,
addresses=[Address(id=1)]
),
Order(id=1,
items=[Item(id=1), Item(id=2), Item(id=3)]
),
),
(
User(id=7,
addresses=[Address(id=1)]
),
Order(id=3,
items=[Item(id=3), Item(id=4), Item(id=5)]
),
),
(
User(id=7,
addresses=[Address(id=1)]
),
Order(id=5,
items=[Item(id=5)]
),
),
(
User(id=9,
addresses=[Address(id=5)]
),
Order(id=2,
items=[Item(id=1), Item(id=2), Item(id=3)]
),
),
(
User(id=9,
addresses=[Address(id=5)]
),
Order(id=4,
items=[Item(id=1), Item(id=5)]
),
)
]
def test_mapper_configured(self):
users, items, order_items, Order, Item, User, Address, orders, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.tables.orders,
self.tables.addresses)
mapper(User, users, properties={
'addresses':relationship(Address, lazy='joined'),
'orders':relationship(Order)
})
mapper(Address, addresses)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, lazy='joined', order_by=items.c.id)
})
mapper(Item, items)
sess = create_session()
oalias = sa.orm.aliased(Order)
def go():
ret = sess.query(User, oalias).join(oalias, 'orders').\
order_by(User.id,oalias.id).all()
eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 1)
def test_options(self):
users, items, order_items, Order, Item, User, Address, orders, addresses = (self.tables.users,
self.tables.items,
self.tables.order_items,
self.classes.Order,
self.classes.Item,
self.classes.User,
self.classes.Address,
self.tables.orders,
self.tables.addresses)
mapper(User, users, properties={
'addresses':relationship(Address),
'orders':relationship(Order)
})
mapper(Address, addresses)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, order_by=items.c.id)
})
mapper(Item, items)
sess = create_session()
oalias = sa.orm.aliased(Order)
def go():
ret = sess.query(User, oalias).options(joinedload('addresses')).\
join(oalias, 'orders').\
order_by(User.id, oalias.id).all()
eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 6)
sess.expunge_all()
def go():
ret = sess.query(User, oalias).\
options(joinedload('addresses'),
joinedload(oalias.items)).\
join(oalias, 'orders').\
order_by(User.id, oalias.id).all()
eq_(ret, self._assert_result())
self.assert_sql_count(testing.db, go, 1)
class OrderBySecondaryTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('m2m', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('aid', Integer, ForeignKey('a.id')),
Column('bid', Integer, ForeignKey('b.id')))
Table('a', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50)))
Table('b', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('data', String(50)))
@classmethod
def fixtures(cls):
return dict(
a=(('id', 'data'),
(1, 'a1'),
(2, 'a2')),
b=(('id', 'data'),
(1, 'b1'),
(2, 'b2'),
(3, 'b3'),
(4, 'b4')),
m2m=(('id', 'aid', 'bid'),
(2, 1, 1),
(4, 2, 4),
(1, 1, 3),
(6, 2, 2),
(3, 1, 2),
(5, 2, 3)))
def test_ordering(self):
a, m2m, b = (self.tables.a,
self.tables.m2m,
self.tables.b)
class A(fixtures.ComparableEntity): pass
class B(fixtures.ComparableEntity): pass
mapper(A, a, properties={
'bs':relationship(B, secondary=m2m, lazy='joined', order_by=m2m.c.id)
})
mapper(B, b)
sess = create_session()
eq_(sess.query(A).all(), [
A(data='a1', bs=[B(data='b3'), B(data='b1'), B(data='b2')]),
A(bs=[B(data='b4'), B(data='b3'), B(data='b2')])
])
class SelfReferentialEagerTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('nodes', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent_id', Integer, ForeignKey('nodes.id')),
Column('data', String(30)))
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_basic(self):
nodes = self.tables.nodes
class Node(fixtures.ComparableEntity):
def append(self, node):
self.children.append(node)
mapper(Node, nodes, properties={
'children':relationship(Node,
lazy='joined',
join_depth=3, order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
n1.append(Node(data='n12'))
n1.append(Node(data='n13'))
n1.children[1].append(Node(data='n121'))
n1.children[1].append(Node(data='n122'))
n1.children[1].append(Node(data='n123'))
sess.add(n1)
sess.flush()
sess.expunge_all()
def go():
d = sess.query(Node).filter_by(data='n1').all()[0]
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
]),
Node(data='n13')
]), d)
self.assert_sql_count(testing.db, go, 1)
sess.expunge_all()
def go():
d = sess.query(Node).filter_by(data='n1').first()
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
]),
Node(data='n13')
]), d)
self.assert_sql_count(testing.db, go, 1)
def test_lazy_fallback_doesnt_affect_eager(self):
nodes = self.tables.nodes
class Node(fixtures.ComparableEntity):
def append(self, node):
self.children.append(node)
mapper(Node, nodes, properties={
'children':relationship(Node, lazy='joined', join_depth=1,
order_by=nodes.c.id)
})
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
n1.append(Node(data='n12'))
n1.append(Node(data='n13'))
n1.children[1].append(Node(data='n121'))
n1.children[1].append(Node(data='n122'))
n1.children[1].append(Node(data='n123'))
sess.add(n1)
sess.flush()
sess.expunge_all()
# eager load with join depth 1. when the eager load of 'n1' hits the
# children of 'n12', no columns are present, so the eager loader
# degrades to a lazy loader; fine. but then 'n12' is *also* in the
# first level of columns, since we're loading the whole table. when
# those rows arrive, now we *can* eager load its children and an eager
# collection should be initialized. essentially the 'n12' instance is
# present not just in two different rows but in two distinct sets of
# columns in this result set.
def go():
allnodes = sess.query(Node).order_by(Node.data).all()
n12 = allnodes[2]
eq_(n12.data, 'n12')
eq_([
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
], list(n12.children))
self.assert_sql_count(testing.db, go, 1)
def test_with_deferred(self):
nodes = self.tables.nodes
class Node(fixtures.ComparableEntity):
def append(self, node):
self.children.append(node)
mapper(Node, nodes, properties={
'children':relationship(Node, lazy='joined', join_depth=3,
order_by=nodes.c.id),
'data':deferred(nodes.c.data)
})
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
n1.append(Node(data='n12'))
sess.add(n1)
sess.flush()
sess.expunge_all()
def go():
eq_(
Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
sess.query(Node).order_by(Node.id).first(),
)
self.assert_sql_count(testing.db, go, 4)
sess.expunge_all()
def go():
eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
sess.query(Node).options(undefer('data')).order_by(Node.id).first())
self.assert_sql_count(testing.db, go, 3)
sess.expunge_all()
def go():
eq_(Node(data='n1', children=[Node(data='n11'), Node(data='n12')]),
sess.query(Node).options(undefer('data'),
undefer('children.data')).first())
self.assert_sql_count(testing.db, go, 1)
def test_options(self):
nodes = self.tables.nodes
class Node(fixtures.ComparableEntity):
def append(self, node):
self.children.append(node)
mapper(Node, nodes, properties={
'children':relationship(Node, lazy='select', order_by=nodes.c.id)
}, order_by=nodes.c.id)
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
n1.append(Node(data='n12'))
n1.append(Node(data='n13'))
n1.children[1].append(Node(data='n121'))
n1.children[1].append(Node(data='n122'))
n1.children[1].append(Node(data='n123'))
sess.add(n1)
sess.flush()
sess.expunge_all()
def go():
d = sess.query(Node).filter_by(data='n1').\
options(joinedload('children.children')).first()
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
]),
Node(data='n13')
]), d)
self.assert_sql_count(testing.db, go, 2)
def go():
d = sess.query(Node).filter_by(data='n1').\
options(joinedload('children.children')).first()
# test that the query isn't wrapping the initial query for eager loading.
self.assert_sql_execution(testing.db, go,
CompiledSQL(
"SELECT nodes.id AS nodes_id, nodes.parent_id AS "
"nodes_parent_id, nodes.data AS nodes_data FROM nodes "
"WHERE nodes.data = :data_1 ORDER BY nodes.id LIMIT :param_1",
{'data_1': 'n1'}
)
)
@testing.fails_on('maxdb', 'FIXME: unknown')
def test_no_depth(self):
nodes = self.tables.nodes
class Node(fixtures.ComparableEntity):
def append(self, node):
self.children.append(node)
mapper(Node, nodes, properties={
'children':relationship(Node, lazy='joined')
})
sess = create_session()
n1 = Node(data='n1')
n1.append(Node(data='n11'))
n1.append(Node(data='n12'))
n1.append(Node(data='n13'))
n1.children[1].append(Node(data='n121'))
n1.children[1].append(Node(data='n122'))
n1.children[1].append(Node(data='n123'))
sess.add(n1)
sess.flush()
sess.expunge_all()
def go():
d = sess.query(Node).filter_by(data='n1').first()
eq_(Node(data='n1', children=[
Node(data='n11'),
Node(data='n12', children=[
Node(data='n121'),
Node(data='n122'),
Node(data='n123')
]),
Node(data='n13')
]), d)
self.assert_sql_count(testing.db, go, 3)
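# contrast with test_basic: without join_depth, a self-referential
# lazy='joined' cannot chain onto itself (it would recurse
# indefinitely), so the children collections here load lazily as they
# are touched -- three statements total versus the single statement
# that join_depth=3 produces.  a per-query approximation is the option
# form used in test_options:
#
#     sess.query(Node).options(joinedload('children.children'))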
class MixedSelfReferentialEagerTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('a_table', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True)
)
Table('b_table', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('parent_b1_id', Integer, ForeignKey('b_table.id')),
Column('parent_a_id', Integer, ForeignKey('a_table.id')),
Column('parent_b2_id', Integer, ForeignKey('b_table.id')))
@classmethod
def setup_mappers(cls):
b_table, a_table = cls.tables.b_table, cls.tables.a_table
class A(cls.Comparable):
pass
class B(cls.Comparable):
pass
mapper(A, a_table)
mapper(B, b_table, properties={
'parent_b1': relationship(B,
remote_side=[b_table.c.id],
primaryjoin=(b_table.c.parent_b1_id == b_table.c.id),
order_by=b_table.c.id
),
'parent_z': relationship(A, lazy=True),
'parent_b2': relationship(B,
remote_side=[b_table.c.id],
primaryjoin=(b_table.c.parent_b2_id == b_table.c.id),
order_by=b_table.c.id
)
})
@classmethod
def insert_data(cls):
b_table, a_table = cls.tables.b_table, cls.tables.a_table
a_table.insert().execute(dict(id=1), dict(id=2), dict(id=3))
b_table.insert().execute(
dict(id=1, parent_a_id=2, parent_b1_id=None, parent_b2_id=None),
dict(id=2, parent_a_id=1, parent_b1_id=1, parent_b2_id=None),
dict(id=3, parent_a_id=1, parent_b1_id=1, parent_b2_id=2),
dict(id=4, parent_a_id=3, parent_b1_id=1, parent_b2_id=None),
dict(id=5, parent_a_id=3, parent_b1_id=None, parent_b2_id=2),
dict(id=6, parent_a_id=1, parent_b1_id=1, parent_b2_id=3),
dict(id=7, parent_a_id=2, parent_b1_id=None, parent_b2_id=3),
dict(id=8, parent_a_id=2, parent_b1_id=1, parent_b2_id=2),
dict(id=9, parent_a_id=None, parent_b1_id=1, parent_b2_id=None),
dict(id=10, parent_a_id=3, parent_b1_id=7, parent_b2_id=2),
dict(id=11, parent_a_id=3, parent_b1_id=1, parent_b2_id=8),
dict(id=12, parent_a_id=2, parent_b1_id=5, parent_b2_id=2),
dict(id=13, parent_a_id=3, parent_b1_id=4, parent_b2_id=4),
dict(id=14, parent_a_id=3, parent_b1_id=7, parent_b2_id=2),
)
def test_eager_load(self):
A, B = self.classes.A, self.classes.B
session = create_session()
def go():
eq_(
session.query(B).\
options(
joinedload('parent_b1'),
joinedload('parent_b2'),
joinedload('parent_z')).
filter(B.id.in_([2, 8, 11])).order_by(B.id).all(),
[
B(id=2, parent_z=A(id=1), parent_b1=B(id=1), parent_b2=None),
B(id=8, parent_z=A(id=2), parent_b1=B(id=1), parent_b2=B(id=2)),
B(id=11, parent_z=A(id=3), parent_b1=B(id=1), parent_b2=B(id=8))
]
)
self.assert_sql_count(testing.db, go, 1)
class SelfReferentialM2MEagerTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('widget', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', sa.Unicode(40), nullable=False, unique=True),
)
Table('widget_rel', metadata,
Column('parent_id', Integer, ForeignKey('widget.id')),
Column('child_id', Integer, ForeignKey('widget.id')),
sa.UniqueConstraint('parent_id', 'child_id'),
)
def test_basic(self):
widget, widget_rel = self.tables.widget, self.tables.widget_rel
class Widget(fixtures.ComparableEntity):
pass
mapper(Widget, widget, properties={
'children': relationship(Widget, secondary=widget_rel,
primaryjoin=widget_rel.c.parent_id==widget.c.id,
secondaryjoin=widget_rel.c.child_id==widget.c.id,
lazy='joined', join_depth=1,
)
})
sess = create_session()
w1 = Widget(name=u'w1')
w2 = Widget(name=u'w2')
w1.children.append(w2)
sess.add(w1)
sess.flush()
sess.expunge_all()
eq_([Widget(name='w1', children=[Widget(name='w2')])],
sess.query(Widget).filter(Widget.name==u'w1').all())
class MixedEntitiesTest(_fixtures.FixtureTest, testing.AssertsCompiledSQL):
run_setup_mappers = 'once'
run_inserts = 'once'
run_deletes = None
@classmethod
def setup_mappers(cls):
users, Keyword, items, order_items, orders, Item, User, Address, keywords, Order, item_keywords, addresses = (cls.tables.users,
cls.classes.Keyword,
cls.tables.items,
cls.tables.order_items,
cls.tables.orders,
cls.classes.Item,
cls.classes.User,
cls.classes.Address,
cls.tables.keywords,
cls.classes.Order,
cls.tables.item_keywords,
cls.tables.addresses)
mapper(User, users, properties={
'addresses':relationship(Address, backref='user'),
'orders':relationship(Order, backref='user'), # o2m, m2o
})
mapper(Address, addresses)
mapper(Order, orders, properties={
'items':relationship(Item, secondary=order_items, order_by=items.c.id), #m2m
})
mapper(Item, items, properties={
'keywords':relationship(Keyword, secondary=item_keywords) #m2m
})
mapper(Keyword, keywords)
def test_two_entities(self):
Item, Order, User, Address = (self.classes.Item,
self.classes.Order,
self.classes.User,
self.classes.Address)
sess = create_session()
# two FROM clauses
def go():
eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, Order).filter(User.id==Order.user_id).\
options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
order_by(User.id, Order.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
# one FROM clause
def go():
eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, Order).join(User.orders).options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
order_by(User.id, Order.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
@testing.exclude('sqlite', '>', (0, ), "sqlite flat out blows it on the multiple JOINs")
def test_two_entities_with_joins(self):
Item, Order, User, Address = (self.classes.Item,
self.classes.Order,
self.classes.User,
self.classes.Address)
sess = create_session()
# two FROM clauses where there's a join on each one
def go():
u1 = aliased(User)
o1 = aliased(Order)
eq_(
[
(
User(addresses=[Address(email_address=u'[email protected]')], name=u'fred'),
Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
User(addresses=[Address(email_address=u'[email protected]')], name=u'jack'),
Order(description=u'order 3', isopen=1, items=[Item(description=u'item 3'), Item(description=u'item 4'), Item(description=u'item 5')])
),
(
User(addresses=[Address(email_address=u'[email protected]')], name=u'fred'),
Order(description=u'order 2', isopen=0, items=[Item(description=u'item 1'), Item(description=u'item 2'), Item(description=u'item 3')]),
User(addresses=[Address(email_address=u'[email protected]')], name=u'jack'),
Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
),
(
User(addresses=[Address(email_address=u'[email protected]')], name=u'fred'),
Order(description=u'order 4', isopen=1, items=[Item(description=u'item 1'), Item(description=u'item 5')]),
User(addresses=[Address(email_address=u'[email protected]')], name=u'jack'),
Order(address_id=None, description=u'order 5', isopen=0, items=[Item(description=u'item 5')])
),
],
sess.query(User, Order, u1, o1).\
join(Order, User.orders).options(joinedload(User.addresses), joinedload(Order.items)).filter(User.id==9).\
join(o1, u1.orders).options(joinedload(u1.addresses), joinedload(o1.items)).filter(u1.id==7).\
filter(Order.id<o1.id).\
order_by(User.id, Order.id, u1.id, o1.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
def test_aliased_entity(self):
Item, Order, User, Address = (self.classes.Item,
self.classes.Order,
self.classes.User,
self.classes.Address)
sess = create_session()
oalias = sa.orm.aliased(Order)
# two FROM clauses
def go():
eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, oalias).filter(User.id==oalias.user_id).\
options(joinedload(User.addresses), joinedload(oalias.items)).filter(User.id==9).\
order_by(User.id, oalias.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
# one FROM clause
def go():
eq_(
[
(User(id=9, addresses=[Address(id=5)]), Order(id=2, items=[Item(id=1), Item(id=2), Item(id=3)])),
(User(id=9, addresses=[Address(id=5)]), Order(id=4, items=[Item(id=1), Item(id=5)])),
],
sess.query(User, oalias).join(oalias, User.orders).
options(joinedload(User.addresses),
joinedload(oalias.items)).
filter(User.id==9).
order_by(User.id, oalias.id).all(),
)
self.assert_sql_count(testing.db, go, 1)
from sqlalchemy.engine.default import DefaultDialect
# improper setup: oalias is in the columns clause, but the join is to
# the plain orders table. this should create two FROM clauses even
# though the query has a from_clause set up via the join
self.assert_compile(sess.query(User, oalias).join(User.orders).options(joinedload(oalias.items)).with_labels().statement,
"SELECT users.id AS users_id, users.name AS users_name, orders_1.id AS orders_1_id, "\
"orders_1.user_id AS orders_1_user_id, orders_1.address_id AS orders_1_address_id, "\
"orders_1.description AS orders_1_description, orders_1.isopen AS orders_1_isopen, items_1.id AS items_1_id, "\
"items_1.description AS items_1_description FROM users JOIN orders ON users.id = orders.user_id, "\
"orders AS orders_1 LEFT OUTER JOIN order_items AS order_items_1 ON orders_1.id = order_items_1.order_id "\
"LEFT OUTER JOIN items AS items_1 ON items_1.id = order_items_1.item_id ORDER BY items_1.id",
dialect=DefaultDialect()
)
class SubqueryTest(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('users_table', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(16))
)
Table('tags_table', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('user_id', Integer, ForeignKey("users_table.id")),
Column('score1', sa.Float),
Column('score2', sa.Float),
)
def test_label_anonymizing(self):
"""Eager loading works with subqueries with labels,
Even if an explicit labelname which conflicts with a label on the
parent.
There's not much reason a column_property() would ever need to have a
label of a specific name (and they don't even need labels these days),
unless you'd like the name to line up with a name that you may be
using for a straight textual statement used for loading instances of
that type.
"""
tags_table, users_table = self.tables.tags_table, self.tables.users_table
class User(fixtures.ComparableEntity):
@property
def prop_score(self):
return sum([tag.prop_score for tag in self.tags])
class Tag(fixtures.ComparableEntity):
@property
def prop_score(self):
return self.score1 * self.score2
for labeled, labelname in [(True, 'score'), (True, None), (False, None)]:
sa.orm.clear_mappers()
tag_score = (tags_table.c.score1 * tags_table.c.score2)
user_score = sa.select([sa.func.sum(tags_table.c.score1 *
tags_table.c.score2)],
tags_table.c.user_id == users_table.c.id)
if labeled:
tag_score = tag_score.label(labelname)
user_score = user_score.label(labelname)
else:
user_score = user_score.as_scalar()
mapper(Tag, tags_table, properties={
'query_score': sa.orm.column_property(tag_score),
})
mapper(User, users_table, properties={
'tags': relationship(Tag, backref='user', lazy='joined'),
'query_score': sa.orm.column_property(user_score),
})
session = create_session()
session.add(User(name='joe', tags=[Tag(score1=5.0, score2=3.0),
Tag(score1=55.0, score2=1.0)]))
session.add(User(name='bar', tags=[Tag(score1=5.0, score2=4.0),
Tag(score1=50.0, score2=1.0),
Tag(score1=15.0, score2=2.0)]))
session.flush()
session.expunge_all()
for user in session.query(User).all():
eq_(user.query_score, user.prop_score)
def go():
u = session.query(User).filter_by(name='joe').one()
eq_(u.query_score, u.prop_score)
self.assert_sql_count(testing.db, go, 1)
for t in (tags_table, users_table):
t.delete().execute()
class CorrelatedSubqueryTest(fixtures.MappedTest):
"""tests for #946, #947, #948.
The "users" table is joined to "stuff", and the relationship
would like to pull only the "stuff" entry with the most recent date.
Exercises a variety of ways to configure this.
"""
# another argument for joinedload learning about inner joins
__requires__ = ('correlated_outer_joins', )
@classmethod
def define_tables(cls, metadata):
users = Table('users', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('name', String(50))
)
stuff = Table('stuff', metadata,
Column('id', Integer, primary_key=True, test_needs_autoincrement=True),
Column('date', Date),
Column('user_id', Integer, ForeignKey('users.id')))
@classmethod
def insert_data(cls):
stuff, users = cls.tables.stuff, cls.tables.users
users.insert().execute(
{'id':1, 'name':'user1'},
{'id':2, 'name':'user2'},
{'id':3, 'name':'user3'},
)
stuff.insert().execute(
{'id':1, 'user_id':1, 'date':datetime.date(2007, 10, 15)},
{'id':2, 'user_id':1, 'date':datetime.date(2007, 12, 15)},
{'id':3, 'user_id':1, 'date':datetime.date(2007, 11, 15)},
{'id':4, 'user_id':2, 'date':datetime.date(2008, 1, 15)},
{'id':5, 'user_id':3, 'date':datetime.date(2007, 6, 15)},
{'id':6, 'user_id':3, 'date':datetime.date(2007, 3, 15)},
)
def test_labeled_on_date_noalias(self):
self._do_test('label', True, False)
def test_scalar_on_date_noalias(self):
self._do_test('scalar', True, False)
def test_plain_on_date_noalias(self):
self._do_test('none', True, False)
def test_labeled_on_limitid_noalias(self):
self._do_test('label', False, False)
def test_scalar_on_limitid_noalias(self):
self._do_test('scalar', False, False)
def test_plain_on_limitid_noalias(self):
self._do_test('none', False, False)
def test_labeled_on_date_alias(self):
self._do_test('label', True, True)
def test_scalar_on_date_alias(self):
self._do_test('scalar', True, True)
def test_plain_on_date_alias(self):
self._do_test('none', True, True)
def test_labeled_on_limitid_alias(self):
self._do_test('label', False, True)
def test_scalar_on_limitid_alias(self):
self._do_test('scalar', False, True)
def test_plain_on_limitid_alias(self):
self._do_test('none', False, True)
def _do_test(self, labeled, ondate, aliasstuff):
stuff, users = self.tables.stuff, self.tables.users
class User(fixtures.ComparableEntity):
pass
class Stuff(fixtures.ComparableEntity):
pass
mapper(Stuff, stuff)
if aliasstuff:
salias = stuff.alias()
else:
# if we don't alias the 'stuff' table within the correlated subquery,
# it gets aliased in the eager load along with the "stuff" table to "stuff_1".
# but it's a scalar subquery, so this doesn't actually matter
if ondate:
# the more 'relational' way to do this, join on the max date
stuff_view = select([func.max(salias.c.date).label('max_date')]).\
where(salias.c.user_id==users.c.id).correlate(users)
else:
# a common method with the MySQL crowd, which actually might perform better in some
# cases - subquery does a limit with order by DESC, join on the id
stuff_view = select([salias.c.id]).where(salias.c.user_id==users.c.id).\
correlate(users).order_by(salias.c.date.desc()).limit(1)
# can't win on this one
if testing.against("mssql"):
operator = operators.in_op
else:
operator = operators.eq
if labeled == 'label':
stuff_view = stuff_view.label('foo')
operator = operators.eq
elif labeled == 'scalar':
stuff_view = stuff_view.as_scalar()
if ondate:
mapper(User, users, properties={
'stuff':relationship(Stuff, primaryjoin=and_(users.c.id==stuff.c.user_id, operator(stuff.c.date, stuff_view)))
})
else:
mapper(User, users, properties={
'stuff':relationship(Stuff, primaryjoin=and_(users.c.id==stuff.c.user_id, operator(stuff.c.id, stuff_view)))
})
sess = create_session()
def go():
eq_(
sess.query(User).order_by(User.name).options(joinedload('stuff')).all(),
[
User(name='user1', stuff=[Stuff(id=2)]),
User(name='user2', stuff=[Stuff(id=4)]),
User(name='user3', stuff=[Stuff(id=5)])
]
)
self.assert_sql_count(testing.db, go, 1)
sess = create_session()
def go():
eq_(
sess.query(User).order_by(User.name).first(),
User(name='user1', stuff=[Stuff(id=2)])
)
self.assert_sql_count(testing.db, go, 2)
sess = create_session()
def go():
eq_(
sess.query(User).order_by(User.name).options(joinedload('stuff')).first(),
User(name='user1', stuff=[Stuff(id=2)])
)
self.assert_sql_count(testing.db, go, 1)
sess = create_session()
def go():
eq_(
sess.query(User).filter(User.id==2).options(joinedload('stuff')).one(),
User(name='user2', stuff=[Stuff(id=4)])
)
self.assert_sql_count(testing.db, go, 1)
class CyclicalInheritingEagerTestOne(fixtures.MappedTest):
@classmethod
def define_tables(cls, metadata):
Table('t1', metadata,
Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
Column('c2', String(30)),
Column('type', String(30))
)
Table('t2', metadata,
Column('c1', Integer, primary_key=True, test_needs_autoincrement=True),
Column('c2', String(30)),
Column('type', String(30)),
Column('t1.id', Integer, ForeignKey('t1.c1')))
def test_basic(self):
t2, t1 = self.tables.t2, self.tables.t1
class T(object):
pass
class SubT(T):
pass
class T2(object):
pass
class SubT2(T2):
pass
mapper(T, t1, polymorphic_on=t1.c.type, polymorphic_identity='t1')
mapper(SubT, None, inherits=T, polymorphic_identity='subt1', properties={
't2s': relationship(SubT2, lazy='joined',
backref=sa.orm.backref('subt', lazy='joined'))
})
mapper(T2, t2, polymorphic_on=t2.c.type, polymorphic_identity='t2')
mapper(SubT2, None, inherits=T2, polymorphic_identity='subt2')
# testing a particular endless loop condition in eager load setup
create_session().query(SubT).all()
class CyclicalInheritingEagerTestTwo(fixtures.DeclarativeMappedTest,
testing.AssertsCompiledSQL):
__dialect__ = 'default'
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class PersistentObject(Base):
__tablename__ = 'persistent'
id = Column(Integer, primary_key=True,
test_needs_autoincrement=True)
class Movie(PersistentObject):
__tablename__ = 'movie'
id = Column(Integer, ForeignKey('persistent.id'), primary_key=True)
director_id = Column(Integer, ForeignKey('director.id'))
title = Column(String(50))
class Director(PersistentObject):
__tablename__ = 'director'
id = Column(Integer, ForeignKey('persistent.id'), primary_key=True)
movies = relationship("Movie", foreign_keys=Movie.director_id)
name = Column(String(50))
def test_from_subclass(self):
Director = self.classes.Director
s = create_session()
self.assert_compile(
s.query(Director).options(joinedload('*')),
"SELECT director.id AS director_id, persistent.id AS persistent_id, "
"director.name AS director_name, anon_1.movie_id AS anon_1_movie_id, "
"anon_1.persistent_id AS anon_1_persistent_id, "
"anon_1.movie_director_id AS anon_1_movie_director_id, "
"anon_1.movie_title AS anon_1_movie_title "
"FROM persistent JOIN director ON persistent.id = director.id "
"LEFT OUTER JOIN "
"(SELECT persistent.id AS persistent_id, movie.id AS movie_id, "
"movie.director_id AS movie_director_id, movie.title AS movie_title "
"FROM persistent JOIN movie ON persistent.id = movie.id) AS anon_1 "
"ON director.id = anon_1.movie_director_id"
)
def test_integrate(self):
Director = self.classes.Director
Movie = self.classes.Movie
session = Session(testing.db)
rscott = Director(name=u"Ridley Scott")
alien = Movie(title=u"Alien")
brunner = Movie(title=u"Blade Runner")
rscott.movies.append(brunner)
rscott.movies.append(alien)
session.add_all([rscott, alien, brunner])
session.commit()
session.close_all()
d = session.query(Director).options(joinedload('*')).first()
assert len(list(session)) == 3
|
mit
|
zml312570140/XperiaKernel
|
tools/perf/scripts/python/failed-syscalls-by-pid.py
|
11180
|
2058
|
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
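# Example session (event and workload names are illustrative):
#   perf record -e raw_syscalls:sys_exit -a sleep 10
#   perf script -s failed-syscalls-by-pid.py firefox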
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
|
gpl-2.0
|
rogerwang/chromium
|
media/tools/layout_tests/layouttest_analyzer_helpers_unittest.py
|
9
|
9249
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
from datetime import datetime
import os
import pickle
import time
import unittest
import layouttest_analyzer_helpers
class TestLayoutTestAnalyzerHelpers(unittest.TestCase):
def testFindLatestTime(self):
time_list = ['2011-08-18-19', '2011-08-18-22', '2011-08-18-21',
'2012-01-11-21', '.foo']
self.assertEquals(layouttest_analyzer_helpers.FindLatestTime(time_list),
'2012-01-11-21')
def testFindLatestTimeWithEmptyList(self):
time_list = []
self.assertEquals(layouttest_analyzer_helpers.FindLatestTime(time_list),
None)
def testFindLatestTimeWithNoValidStringInList(self):
time_list = ['.foo1', '232232']
self.assertEquals(layouttest_analyzer_helpers.FindLatestTime(time_list),
None)
def GenerateTestDataWholeAndSkip(self):
"""You should call this method if you want to generate test data."""
file_path = os.path.join('test_data', 'base')
analyzerResultMapBase = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
# Remove this first part
m = analyzerResultMapBase.result_map['whole']
del m['media/video-source-type.html']
m = analyzerResultMapBase.result_map['skip']
del m['media/track/track-webvtt-tc004-magicheader.html']
file_path = os.path.join('test_data', 'less')
analyzerResultMapBase.Save(file_path)
file_path = os.path.join('test_data', 'base')
    analyzerResultMapBase = (
        layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
analyzerResultMapBase.result_map['whole']['add1.html'] = True
analyzerResultMapBase.result_map['skip']['add2.html'] = True
file_path = os.path.join('test_data', 'more')
analyzerResultMapBase.Save(file_path)
def GenerateTestDataNonSkip(self):
"""You should call this method if you want to generate test data."""
file_path = os.path.join('test_data', 'base')
    analyzerResultMapBase = (
        layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
m = analyzerResultMapBase.result_map['nonskip']
ex = m['media/media-document-audio-repaint.html']
te_info_map1 = ex['te_info'][0]
te_info_map2 = copy.copy(te_info_map1)
te_info_map2['NEWADDED'] = True
ex['te_info'].append(te_info_map2)
m = analyzerResultMapBase.result_map['nonskip']
file_path = os.path.join('test_data', 'more_te_info')
analyzerResultMapBase.Save(file_path)
def testCompareResultMapsWholeAndSkip(self):
file_path = os.path.join('test_data', 'base')
analyzerResultMapBase = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
file_path = os.path.join('test_data', 'less')
analyzerResultMapLess = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
diff = analyzerResultMapBase.CompareToOtherResultMap(analyzerResultMapLess)
self.assertEquals(diff['skip'][0][0][0],
'media/track/track-webvtt-tc004-magicheader.html')
self.assertEquals(diff['whole'][0][0][0],
'media/video-source-type.html')
file_path = os.path.join('test_data', 'more')
analyzerResultMapMore = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
diff = analyzerResultMapBase.CompareToOtherResultMap(analyzerResultMapMore)
self.assertEquals(diff['whole'][1][0][0], 'add1.html')
self.assertEquals(diff['skip'][1][0][0], 'add2.html')
def testCompareResultMapsNonSkip(self):
file_path = os.path.join('test_data', 'base')
analyzerResultMapBase = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
file_path = os.path.join('test_data', 'more_te_info')
analyzerResultMapMoreTEInfo = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
m = analyzerResultMapBase.CompareToOtherResultMap(
analyzerResultMapMoreTEInfo)
self.assertTrue('NEWADDED' in m['nonskip'][1][0][1][0])
def testGetListOfBugsForNonSkippedTests(self):
file_path = os.path.join('test_data', 'base')
analyzerResultMapBase = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
self.assertEquals(
len(analyzerResultMapBase.GetListOfBugsForNonSkippedTests().keys()),
10)
def RunTestGetRevisionString(self, current_time_str, prev_time_str,
expected_rev_str, expected_simple_rev_str,
expected_rev_number, expected_rev_date,
testname, diff_map_none=False):
current_time = datetime.strptime(current_time_str, '%Y-%m-%d-%H')
current_time = time.mktime(current_time.timetuple())
prev_time = datetime.strptime(prev_time_str, '%Y-%m-%d-%H')
prev_time = time.mktime(prev_time.timetuple())
if diff_map_none:
diff_map = None
else:
diff_map = {
'whole': [[], []],
'skip': [[(testname, 'te_info1')], []],
'nonskip': [[], []],
}
(rev_str, simple_rev_str, rev_number, rev_date) = (
layouttest_analyzer_helpers.GetRevisionString(prev_time, current_time,
diff_map))
self.assertEquals(rev_str, expected_rev_str)
self.assertEquals(simple_rev_str, expected_simple_rev_str)
self.assertEquals(rev_number, expected_rev_number)
self.assertEquals(rev_date, expected_rev_date)
def testGetRevisionString(self):
expected_rev_str = ('<ul><a href="http://trac.webkit.org/changeset?'
'new=94377@trunk/LayoutTests/platform/chromium/'
'test_expectations.txt&old=94366@trunk/LayoutTests/'
'platform/chromium/test_expectations.txt">94366->'
'94377</a>\n'
'<li>[email protected]</li>\n'
'<li>2011-09-01 18:00:23</li>\n'
'<ul><li>-<a href="http://webkit.org/b/63878">'
'BUGWK63878</a> : <a href=\'http://test-results.'
'appspot.com/dashboards/flakiness_dashboard.html#'
'tests=fast/dom/dom-constructors.html\'>fast/dom/'
'dom-constructors.html</a> = TEXT</li>\n</ul></ul>')
expected_simple_rev_str = ('<a href="http://trac.webkit.org/changeset?'
'new=94377@trunk/LayoutTests/platform/chromium/'
'test_expectations.txt&old=94366@trunk/'
'LayoutTests/platform/chromium/'
'test_expectations.txt">94366->94377</a>,')
self.RunTestGetRevisionString('2011-09-02-00', '2011-09-01-00',
expected_rev_str, expected_simple_rev_str,
94377, '2011-09-01 18:00:23',
'fast/dom/dom-constructors.html')
def testGetRevisionStringNoneDiffMap(self):
self.RunTestGetRevisionString('2011-09-02-00', '2011-09-01-00', '', '',
'', '', '', diff_map_none=True)
def testGetRevisionStringNoMatchingTest(self):
self.RunTestGetRevisionString('2011-09-01-00', '2011-09-02-00', '', '',
'', '', 'foo1.html')
def testReplaceLineInFile(self):
file_path = os.path.join('test_data', 'inplace.txt')
f = open(file_path, 'w')
f.write('Hello')
f.close()
layouttest_analyzer_helpers.ReplaceLineInFile(
file_path, 'Hello', 'Goodbye')
f = open(file_path, 'r')
self.assertEquals(f.readline(), 'Goodbye')
f.close()
layouttest_analyzer_helpers.ReplaceLineInFile(
file_path, 'Bye', 'Hello')
f = open(file_path, 'r')
self.assertEquals(f.readline(), 'Goodbye')
f.close()
def testFindLatestResultWithNoData(self):
self.assertFalse(
layouttest_analyzer_helpers.FindLatestResult('test_data'))
def testConvertToCSVText(self):
file_path = os.path.join('test_data', 'base')
analyzerResultMapBase = (
layouttest_analyzer_helpers.AnalyzerResultMap.Load(file_path))
data, issues_txt = analyzerResultMapBase.ConvertToCSVText('11-10-10-2011')
self.assertEquals(data, '11-10-10-2011,204,36,10,95')
expected_issues_txt = """\
BUGWK,66310,TEXT PASS,media/media-blocked-by-beforeload.html,DEBUG TEXT PASS,\
media/video-source-error.html,
BUGCR,86714,GPU IMAGE CRASH MAC,media/video-zoom.html,GPU IMAGE CRASH MAC,\
media/video-controls-rendering.html,
BUGCR,74102,GPU IMAGE PASS LINUX,media/video-controls-rendering.html,
BUGWK,55718,TEXT IMAGE IMAGE+TEXT,media/media-document-audio-repaint.html,
BUGCR,78376,TIMEOUT,http/tests/media/video-play-stall-seek.html,
BUGCR,59415,WIN TEXT TIMEOUT PASS,media/video-loop.html,
BUGCR,72223,IMAGE PASS,media/video-frame-accurate-seek.html,
BUGCR,75354,TEXT IMAGE IMAGE+TEXT,media/media-document-audio-repaint.html,
BUGCR,73609,TEXT,http/tests/media/video-play-stall.html,
BUGWK,64003,DEBUG TEXT MAC PASS,media/video-delay-load-event.html,
"""
self.assertEquals(issues_txt, expected_issues_txt)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
LuaDist/scite
|
scripts/CheckMentioned.py
|
1
|
1510
|
#!/usr/bin/env python3
# CheckMentioned.py
# Find all the symbols in scintilla/include/Scintilla.h and check if they
# are mentioned in scintilla/doc/ScintillaDoc.html.
import string
uninteresting = {
"SCINTILLA_H", "SCI_START", "SCI_LEXER_START", "SCI_OPTIONAL_START",
# These archaic names are #defined to the Sci_ prefixed modern equivalents.
# They are not documented so they are not used in new code.
"CharacterRange", "TextRange", "TextToFind", "RangeToFormat",
}
srcRoot = "../.."
incFileName = srcRoot + "/scintilla/include/Scintilla.h"
docFileName = srcRoot + "/scintilla/doc/ScintillaDoc.html"
try: # Old Python
identCharacters = "_" + string.letters + string.digits
except AttributeError: # Python 3.x
identCharacters = "_" + string.ascii_letters + string.digits
# Convert all punctuation characters except '_' into spaces.
def depunctuate(s):
d = ""
for ch in s:
if ch in identCharacters:
d = d + ch
else:
d = d + " "
return d
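# For example (hypothetical input):
#   depunctuate("SCI_SETTEXT(len, text)") -> "SCI_SETTEXT len  text "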
symbols = {}
incFile = open(incFileName, "rt")
for line in incFile.readlines():
if line.startswith("#define"):
identifier = line.split()[1]
symbols[identifier] = 0
incFile.close()
docFile = open(docFileName, "rt")
for line in docFile.readlines():
for word in depunctuate(line).split():
if word in symbols.keys():
symbols[word] = 1
docFile.close()
identifiersSorted = list(symbols.keys())
identifiersSorted.sort()
for identifier in identifiersSorted:
if not symbols[identifier] and identifier not in uninteresting:
print(identifier)
|
isc
|
schleichdi2/OPENNFR-6.0-CORE
|
bitbake/lib/bb/ui/uievent.py
|
4
|
5163
|
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
#
# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
# Copyright (C) 2006 - 2007 Richard Purdie
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
Use this class to fork off a thread to receive event callbacks from the bitbake
server and queue them for the UI to process. This process must be used to avoid
client/server deadlocks.
"""
import socket, threading, pickle, collections
import logging
import bb
import bb.utils
from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
logger = logging.getLogger(__name__)
class BBUIEventQueue:
    def __init__(self, BBServer, clientinfo=("localhost", 0)):
self.eventQueue = []
self.eventQueueLock = threading.Lock()
self.eventQueueNotify = threading.Event()
self.BBServer = BBServer
self.clientinfo = clientinfo
server = UIXMLRPCServer(self.clientinfo)
self.host, self.port = server.socket.getsockname()
server.register_function( self.system_quit, "event.quit" )
server.register_function( self.send_event, "event.sendpickle" )
server.socket.settimeout(1)
self.EventHandle = None
        # The event handler registration may fail here because the cooker is in an
        # invalid state. This is a transient situation, so retry a few times
        # before giving up.
for count_tries in range(5):
ret = self.BBServer.registerEventHandler(self.host, self.port)
if isinstance(ret, collections.Iterable):
self.EventHandle, error = ret
else:
self.EventHandle = ret
error = ""
            if self.EventHandle is not None:
break
errmsg = "Could not register UI event handler. Error: %s, host %s, "\
"port %d" % (error, self.host, self.port)
bb.warn("%s, retry" % errmsg)
import time
time.sleep(1)
else:
raise Exception(errmsg)
self.server = server
        self.t = threading.Thread(target=self.startCallbackHandler)
        self.t.setDaemon(True)
self.t.start()
def getEvent(self):
self.eventQueueLock.acquire()
if len(self.eventQueue) == 0:
self.eventQueueLock.release()
return None
item = self.eventQueue.pop(0)
if len(self.eventQueue) == 0:
self.eventQueueNotify.clear()
self.eventQueueLock.release()
return item
def waitEvent(self, delay):
self.eventQueueNotify.wait(delay)
return self.getEvent()
def queue_event(self, event):
self.eventQueueLock.acquire()
self.eventQueue.append(event)
self.eventQueueNotify.set()
self.eventQueueLock.release()
def send_event(self, event):
self.queue_event(pickle.loads(event))
def startCallbackHandler(self):
self.server.timeout = 1
bb.utils.set_process_name("UIEventQueue")
while not self.server.quit:
try:
self.server.handle_request()
except Exception as e:
import traceback
logger.error("BBUIEventQueue.startCallbackHandler: Exception while trying to handle request: %s\n%s" % (e, traceback.format_exc()))
self.server.server_close()
def system_quit( self ):
"""
Shut down the callback thread
"""
try:
self.BBServer.unregisterEventHandler(self.EventHandle)
except:
pass
self.server.quit = True
class UIXMLRPCServer (SimpleXMLRPCServer):
def __init__( self, interface ):
self.quit = False
SimpleXMLRPCServer.__init__( self,
interface,
requestHandler=SimpleXMLRPCRequestHandler,
logRequests=False, allow_none=True, use_builtin_types=True)
def get_request(self):
while not self.quit:
try:
sock, addr = self.socket.accept()
sock.settimeout(1)
return (sock, addr)
except socket.timeout:
pass
return (None, None)
def close_request(self, request):
if request is None:
return
SimpleXMLRPCServer.close_request(self, request)
def process_request(self, request, client_address):
if request is None:
return
SimpleXMLRPCServer.process_request(self, request, client_address)
|
gpl-2.0
|
pfnet/chainercv
|
chainercv/utils/testing/assertions/assert_is_point.py
|
3
|
2196
|
import numpy as np
def assert_is_point(point, visible=None, size=None, n_point=None):
"""Checks if points satisfy the format.
This function checks if given points satisfy the format and
raises an :class:`AssertionError` when the points violate the convention.
Args:
point (~numpy.ndarray): Points to be checked.
visible (~numpy.ndarray): Visibility of the points.
If this is :obj:`None`, all points are regarded as visible.
size (tuple of ints): The size of an image.
If this argument is specified,
the coordinates of visible points are checked to be
within the image.
n_point (int): If specified, the number of points in each object is
expected to be :obj:`n_point`.
"""
for i, pnt in enumerate(point):
assert isinstance(pnt, np.ndarray), \
'pnt must be a numpy.ndarray.'
assert pnt.dtype == np.float32, \
'The type of pnt must be numpy.float32.'
assert pnt.shape[1:] == (2,), \
'The shape of pnt must be (*, 2).'
if n_point is not None:
assert pnt.shape[0] == n_point, \
'The number of points should always be n_point'
if visible is not None:
assert len(point) == len(visible), \
'The length of point and visible should be the same.'
vsble = visible[i]
            assert isinstance(vsble, np.ndarray), \
                'visible must be a numpy.ndarray.'
assert vsble.dtype == np.bool, \
'The type of visible must be numpy.bool.'
assert vsble.ndim == 1, \
'The dimensionality of a visible must be one.'
assert vsble.shape[0] == pnt.shape[0], \
'The size of the first axis should be the same for ' \
'corresponding pnt and vsble.'
visible_pnt = pnt[vsble]
else:
visible_pnt = pnt
if size is not None:
assert (visible_pnt >= 0).all() and (visible_pnt <= size).all(),\
'The coordinates of visible points ' \
'should not exceed the size of image.'
|
mit
|
vslavik/poedit
|
deps/boost/tools/build/test/libzstd.py
|
6
|
3903
|
#!/usr/bin/python
# Copy-paste-modify from zlib.py
# Copyright (C) 2013 Steven Watanabe
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
import MockToolset
t = BoostBuild.Tester(arguments=['toolset=mock', '--ignore-site-config', '--user-config='], pass_toolset=0)
MockToolset.create(t)
# Generic definitions that aren't configuration specific
common_stuff = '''
source_file('test.cpp', 'test.cpp')
source_file('main.cpp', 'int main() {}')
source_file('zstd.h.cpp', '#include <zstd.h>\\n')
action('-c -x c++ $main.cpp -o $main.o')
'''
t.write('test.cpp', 'test.cpp')
# Default initialization - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using zstd ;
exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --static-lib=zstd -o $config.exe')
action('-c -x c++ $zstd.h.cpp -o $zstd.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --static-lib=zstd -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Default initialization - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using zstd ;
exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o --shared-lib=zstd -o $config.exe')
action('-c -x c++ $zstd.h.cpp -o $zstd.h.o')
action('-c -x c++ $test.cpp -o $test.o')
action('$test.o --shared-lib=zstd -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - static library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
""")
t.write('zstd/zstd.h', 'zstd')
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./zstd --static-lib=myzstd -o $config.exe')
action('-c -x c++ $test.cpp -I./zstd -o $test.o')
action('$test.o -L./zstd --static-lib=myzstd -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - shared library
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
exe test : test.cpp /zstd//zstd : : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./zstd --shared-lib=myzstd -o $config.exe')
action('-c -x c++ $test.cpp -I./zstd -o $test.o')
action('$test.o -L./zstd --shared-lib=myzstd -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
# Initialization in explicit location - both static and shared libraries
t.rm('bin')
t.write("Jamroot.jam", """
path-constant here : . ;
using zstd : : <name>myzstd <include>$(here)/zstd <search>$(here)/zstd ;
exe test : test.cpp /zstd//zstd
: <link>shared:<define>SHARED : <link>static <link>shared ;
""")
MockToolset.set_expected(t, common_stuff + '''
action('$main.o -L./zstd --static-lib=myzstd -o $config.exe')
action('$main.o -L./zstd --shared-lib=myzstd -o $config.exe')
action('-c -x c++ $test.cpp -I./zstd -o $test-static.o')
action('-c -x c++ $test.cpp -I./zstd -DSHARED -o $test-shared.o')
action('$test-static.o -L./zstd --static-lib=myzstd -o $test')
action('$test-shared.o -L./zstd --shared-lib=myzstd -o $test')
''')
t.run_build_system()
t.expect_addition('bin/mock/debug/test.exe')
t.expect_addition('bin/mock/debug/link-static/test.exe')
t.cleanup()
|
mit
|
mgit-at/ansible
|
lib/ansible/plugins/filter/network.py
|
7
|
15679
|
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
import os
import traceback
import string
from xml.etree.ElementTree import fromstring
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.network.common.utils import Template
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils.common._collections_compat import Mapping
from ansible.errors import AnsibleError, AnsibleFilterError
from ansible.utils.display import Display
from ansible.utils.encrypt import random_password
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
try:
import textfsm
HAS_TEXTFSM = True
except ImportError:
HAS_TEXTFSM = False
try:
from passlib.hash import md5_crypt
HAS_PASSLIB = True
except ImportError:
HAS_PASSLIB = False
display = Display()
def re_matchall(regex, value):
objects = list()
for match in re.findall(regex.pattern, value, re.M):
obj = {}
if regex.groupindex:
for name, index in iteritems(regex.groupindex):
if len(regex.groupindex) == 1:
obj[name] = match
else:
obj[name] = match[index - 1]
objects.append(obj)
return objects
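# For example (hypothetical pattern and device output):
#   re_matchall(re.compile(r'^(?P<name>\S+) is up'), output)
#   -> [{'name': 'Ethernet1'}, {'name': 'Ethernet2'}]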
def re_search(regex, value):
obj = {}
    # Flags such as re.M must be given to re.compile(); the second positional
    # argument of a compiled pattern's search() is a start offset, not flags.
    match = regex.search(value)
if match:
items = list(match.groups())
if regex.groupindex:
for name, index in iteritems(regex.groupindex):
obj[name] = items[index - 1]
return obj
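# parse_cli walks a YAML spec; a minimal (hypothetical) template might look
# like this, yielding a list of {'name': ...} dicts under obj['interfaces']:
#
#   keys:
#     interfaces:
#       value:
#         name: "{{ item.name }}"
#       items: '^(?P<name>\S+) is up'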
def parse_cli(output, tmpl):
if not isinstance(output, string_types):
raise AnsibleError("parse_cli input should be a string, but was given a input of %s" % (type(output)))
if not os.path.exists(tmpl):
raise AnsibleError('unable to locate parse_cli template: %s' % tmpl)
try:
template = Template()
except ImportError as exc:
raise AnsibleError(to_native(exc))
    if not HAS_YAML:
        raise AnsibleError('parse_cli filter requires PyYAML library to be installed')
    with open(tmpl) as f:
        spec = yaml.safe_load(f.read())
obj = {}
for name, attrs in iteritems(spec['keys']):
value = attrs['value']
try:
variables = spec.get('vars', {})
value = template(value, variables)
        except Exception:
pass
if 'start_block' in attrs and 'end_block' in attrs:
start_block = re.compile(attrs['start_block'])
end_block = re.compile(attrs['end_block'])
blocks = list()
lines = None
block_started = False
for line in output.split('\n'):
match_start = start_block.match(line)
match_end = end_block.match(line)
if match_start:
lines = list()
lines.append(line)
block_started = True
elif match_end:
if lines:
lines.append(line)
blocks.append('\n'.join(lines))
block_started = False
elif block_started:
if lines:
lines.append(line)
regex_items = [re.compile(r) for r in attrs['items']]
objects = list()
for block in blocks:
if isinstance(value, Mapping) and 'key' not in value:
items = list()
for regex in regex_items:
match = regex.search(block)
if match:
item_values = match.groupdict()
item_values['match'] = list(match.groups())
items.append(item_values)
else:
items.append(None)
obj = {}
for k, v in iteritems(value):
try:
obj[k] = template(v, {'item': items}, fail_on_undefined=False)
                        except Exception:
obj[k] = None
objects.append(obj)
elif isinstance(value, Mapping):
items = list()
for regex in regex_items:
match = regex.search(block)
if match:
item_values = match.groupdict()
item_values['match'] = list(match.groups())
items.append(item_values)
else:
items.append(None)
key = template(value['key'], {'item': items})
values = dict([(k, template(v, {'item': items})) for k, v in iteritems(value['values'])])
objects.append({key: values})
return objects
elif 'items' in attrs:
regexp = re.compile(attrs['items'])
when = attrs.get('when')
conditional = "{%% if %s %%}True{%% else %%}False{%% endif %%}" % when
if isinstance(value, Mapping) and 'key' not in value:
values = list()
for item in re_matchall(regexp, output):
entry = {}
for item_key, item_value in iteritems(value):
entry[item_key] = template(item_value, {'item': item})
if when:
if template(conditional, {'item': entry}):
values.append(entry)
else:
values.append(entry)
obj[name] = values
elif isinstance(value, Mapping):
values = dict()
for item in re_matchall(regexp, output):
entry = {}
for item_key, item_value in iteritems(value['values']):
entry[item_key] = template(item_value, {'item': item})
key = template(value['key'], {'item': item})
if when:
if template(conditional, {'item': {'key': key, 'value': entry}}):
values[key] = entry
else:
values[key] = entry
obj[name] = values
else:
item = re_search(regexp, output)
obj[name] = template(value, {'item': item})
else:
obj[name] = value
return obj
def parse_cli_textfsm(value, template):
if not HAS_TEXTFSM:
raise AnsibleError('parse_cli_textfsm filter requires TextFSM library to be installed')
if not isinstance(value, string_types):
raise AnsibleError("parse_cli_textfsm input should be a string, but was given a input of %s" % (type(value)))
if not os.path.exists(template):
raise AnsibleError('unable to locate parse_cli_textfsm template: %s' % template)
    try:
        with open(template) as f:
            re_table = textfsm.TextFSM(f)
    except IOError as exc:
        raise AnsibleError(to_native(exc))
    fsm_results = re_table.ParseText(value)
results = list()
for item in fsm_results:
results.append(dict(zip(re_table.header, item)))
return results
def _extract_param(template, root, attrs, value):
key = None
when = attrs.get('when')
conditional = "{%% if %s %%}True{%% else %%}False{%% endif %%}" % when
param_to_xpath_map = attrs['items']
if isinstance(value, Mapping):
key = value.get('key', None)
if key:
value = value['values']
entries = dict() if key else list()
for element in root.findall(attrs['top']):
entry = dict()
item_dict = dict()
for param, param_xpath in iteritems(param_to_xpath_map):
fields = None
try:
fields = element.findall(param_xpath)
            except Exception:
display.warning("Failed to evaluate value of '%s' with XPath '%s'.\nUnexpected error: %s." % (param, param_xpath, traceback.format_exc()))
tags = param_xpath.split('/')
# check if xpath ends with attribute.
# If yes set attribute key/value dict to param value in case attribute matches
# else if it is a normal xpath assign matched element text value.
if len(tags) and tags[-1].endswith(']'):
if fields:
if len(fields) > 1:
item_dict[param] = [field.attrib for field in fields]
else:
item_dict[param] = fields[0].attrib
else:
item_dict[param] = {}
else:
if fields:
if len(fields) > 1:
item_dict[param] = [field.text for field in fields]
else:
item_dict[param] = fields[0].text
else:
item_dict[param] = None
if isinstance(value, Mapping):
for item_key, item_value in iteritems(value):
entry[item_key] = template(item_value, {'item': item_dict})
else:
entry = template(value, {'item': item_dict})
if key:
expanded_key = template(key, {'item': item_dict})
if when:
if template(conditional, {'item': {'key': expanded_key, 'value': entry}}):
entries[expanded_key] = entry
else:
entries[expanded_key] = entry
else:
if when:
if template(conditional, {'item': entry}):
entries.append(entry)
else:
entries.append(entry)
return entries
def parse_xml(output, tmpl):
if not os.path.exists(tmpl):
raise AnsibleError('unable to locate parse_xml template: %s' % tmpl)
if not isinstance(output, string_types):
        raise AnsibleError('parse_xml works on string input, but was given input of %s' % type(output))
root = fromstring(output)
try:
template = Template()
except ImportError as exc:
raise AnsibleError(to_native(exc))
    if not HAS_YAML:
        raise AnsibleError('parse_xml filter requires PyYAML library to be installed')
    with open(tmpl) as f:
        spec = yaml.safe_load(f.read())
obj = {}
for name, attrs in iteritems(spec['keys']):
value = attrs['value']
try:
variables = spec.get('vars', {})
value = template(value, variables)
        except Exception:
pass
if 'items' in attrs:
obj[name] = _extract_param(template, root, attrs, value)
else:
obj[name] = value
return obj
def type5_pw(password, salt=None):
if not HAS_PASSLIB:
raise AnsibleFilterError('type5_pw filter requires PassLib library to be installed')
if not isinstance(password, string_types):
raise AnsibleFilterError("type5_pw password input should be a string, but was given a input of %s" % (type(password).__name__))
salt_chars = u''.join((
to_text(string.ascii_letters),
to_text(string.digits),
u'./'
))
if salt is not None and not isinstance(salt, string_types):
raise AnsibleFilterError("type5_pw salt input should be a string, but was given a input of %s" % (type(salt).__name__))
elif not salt:
salt = random_password(length=4, chars=salt_chars)
elif not set(salt) <= set(salt_chars):
raise AnsibleFilterError("type5_pw salt used inproper characters, must be one of %s" % (salt_chars))
encrypted_password = md5_crypt.encrypt(password, salt=salt)
return encrypted_password
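# e.g. (hypothetical values) type5_pw('cisco', salt='avs9') returns a hash in
# the usual md5-crypt layout '$1$<salt>$<digest>', here '$1$avs9$...'.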
def hash_salt(password):
split_password = password.split("$")
if len(split_password) != 4:
        raise AnsibleFilterError('Could not parse salt out of password correctly from {0}'.format(password))
else:
return split_password[2]
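# e.g. (hypothetical hash) hash_salt('$1$avs9$125WZ...') -> 'avs9'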
def comp_type5(unencrypted_password, encrypted_password, return_original=False):
salt = hash_salt(encrypted_password)
if type5_pw(unencrypted_password, salt) == encrypted_password:
if return_original is True:
return encrypted_password
else:
return True
return False
def vlan_parser(vlan_list, first_line_len=48, other_line_len=44):
'''
Input: Unsorted list of vlan integers
Output: Sorted string list of integers according to IOS-like vlan list rules
1. Vlans are listed in ascending order
2. Runs of 3 or more consecutive vlans are listed with a dash
3. The first line of the list can be first_line_len characters long
4. Subsequent list lines can be other_line_len characters
'''
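    # For example (hypothetical input):
    #   vlan_parser([100, 1688, 3002, 3003, 3004, 3005, 3102, 3103])
    #   -> ['100,1688,3002-3005,3102,3103']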
# Sort and remove duplicates
sorted_list = sorted(set(vlan_list))
if sorted_list[0] < 1 or sorted_list[-1] > 4094:
raise AnsibleFilterError('Valid VLAN range is 1-4094')
parse_list = []
idx = 0
while idx < len(sorted_list):
start = idx
end = start
while end < len(sorted_list) - 1:
if sorted_list[end + 1] - sorted_list[end] == 1:
end += 1
else:
break
if start == end:
# Single VLAN
parse_list.append(str(sorted_list[idx]))
elif start + 1 == end:
# Run of 2 VLANs
parse_list.append(str(sorted_list[start]))
parse_list.append(str(sorted_list[end]))
else:
# Run of 3 or more VLANs
parse_list.append(str(sorted_list[start]) + '-' + str(sorted_list[end]))
idx = end + 1
line_count = 0
result = ['']
for vlans in parse_list:
# First line (" switchport trunk allowed vlan ")
if line_count == 0:
if len(result[line_count] + vlans) > first_line_len:
result.append('')
line_count += 1
result[line_count] += vlans + ','
else:
result[line_count] += vlans + ','
# Subsequent lines (" switchport trunk allowed vlan add ")
else:
if len(result[line_count] + vlans) > other_line_len:
result.append('')
line_count += 1
result[line_count] += vlans + ','
else:
result[line_count] += vlans + ','
# Remove trailing orphan commas
for idx in range(0, len(result)):
result[idx] = result[idx].rstrip(',')
# Sometimes text wraps to next line, but there are no remaining VLANs
if '' in result:
result.remove('')
return result
class FilterModule(object):
"""Filters for working with output from network devices"""
filter_map = {
'parse_cli': parse_cli,
'parse_cli_textfsm': parse_cli_textfsm,
'parse_xml': parse_xml,
'type5_pw': type5_pw,
'hash_salt': hash_salt,
'comp_type5': comp_type5,
'vlan_parser': vlan_parser
}
def filters(self):
return self.filter_map
|
gpl-3.0
|
dbaxa/django
|
django/conf/locale/hu/formats.py
|
504
|
1117
|
# -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y. F j.'
TIME_FORMAT = 'G.i'
DATETIME_FORMAT = 'Y. F j. G.i'
YEAR_MONTH_FORMAT = 'Y. F'
MONTH_DAY_FORMAT = 'F j.'
SHORT_DATE_FORMAT = 'Y.m.d.'
SHORT_DATETIME_FORMAT = 'Y.m.d. G.i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%Y.%m.%d.', # '2006.10.25.'
]
TIME_INPUT_FORMATS = [
'%H.%M.%S', # '14.30.59'
'%H.%M', # '14.30'
]
DATETIME_INPUT_FORMATS = [
'%Y.%m.%d. %H.%M.%S', # '2006.10.25. 14.30.59'
'%Y.%m.%d. %H.%M.%S.%f', # '2006.10.25. 14.30.59.000200'
'%Y.%m.%d. %H.%M', # '2006.10.25. 14.30'
'%Y.%m.%d.', # '2006.10.25.'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = ' ' # Non-breaking space
NUMBER_GROUPING = 3
|
bsd-3-clause
|
ionomy/ion
|
test/functional/p2p-leaktests.py
|
2
|
5147
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't.
"""
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
banscore = 10
class CLazyNode(NodeConnCB):
def __init__(self):
super().__init__()
self.unexpected_msg = False
self.ever_connected = False
def bad_message(self, message):
self.unexpected_msg = True
self.log.info("should not have received message: %s" % message.command)
def on_open(self, conn):
self.connected = True
self.ever_connected = True
def on_version(self, conn, message): self.bad_message(message)
def on_verack(self, conn, message): self.bad_message(message)
def on_reject(self, conn, message): self.bad_message(message)
def on_inv(self, conn, message): self.bad_message(message)
def on_addr(self, conn, message): self.bad_message(message)
def on_alert(self, conn, message): self.bad_message(message)
def on_getdata(self, conn, message): self.bad_message(message)
def on_getblocks(self, conn, message): self.bad_message(message)
def on_tx(self, conn, message): self.bad_message(message)
def on_block(self, conn, message): self.bad_message(message)
def on_getaddr(self, conn, message): self.bad_message(message)
def on_headers(self, conn, message): self.bad_message(message)
def on_getheaders(self, conn, message): self.bad_message(message)
def on_ping(self, conn, message): self.bad_message(message)
    def on_mempool(self, conn, message): self.bad_message(message)
def on_pong(self, conn, message): self.bad_message(message)
def on_sendheaders(self, conn, message): self.bad_message(message)
def on_sendcmpct(self, conn, message): self.bad_message(message)
def on_cmpctblock(self, conn, message): self.bad_message(message)
def on_getblocktxn(self, conn, message): self.bad_message(message)
def on_blocktxn(self, conn, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
# send a bunch of veracks without sending a message. This should get us disconnected.
# NOTE: implementation-specific check here. Remove if bitcoind ban behavior changes
def on_open(self, conn):
super().on_open(conn)
for i in range(banscore):
self.send_message(msg_verack())
def on_reject(self, conn, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
class CNodeNoVersionIdle(CLazyNode):
def __init__(self):
super().__init__()
# Node that sends a version but not a verack.
class CNodeNoVerackIdle(CLazyNode):
def __init__(self):
self.version_received = False
super().__init__()
def on_reject(self, conn, message): pass
def on_verack(self, conn, message): pass
# When version is received, don't reply with a verack. Instead, see if the
# node will give us a message that it shouldn't. This is not an exhaustive
# list!
def on_version(self, conn, message):
self.version_received = True
conn.send_message(msg_ping())
conn.send_message(msg_getaddr())
class P2PLeakTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [['-banscore='+str(banscore)]]
def run_test(self):
no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False)
no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False)
no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
network_thread_start()
wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
# Mine a block and make sure that it's not sent to the connected nodes
self.nodes[0].generate(1)
#Give the node enough time to possibly leak out a message
time.sleep(5)
#This node should have been banned
assert not no_version_bannode.connected
self.nodes[0].disconnect_p2ps()
# Make sure no unexpected messages came in
assert(no_version_bannode.unexpected_msg == False)
assert(no_version_idlenode.unexpected_msg == False)
assert(no_verack_idlenode.unexpected_msg == False)
if __name__ == '__main__':
P2PLeakTest().main()
|
mit
|
CM-Tab-S/stock_chagalllte
|
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/Util.py
|
12527
|
1935
|
# Util.py - Python extension for perf script, miscellaneous utility code
#
# Copyright (C) 2010 by Tom Zanussi <[email protected]>
#
# This software may be distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import errno, os
FUTEX_WAIT = 0
FUTEX_WAKE = 1
FUTEX_PRIVATE_FLAG = 128
FUTEX_CLOCK_REALTIME = 256
FUTEX_CMD_MASK = ~(FUTEX_PRIVATE_FLAG | FUTEX_CLOCK_REALTIME)
NSECS_PER_SEC = 1000000000
def avg(total, n):
return total / n
def nsecs(secs, nsecs):
return secs * NSECS_PER_SEC + nsecs
def nsecs_secs(nsecs):
return nsecs / NSECS_PER_SEC
def nsecs_nsecs(nsecs):
return nsecs % NSECS_PER_SEC
def nsecs_str(nsecs):
str = "%5u.%09u" % (nsecs_secs(nsecs), nsecs_nsecs(nsecs)),
return str
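# e.g. nsecs(1, 500000000) -> 1500000000 and
#      nsecs_str(1500000000) -> "    1.500000000"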
def add_stats(dict, key, value):
if not dict.has_key(key):
dict[key] = (value, value, value, 1)
else:
min, max, avg, count = dict[key]
if value < min:
min = value
if value > max:
max = value
avg = (avg + value) / 2
dict[key] = (min, max, avg, count + 1)
def clear_term():
print("\x1b[H\x1b[2J")
audit_package_warned = False
try:
import audit
machine_to_id = {
'x86_64': audit.MACH_86_64,
'alpha' : audit.MACH_ALPHA,
'ia64' : audit.MACH_IA64,
'ppc' : audit.MACH_PPC,
'ppc64' : audit.MACH_PPC64,
's390' : audit.MACH_S390,
's390x' : audit.MACH_S390X,
'i386' : audit.MACH_X86,
'i586' : audit.MACH_X86,
'i686' : audit.MACH_X86,
}
try:
machine_to_id['armeb'] = audit.MACH_ARMEB
except:
pass
machine_id = machine_to_id[os.uname()[4]]
except:
if not audit_package_warned:
audit_package_warned = True
print "Install the audit-libs-python package to get syscall names"
def syscall_name(id):
try:
return audit.audit_syscall_to_name(id, machine_id)
except:
return str(id)
def strerror(nr):
try:
return errno.errorcode[abs(nr)]
except:
return "Unknown %d errno" % nr
|
gpl-2.0
|
KaranToor/MA450
|
google-cloud-sdk/lib/googlecloudsdk/third_party/appengine/datastore/entity_v4_pb.py
|
7
|
52044
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: apphosting/datastore/entity_v4.proto
from googlecloudsdk.third_party.appengine.proto import ProtocolBuffer
import array
import thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class PartitionId(ProtocolBuffer.ProtocolMessage):
# Constants values
MAX_DIMENSION_TAG = 100
_Constants_NAMES = {
100: "MAX_DIMENSION_TAG",
}
def Constants_Name(cls, x): return cls._Constants_NAMES.get(x, "")
Constants_Name = classmethod(Constants_Name)
has_dataset_id_ = 0
dataset_id_ = ""
has_namespace_ = 0
namespace_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def dataset_id(self): return self.dataset_id_
def set_dataset_id(self, x):
self.has_dataset_id_ = 1
self.dataset_id_ = x
def clear_dataset_id(self):
if self.has_dataset_id_:
self.has_dataset_id_ = 0
self.dataset_id_ = ""
def has_dataset_id(self): return self.has_dataset_id_
def namespace(self): return self.namespace_
def set_namespace(self, x):
self.has_namespace_ = 1
self.namespace_ = x
def clear_namespace(self):
if self.has_namespace_:
self.has_namespace_ = 0
self.namespace_ = ""
def has_namespace(self): return self.has_namespace_
def MergeFrom(self, x):
assert x is not self
if (x.has_dataset_id()): self.set_dataset_id(x.dataset_id())
if (x.has_namespace()): self.set_namespace(x.namespace())
def Equals(self, x):
if x is self: return 1
if self.has_dataset_id_ != x.has_dataset_id_: return 0
if self.has_dataset_id_ and self.dataset_id_ != x.dataset_id_: return 0
if self.has_namespace_ != x.has_namespace_: return 0
if self.has_namespace_ and self.namespace_ != x.namespace_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_dataset_id_): n += 1 + self.lengthString(len(self.dataset_id_))
if (self.has_namespace_): n += 1 + self.lengthString(len(self.namespace_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_dataset_id_): n += 1 + self.lengthString(len(self.dataset_id_))
if (self.has_namespace_): n += 1 + self.lengthString(len(self.namespace_))
return n
def Clear(self):
self.clear_dataset_id()
self.clear_namespace()
def OutputUnchecked(self, out):
if (self.has_dataset_id_):
out.putVarInt32(26)
out.putPrefixedString(self.dataset_id_)
if (self.has_namespace_):
out.putVarInt32(34)
out.putPrefixedString(self.namespace_)
def OutputPartial(self, out):
if (self.has_dataset_id_):
out.putVarInt32(26)
out.putPrefixedString(self.dataset_id_)
if (self.has_namespace_):
out.putVarInt32(34)
out.putPrefixedString(self.namespace_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 26:
self.set_dataset_id(d.getPrefixedString())
continue
if tt == 34:
self.set_namespace(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_dataset_id_: res+=prefix+("dataset_id: %s\n" % self.DebugFormatString(self.dataset_id_))
if self.has_namespace_: res+=prefix+("namespace: %s\n" % self.DebugFormatString(self.namespace_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kdataset_id = 3
knamespace = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
3: "dataset_id",
4: "namespace",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.PartitionId'
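  # Usage sketch (assuming the ProtocolBuffer.ProtocolMessage base API, e.g.
  # Encode() and MergeFromString(), which this generated class inherits):
  #   pid = PartitionId()
  #   pid.set_dataset_id('s~example-app')   # hypothetical dataset id
  #   copy = PartitionId(pid.Encode())
  #   assert copy.Equals(pid)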
class Key_PathElement(ProtocolBuffer.ProtocolMessage):
has_kind_ = 0
kind_ = ""
has_id_ = 0
id_ = 0
has_name_ = 0
name_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def id(self): return self.id_
def set_id(self, x):
self.has_id_ = 1
self.id_ = x
def clear_id(self):
if self.has_id_:
self.has_id_ = 0
self.id_ = 0
def has_id(self): return self.has_id_
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def MergeFrom(self, x):
assert x is not self
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_id()): self.set_id(x.id())
if (x.has_name()): self.set_name(x.name())
def Equals(self, x):
if x is self: return 1
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_id_ != x.has_id_: return 0
if self.has_id_ and self.id_ != x.id_: return 0
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_kind_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: kind not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.kind_))
if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_kind_):
n += 1
n += self.lengthString(len(self.kind_))
if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
return n
def Clear(self):
self.clear_kind()
self.clear_id()
self.clear_name()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.kind_)
if (self.has_id_):
out.putVarInt32(16)
out.putVarInt64(self.id_)
if (self.has_name_):
out.putVarInt32(26)
out.putPrefixedString(self.name_)
def OutputPartial(self, out):
if (self.has_kind_):
out.putVarInt32(10)
out.putPrefixedString(self.kind_)
if (self.has_id_):
out.putVarInt32(16)
out.putVarInt64(self.id_)
if (self.has_name_):
out.putVarInt32(26)
out.putPrefixedString(self.name_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_kind(d.getPrefixedString())
continue
if tt == 16:
self.set_id(d.getVarInt64())
continue
if tt == 26:
self.set_name(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkind = 1
kid = 2
kname = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "kind",
2: "id",
3: "name",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Key_PathElement'
class Key(ProtocolBuffer.ProtocolMessage):
has_partition_id_ = 0
partition_id_ = None
def __init__(self, contents=None):
self.path_element_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def partition_id(self):
if self.partition_id_ is None:
self.lazy_init_lock_.acquire()
try:
if self.partition_id_ is None: self.partition_id_ = PartitionId()
finally:
self.lazy_init_lock_.release()
return self.partition_id_
def mutable_partition_id(self): self.has_partition_id_ = 1; return self.partition_id()
def clear_partition_id(self):
# Warning: this method does not acquire the lock.
if self.has_partition_id_:
self.has_partition_id_ = 0;
if self.partition_id_ is not None: self.partition_id_.Clear()
def has_partition_id(self): return self.has_partition_id_
def path_element_size(self): return len(self.path_element_)
def path_element_list(self): return self.path_element_
def path_element(self, i):
return self.path_element_[i]
def mutable_path_element(self, i):
return self.path_element_[i]
def add_path_element(self):
x = Key_PathElement()
self.path_element_.append(x)
return x
def clear_path_element(self):
self.path_element_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_partition_id()): self.mutable_partition_id().MergeFrom(x.partition_id())
for i in xrange(x.path_element_size()): self.add_path_element().CopyFrom(x.path_element(i))
def Equals(self, x):
if x is self: return 1
if self.has_partition_id_ != x.has_partition_id_: return 0
if self.has_partition_id_ and self.partition_id_ != x.partition_id_: return 0
if len(self.path_element_) != len(x.path_element_): return 0
for e1, e2 in zip(self.path_element_, x.path_element_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_partition_id_ and not self.partition_id_.IsInitialized(debug_strs)): initialized = 0
for p in self.path_element_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSize())
n += 1 * len(self.path_element_)
for i in xrange(len(self.path_element_)): n += self.lengthString(self.path_element_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_partition_id_): n += 1 + self.lengthString(self.partition_id_.ByteSizePartial())
n += 1 * len(self.path_element_)
for i in xrange(len(self.path_element_)): n += self.lengthString(self.path_element_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_partition_id()
self.clear_path_element()
def OutputUnchecked(self, out):
if (self.has_partition_id_):
out.putVarInt32(10)
out.putVarInt32(self.partition_id_.ByteSize())
self.partition_id_.OutputUnchecked(out)
for i in xrange(len(self.path_element_)):
out.putVarInt32(18)
out.putVarInt32(self.path_element_[i].ByteSize())
self.path_element_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_partition_id_):
out.putVarInt32(10)
out.putVarInt32(self.partition_id_.ByteSizePartial())
self.partition_id_.OutputPartial(out)
for i in xrange(len(self.path_element_)):
out.putVarInt32(18)
out.putVarInt32(self.path_element_[i].ByteSizePartial())
self.path_element_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_partition_id().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_path_element().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_partition_id_:
res+=prefix+"partition_id <\n"
res+=self.partition_id_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.path_element_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("path_element%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kpartition_id = 1
kpath_element = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "partition_id",
2: "path_element",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Key'
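# Illustrative sketch (not part of the generated module): building a Key by
# hand with the accessors defined above. The helper name is hypothetical and
# the function is never called at import time.
def _example_build_key():
  key = Key()
  key.mutable_partition_id()       # lazily allocates the PartitionId submessage
  elem = key.add_path_element()    # appends a fresh Key_PathElement
  elem.set_kind('Person')
  elem.set_id(42)
  copy = Key()
  copy.MergeFrom(key)              # field-by-field merge, as defined above
  assert copy.Equals(key)
  return key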
class GeoPoint(ProtocolBuffer.ProtocolMessage):
has_latitude_ = 0
latitude_ = 0.0
has_longitude_ = 0
longitude_ = 0.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def latitude(self): return self.latitude_
def set_latitude(self, x):
self.has_latitude_ = 1
self.latitude_ = x
def clear_latitude(self):
if self.has_latitude_:
self.has_latitude_ = 0
self.latitude_ = 0.0
def has_latitude(self): return self.has_latitude_
def longitude(self): return self.longitude_
def set_longitude(self, x):
self.has_longitude_ = 1
self.longitude_ = x
def clear_longitude(self):
if self.has_longitude_:
self.has_longitude_ = 0
self.longitude_ = 0.0
def has_longitude(self): return self.has_longitude_
def MergeFrom(self, x):
assert x is not self
if (x.has_latitude()): self.set_latitude(x.latitude())
if (x.has_longitude()): self.set_longitude(x.longitude())
def Equals(self, x):
if x is self: return 1
if self.has_latitude_ != x.has_latitude_: return 0
if self.has_latitude_ and self.latitude_ != x.latitude_: return 0
if self.has_longitude_ != x.has_longitude_: return 0
if self.has_longitude_ and self.longitude_ != x.longitude_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_latitude_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: latitude not set.')
if (not self.has_longitude_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: longitude not set.')
return initialized
def ByteSize(self):
n = 0
return n + 18
def ByteSizePartial(self):
n = 0
if (self.has_latitude_):
n += 9
if (self.has_longitude_):
n += 9
return n
def Clear(self):
self.clear_latitude()
self.clear_longitude()
def OutputUnchecked(self, out):
out.putVarInt32(9)
out.putDouble(self.latitude_)
out.putVarInt32(17)
out.putDouble(self.longitude_)
def OutputPartial(self, out):
if (self.has_latitude_):
out.putVarInt32(9)
out.putDouble(self.latitude_)
if (self.has_longitude_):
out.putVarInt32(17)
out.putDouble(self.longitude_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 9:
self.set_latitude(d.getDouble())
continue
if tt == 17:
self.set_longitude(d.getDouble())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_latitude_: res+=prefix+("latitude: %s\n" % self.DebugFormat(self.latitude_))
if self.has_longitude_: res+=prefix+("longitude: %s\n" % self.DebugFormat(self.longitude_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
klatitude = 1
klongitude = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "latitude",
2: "longitude",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.DOUBLE,
2: ProtocolBuffer.Encoder.DOUBLE,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.GeoPoint'
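# Illustrative sketch (not part of the generated module): GeoPoint declares
# both coordinates as required fields, which IsInitialized() enforces.
def _example_geo_point():
  pt = GeoPoint()
  pt.set_latitude(37.42)
  errs = []
  assert not pt.IsInitialized(errs)  # longitude is still missing
  pt.set_longitude(-122.08)
  assert pt.IsInitialized()
  return pt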
class Value(ProtocolBuffer.ProtocolMessage):
has_boolean_value_ = 0
boolean_value_ = 0
has_integer_value_ = 0
integer_value_ = 0
has_double_value_ = 0
double_value_ = 0.0
has_timestamp_microseconds_value_ = 0
timestamp_microseconds_value_ = 0
has_key_value_ = 0
key_value_ = None
has_blob_key_value_ = 0
blob_key_value_ = ""
has_string_value_ = 0
string_value_ = ""
has_blob_value_ = 0
blob_value_ = ""
has_entity_value_ = 0
entity_value_ = None
has_geo_point_value_ = 0
geo_point_value_ = None
has_meaning_ = 0
meaning_ = 0
has_indexed_ = 0
indexed_ = 1
def __init__(self, contents=None):
self.list_value_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def boolean_value(self): return self.boolean_value_
def set_boolean_value(self, x):
self.has_boolean_value_ = 1
self.boolean_value_ = x
def clear_boolean_value(self):
if self.has_boolean_value_:
self.has_boolean_value_ = 0
self.boolean_value_ = 0
def has_boolean_value(self): return self.has_boolean_value_
def integer_value(self): return self.integer_value_
def set_integer_value(self, x):
self.has_integer_value_ = 1
self.integer_value_ = x
def clear_integer_value(self):
if self.has_integer_value_:
self.has_integer_value_ = 0
self.integer_value_ = 0
def has_integer_value(self): return self.has_integer_value_
def double_value(self): return self.double_value_
def set_double_value(self, x):
self.has_double_value_ = 1
self.double_value_ = x
def clear_double_value(self):
if self.has_double_value_:
self.has_double_value_ = 0
self.double_value_ = 0.0
def has_double_value(self): return self.has_double_value_
def timestamp_microseconds_value(self): return self.timestamp_microseconds_value_
def set_timestamp_microseconds_value(self, x):
self.has_timestamp_microseconds_value_ = 1
self.timestamp_microseconds_value_ = x
def clear_timestamp_microseconds_value(self):
if self.has_timestamp_microseconds_value_:
self.has_timestamp_microseconds_value_ = 0
self.timestamp_microseconds_value_ = 0
def has_timestamp_microseconds_value(self): return self.has_timestamp_microseconds_value_
def key_value(self):
if self.key_value_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_value_ is None: self.key_value_ = Key()
finally:
self.lazy_init_lock_.release()
return self.key_value_
def mutable_key_value(self): self.has_key_value_ = 1; return self.key_value()
def clear_key_value(self):
# Warning: this method does not acquire the lock.
if self.has_key_value_:
      self.has_key_value_ = 0
if self.key_value_ is not None: self.key_value_.Clear()
def has_key_value(self): return self.has_key_value_
def blob_key_value(self): return self.blob_key_value_
def set_blob_key_value(self, x):
self.has_blob_key_value_ = 1
self.blob_key_value_ = x
def clear_blob_key_value(self):
if self.has_blob_key_value_:
self.has_blob_key_value_ = 0
self.blob_key_value_ = ""
def has_blob_key_value(self): return self.has_blob_key_value_
def string_value(self): return self.string_value_
def set_string_value(self, x):
self.has_string_value_ = 1
self.string_value_ = x
def clear_string_value(self):
if self.has_string_value_:
self.has_string_value_ = 0
self.string_value_ = ""
def has_string_value(self): return self.has_string_value_
def blob_value(self): return self.blob_value_
def set_blob_value(self, x):
self.has_blob_value_ = 1
self.blob_value_ = x
def clear_blob_value(self):
if self.has_blob_value_:
self.has_blob_value_ = 0
self.blob_value_ = ""
def has_blob_value(self): return self.has_blob_value_
def entity_value(self):
if self.entity_value_ is None:
self.lazy_init_lock_.acquire()
try:
if self.entity_value_ is None: self.entity_value_ = Entity()
finally:
self.lazy_init_lock_.release()
return self.entity_value_
def mutable_entity_value(self): self.has_entity_value_ = 1; return self.entity_value()
def clear_entity_value(self):
# Warning: this method does not acquire the lock.
if self.has_entity_value_:
      self.has_entity_value_ = 0
if self.entity_value_ is not None: self.entity_value_.Clear()
def has_entity_value(self): return self.has_entity_value_
def geo_point_value(self):
if self.geo_point_value_ is None:
self.lazy_init_lock_.acquire()
try:
if self.geo_point_value_ is None: self.geo_point_value_ = GeoPoint()
finally:
self.lazy_init_lock_.release()
return self.geo_point_value_
def mutable_geo_point_value(self): self.has_geo_point_value_ = 1; return self.geo_point_value()
def clear_geo_point_value(self):
# Warning: this method does not acquire the lock.
if self.has_geo_point_value_:
      self.has_geo_point_value_ = 0
if self.geo_point_value_ is not None: self.geo_point_value_.Clear()
def has_geo_point_value(self): return self.has_geo_point_value_
def list_value_size(self): return len(self.list_value_)
def list_value_list(self): return self.list_value_
def list_value(self, i):
return self.list_value_[i]
def mutable_list_value(self, i):
return self.list_value_[i]
def add_list_value(self):
x = Value()
self.list_value_.append(x)
return x
def clear_list_value(self):
self.list_value_ = []
def meaning(self): return self.meaning_
def set_meaning(self, x):
self.has_meaning_ = 1
self.meaning_ = x
def clear_meaning(self):
if self.has_meaning_:
self.has_meaning_ = 0
self.meaning_ = 0
def has_meaning(self): return self.has_meaning_
def indexed(self): return self.indexed_
def set_indexed(self, x):
self.has_indexed_ = 1
self.indexed_ = x
def clear_indexed(self):
if self.has_indexed_:
self.has_indexed_ = 0
self.indexed_ = 1
def has_indexed(self): return self.has_indexed_
def MergeFrom(self, x):
assert x is not self
if (x.has_boolean_value()): self.set_boolean_value(x.boolean_value())
if (x.has_integer_value()): self.set_integer_value(x.integer_value())
if (x.has_double_value()): self.set_double_value(x.double_value())
if (x.has_timestamp_microseconds_value()): self.set_timestamp_microseconds_value(x.timestamp_microseconds_value())
if (x.has_key_value()): self.mutable_key_value().MergeFrom(x.key_value())
if (x.has_blob_key_value()): self.set_blob_key_value(x.blob_key_value())
if (x.has_string_value()): self.set_string_value(x.string_value())
if (x.has_blob_value()): self.set_blob_value(x.blob_value())
if (x.has_entity_value()): self.mutable_entity_value().MergeFrom(x.entity_value())
if (x.has_geo_point_value()): self.mutable_geo_point_value().MergeFrom(x.geo_point_value())
for i in xrange(x.list_value_size()): self.add_list_value().CopyFrom(x.list_value(i))
if (x.has_meaning()): self.set_meaning(x.meaning())
if (x.has_indexed()): self.set_indexed(x.indexed())
def Equals(self, x):
if x is self: return 1
if self.has_boolean_value_ != x.has_boolean_value_: return 0
if self.has_boolean_value_ and self.boolean_value_ != x.boolean_value_: return 0
if self.has_integer_value_ != x.has_integer_value_: return 0
if self.has_integer_value_ and self.integer_value_ != x.integer_value_: return 0
if self.has_double_value_ != x.has_double_value_: return 0
if self.has_double_value_ and self.double_value_ != x.double_value_: return 0
if self.has_timestamp_microseconds_value_ != x.has_timestamp_microseconds_value_: return 0
if self.has_timestamp_microseconds_value_ and self.timestamp_microseconds_value_ != x.timestamp_microseconds_value_: return 0
if self.has_key_value_ != x.has_key_value_: return 0
if self.has_key_value_ and self.key_value_ != x.key_value_: return 0
if self.has_blob_key_value_ != x.has_blob_key_value_: return 0
if self.has_blob_key_value_ and self.blob_key_value_ != x.blob_key_value_: return 0
if self.has_string_value_ != x.has_string_value_: return 0
if self.has_string_value_ and self.string_value_ != x.string_value_: return 0
if self.has_blob_value_ != x.has_blob_value_: return 0
if self.has_blob_value_ and self.blob_value_ != x.blob_value_: return 0
if self.has_entity_value_ != x.has_entity_value_: return 0
if self.has_entity_value_ and self.entity_value_ != x.entity_value_: return 0
if self.has_geo_point_value_ != x.has_geo_point_value_: return 0
if self.has_geo_point_value_ and self.geo_point_value_ != x.geo_point_value_: return 0
if len(self.list_value_) != len(x.list_value_): return 0
for e1, e2 in zip(self.list_value_, x.list_value_):
if e1 != e2: return 0
if self.has_meaning_ != x.has_meaning_: return 0
if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
if self.has_indexed_ != x.has_indexed_: return 0
if self.has_indexed_ and self.indexed_ != x.indexed_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_key_value_ and not self.key_value_.IsInitialized(debug_strs)): initialized = 0
if (self.has_entity_value_ and not self.entity_value_.IsInitialized(debug_strs)): initialized = 0
if (self.has_geo_point_value_ and not self.geo_point_value_.IsInitialized(debug_strs)): initialized = 0
for p in self.list_value_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_boolean_value_): n += 2
if (self.has_integer_value_): n += 1 + self.lengthVarInt64(self.integer_value_)
if (self.has_double_value_): n += 9
if (self.has_timestamp_microseconds_value_): n += 1 + self.lengthVarInt64(self.timestamp_microseconds_value_)
if (self.has_key_value_): n += 1 + self.lengthString(self.key_value_.ByteSize())
if (self.has_blob_key_value_): n += 2 + self.lengthString(len(self.blob_key_value_))
if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_))
if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_))
if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSize())
if (self.has_geo_point_value_): n += 1 + self.lengthString(self.geo_point_value_.ByteSize())
n += 1 * len(self.list_value_)
for i in xrange(len(self.list_value_)): n += self.lengthString(self.list_value_[i].ByteSize())
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
if (self.has_indexed_): n += 2
return n
def ByteSizePartial(self):
n = 0
if (self.has_boolean_value_): n += 2
if (self.has_integer_value_): n += 1 + self.lengthVarInt64(self.integer_value_)
if (self.has_double_value_): n += 9
if (self.has_timestamp_microseconds_value_): n += 1 + self.lengthVarInt64(self.timestamp_microseconds_value_)
if (self.has_key_value_): n += 1 + self.lengthString(self.key_value_.ByteSizePartial())
if (self.has_blob_key_value_): n += 2 + self.lengthString(len(self.blob_key_value_))
if (self.has_string_value_): n += 2 + self.lengthString(len(self.string_value_))
if (self.has_blob_value_): n += 2 + self.lengthString(len(self.blob_value_))
if (self.has_entity_value_): n += 1 + self.lengthString(self.entity_value_.ByteSizePartial())
if (self.has_geo_point_value_): n += 1 + self.lengthString(self.geo_point_value_.ByteSizePartial())
n += 1 * len(self.list_value_)
for i in xrange(len(self.list_value_)): n += self.lengthString(self.list_value_[i].ByteSizePartial())
if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
if (self.has_indexed_): n += 2
return n
def Clear(self):
self.clear_boolean_value()
self.clear_integer_value()
self.clear_double_value()
self.clear_timestamp_microseconds_value()
self.clear_key_value()
self.clear_blob_key_value()
self.clear_string_value()
self.clear_blob_value()
self.clear_entity_value()
self.clear_geo_point_value()
self.clear_list_value()
self.clear_meaning()
self.clear_indexed()
def OutputUnchecked(self, out):
if (self.has_boolean_value_):
out.putVarInt32(8)
out.putBoolean(self.boolean_value_)
if (self.has_integer_value_):
out.putVarInt32(16)
out.putVarInt64(self.integer_value_)
if (self.has_double_value_):
out.putVarInt32(25)
out.putDouble(self.double_value_)
if (self.has_timestamp_microseconds_value_):
out.putVarInt32(32)
out.putVarInt64(self.timestamp_microseconds_value_)
if (self.has_key_value_):
out.putVarInt32(42)
out.putVarInt32(self.key_value_.ByteSize())
self.key_value_.OutputUnchecked(out)
if (self.has_entity_value_):
out.putVarInt32(50)
out.putVarInt32(self.entity_value_.ByteSize())
self.entity_value_.OutputUnchecked(out)
for i in xrange(len(self.list_value_)):
out.putVarInt32(58)
out.putVarInt32(self.list_value_[i].ByteSize())
self.list_value_[i].OutputUnchecked(out)
if (self.has_geo_point_value_):
out.putVarInt32(66)
out.putVarInt32(self.geo_point_value_.ByteSize())
self.geo_point_value_.OutputUnchecked(out)
if (self.has_meaning_):
out.putVarInt32(112)
out.putVarInt32(self.meaning_)
if (self.has_indexed_):
out.putVarInt32(120)
out.putBoolean(self.indexed_)
if (self.has_blob_key_value_):
out.putVarInt32(130)
out.putPrefixedString(self.blob_key_value_)
if (self.has_string_value_):
out.putVarInt32(138)
out.putPrefixedString(self.string_value_)
if (self.has_blob_value_):
out.putVarInt32(146)
out.putPrefixedString(self.blob_value_)
def OutputPartial(self, out):
if (self.has_boolean_value_):
out.putVarInt32(8)
out.putBoolean(self.boolean_value_)
if (self.has_integer_value_):
out.putVarInt32(16)
out.putVarInt64(self.integer_value_)
if (self.has_double_value_):
out.putVarInt32(25)
out.putDouble(self.double_value_)
if (self.has_timestamp_microseconds_value_):
out.putVarInt32(32)
out.putVarInt64(self.timestamp_microseconds_value_)
if (self.has_key_value_):
out.putVarInt32(42)
out.putVarInt32(self.key_value_.ByteSizePartial())
self.key_value_.OutputPartial(out)
if (self.has_entity_value_):
out.putVarInt32(50)
out.putVarInt32(self.entity_value_.ByteSizePartial())
self.entity_value_.OutputPartial(out)
for i in xrange(len(self.list_value_)):
out.putVarInt32(58)
out.putVarInt32(self.list_value_[i].ByteSizePartial())
self.list_value_[i].OutputPartial(out)
if (self.has_geo_point_value_):
out.putVarInt32(66)
out.putVarInt32(self.geo_point_value_.ByteSizePartial())
self.geo_point_value_.OutputPartial(out)
if (self.has_meaning_):
out.putVarInt32(112)
out.putVarInt32(self.meaning_)
if (self.has_indexed_):
out.putVarInt32(120)
out.putBoolean(self.indexed_)
if (self.has_blob_key_value_):
out.putVarInt32(130)
out.putPrefixedString(self.blob_key_value_)
if (self.has_string_value_):
out.putVarInt32(138)
out.putPrefixedString(self.string_value_)
if (self.has_blob_value_):
out.putVarInt32(146)
out.putPrefixedString(self.blob_value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_boolean_value(d.getBoolean())
continue
if tt == 16:
self.set_integer_value(d.getVarInt64())
continue
if tt == 25:
self.set_double_value(d.getDouble())
continue
if tt == 32:
self.set_timestamp_microseconds_value(d.getVarInt64())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key_value().TryMerge(tmp)
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_entity_value().TryMerge(tmp)
continue
if tt == 58:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_list_value().TryMerge(tmp)
continue
if tt == 66:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_geo_point_value().TryMerge(tmp)
continue
if tt == 112:
self.set_meaning(d.getVarInt32())
continue
if tt == 120:
self.set_indexed(d.getBoolean())
continue
if tt == 130:
self.set_blob_key_value(d.getPrefixedString())
continue
if tt == 138:
self.set_string_value(d.getPrefixedString())
continue
if tt == 146:
self.set_blob_value(d.getPrefixedString())
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_boolean_value_: res+=prefix+("boolean_value: %s\n" % self.DebugFormatBool(self.boolean_value_))
if self.has_integer_value_: res+=prefix+("integer_value: %s\n" % self.DebugFormatInt64(self.integer_value_))
if self.has_double_value_: res+=prefix+("double_value: %s\n" % self.DebugFormat(self.double_value_))
if self.has_timestamp_microseconds_value_: res+=prefix+("timestamp_microseconds_value: %s\n" % self.DebugFormatInt64(self.timestamp_microseconds_value_))
if self.has_key_value_:
res+=prefix+"key_value <\n"
res+=self.key_value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_blob_key_value_: res+=prefix+("blob_key_value: %s\n" % self.DebugFormatString(self.blob_key_value_))
if self.has_string_value_: res+=prefix+("string_value: %s\n" % self.DebugFormatString(self.string_value_))
if self.has_blob_value_: res+=prefix+("blob_value: %s\n" % self.DebugFormatString(self.blob_value_))
if self.has_entity_value_:
res+=prefix+"entity_value <\n"
res+=self.entity_value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_geo_point_value_:
res+=prefix+"geo_point_value <\n"
res+=self.geo_point_value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.list_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("list_value%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
if self.has_indexed_: res+=prefix+("indexed: %s\n" % self.DebugFormatBool(self.indexed_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kboolean_value = 1
kinteger_value = 2
kdouble_value = 3
ktimestamp_microseconds_value = 4
kkey_value = 5
kblob_key_value = 16
kstring_value = 17
kblob_value = 18
kentity_value = 6
kgeo_point_value = 8
klist_value = 7
kmeaning = 14
kindexed = 15
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "boolean_value",
2: "integer_value",
3: "double_value",
4: "timestamp_microseconds_value",
5: "key_value",
6: "entity_value",
7: "list_value",
8: "geo_point_value",
14: "meaning",
15: "indexed",
16: "blob_key_value",
17: "string_value",
18: "blob_value",
}, 18)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.DOUBLE,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.STRING,
8: ProtocolBuffer.Encoder.STRING,
14: ProtocolBuffer.Encoder.NUMERIC,
15: ProtocolBuffer.Encoder.NUMERIC,
16: ProtocolBuffer.Encoder.STRING,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.STRING,
}, 18, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Value'
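# Illustrative sketch (not part of the generated module): Value is a tagged
# union of scalar fields plus a repeated list_value that nests sub-Values.
def _example_value():
  v = Value()
  v.set_string_value('hello')
  v.set_indexed(0)            # indexed defaults to 1 (true), see above
  inner = v.add_list_value()  # nested Value inside the repeated field
  inner.set_integer_value(7)
  return v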
class Property(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
has_deprecated_multi_ = 0
deprecated_multi_ = 0
has_value_ = 0
value_ = None
def __init__(self, contents=None):
self.deprecated_value_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def deprecated_multi(self): return self.deprecated_multi_
def set_deprecated_multi(self, x):
self.has_deprecated_multi_ = 1
self.deprecated_multi_ = x
def clear_deprecated_multi(self):
if self.has_deprecated_multi_:
self.has_deprecated_multi_ = 0
self.deprecated_multi_ = 0
def has_deprecated_multi(self): return self.has_deprecated_multi_
def deprecated_value_size(self): return len(self.deprecated_value_)
def deprecated_value_list(self): return self.deprecated_value_
def deprecated_value(self, i):
return self.deprecated_value_[i]
def mutable_deprecated_value(self, i):
return self.deprecated_value_[i]
def add_deprecated_value(self):
x = Value()
self.deprecated_value_.append(x)
return x
def clear_deprecated_value(self):
self.deprecated_value_ = []
def value(self):
if self.value_ is None:
self.lazy_init_lock_.acquire()
try:
if self.value_ is None: self.value_ = Value()
finally:
self.lazy_init_lock_.release()
return self.value_
def mutable_value(self): self.has_value_ = 1; return self.value()
def clear_value(self):
# Warning: this method does not acquire the lock.
if self.has_value_:
      self.has_value_ = 0
if self.value_ is not None: self.value_.Clear()
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_name()): self.set_name(x.name())
if (x.has_deprecated_multi()): self.set_deprecated_multi(x.deprecated_multi())
for i in xrange(x.deprecated_value_size()): self.add_deprecated_value().CopyFrom(x.deprecated_value(i))
if (x.has_value()): self.mutable_value().MergeFrom(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
if self.has_deprecated_multi_ != x.has_deprecated_multi_: return 0
if self.has_deprecated_multi_ and self.deprecated_multi_ != x.deprecated_multi_: return 0
if len(self.deprecated_value_) != len(x.deprecated_value_): return 0
for e1, e2 in zip(self.deprecated_value_, x.deprecated_value_):
if e1 != e2: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: name not set.')
for p in self.deprecated_value_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_value_ and not self.value_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.name_))
if (self.has_deprecated_multi_): n += 2
n += 1 * len(self.deprecated_value_)
for i in xrange(len(self.deprecated_value_)): n += self.lengthString(self.deprecated_value_[i].ByteSize())
if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_name_):
n += 1
n += self.lengthString(len(self.name_))
if (self.has_deprecated_multi_): n += 2
n += 1 * len(self.deprecated_value_)
for i in xrange(len(self.deprecated_value_)): n += self.lengthString(self.deprecated_value_[i].ByteSizePartial())
if (self.has_value_): n += 1 + self.lengthString(self.value_.ByteSizePartial())
return n
def Clear(self):
self.clear_name()
self.clear_deprecated_multi()
self.clear_deprecated_value()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
if (self.has_deprecated_multi_):
out.putVarInt32(16)
out.putBoolean(self.deprecated_multi_)
for i in xrange(len(self.deprecated_value_)):
out.putVarInt32(26)
out.putVarInt32(self.deprecated_value_[i].ByteSize())
self.deprecated_value_[i].OutputUnchecked(out)
if (self.has_value_):
out.putVarInt32(34)
out.putVarInt32(self.value_.ByteSize())
self.value_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_name_):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
if (self.has_deprecated_multi_):
out.putVarInt32(16)
out.putBoolean(self.deprecated_multi_)
for i in xrange(len(self.deprecated_value_)):
out.putVarInt32(26)
out.putVarInt32(self.deprecated_value_[i].ByteSizePartial())
self.deprecated_value_[i].OutputPartial(out)
if (self.has_value_):
out.putVarInt32(34)
out.putVarInt32(self.value_.ByteSizePartial())
self.value_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_name(d.getPrefixedString())
continue
if tt == 16:
self.set_deprecated_multi(d.getBoolean())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_deprecated_value().TryMerge(tmp)
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_value().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
if self.has_deprecated_multi_: res+=prefix+("deprecated_multi: %s\n" % self.DebugFormatBool(self.deprecated_multi_))
cnt=0
for e in self.deprecated_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("deprecated_value%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_value_:
res+=prefix+"value <\n"
res+=self.value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kname = 1
kdeprecated_multi = 2
kdeprecated_value = 3
kvalue = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "name",
2: "deprecated_multi",
3: "deprecated_value",
4: "value",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Property'
class Entity(ProtocolBuffer.ProtocolMessage):
has_key_ = 0
key_ = None
def __init__(self, contents=None):
self.property_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def key(self):
if self.key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_ is None: self.key_ = Key()
finally:
self.lazy_init_lock_.release()
return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
# Warning: this method does not acquire the lock.
if self.has_key_:
      self.has_key_ = 0
if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
def property_size(self): return len(self.property_)
def property_list(self): return self.property_
def property(self, i):
return self.property_[i]
def mutable_property(self, i):
return self.property_[i]
def add_property(self):
x = Property()
self.property_.append(x)
return x
def clear_property(self):
self.property_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_key()): self.mutable_key().MergeFrom(x.key())
for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
def Equals(self, x):
if x is self: return 1
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if len(self.property_) != len(x.property_): return 0
for e1, e2 in zip(self.property_, x.property_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
for p in self.property_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_key()
self.clear_property()
def OutputUnchecked(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
for i in xrange(len(self.property_)):
out.putVarInt32(18)
out.putVarInt32(self.property_[i].ByteSize())
self.property_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_key_):
out.putVarInt32(10)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
for i in xrange(len(self.property_)):
out.putVarInt32(18)
out.putVarInt32(self.property_[i].ByteSizePartial())
self.property_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_property().TryMerge(tmp)
continue
# tag 0 is special: it's used to indicate an error.
# so if we see it we raise an exception.
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.property_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kproperty = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "property",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
# stylesheet for XML output
_STYLE = \
""""""
_STYLE_CONTENT_TYPE = \
""""""
_PROTO_DESCRIPTOR_NAME = 'apphosting.datastore.v4.Entity'
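# Illustrative sketch (not part of the generated module): wiring Entity,
# Property and Value together. Property.name is required (checked above);
# setting kind on the path element assumes the usual required-field check
# in Key_PathElement.
def _example_entity():
  e = Entity()
  path = e.mutable_key().add_path_element()
  path.set_kind('Person')
  path.set_name('alice')
  prop = e.add_property()
  prop.set_name('age')
  prop.mutable_value().set_integer_value(30)
  assert e.IsInitialized([])   # surfaces any missing required fields
  return e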
if _extension_runtime:
pass
__all__ = ['PartitionId','Key_PathElement','Key','GeoPoint','Value','Property','Entity']
|
apache-2.0
|
cogeorg/BlackRhino
|
examples/Georg2012/production/networkx/algorithms/centrality/tests/test_current_flow_betweenness_centrality.py
|
89
|
7721
|
#!/usr/bin/env python
from nose.tools import *
from nose import SkipTest
import networkx
from nose.plugins.attrib import attr
from networkx import edge_current_flow_betweenness_centrality \
as edge_current_flow
from networkx import approximate_current_flow_betweenness_centrality \
as approximate_cfbc
class TestFlowBetweennessCentrality(object):
numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
@classmethod
def setupClass(cls):
global np
try:
import numpy as np
import scipy
except ImportError:
            raise SkipTest('NumPy or SciPy not available.')
def test_K4_normalized(self):
"""Betweenness centrality: K4"""
G=networkx.complete_graph(4)
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
b_answer={0: 0.25, 1: 0.25, 2: 0.25, 3: 0.25}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
G.add_edge(0,1,{'weight':0.5,'other':0.3})
b=networkx.current_flow_betweenness_centrality(G,normalized=True,weight=None)
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
wb_answer={0: 0.2222222, 1: 0.2222222, 2: 0.30555555, 3: 0.30555555}
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
for n in sorted(G):
assert_almost_equal(b[n],wb_answer[n])
wb_answer={0: 0.2051282, 1: 0.2051282, 2: 0.33974358, 3: 0.33974358}
b=networkx.current_flow_betweenness_centrality(G,normalized=True,weight='other')
for n in sorted(G):
assert_almost_equal(b[n],wb_answer[n])
def test_K4(self):
"""Betweenness centrality: K4"""
G=networkx.complete_graph(4)
for solver in ['full','lu','cg']:
b=networkx.current_flow_betweenness_centrality(G, normalized=False,
solver=solver)
b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
def test_P4_normalized(self):
"""Betweenness centrality: P4 normalized"""
G=networkx.path_graph(4)
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
b_answer={0: 0, 1: 2./3, 2: 2./3, 3:0}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
def test_P4(self):
"""Betweenness centrality: P4"""
G=networkx.path_graph(4)
b=networkx.current_flow_betweenness_centrality(G,normalized=False)
b_answer={0: 0, 1: 2, 2: 2, 3: 0}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
def test_star(self):
"""Betweenness centrality: star """
G=networkx.Graph()
G.add_star(['a','b','c','d'])
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
b_answer={'a': 1.0, 'b': 0.0, 'c': 0.0, 'd':0.0}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
    def test_solvers(self):
        """Betweenness centrality: alternate solvers"""
G=networkx.complete_graph(4)
for solver in ['full','lu','cg']:
b=networkx.current_flow_betweenness_centrality(G,normalized=False,
solver=solver)
b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
for n in sorted(G):
assert_almost_equal(b[n],b_answer[n])
class TestApproximateFlowBetweennessCentrality(object):
numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
@classmethod
def setupClass(cls):
global np
global assert_allclose
try:
import numpy as np
import scipy
from numpy.testing import assert_allclose
except ImportError:
            raise SkipTest('NumPy or SciPy not available.')
def test_K4_normalized(self):
"Approximate current-flow betweenness centrality: K4 normalized"
G=networkx.complete_graph(4)
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
epsilon=0.1
ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon)
for n in sorted(G):
assert_allclose(b[n],ba[n],atol=epsilon)
def test_K4(self):
"Approximate current-flow betweenness centrality: K4"
G=networkx.complete_graph(4)
b=networkx.current_flow_betweenness_centrality(G,normalized=False)
epsilon=0.1
ba = approximate_cfbc(G,normalized=False, epsilon=0.5*epsilon)
for n in sorted(G):
assert_allclose(b[n],ba[n],atol=epsilon*len(G)**2)
def test_star(self):
"Approximate current-flow betweenness centrality: star"
G=networkx.Graph()
G.add_star(['a','b','c','d'])
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
epsilon=0.1
ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon)
for n in sorted(G):
assert_allclose(b[n],ba[n],atol=epsilon)
def test_grid(self):
"Approximate current-flow betweenness centrality: 2d grid"
G=networkx.grid_2d_graph(4,4)
b=networkx.current_flow_betweenness_centrality(G,normalized=True)
epsilon=0.1
ba = approximate_cfbc(G,normalized=True, epsilon=0.5*epsilon)
for n in sorted(G):
assert_allclose(b[n],ba[n],atol=epsilon)
def test_solvers(self):
"Approximate current-flow betweenness centrality: solvers"
G=networkx.complete_graph(4)
epsilon=0.1
for solver in ['full','lu','cg']:
b=approximate_cfbc(G,normalized=False,solver=solver,
epsilon=0.5*epsilon)
b_answer={0: 0.75, 1: 0.75, 2: 0.75, 3: 0.75}
for n in sorted(G):
assert_allclose(b[n],b_answer[n],atol=epsilon)
class TestWeightedFlowBetweennessCentrality(object):
pass
class TestEdgeFlowBetweennessCentrality(object):
numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
@classmethod
def setupClass(cls):
global np
try:
import numpy as np
import scipy
except ImportError:
            raise SkipTest('NumPy or SciPy not available.')
    def test_K4_normalized(self):
        """Edge flow betweenness centrality: K4 normalized"""
G=networkx.complete_graph(4)
b=edge_current_flow(G,normalized=True)
b_answer=dict.fromkeys(G.edges(),0.25)
for (s,t),v1 in b_answer.items():
v2=b.get((s,t),b.get((t,s)))
assert_almost_equal(v1,v2)
    def test_K4(self):
        """Edge flow betweenness centrality: K4"""
G=networkx.complete_graph(4)
b=edge_current_flow(G,normalized=False)
b_answer=dict.fromkeys(G.edges(),0.75)
for (s,t),v1 in b_answer.items():
v2=b.get((s,t),b.get((t,s)))
assert_almost_equal(v1,v2)
def test_C4(self):
"""Edge flow betweenness centrality: C4"""
G=networkx.cycle_graph(4)
b=edge_current_flow(G,normalized=False)
b_answer={(0, 1):1.25,(0, 3):1.25, (1, 2):1.25, (2, 3): 1.25}
for (s,t),v1 in b_answer.items():
v2=b.get((s,t),b.get((t,s)))
assert_almost_equal(v1,v2)
def test_P4(self):
"""Edge betweenness centrality: P4"""
G=networkx.path_graph(4)
b=edge_current_flow(G,normalized=False)
b_answer={(0, 1):1.5,(1, 2):2.0, (2, 3):1.5}
for (s,t),v1 in b_answer.items():
v2=b.get((s,t),b.get((t,s)))
assert_almost_equal(v1,v2)
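# Illustrative sketch (not part of the test suite): edge_current_flow keys
# its result by one orientation of each undirected edge, which is why the
# assertions above fall back from (s, t) to (t, s).
def _example_edge_lookup():
    G = networkx.cycle_graph(4)
    b = edge_current_flow(G, normalized=False)
    return b.get((0, 1), b.get((1, 0)))  # orientation-insensitive lookup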
|
gpl-3.0
|
Fedik/gramps
|
gramps/gui/widgets/expandcollapsearrow.py
|
10
|
3458
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
__all__ = ["ExpandCollapseArrow"]
#-------------------------------------------------------------------------
#
# Standard python modules
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
import logging
_LOG = logging.getLogger(".widgets.expandcollapsearrow")
#-------------------------------------------------------------------------
#
# GTK/Gnome modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
from gi.repository import Gdk
from gramps.gen.constfunc import has_display
#-------------------------------------------------------------------------
#
# Constants
#
#-------------------------------------------------------------------------
if has_display():
HAND_CURSOR = Gdk.Cursor.new_for_display(Gdk.Display.get_default(),
Gdk.CursorType.HAND2)
#-------------------------------------------------------------------------
#
# Module functions
#
#-------------------------------------------------------------------------
def realize_cb(widget):
widget.get_window().set_cursor(HAND_CURSOR)
#-------------------------------------------------------------------------
#
# ExpandCollapseArrow class
#
#-------------------------------------------------------------------------
class ExpandCollapseArrow(Gtk.EventBox):
"""
Arrow to be used for expand/collapse of sections.
    .. note:: the arrow shadow does not render, so we indicate the action with realize_cb instead
"""
def __init__(self, collapsed, onbuttonpress, pair):
"""
Constructor for the ExpandCollapseArrow class.
:param collapsed: True if arrow must be shown collapsed,
False otherwise
:type collapsed: bool
:param onbuttonpress: The callback function for button press
:type onbuttonpress: callback
:param pair: user param for onbuttonpress function
"""
Gtk.EventBox.__init__(self)
if collapsed :
self.arrow = Gtk.Arrow(arrow_type=Gtk.ArrowType.RIGHT,
shadow_type=Gtk.ShadowType.OUT)
self.set_tooltip_text(_("Expand this section"))
else:
self.arrow = Gtk.Arrow(arrow_type=Gtk.ArrowType.DOWN,
shadow_type=Gtk.ShadowType.OUT)
self.set_tooltip_text(_("Collapse this section"))
self.add(self.arrow)
self.connect('button-press-event', onbuttonpress, pair)
self.connect('realize', realize_cb)
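#-------------------------------------------------------------------------
#
# Usage sketch (illustrative only; names below are hypothetical)
#
#-------------------------------------------------------------------------
def _example_usage(section_widget):
    """Minimal sketch: the callback receives the Gtk 'button-press-event'
    arguments plus the user parameter given at construction time."""
    def on_press(eventbox, event, section):
        section.set_visible(not section.get_visible())  # toggle the section
    return ExpandCollapseArrow(True, on_press, section_widget)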
|
gpl-2.0
|
kenwang815/KodiPlugins
|
script.module.youtube.dl/lib/youtube_dl/extractor/allocine.py
|
21
|
3546
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
qualities,
unescapeHTML,
xpath_element,
)
class AllocineIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?allocine\.fr/(?P<typ>article|video|film)/(fichearticle_gen_carticle=|player_gen_cmedia=|fichefilm_gen_cfilm=|video-)(?P<id>[0-9]+)(?:\.html)?'
_TESTS = [{
'url': 'http://www.allocine.fr/article/fichearticle_gen_carticle=18635087.html',
'md5': '0c9fcf59a841f65635fa300ac43d8269',
'info_dict': {
'id': '19546517',
'ext': 'mp4',
'title': 'Astérix - Le Domaine des Dieux Teaser VF',
'description': 'md5:abcd09ce503c6560512c14ebfdb720d2',
'thumbnail': 're:http://.*\.jpg',
},
}, {
'url': 'http://www.allocine.fr/video/player_gen_cmedia=19540403&cfilm=222257.html',
'md5': 'd0cdce5d2b9522ce279fdfec07ff16e0',
'info_dict': {
'id': '19540403',
'ext': 'mp4',
'title': 'Planes 2 Bande-annonce VF',
'description': 'Regardez la bande annonce du film Planes 2 (Planes 2 Bande-annonce VF). Planes 2, un film de Roberts Gannaway',
'thumbnail': 're:http://.*\.jpg',
},
}, {
'url': 'http://www.allocine.fr/film/fichefilm_gen_cfilm=181290.html',
'md5': '101250fb127ef9ca3d73186ff22a47ce',
'info_dict': {
'id': '19544709',
'ext': 'mp4',
'title': 'Dragons 2 - Bande annonce finale VF',
'description': 'md5:601d15393ac40f249648ef000720e7e3',
'thumbnail': 're:http://.*\.jpg',
},
}, {
'url': 'http://www.allocine.fr/video/video-19550147/',
'only_matching': True,
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
typ = mobj.group('typ')
display_id = mobj.group('id')
webpage = self._download_webpage(url, display_id)
if typ == 'film':
video_id = self._search_regex(r'href="/video/player_gen_cmedia=([0-9]+).+"', webpage, 'video id')
else:
player = self._search_regex(r'data-player=\'([^\']+)\'>', webpage, 'data player', default=None)
if player:
player_data = json.loads(player)
video_id = compat_str(player_data['refMedia'])
else:
model = self._search_regex(r'data-model="([^"]+)">', webpage, 'data model')
model_data = self._parse_json(unescapeHTML(model), display_id)
video_id = compat_str(model_data['id'])
xml = self._download_xml('http://www.allocine.fr/ws/AcVisiondataV4.ashx?media=%s' % video_id, display_id)
video = xpath_element(xml, './/AcVisionVideo').attrib
quality = qualities(['ld', 'md', 'hd'])
formats = []
for k, v in video.items():
if re.match(r'.+_path', k):
format_id = k.split('_')[0]
formats.append({
'format_id': format_id,
'quality': quality(format_id),
'url': v,
})
self._sort_formats(formats)
return {
'id': video_id,
'title': video['videoTitle'],
'thumbnail': self._og_search_thumbnail(webpage),
'formats': formats,
'description': self._og_search_description(webpage),
}
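# Illustrative sketch (assumes a working youtube_dl install): the extractor
# is normally driven through the public YoutubeDL entry point, which matches
# the URL against _VALID_URL above rather than instantiating AllocineIE
# directly.
def _example_usage():
    from youtube_dl import YoutubeDL
    with YoutubeDL({'skip_download': True}) as ydl:
        return ydl.extract_info(
            'http://www.allocine.fr/video/video-19550147/', download=False)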
|
gpl-2.0
|
yoer/hue
|
desktop/core/ext-py/boto-2.38.0/boto/cloudhsm/exceptions.py
|
135
|
1347
|
# Copyright (c) 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.exception import BotoServerError
class InvalidRequestException(BotoServerError):
pass
class CloudHsmServiceException(BotoServerError):
pass
class CloudHsmInternalException(BotoServerError):
pass
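# Illustrative sketch (the connection object and call below are assumptions,
# not part of this module): callers typically guard CloudHSM API calls with
# these exception classes, e.g.
#
#     try:
#         conn.list_hsms()
#     except CloudHsmServiceException:
#         ...  # service-level failure
#     except CloudHsmInternalException:
#         ...  # internal AWS error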
|
apache-2.0
|
40123210/w17b_exam
|
static/Brython3.1.3-20150514-095342/Lib/copyreg.py
|
749
|
6611
|
"""Helper to provide extensibility for pickle.
This is only useful to add pickle support for extension types defined in
C, not for instances of user-defined classes.
"""
__all__ = ["pickle", "constructor",
"add_extension", "remove_extension", "clear_extension_cache"]
dispatch_table = {}
def pickle(ob_type, pickle_function, constructor_ob=None):
if not callable(pickle_function):
raise TypeError("reduction functions must be callable")
dispatch_table[ob_type] = pickle_function
    # The constructor_ob function is a vestige of the old "safe for
    # unpickling" mechanism.
# There is no reason for the caller to pass it anymore.
if constructor_ob is not None:
constructor(constructor_ob)
def constructor(object):
if not callable(object):
raise TypeError("constructors must be callable")
# Example: provide pickling support for complex numbers.
try:
complex
except NameError:
pass
else:
def pickle_complex(c):
return complex, (c.real, c.imag)
pickle(complex, pickle_complex, complex)
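# A second, illustrative registration (not part of the module): the same
# dispatch_table mechanism works for any extension type whose state can be
# rebuilt from a constructor call. Point is a hypothetical C-defined type.
#
#     def pickle_point(p):
#         return Point, (p.x, p.y)
#     pickle(Point, pickle_point)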
# Support for pickling new-style objects
def _reconstructor(cls, base, state):
if base is object:
obj = object.__new__(cls)
else:
obj = base.__new__(cls, state)
if base.__init__ != object.__init__:
base.__init__(obj, state)
return obj
_HEAPTYPE = 1<<9
# Python code for object.__reduce_ex__ for protocols 0 and 1
def _reduce_ex(self, proto):
assert proto < 2
for base in self.__class__.__mro__:
if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
break
else:
base = object # not really reachable
if base is object:
state = None
else:
if base is self.__class__:
raise TypeError("can't pickle %s objects" % base.__name__)
state = base(self)
args = (self.__class__, base, state)
try:
getstate = self.__getstate__
except AttributeError:
if getattr(self, "__slots__", None):
raise TypeError("a class that defines __slots__ without "
"defining __getstate__ cannot be pickled")
try:
dict = self.__dict__
except AttributeError:
dict = None
else:
dict = getstate()
if dict:
return _reconstructor, args, dict
else:
return _reconstructor, args
# Helper for __reduce_ex__ protocol 2
def __newobj__(cls, *args):
return cls.__new__(cls, *args)
def _slotnames(cls):
"""Return a list of slot names for a given class.
This needs to find slots defined by the class and its bases, so we
can't simply return the __slots__ attribute. We must walk down
the Method Resolution Order and concatenate the __slots__ of each
class found there. (This assumes classes don't modify their
__slots__ attribute to misrepresent their slots after the class is
defined.)
"""
# Get the value from a cache in the class if possible
names = cls.__dict__.get("__slotnames__")
if names is not None:
return names
# Not cached -- calculate the value
names = []
if not hasattr(cls, "__slots__"):
# This class has no slots
pass
else:
# Slots found -- gather slot names from all base classes
for c in cls.__mro__:
if "__slots__" in c.__dict__:
slots = c.__dict__['__slots__']
# if class has a single slot, it can be given as a string
if isinstance(slots, str):
slots = (slots,)
for name in slots:
# special descriptors
if name in ("__dict__", "__weakref__"):
continue
# mangled names
elif name.startswith('__') and not name.endswith('__'):
names.append('_%s%s' % (c.__name__, name))
else:
names.append(name)
# Cache the outcome in the class if at all possible
try:
cls.__slotnames__ = names
except:
pass # But don't die if we can't
return names
# A registry of extension codes. This is an ad-hoc compression
# mechanism. Whenever a global reference to <module>, <name> is about
# to be pickled, the (<module>, <name>) tuple is looked up here to see
# if it is a registered extension code for it. Extension codes are
# universal, so that the meaning of a pickle does not depend on
# context. (There are also some codes reserved for local use that
# don't have this restriction.) Codes are positive ints; 0 is
# reserved.
_extension_registry = {} # key -> code
_inverted_registry = {} # code -> key
_extension_cache = {} # code -> object
# Don't ever rebind those names: pickling grabs a reference to them when
# it's initialized, and won't see a rebinding.
def add_extension(module, name, code):
"""Register an extension code."""
code = int(code)
if not 1 <= code <= 0x7fffffff:
raise ValueError("code out of range")
key = (module, name)
if (_extension_registry.get(key) == code and
_inverted_registry.get(code) == key):
return # Redundant registrations are benign
if key in _extension_registry:
raise ValueError("key %s is already registered with code %s" %
(key, _extension_registry[key]))
if code in _inverted_registry:
raise ValueError("code %s is already in use for key %s" %
(code, _inverted_registry[code]))
_extension_registry[key] = code
_inverted_registry[code] = key
def remove_extension(module, name, code):
"""Unregister an extension code. For testing only."""
key = (module, name)
if (_extension_registry.get(key) != code or
_inverted_registry.get(code) != key):
raise ValueError("key %s is not registered with code %s" %
(key, code))
del _extension_registry[key]
del _inverted_registry[code]
if code in _extension_cache:
del _extension_cache[code]
def clear_extension_cache():
_extension_cache.clear()
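# Illustrative sketch (not part of the module, never called at import time):
# registering and unregistering an extension code in the private-use range
# documented below.
def _example_extension_registration():
    add_extension('mymodule', 'MyClass', 250)   # 240-255: private use
    assert _extension_registry[('mymodule', 'MyClass')] == 250
    remove_extension('mymodule', 'MyClass', 250)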
# Standard extension code assignments
# Reserved ranges
# First Last Count Purpose
# 1 127 127 Reserved for Python standard library
# 128 191 64 Reserved for Zope
# 192 239 48 Reserved for 3rd parties
# 240 255 16 Reserved for private use (will never be assigned)
# 256 Inf Inf Reserved for future assignment
# Extension codes are assigned by the Python Software Foundation.
|
agpl-3.0
|
roidelapluie/GitPython
|
git/test/test_docs.py
|
13
|
24177
|
#-*-coding:utf-8-*-
# test_git.py
# Copyright (C) 2008, 2009 Michael Trier ([email protected]) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import os
from git.test.lib import TestBase
from gitdb.test.lib import with_rw_directory
class Tutorials(TestBase):
@with_rw_directory
def test_init_repo_object(self, rw_dir):
# [1-test_init_repo_object]
from git import Repo
join = os.path.join
# rorepo is a Repo instance pointing to the git-python repository.
# For all you know, the first argument to Repo is a path to the repository
# you want to work with
repo = Repo(self.rorepo.working_tree_dir)
assert not repo.bare
# ![1-test_init_repo_object]
# [2-test_init_repo_object]
bare_repo = Repo.init(join(rw_dir, 'bare-repo'), bare=True)
assert bare_repo.bare
# ![2-test_init_repo_object]
# [3-test_init_repo_object]
repo.config_reader() # get a config reader for read-only access
cw = repo.config_writer() # get a config writer to change configuration
cw.release() # call release() to be sure changes are written and locks are released
# ![3-test_init_repo_object]
# [4-test_init_repo_object]
assert not bare_repo.is_dirty() # check the dirty state
repo.untracked_files # retrieve a list of untracked files
# ['my_untracked_file']
# ![4-test_init_repo_object]
# [5-test_init_repo_object]
cloned_repo = repo.clone(join(rw_dir, 'to/this/path'))
assert cloned_repo.__class__ is Repo # clone an existing repository
assert Repo.init(join(rw_dir, 'path/for/new/repo')).__class__ is Repo
# ![5-test_init_repo_object]
# [6-test_init_repo_object]
repo.archive(open(join(rw_dir, 'repo.tar'), 'wb'))
# ![6-test_init_repo_object]
# repository paths
# [7-test_init_repo_object]
assert os.path.isdir(cloned_repo.working_tree_dir) # directory with your work files
assert cloned_repo.git_dir.startswith(cloned_repo.working_tree_dir) # directory containing the git repository
assert bare_repo.working_tree_dir is None # bare repositories have no working tree
# ![7-test_init_repo_object]
# heads, tags and references
# heads are branches in git-speak
# [8-test_init_repo_object]
assert repo.head.ref == repo.heads.master # head is a symbolic reference pointing to master
assert repo.tags['0.3.5'] == repo.tag('refs/tags/0.3.5') # you can access tags in various ways too
assert repo.refs.master == repo.heads['master'] # .refs provides access to all refs, i.e. heads ...
assert repo.refs['origin/master'] == repo.remotes.origin.refs.master # ... remotes ...
assert repo.refs['0.3.5'] == repo.tags['0.3.5'] # ... and tags
# ![8-test_init_repo_object]
# create a new head/branch
# [9-test_init_repo_object]
new_branch = cloned_repo.create_head('feature') # create a new branch ...
assert cloned_repo.active_branch != new_branch # which wasn't checked out yet ...
assert new_branch.commit == cloned_repo.active_branch.commit # and which points to the checked-out commit
# It's easy to let a branch point to the previous commit, without affecting anything else
# Each reference provides access to the git object it points to, usually commits
assert new_branch.set_commit('HEAD~1').commit == cloned_repo.active_branch.commit.parents[0]
# ![9-test_init_repo_object]
# create a new tag reference
# [10-test_init_repo_object]
past = cloned_repo.create_tag('past', ref=new_branch,
message="This is a tag-object pointing to %s" % new_branch.name)
assert past.commit == new_branch.commit # the tag points to the specified commit
assert past.tag.message.startswith("This is") # and its object carries the message provided
now = cloned_repo.create_tag('now') # This is a tag-reference. It may not carry meta-data
assert now.tag is None
# ![10-test_init_repo_object]
# Object handling
# [11-test_init_repo_object]
assert now.commit.message != past.commit.message
# You can read objects directly through binary streams, no working tree required
assert (now.commit.tree / 'VERSION').data_stream.read().decode('ascii').startswith('1')
# You can traverse trees as well to handle all contained files of a particular commit
file_count = 0
tree_count = 0
tree = past.commit.tree
for item in tree.traverse():
file_count += item.type == 'blob'
tree_count += item.type == 'tree'
assert file_count and tree_count # we have accumulated all directories and files
assert len(tree.blobs) + len(tree.trees) == len(tree) # a tree is iterable itself to traverse its children
# ![11-test_init_repo_object]
# remotes allow handling push, pull and fetch operations
# [12-test_init_repo_object]
from git import RemoteProgress
class MyProgressPrinter(RemoteProgress):
def update(self, op_code, cur_count, max_count=None, message=''):
print(op_code, cur_count, max_count, cur_count / (max_count or 100.0), message or "NO MESSAGE")
# end
assert len(cloned_repo.remotes) == 1 # we have been cloned, so there should be one remote
assert len(bare_repo.remotes) == 0 # this one was just initialized
origin = bare_repo.create_remote('origin', url=cloned_repo.working_tree_dir)
assert origin.exists()
for fetch_info in origin.fetch(progress=MyProgressPrinter()):
print("Updated %s to %s" % (fetch_info.ref, fetch_info.commit))
# create a local branch at the latest fetched master. We specify the name statically, but you have all
# information to do it programmatically as well.
bare_master = bare_repo.create_head('master', origin.refs.master)
bare_repo.head.set_reference(bare_master)
assert not bare_repo.delete_remote(origin).exists()
# push and pull behave very similarly
# ![12-test_init_repo_object]
# index
# [13-test_init_repo_object]
assert new_branch.checkout() == cloned_repo.active_branch # checking out a branch adjusts the working tree
assert new_branch.commit == past.commit # Now the past is checked out
new_file_path = os.path.join(cloned_repo.working_tree_dir, 'my-new-file')
open(new_file_path, 'wb').close() # create new file in working tree
cloned_repo.index.add([new_file_path]) # add it to the index
# Commit the changes so that this branch's history diverges from master's
cloned_repo.index.commit("Added a new file in the past - for later merge")
# prepare a merge
master = cloned_repo.heads.master # right-hand side is ahead of us, in the future
merge_base = cloned_repo.merge_base(new_branch, master) # allows for a three-way merge
cloned_repo.index.merge_tree(master, base=merge_base) # write the merge result into index
cloned_repo.index.commit("Merged past and now into future ;)",
parent_commits=(new_branch.commit, master.commit))
# now new_branch is ahead of master, which probably should be checked out and reset softly.
# note that all these operations didn't touch the working tree, as we managed it ourselves.
# This definitely requires you to know what you are doing :) !
assert os.path.basename(new_file_path) in new_branch.commit.tree # new file is now in tree
master.commit = new_branch.commit # let master point to most recent commit
cloned_repo.head.reference = master # we adjusted just the reference, not the working tree or index
# ![13-test_init_repo_object]
# submodules
# [14-test_init_repo_object]
# create a new submodule and check it out on the spot, set up to track the master branch of `bare_repo`
# As our GitPython repository has submodules already that point to github, make sure we don't
# interact with them
for sm in cloned_repo.submodules:
assert not sm.remove().exists() # after removal, the sm doesn't exist anymore
sm = cloned_repo.create_submodule('mysubrepo', 'path/to/subrepo', url=bare_repo.git_dir, branch='master')
# .gitmodules was written and added to the index, which is now being committed
cloned_repo.index.commit("Added submodule")
assert sm.exists() and sm.module_exists() # this submodule is definitely available
sm.remove(module=True, configuration=False) # remove the working tree
assert sm.exists() and not sm.module_exists() # the submodule itself is still available
# update all submodules, non-recursively to save time, this method is very powerful, go have a look
cloned_repo.submodule_update(recursive=False)
assert sm.module_exists() # The submodules working tree was checked out by update
# ![14-test_init_repo_object]
@with_rw_directory
def test_references_and_objects(self, rw_dir):
# [1-test_references_and_objects]
import git
repo = git.Repo.clone_from(self._small_repo_url(), os.path.join(rw_dir, 'repo'), branch='master')
heads = repo.heads
master = heads.master # lists can be accessed by name for convenience
master.commit # the commit pointed to by head called master
master.rename('new_name') # rename heads
master.rename('master')
# ![1-test_references_and_objects]
# [2-test_references_and_objects]
tags = repo.tags
tagref = tags[0]
tagref.tag # tags may have tag objects carrying additional information
tagref.commit # but they always point to commits
repo.delete_tag(tagref) # delete or
repo.create_tag("my_tag") # create tags using the repo for convenience
# ![2-test_references_and_objects]
# [3-test_references_and_objects]
head = repo.head # the head points to the active branch/ref
master = head.reference # retrieve the reference the head points to
master.commit # from here you use it as any other reference
# ![3-test_references_and_objects]
# [4-test_references_and_objects]
log = master.log()
log[0] # first (i.e. oldest) reflog entry
log[-1] # last (i.e. most recent) reflog entry
# ![4-test_references_and_objects]
# [5-test_references_and_objects]
new_branch = repo.create_head('new') # create a new one
new_branch.commit = 'HEAD~10' # set branch to another commit without changing index or working trees
repo.delete_head(new_branch) # delete an existing head - only works if it is not checked out
# ![5-test_references_and_objects]
# [6-test_references_and_objects]
new_tag = repo.create_tag('my_new_tag', message='my message')
# You cannot change the commit a tag points to. Tags need to be re-created
self.failUnlessRaises(AttributeError, setattr, new_tag, 'commit', repo.commit('HEAD~1'))
repo.delete_tag(new_tag)
# ![6-test_references_and_objects]
# [7-test_references_and_objects]
new_branch = repo.create_head('another-branch')
repo.head.reference = new_branch
# ![7-test_references_and_objects]
# [8-test_references_and_objects]
hc = repo.head.commit
hct = hc.tree
hc != hct
hc != repo.tags[0]
hc == repo.head.reference.commit
# ![8-test_references_and_objects]
# [9-test_references_and_objects]
assert hct.type == 'tree' # preset string type, being a class attribute
assert hct.size > 0 # size in bytes
assert len(hct.hexsha) == 40
assert len(hct.binsha) == 20
# ![9-test_references_and_objects]
# [10-test_references_and_objects]
assert hct.path == '' # root tree has no path
assert hct.trees[0].path != '' # the first contained item has one though
assert hct.mode == 0o40000 # trees have the mode of a linux directory
assert hct.blobs[0].mode == 0o100644 # blobs have a specific mode though comparable to a standard linux fs
# ![10-test_references_and_objects]
# [11-test_references_and_objects]
hct.blobs[0].data_stream.read() # stream object to read data from
hct.blobs[0].stream_data(open(os.path.join(rw_dir, 'blob_data'), 'wb')) # write data to given stream
# ![11-test_references_and_objects]
# [12-test_references_and_objects]
repo.commit('master')
repo.commit('v0.8.1')
repo.commit('HEAD~10')
# ![12-test_references_and_objects]
# [13-test_references_and_objects]
fifty_first_commits = list(repo.iter_commits('master', max_count=50))
assert len(fifty_first_commits) == 50
# this will return commits 21-30 from the commit list, as traversed backwards from master
ten_commits_past_twenty = list(repo.iter_commits('master', max_count=10, skip=20))
assert len(ten_commits_past_twenty) == 10
assert fifty_first_commits[20:30] == ten_commits_past_twenty
# ![13-test_references_and_objects]
# [14-test_references_and_objects]
headcommit = repo.head.commit
assert len(headcommit.hexsha) == 40
assert len(headcommit.parents) > 0
assert headcommit.tree.type == 'tree'
assert headcommit.author.name == 'Sebastian Thiel'
assert isinstance(headcommit.authored_date, int)
assert headcommit.committer.name == 'Sebastian Thiel'
assert isinstance(headcommit.committed_date, int)
assert headcommit.message != ''
# ![14-test_references_and_objects]
# [15-test_references_and_objects]
import time
time.asctime(time.gmtime(headcommit.committed_date))
time.strftime("%a, %d %b %Y %H:%M", time.gmtime(headcommit.committed_date))
# ![15-test_references_and_objects]
# [16-test_references_and_objects]
assert headcommit.parents[0].parents[0].parents[0] == repo.commit('master^^^')
# ![16-test_references_and_objects]
# [17-test_references_and_objects]
tree = repo.heads.master.commit.tree
assert len(tree.hexsha) == 40
# ![17-test_references_and_objects]
# [18-test_references_and_objects]
assert len(tree.trees) > 0 # trees are subdirectories
assert len(tree.blobs) > 0 # blobs are files
assert len(tree.blobs) + len(tree.trees) == len(tree)
# ![18-test_references_and_objects]
# [19-test_references_and_objects]
assert tree['smmap'] == tree / 'smmap' # access by index and by sub-path
for entry in tree: # intuitive iteration of tree members
print(entry)
blob = tree.trees[0].blobs[0] # let's get a blob in a sub-tree
assert blob.name
assert len(blob.path) < len(blob.abspath)
assert tree.trees[0].name + '/' + blob.name == blob.path # this is how the relative blob path is generated
assert tree[blob.path] == blob # you can use paths like 'dir/file' in tree[...]
# ![19-test_references_and_objects]
# [20-test_references_and_objects]
assert tree / 'smmap' == tree['smmap']
assert tree / blob.path == tree[blob.path]
# ![20-test_references_and_objects]
# [21-test_references_and_objects]
# This example shows the various types of allowed ref-specs
assert repo.tree() == repo.head.commit.tree
past = repo.commit('HEAD~5')
assert repo.tree(past) == repo.tree(past.hexsha)
assert repo.tree('v0.8.1').type == 'tree' # yes, you can provide any refspec - works everywhere
# ![21-test_references_and_objects]
# [22-test_references_and_objects]
assert len(tree) < len(list(tree.traverse()))
# ![22-test_references_and_objects]
# [23-test_references_and_objects]
index = repo.index
# The index contains all blobs in a flat list
assert len(list(index.iter_blobs())) == len([o for o in repo.head.commit.tree.traverse() if o.type == 'blob'])
# Access blob objects
for (path, stage), entry in index.entries.items():
pass
new_file_path = os.path.join(repo.working_tree_dir, 'new-file-name')
open(new_file_path, 'w').close()
index.add([new_file_path]) # add a new file to the index
index.remove(['LICENSE']) # remove an existing one
assert os.path.isfile(os.path.join(repo.working_tree_dir, 'LICENSE')) # working tree is untouched
assert index.commit("my commit message").type == 'commit' # commit changed index
repo.active_branch.commit = repo.commit('HEAD~1') # forget last commit
from git import Actor
author = Actor("An author", "[email protected]")
committer = Actor("A committer", "[email protected]")
# commit by commit message and author and committer
index.commit("my commit message", author=author, committer=committer)
# ![23-test_references_and_objects]
# [24-test_references_and_objects]
from git import IndexFile
# loads a tree into a temporary index, which exists just in memory
IndexFile.from_tree(repo, 'HEAD~1')
# merge two trees three-way into memory
merge_index = IndexFile.from_tree(repo, 'HEAD~10', 'HEAD', repo.merge_base('HEAD~10', 'HEAD'))
# and persist it
merge_index.write(os.path.join(rw_dir, 'merged_index'))
# ![24-test_references_and_objects]
# [25-test_references_and_objects]
empty_repo = git.Repo.init(os.path.join(rw_dir, 'empty'))
origin = empty_repo.create_remote('origin', repo.remotes.origin.url)
assert origin.exists()
assert origin == empty_repo.remotes.origin == empty_repo.remotes['origin']
origin.fetch() # assure we actually have data. fetch() returns useful information
# Setup a local tracking branch of a remote branch
empty_repo.create_head('master', origin.refs.master).set_tracking_branch(origin.refs.master)
origin.rename('new_origin') # rename remotes
# push and pull behaves similarly to `git push|pull`
origin.pull()
origin.push()
# assert not empty_repo.delete_remote(origin).exists() # create and delete remotes
# ![25-test_references_and_objects]
# [26-test_references_and_objects]
assert origin.url == repo.remotes.origin.url
cw = origin.config_writer
cw.set("pushurl", "other_url")
cw.release()
# Please note that in Python 2, writing origin.config_writer.set(...) is totally safe.
# In Python 3, __del__ calls can be delayed, so changes may not be written in time.
# ![26-test_references_and_objects]
# [27-test_references_and_objects]
hcommit = repo.head.commit
hcommit.diff() # diff tree against index
hcommit.diff('HEAD~1') # diff tree against previous tree
hcommit.diff(None) # diff tree against working tree
index = repo.index
index.diff() # diff index against itself yielding empty diff
index.diff(None) # diff index against working copy
index.diff('HEAD') # diff index against current HEAD tree
# ![27-test_references_and_objects]
# [28-test_references_and_objects]
# Traverse added Diff objects only
for diff_added in hcommit.diff('HEAD~1').iter_change_type('A'):
print(diff_added)
# ![28-test_references_and_objects]
# [29-test_references_and_objects]
# Reset our working tree 10 commits into the past
past_branch = repo.create_head('past_branch', 'HEAD~10')
repo.head.reference = past_branch
assert not repo.head.is_detached
# reset the index and working tree to match the pointed-to commit
repo.head.reset(index=True, working_tree=True)
# To detach your head, you have to point to a commit directly
repo.head.reference = repo.commit('HEAD~5')
assert repo.head.is_detached
# now our head points 15 commits into the past, whereas the working tree
# and index are 10 commits in the past
# ![29-test_references_and_objects]
# [30-test_references_and_objects]
# checkout the branch using git-checkout. It will fail as the working tree appears dirty
self.failUnlessRaises(git.GitCommandError, repo.heads.master.checkout)
repo.heads.past_branch.checkout()
# ![30-test_references_and_objects]
# [31-test_references_and_objects]
git = repo.git
git.checkout('HEAD', b="my_new_branch") # create a new branch
git.branch('another-new-one')
git.branch('-D', 'another-new-one') # pass strings for full control over argument order
git.for_each_ref() # '-' becomes '_' when calling it
# ![31-test_references_and_objects]
def test_submodules(self):
# [1-test_submodules]
repo = self.rorepo
sms = repo.submodules
assert len(sms) == 1
sm = sms[0]
assert sm.name == 'gitdb' # git-python has gitdb as single submodule ...
assert sm.children()[0].name == 'smmap' # ... which has smmap as single submodule
# The module is the repository referenced by the submodule
assert sm.module_exists() # the module is available, which doesn't have to be the case.
assert sm.module().working_tree_dir.endswith('gitdb')
# the submodule's absolute path is the module's path
assert sm.abspath == sm.module().working_tree_dir
assert len(sm.hexsha) == 40 # Its sha defines the commit to checkout
assert sm.exists() # yes, this submodule is valid and exists
# read its configuration conveniently
assert sm.config_reader().get_value('path') == sm.path
assert len(sm.children()) == 1 # query the submodule hierarchy
# ![1-test_submodules]
@with_rw_directory
def test_add_file_and_commit(self, rw_dir):
import git
repo_dir = os.path.join(rw_dir, 'my-new-repo')
file_name = os.path.join(repo_dir, 'new-file')
r = git.Repo.init(repo_dir)
# This function just creates an empty file ...
open(file_name, 'wb').close()
r.index.add([file_name])
r.index.commit("initial commit")
# ![test_add_file_and_commit]
|
bsd-3-clause
|
shanot/imp
|
modules/isd/examples/ubiquitin/replica_exchange.py
|
2
|
8389
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import os
import errno
import atexit
import random
# Comment out one of the two Grid import lines below to select the communication scheme:
# to start pyro
try:
from IMP.isd.PyroGrid import PyroGrid as Grid
# from IMP.isd.FileBasedGrid import FileBasedGrid as Grid
except ImportError:
print("This example needs the Python Pyro module", file=sys.stderr)
sys.exit(0)
from IMP.isd.hosts import create_host_list
import IMP.atom
import IMP.container
import IMP.isd
from IMP.isd.Replica import ReplicaTracker
import _shared_functions as sf
IMP.set_log_level(IMP.NONE)
# simulation settings
# where to output files
outfolder = os.path.join(os.getcwd(), 'results')
# temp dir
tmpdir = os.path.join(os.getcwd(), 'tmp')
# number of replicas / hosts
nreps = 8
# lambda scaling distribution
kB = (1.381 * 6.02214) / 4184.0
lambda_1 = 1.0
lambda_N = 0.8
lambdas = [lambda_N * (lambda_1 / lambda_N) ** ((float(nreps) - k) / (nreps - 1))
for k in xrange(1, nreps + 1)]
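# Note: this is a geometric ladder; the exponent is 1 at k=1 and 0 at k=nreps,
# so the lambdas run from lambda_1 down to lambda_N.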
# thermostat coupling constant (berendsen, in fs)
tau = [500.0] * nreps
# stat_rate is the rate at which to print out traj statistics
# (in units of gibbs sampling steps)
stat_rate = [1] * nreps
# list of files relative to the current dir to copy over to all nodes
initpdb = IMP.isd.get_example_path("generated2.pdb")
charmmtop = IMP.isd.get_example_path("top.lib")
charmmpar = IMP.isd.get_example_path("par.lib")
restraints = IMP.isd.get_example_path("NOE_HN-full_7A_sparse100.tbl")
sequence = IMP.isd.get_example_path('sequence.dat')
# export the files in a local tmp directory
# add whatever you want
filelist = [initpdb, charmmtop, charmmpar, sequence,
restraints, IMP.isd.get_example_path('_shared_functions.py')]
# prefix of output files
nums = [os.path.join(outfolder, 'r%02d' % (i + 1)) for i in xrange(nreps)]
# thermalization (mc parameters stay fixed)
n_therm = 100 # number of loops, where temperatures are scaled to target value
n_hmc_therm = 10 # number of steps per loop
# number of gibbs sampling steps in the first temperature relaxation
n_gibbs1 = 100
# number of gibbs sampling steps in the second temperature relaxation
n_gibbs2 = 100
# number of gibbs sampling steps in the production temperature relaxation
n_gibbs3 = 10000
# number of md steps
n_md = 1000
# number of mc steps (not < 50 because adaptive)
n_mc = 100
# where to run sims
hostlist = ['localhost'] * nreps
#qsub or ssh
grid_method = 'ssh'
# "-now no -cwd -q -j y -N 'slave' -S /bin/bash"
qsub_config = "-inherit $HOSTNAME"
# replica exchange scheme
rex_scheme = 'standard'
# replica exchange swap method
rex_xchg = 'random'
# whether to use TuneRex to tune temperatures
tune_temps = True
tune_data = {'rate': 100, # temp optimization rate, in rex steps.
'method': 'ar',
# either "ar" acceptance ratio optimization, or "flux"
# optimization
'alpha': 0.05} # type I error on the estimates
templog = os.path.join(outfolder, 'temps.txt')
rexlog = os.path.join(outfolder, 'replicanums.txt')
# misc
imppy = os.path.abspath(
os.path.join(os.getenv('IMP_ISD_DATA'), '../../tools/imppy.sh'))
src_path = os.path.abspath(
os.path.join(os.getenv('IMP_ISD_DATA'), '../lib/IMP/isd'))
showX11 = False
grid_debug = False
grid_verbose = False
X11_delay = 1.0
window_size = '80x25'
# pyroGrid
shared_temp_path = True
terminate_during_publish = False
nshost = None
def mkdir_p(path):
"mkdir -p, taken from stackoverflow"
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST:
pass
else:
raise
def launch_grid():
hosts = create_host_list(hostlist, tmpdir)
for host in hosts:
# ugly hack
host.init_cmd = imppy + ' !'
# pyro grid
grid = Grid(hosts, src_path, showX11, X11_delay, grid_debug,
grid_verbose, shared_temp_path, nshost, terminate_during_publish,
method=grid_method, qsub_config=qsub_config)
# uncomment the following lines and comment out the two previous ones to use the file-based grid
# file based grid
# grid = Grid(hosts, src_path, showX11, X11_delay, grid_debug,
# grid_verbose)
#grid.shared_temp_path = shared_temp_path
if showX11:
grid.window_size = window_size
grid.copy_files('./', filelist)
# grid.copy_files(src_path,["_shared_functions.py"])
# start grid on all nodes
grid.start()
return grid
def main():
# launch grid
print("launching grid")
grid = launch_grid()
# publish the shared object
print("publishing sfo")
sfo = sf.sfo()
sfo_id = grid.publish(sfo)
print("communication test")
# get a worker to do a task
#proxy = grid.acquire_service(sfo_id)
# print proxy.hello().get()
# grid.release_service(proxy)
print("broadcast test")
print(grid.gather(grid.broadcast(sfo_id, 'hello')))
# call init on all nodes
print("initializing model")
mkdir_p(tmpdir)
mkdir_p(outfolder)
requests = grid.broadcast(
sfo_id,
'init_model',
tmpdir,
sequence,
initpdb,
restraints)
# wait til init is done
results = grid.gather(requests)
# turn off verbose noise (works because IMP.NONE is picklable, being
# an int.)
grid.gather(grid.broadcast(sfo_id, 'set_checklevel', IMP.NONE))
grid.gather(grid.broadcast(sfo_id, 'set_loglevel', IMP.NONE))
# evaluate the score of the whole system (without derivatives, False flag)
print("initial energy")
grid.gather(grid.broadcast(sfo_id, 'm', 'evaluate', False))
# berendsen 300K tau=0.5ps
# perform two independent MC moves for sigma and gamma
print("initializing simulation and statistics")
grid.gather(grid.scatter(sfo_id, 'init_simulation',
zip(lambdas, tau)))
grid.gather(grid.scatter(sfo_id, 'init_stats', zip(nums, stat_rate)))
replica = ReplicaTracker(nreps, lambdas, grid, sfo_id,
rexlog=rexlog,
scheme=rex_scheme, xchg=rex_xchg,
tune_temps=tune_temps, tune_data=tune_data, templog=templog)
print("thermalization")
for i in range(n_therm):
print("\rgibbs step %d" % i, end=' ')
sys.stdout.flush()
grid.gather(grid.scatter(sfo_id, 'set_inv_temp',
[n_therm / float(i + 1) * l for l in lambdas]))
grid.gather(grid.broadcast(sfo_id, 'do_md', n_hmc_therm))
grid.gather(grid.broadcast(sfo_id, 'write_stats'))
print("start gibbs sampling loop: first relaxation")
for i in range(n_gibbs1):
print("\rgibbs step %d" % i, end=' ')
sys.stdout.flush()
# print " md"
grid.gather(grid.broadcast(sfo_id, 'do_md', n_md))
# print " mc"
grid.gather(grid.broadcast(sfo_id, 'do_mc', n_mc))
grid.gather(grid.broadcast(sfo_id, 'write_stats'))
# print " swaps"
replica.tune_data['dumb_scale'] = 0.5
replica.replica_exchange()
# print " stats"
replica.write_rex_stats()
print("start gibbs sampling loop: second relaxation")
for i in range(n_gibbs2):
print("\rgibbs step %d" % i, end=' ')
sys.stdout.flush()
# print " md"
grid.gather(grid.broadcast(sfo_id, 'do_md', n_md))
# print " mc"
grid.gather(grid.broadcast(sfo_id, 'do_mc', n_mc))
grid.gather(grid.broadcast(sfo_id, 'write_stats'))
# print " swaps"
replica.tune_data['dumb_scale'] = 0.2
replica.replica_exchange()
# print " stats"
replica.write_rex_stats()
print("start gibbs sampling loop: production")
for i in range(n_gibbs3):
print("\rgibbs step %d" % i, end=' ')
sys.stdout.flush()
# print " md"
grid.gather(grid.broadcast(sfo_id, 'do_md', n_md))
# print " mc"
grid.gather(grid.broadcast(sfo_id, 'do_mc', n_mc))
grid.gather(grid.broadcast(sfo_id, 'write_stats'))
# print " swaps"
replica.tune_temps = False
replica.replica_exchange()
# print " stats"
replica.write_rex_stats()
print("terminating grid")
grid.terminate()
print("done.")
if __name__ == '__main__':
main()
|
gpl-3.0
|
ibm-security-intelligence/api-samples
|
custom_actions/04_postCustomActions.py
|
1
|
12475
|
#!/usr/bin/env python3
# This sample demonstrates how to use various POST /custom_actions/ endpoints
# available in the REST API.
#
# WARNING: This sample makes changes to the QRadar system and it is
# recommended that it is not run against a production system.
#
# This script can only be run once, as custom actions require their names
# to be unique.
# Users can edit the assigned names in this script to enable
# multiple executions.
#
# The scenario demonstrates the following actions:
# - How to post custom action scripts to the system.
# - How to update existing scripts on the system.
# - How to post custom actions to the system.
# - How to update existing custom actions on the system.
# To view a list of the endpoints with the parameters they accept, you can view
# the REST API interactive help page on your deployment at
# https://<hostname>/api_doc. You can also retrieve a list of available
# endpoints with the REST API itself at the /api/help/endpoints endpoint.
import json
import sys
import os
import importlib
sys.path.append(os.path.realpath('../modules'))
client_module = importlib.import_module('RestApiClient')
SampleUtilities = importlib.import_module('SampleUtilities')
def main():
# Create our client.
rest_client = client_module.RestApiClient(version='6.0')
# Endpoints used in this sample
scripts_endpoint = 'analytics/custom_actions/scripts'
actions_endpoint = 'analytics/custom_actions/actions'
# Variable to hold the root path to the custom actions sample folder
root_path = os.path.dirname(os.path.realpath(__file__))
# Script file name & path to where it is stored
file_name = 'python_sample.py'
file_path = os.path.join(root_path, 'custom_action_samples', file_name)
# Opening script file in local file system
with open(file_path) as script:
# Adding a request header to contain the file name
# Also setting content-type header to application/octet-stream
request_header = rest_client.headers.copy()
request_header['file_name'] = file_name
request_header['Content-Type'] = 'application/octet-stream'
# Reading the content of the script file & encoding it for use
# with the endpoint.
script_data = script.read()
script_data_encoded = str.encode(script_data)
SampleUtilities.pretty_print_request(rest_client,
scripts_endpoint,
'POST')
# Calling scripts endpoint to POST script file.
response = rest_client.call_api(scripts_endpoint,
'POST',
headers=request_header,
data=script_data_encoded)
# Checking for a successful response code.
if response.code != 201:
print('Failed to POST custom action script to the server')
SampleUtilities.pretty_print_response(response)
sys.exit(1)
script_response = json.loads(response.read().decode('utf-8'))
retrieved_id = str(script_response['id'])
retrieved_name = str(script_response['file_name'])
format_str = 'Script successfully uploaded. Values returned: id=[{0}],'\
' file name=[{1}].\n'
print(format_str.format(retrieved_id, retrieved_name))
print("Demonstrating updating scripts via /scripts/{id} endpoint...")
# This script id will be used with the POST /scripts/{id} endpoint
# and with the POST /actions endpoint.
script_id = script_response['id']
# Demonstrating updating an existing script resource
file_name = 'bash_sample.sh'
file_path = os.path.join(root_path, 'custom_action_samples', file_name)
with open(file_path) as script:
# Adding a request header to contain the file name
# Also setting content-type header to application/octet-stream
request_header = rest_client.headers.copy()
request_header['file_name'] = file_name
request_header['Content-Type'] = 'application/octet-stream'
# Reading the content of the script file & encoding it
# for use with the endpoint.
script_data = script.read()
script_data_encoded = str.encode(script_data)
# Updating endpoint to include /{id}.
scripts_endpoint += '/' + str(script_id)
SampleUtilities.pretty_print_request(rest_client,
scripts_endpoint,
'POST')
# Calling the POST /scripts/{id} endpoint to
# update the script resource.
response = rest_client.call_api(scripts_endpoint,
'POST',
headers=request_header,
data=script_data_encoded)
if (response.code != 200):
print('Failed to POST updated custom action script to the server')
SampleUtilities.pretty_print_response(response)
sys.exit(1)
# Extracting script id and file name from the response.
script_response = json.loads(response.read().decode('utf-8'))
retrieved_id = str(script_response['id'])
retrieved_name = str(script_response['file_name'])
format_str = 'Script successfully updated. Values returned: id=[{0}],'\
' file name=[{1}].\n'
print(format_str.format(retrieved_id, retrieved_name))
# Using the script ID generated by the previous calls we can
# now create a new custom action.
# Custom actions are posted to the server as a complete object.
# This is demonstrated below.
# Dict object to contain the custom action
custom_action = {}
custom_action['name'] = "Custom Action Demonstration"
custom_action['description'] = "Demonstrating POST custom action endpoint"
# GET /interpreters can be used to return a collection of available
# interpreters from which ids can be retrieved. But for demo purposes
# this has been hard coded here to 1.
custom_action['interpreter'] = 1
# ID of script created earlier
custom_action['script'] = script_id
# Custom Action parameters are stored within a list object
custom_action_params = []
# Param dict objects to house each custom action parameter
param1 = {}
param1['name'] = 'demoParam1'
# Must be either 'fixed', or 'dynamic'.
param1['parameter_type'] = 'fixed'
# Only fixed parameters can be encrypted.
# This will encrypt the value of the parameter at storage time
param1['encrypted'] = True
param1['value'] = 'Hello World!'
param2 = {}
param2['name'] = 'demoParam2'
# The value of dynamic parameters will be replaced with the actual value
# occurring in the event which triggers
# the rule containing the custom action
param2['parameter_type'] = 'dynamic'
# Dynamic parameters cannot be encrypted; if set to
# true, it will default back to false
param2['encrypted'] = False
# This value will be replaced with the actual source IP of the event
# which triggered the rule containing the custom action.
# Available dynamic parameter values can be retrieved via the
# /api/ariel/databases/events?fields=columns(name) endpoint.
param2['value'] = 'sourceip'
custom_action_params.append(param1)
custom_action_params.append(param2)
# Adding custom action parameters to custom action
custom_action['parameters'] = custom_action_params
# Converting custom action object to json and
# encoding it for use with the endpoint.
custom_action = json.dumps(custom_action).encode()
action_headers = rest_client.headers.copy()
action_headers['Content-Type'] = 'application/json'
SampleUtilities.pretty_print_request(rest_client,
actions_endpoint,
'POST')
response = rest_client.call_api(actions_endpoint,
'POST',
headers=action_headers,
data=custom_action)
if (response.code != 201):
print('Failed to POST custom action to the server')
SampleUtilities.pretty_print_response(response)
sys.exit(1)
# The created custom action is returned, which will
# have its ID within a new field.
action_response = json.loads(response.read().decode('utf-8'))
action_id = action_response['id']
print("Successfully posted custom action [returned id=" +
str(action_id) + "].")
action_name = str(action_response['name'])
action_desc = str(action_response['description'])
action_interpreter = str(action_response['interpreter'])
action_script = str(action_response['script'])
format_str = 'Custom action values:\n[name={0}'\
', description={1} '\
', interpreter={2}'\
', script={3}].'
print(format_str.format(action_name,
action_desc,
action_interpreter,
action_script))
print("Parameters: ")
for each in action_response['parameters']:
param_name = str(each['name'])
param_type = str(each['parameter_type'])
param_encrypted = str(each['encrypted'])
param_value = str(each['value'])
format_str = '[name={0}'\
', parameter_type={1}'\
', encrypted={2}'\
', value={3}].'
print(format_str.format(param_name,
param_type,
param_encrypted,
param_value))
print()
# Demonstrating the POST /actions/{id} endpoint used
# for updating custom actions
updated_action = {}
updated_action['id'] = action_id
updated_action['name'] = 'Updated Demo Custom Action'
# Interpreter & script required even
# if they remain unchanged.
updated_action['interpreter'] = 2
updated_action['script'] = script_id
# Replacing old params with a single new parameter.
updated_action['parameters'] = [{'name': 'demoParam',
'parameter_type': 'fixed',
'encrypted': False,
'value': 'new param'}]
updated_action = json.dumps(updated_action).encode()
# Appending endpoint with action id.
actions_endpoint += '/' + str(action_id)
SampleUtilities.pretty_print_request(rest_client,
actions_endpoint,
'POST')
response = rest_client.call_api(actions_endpoint,
'POST',
headers=action_headers,
data=updated_action)
if (response.code != 200):
print('Failed to POST custom action [' +
str(action_id) + "] to the server.")
SampleUtilities.pretty_print_response(response)
sys.exit(1)
updated_response = json.loads(response.read().decode('utf-8'))
print("Successfully posted updated custom action [" +
str(action_id) + "] to the server")
action_name = str(updated_response['name'])
action_desc = str(updated_response['description'])
action_interpreter = str(updated_response['interpreter'])
action_script = str(updated_response['script'])
format_str = 'Updated custom action values:\n [name={0}'\
', description={1} '\
', interpreter={2}'\
', script={3}].'
print(format_str.format(action_name,
action_desc,
action_interpreter,
action_script))
print("Parameters: ")
for each in updated_response['parameters']:
param_name = str(each['name'])
param_type = str(each['parameter_type'])
param_encrypted = str(each['encrypted'])
param_value = str(each['value'])
format_str = '[name={0}'\
', parameter_type={1}'\
', encrypted={2}'\
', value={3}].'
print(format_str.format(param_name,
param_type,
param_encrypted,
param_value))
print()
if __name__ == '__main__':
main()
|
apache-2.0
|
rickmendes/ansible-modules-core
|
cloud/docker/docker_network.py
|
19
|
13023
|
#!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
module: docker_network
version_added: "2.2"
short_description: Manage Docker networks
description:
- Create/remove Docker networks and connect containers to them.
- Performs largely the same function as the "docker network" CLI subcommand.
options:
name:
description:
- Name of the network to operate on.
required: true
aliases:
- network_name
connected:
description:
- List of container names or container IDs to connect to a network.
default: null
aliases:
- containers
driver:
description:
- Specify the type of network. Docker provides bridge and overlay drivers, but 3rd party drivers can also be used.
default: bridge
driver_options:
description:
- Dictionary of network settings. Consult docker docs for valid options and values.
default: null
force:
description:
- With state I(absent) forces disconnecting all containers from the
network prior to deleting the network. With state I(present) will
disconnect all containers, delete the network and re-create the
network. This option is required if you have changed the IPAM or
driver options and want an existing network to be updated to use the
new options.
default: false
appends:
description:
- By default the connected list is canonical, meaning containers not on the list are removed from the network.
Use C(appends) to leave existing containers connected.
default: false
aliases:
- incremental
ipam_driver:
description:
- Specify an IPAM driver.
default: null
ipam_options:
description:
- Dictionary of IPAM options.
default: null
state:
description:
- I(absent) deletes the network. If a network has connected containers, it
cannot be deleted. Use the C(force) option to disconnect all containers
and delete the network.
- I(present) creates the network, if it does not already exist with the
specified parameters, and connects the list of containers provided via
the connected parameter. Containers not on the list will be disconnected.
An empty list will leave no containers connected to the network. Use the
C(appends) option to leave existing containers connected. Use the C(force)
option to force re-creation of the network.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- docker
authors:
- "Ben Keith (@keitwb)"
- "Chris Houseknecht (@chouseknecht)"
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "The docker server >= 1.9.0"
'''
EXAMPLES = '''
- name: Create a network
docker_network:
name: network_one
- name: Remove all but selected list of containers
docker_network:
name: network_one
connected:
- container_a
- container_b
- container_c
- name: Remove a single container
docker_network:
name: network_one
connected: "{{ fulllist|difference(['container_a']) }}"
- name: Add a container to a network, leaving existing containers connected
docker_network:
name: network_one
connected:
- container_a
appends: yes
- name: Create a network with options
docker_network:
name: network_two
driver_options:
com.docker.network.bridge.name: net2
ipam_options:
subnet: '172.3.26.0/16'
gateway: 172.3.26.1
iprange: '192.168.1.0/24'
- name: Delete a network, disconnecting all containers
docker_network:
name: network_one
state: absent
force: yes
'''
RETURN = '''
facts:
description: Network inspection results for the affected network.
returned: success
type: complex
sample: {}
'''
from ansible.module_utils.docker_common import *
try:
from docker import utils
from docker.utils.types import Ulimit
except:
# missing docker-py handled in ansible.module_utils.docker
pass
class TaskParameters(DockerBaseClass):
def __init__(self, client):
super(TaskParameters, self).__init__()
self.client = client
self.network_name = None
self.connected = None
self.driver = None
self.driver_options = None
self.ipam_driver = None
self.ipam_options = None
self.appends = None
self.force = None
self.debug = None
for key, value in client.module.params.items():
setattr(self, key, value)
def container_names_in_network(network):
return [c['Name'] for c in network['Containers'].values()]
class DockerNetworkManager(object):
def __init__(self, client):
self.client = client
self.parameters = TaskParameters(client)
self.check_mode = self.client.check_mode
self.results = {
u'changed': False,
u'actions': []
}
self.diff = self.client.module._diff
self.existing_network = self.get_existing_network()
if not self.parameters.connected and self.existing_network:
self.parameters.connected = container_names_in_network(self.existing_network)
state = self.parameters.state
if state == 'present':
self.present()
elif state == 'absent':
self.absent()
def get_existing_network(self):
networks = self.client.networks()
network = None
for n in networks:
if n['Name'] == self.parameters.network_name:
network = n
return network
def has_different_config(self, net):
'''
Evaluates an existing network and returns a tuple containing a boolean
indicating if the configuration is different and a list of differences.
:param net: the inspection output for an existing network
:return: (bool, list)
'''
different = False
differences = []
if self.parameters.driver and self.parameters.driver != net['Driver']:
different = True
differences.append('driver')
if self.parameters.driver_options:
if not net.get('Options'):
different = True
differences.append('driver_options')
else:
for key, value in self.parameters.driver_options.iteritems():
if not net['Options'].get(key) or value != net['Options'][key]:
different = True
differences.append('driver_options.%s' % key)
if self.parameters.ipam_driver:
if not net.get('IPAM') or net['IPAM']['Driver'] != self.parameters.ipam_driver:
different = True
differences.append('ipam_driver')
if self.parameters.ipam_options:
if not net.get('IPAM') or not net['IPAM'].get('Config'):
different = True
differences.append('ipam_options')
else:
for key, value in self.parameters.ipam_options.iteritems():
camelkey = None
for net_key in net['IPAM']['Config'][0]:
if key == net_key.lower():
camelkey = net_key
break
if not camelkey:
# key not found
different = True
differences.append('ipam_options.%s' % key)
elif net['IPAM']['Config'][0].get(camelkey) != value:
# key has different value
different = True
differences.append('ipam_options.%s' % key)
return different, differences
def create_network(self):
if not self.existing_network:
ipam_pools = []
if self.parameters.ipam_options:
ipam_pools.append(utils.create_ipam_pool(**self.parameters.ipam_options))
ipam_config = utils.create_ipam_config(driver=self.parameters.ipam_driver,
pool_configs=ipam_pools)
if not self.check_mode:
resp = self.client.create_network(self.parameters.network_name,
driver=self.parameters.driver,
options=self.parameters.driver_options,
ipam=ipam_config)
self.existing_network = self.client.inspect_network(resp['Id'])
self.results['actions'].append("Created network %s with driver %s" % (self.parameters.network_name, self.parameters.driver))
self.results['changed'] = True
def remove_network(self):
if self.existing_network:
self.disconnect_all_containers()
if not self.check_mode:
self.client.remove_network(self.parameters.network_name)
self.results['actions'].append("Removed network %s" % (self.parameters.network_name,))
self.results['changed'] = True
def is_container_connected(self, container_name):
return container_name in container_names_in_network(self.existing_network)
def connect_containers(self):
for name in self.parameters.connected:
if not self.is_container_connected(name):
if not self.check_mode:
self.client.connect_container_to_network(name, self.parameters.network_name)
self.results['actions'].append("Connected container %s" % (name,))
self.results['changed'] = True
def disconnect_missing(self):
for c in self.existing_network['Containers'].values():
name = c['Name']
if name not in self.parameters.connected:
self.disconnect_container(name)
def disconnect_all_containers(self):
containers = self.client.inspect_network(self.parameters.network_name)['Containers']
for cont in containers.values():
self.disconnect_container(cont['Name'])
def disconnect_container(self, container_name):
if not self.check_mode:
self.client.disconnect_container_from_network(container_name, self.parameters.network_name)
self.results['actions'].append("Disconnected container %s" % (container_name,))
self.results['changed'] = True
def present(self):
different = False
differences = []
if self.existing_network:
different, differences = self.has_different_config(self.existing_network)
if self.parameters.force or different:
self.remove_network()
self.existing_network = None
self.create_network()
self.connect_containers()
if not self.parameters.appends:
self.disconnect_missing()
if self.diff or self.check_mode or self.parameters.debug:
self.results['diff'] = differences
if not self.check_mode and not self.parameters.debug:
self.results.pop('actions')
self.results['ansible_facts'] = {u'ansible_docker_network': self.get_existing_network()}
def absent(self):
self.remove_network()
def main():
argument_spec = dict(
network_name = dict(type='str', required=True, aliases=['name']),
connected = dict(type='list', default=[], aliases=['containers']),
state = dict(type='str', default='present', choices=['present', 'absent']),
driver = dict(type='str', default='bridge'),
driver_options = dict(type='dict', default={}),
force = dict(type='bool', default=False),
appends = dict(type='bool', default=False, aliases=['incremental']),
ipam_driver = dict(type='str', default=None),
ipam_options = dict(type='dict', default={}),
debug = dict(type='bool', default=False)
)
client = AnsibleDockerClient(
argument_spec=argument_spec,
supports_check_mode=True
)
cm = DockerNetworkManager(client)
client.module.exit_json(**cm.results)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
stefanv/aandete
|
app/lib/pygments/lexers/apl.py
|
25
|
3167
|
# -*- coding: utf-8 -*-
"""
pygments.lexers.apl
~~~~~~~~~~~~~~~~~~~
Lexers for APL.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation
__all__ = ['APLLexer']
class APLLexer(RegexLexer):
"""
A simple APL lexer.
.. versionadded:: 2.0
"""
name = 'APL'
aliases = ['apl']
filenames = ['*.apl']
tokens = {
'root': [
# Whitespace
# ==========
(r'\s+', Text),
#
# Comment
# =======
# '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog)
(u'[⍝#].*$', Comment.Single),
#
# Strings
# =======
(r'\'((\'\')|[^\'])*\'', String.Single),
(r'"(("")|[^"])*"', String.Double), # supported by NGN APL
#
# Punctuation
# ===========
# This token type is used for diamond and parenthesis
# but not for bracket and ; (see below)
(u'[⋄◇()]', Punctuation),
#
# Array indexing
# ==============
# Since this token type is very important in APL, it is not included in
# the punctuation token type but rather in the following one
(r'[\[\];]', String.Regex),
#
# Distinguished names
# ===================
# following IBM APL2 standard
(u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function),
#
# Labels
# ======
# following IBM APL2 standard
# (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label),
#
# Variables
# =========
# following IBM APL2 standard
(u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable),
#
# Numbers
# =======
(u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)'
u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?',
Number),
#
# Operators
# ==========
(u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type
(u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]',
Operator),
#
# Constant
# ========
(u'⍬', Name.Constant),
#
# Quad symbol
# ===========
(u'[⎕⍞]', Name.Variable.Global),
#
# Arrows left/right
# =================
(u'[←→]', Keyword.Declaration),
#
# D-Fn
# ====
(u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo),
(r'[{}]', Keyword.Type),
],
}
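# Usage sketch (an illustrative addition; assumes pygments itself is importable):
if __name__ == '__main__':
    from pygments import highlight
    from pygments.formatters import NullFormatter
    # NullFormatter writes the tokenized text back out unchanged, which is
    # enough to exercise the lexer end to end.
    print(highlight(u'avg ← {(+/⍵)÷⍴⍵}', APLLexer(), NullFormatter()))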
|
bsd-3-clause
|
Syrcon/servo
|
tests/wpt/web-platform-tests/tools/pywebsocket/src/mod_pywebsocket/xhr_benchmark_handler.py
|
415
|
3944
|
# Copyright 2014 Google Inc. All rights reserved.
#
# Use of this source code is governed by a BSD-style
# license that can be found in the COPYING file or at
# https://developers.google.com/open-source/licenses/bsd
from mod_pywebsocket import util
class XHRBenchmarkHandler(object):
def __init__(self, headers, rfile, wfile):
self._logger = util.get_class_logger(self)
self.headers = headers
self.rfile = rfile
self.wfile = wfile
def do_send(self):
content_length = int(self.headers.getheader('Content-Length'))
self._logger.debug('Requested to receive %s bytes', content_length)
RECEIVE_BLOCK_SIZE = 1024 * 1024
bytes_to_receive = content_length
while bytes_to_receive > 0:
bytes_to_receive_in_this_loop = bytes_to_receive
if bytes_to_receive_in_this_loop > RECEIVE_BLOCK_SIZE:
bytes_to_receive_in_this_loop = RECEIVE_BLOCK_SIZE
received_data = self.rfile.read(bytes_to_receive_in_this_loop)
if received_data != ('a' * bytes_to_receive_in_this_loop):
self._logger.debug('Request body verification failed')
return
bytes_to_receive -= len(received_data)
if bytes_to_receive < 0:
self._logger.debug('Received %d more bytes than expected' %
(-bytes_to_receive))
return
# Return the number of received bytes back to the client.
response_body = '%d' % content_length
self.wfile.write(
'HTTP/1.1 200 OK\r\n'
'Content-Type: text/html\r\n'
'Content-Length: %d\r\n'
'\r\n%s' % (len(response_body), response_body))
self.wfile.flush()
def do_receive(self):
content_length = int(self.headers.getheader('Content-Length'))
request_body = self.rfile.read(content_length)
request_array = request_body.split(' ')
if len(request_array) < 2:
self._logger.debug('Malformed request body: %r', request_body)
return
# Parse the size parameter.
bytes_to_send = request_array[0]
try:
bytes_to_send = int(bytes_to_send)
except ValueError as e:
self._logger.debug('Malformed size parameter: %r', bytes_to_send)
return
self._logger.debug('Requested to send %s bytes', bytes_to_send)
# Parse the transfer encoding parameter.
chunked_mode = False
mode_parameter = request_array[1]
if mode_parameter == 'chunked':
self._logger.debug('Requested chunked transfer encoding')
chunked_mode = True
elif mode_parameter != 'none':
self._logger.debug('Invalid mode parameter: %r', mode_parameter)
return
# Write a header
response_header = (
'HTTP/1.1 200 OK\r\n'
'Content-Type: application/octet-stream\r\n')
if chunked_mode:
response_header += 'Transfer-Encoding: chunked\r\n\r\n'
else:
response_header += (
'Content-Length: %d\r\n\r\n' % bytes_to_send)
self.wfile.write(response_header)
self.wfile.flush()
# Write a body
SEND_BLOCK_SIZE = 1024 * 1024
while bytes_to_send > 0:
bytes_to_send_in_this_loop = bytes_to_send
if bytes_to_send_in_this_loop > SEND_BLOCK_SIZE:
bytes_to_send_in_this_loop = SEND_BLOCK_SIZE
if chunked_mode:
self.wfile.write('%x\r\n' % bytes_to_send_in_this_loop)
self.wfile.write('a' * bytes_to_send_in_this_loop)
if chunked_mode:
self.wfile.write('\r\n')
self.wfile.flush()
bytes_to_send -= bytes_to_send_in_this_loop
if chunked_mode:
self.wfile.write('0\r\n\r\n')
self.wfile.flush()
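# Minimal exercise sketch (an illustrative addition; the fakes below stand in
# for the real request header and stream objects passed in by the server):
if __name__ == '__main__':
    from StringIO import StringIO
    class _FakeHeaders(object):
        def __init__(self, length):
            self._length = length
        def getheader(self, name):
            return str(self._length)
    body = '1024 none'  # request 1024 bytes, no chunked transfer encoding
    out = StringIO()
    handler = XHRBenchmarkHandler(_FakeHeaders(len(body)), StringIO(body), out)
    handler.do_receive()  # writes a 1024-byte 'a...' response body into `out`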
|
mpl-2.0
|
trendels/rhino
|
rhino/util.py
|
1
|
3960
|
from __future__ import absolute_import
import functools
import inspect
import sys
__all__ = [
'apply_ctx',
'sse_event',
]
def _sse_encode(k, v):
# splitlines() discards the last trailing newline. Append an unambiguous
# newline so that the presence or absence of a trailing newline in the
# input string is preserved.
v += '\r\n'
# Normalize all newlines to \n. This happens anyway during reconstruction:
# https://html.spec.whatwg.org/multipage/comms.html#event-stream-interpretation
return ''.join('%s: %s\n' % (k, line) for line in v.splitlines())
def sse_event(event=None, data=None, id=None, retry=None, comment=None,
encoding='utf-8'):
"""Encode a Server-Sent Event (SSE).
At least one field must be present. All fields are strings, except retry,
which must be an integer. The event and id fields can not contain newlines.
"""
if all(x is None for x in [event, data, id, retry, comment]):
raise TypeError("Event must have at least one field")
if event and any(c in event for c in '\r\n'):
raise ValueError("'event' can not contain newlines: '%s'" % event)
if id and any(c in id for c in '\r\n'):
raise ValueError("'id' can not contain newlines: '%s'" % id)
return ''.join([
_sse_encode('', comment) if comment is not None else '',
_sse_encode('id', id) if id is not None else '',
_sse_encode('event', event) if event is not None else '',
_sse_encode('retry', str(int(retry))) if retry is not None else '',
_sse_encode('data', data) if data is not None else '',
'\n',
]).encode(encoding)
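# Example (an illustrative trace of the function above, not from the module):
#   sse_event(event='greet', data='hello\nworld', id='1', retry=5000)
# encodes, roughly, to:
#   'id: 1\nevent: greet\nretry: 5000\ndata: hello\ndata: world\n\n'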
def dual_use_decorator(fn):
"""Turn a function into a decorator that can be called with or without
arguments."""
@functools.wraps(fn)
def decorator(*args, **kw):
if len(args) == 1 and not kw and callable(args[0]):
return fn()(args[0])
else:
return fn(*args, **kw)
return decorator
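# Illustrative example (the decorator below is hypothetical, not from this module):
#
#   @dual_use_decorator
#   def repeat(n=2):
#       def wrapper(fn):
#           @functools.wraps(fn)
#           def inner(*args, **kw):
#               return [fn(*args, **kw) for _ in range(n)]
#           return inner
#       return wrapper
#
#   @repeat          # bare decorator: takes the fn()(args[0]) branch
#   def hello(): return 'hi'
#
#   @repeat(n=3)     # with arguments: takes the fn(*args, **kw) branch
#   def bye(): return 'bye'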
def dual_use_decorator_method(fn):
"""Turn a method into a decorator that can be called with or without
arguments. """
@functools.wraps(fn)
def decorator(*args, **kw):
if len(args) == 2 and not kw and callable(args[1]):
return fn(args[0])(args[1])
else:
return fn(*args, **kw)
return decorator
def get_args(obj):
"""Get a list of argument names for a callable."""
if inspect.isfunction(obj):
return inspect.getargspec(obj).args
elif inspect.ismethod(obj):
return inspect.getargspec(obj).args[1:]
elif inspect.isclass(obj):
return inspect.getargspec(obj.__init__).args[1:]
elif hasattr(obj, '__call__'):
return inspect.getargspec(obj.__call__).args[1:]
else:
raise TypeError("Can't inspect signature of '%s' object." % obj)
def apply_ctx(fn, ctx):
"""Return fn with ctx partially applied, if requested.
If the `fn` callable accepts an argument named "ctx", returns a
functools.partial object with ctx=ctx applied, else returns `fn` unchanged.
For this to work, the 'ctx' argument must come after any arguments that are
passed as positional arguments. For example, 'ctx' must be the 2nd argument
for request handlers, serializers and deserializers, which are always called
with one positional argument (the request, object to serialize, and input
filehandle, respectively).
"""
if 'ctx' in get_args(fn):
return functools.partial(fn, ctx=ctx)
else:
return fn
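# Example (the handler below is hypothetical, not from this module):
#
#   def handler(request, ctx=None):
#       return ctx['user']
#
#   bound = apply_ctx(handler, {'user': 'alice'})
#   bound(request)   # equivalent to handler(request, ctx={'user': 'alice'})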
def log_exception(exc_info=None, stream=None):
"""Log the 'exc_info' tuple in the server log."""
exc_info = exc_info or sys.exc_info()
stream = stream or sys.stderr
try:
from traceback import print_exception
print_exception(exc_info[0], exc_info[1], exc_info[2], None, stream)
stream.flush()
finally:
exc_info = None # Clear traceback to avoid circular reference
|
mit
|
drexly/tonginBlobStore
|
lib/django/contrib/admin/views/main.py
|
327
|
16684
|
import sys
from collections import OrderedDict
from django.contrib.admin import FieldListFilter
from django.contrib.admin.exceptions import (
DisallowedModelAdminLookup, DisallowedModelAdminToField,
)
from django.contrib.admin.options import (
IS_POPUP_VAR, TO_FIELD_VAR, IncorrectLookupParameters,
)
from django.contrib.admin.utils import (
get_fields_from_path, lookup_needs_distinct, prepare_lookup_value, quote,
)
from django.core.exceptions import (
FieldDoesNotExist, ImproperlyConfigured, SuspiciousOperation,
)
from django.core.paginator import InvalidPage
from django.core.urlresolvers import reverse
from django.db import models
from django.utils import six
from django.utils.encoding import force_text
from django.utils.http import urlencode
from django.utils.translation import ugettext
# Changelist settings
ALL_VAR = 'all'
ORDER_VAR = 'o'
ORDER_TYPE_VAR = 'ot'
PAGE_VAR = 'p'
SEARCH_VAR = 'q'
ERROR_FLAG = 'e'
IGNORED_PARAMS = (
ALL_VAR, ORDER_VAR, ORDER_TYPE_VAR, SEARCH_VAR, IS_POPUP_VAR, TO_FIELD_VAR)
class ChangeList(object):
def __init__(self, request, model, list_display, list_display_links,
list_filter, date_hierarchy, search_fields, list_select_related,
list_per_page, list_max_show_all, list_editable, model_admin):
self.model = model
self.opts = model._meta
self.lookup_opts = self.opts
self.root_queryset = model_admin.get_queryset(request)
self.list_display = list_display
self.list_display_links = list_display_links
self.list_filter = list_filter
self.date_hierarchy = date_hierarchy
self.search_fields = search_fields
self.list_select_related = list_select_related
self.list_per_page = list_per_page
self.list_max_show_all = list_max_show_all
self.model_admin = model_admin
self.preserved_filters = model_admin.get_preserved_filters(request)
# Get search parameters from the query string.
try:
self.page_num = int(request.GET.get(PAGE_VAR, 0))
except ValueError:
self.page_num = 0
self.show_all = ALL_VAR in request.GET
self.is_popup = IS_POPUP_VAR in request.GET
to_field = request.GET.get(TO_FIELD_VAR)
if to_field and not model_admin.to_field_allowed(request, to_field):
raise DisallowedModelAdminToField("The field %s cannot be referenced." % to_field)
self.to_field = to_field
self.params = dict(request.GET.items())
if PAGE_VAR in self.params:
del self.params[PAGE_VAR]
if ERROR_FLAG in self.params:
del self.params[ERROR_FLAG]
if self.is_popup:
self.list_editable = ()
else:
self.list_editable = list_editable
self.query = request.GET.get(SEARCH_VAR, '')
self.queryset = self.get_queryset(request)
self.get_results(request)
if self.is_popup:
title = ugettext('Select %s')
else:
title = ugettext('Select %s to change')
self.title = title % force_text(self.opts.verbose_name)
self.pk_attname = self.lookup_opts.pk.attname
def get_filters_params(self, params=None):
"""
Returns all params except IGNORED_PARAMS
"""
if not params:
params = self.params
lookup_params = params.copy() # a dictionary of the query string
# Remove all the parameters that are globally and systematically
# ignored.
for ignored in IGNORED_PARAMS:
if ignored in lookup_params:
del lookup_params[ignored]
return lookup_params
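    # Example (hypothetical query string): for params
    # {'o': '2', 'q': 'foo', 'year': '2015'}, this drops the ignored
    # 'o' and 'q' keys and returns {'year': '2015'}.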
def get_filters(self, request):
lookup_params = self.get_filters_params()
use_distinct = False
for key, value in lookup_params.items():
if not self.model_admin.lookup_allowed(key, value):
raise DisallowedModelAdminLookup("Filtering by %s not allowed" % key)
filter_specs = []
if self.list_filter:
for list_filter in self.list_filter:
if callable(list_filter):
# This is simply a custom list filter class.
spec = list_filter(request, lookup_params,
self.model, self.model_admin)
else:
field_path = None
if isinstance(list_filter, (tuple, list)):
# This is a custom FieldListFilter class for a given field.
field, field_list_filter_class = list_filter
else:
# This is simply a field name, so use the default
# FieldListFilter class that has been registered for
# the type of the given field.
field, field_list_filter_class = list_filter, FieldListFilter.create
if not isinstance(field, models.Field):
field_path = field
field = get_fields_from_path(self.model, field_path)[-1]
spec = field_list_filter_class(field, request, lookup_params,
self.model, self.model_admin, field_path=field_path)
# Check if we need to use distinct()
use_distinct = (use_distinct or
lookup_needs_distinct(self.lookup_opts,
field_path))
if spec and spec.has_output():
filter_specs.append(spec)
# At this point, all the parameters used by the various ListFilters
# have been removed from lookup_params, which now only contains other
# parameters passed via the query string. We now loop through the
# remaining parameters both to ensure that all the parameters are valid
# fields and to determine if at least one of them needs distinct(). If
# the lookup parameters aren't real fields, then bail out.
try:
for key, value in lookup_params.items():
lookup_params[key] = prepare_lookup_value(key, value)
use_distinct = (use_distinct or
lookup_needs_distinct(self.lookup_opts, key))
return filter_specs, bool(filter_specs), lookup_params, use_distinct
except FieldDoesNotExist as e:
six.reraise(IncorrectLookupParameters, IncorrectLookupParameters(e), sys.exc_info()[2])
def get_query_string(self, new_params=None, remove=None):
if new_params is None:
new_params = {}
if remove is None:
remove = []
p = self.params.copy()
for r in remove:
for k in list(p):
if k.startswith(r):
del p[k]
for k, v in new_params.items():
if v is None:
if k in p:
del p[k]
else:
p[k] = v
return '?%s' % urlencode(sorted(p.items()))
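    # Example (hypothetical values): with self.params == {'o': '2', 'q': 'x'},
    # get_query_string({'p': '1'}, remove=['q']) yields '?o=2&p=1'.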
def get_results(self, request):
paginator = self.model_admin.get_paginator(request, self.queryset, self.list_per_page)
# Get the number of objects, with admin filters applied.
result_count = paginator.count
# Get the total number of objects, with no admin filters applied.
# Perform a slight optimization:
# full_result_count is equal to paginator.count if no filters
# were applied
if self.model_admin.show_full_result_count:
if self.get_filters_params() or self.params.get(SEARCH_VAR):
full_result_count = self.root_queryset.count()
else:
full_result_count = result_count
else:
full_result_count = None
can_show_all = result_count <= self.list_max_show_all
multi_page = result_count > self.list_per_page
# Get the list of objects to display on this page.
if (self.show_all and can_show_all) or not multi_page:
result_list = self.queryset._clone()
else:
try:
result_list = paginator.page(self.page_num + 1).object_list
except InvalidPage:
raise IncorrectLookupParameters
self.result_count = result_count
self.show_full_result_count = self.model_admin.show_full_result_count
# Admin actions are shown if there is at least one entry
# or if entries are not counted because show_full_result_count is disabled
self.show_admin_actions = not self.show_full_result_count or bool(full_result_count)
self.full_result_count = full_result_count
self.result_list = result_list
self.can_show_all = can_show_all
self.multi_page = multi_page
self.paginator = paginator
def _get_default_ordering(self):
ordering = []
if self.model_admin.ordering:
ordering = self.model_admin.ordering
elif self.lookup_opts.ordering:
ordering = self.lookup_opts.ordering
return ordering
def get_ordering_field(self, field_name):
"""
Returns the proper model field name corresponding to the given
field_name to use for ordering. field_name may either be the name of a
proper model field or the name of a method (on the admin or model) or a
callable with the 'admin_order_field' attribute. Returns None if no
proper model field name can be matched.
"""
try:
field = self.lookup_opts.get_field(field_name)
return field.name
except FieldDoesNotExist:
# See whether field_name is a name of a non-field
# that allows sorting.
if callable(field_name):
attr = field_name
elif hasattr(self.model_admin, field_name):
attr = getattr(self.model_admin, field_name)
else:
attr = getattr(self.model, field_name)
return getattr(attr, 'admin_order_field', None)
def get_ordering(self, request, queryset):
"""
Returns the list of ordering fields for the change list.
First we check the get_ordering() method in model admin, then we check
the object's default ordering. Then, any manually-specified ordering
from the query string overrides anything. Finally, a deterministic
order is guaranteed by ensuring the primary key is used as the last
ordering field.
"""
params = self.params
ordering = list(self.model_admin.get_ordering(request)
or self._get_default_ordering())
if ORDER_VAR in params:
# Clear ordering and used params
ordering = []
order_params = params[ORDER_VAR].split('.')
for p in order_params:
try:
none, pfx, idx = p.rpartition('-')
field_name = self.list_display[int(idx)]
order_field = self.get_ordering_field(field_name)
if not order_field:
continue # No 'admin_order_field', skip it
# reverse order if order_field has already "-" as prefix
if order_field.startswith('-') and pfx == "-":
ordering.append(order_field[1:])
else:
ordering.append(pfx + order_field)
except (IndexError, ValueError):
continue # Invalid ordering specified, skip it.
# Add the given query's ordering fields, if any.
ordering.extend(queryset.query.order_by)
# Ensure that the primary key is systematically present in the list of
# ordering fields so we can guarantee a deterministic order across all
# database backends.
pk_name = self.lookup_opts.pk.name
if not (set(ordering) & {'pk', '-pk', pk_name, '-' + pk_name}):
# The two sets do not intersect, meaning the pk isn't present. So
# we add it.
ordering.append('-pk')
return ordering
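    # Example (hypothetical): with '?o=2.-1' in the URL and
    # list_display = ('name', 'age', 'city'), this yields ['city', '-age'],
    # with '-pk' appended for a deterministic order.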
def get_ordering_field_columns(self):
"""
Returns an OrderedDict of ordering field column numbers and asc/desc
"""
# We must cope with more than one column having the same underlying sort
# field, so we base things on column numbers.
ordering = self._get_default_ordering()
ordering_fields = OrderedDict()
if ORDER_VAR not in self.params:
# for ordering specified on ModelAdmin or model Meta, we don't know
# the right column numbers absolutely, because there might be more
# than one column associated with that ordering, so we guess.
for field in ordering:
if field.startswith('-'):
field = field[1:]
order_type = 'desc'
else:
order_type = 'asc'
for index, attr in enumerate(self.list_display):
if self.get_ordering_field(attr) == field:
ordering_fields[index] = order_type
break
else:
for p in self.params[ORDER_VAR].split('.'):
none, pfx, idx = p.rpartition('-')
try:
idx = int(idx)
except ValueError:
continue # skip it
ordering_fields[idx] = 'desc' if pfx == '-' else 'asc'
return ordering_fields
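    # Example: with '?o=0.-2' in the URL, this returns
    # OrderedDict([(0, 'asc'), (2, 'desc')]) -- column 0 ascending,
    # column 2 descending.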
def get_queryset(self, request):
# First, we collect all the declared list filters.
(self.filter_specs, self.has_filters, remaining_lookup_params,
filters_use_distinct) = self.get_filters(request)
# Then, we let every list filter modify the queryset to its liking.
qs = self.root_queryset
for filter_spec in self.filter_specs:
new_qs = filter_spec.queryset(request, qs)
if new_qs is not None:
qs = new_qs
try:
# Finally, we apply the remaining lookup parameters from the query
# string (i.e. those that haven't already been processed by the
# filters).
qs = qs.filter(**remaining_lookup_params)
except (SuspiciousOperation, ImproperlyConfigured):
# Allow certain types of errors to be re-raised as-is so that the
# caller can treat them in a special way.
raise
except Exception as e:
            # Every other error is caught with a broad "except Exception",
            # because we don't have any other way of validating lookup
            # parameters. They might be invalid if the keyword arguments are
            # incorrect, or if the values are not of the correct type, so we
            # might get FieldError, ValueError, ValidationError, or some
            # other unexpected exception.
raise IncorrectLookupParameters(e)
if not qs.query.select_related:
qs = self.apply_select_related(qs)
# Set ordering.
ordering = self.get_ordering(request, qs)
qs = qs.order_by(*ordering)
# Apply search results
qs, search_use_distinct = self.model_admin.get_search_results(
request, qs, self.query)
# Remove duplicates from results, if necessary
        if filters_use_distinct or search_use_distinct:
return qs.distinct()
else:
return qs
def apply_select_related(self, qs):
if self.list_select_related is True:
return qs.select_related()
if self.list_select_related is False:
if self.has_related_field_in_list_display():
return qs.select_related()
if self.list_select_related:
return qs.select_related(*self.list_select_related)
return qs
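    # Clarifying note: list_select_related=True applies select_related()
    # broadly; a sequence such as ('author', 'publisher') restricts it to
    # those relations; False only applies it when list_display contains a
    # related field (the has_related_field_in_list_display() check below).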
def has_related_field_in_list_display(self):
for field_name in self.list_display:
try:
field = self.lookup_opts.get_field(field_name)
except FieldDoesNotExist:
pass
else:
if isinstance(field.remote_field, models.ManyToOneRel):
return True
return False
def url_for_result(self, result):
pk = getattr(result, self.pk_attname)
return reverse('admin:%s_%s_change' % (self.opts.app_label,
self.opts.model_name),
args=(quote(pk),),
current_app=self.model_admin.admin_site.name)
|
bsd-3-clause
|
zlorb/mitmproxy
|
mitmproxy/tools/console/master.py
|
3
|
6745
|
import asyncio
import mailcap
import mimetypes
import os
import os.path
import shlex
import signal
import stat
import subprocess
import sys
import tempfile
import typing # noqa
import contextlib
import urwid
from mitmproxy import addons
from mitmproxy import master
from mitmproxy import log
from mitmproxy.addons import intercept
from mitmproxy.addons import eventstore
from mitmproxy.addons import readfile
from mitmproxy.addons import view
from mitmproxy.tools.console import consoleaddons
from mitmproxy.tools.console import defaultkeys
from mitmproxy.tools.console import keymap
from mitmproxy.tools.console import palettes
from mitmproxy.tools.console import signals
from mitmproxy.tools.console import window
class ConsoleMaster(master.Master):
def __init__(self, opts):
super().__init__(opts)
self.start_err: typing.Optional[log.LogEntry] = None
self.view: view.View = view.View()
self.events = eventstore.EventStore()
self.events.sig_add.connect(self.sig_add_log)
self.stream_path = None
self.keymap = keymap.Keymap(self)
defaultkeys.map(self.keymap)
self.options.errored.connect(self.options_error)
self.view_stack = []
signals.call_in.connect(self.sig_call_in)
self.addons.add(*addons.default_addons())
self.addons.add(
intercept.Intercept(),
self.view,
self.events,
consoleaddons.UnsupportedLog(),
readfile.ReadFile(),
consoleaddons.ConsoleAddon(self),
keymap.KeymapConfig(),
)
def sigint_handler(*args, **kwargs):
self.prompt_for_exit()
signal.signal(signal.SIGINT, sigint_handler)
self.window = None
def __setattr__(self, name, value):
super().__setattr__(name, value)
signals.update_settings.send(self)
def options_error(self, opts, exc):
signals.status_message.send(
message=str(exc),
expire=1
)
def prompt_for_exit(self):
signals.status_prompt_onekey.send(
self,
prompt = "Quit",
keys = (
("yes", "y"),
("no", "n"),
),
callback = self.quit,
)
def sig_add_log(self, event_store, entry: log.LogEntry):
if log.log_tier(self.options.console_eventlog_verbosity) < log.log_tier(entry.level):
return
if entry.level in ("error", "warn", "alert"):
signals.status_message.send(
message = (
entry.level,
"{}: {}".format(entry.level.title(), str(entry.msg).lstrip())
),
expire=5
)
def sig_call_in(self, sender, seconds, callback, args=()):
def cb(*_):
return callback(*args)
self.loop.set_alarm_in(seconds, cb)
@contextlib.contextmanager
def uistopped(self):
self.loop.stop()
try:
yield
finally:
self.loop.start()
self.loop.screen_size = None
self.loop.draw_screen()
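    # Usage sketch: temporarily leave the urwid loop while a blocking
    # subprocess runs, then force a full redraw on re-entry:
    #
    #     with self.uistopped():
    #         subprocess.call(["less", path])  # hypothetical pager call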
def spawn_editor(self, data):
text = not isinstance(data, bytes)
fd, name = tempfile.mkstemp('', "mproxy", text=text)
with open(fd, "w" if text else "wb") as f:
f.write(data)
# if no EDITOR is set, assume 'vi'
c = os.environ.get("EDITOR") or "vi"
cmd = shlex.split(c)
cmd.append(name)
with self.uistopped():
            try:
                subprocess.call(cmd)
            except Exception:
                signals.status_message.send(
                    message="Can't start editor: %s" % c
                )
else:
with open(name, "r" if text else "rb") as f:
data = f.read()
os.unlink(name)
return data
def spawn_external_viewer(self, data, contenttype):
if contenttype:
contenttype = contenttype.split(";")[0]
ext = mimetypes.guess_extension(contenttype) or ""
else:
ext = ""
fd, name = tempfile.mkstemp(ext, "mproxy")
os.write(fd, data)
os.close(fd)
# read-only to remind the user that this is a view function
os.chmod(name, stat.S_IREAD)
cmd = None
shell = False
if contenttype:
c = mailcap.getcaps()
cmd, _ = mailcap.findmatch(c, contenttype, filename=name)
if cmd:
shell = True
if not cmd:
# hm which one should get priority?
c = os.environ.get("PAGER") or os.environ.get("EDITOR")
if not c:
c = "less"
cmd = shlex.split(c)
cmd.append(name)
with self.uistopped():
            try:
                subprocess.call(cmd, shell=shell)
            except Exception:
                signals.status_message.send(
                    message="Can't start external viewer: %s" % (
                        cmd if isinstance(cmd, str) else " ".join(cmd)
                    )
                )
os.unlink(name)
def set_palette(self, opts, updated):
self.ui.register_palette(
palettes.palettes[opts.console_palette].palette(
opts.console_palette_transparent
)
)
self.ui.clear()
def inject_key(self, key):
self.loop.process_input([key])
def run(self):
if not sys.stdout.isatty():
print("Error: mitmproxy's console interface requires a tty. "
"Please run mitmproxy in an interactive shell environment.", file=sys.stderr)
sys.exit(1)
self.ui = window.Screen()
self.ui.set_terminal_properties(256)
self.set_palette(self.options, None)
self.options.subscribe(
self.set_palette,
["console_palette", "console_palette_transparent"]
)
self.loop = urwid.MainLoop(
urwid.SolidFill("x"),
event_loop=urwid.AsyncioEventLoop(loop=asyncio.get_event_loop()),
screen = self.ui,
handle_mouse = self.options.console_mouse,
)
self.window = window.Window(self)
self.loop.widget = self.window
self.window.refresh()
if self.start_err:
def display_err(*_):
self.sig_add_log(None, self.start_err)
self.start_err = None
self.loop.set_alarm_in(0.01, display_err)
super().run_loop(self.loop.run)
def overlay(self, widget, **kwargs):
self.window.set_overlay(widget, **kwargs)
def switch_view(self, name):
self.window.push(name)
def quit(self, a):
if a != "n":
self.shutdown()
|
mit
|
tomriddle1234/dgsCalc
|
dgsCheckFileExists.py
|
1
|
2776
|
# encoding=utf8
#Input is a CSV file path and a search target folder.
import fixutf8
import sys,os
import logging
import fnmatch
import argparse
import collections
from dgsUtil import *
parser = argparse.ArgumentParser(description='This program is to check file existence for DGS. Also generating a log file with the name of the output, end with .log ')
parser.add_argument('-i','--input', help='CSV file path contains list of code of works.', required=True)
parser.add_argument('-t','--targetpath', help='Target path contains original files need to be searched.', required=True )
parser.add_argument('-o','--output', help='CSV file with a list of existing files abs paths', required=True)
args = vars(parser.parse_args())
inputCSVPath = args['input']
outputFileListPath = args['output']
targetPath = args['targetpath']
print "输入编号列表:%s" % gbk2utf8(inputCSVPath)
print "输出文件绝对路径列表:%s" % gbk2utf8(outputFileListPath)
print "检测目标文件夹:%s" % gbk2utf8(targetPath)
print "##########################"
if not inputCSVPath or not outputFileListPath or not targetPath:
print "Input argument Error."
logFilePath = os.path.splitext(outputFileListPath)[0] + '.log'
print "记录文件: %s" % gbk2utf8(logFilePath)
missingFilePath = os.path.splitext(outputFileListPath)[0]+'_missing.csv'
print "丢失文件列表: %s" % gbk2utf8(missingFilePath)
logging.basicConfig(filename=logFilePath, level=logging.DEBUG,format='%(asctime)s %(message)s')
logging.info('Started checking files.')
outputFileList = []
missingFileList = []
loadcsv(inputCSVPath)
for ele in csvtable:
matches = []
print "正在查找 %s" % ele[0].strip()
for root,dirname,filenames in os.walk(targetPath):
for filename in fnmatch.filter(filenames,ele[0].strip()+'*.*'):
matches.append(os.path.join(root,filename))
if not matches:
#Output to missing list
missingFileList.append(ele[0].strip())
outstr = "文件不存在:编号 %s 未找到任何文件。" % ele[0].strip()
print outstr
logging.warning(outstr)
else:
print "OK"
outputFileList += matches
#Check duplicated filenames in the output
basenameList = []
for item in matches:
basenameList.append(os.path.basename(item))
dupList = [item for item, count in collections.Counter(basenameList).items() if count > 1]
for item in dupList:
for ele in matches:
if os.path.basename(ele) == item:
outstr = ">>>文件 %s 存在重复<<<" % ele
print outstr
logging.warning(outstr)
#write out csv file
writecsv(outputFileList, outputFileListPath)
#write out missingFileList
writecsv(missingFileList, missingFilePath)
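#Example invocation (hypothetical paths):
#  python dgsCheckFileExists.py -i codes.csv -t /data/originals -o found.csv
#This writes found.csv, plus found_missing.csv and found.log next to it.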
|
gpl-3.0
|
pektin/jam
|
compiler/llvm/builtins.py
|
1
|
7773
|
import logging
from functools import partial
from .state import State
from .util import *
from .. import lekvar
from . import bindings as llvm
def builtins(logger = logging.getLogger()):
global printf
printf = None
string = LLVMType("String")
size = LLVMType("Int64")
ints = [
LLVMType("Int8"),
LLVMType("Int16"),
LLVMType("Int32"),
LLVMType("Int64"),
LLVMType("Int128"),
]
floats = [
LLVMType("Float16"),
LLVMType("Float32"),
LLVMType("Float64"),
]
bool = LLVMType("Bool")
void = lekvar.VoidType("Void")
builtin_objects = [string, bool, void] + ints + floats
# (the types the method applies to, the name, the instruction, additional arguments)
methods = [
(ints, "intAdd", llvm.Builder.iAdd, []),
(ints, "intSub", llvm.Builder.iSub, []),
(ints, "intMul", llvm.Builder.iMul, []),
(ints, "intDiv", llvm.Builder.siDiv, []),
(ints, "intRem", llvm.Builder.siRem, []),
(ints, "intEqual", llvm.Builder.iCmp, [llvm.IntPredicate.equal]),
(ints, "intUnequal", llvm.Builder.iCmp, [llvm.IntPredicate.unequal]),
(ints, "intGreaterThan", llvm.Builder.iCmp, [llvm.IntPredicate.signed_greater_than]),
(ints, "intGreaterOrEqualTo", llvm.Builder.iCmp, [llvm.IntPredicate.signed_greater_or_equal_to]),
(ints, "intSmallerThan", llvm.Builder.iCmp, [llvm.IntPredicate.signed_less_than]),
(ints, "intSmallerOrEqualTo", llvm.Builder.iCmp, [llvm.IntPredicate.signed_less_or_equal_to]),
(floats, "floatAdd", llvm.Builder.fAdd, []),
(floats, "floatSub", llvm.Builder.fSub, []),
(floats, "floatMul", llvm.Builder.fMul, []),
(floats, "floatDiv", llvm.Builder.fDiv, []),
(floats, "floatRem", llvm.Builder.fRem, []),
(floats, "floatGreaterThan", llvm.Builder.fCmp, [llvm.RealPredicate.ordered_greater_than]),
(floats, "floatGreaterOrEqualTo", llvm.Builder.fCmp, [llvm.RealPredicate.ordered_greater_or_equal_to]),
(floats, "floatSmallerThan", llvm.Builder.fCmp, [llvm.RealPredicate.ordered_less_than]),
(floats, "floatSmallerOrEqualTo", llvm.Builder.fCmp, [llvm.RealPredicate.ordered_less_or_equal_to]),
]
for types, name, instruction, arguments in methods:
functions = []
for type in types:
return_type = bool if arguments else type
functions.append(
LLVMFunction("", [type, type], return_type,
partial(llvmInstructionWrapper, instruction,
args_before=arguments)
)
)
builtin_objects.append(
lekvar.Method(name, functions)
)
# int -> float conversions
for int_t in ints:
for float_t in floats:
name = int_t.name + "To" + float_t.name
wrap = partial(llvmInstructionWrapper, llvm.Builder.iToF, args_after=[float_t.emitType()])
function = LLVMFunction(name, [int_t], float_t, wrap)
builtin_objects.append(function)
# float -> int conversions
for float_t in floats:
for int_t in ints:
name = float_t.name + "To" + int_t.name
wrap = partial(llvmInstructionWrapper, llvm.Builder.fToI, args_after=[int_t.emitType()])
function = LLVMFunction(name, [float_t], int_t, wrap)
builtin_objects.append(function)
builtin_objects.append(
lekvar.Method("print",
[LLVMFunction("", [type], None, partial(llvmPrintfWrapper, type))
for type in (ints + floats + [string])],
),
)
builtin_objects.append(lekvar.ExternalFunction("alloc", "calloc", [size], void))
builtin_objects.append(lekvar.ExternalFunction("free", "free", [void], None))
builtin_objects.append(lekvar.ExternalFunction("realloc", "realloc", [void, size], void))
builtin_objects.append(LLVMFunction("ptrOffset", [void, size], void, llvmOffsetWrapper))
module = lekvar.Module("_builtins", builtin_objects)
module.verify()
return module
def llvmInstructionWrapper(instruction, self, args_before = [], args_after = []):
entry = self.llvm_value.appendBlock("")
with State.blockScope(entry):
args = [self.llvm_value.getParam(i) for i in range(len(self.type.arguments))]
arguments = [State.builder] + args_before + args + args_after + [""]
return_value = instruction(*arguments)
State.builder.ret(return_value)
def llvmOffsetWrapper(self):
entry = self.llvm_value.appendBlock("")
with State.blockScope(entry):
ptr = self.llvm_value.getParam(0)
offset = self.llvm_value.getParam(1)
result = State.builder.inBoundsGEP(ptr, [offset], "")
State.builder.ret(result)
PRINTF_MAP = {
"String": "s",
"Int8": "hhd",
"Int16": "hd",
"Int32": "d",
"Int64": "ld",
"Int128": "lld",
"Float16": "hg",
"Float32": "g",
"Float64": "lg",
}
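# Illustration (assumed behavior): for an Int32 parameter, llvmPrintfWrapper
# below builds the format string "%d" and emits the equivalent of
# printf("%d", value); for Float64 the format string is "%lg".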
def llvmPrintfWrapper(type, self):
global printf
if printf is None:
func_type = llvm.Function.new(LLVMType("Int32").emitType(), [LLVMType("String").emitType()], True)
printf = State.module.addFunction("printf", func_type)
entry = self.llvm_value.appendBlock("")
with State.blockScope(entry):
fmt_str_data = "%{}".format(PRINTF_MAP[type.name])
fmt_string = State.builder.globalString(fmt_str_data, "")
value = self.llvm_value.getParam(0)
State.builder.call(printf, [fmt_string, value], "")
State.builder.retVoid()
#
# Temporary
#
class LLVMType(lekvar.Type, lekvar.BoundObject):
def __init__(self, name:str):
lekvar.BoundObject.__init__(self, name)
def verify(self):
self._stats = lekvar.stats.ScopeStats(self.parent)
self.stats.static = True
self.stats.forward = False
def resolveType(self):
raise InternalError("LLVMTypes are typeless")
@property
def local_context(self):
raise InternalError("LLVMTypes do not have a local context")
def checkCompatibility(self, other:lekvar.Type, check_cache = None):
other = other.resolveValue()
if isinstance(other, LLVMType):
if self.name == other.name:
return True
return False
def __repr__(self):
return "{}<{}>".format(self.__class__.__name__, self.name)
# Emission
LLVM_MAP = None
def resetLocalEmission(self):
return None
def emit(self):
pass
def emitType(self):
if LLVMType.LLVM_MAP is None:
LLVMType.LLVM_MAP = {
"String": llvm.Pointer.new(llvm.Int.new(8), 0),
"Bool": llvm.Int.new(1),
"Int8": llvm.Int.new(8),
"Int16": llvm.Int.new(16),
"Int32": llvm.Int.new(32),
"Int64": llvm.Int.new(64),
"Int128": llvm.Int.new(128),
"Float16": llvm.Float.half(),
"Float32": llvm.Float.float(),
"Float64": llvm.Float.double(),
}
return LLVMType.LLVM_MAP[self.name]
class LLVMFunction(lekvar.ExternalFunction):
generator = None
def __init__(self, name:str, arguments:[lekvar.Type], return_type:lekvar.Type, generator):
lekvar.ExternalFunction.__init__(self, name, name, arguments, return_type)
self.generator = generator
@property
def local_context(self):
raise InternalError("LLVMFunctions do not have a local context")
# Emission
def resetLocalEmission(self):
return None
def emit(self):
if self.llvm_value is None:
lekvar.ExternalFunction.emit(self)
self.generator(self)
|
mit
|
Obus/scikit-learn
|
examples/semi_supervised/plot_label_propagation_structure.py
|
247
|
2432
|
"""
==============================================
Label Propagation learning a complex structure
==============================================
Example of LabelPropagation learning a complex internal structure
to demonstrate "manifold learning". The outer circle should be
labeled "red" and the inner circle "blue". Because both label groups
lie inside their own distinct shape, we can see that the labels
propagate correctly around the circle.
"""
print(__doc__)
# Authors: Clay Woolam <[email protected]>
# Andreas Mueller <[email protected]>
# Licence: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn.semi_supervised import label_propagation
from sklearn.datasets import make_circles
# generate ring with inner box
n_samples = 200
X, y = make_circles(n_samples=n_samples, shuffle=False)
outer, inner = 0, 1
labels = -np.ones(n_samples)
labels[0] = outer
labels[-1] = inner
###############################################################################
# Learn with LabelSpreading
label_spread = label_propagation.LabelSpreading(kernel='knn', alpha=1.0)
label_spread.fit(X, labels)
###############################################################################
# Plot output labels
output_labels = label_spread.transduction_
plt.figure(figsize=(8.5, 4))
plt.subplot(1, 2, 1)
plot_outer_labeled, = plt.plot(X[labels == outer, 0],
X[labels == outer, 1], 'rs')
plot_unlabeled, = plt.plot(X[labels == -1, 0], X[labels == -1, 1], 'g.')
plot_inner_labeled, = plt.plot(X[labels == inner, 0],
X[labels == inner, 1], 'bs')
plt.legend((plot_outer_labeled, plot_inner_labeled, plot_unlabeled),
('Outer Labeled', 'Inner Labeled', 'Unlabeled'), 'upper left',
numpoints=1, shadow=False)
plt.title("Raw data (2 classes=red and blue)")
plt.subplot(1, 2, 2)
output_label_array = np.asarray(output_labels)
outer_numbers = np.where(output_label_array == outer)[0]
inner_numbers = np.where(output_label_array == inner)[0]
plot_outer, = plt.plot(X[outer_numbers, 0], X[outer_numbers, 1], 'rs')
plot_inner, = plt.plot(X[inner_numbers, 0], X[inner_numbers, 1], 'bs')
plt.legend((plot_outer, plot_inner), ('Outer Learned', 'Inner Learned'),
'upper left', numpoints=1, shadow=False)
plt.title("Labels learned with Label Spreading (KNN)")
plt.subplots_adjust(left=0.07, bottom=0.07, right=0.93, top=0.92)
plt.show()
|
bsd-3-clause
|
RealTimeWeb/wikisite
|
MoinMoin/script/migration/data.py
|
1
|
2861
|
# -*- coding: iso-8859-1 -*-
"""
MoinMoin - data_dir migration main script (new style)
You can use this script to migrate your wiki's data_dir to the format
expected by the current MoinMoin code. It will read data/meta to determine
what needs to be done and call other migration scripts as needed.
You must run this script as owner of the wiki files, usually this is the
web server user (like www-data).
    Important: you must have manually run all 12_to_13* migration scripts and
               the final 152_to_1050300 script ONCE, in the correct order,
               before attempting to use the new-style migration machinery.
@copyright: 2006 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
import os
from MoinMoin import wikiutil
from MoinMoin.script import MoinScript
class PluginScript(MoinScript):
"""\
Purpose:
========
    This tool allows you to migrate page data to a newer version.
Detailed Instructions:
======================
General syntax: moin [options] migration data [migration-data-options]
[options] usually should be:
--config-dir=/path/to/my/cfg/ --wiki-url=http://wiki.example.org/
[migration-data-options] see below:
Please note:
* You must run this script as the owner of the wiki files.
* The file docs/UPDATE.html contains the general instructions
for upgrading a wiki.
"""
def __init__(self, argv, def_values):
MoinScript.__init__(self, argv, def_values)
self.parser.add_option(
"--all", action="store_true", dest="all_wikis",
help="when given, update all wikis that belong to this farm"
)
def mainloop(self):
self.init_request()
request = self.request
data_dir = request.cfg.data_dir
meta_fname = os.path.join(data_dir, 'meta')
while True:
try:
meta = wikiutil.MetaDict(meta_fname, request.cfg.cache_dir)
try:
curr_rev = meta['data_format_revision']
mig_name = str(curr_rev)
execute = wikiutil.importBuiltinPlugin('script.migration', mig_name)
print "Calling migration script for %s, base revision %d" % (data_dir, curr_rev)
curr_rev = execute(self, data_dir, curr_rev)
if curr_rev is None:
print "Final mig script reached, migration is complete."
break
else:
print "Returned. New rev is %d." % curr_rev
meta['data_format_revision'] = curr_rev
meta.sync()
except wikiutil.PluginMissingError:
print "Error: There is no script for %s." % mig_name
break
finally:
del meta
|
apache-2.0
|
CMDann/oppia
|
extensions/interactions/Continue/Continue.py
|
9
|
1168
|
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from extensions.interactions import base
class Continue(base.BaseInteraction):
"""Interaction that takes the form of a simple 'Continue' button."""
name = 'Continue Button'
description = 'A simple \'go to next state\' button.'
display_mode = base.DISPLAY_MODE_INLINE
_dependency_ids = []
answer_type = 'Null'
_customization_arg_specs = [{
'name': 'buttonText',
'description': 'Button label',
'schema': {
'type': 'unicode',
},
'default_value': 'Continue',
}]
|
apache-2.0
|
coolbombom/CouchPotatoServer
|
libs/enzyme/language.py
|
180
|
15146
|
# -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <[email protected]>
# Copyright 2003-2006 Dirk Meyer <[email protected]>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import re
__all__ = ['resolve']
def resolve(code):
"""
Transform the given (2- or 3-letter) language code to a human readable
language name. The return value is a 2-tuple containing the given
language code and the language name. If the language code cannot be
resolved, name will be 'Unknown (<code>)'.
"""
if not code:
return None, None
if not isinstance(code, basestring):
raise ValueError('Invalid language code specified by parser')
# Take up to 3 letters from the code.
code = re.split(r'[^a-z]', code.lower())[0][:3]
for spec in codes:
if code in spec[:-1]:
return code, spec[-1]
return code, u'Unknown (%r)' % code
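# Examples (hypothetical codes): resolve(u'en') -> (u'en', u'English');
# an unrecognized code such as resolve(u'zzq') -> (u'zzq', u"Unknown (u'zzq')").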
# Parsed from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
codes = (
('aar', 'aa', u'Afar'),
('abk', 'ab', u'Abkhazian'),
('ace', u'Achinese'),
('ach', u'Acoli'),
('ada', u'Adangme'),
('ady', u'Adyghe'),
('afa', u'Afro-Asiatic '),
('afh', u'Afrihili'),
('afr', 'af', u'Afrikaans'),
('ain', u'Ainu'),
('aka', 'ak', u'Akan'),
('akk', u'Akkadian'),
('alb', 'sq', u'Albanian'),
('ale', u'Aleut'),
('alg', u'Algonquian languages'),
('alt', u'Southern Altai'),
('amh', 'am', u'Amharic'),
('ang', u'English, Old '),
('anp', u'Angika'),
('apa', u'Apache languages'),
('ara', 'ar', u'Arabic'),
('arc', u'Official Aramaic '),
('arg', 'an', u'Aragonese'),
('arm', 'hy', u'Armenian'),
('arn', u'Mapudungun'),
('arp', u'Arapaho'),
('art', u'Artificial '),
('arw', u'Arawak'),
('asm', 'as', u'Assamese'),
('ast', u'Asturian'),
('ath', u'Athapascan languages'),
('aus', u'Australian languages'),
('ava', 'av', u'Avaric'),
('ave', 'ae', u'Avestan'),
('awa', u'Awadhi'),
('aym', 'ay', u'Aymara'),
('aze', 'az', u'Azerbaijani'),
('bad', u'Banda languages'),
('bai', u'Bamileke languages'),
('bak', 'ba', u'Bashkir'),
('bal', u'Baluchi'),
('bam', 'bm', u'Bambara'),
('ban', u'Balinese'),
('baq', 'eu', u'Basque'),
('bas', u'Basa'),
('bat', u'Baltic '),
('bej', u'Beja'),
('bel', 'be', u'Belarusian'),
('bem', u'Bemba'),
('ben', 'bn', u'Bengali'),
('ber', u'Berber '),
('bho', u'Bhojpuri'),
('bih', 'bh', u'Bihari'),
('bik', u'Bikol'),
('bin', u'Bini'),
('bis', 'bi', u'Bislama'),
('bla', u'Siksika'),
('bnt', u'Bantu '),
('bos', 'bs', u'Bosnian'),
('bra', u'Braj'),
('bre', 'br', u'Breton'),
('btk', u'Batak languages'),
('bua', u'Buriat'),
('bug', u'Buginese'),
('bul', 'bg', u'Bulgarian'),
('bur', 'my', u'Burmese'),
('byn', u'Blin'),
('cad', u'Caddo'),
('cai', u'Central American Indian '),
('car', u'Galibi Carib'),
('cat', 'ca', u'Catalan'),
('cau', u'Caucasian '),
('ceb', u'Cebuano'),
('cel', u'Celtic '),
('cha', 'ch', u'Chamorro'),
('chb', u'Chibcha'),
('che', 'ce', u'Chechen'),
('chg', u'Chagatai'),
('chi', 'zh', u'Chinese'),
('chk', u'Chuukese'),
('chm', u'Mari'),
('chn', u'Chinook jargon'),
('cho', u'Choctaw'),
('chp', u'Chipewyan'),
('chr', u'Cherokee'),
('chu', 'cu', u'Church Slavic'),
('chv', 'cv', u'Chuvash'),
('chy', u'Cheyenne'),
('cmc', u'Chamic languages'),
('cop', u'Coptic'),
('cor', 'kw', u'Cornish'),
('cos', 'co', u'Corsican'),
('cpe', u'Creoles and pidgins, English based '),
('cpf', u'Creoles and pidgins, French-based '),
('cpp', u'Creoles and pidgins, Portuguese-based '),
('cre', 'cr', u'Cree'),
('crh', u'Crimean Tatar'),
('crp', u'Creoles and pidgins '),
('csb', u'Kashubian'),
('cus', u'Cushitic '),
('cze', 'cs', u'Czech'),
('dak', u'Dakota'),
('dan', 'da', u'Danish'),
('dar', u'Dargwa'),
('day', u'Land Dayak languages'),
('del', u'Delaware'),
('den', u'Slave '),
('dgr', u'Dogrib'),
('din', u'Dinka'),
('div', 'dv', u'Divehi'),
('doi', u'Dogri'),
('dra', u'Dravidian '),
('dsb', u'Lower Sorbian'),
('dua', u'Duala'),
('dum', u'Dutch, Middle '),
('dut', 'nl', u'Dutch'),
('dyu', u'Dyula'),
('dzo', 'dz', u'Dzongkha'),
('efi', u'Efik'),
('egy', u'Egyptian '),
('eka', u'Ekajuk'),
('elx', u'Elamite'),
('eng', 'en', u'English'),
('enm', u'English, Middle '),
('epo', 'eo', u'Esperanto'),
('est', 'et', u'Estonian'),
('ewe', 'ee', u'Ewe'),
('ewo', u'Ewondo'),
('fan', u'Fang'),
('fao', 'fo', u'Faroese'),
('fat', u'Fanti'),
('fij', 'fj', u'Fijian'),
('fil', u'Filipino'),
('fin', 'fi', u'Finnish'),
('fiu', u'Finno-Ugrian '),
('fon', u'Fon'),
('fre', 'fr', u'French'),
('frm', u'French, Middle '),
('fro', u'French, Old '),
('frr', u'Northern Frisian'),
('frs', u'Eastern Frisian'),
('fry', 'fy', u'Western Frisian'),
('ful', 'ff', u'Fulah'),
('fur', u'Friulian'),
('gaa', u'Ga'),
('gay', u'Gayo'),
('gba', u'Gbaya'),
('gem', u'Germanic '),
('geo', 'ka', u'Georgian'),
('ger', 'de', u'German'),
('gez', u'Geez'),
('gil', u'Gilbertese'),
('gla', 'gd', u'Gaelic'),
('gle', 'ga', u'Irish'),
('glg', 'gl', u'Galician'),
('glv', 'gv', u'Manx'),
('gmh', u'German, Middle High '),
('goh', u'German, Old High '),
('gon', u'Gondi'),
('gor', u'Gorontalo'),
('got', u'Gothic'),
('grb', u'Grebo'),
('grc', u'Greek, Ancient '),
('gre', 'el', u'Greek, Modern '),
('grn', 'gn', u'Guarani'),
('gsw', u'Swiss German'),
('guj', 'gu', u'Gujarati'),
('gwi', u"Gwich'in"),
('hai', u'Haida'),
('hat', 'ht', u'Haitian'),
('hau', 'ha', u'Hausa'),
('haw', u'Hawaiian'),
('heb', 'he', u'Hebrew'),
('her', 'hz', u'Herero'),
('hil', u'Hiligaynon'),
('him', u'Himachali'),
('hin', 'hi', u'Hindi'),
('hit', u'Hittite'),
('hmn', u'Hmong'),
('hmo', 'ho', u'Hiri Motu'),
('hsb', u'Upper Sorbian'),
('hun', 'hu', u'Hungarian'),
('hup', u'Hupa'),
('iba', u'Iban'),
('ibo', 'ig', u'Igbo'),
('ice', 'is', u'Icelandic'),
('ido', 'io', u'Ido'),
('iii', 'ii', u'Sichuan Yi'),
('ijo', u'Ijo languages'),
('iku', 'iu', u'Inuktitut'),
('ile', 'ie', u'Interlingue'),
('ilo', u'Iloko'),
('ina', 'ia', u'Interlingua '),
('inc', u'Indic '),
('ind', 'id', u'Indonesian'),
('ine', u'Indo-European '),
('inh', u'Ingush'),
('ipk', 'ik', u'Inupiaq'),
('ira', u'Iranian '),
('iro', u'Iroquoian languages'),
('ita', 'it', u'Italian'),
('jav', 'jv', u'Javanese'),
('jbo', u'Lojban'),
('jpn', 'ja', u'Japanese'),
('jpr', u'Judeo-Persian'),
('jrb', u'Judeo-Arabic'),
('kaa', u'Kara-Kalpak'),
('kab', u'Kabyle'),
('kac', u'Kachin'),
('kal', 'kl', u'Kalaallisut'),
('kam', u'Kamba'),
('kan', 'kn', u'Kannada'),
('kar', u'Karen languages'),
('kas', 'ks', u'Kashmiri'),
('kau', 'kr', u'Kanuri'),
('kaw', u'Kawi'),
('kaz', 'kk', u'Kazakh'),
('kbd', u'Kabardian'),
('kha', u'Khasi'),
('khi', u'Khoisan '),
('khm', 'km', u'Central Khmer'),
('kho', u'Khotanese'),
('kik', 'ki', u'Kikuyu'),
('kin', 'rw', u'Kinyarwanda'),
('kir', 'ky', u'Kirghiz'),
('kmb', u'Kimbundu'),
('kok', u'Konkani'),
('kom', 'kv', u'Komi'),
('kon', 'kg', u'Kongo'),
('kor', 'ko', u'Korean'),
('kos', u'Kosraean'),
('kpe', u'Kpelle'),
('krc', u'Karachay-Balkar'),
('krl', u'Karelian'),
('kro', u'Kru languages'),
('kru', u'Kurukh'),
('kua', 'kj', u'Kuanyama'),
('kum', u'Kumyk'),
('kur', 'ku', u'Kurdish'),
('kut', u'Kutenai'),
('lad', u'Ladino'),
('lah', u'Lahnda'),
('lam', u'Lamba'),
('lao', 'lo', u'Lao'),
('lat', 'la', u'Latin'),
('lav', 'lv', u'Latvian'),
('lez', u'Lezghian'),
('lim', 'li', u'Limburgan'),
('lin', 'ln', u'Lingala'),
('lit', 'lt', u'Lithuanian'),
('lol', u'Mongo'),
('loz', u'Lozi'),
('ltz', 'lb', u'Luxembourgish'),
('lua', u'Luba-Lulua'),
('lub', 'lu', u'Luba-Katanga'),
('lug', 'lg', u'Ganda'),
('lui', u'Luiseno'),
('lun', u'Lunda'),
('luo', u'Luo '),
('lus', u'Lushai'),
('mac', 'mk', u'Macedonian'),
('mad', u'Madurese'),
('mag', u'Magahi'),
('mah', 'mh', u'Marshallese'),
('mai', u'Maithili'),
('mak', u'Makasar'),
('mal', 'ml', u'Malayalam'),
('man', u'Mandingo'),
('mao', 'mi', u'Maori'),
('map', u'Austronesian '),
('mar', 'mr', u'Marathi'),
('mas', u'Masai'),
('may', 'ms', u'Malay'),
('mdf', u'Moksha'),
('mdr', u'Mandar'),
('men', u'Mende'),
('mga', u'Irish, Middle '),
('mic', u"Mi'kmaq"),
('min', u'Minangkabau'),
('mis', u'Uncoded languages'),
('mkh', u'Mon-Khmer '),
('mlg', 'mg', u'Malagasy'),
('mlt', 'mt', u'Maltese'),
('mnc', u'Manchu'),
('mni', u'Manipuri'),
('mno', u'Manobo languages'),
('moh', u'Mohawk'),
('mol', 'mo', u'Moldavian'),
('mon', 'mn', u'Mongolian'),
('mos', u'Mossi'),
('mul', u'Multiple languages'),
('mun', u'Munda languages'),
('mus', u'Creek'),
('mwl', u'Mirandese'),
('mwr', u'Marwari'),
('myn', u'Mayan languages'),
('myv', u'Erzya'),
('nah', u'Nahuatl languages'),
('nai', u'North American Indian'),
('nap', u'Neapolitan'),
('nau', 'na', u'Nauru'),
('nav', 'nv', u'Navajo'),
('nbl', 'nr', u'Ndebele, South'),
('nde', 'nd', u'Ndebele, North'),
('ndo', 'ng', u'Ndonga'),
('nds', u'Low German'),
('nep', 'ne', u'Nepali'),
('new', u'Nepal Bhasa'),
('nia', u'Nias'),
('nic', u'Niger-Kordofanian '),
('niu', u'Niuean'),
('nno', 'nn', u'Norwegian Nynorsk'),
('nob', 'nb', u'Bokm\xe5l, Norwegian'),
('nog', u'Nogai'),
('non', u'Norse, Old'),
('nor', 'no', u'Norwegian'),
('nqo', u"N'Ko"),
('nso', u'Pedi'),
('nub', u'Nubian languages'),
('nwc', u'Classical Newari'),
('nya', 'ny', u'Chichewa'),
('nym', u'Nyamwezi'),
('nyn', u'Nyankole'),
('nyo', u'Nyoro'),
('nzi', u'Nzima'),
('oci', 'oc', u'Occitan '),
('oji', 'oj', u'Ojibwa'),
('ori', 'or', u'Oriya'),
('orm', 'om', u'Oromo'),
('osa', u'Osage'),
('oss', 'os', u'Ossetian'),
('ota', u'Turkish, Ottoman '),
('oto', u'Otomian languages'),
('paa', u'Papuan '),
('pag', u'Pangasinan'),
('pal', u'Pahlavi'),
('pam', u'Pampanga'),
('pan', 'pa', u'Panjabi'),
('pap', u'Papiamento'),
('pau', u'Palauan'),
('peo', u'Persian, Old '),
('per', 'fa', u'Persian'),
('phi', u'Philippine '),
('phn', u'Phoenician'),
('pli', 'pi', u'Pali'),
('pol', 'pl', u'Polish'),
('pon', u'Pohnpeian'),
('por', 'pt', u'Portuguese'),
('pra', u'Prakrit languages'),
('pro', u'Proven\xe7al, Old '),
('pus', 'ps', u'Pushto'),
('qaa-qtz', u'Reserved for local use'),
('que', 'qu', u'Quechua'),
('raj', u'Rajasthani'),
('rap', u'Rapanui'),
('rar', u'Rarotongan'),
('roa', u'Romance '),
('roh', 'rm', u'Romansh'),
('rom', u'Romany'),
('rum', 'ro', u'Romanian'),
('run', 'rn', u'Rundi'),
('rup', u'Aromanian'),
('rus', 'ru', u'Russian'),
('sad', u'Sandawe'),
('sag', 'sg', u'Sango'),
('sah', u'Yakut'),
('sai', u'South American Indian '),
('sal', u'Salishan languages'),
('sam', u'Samaritan Aramaic'),
('san', 'sa', u'Sanskrit'),
('sas', u'Sasak'),
('sat', u'Santali'),
('scc', 'sr', u'Serbian'),
('scn', u'Sicilian'),
('sco', u'Scots'),
('scr', 'hr', u'Croatian'),
('sel', u'Selkup'),
('sem', u'Semitic '),
('sga', u'Irish, Old '),
('sgn', u'Sign Languages'),
('shn', u'Shan'),
('sid', u'Sidamo'),
('sin', 'si', u'Sinhala'),
('sio', u'Siouan languages'),
('sit', u'Sino-Tibetan '),
('sla', u'Slavic '),
('slo', 'sk', u'Slovak'),
('slv', 'sl', u'Slovenian'),
('sma', u'Southern Sami'),
('sme', 'se', u'Northern Sami'),
('smi', u'Sami languages '),
('smj', u'Lule Sami'),
('smn', u'Inari Sami'),
('smo', 'sm', u'Samoan'),
('sms', u'Skolt Sami'),
('sna', 'sn', u'Shona'),
('snd', 'sd', u'Sindhi'),
('snk', u'Soninke'),
('sog', u'Sogdian'),
('som', 'so', u'Somali'),
('son', u'Songhai languages'),
('sot', 'st', u'Sotho, Southern'),
('spa', 'es', u'Spanish'),
('srd', 'sc', u'Sardinian'),
('srn', u'Sranan Tongo'),
('srr', u'Serer'),
('ssa', u'Nilo-Saharan '),
('ssw', 'ss', u'Swati'),
('suk', u'Sukuma'),
('sun', 'su', u'Sundanese'),
('sus', u'Susu'),
('sux', u'Sumerian'),
('swa', 'sw', u'Swahili'),
('swe', 'sv', u'Swedish'),
('syc', u'Classical Syriac'),
('syr', u'Syriac'),
('tah', 'ty', u'Tahitian'),
('tai', u'Tai '),
('tam', 'ta', u'Tamil'),
('tat', 'tt', u'Tatar'),
('tel', 'te', u'Telugu'),
('tem', u'Timne'),
('ter', u'Tereno'),
('tet', u'Tetum'),
('tgk', 'tg', u'Tajik'),
('tgl', 'tl', u'Tagalog'),
('tha', 'th', u'Thai'),
('tib', 'bo', u'Tibetan'),
('tig', u'Tigre'),
('tir', 'ti', u'Tigrinya'),
('tiv', u'Tiv'),
('tkl', u'Tokelau'),
('tlh', u'Klingon'),
('tli', u'Tlingit'),
('tmh', u'Tamashek'),
('tog', u'Tonga '),
('ton', 'to', u'Tonga '),
('tpi', u'Tok Pisin'),
('tsi', u'Tsimshian'),
('tsn', 'tn', u'Tswana'),
('tso', 'ts', u'Tsonga'),
('tuk', 'tk', u'Turkmen'),
('tum', u'Tumbuka'),
('tup', u'Tupi languages'),
('tur', 'tr', u'Turkish'),
('tut', u'Altaic '),
('tvl', u'Tuvalu'),
('twi', 'tw', u'Twi'),
('tyv', u'Tuvinian'),
('udm', u'Udmurt'),
('uga', u'Ugaritic'),
('uig', 'ug', u'Uighur'),
('ukr', 'uk', u'Ukrainian'),
('umb', u'Umbundu'),
('und', u'Undetermined'),
('urd', 'ur', u'Urdu'),
('uzb', 'uz', u'Uzbek'),
('vai', u'Vai'),
('ven', 've', u'Venda'),
('vie', 'vi', u'Vietnamese'),
('vol', 'vo', u'Volap\xfck'),
('vot', u'Votic'),
('wak', u'Wakashan languages'),
('wal', u'Walamo'),
('war', u'Waray'),
('was', u'Washo'),
('wel', 'cy', u'Welsh'),
('wen', u'Sorbian languages'),
('wln', 'wa', u'Walloon'),
('wol', 'wo', u'Wolof'),
('xal', u'Kalmyk'),
('xho', 'xh', u'Xhosa'),
('yao', u'Yao'),
('yap', u'Yapese'),
('yid', 'yi', u'Yiddish'),
('yor', 'yo', u'Yoruba'),
('ypk', u'Yupik languages'),
('zap', u'Zapotec'),
('zbl', u'Blissymbols'),
('zen', u'Zenaga'),
('zha', 'za', u'Zhuang'),
('znd', u'Zande languages'),
('zul', 'zu', u'Zulu'),
('zun', u'Zuni'),
('zxx', u'No linguistic content'),
('zza', u'Zaza'),
)
|
gpl-3.0
|
inares/edx-platform
|
openedx/core/djangoapps/credit/tests/test_tasks.py
|
62
|
13643
|
"""
Tests for credit course tasks.
"""
import mock
from datetime import datetime, timedelta
from pytz import UTC
from openedx.core.djangoapps.credit.api import get_credit_requirements
from openedx.core.djangoapps.credit.exceptions import InvalidCreditRequirements
from openedx.core.djangoapps.credit.models import CreditCourse
from openedx.core.djangoapps.credit.signals import on_course_publish
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls_range
from edx_proctoring.api import create_exam
class TestTaskExecution(ModuleStoreTestCase):
"""Set of tests to ensure that the task code will do the right thing when
executed directly.
The test course gets created without the listeners being present, which
allows us to ensure that when the listener is executed, it is done as
expected.
"""
def mocked_set_credit_requirements(course_key, requirements): # pylint: disable=no-self-argument, unused-argument
"""Used as a side effect when mocking method credit api method
'set_credit_requirements'.
"""
raise InvalidCreditRequirements
def add_icrv_xblock(self, related_assessment_name=None, start_date=None):
""" Create the 'edx-reverification-block' in course tree """
block = ItemFactory.create(
parent=self.vertical,
category='edx-reverification-block',
)
if related_assessment_name is not None:
block.related_assessment = related_assessment_name
block.start = start_date
self.store.update_item(block, ModuleStoreEnum.UserID.test)
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
self.store.publish(block.location, ModuleStoreEnum.UserID.test)
return block
def setUp(self):
super(TestTaskExecution, self).setUp()
self.course = CourseFactory.create(start=datetime(2015, 3, 1))
self.section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
self.subsection = ItemFactory.create(parent=self.section, category='sequential', display_name='Test Subsection')
self.vertical = ItemFactory.create(parent=self.subsection, category='vertical', display_name='Test Unit')
def test_task_adding_requirements_invalid_course(self):
"""
Test that credit requirements cannot be added for non credit course.
"""
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
def test_task_adding_requirements(self):
"""Test that credit requirements are added properly for credit course.
Make sure that the receiver correctly fires off the task when
invoked by signal.
"""
self.add_credit_course(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 1)
def test_task_adding_icrv_requirements(self):
"""Make sure that the receiver correctly fires off the task when
invoked by signal.
"""
self.add_credit_course(self.course.id)
self.add_icrv_xblock()
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 2)
def test_proctored_exam_requirements(self):
"""
Make sure that proctored exams are being registered as requirements
"""
self.add_credit_course(self.course.id)
create_exam(
course_id=unicode(self.course.id),
content_id=unicode(self.subsection.location),
exam_name='A Proctored Exam',
time_limit_mins=10,
is_proctored=True,
is_active=True
)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 2)
self.assertEqual(requirements[1]['namespace'], 'proctored_exam')
self.assertEqual(requirements[1]['name'], unicode(self.subsection.location))
self.assertEqual(requirements[1]['display_name'], 'A Proctored Exam')
self.assertEqual(requirements[1]['criteria'], {})
def test_proctored_exam_filtering(self):
"""
        Make sure that timed or inactive exams do not end up in the
        requirements table, and that practice proctored exams are not a
        requirement either.
"""
self.add_credit_course(self.course.id)
create_exam(
course_id=unicode(self.course.id),
content_id='foo',
exam_name='A Proctored Exam',
time_limit_mins=10,
is_proctored=False,
is_active=True
)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 1)
# make sure we don't have a proctoring requirement
self.assertFalse([
requirement
for requirement in requirements
if requirement['namespace'] == 'proctored_exam'
])
create_exam(
course_id=unicode(self.course.id),
content_id='foo2',
exam_name='A Proctored Exam',
time_limit_mins=10,
is_proctored=True,
is_active=False
)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 1)
# make sure we don't have a proctoring requirement
self.assertFalse([
requirement
for requirement in requirements
if requirement['namespace'] == 'proctored_exam'
])
# practice proctored exams aren't requirements
create_exam(
course_id=unicode(self.course.id),
content_id='foo3',
exam_name='A Proctored Exam',
time_limit_mins=10,
is_proctored=True,
is_active=True,
is_practice_exam=True
)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 1)
# make sure we don't have a proctoring requirement
self.assertFalse([
requirement
for requirement in requirements
if requirement['namespace'] == 'proctored_exam'
])
def test_query_counts(self):
self.add_credit_course(self.course.id)
self.add_icrv_xblock()
with check_mongo_calls_range(max_finds=11):
on_course_publish(self.course.id)
def test_remove_icrv_requirement(self):
self.add_credit_course(self.course.id)
self.add_icrv_xblock()
on_course_publish(self.course.id)
# There should be one ICRV requirement
requirements = get_credit_requirements(self.course.id, namespace="reverification")
self.assertEqual(len(requirements), 1)
# Delete the parent section containing the ICRV block
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
self.store.delete_item(self.subsection.location, ModuleStoreEnum.UserID.test)
# Check that the ICRV block is no longer visible in the requirements
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id, namespace="reverification")
self.assertEqual(len(requirements), 0)
def test_icrv_requirement_ordering(self):
self.add_credit_course(self.course.id)
# Create multiple ICRV blocks
start = datetime.now(UTC)
self.add_icrv_xblock(related_assessment_name="Midterm A", start_date=start)
start = start - timedelta(days=1)
self.add_icrv_xblock(related_assessment_name="Midterm B", start_date=start)
# Primary sort is based on start date
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id, namespace="reverification")
self.assertEqual(len(requirements), 2)
self.assertEqual(requirements[0]["display_name"], "Midterm B")
self.assertEqual(requirements[1]["display_name"], "Midterm A")
# Add two additional ICRV blocks that have no start date
# and the same name.
start = datetime.now(UTC)
first_block = self.add_icrv_xblock(related_assessment_name="Midterm Start Date")
start = start + timedelta(days=1)
second_block = self.add_icrv_xblock(related_assessment_name="Midterm Start Date")
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id, namespace="reverification")
self.assertEqual(len(requirements), 4)
        # Requirements are sorted primarily on start_date (and then on
        # display_name) when a start_date is present; blocks without one
        # are sorted on display_name alone.
self.assertEqual(requirements[0]["display_name"], "Midterm B")
self.assertEqual(requirements[1]["display_name"], "Midterm A")
self.assertEqual(requirements[2]["display_name"], "Midterm Start Date")
self.assertEqual(requirements[3]["display_name"], "Midterm Start Date")
# Since the last two requirements have the same display name,
# we need to also check that their internal names (locations) are the same.
self.assertEqual(requirements[2]["name"], first_block.get_credit_requirement_name())
self.assertEqual(requirements[3]["name"], second_block.get_credit_requirement_name())
@mock.patch(
'openedx.core.djangoapps.credit.tasks.set_credit_requirements',
mock.Mock(
side_effect=mocked_set_credit_requirements
)
)
def test_retry(self):
"""Test that adding credit requirements is retried when
'InvalidCreditRequirements' exception is raised.
Make sure that the receiver correctly fires off the task when
invoked by signal
"""
self.add_credit_course(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
def test_credit_requirement_blocks_ordering(self):
"""
Test ordering of the proctoring and ICRV blocks are in proper order.
"""
self.add_credit_course(self.course.id)
subsection = ItemFactory.create(parent=self.section, category='sequential', display_name='Dummy Subsection')
create_exam(
course_id=unicode(self.course.id),
content_id=unicode(subsection.location),
exam_name='A Proctored Exam',
time_limit_mins=10,
is_proctored=True,
is_active=True
)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 0)
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
self.assertEqual(len(requirements), 2)
self.assertEqual(requirements[1]['namespace'], 'proctored_exam')
self.assertEqual(requirements[1]['name'], unicode(subsection.location))
self.assertEqual(requirements[1]['display_name'], 'A Proctored Exam')
self.assertEqual(requirements[1]['criteria'], {})
# Create multiple ICRV blocks
start = datetime.now(UTC)
self.add_icrv_xblock(related_assessment_name="Midterm A", start_date=start)
start = start - timedelta(days=1)
self.add_icrv_xblock(related_assessment_name="Midterm B", start_date=start)
# Primary sort is based on start date
on_course_publish(self.course.id)
requirements = get_credit_requirements(self.course.id)
# grade requirement is added on publish of the requirements
self.assertEqual(len(requirements), 4)
        # Check that requirements are added in the desired order:
        # first the minimum grade, then blocks with a start date, then the rest.
self.assertEqual(requirements[0]["display_name"], "Minimum Grade")
self.assertEqual(requirements[1]["display_name"], "A Proctored Exam")
self.assertEqual(requirements[2]["display_name"], "Midterm B")
self.assertEqual(requirements[3]["display_name"], "Midterm A")
def add_credit_course(self, course_key):
"""Add the course as a credit.
Args:
course_key(CourseKey): Identifier for the course
Returns:
CreditCourse object added
"""
credit_course = CreditCourse(course_key=course_key, enabled=True)
credit_course.save()
return credit_course
|
agpl-3.0
|
Joneyviana/todolist-django-angular
|
todolist/users/migrations/0002_auto_20160728_2311.py
|
1
|
1044
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-07-28 23:11
from __future__ import unicode_literals
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AlterModelManagers(
name='user',
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=30, unique=True, validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.')], verbose_name='username'),
),
]
|
mit
|
vega123/enigma2
|
lib/python/Components/ParentalControlList.py
|
32
|
2252
|
from MenuList import MenuList
from Components.ParentalControl import IMG_WHITESERVICE, IMG_WHITEBOUQUET, IMG_BLACKSERVICE, IMG_BLACKBOUQUET
from Tools.Directories import SCOPE_SKIN_IMAGE, resolveFilename
from enigma import eListboxPythonMultiContent, gFont, RT_HALIGN_LEFT
from Tools.LoadPixmap import LoadPixmap
#Now there is a list of pictures instead of one...
entryPicture = {}
entryPicture[IMG_BLACKSERVICE] = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/lock.png"))
entryPicture[IMG_BLACKBOUQUET] = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/lockBouquet.png"))
entryPicture[IMG_WHITESERVICE] = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/unlock.png"))
entryPicture[IMG_WHITEBOUQUET] = LoadPixmap(resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/icons/unlockBouquet.png"))
def ParentalControlEntryComponent(service, name, protectionType):
locked = protectionType[0]
sImage = protectionType[1]
res = [
(service, name, locked),
(eListboxPythonMultiContent.TYPE_TEXT, 80, 5, 300, 50, 0, RT_HALIGN_LEFT, name)
]
#Changed logic: The image is defined by sImage, not by locked anymore
if sImage != "":
res.append((eListboxPythonMultiContent.TYPE_PIXMAP_ALPHATEST, 0, 0, 32, 32, entryPicture[sImage]))
return res
class ParentalControlList(MenuList):
def __init__(self, list, enableWrapAround = False):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", 20))
self.l.setItemHeight(32)
def toggleSelectedLock(self):
from Components.ParentalControl import parentalControl
print "self.l.getCurrentSelection():", self.l.getCurrentSelection()
print "self.l.getCurrentSelectionIndex():", self.l.getCurrentSelectionIndex()
curSel = self.l.getCurrentSelection()
if curSel[0][2]:
parentalControl.unProtectService(self.l.getCurrentSelection()[0][0])
else:
parentalControl.protectService(self.l.getCurrentSelection()[0][0])
#Instead of just negating the locked flag, getProtectionType is now called every time...
self.list[self.l.getCurrentSelectionIndex()] = ParentalControlEntryComponent(curSel[0][0], curSel[0][1], parentalControl.getProtectionType(curSel[0][0]))
self.l.setList(self.list)
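A hedged, stand-alone illustration (plain Python; the constants and service reference are made-up stand-ins, no enigma runtime required) of the entry layout: index 0 carries the data tuple that toggleSelectedLock() reads back, while the later tuples are drawing instructions for eListboxPythonMultiContent.
TYPE_TEXT = 0       # stand-in for eListboxPythonMultiContent.TYPE_TEXT
RT_HALIGN_LEFT = 1  # stand-in for the real alignment flag

def entry_component(service, name, protection_type):
    locked, image = protection_type                           # as getProtectionType() returns it
    res = [
        (service, name, locked),                              # data tuple, read back on toggle
        (TYPE_TEXT, 80, 5, 300, 50, 0, RT_HALIGN_LEFT, name), # text cell
    ]
    # The real component appends a TYPE_PIXMAP_ALPHATEST tuple when image != "".
    return res

entry = entry_component('1:0:19:...', 'Example Channel', (True, 'blackservice'))
print(entry[0][2])  # True -> toggleSelectedLock() would call unProtectService()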
|
gpl-2.0
|
vitalikp/yum
|
yum/failover.py
|
12
|
5125
|
#!/usr/bin/python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# Copyright 2003 Jack Neely, NC State University
# Here we define a base class for failover methods. The idea here is that each
# failover method uses a class derived from the base class so yum only has to
# worry about calling get_serverurl() and server_failed() and these classes will
# figure out which URL to cough up based on the failover method.
"""Classes for handling failovers for server URLs."""
import random
class baseFailOverMethod:
"""A base class to provide a failover to switch to a new server if
the current one fails.
"""
def __init__(self, repo):
self.repo = repo
self.failures = 0
def get_serverurl(self, i=None):
"""Return a server URL based on this failover method, or None
if there is a complete failure. This method should always be
used to translate an index into a URL, as this object may
change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
return None
def server_failed(self):
"""Notify the failover method that the current server has
failed.
"""
self.failures += 1
def reset(self, i=0):
"""Reset the failures counter to the given index.
:param i: the index to reset the failures counter to
"""
self.failures = i
def get_index(self):
"""Return the current number of failures, which is also the
current index into the list of URLs that this object
represents. :func:`get_serverurl` should always be used to
translate an index into a URL, as this object may change how
indexes map.
:return: the current number of failures, which is also the
current index
"""
return self.failures
def len(self):
"""Return the total number of URLs available to cycle through
in this object.
:return: the total number of URLs available
"""
return len(self.repo.urls)
class priority(baseFailOverMethod):
"""A class to provide a failover to switch to a new server
if the current one fails. This classes chooses the next server
based on the first success in the list of servers.
"""
def get_serverurl(self, i=None):
"""Return the next successful server URL in the list, or None
if there is a complete failure. This method should always be
used to translate an index into a URL, as this object may
change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
if i is None:
index = self.failures
else:
index = i
if index >= len(self.repo.urls):
return None
return self.repo.urls[index]
class roundRobin(baseFailOverMethod):
"""A class to provide a failover to switch to a new server
if the current one fails. When an object of this class is
created, it selects a random place in the list of URLs to begin
with, then each time :func:`get_serveurl` is called, the next URL
in the list is returned, cycling back to the beginning of the list
after the end is reached.
"""
def __init__(self, repo):
baseFailOverMethod.__init__(self, repo)
random.seed()
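# The upper bound 37 is arbitrary; get_serverurl() wraps the offset modulo len(urls).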
self.offset = random.randint(0, 37)
def get_serverurl(self, i=None):
"""Return the next successful server URL in the list, using
the round robin scheme, or None if there is a complete
failure. This method should always be used to translate an
index into a URL, as this object may change how indexes map.
:param i: if given, this is the index of the server URL to
return, instead of using the failures counter
:return: the next server URL
"""
if i is None:
index = self.failures
else:
index = i
if index >= len(self.repo.urls):
return None
rr = (index + self.offset) % len(self.repo.urls)
return self.repo.urls[rr]
# SDG
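A minimal usage sketch, not taken from yum itself: any object exposing a urls list satisfies what these classes need from repo; FakeRepo below is a hypothetical stand-in.
# Hedged usage sketch; FakeRepo is a stand-in, not part of yum's API.
class FakeRepo:
    urls = ['http://mirror-a/repo', 'http://mirror-b/repo', 'http://mirror-c/repo']

failover = roundRobin(FakeRepo())
url = failover.get_serverurl()   # random starting point in the cycle
failover.server_failed()         # report a failure on that URL...
url = failover.get_serverurl()   # ...and the next URL in the cycle is returned
# After len(urls) consecutive failures, get_serverurl() returns None.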
|
gpl-2.0
|
chafique-delli/OpenUpgrade
|
scripts/migrate.py
|
13
|
10127
|
#!/usr/bin/python
import os
import sys
import StringIO
import psycopg2
import psycopg2.extensions
from optparse import OptionParser
from ConfigParser import SafeConfigParser
from bzrlib.branch import Branch
from bzrlib.repository import Repository
from bzrlib.workingtree import WorkingTree
import bzrlib.plugin
import bzrlib.builtins
import bzrlib.info
migrations={
'7.0': {
'addons': {
'addons': 'lp:openupgrade-addons/7.0',
'web': {'url': 'lp:openerp-web/7.0', 'addons_dir': 'addons'},
},
'server': {
'url': 'lp:openupgrade-server/7.0',
'addons_dir': os.path.join('openerp','addons'),
'root_dir': os.path.join(''),
'cmd': 'openerp-server --update=all --database=%(db)s '+
'--config=%(config)s --stop-after-init --no-xmlrpc --no-netrpc',
},
},
'6.1': {
'addons': {
'addons': 'lp:openupgrade-addons/6.1',
'web': {'url': 'lp:openerp-web/6.1', 'addons_dir': 'addons'},
},
'server': {
'url': 'lp:openupgrade-server/6.1',
'addons_dir': os.path.join('openerp','addons'),
'root_dir': os.path.join(''),
'cmd': 'openerp-server --update=all --database=%(db)s '+
'--config=%(config)s --stop-after-init --no-xmlrpc --no-netrpc',
},
},
'6.0': {
'addons': {
'addons': 'lp:openupgrade-addons/6.0',
},
'server': {
'url': 'lp:openupgrade-server/6.0',
'addons_dir': os.path.join('bin','addons'),
'root_dir': os.path.join('bin'),
'cmd': 'bin/openerp-server.py --update=all --database=%(db)s '+
'--config=%(config)s --stop-after-init --no-xmlrpc --no-netrpc',
},
},
}
config = SafeConfigParser()
parser = OptionParser(description="""Migrate script for the impatient or lazy.
Makes a copy of your database, downloads the files necessary to migrate
it as requested and runs the migration on the copy (so your original
database will not be touched). While the migration is running only errors are
shown, for a detailed log see ${branch-dir}/migration.log
""")
parser.add_option("-C", "--config", action="store", type="string",
dest="config",
help="current openerp config (required)")
parser.add_option("-D", "--database", action="store", type="string",
dest="database",
help="current openerp database (required if not given in config)")
parser.add_option("-B", "--branch-dir", action="store", type="string",
dest="branch_dir",
help="the directory to download openupgrade-server code to [%default]",
default='/var/tmp/openupgrade')
parser.add_option("-R", "--run-migrations", action="store", type="string",
dest="migrations",
help="comma separated list of migrations to run, ie. \""+
','.join(sorted([a for a in migrations]))+
"\" (required)")
parser.add_option("-A", "--add", action="store", type="string", dest="add",
help="load a python module that declares a dict 'migrations' which is "+
"merged with the one of this script (see the source for details). "
"You also can pass a string that evaluates to a dict. For the banking "
"addons, pass "
"\"{'6.1': {'addons': {'banking': 'lp:banking-addons/6.1'}}}\"")
parser.add_option("-I", "--inplace", action="store_true", dest="inplace",
help="don't copy database before attempting upgrade (dangerous)")
(options, args) = parser.parse_args()
if (not options.config or not options.migrations
or not reduce(lambda a,b: a and (b in migrations),
options.migrations.split(','), True)):
parser.print_help()
sys.exit()
config.read(options.config)
conn_parms = {}
for parm in ('host', 'port', 'user', 'password'):
db_parm = 'db_' + parm
if config.has_option('options', db_parm):
conn_parms[parm] = config.get('options', db_parm)
if not 'user' in conn_parms:
print 'No user found in configuration'
sys.exit()
db_user = conn_parms['user']
db_name=options.database or config.get('options', 'db_name')
if not db_name or db_name=='' or db_name.isspace() or db_name.lower()=='false':
parser.print_help()
sys.exit()
conn_parms['database'] = db_name
if options.inplace:
db=db_name
else:
db=db_name+'_migrated'
if options.add:
merge_migrations={}
if os.path.isfile(options.add):
import imp
merge_migrations_mod=imp.load_source('merge_migrations_mod',
options.add)
merge_migrations=merge_migrations_mod.migrations
else:
merge_migrations=eval(options.add)
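# deep_update: recursively merge dict2 into dict1; on conflicting keys dict2
# wins, except that two dicts at the same key are merged rather than replaced.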
def deep_update(dict1, dict2):
result={}
for (name,value) in dict1.iteritems():
if dict2.has_key(name):
if isinstance(dict1[name], dict) and isinstance(dict2[name],
dict):
result[name]=deep_update(dict1[name], dict2[name])
else:
result[name]=dict2[name]
else:
result[name]=dict1[name]
for (name,value) in dict2.iteritems():
if name not in dict1:
result[name]=value
return result
migrations=deep_update(migrations, merge_migrations)
for version in options.migrations.split(','):
if version not in migrations:
print '%s is not a valid version! (valid versions are %s)' % (version,
','.join(sorted([a for a in migrations])))
bzrlib.plugin.load_plugins()
bzrlib.trace.enable_default_logging()
logfile=os.path.join(options.branch_dir,'migration.log')
if not os.path.exists(options.branch_dir):
os.mkdir(options.branch_dir)
for version in options.migrations.split(','):
if not os.path.exists(os.path.join(options.branch_dir,version)):
os.mkdir(os.path.join(options.branch_dir,version))
for (name,url) in dict(migrations[version]['addons'],
server=migrations[version]['server']['url']).iteritems():
link=url.get('link', False) if isinstance(url, dict) else False
url=url['url'] if isinstance(url, dict) else url
if os.path.exists(os.path.join(options.branch_dir,version,name)):
if link:
continue
cmd_revno=bzrlib.builtins.cmd_revno()
cmd_revno.outf=StringIO.StringIO()
cmd_revno.run(location=os.path.join(options.branch_dir,version,
name))
print 'updating %s rev%s' %(os.path.join(version,name),
cmd_revno.outf.getvalue().strip())
cmd_update=bzrlib.builtins.cmd_update()
cmd_update.outf=StringIO.StringIO()
cmd_update.outf.encoding='utf8'
cmd_update.run(dir=os.path.join(options.branch_dir,version,
name))
if hasattr(cmd_update, '_operation'):
cmd_update.cleanup_now()
print 'now at rev'+cmd_revno.outf.getvalue().strip()
else:
if link:
print 'linking %s to %s'%(url,
os.path.join(options.branch_dir,version,name))
os.symlink(url, os.path.join(options.branch_dir,version,name))
else:
print 'getting '+url
cmd_checkout=bzrlib.builtins.cmd_checkout()
cmd_checkout.outf=StringIO.StringIO()
cmd_checkout.run(url, os.path.join(options.branch_dir,version,
name), lightweight=True)
if not options.inplace:
print('copying database %(db_name)s to %(db)s...' % {'db_name': db_name,
'db': db})
conn = psycopg2.connect(**conn_parms)
conn.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
cur=conn.cursor()
cur.execute('drop database if exists "%(db)s"' % {'db': db})
cur.execute('create database "%(db)s"' % {'db': db})
cur.close()
os.environ['PGUSER'] = db_user
if ('host' in conn_parms and conn_parms['host']
and not os.environ.get('PGHOST')):
os.environ['PGHOST'] = conn_parms['host']
if ('port' in conn_parms and conn_parms['port']
and not os.environ.get('PGPORT')):
os.environ['PGPORT'] = conn_parms['port']
password_set = False
if ('password' in conn_parms and conn_parms['password']
and not os.environ.get('PGPASSWORD')):
os.environ['PGPASSWORD'] = conn_parms['password']
password_set = True
os.system(
('pg_dump --format=custom --no-password %(db_name)s ' +
'| pg_restore --no-password --dbname=%(db)s') %
{'db_name': db_name, 'db': db}
)
if password_set:
del os.environ['PGPASSWORD']
for version in options.migrations.split(','):
print 'running migration for '+version
config.set('options', 'without_demo', 'True')
config.set('options', 'logfile', logfile)
config.set('options', 'port', 'False')
config.set('options', 'netport', 'False')
config.set('options', 'xmlrpc_port', 'False')
config.set('options', 'netrpc_port', 'False')
config.set('options', 'addons_path',
','.join([os.path.join(options.branch_dir,
version,'server',migrations[version]['server']['addons_dir'])] +
[
os.path.join(options.branch_dir,version,name,
url.get('addons_dir', '') if isinstance(url, dict) else '')
for (name,url) in migrations[version]['addons'].iteritems()
]
)
)
config.set('options', 'root_path', os.path.join(options.branch_dir,version,
'server', migrations[version]['server']['root_dir']))
config.write(open(
os.path.join(options.branch_dir,version,'server.cfg'), 'w+'))
os.system(
os.path.join(options.branch_dir,version,'server',
migrations[version]['server']['cmd'] % {
'db': db,
'config': os.path.join(options.branch_dir,version,
'server.cfg')
}
)
)
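A hedged invocation sketch; the config path, database name, and version list below are placeholders rather than defaults from the script:
# Hypothetical invocation (all values are placeholders):
#   ./migrate.py --config=/etc/openerp/server.cfg --database=mydb \
#       --run-migrations=6.1,7.0 --branch-dir=/var/tmp/openupgrade
# Unless --inplace is given, the migration runs on a copy named mydb_migrated.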
|
agpl-3.0
|