repo_name | ref | path | copies | content
---|---|---|---|---|
mattvonrocketstein/smash | refs/heads/master | smashlib/ipy3x/parallel/engine/__main__.py | 9 | if __name__ == '__main__':
from IPython.parallel.apps import ipengineapp as app
app.launch_new_instance()
|
pandeyadarsh/sympy | refs/heads/master | sympy/stats/rv_interface.py | 88 | from __future__ import print_function, division
from .rv import (probability, expectation, density, where, given, pspace, cdf,
sample, sample_iter, random_symbols, independent, dependent,
sampling_density)
from sympy import sqrt
__all__ = ['P', 'E', 'density', 'where', 'given', 'sample', 'cdf', 'pspace',
'sample_iter', 'variance', 'std', 'skewness', 'covariance',
'dependent', 'independent', 'random_symbols', 'correlation',
'moment', 'cmoment', 'sampling_density']
def moment(X, n, c=0, condition=None, **kwargs):
"""
Return the nth moment of a random expression about c, i.e. E((X - c)**n).
The default value of c is 0.
Examples
========
>>> from sympy.stats import Die, moment, E
>>> X = Die('X', 6)
>>> moment(X, 1, 6)
-5/2
>>> moment(X, 2)
91/6
>>> moment(X, 1) == E(X)
True
"""
return expectation((X - c)**n, condition, **kwargs)
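# Note: with the default c=0 this is the raw moment E(X**n); cmoment() below
# recenters at the mean mu = E(X) to produce the central moment.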
def variance(X, condition=None, **kwargs):
"""
Variance of a random expression
Expectation of (X-E(X))**2
Examples
========
>>> from sympy.stats import Die, E, Bernoulli, variance
>>> from sympy import simplify, Symbol
>>> X = Die('X', 6)
>>> p = Symbol('p')
>>> B = Bernoulli('B', p, 1, 0)
>>> variance(2*X)
35/3
>>> simplify(variance(B))
p*(-p + 1)
"""
return cmoment(X, 2, condition, **kwargs)
def standard_deviation(X, condition=None, **kwargs):
"""
Standard Deviation of a random expression
Square root of the Expectation of (X-E(X))**2
Examples
========
>>> from sympy.stats import Bernoulli, std
>>> from sympy import Symbol, simplify
>>> p = Symbol('p')
>>> B = Bernoulli('B', p, 1, 0)
>>> simplify(std(B))
sqrt(p*(-p + 1))
"""
return sqrt(variance(X, condition, **kwargs))
std = standard_deviation
def covariance(X, Y, condition=None, **kwargs):
"""
Covariance of two random expressions
A measure of the degree to which the two variables rise and fall together
Covariance(X,Y) = E( (X-E(X)) * (Y-E(Y)) )
Examples
========
>>> from sympy.stats import Exponential, covariance
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> X = Exponential('X', rate)
>>> Y = Exponential('Y', rate)
>>> covariance(X, X)
lambda**(-2)
>>> covariance(X, Y)
0
>>> covariance(X, Y + rate*X)
1/lambda
"""
return expectation(
(X - expectation(X, condition, **kwargs)) *
(Y - expectation(Y, condition, **kwargs)),
condition, **kwargs)
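# Note: expanding the product above gives the equivalent identity
# Cov(X, Y) = E(X*Y) - E(X)*E(Y); in particular covariance(X, X) reduces to
# variance(X).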
def correlation(X, Y, condition=None, **kwargs):
"""
Correlation of two random expressions, also known as the correlation
coefficient or Pearson's correlation
The covariance normalized by the standard deviations, so that it
always lies between -1 and 1
Correlation(X,Y) = E((X-E(X)) * (Y-E(Y))) / (sigma(X) * sigma(Y))
Examples
========
>>> from sympy.stats import Exponential, correlation
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> X = Exponential('X', rate)
>>> Y = Exponential('Y', rate)
>>> correlation(X, X)
1
>>> correlation(X, Y)
0
>>> correlation(X, Y + rate*X)
1/sqrt(1 + lambda**(-2))
"""
return covariance(X, Y, condition, **kwargs)/(std(X, condition, **kwargs)
* std(Y, condition, **kwargs))
def cmoment(X, n, condition=None, **kwargs):
"""
Return the nth central moment of a random expression about its mean
i.e. E((X - E(X))**n)
Examples
========
>>> from sympy.stats import Die, cmoment, variance
>>> X = Die('X', 6)
>>> cmoment(X, 3)
0
>>> cmoment(X, 2)
35/12
>>> cmoment(X, 2) == variance(X)
True
"""
mu = expectation(X, condition, **kwargs)
return moment(X, n, mu, condition, **kwargs)
def smoment(X, n, condition=None, **kwargs):
"""
Return the nth standardized moment of a random expression, i.e.
E( ((X - mu)/sigma(X))**n )
Examples
========
>>> from sympy.stats import skewness, Exponential, smoment
>>> from sympy import Symbol
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> Y = Exponential('Y', rate)
>>> smoment(Y, 4)
9
>>> smoment(Y, 4) == smoment(3*Y, 4)
True
>>> smoment(Y, 3) == skewness(Y)
True
"""
sigma = std(X, condition, **kwargs)
return (1/sigma)**n*cmoment(X, n, condition, **kwargs)
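# Note: smoment(X, 2) is always 1 by construction, and smoment is invariant
# under positive rescaling of X (see the smoment(Y, 4) == smoment(3*Y, 4)
# doctest above).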
def skewness(X, condition=None, **kwargs):
"""
Measure of the asymmetry of the probability distribution
Positive skew indicates that the distribution has a longer tail to the
right of the mean
skewness(X) = E( ((X - E(X))/sigma)**3 )
Examples
========
>>> from sympy.stats import skewness, Exponential, Normal
>>> from sympy import Symbol
>>> X = Normal('X', 0, 1)
>>> skewness(X)
0
>>> rate = Symbol('lambda', positive=True, real=True, finite=True)
>>> Y = Exponential('Y', rate)
>>> skewness(Y)
2
"""
return smoment(X, 3, condition, **kwargs)
P = probability
E = expectation
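# A minimal sanity-check sketch (illustrative, not part of the module)
# showing how the helpers above relate on a fair die, runnable in an
# interactive session:
#
#     >>> from sympy.stats import Die
#     >>> X = Die('X', 6)
#     >>> variance(X) == cmoment(X, 2) == moment(X, 2) - moment(X, 1)**2
#     True
#     >>> std(X)**2 == variance(X)
#     True
#     >>> correlation(X, X) == covariance(X, X)/(std(X)*std(X))
#     True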
|
hmoco/osf.io | refs/heads/develop | tests/test_views.py | 1 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Views tests for the OSF."""
from __future__ import absolute_import
import datetime as dt
import httplib as http
import json
import time
import pytz
import unittest
from flask import request
import mock
import pytest
from nose.tools import * # noqa PEP8 asserts
from django.utils import timezone
from django.apps import apps
from modularodm import Q
from modularodm.exceptions import ValidationError
from addons.github.tests.factories import GitHubAccountFactory
from framework.auth import cas
from framework.auth.core import generate_verification_key
from framework import auth
from framework.auth.campaigns import get_campaigns, is_institution_login, is_native_login, is_proxy_login, campaign_url_for
from framework.auth import Auth
from framework.auth.cas import get_login_url
from framework.auth.exceptions import InvalidTokenError
from framework.auth.utils import impute_names_model, ensure_external_identity_uniqueness
from framework.auth.views import login_and_register_handler
from framework.celery_tasks import handlers
from framework.exceptions import HTTPError
from framework.transactions.handlers import no_auto_transaction
from tests.factories import MockAddonNodeSettings
from website import mailchimp_utils
from website import mails, settings
from addons.osfstorage import settings as osfstorage_settings
from website.models import Node, NodeLog, Pointer
from website.profile.utils import add_contributor_json, serialize_unregistered
from website.profile.views import fmt_date_or_none, update_osf_help_mails_subscription
from website.project.decorators import check_can_access
from website.project.model import has_anonymous_link
from website.project.signals import contributor_added
from website.project.views.contributor import (
deserialize_contributors,
notify_added_contributor,
send_claim_email,
send_claim_registered_email,
)
from website.project.views.node import _should_show_wiki_widget, _view_project, abbrev_authors
from website.util import api_url_for, web_url_for
from website.util import permissions, rubeus
from website.views import index
from osf.models import Comment
from osf.models import OSFUser as User
from tests.base import (
assert_is_redirect,
capture_signals,
fake,
get_default_metaschema,
OsfTestCase,
assert_datetime_equal,
)
from tests.base import test_app as mock_app
pytestmark = pytest.mark.django_db
from osf.models import NodeRelation
from osf_tests.factories import (
UserFactory,
UnconfirmedUserFactory,
UnregUserFactory,
AuthUserFactory,
PrivateLinkFactory,
ProjectFactory,
NodeFactory,
CommentFactory,
CollectionFactory,
InstitutionFactory,
RegistrationFactory,
ApiOAuth2ApplicationFactory,
ApiOAuth2PersonalTokenFactory,
ProjectWithAddonFactory,
PreprintFactory,
PreprintProviderFactory,
)
class Addon(MockAddonNodeSettings):
@property
def complete(self):
return True
def archive_errors(self):
return 'Error'
class Addon2(MockAddonNodeSettings):
@property
def complete(self):
return True
def archive_errors(self):
return 'Error'
@mock_app.route('/errorexc')
def error_exc():
UserFactory()
raise RuntimeError
@mock_app.route('/error500')
def error500():
UserFactory()
return 'error', 500
@mock_app.route('/noautotransact')
@no_auto_transaction
def no_auto_transact():
UserFactory()
return 'error', 500
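# These three throwaway routes each create a user and then fail in a
# different way (unhandled exception, HTTP 500, and HTTP 500 with automatic
# transactions disabled) so TestViewsAreAtomic below can check which kinds of
# failure roll the database back.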
class TestViewsAreAtomic(OsfTestCase):
def test_error_response_rolls_back_transaction(self):
original_user_count = User.objects.count()
self.app.get('/error500', expect_errors=True)
assert_equal(User.objects.count(), original_user_count)
# Need to set debug = False in order to rollback transactions in transaction_teardown_request
mock_app.debug = False
try:
self.app.get('/errorexc', expect_errors=True)
except RuntimeError:
pass
mock_app.debug = True
self.app.get('/noautotransact', expect_errors=True)
assert_equal(User.objects.count(), original_user_count + 1)
class TestViewingProjectWithPrivateLink(OsfTestCase):
def setUp(self):
super(TestViewingProjectWithPrivateLink, self).setUp()
self.user = AuthUserFactory() # Is NOT a contributor
self.project = ProjectFactory(is_public=False)
self.link = PrivateLinkFactory()
self.link.nodes.add(self.project)
self.link.save()
self.project_url = self.project.web_url_for('view_project')
def test_edit_private_link_empty(self):
node = ProjectFactory(creator=self.user)
link = PrivateLinkFactory()
link.nodes.add(node)
link.save()
url = node.api_url_for('project_private_link_edit')
res = self.app.put_json(url, {'pk': link._id, 'value': ''}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('Title cannot be blank', res.body)
def test_edit_private_link_invalid(self):
node = ProjectFactory(creator=self.user)
link = PrivateLinkFactory()
link.nodes.add(node)
link.save()
url = node.api_url_for('project_private_link_edit')
res = self.app.put_json(url, {'pk': link._id, 'value': '<a></a>'}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('Invalid link name.', res.body)
@mock.patch('framework.auth.core.Auth.private_link')
def test_can_be_anonymous_for_public_project(self, mock_property):
mock_property.return_value = mock.MagicMock()
mock_property.anonymous = True
anonymous_link = PrivateLinkFactory(anonymous=True)
anonymous_link.nodes.add(self.project)
anonymous_link.save()
self.project.set_privacy('public')
self.project.save()
self.project.reload()
auth = Auth(user=self.user, private_key=anonymous_link.key)
assert_true(has_anonymous_link(self.project, auth))
def test_has_private_link_key(self):
res = self.app.get(self.project_url, {'view_only': self.link.key})
assert_equal(res.status_code, 200)
def test_not_logged_in_no_key(self):
res = self.app.get(self.project_url, {'view_only': None})
assert_is_redirect(res)
res = res.follow(expect_errors=True)
assert_equal(res.status_code, 301)
assert_equal(
res.request.path,
'/login'
)
def test_logged_in_no_private_key(self):
res = self.app.get(self.project_url, {'view_only': None}, auth=self.user.auth,
expect_errors=True)
assert_equal(res.status_code, http.FORBIDDEN)
def test_logged_in_has_key(self):
res = self.app.get(
self.project_url, {'view_only': self.link.key}, auth=self.user.auth)
assert_equal(res.status_code, 200)
@unittest.skip('Skipping for now until we find a way to mock/set the referrer')
def test_prepare_private_key(self):
res = self.app.get(self.project_url, {'key': self.link.key})
res = res.click('Registrations')
assert_is_redirect(res)
res = res.follow()
assert_equal(res.status_code, 200)
assert_equal(res.request.GET['key'], self.link.key)
def test_cannot_access_registrations_or_forks_with_anon_key(self):
anonymous_link = PrivateLinkFactory(anonymous=True)
anonymous_link.nodes.add(self.project)
anonymous_link.save()
self.project.is_public = False
self.project.save()
url = self.project_url + 'registrations/?view_only={}'.format(anonymous_link.key)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
url = self.project_url + 'forks/?view_only={}'.format(anonymous_link.key)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, 401)
def test_can_access_registrations_and_forks_with_not_anon_key(self):
link = PrivateLinkFactory(anonymous=False)
link.nodes.add(self.project)
link.save()
self.project.is_public = False
self.project.save()
url = self.project_url + 'registrations/?view_only={}'.format(link.key)
res = self.app.get(url)
assert_equal(res.status_code, 200)
url = self.project_url + 'forks/?view_only={}'.format(link.key)
res = self.app.get(url)
assert_equal(res.status_code, 200)
def test_check_can_access_valid(self):
contributor = AuthUserFactory()
self.project.add_contributor(contributor, auth=Auth(self.project.creator))
self.project.save()
assert_true(check_can_access(self.project, contributor))
def test_check_user_access_invalid(self):
noncontrib = AuthUserFactory()
with assert_raises(HTTPError):
check_can_access(self.project, noncontrib)
def test_check_user_access_if_user_is_None(self):
assert_false(check_can_access(self.project, None))
class TestProjectViews(OsfTestCase):
def setUp(self):
super(TestProjectViews, self).setUp()
self.user1 = AuthUserFactory()
self.user1.save()
self.consolidate_auth1 = Auth(user=self.user1)
self.auth = self.user1.auth
self.user2 = AuthUserFactory()
self.auth2 = self.user2.auth
# A project has 2 contributors
self.project = ProjectFactory(
title='Ham',
description='Honey-baked',
creator=self.user1
)
self.project.add_contributor(self.user2, auth=Auth(self.user1))
self.project.save()
self.project2 = ProjectFactory(
title='Tofu',
description='Glazed',
creator=self.user1
)
self.project2.add_contributor(self.user2, auth=Auth(self.user1))
self.project2.save()
def test_node_setting_with_multiple_matched_institution_email_domains(self):
# User has alternate emails matching more than one institution's email domains
inst1 = InstitutionFactory(email_domains=['foo.bar'])
inst2 = InstitutionFactory(email_domains=['baz.qux'])
user = AuthUserFactory()
user.emails.append('[email protected]')
user.emails.append('[email protected]')
user.save()
project = ProjectFactory(creator=user)
# node settings page loads without error
url = project.web_url_for('node_setting')
res = self.app.get(url, auth=user.auth)
assert_equal(res.status_code, 200)
# user is automatically affiliated with institutions
# that matched email domains
user.reload()
assert_in(inst1, user.affiliated_institutions.all())
assert_in(inst2, user.affiliated_institutions.all())
def test_edit_title_empty(self):
node = ProjectFactory(creator=self.user1)
url = node.api_url_for('edit_node')
res = self.app.post_json(url, {'name': 'title', 'value': ''}, auth=self.user1.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('Title cannot be blank', res.body)
def test_edit_title_invalid(self):
node = ProjectFactory(creator=self.user1)
url = node.api_url_for('edit_node')
res = self.app.post_json(url, {'name': 'title', 'value': '<a></a>'}, auth=self.user1.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_in('Invalid title.', res.body)
def test_cannot_remove_only_visible_contributor(self):
user1_contrib = self.project.contributor_set.get(user=self.user1)
user1_contrib.visible = False
user1_contrib.save()
url = self.project.api_url_for('project_remove_contributor')
res = self.app.post_json(
url, {'contributorID': self.user2._id,
'nodeIDs': [self.project._id]}, auth=self.auth, expect_errors=True
)
assert_equal(res.status_code, http.FORBIDDEN)
assert_equal(res.json['message_long'], 'Must have at least one bibliographic contributor')
assert_true(self.project.is_contributor(self.user2))
def test_remove_only_visible_contributor_return_false(self):
user1_contrib = self.project.contributor_set.get(user=self.user1)
user1_contrib.visible = False
user1_contrib.save()
ret = self.project.remove_contributor(contributor=self.user2, auth=self.consolidate_auth1)
assert_false(ret)
self.project.reload()
assert_true(self.project.is_contributor(self.user2))
def test_can_view_nested_project_as_admin(self):
self.parent_project = NodeFactory(
title='parent project',
category='project',
parent=self.project,
is_public=False
)
self.parent_project.save()
self.child_project = NodeFactory(
title='child project',
category='project',
parent=self.parent_project,
is_public=False
)
self.child_project.save()
url = self.child_project.web_url_for('view_project')
res = self.app.get(url, auth=self.auth)
assert_not_in('Private Project', res.body)
assert_in('parent project', res.body)
def test_edit_description(self):
url = '/api/v1/project/{0}/edit/'.format(self.project._id)
self.app.post_json(url,
{'name': 'description', 'value': 'Deep-fried'},
auth=self.auth)
self.project.reload()
assert_equal(self.project.description, 'Deep-fried')
def test_project_api_url(self):
url = self.project.api_url
res = self.app.get(url, auth=self.auth)
data = res.json
assert_equal(data['node']['category'], 'Project')
assert_equal(data['node']['node_type'], 'project')
assert_equal(data['node']['title'], self.project.title)
assert_equal(data['node']['is_public'], self.project.is_public)
assert_equal(data['node']['is_registration'], False)
assert_equal(data['node']['id'], self.project._primary_key)
assert_true(data['user']['is_contributor'])
assert_equal(data['node']['description'], self.project.description)
assert_equal(data['node']['url'], self.project.url)
assert_equal(data['node']['tags'], list(self.project.tags.values_list('name', flat=True)))
assert_in('forked_date', data['node'])
assert_in('registered_from_url', data['node'])
# TODO: Test "parent" and "user" output
def test_add_contributor_post(self):
# Two users are added as a contributor via a POST request
project = ProjectFactory(creator=self.user1, is_public=True)
user2 = UserFactory()
user3 = UserFactory()
url = '/api/v1/project/{0}/contributors/'.format(project._id)
dict2 = add_contributor_json(user2)
dict3 = add_contributor_json(user3)
dict2.update({
'permission': 'admin',
'visible': True,
})
dict3.update({
'permission': 'write',
'visible': False,
})
self.app.post_json(
url,
{
'users': [dict2, dict3],
'node_ids': [project._id],
},
content_type='application/json',
auth=self.auth,
).maybe_follow()
project.reload()
assert_in(user2, project.contributors)
# A log event was added
assert_equal(project.logs.latest().action, 'contributor_added')
assert_equal(len(project.contributors), 3)
assert_equal(project.get_permissions(user2), ['read', 'write', 'admin'])
assert_equal(project.get_permissions(user3), ['read', 'write'])
def test_manage_permissions(self):
url = self.project.api_url + 'contributors/manage/'
self.app.post_json(
url,
{
'contributors': [
{'id': self.project.creator._id, 'permission': 'admin',
'registered': True, 'visible': True},
{'id': self.user1._id, 'permission': 'read',
'registered': True, 'visible': True},
{'id': self.user2._id, 'permission': 'admin',
'registered': True, 'visible': True},
]
},
auth=self.auth,
)
self.project.reload()
assert_equal(self.project.get_permissions(self.user1), ['read'])
assert_equal(self.project.get_permissions(self.user2), ['read', 'write', 'admin'])
def test_manage_permissions_again(self):
url = self.project.api_url + 'contributors/manage/'
self.app.post_json(
url,
{
'contributors': [
{'id': self.user1._id, 'permission': 'admin',
'registered': True, 'visible': True},
{'id': self.user2._id, 'permission': 'admin',
'registered': True, 'visible': True},
]
},
auth=self.auth,
)
self.project.reload()
self.app.post_json(
url,
{
'contributors': [
{'id': self.user1._id, 'permission': 'admin',
'registered': True, 'visible': True},
{'id': self.user2._id, 'permission': 'read',
'registered': True, 'visible': True},
]
},
auth=self.auth,
)
self.project.reload()
assert_equal(self.project.get_permissions(self.user2), ['read'])
assert_equal(self.project.get_permissions(self.user1), ['read', 'write', 'admin'])
def test_contributor_manage_reorder(self):
# Two users are added as a contributor via a POST request
project = ProjectFactory(creator=self.user1, is_public=True)
reg_user1, reg_user2 = UserFactory(), UserFactory()
project.add_contributors(
[
{'user': reg_user1, 'permissions': [
'read', 'write', 'admin'], 'visible': True},
{'user': reg_user2, 'permissions': [
'read', 'write', 'admin'], 'visible': False},
]
)
# Add a non-registered user
unregistered_user = project.add_unregistered_contributor(
fullname=fake.name(), email=fake.email(),
auth=self.consolidate_auth1,
save=True,
)
url = project.api_url + 'contributors/manage/'
self.app.post_json(
url,
{
'contributors': [
{'id': reg_user2._id, 'permission': 'admin',
'registered': True, 'visible': False},
{'id': project.creator._id, 'permission': 'admin',
'registered': True, 'visible': True},
{'id': unregistered_user._id, 'permission': 'admin',
'registered': False, 'visible': True},
{'id': reg_user1._id, 'permission': 'admin',
'registered': True, 'visible': True},
]
},
auth=self.auth,
)
project.reload()
assert_equal(
# Note: Cast ForeignList to list for comparison
list(project.contributors),
[reg_user2, project.creator, unregistered_user, reg_user1]
)
assert_equal(
list(project.visible_contributors),
[project.creator, unregistered_user, reg_user1]
)
def test_project_remove_contributor(self):
url = self.project.api_url_for('project_remove_contributor')
# User 1 removes user2
payload = {'contributorID': self.user2._id,
'nodeIDs': [self.project._id]}
self.app.post(url, json.dumps(payload),
content_type='application/json',
auth=self.auth).maybe_follow()
self.project.reload()
assert_not_in(self.user2._id, self.project.contributors)
# A log event was added
assert_equal(self.project.logs.latest().action, 'contributor_removed')
def test_multiple_project_remove_contributor(self):
url = self.project.api_url_for('project_remove_contributor')
# User 1 removes user2
payload = {'contributorID': self.user2._id,
'nodeIDs': [self.project._id, self.project2._id]}
res = self.app.post(url, json.dumps(payload),
content_type='application/json',
auth=self.auth).maybe_follow()
self.project.reload()
self.project2.reload()
assert_not_in(self.user2._id, self.project.contributors)
assert_not_in('/dashboard/', res.json)
assert_not_in(self.user2._id, self.project2.contributors)
# A log event was added
assert_equal(self.project.logs.latest().action, 'contributor_removed')
def test_private_project_remove_self_not_admin(self):
url = self.project.api_url_for('project_remove_contributor')
# user2 removes self
payload = {"contributorID": self.user2._id,
"nodeIDs": [self.project._id]}
res = self.app.post(url, json.dumps(payload),
content_type="application/json",
auth=self.auth2).maybe_follow()
self.project.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['redirectUrl'], '/dashboard/')
assert_not_in(self.user2._id, self.project.contributors)
def test_public_project_remove_self_not_admin(self):
url = self.project.api_url_for('project_remove_contributor')
# user2 removes self
self.public_project = ProjectFactory(creator=self.user1, is_public=True)
self.public_project.add_contributor(self.user2, auth=Auth(self.user1))
self.public_project.save()
payload = {"contributorID": self.user2._id,
"nodeIDs": [self.public_project._id]}
res = self.app.post(url, json.dumps(payload),
content_type="application/json",
auth=self.auth2).maybe_follow()
self.public_project.reload()
assert_equal(res.status_code, 200)
assert_equal(res.json['redirectUrl'], '/' + self.public_project._id + '/')
assert_not_in(self.user2._id, self.public_project.contributors)
def test_project_remove_other_not_admin(self):
url = self.project.api_url_for('project_remove_contributor')
# User 1 removes user2
payload = {"contributorID": self.user1._id,
"nodeIDs": [self.project._id]}
res = self.app.post(url, json.dumps(payload),
content_type="application/json",
expect_errors=True,
auth=self.auth2).maybe_follow()
self.project.reload()
assert_equal(res.status_code, 403)
assert_equal(res.json['message_long'],
'You do not have permission to perform this action. '
'If this should not have occurred and the issue persists, '
'please report it to <a href="mailto:[email protected]">[email protected]</a>.'
)
assert_in(self.user1, self.project.contributors)
def test_project_remove_fake_contributor(self):
url = self.project.api_url_for('project_remove_contributor')
# User 1 removes user2
payload = {'contributorID': 'badid',
'nodeIDs': [self.project._id]}
res = self.app.post(url, json.dumps(payload),
content_type='application/json',
expect_errors=True,
auth=self.auth).maybe_follow()
self.project.reload()
# Assert the contributor id was invalid
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], 'Contributor not found.')
assert_not_in('badid', self.project.contributors)
def test_project_remove_self_only_admin(self):
url = self.project.api_url_for('project_remove_contributor')
# User 1 removes user2
payload = {'contributorID': self.user1._id,
'nodeIDs': [self.project._id]}
res = self.app.post(url, json.dumps(payload),
content_type='application/json',
expect_errors=True,
auth=self.auth).maybe_follow()
self.project.reload()
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], 'Could not remove contributor.')
assert_in(self.user1, self.project.contributors)
def test_get_contributors_abbrev(self):
# create a project with 3 registered contributors
project = ProjectFactory(creator=self.user1, is_public=True)
reg_user1, reg_user2 = UserFactory(), UserFactory()
project.add_contributors(
[
{'user': reg_user1, 'permissions': [
'read', 'write', 'admin'], 'visible': True},
{'user': reg_user2, 'permissions': [
'read', 'write', 'admin'], 'visible': True},
]
)
# add an unregistered contributor
project.add_unregistered_contributor(
fullname=fake.name(), email=fake.email(),
auth=self.consolidate_auth1,
save=True,
)
url = project.api_url_for('get_node_contributors_abbrev')
res = self.app.get(url, auth=self.auth)
assert_equal(len(project.contributors), 4)
assert_equal(len(res.json['contributors']), 3)
assert_equal(res.json['others_count'], 1)
assert_equal(res.json['contributors'][0]['separator'], ',')
assert_equal(res.json['contributors'][1]['separator'], ',')
assert_equal(res.json['contributors'][2]['separator'], ' &')
def test_edit_node_title(self):
url = '/api/v1/project/{0}/edit/'.format(self.project._id)
# The title is changed though posting form data
self.app.post_json(url, {'name': 'title', 'value': 'Bacon'},
auth=self.auth).maybe_follow()
self.project.reload()
# The title was changed
assert_equal(self.project.title, 'Bacon')
# A log event was saved
assert_equal(self.project.logs.latest().action, 'edit_title')
def test_make_public(self):
self.project.is_public = False
self.project.save()
url = "/api/v1/project/{0}/permissions/public/".format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
self.project.reload()
assert_true(self.project.is_public)
assert_equal(res.json['status'], 'success')
def test_make_private(self):
self.project.is_public = True
self.project.save()
url = "/api/v1/project/{0}/permissions/private/".format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
self.project.reload()
assert_false(self.project.is_public)
assert_equal(res.json['status'], 'success')
def test_cant_make_public_if_not_admin(self):
non_admin = AuthUserFactory()
self.project.add_contributor(non_admin, permissions=['read', 'write'])
self.project.is_public = False
self.project.save()
url = "/api/v1/project/{0}/permissions/public/".format(self.project._id)
res = self.app.post_json(
url, {}, auth=non_admin.auth,
expect_errors=True,
)
assert_equal(res.status_code, http.FORBIDDEN)
assert_false(self.project.is_public)
def test_cant_make_private_if_not_admin(self):
non_admin = AuthUserFactory()
self.project.add_contributor(non_admin, permissions=['read', 'write'])
self.project.is_public = True
self.project.save()
url = "/api/v1/project/{0}/permissions/private/".format(self.project._id)
res = self.app.post_json(
url, {}, auth=non_admin.auth,
expect_errors=True,
)
assert_equal(res.status_code, http.FORBIDDEN)
assert_true(self.project.is_public)
def test_add_tag(self):
url = self.project.api_url_for('project_add_tag')
self.app.post_json(url, {'tag': "foo'ta#@%#%^&g?"}, auth=self.auth)
self.project.reload()
assert_in("foo'ta#@%#%^&g?", self.project.tags.values_list('name', flat=True))
assert_equal("foo'ta#@%#%^&g?", self.project.logs.latest().params['tag'])
def test_remove_tag(self):
self.project.add_tag("foo'ta#@%#%^&g?", auth=self.consolidate_auth1, save=True)
assert_in("foo'ta#@%#%^&g?", self.project.tags.values_list('name', flat=True))
url = self.project.api_url_for('project_remove_tag')
self.app.delete_json(url, {'tag': "foo'ta#@%#%^&g?"}, auth=self.auth)
self.project.reload()
assert_not_in("foo'ta#@%#%^&g?", self.project.tags.values_list('name', flat=True))
latest_log = self.project.logs.latest()
assert_equal('tag_removed', latest_log.action)
assert_equal("foo'ta#@%#%^&g?", latest_log.params['tag'])
# Regression test for #OSF-5257
def test_removal_empty_tag_throws_error(self):
url = self.project.api_url_for('project_remove_tag')
res = self.app.delete_json(url, {'tag': ''}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
# Regression test for #OSF-5257
def test_removal_unknown_tag_throws_error(self):
self.project.add_tag('narf', auth=self.consolidate_auth1, save=True)
url = self.project.api_url_for('project_remove_tag')
res = self.app.delete_json(url, {'tag': 'troz'}, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, http.CONFLICT)
def test_remove_project(self):
url = self.project.api_url
res = self.app.delete_json(url, {}, auth=self.auth).maybe_follow()
self.project.reload()
assert_equal(self.project.is_deleted, True)
assert_in('url', res.json)
assert_equal(res.json['url'], '/dashboard/')
def test_suspended_project(self):
node = NodeFactory(parent=self.project, creator=self.user1)
node.remove_node(Auth(self.user1))
node.suspended = True
node.save()
url = node.api_url
res = self.app.get(url, auth=Auth(self.user1), expect_errors=True)
assert_equal(res.status_code, 451)
def test_private_link_edit_name(self):
link = PrivateLinkFactory(name='link')
link.nodes.add(self.project)
link.save()
assert_equal(link.name, 'link')
url = self.project.api_url + 'private_link/edit/'
self.app.put_json(
url,
{'pk': link._id, 'value': 'new name'},
auth=self.auth,
).maybe_follow()
self.project.reload()
link.reload()
assert_equal(link.name, 'new name')
def test_remove_private_link(self):
link = PrivateLinkFactory()
link.nodes.add(self.project)
link.save()
url = self.project.api_url_for('remove_private_link')
self.app.delete_json(
url,
{'private_link_id': link._id},
auth=self.auth,
).maybe_follow()
self.project.reload()
link.reload()
assert_true(link.is_deleted)
def test_remove_component(self):
node = NodeFactory(parent=self.project, creator=self.user1)
url = node.api_url
res = self.app.delete_json(url, {}, auth=self.auth).maybe_follow()
node.reload()
assert_equal(node.is_deleted, True)
assert_in('url', res.json)
assert_equal(res.json['url'], self.project.url)
def test_cant_remove_component_if_not_admin(self):
node = NodeFactory(parent=self.project, creator=self.user1)
non_admin = AuthUserFactory()
node.add_contributor(
non_admin,
permissions=['read', 'write'],
save=True,
)
url = node.api_url
res = self.app.delete_json(
url, {}, auth=non_admin.auth,
expect_errors=True,
).maybe_follow()
assert_equal(res.status_code, http.FORBIDDEN)
assert_false(node.is_deleted)
def test_view_project_returns_whether_to_show_wiki_widget(self):
user = AuthUserFactory()
project = ProjectFactory(creator=user, is_public=True)
project.add_contributor(user)
project.save()
url = project.api_url_for('view_project')
res = self.app.get(url, auth=user.auth)
assert_equal(res.status_code, http.OK)
assert_in('show_wiki_widget', res.json['user'])
def test_fork_count_does_not_include_deleted_forks(self):
user = AuthUserFactory()
project = ProjectFactory(creator=user)
auth = Auth(project.creator)
fork = project.fork_node(auth)
project.save()
fork.remove_node(auth)
fork.save()
url = project.api_url_for('view_project')
res = self.app.get(url, auth=user.auth)
assert_in('fork_count', res.json['node'])
assert_equal(0, res.json['node']['fork_count'])
def test_statistic_page_redirect(self):
url = self.project.web_url_for('project_statistics_redirect')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302)
assert_in(self.project.web_url_for('project_statistics', _guid=True), res.location)
def test_registration_retraction_redirect(self):
url = self.project.web_url_for('node_registration_retraction_redirect')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302)
assert_in(self.project.web_url_for('node_registration_retraction_get', _guid=True), res.location)
def test_update_node(self):
url = self.project.api_url_for('update_node')
res = self.app.put_json(url, {'title': 'newtitle'}, auth=self.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(self.project.title, 'newtitle')
# Regression test
def test_update_node_with_tags(self):
self.project.add_tag('cheezebørger', auth=Auth(self.project.creator), save=True)
url = self.project.api_url_for('update_node')
res = self.app.put_json(url, {'title': 'newtitle'}, auth=self.auth)
assert_equal(res.status_code, 200)
self.project.reload()
assert_equal(self.project.title, 'newtitle')
class TestEditableChildrenViews(OsfTestCase):
def setUp(self):
OsfTestCase.setUp(self)
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user, is_public=False)
self.child = ProjectFactory(parent=self.project, creator=self.user, is_public=True)
self.grandchild = ProjectFactory(parent=self.child, creator=self.user, is_public=False)
self.great_grandchild = ProjectFactory(parent=self.grandchild, creator=self.user, is_public=True)
self.great_great_grandchild = ProjectFactory(parent=self.great_grandchild, creator=self.user, is_public=False)
url = self.project.api_url_for('get_editable_children')
self.project_results = self.app.get(url, auth=self.user.auth).json
def test_get_editable_children(self):
assert_equal(len(self.project_results['children']), 4)
assert_equal(self.project_results['node']['id'], self.project._id)
def test_editable_children_order(self):
assert_equal(self.project_results['children'][0]['id'], self.child._id)
assert_equal(self.project_results['children'][1]['id'], self.grandchild._id)
assert_equal(self.project_results['children'][2]['id'], self.great_grandchild._id)
assert_equal(self.project_results['children'][3]['id'], self.great_great_grandchild._id)
def test_editable_children_indents(self):
assert_equal(self.project_results['children'][0]['indent'], 0)
assert_equal(self.project_results['children'][1]['indent'], 1)
assert_equal(self.project_results['children'][2]['indent'], 2)
assert_equal(self.project_results['children'][3]['indent'], 3)
def test_editable_children_parents(self):
assert_equal(self.project_results['children'][0]['parent_id'], self.project._id)
assert_equal(self.project_results['children'][1]['parent_id'], self.child._id)
assert_equal(self.project_results['children'][2]['parent_id'], self.grandchild._id)
assert_equal(self.project_results['children'][3]['parent_id'], self.great_grandchild._id)
def test_editable_children_privacy(self):
assert_false(self.project_results['node']['is_public'])
assert_true(self.project_results['children'][0]['is_public'])
assert_false(self.project_results['children'][1]['is_public'])
assert_true(self.project_results['children'][2]['is_public'])
assert_false(self.project_results['children'][3]['is_public'])
def test_editable_children_titles(self):
assert_equal(self.project_results['node']['title'], self.project.title)
assert_equal(self.project_results['children'][0]['title'], self.child.title)
assert_equal(self.project_results['children'][1]['title'], self.grandchild.title)
assert_equal(self.project_results['children'][2]['title'], self.great_grandchild.title)
assert_equal(self.project_results['children'][3]['title'], self.great_great_grandchild.title)
class TestGetNodeTree(OsfTestCase):
def setUp(self):
OsfTestCase.setUp(self)
self.user = AuthUserFactory()
self.user2 = AuthUserFactory()
def test_get_single_node(self):
project = ProjectFactory(creator=self.user)
# child = NodeFactory(parent=project, creator=self.user)
url = project.api_url_for('get_node_tree')
res = self.app.get(url, auth=self.user.auth)
node_id = res.json[0]['node']['id']
assert_equal(node_id, project._primary_key)
def test_get_node_with_children(self):
project = ProjectFactory(creator=self.user)
child1 = NodeFactory(parent=project, creator=self.user)
child2 = NodeFactory(parent=project, creator=self.user2)
child3 = NodeFactory(parent=project, creator=self.user)
url = project.api_url_for('get_node_tree')
res = self.app.get(url, auth=self.user.auth)
tree = res.json[0]
parent_node_id = tree['node']['id']
child1_id = tree['children'][0]['node']['id']
child2_id = tree['children'][1]['node']['id']
child3_id = tree['children'][2]['node']['id']
assert_equal(parent_node_id, project._primary_key)
assert_equal(child1_id, child1._primary_key)
assert_equal(child2_id, child2._primary_key)
assert_equal(child3_id, child3._primary_key)
def test_get_node_with_child_linked_to_parent(self):
project = ProjectFactory(creator=self.user)
child1 = NodeFactory(parent=project, creator=self.user)
child1.add_pointer(project, Auth(self.user))
child1.save()
url = project.api_url_for('get_node_tree')
res = self.app.get(url, auth=self.user.auth)
tree = res.json[0]
parent_node_id = tree['node']['id']
child1_id = tree['children'][0]['node']['id']
assert_equal(child1_id, child1._primary_key)
def test_get_node_not_parent_owner(self):
project = ProjectFactory(creator=self.user2)
child = NodeFactory(parent=project, creator=self.user2)
url = project.api_url_for('get_node_tree')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 200)
assert_equal(res.json, [])
# Parent node should show because of user2 read access, the children should not
def test_get_node_parent_not_admin(self):
project = ProjectFactory(creator=self.user)
project.add_contributor(self.user2, auth=Auth(self.user))
project.save()
child1 = NodeFactory(parent=project, creator=self.user)
child2 = NodeFactory(parent=project, creator=self.user)
child3 = NodeFactory(parent=project, creator=self.user)
url = project.api_url_for('get_node_tree')
res = self.app.get(url, auth=self.user2.auth)
tree = res.json[0]
parent_node_id = tree['node']['id']
children = tree['children']
assert_equal(parent_node_id, project._primary_key)
assert_equal(children, [])
class TestUserProfile(OsfTestCase):
def setUp(self):
super(TestUserProfile, self).setUp()
self.user = AuthUserFactory()
def test_sanitization_of_edit_profile(self):
url = api_url_for('edit_profile', uid=self.user._id)
post_data = {'name': 'fullname', 'value': 'new<b> name</b> '}
request = self.app.post(url, post_data, auth=self.user.auth)
assert_equal('new name', request.json['name'])
def test_fmt_date_or_none(self):
with assert_raises(HTTPError) as cm:
#enter a date before 1900
fmt_date_or_none(dt.datetime(1890, 10, 31, 18, 23, 29, 227))
# error should be raised because date is before 1900
assert_equal(cm.exception.code, http.BAD_REQUEST)
def test_unserialize_social(self):
url = api_url_for('unserialize_social')
payload = {
'profileWebsites': ['http://frozen.pizza.com/reviews'],
'twitter': 'howtopizza',
'github': 'frozenpizzacode',
}
self.app.put_json(
url,
payload,
auth=self.user.auth,
)
self.user.reload()
for key, value in payload.iteritems():
assert_equal(self.user.social[key], value)
assert_true(self.user.social['researcherId'] is None)
# Regression test for help-desk ticket
def test_making_email_primary_is_not_case_sensitive(self):
user = AuthUserFactory(username='[email protected]')
# make confirmed email have different casing
user.emails[0] = user.emails[0].capitalize()
user.save()
url = api_url_for('update_user')
res = self.app.put_json(
url,
{'id': user._id, 'emails': [{'address': '[email protected]', 'primary': True, 'confirmed': True}]},
auth=user.auth
)
assert_equal(res.status_code, 200)
def test_unserialize_social_validation_failure(self):
url = api_url_for('unserialize_social')
# profileWebsites URL is invalid
payload = {
'profileWebsites': ['http://goodurl.com', 'http://invalidurl'],
'twitter': 'howtopizza',
'github': 'frozenpizzacode',
}
res = self.app.put_json(
url,
payload,
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], 'Invalid personal URL.')
def test_serialize_social_editable(self):
self.user.social['twitter'] = 'howtopizza'
self.user.social['profileWebsites'] = ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com']
self.user.save()
url = api_url_for('serialize_social')
res = self.app.get(
url,
auth=self.user.auth,
)
assert_equal(res.json.get('twitter'), 'howtopizza')
assert_equal(res.json.get('profileWebsites'), ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com'])
assert_true(res.json.get('github') is None)
assert_true(res.json['editable'])
def test_serialize_social_not_editable(self):
user2 = AuthUserFactory()
self.user.social['twitter'] = 'howtopizza'
self.user.social['profileWebsites'] = ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com']
self.user.save()
url = api_url_for('serialize_social', uid=self.user._id)
res = self.app.get(
url,
auth=user2.auth,
)
assert_equal(res.json.get('twitter'), 'howtopizza')
assert_equal(res.json.get('profileWebsites'), ['http://www.cos.io', 'http://www.osf.io', 'http://www.wordup.com'])
assert_true(res.json.get('github') is None)
assert_false(res.json['editable'])
def test_serialize_social_addons_editable(self):
self.user.add_addon('github')
github_account = GitHubAccountFactory()
github_account.save()
self.user.external_accounts.add(github_account)
self.user.save()
url = api_url_for('serialize_social')
res = self.app.get(
url,
auth=self.user.auth,
)
assert_equal(
res.json['addons']['github'],
'abc'
)
def test_serialize_social_addons_not_editable(self):
user2 = AuthUserFactory()
self.user.add_addon('github')
github_account = GitHubAccountFactory()
github_account.save()
self.user.external_accounts.add(github_account)
self.user.save()
url = api_url_for('serialize_social', uid=self.user._id)
res = self.app.get(
url,
auth=user2.auth,
)
assert_not_in('addons', res.json)
def test_unserialize_and_serialize_jobs(self):
jobs = [{
'institution': 'an institution',
'department': 'a department',
'title': 'a title',
'startMonth': 'January',
'startYear': '2001',
'endMonth': 'March',
'endYear': '2001',
'ongoing': False,
}, {
'institution': 'another institution',
'department': None,
'title': None,
'startMonth': 'May',
'startYear': '2001',
'endMonth': None,
'endYear': None,
'ongoing': True,
}]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(len(self.user.jobs), 2)
url = api_url_for('serialize_jobs')
res = self.app.get(
url,
auth=self.user.auth,
)
for i, job in enumerate(jobs):
assert_equal(job, res.json['contents'][i])
def test_unserialize_and_serialize_schools(self):
schools = [{
'institution': 'an institution',
'department': 'a department',
'degree': 'a degree',
'startMonth': 1,
'startYear': '2001',
'endMonth': 5,
'endYear': '2001',
'ongoing': False,
}, {
'institution': 'another institution',
'department': None,
'degree': None,
'startMonth': 5,
'startYear': '2001',
'endMonth': None,
'endYear': None,
'ongoing': True,
}]
payload = {'contents': schools}
url = api_url_for('unserialize_schools')
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(len(self.user.schools), 2)
url = api_url_for('serialize_schools')
res = self.app.get(
url,
auth=self.user.auth,
)
for i, job in enumerate(schools):
assert_equal(job, res.json['contents'][i])
def test_unserialize_jobs(self):
jobs = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'title': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# jobs field is updated
assert_equal(self.user.jobs, jobs)
def test_unserialize_names(self):
fake_fullname_w_spaces = ' {} '.format(fake.name())
names = {
'full': fake_fullname_w_spaces,
'given': 'Tea',
'middle': 'Gray',
'family': 'Pot',
'suffix': 'Ms.',
}
url = api_url_for('unserialize_names')
res = self.app.put_json(url, names, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# user is updated
assert_equal(self.user.fullname, fake_fullname_w_spaces.strip())
assert_equal(self.user.given_name, names['given'])
assert_equal(self.user.middle_names, names['middle'])
assert_equal(self.user.family_name, names['family'])
assert_equal(self.user.suffix, names['suffix'])
def test_unserialize_schools(self):
schools = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'degree': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': schools}
url = api_url_for('unserialize_schools')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# schools field is updated
assert_equal(self.user.schools, schools)
def test_unserialize_jobs_valid(self):
jobs = [
{
'institution': fake.company(),
'department': fake.catch_phrase(),
'title': fake.bs(),
'startMonth': 5,
'startYear': '2013',
'endMonth': 3,
'endYear': '2014',
'ongoing': False,
}
]
payload = {'contents': jobs}
url = api_url_for('unserialize_jobs')
res = self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(res.status_code, 200)
def test_get_current_user_gravatar_default_size(self):
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_gravatar = res.json['gravatar_url']
assert_true(current_user_gravatar is not None)
url = api_url_for('get_gravatar', uid=self.user._id)
res = self.app.get(url, auth=self.user.auth)
my_user_gravatar = res.json['gravatar_url']
assert_equal(current_user_gravatar, my_user_gravatar)
def test_get_other_user_gravatar_default_size(self):
user2 = AuthUserFactory()
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_gravatar = res.json['gravatar_url']
url = api_url_for('get_gravatar', uid=user2._id)
res = self.app.get(url, auth=self.user.auth)
user2_gravatar = res.json['gravatar_url']
assert_true(user2_gravatar is not None)
assert_not_equal(current_user_gravatar, user2_gravatar)
def test_get_current_user_gravatar_specific_size(self):
url = api_url_for('current_user_gravatar')
res = self.app.get(url, auth=self.user.auth)
current_user_default_gravatar = res.json['gravatar_url']
url = api_url_for('current_user_gravatar', size=11)
res = self.app.get(url, auth=self.user.auth)
current_user_small_gravatar = res.json['gravatar_url']
assert_true(current_user_small_gravatar is not None)
assert_not_equal(current_user_default_gravatar, current_user_small_gravatar)
def test_get_other_user_gravatar_specific_size(self):
user2 = AuthUserFactory()
url = api_url_for('get_gravatar', uid=user2._id)
res = self.app.get(url, auth=self.user.auth)
gravatar_default_size = res.json['gravatar_url']
url = api_url_for('get_gravatar', uid=user2._id, size=11)
res = self.app.get(url, auth=self.user.auth)
gravatar_small = res.json['gravatar_url']
assert_true(gravatar_small is not None)
assert_not_equal(gravatar_default_size, gravatar_small)
def test_update_user_timezone(self):
assert_equal(self.user.timezone, 'Etc/UTC')
payload = {'timezone': 'America/New_York', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.timezone, 'America/New_York')
def test_update_user_locale(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': 'de_DE', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'de_DE')
def test_update_user_locale_none(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': None, 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'en_US')
def test_update_user_locale_empty_string(self):
assert_equal(self.user.locale, 'en_US')
payload = {'locale': '', 'id': self.user._id}
url = api_url_for('update_user', uid=self.user._id)
self.app.put_json(url, payload, auth=self.user.auth)
self.user.reload()
assert_equal(self.user.locale, 'en_US')
def test_cannot_update_user_without_user_id(self):
user1 = AuthUserFactory()
url = api_url_for('update_user')
header = {'emails': [{'address': user1.username}]}
res = self.app.put_json(url, header, auth=user1.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], '"id" is required')
@mock.patch('framework.auth.views.mails.send_mail')
def test_add_emails_return_emails(self, send_mail):
user1 = AuthUserFactory()
url = api_url_for('update_user')
email = '[email protected]'
header = {'id': user1._id,
'emails': [{'address': user1.username, 'primary': True, 'confirmed': True},
{'address': email, 'primary': False, 'confirmed': False}
]}
res = self.app.put_json(url, header, auth=user1.auth)
assert_equal(res.status_code, 200)
assert_in('emails', res.json['profile'])
assert_equal(len(res.json['profile']['emails']), 2)
@mock.patch('framework.auth.views.mails.send_mail')
def test_resend_confirmation_return_emails(self, send_mail):
user1 = AuthUserFactory()
url = api_url_for('resend_confirmation')
email = '[email protected]'
header = {'id': user1._id,
'email': {'address': email, 'primary': False, 'confirmed': False}
}
res = self.app.put_json(url, header, auth=user1.auth)
assert_equal(res.status_code, 200)
assert_in('emails', res.json['profile'])
assert_equal(len(res.json['profile']['emails']), 2)
@mock.patch('framework.auth.views.mails.send_mail')
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
def test_update_user_mailing_lists(self, mock_get_mailchimp_api, send_mail):
email = fake.email()
self.user.emails.append(email)
list_name = 'foo'
self.user.mailchimp_mailing_lists[list_name] = True
self.user.save()
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
list_id = mailchimp_utils.get_list_id_from_name(list_name)
url = api_url_for('update_user', uid=self.user._id)
emails = [
{'address': self.user.username, 'primary': False, 'confirmed': True},
{'address': email, 'primary': True, 'confirmed': True}]
payload = {'locale': '', 'id': self.user._id, 'emails': emails}
self.app.put_json(url, payload, auth=self.user.auth)
assert mock_client.lists.unsubscribe.called
mock_client.lists.unsubscribe.assert_called_with(
id=list_id,
email={'email': self.user.username},
send_goodbye=True
)
mock_client.lists.subscribe.assert_called_with(
id=list_id,
email={'email': email},
merge_vars={
'fname': self.user.given_name,
'lname': self.user.family_name,
},
double_optin=False,
update_existing=True
)
handlers.celery_teardown_request()
@mock.patch('framework.auth.views.mails.send_mail')
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
def test_unsubscribe_mailchimp_not_called_if_user_not_subscribed(self, mock_get_mailchimp_api, send_mail):
email = fake.email()
self.user.emails.append(email)
list_name = 'foo'
self.user.mailchimp_mailing_lists[list_name] = False
self.user.save()
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
url = api_url_for('update_user', uid=self.user._id)
emails = [
{'address': self.user.username, 'primary': False, 'confirmed': True},
{'address': email, 'primary': True, 'confirmed': True}]
payload = {'locale': '', 'id': self.user._id, 'emails': emails}
self.app.put_json(url, payload, auth=self.user.auth)
assert_equal(mock_client.lists.unsubscribe.call_count, 0)
assert_equal(mock_client.lists.subscribe.call_count, 0)
handlers.celery_teardown_request()
# TODO: Uncomment once outstanding issues with this feature are addressed
# def test_twitter_redirect_success(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# res = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter']))
# assert_equals(res.status_code, http.FOUND)
# assert_in(self.user.url, res.location)
# def test_twitter_redirect_is_case_insensitive(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# res1 = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter']))
# res2 = self.app.get(web_url_for('redirect_to_twitter', twitter_handle=self.user.social['twitter'].lower()))
# assert_equal(res1.location, res2.location)
# def test_twitter_redirect_unassociated_twitter_handle_returns_404(self):
# unassociated_handle = fake.last_name()
# expected_error = 'There is no active user associated with the Twitter handle: {0}.'.format(unassociated_handle)
# res = self.app.get(
# web_url_for('redirect_to_twitter', twitter_handle=unassociated_handle),
# expect_errors=True
# )
# assert_equal(res.status_code, http.NOT_FOUND)
# assert_true(expected_error in res.body)
# def test_twitter_redirect_handle_with_multiple_associated_accounts_redirects_to_selection_page(self):
# self.user.social['twitter'] = fake.last_name()
# self.user.save()
# user2 = AuthUserFactory()
# user2.social['twitter'] = self.user.social['twitter']
# user2.save()
# expected_error = 'There are multiple OSF accounts associated with the Twitter handle: <strong>{0}</strong>.'.format(self.user.social['twitter'])
# res = self.app.get(
# web_url_for(
# 'redirect_to_twitter',
# twitter_handle=self.user.social['twitter'],
# expect_error=True
# )
# )
# assert_equal(res.status_code, http.MULTIPLE_CHOICES)
# assert_true(expected_error in res.body)
# assert_true(self.user.url in res.body)
# assert_true(user2.url in res.body)
class TestUserProfileApplicationsPage(OsfTestCase):
def setUp(self):
super(TestUserProfileApplicationsPage, self).setUp()
self.user = AuthUserFactory()
self.user2 = AuthUserFactory()
self.platform_app = ApiOAuth2ApplicationFactory(owner=self.user)
self.detail_url = web_url_for('oauth_application_detail', client_id=self.platform_app.client_id)
def test_non_owner_cant_access_detail_page(self):
res = self.app.get(self.detail_url, auth=self.user2.auth, expect_errors=True)
assert_equal(res.status_code, http.FORBIDDEN)
def test_owner_cant_access_deleted_application(self):
self.platform_app.is_active = False
self.platform_app.save()
res = self.app.get(self.detail_url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, http.GONE)
def test_owner_cant_access_nonexistent_application(self):
url = web_url_for('oauth_application_detail', client_id='nonexistent')
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, http.NOT_FOUND)
def test_url_has_not_broken(self):
assert_equal(self.platform_app.url, self.detail_url)
class TestUserProfileTokensPage(OsfTestCase):
def setUp(self):
super(TestUserProfileTokensPage, self).setUp()
self.user = AuthUserFactory()
self.token = ApiOAuth2PersonalTokenFactory()
self.detail_url = web_url_for('personal_access_token_detail', _id=self.token._id)
def test_url_has_not_broken(self):
assert_equal(self.token.url, self.detail_url)
class TestUserAccount(OsfTestCase):
def setUp(self):
super(TestUserAccount, self).setUp()
self.user = AuthUserFactory()
self.user.set_password('password')
self.user.auth = (self.user.username, 'password')
self.user.save()
@mock.patch('website.profile.views.push_status_message')
def test_password_change_valid(self,
mock_push_status_message,
old_password='password',
new_password='Pa$$w0rd',
confirm_password='Pa$$w0rd'):
url = web_url_for('user_account_password')
post_data = {
'old_password': old_password,
'new_password': new_password,
'confirm_password': confirm_password,
}
res = self.app.post(url, post_data, auth=(self.user.username, old_password))
assert_equal(302, res.status_code)
res = res.follow(auth=(self.user.username, new_password))
assert_equal(200, res.status_code)
self.user.reload()
assert_true(self.user.check_password(new_password))
assert_true(mock_push_status_message.called)
assert_in('Password updated successfully', mock_push_status_message.mock_calls[0][1][0])
@mock.patch('website.profile.views.push_status_message')
def test_password_change_invalid(self, mock_push_status_message, old_password='', new_password='',
confirm_password='', error_message='Old password is invalid'):
url = web_url_for('user_account_password')
post_data = {
'old_password': old_password,
'new_password': new_password,
'confirm_password': confirm_password,
}
res = self.app.post(url, post_data, auth=self.user.auth)
        assert_equal(res.status_code, 302)
res = res.follow(auth=self.user.auth)
        assert_equal(res.status_code, 200)
self.user.reload()
assert_false(self.user.check_password(new_password))
assert_true(mock_push_status_message.called)
error_strings = [e[1][0] for e in mock_push_status_message.mock_calls]
assert_in(error_message, error_strings)
def test_password_change_invalid_old_password(self):
self.test_password_change_invalid(
old_password='invalid old password',
new_password='new password',
confirm_password='new password',
error_message='Old password is invalid',
)
def test_password_change_invalid_confirm_password(self):
self.test_password_change_invalid(
old_password='password',
new_password='new password',
confirm_password='invalid confirm password',
error_message='Password does not match the confirmation',
)
def test_password_change_invalid_new_password_length(self):
self.test_password_change_invalid(
old_password='password',
new_password='1234567',
confirm_password='1234567',
error_message='Password should be at least eight characters',
)
def test_password_change_valid_new_password_length(self):
self.test_password_change_valid(
old_password='password',
new_password='12345678',
confirm_password='12345678',
)
def test_password_change_invalid_blank_password(self, old_password='', new_password='', confirm_password=''):
self.test_password_change_invalid(
old_password=old_password,
new_password=new_password,
confirm_password=confirm_password,
error_message='Passwords cannot be blank',
)
def test_password_change_invalid_blank_new_password(self):
for password in ('', ' '):
self.test_password_change_invalid_blank_password('password', password, 'new password')
def test_password_change_invalid_blank_confirm_password(self):
for password in ('', ' '):
self.test_password_change_invalid_blank_password('password', 'new password', password)
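    # Account export and deactivation requests are throttled: a second POST
    # inside the throttle window returns HTTP 400 and sends no extra email.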
@mock.patch('framework.auth.views.mails.send_mail')
def test_user_cannot_request_account_export_before_throttle_expires(self, send_mail):
url = api_url_for('request_export')
self.app.post(url, auth=self.user.auth)
assert_true(send_mail.called)
res = self.app.post(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(send_mail.call_count, 1)
@mock.patch('framework.auth.views.mails.send_mail')
def test_user_cannot_request_account_deactivation_before_throttle_expires(self, send_mail):
url = api_url_for('request_deactivation')
self.app.post(url, auth=self.user.auth)
assert_true(send_mail.called)
res = self.app.post(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(send_mail.call_count, 1)
def test_get_unconfirmed_emails_exclude_external_identity(self):
external_identity = {
'service': {
'AFI': 'LINK'
}
}
self.user.add_unconfirmed_email("[email protected]")
self.user.add_unconfirmed_email("[email protected]", external_identity=external_identity)
self.user.save()
unconfirmed_emails = self.user.get_unconfirmed_emails_exclude_external_identity()
assert_in("[email protected]", unconfirmed_emails)
assert_not_in("[email protected]", unconfirmed_emails)
class TestAddingContributorViews(OsfTestCase):
def setUp(self):
super(TestAddingContributorViews, self).setUp()
self.creator = AuthUserFactory()
self.project = ProjectFactory(creator=self.creator)
self.auth = Auth(self.project.creator)
# Authenticate all requests
self.app.authenticate(*self.creator.auth)
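        # Connect the contributor_added signal so email side effects fire in
        # these tests; tearDown disconnects it again.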
contributor_added.connect(notify_added_contributor)
def test_serialize_unregistered_without_record(self):
name, email = fake.name(), fake.email()
res = serialize_unregistered(fullname=name, email=email)
assert_equal(res['fullname'], name)
assert_equal(res['email'], email)
assert_equal(res['id'], None)
assert_false(res['registered'])
assert_true(res['gravatar'])
assert_false(res['active'])
def test_deserialize_contributors(self):
contrib = UserFactory()
unreg = UnregUserFactory()
name, email = fake.name(), fake.email()
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [
add_contributor_json(contrib),
serialize_unregistered(fake.name(), unreg.username),
unreg_no_record
]
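        # Attach the permission/visibility flags that the add-contributor
        # form would normally supply before deserializing.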
contrib_data[0]['permission'] = 'admin'
contrib_data[1]['permission'] = 'write'
contrib_data[2]['permission'] = 'read'
contrib_data[0]['visible'] = True
contrib_data[1]['visible'] = True
contrib_data[2]['visible'] = True
res = deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator))
assert_equal(len(res), len(contrib_data))
assert_true(res[0]['user'].is_registered)
assert_false(res[1]['user'].is_registered)
assert_true(res[1]['user']._id)
assert_false(res[2]['user'].is_registered)
assert_true(res[2]['user']._id)
def test_deserialize_contributors_validates_fullname(self):
name = "<img src=1 onerror=console.log(1)>"
email = fake.email()
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [unreg_no_record]
contrib_data[0]['permission'] = 'admin'
contrib_data[0]['visible'] = True
with assert_raises(ValidationError):
deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator),
validate=True)
def test_deserialize_contributors_validates_email(self):
name = fake.name()
email = "!@#$%%^&*"
unreg_no_record = serialize_unregistered(name, email)
contrib_data = [unreg_no_record]
contrib_data[0]['permission'] = 'admin'
contrib_data[0]['visible'] = True
with assert_raises(ValidationError):
deserialize_contributors(
self.project,
contrib_data,
auth=Auth(self.creator),
validate=True)
def test_serialize_unregistered_with_record(self):
name, email = fake.name(), fake.email()
user = self.project.add_unregistered_contributor(fullname=name,
email=email, auth=Auth(self.project.creator))
self.project.save()
res = serialize_unregistered(
fullname=name,
email=email
)
assert_false(res['active'])
assert_false(res['registered'])
assert_equal(res['id'], user._primary_key)
assert_true(res['gravatar_url'])
assert_equal(res['fullname'], name)
assert_equal(res['email'], email)
def test_add_contributor_with_unreg_contribs_and_reg_contribs(self):
n_contributors_pre = len(self.project.contributors)
reg_user = UserFactory()
name, email = fake.name(), fake.email()
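        # A payload entry with 'id': None represents an unregistered
        # (invited) contributor; the view creates an unclaimed record for it.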
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
self.project.reload()
assert_equal(len(self.project.contributors),
n_contributors_pre + len(payload['users']))
new_unreg = auth.get_user(email=email)
assert_false(new_unreg.is_registered)
# unclaimed record was added
new_unreg.reload()
assert_in(self.project._primary_key, new_unreg.unclaimed_records)
rec = new_unreg.get_unclaimed_record(self.project._primary_key)
assert_equal(rec['name'], name)
assert_equal(rec['email'], email)
@mock.patch('website.project.views.contributor.send_claim_email')
def test_add_contributors_post_only_sends_one_email_to_unreg_user(
self, mock_send_claim_email):
# Project has components
comp1, comp2 = NodeFactory(
creator=self.creator), NodeFactory(creator=self.creator)
NodeRelation.objects.create(parent=self.project, child=comp1)
NodeRelation.objects.create(parent=self.project, child=comp2)
self.project.save()
# An unreg user is added to the project AND its components
        unreg_user = {  # dict because user has no previous unreg record
'id': None,
'registered': False,
'fullname': fake.name(),
'email': fake.email(),
'permission': 'admin',
'visible': True,
}
payload = {
'users': [unreg_user],
'node_ids': [comp1._primary_key, comp2._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert_true(self.project.can_edit(user=self.creator))
self.app.post_json(url, payload, auth=self.creator.auth)
# finalize_invitation should only have been called once
assert_equal(mock_send_claim_email.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_add_contributors_post_only_sends_one_email_to_registered_user(self, mock_send_mail):
# Project has components
comp1 = NodeFactory(creator=self.creator, parent=self.project)
comp2 = NodeFactory(creator=self.creator, parent=self.project)
# A registered user is added to the project AND its components
user = UserFactory()
user_dict = {
'id': user._id,
'fullname': user.fullname,
'email': user.username,
'permission': 'write',
'visible': True}
payload = {
'users': [user_dict],
'node_ids': [comp1._primary_key, comp2._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert self.project.can_edit(user=self.creator)
self.app.post_json(url, payload, auth=self.creator.auth)
# send_mail should only have been called once
assert_equal(mock_send_mail.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_add_contributors_post_sends_email_if_user_not_contributor_on_parent_node(self, mock_send_mail):
# Project has a component with a sub-component
component = NodeFactory(creator=self.creator, parent=self.project)
sub_component = NodeFactory(creator=self.creator, parent=component)
# A registered user is added to the project and the sub-component, but NOT the component
user = UserFactory()
user_dict = {
'id': user._id,
'fullname': user.fullname,
'email': user.username,
'permission': 'write',
'visible': True}
payload = {
'users': [user_dict],
'node_ids': [sub_component._primary_key]
}
# send request
url = self.project.api_url_for('project_contributors_post')
assert self.project.can_edit(user=self.creator)
self.app.post_json(url, payload, auth=self.creator.auth)
# send_mail is called for both the project and the sub-component
assert_equal(mock_send_mail.call_count, 2)
@mock.patch('website.project.views.contributor.send_claim_email')
def test_email_sent_when_unreg_user_is_added(self, send_mail):
name, email = fake.name(), fake.email()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
payload = {
'users': [pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
assert_true(send_mail.called)
        # Mock.called_with is not an assertion helper; inspect the recorded
        # call instead (the recipient may be positional or the 'email' kwarg)
        call_args, call_kwargs = send_mail.call_args
        assert_equal(call_kwargs.get('email') or call_args[0], email)
@mock.patch('website.mails.send_mail')
def test_email_sent_when_reg_user_is_added(self, send_mail):
contributor = UserFactory()
contributors = [{
'user': contributor,
'visible': True,
'permissions': ['read', 'write']
}]
project = ProjectFactory(creator=self.auth.user)
project.add_contributors(contributors, auth=self.auth)
project.save()
assert_true(send_mail.called)
send_mail.assert_called_with(
contributor.username,
mails.CONTRIBUTOR_ADDED_DEFAULT,
user=contributor,
node=project,
referrer_name=self.auth.user.fullname,
all_global_subscriptions_none=False,
branded_service_name=None,
)
assert_almost_equal(contributor.contributor_added_email_records[project._id]['last_sent'], int(time.time()), delta=1)
@mock.patch('website.mails.send_mail')
def test_contributor_added_email_sent_to_unreg_user(self, send_mail):
unreg_user = UnregUserFactory()
project = ProjectFactory()
project.add_unregistered_contributor(fullname=unreg_user.fullname, email=unreg_user.email, auth=Auth(project.creator))
project.save()
assert_true(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_forking_project_does_not_send_contributor_added_email(self, send_mail):
project = ProjectFactory()
project.fork_node(auth=Auth(project.creator))
assert_false(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_templating_project_does_not_send_contributor_added_email(self, send_mail):
project = ProjectFactory()
project.use_as_template(auth=Auth(project.creator))
assert_false(send_mail.called)
@mock.patch('website.archiver.tasks.archive')
@mock.patch('website.mails.send_mail')
def test_registering_project_does_not_send_contributor_added_email(self, send_mail, mock_archive):
project = ProjectFactory()
project.register_node(get_default_metaschema(), Auth(user=project.creator), '', None)
assert_false(send_mail.called)
@mock.patch('website.mails.send_mail')
def test_notify_contributor_email_does_not_send_before_throttle_expires(self, send_mail):
contributor = UserFactory()
project = ProjectFactory()
auth = Auth(project.creator)
notify_added_contributor(project, contributor, auth)
assert_true(send_mail.called)
# 2nd call does not send email because throttle period has not expired
notify_added_contributor(project, contributor, auth)
assert_equal(send_mail.call_count, 1)
@mock.patch('website.mails.send_mail')
def test_notify_contributor_email_sends_after_throttle_expires(self, send_mail):
throttle = 0.5
contributor = UserFactory()
project = ProjectFactory()
auth = Auth(project.creator)
notify_added_contributor(project, contributor, auth, throttle=throttle)
assert_true(send_mail.called)
time.sleep(1) # throttle period expires
notify_added_contributor(project, contributor, auth, throttle=throttle)
assert_equal(send_mail.call_count, 2)
def test_add_multiple_contributors_only_adds_one_log(self):
n_logs_pre = self.project.logs.count()
reg_user = UserFactory()
name = fake.name()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': fake.email(),
'permission': 'write',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': []
}
url = self.project.api_url_for('project_contributors_post')
self.app.post_json(url, payload).maybe_follow()
self.project.reload()
assert_equal(self.project.logs.count(), n_logs_pre + 1)
def test_add_contribs_to_multiple_nodes(self):
child = NodeFactory(parent=self.project, creator=self.creator)
n_contributors_pre = child.contributors.count()
reg_user = UserFactory()
name, email = fake.name(), fake.email()
pseudouser = {
'id': None,
'registered': False,
'fullname': name,
'email': email,
'permission': 'admin',
'visible': True,
}
reg_dict = add_contributor_json(reg_user)
reg_dict['permission'] = 'admin'
reg_dict['visible'] = True
payload = {
'users': [reg_dict, pseudouser],
'node_ids': [self.project._primary_key, child._primary_key]
}
url = '/api/v1/project/{0}/contributors/'.format(self.project._id)
self.app.post_json(url, payload).maybe_follow()
child.reload()
assert_equal(child.contributors.count(),
n_contributors_pre + len(payload['users']))
def tearDown(self):
super(TestAddingContributorViews, self).tearDown()
contributor_added.disconnect(notify_added_contributor)
class TestUserInviteViews(OsfTestCase):
def setUp(self):
super(TestUserInviteViews, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
self.invite_url = '/api/v1/project/{0}/invite_contributor/'.format(
self.project._primary_key)
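    # The invite endpoint accepts {'fullname': ..., 'email': ...} and returns
    # a serialized contributor stub in the JSON response.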
def test_invite_contributor_post_if_not_in_db(self):
name, email = fake.name(), fake.email()
res = self.app.post_json(
self.invite_url,
{'fullname': name, 'email': email},
auth=self.user.auth,
)
contrib = res.json['contributor']
assert_true(contrib['id'] is None)
assert_equal(contrib['fullname'], name)
assert_equal(contrib['email'], email)
def test_invite_contributor_post_if_unreg_already_in_db(self):
        # An unreg user is added to a different project
name, email = fake.name(), fake.email()
project2 = ProjectFactory()
unreg_user = project2.add_unregistered_contributor(fullname=name, email=email,
auth=Auth(project2.creator))
project2.save()
res = self.app.post_json(self.invite_url,
{'fullname': name, 'email': email}, auth=self.user.auth)
expected = add_contributor_json(unreg_user)
expected['fullname'] = name
expected['email'] = email
assert_equal(res.json['contributor'], expected)
    def test_invite_contributor_post_if_email_already_registered(self):
        reg_user = UserFactory()
        # Tries to invite a user that is already registered
res = self.app.post_json(self.invite_url,
{'fullname': fake.name(), 'email': reg_user.username},
auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
def test_invite_contributor_post_if_user_is_already_contributor(self):
unreg_user = self.project.add_unregistered_contributor(
fullname=fake.name(), email=fake.email(),
auth=Auth(self.project.creator)
)
self.project.save()
# Tries to invite unreg user that is already a contributor
res = self.app.post_json(self.invite_url,
{'fullname': fake.name(), 'email': unreg_user.username},
auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
def test_invite_contributor_with_no_email(self):
name = fake.name()
res = self.app.post_json(self.invite_url,
{'fullname': name, 'email': None}, auth=self.user.auth)
assert_equal(res.status_code, http.OK)
data = res.json
assert_equal(data['status'], 'success')
assert_equal(data['contributor']['fullname'], name)
assert_true(data['contributor']['email'] is None)
assert_false(data['contributor']['registered'])
def test_invite_contributor_requires_fullname(self):
res = self.app.post_json(self.invite_url,
{'email': '[email protected]', 'fullname': ''}, auth=self.user.auth,
expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_to_given_email(self, send_mail):
project = ProjectFactory()
given_email = fake.email()
unreg_user = project.add_unregistered_contributor(
fullname=fake.name(),
email=given_email,
auth=Auth(project.creator),
)
project.save()
send_claim_email(email=given_email, unclaimed_user=unreg_user, node=project)
        assert_true(send_mail.called)
        # send_mail receives the recipient address and template as its first
        # two positional arguments (see assert_called_with in the next test)
        call_args, call_kwargs = send_mail.call_args
        assert_equal(call_kwargs.get('to_addr') or call_args[0], given_email)
        assert_equal(call_kwargs.get('mail') or call_args[1], mails.INVITE_DEFAULT)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_to_referrer(self, send_mail):
project = ProjectFactory()
referrer = project.creator
given_email, real_email = fake.email(), fake.email()
unreg_user = project.add_unregistered_contributor(fullname=fake.name(),
email=given_email, auth=Auth(
referrer)
)
project.save()
send_claim_email(email=real_email, unclaimed_user=unreg_user, node=project)
assert_true(send_mail.called)
# email was sent to referrer
send_mail.assert_called_with(
referrer.username,
mails.FORWARD_INVITE,
user=unreg_user,
referrer=referrer,
claim_url=unreg_user.get_claim_url(project._id, external=True),
email=real_email.lower().strip(),
fullname=unreg_user.get_unclaimed_record(project._id)['name'],
node=project,
branded_service_name=None
)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_email_before_throttle_expires(self, send_mail):
project = ProjectFactory()
given_email = fake.email()
unreg_user = project.add_unregistered_contributor(
fullname=fake.name(),
email=given_email,
auth=Auth(project.creator),
)
project.save()
send_claim_email(email=fake.email(), unclaimed_user=unreg_user, node=project)
send_mail.reset_mock()
# 2nd call raises error because throttle hasn't expired
with assert_raises(HTTPError):
send_claim_email(email=fake.email(), unclaimed_user=unreg_user, node=project)
assert_false(send_mail.called)
class TestClaimViews(OsfTestCase):
def setUp(self):
super(TestClaimViews, self).setUp()
self.referrer = AuthUserFactory()
self.project = ProjectFactory(creator=self.referrer, is_public=True)
self.given_name = fake.name()
self.given_email = fake.email()
self.user = self.project.add_unregistered_contributor(
fullname=self.given_name,
email=self.given_email,
auth=Auth(user=self.referrer)
)
self.project.save()
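    # These tests exercise the unregistered-contributor claim flow via
    # /user/<uid>/<pid>/claim/ and /user/<uid>/<pid>/claim/verify/<token>/.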
@mock.patch('website.project.views.contributor.send_claim_email')
def test_claim_user_already_registered_redirects_to_claim_user_registered(self, claim_email):
name = fake.name()
email = fake.email()
# project contributor adds an unregistered contributor (without an email) on public project
unregistered_user = self.project.add_unregistered_contributor(
fullname=name,
email=None,
auth=Auth(user=self.referrer)
)
assert_in(unregistered_user, self.project.contributors)
# unregistered user comes along and claims themselves on the public project, entering an email
invite_url = self.project.api_url_for('claim_user_post', uid='undefined')
self.app.post_json(invite_url, {
'pk': unregistered_user._primary_key,
'value': email
})
assert_equal(claim_email.call_count, 1)
# set unregistered record email since we are mocking send_claim_email()
unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key)
unclaimed_record.update({'email': email})
unregistered_user.save()
# unregistered user then goes and makes an account with same email, before claiming themselves as contributor
UserFactory(username=email, fullname=name)
# claim link for the now registered email is accessed while not logged in
token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token']
claim_url = '/user/{uid}/{pid}/claim/?token={token}'.format(
uid=unregistered_user._id,
pid=self.project._id,
token=token
)
res = self.app.get(claim_url)
# should redirect to 'claim_user_registered' view
claim_registered_url = '/user/{uid}/{pid}/claim/verify/{token}/'.format(
uid=unregistered_user._id,
pid=self.project._id,
token=token
)
assert_equal(res.status_code, 302)
assert_in(claim_registered_url, res.headers.get('Location'))
@mock.patch('website.project.views.contributor.send_claim_email')
def test_claim_user_already_registered_secondary_email_redirects_to_claim_user_registered(self, claim_email):
name = fake.name()
email = fake.email()
secondary_email = fake.email()
# project contributor adds an unregistered contributor (without an email) on public project
unregistered_user = self.project.add_unregistered_contributor(
fullname=name,
email=None,
auth=Auth(user=self.referrer)
)
assert_in(unregistered_user, self.project.contributors)
# unregistered user comes along and claims themselves on the public project, entering an email
invite_url = self.project.api_url_for('claim_user_post', uid='undefined')
self.app.post_json(invite_url, {
'pk': unregistered_user._primary_key,
'value': secondary_email
})
assert_equal(claim_email.call_count, 1)
# set unregistered record email since we are mocking send_claim_email()
unclaimed_record = unregistered_user.get_unclaimed_record(self.project._primary_key)
unclaimed_record.update({'email': secondary_email})
unregistered_user.save()
# unregistered user then goes and makes an account with same email, before claiming themselves as contributor
registered_user = UserFactory(username=email, fullname=name)
registered_user.emails.append(secondary_email)
registered_user.save()
# claim link for the now registered email is accessed while not logged in
token = unregistered_user.get_unclaimed_record(self.project._primary_key)['token']
claim_url = '/user/{uid}/{pid}/claim/?token={token}'.format(
uid=unregistered_user._id,
pid=self.project._id,
token=token
)
res = self.app.get(claim_url)
# should redirect to 'claim_user_registered' view
claim_registered_url = '/user/{uid}/{pid}/claim/verify/{token}/'.format(
uid=unregistered_user._id,
pid=self.project._id,
token=token
)
assert_equal(res.status_code, 302)
assert_in(claim_registered_url, res.headers.get('Location'))
def test_claim_user_invited_with_no_email_posts_to_claim_form(self):
given_name = fake.name()
invited_user = self.project.add_unregistered_contributor(
fullname=given_name,
email=None,
auth=Auth(user=self.referrer)
)
self.project.save()
url = invited_user.get_claim_url(self.project._primary_key)
res = self.app.post(url, {
'password': 'bohemianrhap',
'password2': 'bohemianrhap'
}, expect_errors=True)
assert_equal(res.status_code, 400)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_claim_user_post_with_registered_user_id(self, send_mail):
# registered user who is attempting to claim the unclaimed contributor
reg_user = UserFactory()
payload = {
# pk of unreg user record
'pk': self.user._primary_key,
'claimerId': reg_user._primary_key
}
url = '/api/v1/user/{uid}/{pid}/claim/email/'.format(
uid=self.user._primary_key,
pid=self.project._primary_key,
)
res = self.app.post_json(url, payload)
# mail was sent
assert_equal(send_mail.call_count, 2)
# ... to the correct address
referrer_call = send_mail.call_args_list[0]
claimer_call = send_mail.call_args_list[1]
args, _ = referrer_call
assert_equal(args[0], self.referrer.username)
args, _ = claimer_call
assert_equal(args[0], reg_user.username)
# view returns the correct JSON
assert_equal(res.json, {
'status': 'success',
'email': reg_user.username,
'fullname': self.given_name,
})
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_registered_email(self, mock_send_mail):
reg_user = UserFactory()
send_claim_registered_email(
claimer=reg_user,
unclaimed_user=self.user,
node=self.project
)
assert_equal(mock_send_mail.call_count, 2)
first_call_args = mock_send_mail.call_args_list[0][0]
assert_equal(first_call_args[0], self.referrer.username)
second_call_args = mock_send_mail.call_args_list[1][0]
assert_equal(second_call_args[0], reg_user.username)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_send_claim_registered_email_before_throttle_expires(self, mock_send_mail):
reg_user = UserFactory()
send_claim_registered_email(
claimer=reg_user,
unclaimed_user=self.user,
node=self.project,
)
mock_send_mail.reset_mock()
# second call raises error because it was called before throttle period
with assert_raises(HTTPError):
send_claim_registered_email(
claimer=reg_user,
unclaimed_user=self.user,
node=self.project,
)
assert_false(mock_send_mail.called)
@mock.patch('website.project.views.contributor.send_claim_registered_email')
def test_claim_user_post_with_email_already_registered_sends_correct_email(
self, send_claim_registered_email):
reg_user = UserFactory()
payload = {
'value': reg_user.username,
'pk': self.user._primary_key
}
url = self.project.api_url_for('claim_user_post', uid=self.user._id)
self.app.post_json(url, payload)
assert_true(send_claim_registered_email.called)
def test_user_with_removed_unclaimed_url_claiming(self):
""" Tests that when an unclaimed user is removed from a project, the
unregistered user object does not retain the token.
"""
self.project.remove_contributor(self.user, Auth(user=self.referrer))
assert_not_in(
self.project._primary_key,
self.user.unclaimed_records.keys()
)
def test_user_with_claim_url_cannot_claim_twice(self):
""" Tests that when an unclaimed user is replaced on a project with a
claimed user, the unregistered user object does not retain the token.
"""
reg_user = AuthUserFactory()
self.project.replace_contributor(self.user, reg_user)
assert_not_in(
self.project._primary_key,
self.user.unclaimed_records.keys()
)
def test_claim_user_form_redirects_to_password_confirm_page_if_user_is_logged_in(self):
reg_user = AuthUserFactory()
url = self.user.get_claim_url(self.project._primary_key)
res = self.app.get(url, auth=reg_user.auth)
assert_equal(res.status_code, 302)
res = res.follow(auth=reg_user.auth)
token = self.user.get_unclaimed_record(self.project._primary_key)['token']
expected = self.project.web_url_for(
'claim_user_registered',
uid=self.user._id,
token=token,
)
assert_equal(res.request.path, expected)
def test_get_valid_form(self):
url = self.user.get_claim_url(self.project._primary_key)
res = self.app.get(url).maybe_follow()
assert_equal(res.status_code, 200)
def test_invalid_claim_form_raise_400(self):
uid = self.user._primary_key
pid = self.project._primary_key
url = '/user/{uid}/{pid}/claim/?token=badtoken'.format(**locals())
res = self.app.get(url, expect_errors=True).maybe_follow()
assert_equal(res.status_code, 400)
@mock.patch('framework.auth.core.User.update_search_nodes')
def test_posting_to_claim_form_with_valid_data(self, mock_update_search_nodes):
url = self.user.get_claim_url(self.project._primary_key)
res = self.app.post(url, {
'username': self.user.username,
'password': 'killerqueen',
'password2': 'killerqueen'
})
assert_equal(res.status_code, 302)
location = res.headers.get('Location')
assert_in('login?service=', location)
assert_in('username', location)
assert_in('verification_key', location)
assert_in(self.project._primary_key, location)
self.user.reload()
assert_true(self.user.is_registered)
assert_true(self.user.is_active)
assert_not_in(self.project._primary_key, self.user.unclaimed_records)
@mock.patch('framework.auth.core.User.update_search_nodes')
def test_posting_to_claim_form_removes_all_unclaimed_data(self, mock_update_search_nodes):
# user has multiple unclaimed records
p2 = ProjectFactory(creator=self.referrer)
self.user.add_unclaimed_record(node=p2, referrer=self.referrer,
given_name=fake.name())
self.user.save()
assert_true(len(self.user.unclaimed_records.keys()) > 1) # sanity check
url = self.user.get_claim_url(self.project._primary_key)
self.app.post(url, {
'username': self.given_email,
'password': 'bohemianrhap',
'password2': 'bohemianrhap'
})
self.user.reload()
assert_equal(self.user.unclaimed_records, {})
@mock.patch('framework.auth.core.User.update_search_nodes')
def test_posting_to_claim_form_sets_fullname_to_given_name(self, mock_update_search_nodes):
# User is created with a full name
original_name = fake.name()
unreg = UnregUserFactory(fullname=original_name)
# User invited with a different name
different_name = fake.name()
new_user = self.project.add_unregistered_contributor(
email=unreg.username,
fullname=different_name,
auth=Auth(self.project.creator),
)
self.project.save()
# Goes to claim url
claim_url = new_user.get_claim_url(self.project._id)
self.app.post(claim_url, {
'username': unreg.username,
'password': 'killerqueen', 'password2': 'killerqueen'
})
unreg.reload()
# Full name was set correctly
assert_equal(unreg.fullname, different_name)
# CSL names were set correctly
parsed_name = impute_names_model(different_name)
assert_equal(unreg.given_name, parsed_name['given_name'])
assert_equal(unreg.family_name, parsed_name['family_name'])
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_claim_user_post_returns_fullname(self, send_mail):
url = '/api/v1/user/{0}/{1}/claim/email/'.format(self.user._primary_key,
self.project._primary_key)
res = self.app.post_json(url,
{'value': self.given_email,
'pk': self.user._primary_key},
auth=self.referrer.auth)
assert_equal(res.json['fullname'], self.given_name)
assert_true(send_mail.called)
        # called_with is a no-op on a Mock; verify the recipient from the
        # recorded call args
        call_args, call_kwargs = send_mail.call_args
        assert_equal(call_kwargs.get('to_addr') or call_args[0], self.given_email)
@mock.patch('website.project.views.contributor.mails.send_mail')
def test_claim_user_post_if_email_is_different_from_given_email(self, send_mail):
email = fake.email() # email that is different from the one the referrer gave
url = '/api/v1/user/{0}/{1}/claim/email/'.format(self.user._primary_key,
self.project._primary_key)
self.app.post_json(url,
{'value': email, 'pk': self.user._primary_key}
)
assert_true(send_mail.called)
assert_equal(send_mail.call_count, 2)
        # call.called_with(...) is a no-op; unpack the recorded calls and
        # check each recipient instead
        invited_args, invited_kwargs = send_mail.call_args_list[0]
        assert_equal(invited_kwargs.get('to_addr') or invited_args[0], email)
        referrer_args, referrer_kwargs = send_mail.call_args_list[1]
        assert_equal(referrer_kwargs.get('to_addr') or referrer_args[0], self.given_email)
def test_claim_url_with_bad_token_returns_400(self):
url = self.project.web_url_for(
'claim_user_registered',
uid=self.user._id,
token='badtoken',
)
        res = self.app.get(url, auth=self.referrer.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_cannot_claim_user_with_user_who_is_already_contributor(self):
        # user who is already a contributor to the project
contrib = AuthUserFactory()
self.project.add_contributor(contrib, auth=Auth(self.project.creator))
self.project.save()
# Claiming user goes to claim url, but contrib is already logged in
url = self.user.get_claim_url(self.project._primary_key)
res = self.app.get(
url,
auth=contrib.auth,
).follow(
auth=contrib.auth,
expect_errors=True,
)
# Response is a 400
assert_equal(res.status_code, 400)
@pytest.mark.skip('Watching no longer supported')
class TestWatchViews(OsfTestCase):
def setUp(self):
super(TestWatchViews, self).setUp()
self.user = AuthUserFactory()
self.consolidate_auth = Auth(user=self.user)
self.auth = self.user.auth # used for requests auth
# A public project
self.project = ProjectFactory(is_public=True)
self.project.save()
# Manually reset log date to 100 days ago so it won't show up in feed
latest_log = self.project.logs.latest()
latest_log.date = timezone.now() - dt.timedelta(days=100)
latest_log.save()
# A log added now
self.last_log = self.project.add_log(
NodeLog.TAG_ADDED,
params={'node': self.project._primary_key},
auth=self.consolidate_auth,
log_date=timezone.now(),
save=True,
)
# Clear watched list
WatchConfig = apps.get_model('osf.WatchConfig')
WatchConfig.objects.filter(user=self.user).delete()
def test_watching_a_project_appends_to_users_watched_list(self):
n_watched_then = self.user.watched.count()
url = '/api/v1/project/{0}/watch/'.format(self.project._id)
res = self.app.post_json(url,
params={'digest': True},
auth=self.auth)
assert_equal(res.json['watchCount'], 1)
self.user.reload()
n_watched_now = self.user.watched.count()
assert_equal(res.status_code, 200)
assert_equal(n_watched_now, n_watched_then + 1)
assert_true(self.user.watched.last().digest)
def test_watching_project_twice_returns_400(self):
url = '/api/v1/project/{0}/watch/'.format(self.project._id)
res = self.app.post_json(url,
params={},
auth=self.auth)
assert_equal(res.status_code, 200)
# User tries to watch a node she's already watching
res2 = self.app.post_json(url,
params={},
auth=self.auth,
expect_errors=True)
assert_equal(res2.status_code, http.BAD_REQUEST)
def test_unwatching_a_project_removes_from_watched_list(self):
# The user has already watched a project
watch_config = WatchConfigFactory(node=self.project)
self.user.watch(watch_config)
self.user.save()
n_watched_then = len(self.user.watched)
url = '/api/v1/project/{0}/unwatch/'.format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
self.user.reload()
n_watched_now = len(self.user.watched)
assert_equal(res.status_code, 200)
assert_equal(n_watched_now, n_watched_then - 1)
assert_false(self.user.is_watching(self.project))
def test_toggle_watch(self):
# The user is not watching project
assert_false(self.user.is_watching(self.project))
url = '/api/v1/project/{0}/togglewatch/'.format(self.project._id)
res = self.app.post_json(url, {}, auth=self.auth)
# The response json has a watchcount and watched property
assert_equal(res.json['watchCount'], 1)
assert_true(res.json['watched'])
assert_equal(res.status_code, 200)
self.user.reload()
# The user is now watching the project
assert_true(res.json['watched'])
assert_true(self.user.is_watching(self.project))
def test_toggle_watch_node(self):
# The project has a public sub-node
node = NodeFactory(creator=self.user, parent=self.project, is_public=True)
url = "/api/v1/project/{}/node/{}/togglewatch/".format(self.project._id,
node._id)
res = self.app.post_json(url, {}, auth=self.auth)
assert_equal(res.status_code, 200)
self.user.reload()
# The user is now watching the sub-node
assert_true(res.json['watched'])
assert_true(self.user.is_watching(node))
class TestPointerViews(OsfTestCase):
def setUp(self):
super(TestPointerViews, self).setUp()
self.user = AuthUserFactory()
self.consolidate_auth = Auth(user=self.user)
self.project = ProjectFactory(creator=self.user)
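    # Helper: create a project that only `user` can see and add it to
    # `project` as a pointer, so other contributors get a "Private Link"
    # entry.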
def _make_pointer_only_user_can_see(self, user, project, save=False):
node = ProjectFactory(creator=user)
project.add_pointer(node, auth=Auth(user=user), save=save)
def test_pointer_list_write_contributor_can_remove_private_component_entry(self):
"""Ensure that write contributors see the button to delete a pointer,
even if they cannot see what it is pointing at"""
url = web_url_for('view_project', pid=self.project._id)
user2 = AuthUserFactory()
self.project.add_contributor(user2,
auth=Auth(self.project.creator),
permissions=permissions.DEFAULT_CONTRIBUTOR_PERMISSIONS)
self._make_pointer_only_user_can_see(user2, self.project)
self.project.save()
res = self.app.get(url, auth=self.user.auth).maybe_follow()
assert_equal(res.status_code, 200)
has_controls = res.lxml.xpath('//li[@node_id]/p[starts-with(normalize-space(text()), "Private Link")]//i[contains(@class, "remove-pointer")]')
assert_true(has_controls)
def test_pointer_list_write_contributor_can_remove_public_component_entry(self):
url = web_url_for('view_project', pid=self.project._id)
        for _ in xrange(3):
self.project.add_pointer(ProjectFactory(creator=self.user),
auth=Auth(user=self.user))
self.project.save()
res = self.app.get(url, auth=self.user.auth).maybe_follow()
assert_equal(res.status_code, 200)
has_controls = res.lxml.xpath(
'//li[@node_id]//i[contains(@class, "remove-pointer")]')
assert_equal(len(has_controls), 3)
def test_pointer_list_read_contributor_cannot_remove_private_component_entry(self):
url = web_url_for('view_project', pid=self.project._id)
user2 = AuthUserFactory()
self.project.add_contributor(user2,
auth=Auth(self.project.creator),
permissions=[permissions.READ])
self._make_pointer_only_user_can_see(user2, self.project)
self.project.save()
res = self.app.get(url, auth=user2.auth).maybe_follow()
assert_equal(res.status_code, 200)
pointer_nodes = res.lxml.xpath('//li[@node_id]')
has_controls = res.lxml.xpath('//li[@node_id]/p[starts-with(normalize-space(text()), "Private Link")]//i[contains(@class, "remove-pointer")]')
assert_equal(len(pointer_nodes), 1)
assert_false(has_controls)
def test_pointer_list_read_contributor_cannot_remove_public_component_entry(self):
url = web_url_for('view_project', pid=self.project._id)
self.project.add_pointer(ProjectFactory(creator=self.user,
is_public=True),
auth=Auth(user=self.user))
user2 = AuthUserFactory()
self.project.add_contributor(user2,
auth=Auth(self.project.creator),
permissions=[permissions.READ])
self.project.save()
res = self.app.get(url, auth=user2.auth).maybe_follow()
assert_equal(res.status_code, 200)
pointer_nodes = res.lxml.xpath('//li[@node_id]')
has_controls = res.lxml.xpath(
'//li[@node_id]//i[contains(@class, "remove-pointer")]')
assert_equal(len(pointer_nodes), 1)
assert_equal(len(has_controls), 0)
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1109
def test_get_pointed_excludes_folders(self):
pointer_project = ProjectFactory(is_public=True) # project that points to another project
pointed_project = ProjectFactory(creator=self.user) # project that other project points to
pointer_project.add_pointer(pointed_project, Auth(pointer_project.creator), save=True)
# Project is in an organizer collection
collection = CollectionFactory(creator=pointed_project.creator)
collection.add_pointer(pointed_project, Auth(pointed_project.creator), save=True)
url = pointed_project.api_url_for('get_pointed')
res = self.app.get(url, auth=self.user.auth)
assert_equal(res.status_code, 200)
# pointer_project's id is included in response, but folder's id is not
pointer_ids = [each['id'] for each in res.json['pointed']]
assert_in(pointer_project._id, pointer_ids)
assert_not_in(collection._id, pointer_ids)
def test_add_pointers(self):
url = self.project.api_url + 'pointer/'
node_ids = [
NodeFactory()._id
for _ in range(5)
]
self.app.post_json(
url,
{'nodeIds': node_ids},
auth=self.user.auth,
).maybe_follow()
self.project.reload()
assert_equal(
self.project.nodes_active.count(),
5
)
def test_add_the_same_pointer_more_than_once(self):
url = self.project.api_url + 'pointer/'
double_node = NodeFactory()
self.app.post_json(
url,
{'nodeIds': [double_node._id]},
auth=self.user.auth,
)
res = self.app.post_json(
url,
{'nodeIds': [double_node._id]},
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
    def test_add_pointers_no_user_logged_in(self):
url = self.project.api_url_for('add_pointers')
node_ids = [
NodeFactory()._id
for _ in range(5)
]
res = self.app.post_json(
url,
{'nodeIds': node_ids},
auth=None,
expect_errors=True
)
assert_equal(res.status_code, 401)
def test_add_pointers_public_non_contributor(self):
project2 = ProjectFactory()
project2.set_privacy('public')
project2.save()
url = self.project.api_url_for('add_pointers')
self.app.post_json(
url,
{'nodeIds': [project2._id]},
auth=self.user.auth,
).maybe_follow()
self.project.reload()
assert_equal(
self.project.nodes_active.count(),
1
)
def test_add_pointers_contributor(self):
user2 = AuthUserFactory()
self.project.add_contributor(user2)
self.project.save()
url = self.project.api_url_for('add_pointers')
node_ids = [
NodeFactory()._id
for _ in range(5)
]
self.app.post_json(
url,
{'nodeIds': node_ids},
auth=user2.auth,
).maybe_follow()
self.project.reload()
assert_equal(
self.project.linked_nodes.count(),
5
)
def test_add_pointers_not_provided(self):
url = self.project.api_url + 'pointer/'
res = self.app.post_json(url, {}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_move_pointers(self):
project_two = ProjectFactory(creator=self.user)
url = api_url_for('move_pointers')
node = NodeFactory()
pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
assert_equal(self.project.linked_nodes.count(), 1)
assert_equal(project_two.linked_nodes.count(), 0)
user_auth = self.user.auth
        move_request = {
            'fromNodeId': self.project._id,
            'toNodeId': project_two._id,
            'pointerIds': [pointer._id],
        }
self.app.post_json(
url,
move_request,
auth=user_auth,
).maybe_follow()
self.project.reload()
project_two.reload()
assert_equal(self.project.linked_nodes.count(), 0)
assert_equal(project_two.linked_nodes.count(), 1)
def test_remove_pointer(self):
url = self.project.api_url + 'pointer/'
node = NodeFactory()
pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
self.app.delete_json(
url,
{'pointerId': pointer.node._id},
auth=self.user.auth,
)
self.project.reload()
assert_equal(
len(list(self.project.nodes)),
0
)
def test_remove_pointer_not_provided(self):
url = self.project.api_url + 'pointer/'
res = self.app.delete_json(url, {}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_remove_pointer_not_found(self):
url = self.project.api_url + 'pointer/'
res = self.app.delete_json(
url,
{'pointerId': None},
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_remove_pointer_not_in_nodes(self):
url = self.project.api_url + 'pointer/'
node = NodeFactory()
pointer = Pointer()
res = self.app.delete_json(
url,
{'pointerId': pointer._id},
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_fork_pointer(self):
url = self.project.api_url + 'pointer/fork/'
node = NodeFactory(creator=self.user)
pointer = self.project.add_pointer(node, auth=self.consolidate_auth)
self.app.post_json(
url,
{'pointerId': pointer._id},
auth=self.user.auth
)
def test_fork_pointer_not_provided(self):
url = self.project.api_url + 'pointer/fork/'
res = self.app.post_json(url, {}, auth=self.user.auth,
expect_errors=True)
assert_equal(res.status_code, 400)
def test_fork_pointer_not_found(self):
url = self.project.api_url + 'pointer/fork/'
res = self.app.post_json(
url,
{'pointerId': None},
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
def test_fork_pointer_not_in_nodes(self):
url = self.project.api_url + 'pointer/fork/'
node = NodeFactory()
pointer = Pointer()
res = self.app.post_json(
url,
{'pointerId': pointer._id},
auth=self.user.auth,
expect_errors=True
)
assert_equal(res.status_code, 400)
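    # 'fork/before/' is the before-fork callback and 'beforeregister/' the
    # before-register callback; both return JSON 'prompts' warning that
    # links will be copied.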
    def test_before_fork_with_pointer(self):
        # Assert that the link warning appears in the before-fork callback.
node = NodeFactory()
self.project.add_pointer(node, auth=self.consolidate_auth)
url = self.project.api_url + 'fork/before/'
res = self.app.get(url, auth=self.user.auth).maybe_follow()
prompts = [
prompt
for prompt in res.json['prompts']
if 'Links will be copied into your fork' in prompt
]
assert_equal(len(prompts), 1)
    def test_before_register_with_pointer(self):
        """Assert that the link warning appears in the before-register callback."""
node = NodeFactory()
self.project.add_pointer(node, auth=self.consolidate_auth)
url = self.project.api_url + 'beforeregister/'
res = self.app.get(url, auth=self.user.auth).maybe_follow()
prompts = [
prompt
for prompt in res.json['prompts']
if 'Links will be copied into your registration' in prompt
]
assert_equal(len(prompts), 1)
    def test_before_fork_no_pointer(self):
        """Assert that the link warning does not appear in the before-fork callback."""
url = self.project.api_url + 'fork/before/'
res = self.app.get(url, auth=self.user.auth).maybe_follow()
prompts = [
prompt
for prompt in res.json['prompts']
if 'Links will be copied into your fork' in prompt
]
assert_equal(len(prompts), 0)
    def test_before_register_no_pointer(self):
        """Assert that the link warning does not appear in the before-register callback."""
url = self.project.api_url + 'beforeregister/'
res = self.app.get(url, auth=self.user.auth).maybe_follow()
prompts = [
prompt
for prompt in res.json['prompts']
if 'Links will be copied into your registration' in prompt
]
assert_equal(len(prompts), 0)
def test_get_pointed(self):
pointing_node = ProjectFactory(creator=self.user)
pointing_node.add_pointer(self.project, auth=Auth(self.user))
url = self.project.api_url_for('get_pointed')
res = self.app.get(url, auth=self.user.auth)
pointed = res.json['pointed']
assert_equal(len(pointed), 1)
assert_equal(pointed[0]['url'], pointing_node.url)
assert_equal(pointed[0]['title'], pointing_node.title)
assert_equal(pointed[0]['authorShort'], abbrev_authors(pointing_node))
def test_get_pointed_private(self):
secret_user = UserFactory()
pointing_node = ProjectFactory(creator=secret_user)
pointing_node.add_pointer(self.project, auth=Auth(secret_user))
url = self.project.api_url_for('get_pointed')
res = self.app.get(url, auth=self.user.auth)
pointed = res.json['pointed']
assert_equal(len(pointed), 1)
assert_equal(pointed[0]['url'], None)
assert_equal(pointed[0]['title'], 'Private Component')
assert_equal(pointed[0]['authorShort'], 'Private Author(s)')
class TestPublicViews(OsfTestCase):
def test_explore(self):
res = self.app.get("/explore/").maybe_follow()
assert_equal(res.status_code, 200)
class TestAuthViews(OsfTestCase):
def setUp(self):
super(TestAuthViews, self).setUp()
self.user = AuthUserFactory()
self.auth = self.user.auth
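    # Registration tests POST to the 'register_user' endpoint; mocking
    # mails.send_mail keeps confirmation emails from actually being sent.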
@mock.patch('framework.auth.views.mails.send_mail')
def test_register_ok(self, _):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': email,
'password': password,
}
)
user = User.find_one(Q('username', 'eq', email))
assert_equal(user.fullname, name)
# Regression test for https://github.com/CenterForOpenScience/osf.io/issues/2902
@mock.patch('framework.auth.views.mails.send_mail')
def test_register_email_case_insensitive(self, _):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': str(email).upper(),
'password': password,
}
)
user = User.find_one(Q('username', 'eq', email))
assert_equal(user.fullname, name)
@mock.patch('framework.auth.views.send_confirm_email')
def test_register_scrubs_username(self, _):
url = api_url_for('register_user')
name = "<i>Eunice</i> O' \"Cornwallis\"<script type='text/javascript' src='http://www.cornify.com/js/cornify.js'></script><script type='text/javascript'>cornify_add()</script>"
email, password = fake.email(), 'underpressure'
res = self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': email,
'password': password,
}
)
expected_scrub_username = "Eunice O' \"Cornwallis\"cornify_add()"
user = User.find_one(Q('username', 'eq', email))
assert_equal(res.status_code, http.OK)
assert_equal(user.fullname, expected_scrub_username)
def test_register_email_mismatch(self):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
res = self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': email + 'lol',
'password': password,
},
expect_errors=True,
)
assert_equal(res.status_code, http.BAD_REQUEST)
users = User.find(Q('username', 'eq', email))
assert_equal(users.count(), 0)
def test_register_blacklisted_email_domain(self):
url = api_url_for('register_user')
name, email, password = fake.name(), '[email protected]', 'agreatpasswordobviously'
res = self.app.post_json(
url, {
'fullName': name,
'email1': email,
'email2': email,
'password': password
},
expect_errors=True
)
assert_equal(res.status_code, http.BAD_REQUEST)
users = User.find(Q('username', 'eq', email))
assert_equal(users.count(), 0)
@mock.patch('framework.auth.views.validate_recaptcha', return_value=True)
@mock.patch('framework.auth.views.mails.send_mail')
def test_register_good_captcha(self, _, validate_recaptcha):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
captcha = 'some valid captcha'
with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'):
resp = self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': str(email).upper(),
'password': password,
'g-recaptcha-response': captcha,
}
)
validate_recaptcha.assert_called_with(captcha, remote_ip=None)
assert_equal(resp.status_code, http.OK)
user = User.find_one(Q('username', 'eq', email))
assert_equal(user.fullname, name)
@mock.patch('framework.auth.views.validate_recaptcha', return_value=False)
@mock.patch('framework.auth.views.mails.send_mail')
def test_register_missing_captcha(self, _, validate_recaptcha):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'):
resp = self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': str(email).upper(),
'password': password,
# 'g-recaptcha-response': 'supposed to be None',
},
expect_errors=True
)
validate_recaptcha.assert_called_with(None, remote_ip=None)
assert_equal(resp.status_code, http.BAD_REQUEST)
@mock.patch('framework.auth.views.validate_recaptcha', return_value=False)
@mock.patch('framework.auth.views.mails.send_mail')
def test_register_bad_captcha(self, _, validate_recaptcha):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
with mock.patch.object(settings, 'RECAPTCHA_SITE_KEY', 'some_value'):
resp = self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': str(email).upper(),
'password': password,
'g-recaptcha-response': 'bad captcha',
},
expect_errors=True
)
assert_equal(resp.status_code, http.BAD_REQUEST)
@mock.patch('framework.auth.core.User.update_search_nodes')
def test_register_after_being_invited_as_unreg_contributor(self, mock_update_search_nodes):
# Regression test for:
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/861
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1021
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/1026
# A user is invited as an unregistered contributor
project = ProjectFactory()
name, email = fake.name(), fake.email()
project.add_unregistered_contributor(fullname=name, email=email, auth=Auth(project.creator))
project.save()
# The new, unregistered user
new_user = User.find_one(Q('username', 'eq', email))
# Instead of following the invitation link, they register at the regular
# registration page
# They use a different name when they register, but same email
real_name = fake.name()
password = 'myprecious'
url = api_url_for('register_user')
payload = {
'fullName': real_name,
'email1': email,
'email2': email,
'password': password,
}
# Send registration request
self.app.post_json(url, payload)
new_user.reload()
# New user confirms by following confirmation link
confirm_url = new_user.get_confirmation_url(email, external=False)
self.app.get(confirm_url)
new_user.reload()
# Password and fullname should be updated
assert_true(new_user.is_confirmed)
assert_true(new_user.check_password(password))
assert_equal(new_user.fullname, real_name)
@mock.patch('framework.auth.views.send_confirm_email')
def test_register_sends_user_registered_signal(self, mock_send_confirm_email):
url = api_url_for('register_user')
name, email, password = fake.name(), fake.email(), 'underpressure'
with capture_signals() as mock_signals:
self.app.post_json(
url,
{
'fullName': name,
'email1': email,
'email2': email,
'password': password,
}
)
assert_equal(mock_signals.signals_sent(), set([auth.signals.user_registered,
auth.signals.unconfirmed_user_created]))
assert_true(mock_send_confirm_email.called)
@mock.patch('framework.auth.views.mails.send_mail')
def test_resend_confirmation(self, send_mail):
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.save()
url = api_url_for('resend_confirmation')
header = {'address': email, 'primary': False, 'confirmed': False}
self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth)
assert_true(send_mail.called)
        # called_with is a no-op on a Mock; verify the recipient from the
        # recorded call args
        call_args, call_kwargs = send_mail.call_args
        assert_equal(call_kwargs.get('to_addr') or call_args[0], email)
self.user.reload()
assert_not_equal(token, self.user.get_confirmation_token(email))
with assert_raises(InvalidTokenError):
self.user.get_unconfirmed_email_for_token(token)
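    # Confirmation links have the form /confirm/<uid>/<token>/; the
    # ?logout=1 query parameter forces a logged-out confirmation flow.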
@mock.patch('framework.auth.views.mails.send_mail')
def test_click_confirmation_email(self, send_mail):
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], False)
        url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
res = self.app.get(url)
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], True)
assert_equal(res.status_code, 302)
login_url = 'login?service'
assert_in(login_url, res.body)
def test_get_email_to_add_no_email(self):
email_verifications = self.user.unconfirmed_email_info
assert_equal(email_verifications, [])
def test_get_unconfirmed_email(self):
email = '[email protected]'
self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
email_verifications = self.user.unconfirmed_email_info
assert_equal(email_verifications, [])
def test_get_email_to_add(self):
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], False)
        url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
self.app.get(url)
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], True)
email_verifications = self.user.unconfirmed_email_info
assert_equal(email_verifications[0]['address'], '[email protected]')
def test_add_email(self):
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], False)
url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
self.app.get(url)
self.user.reload()
email_verifications = self.user.unconfirmed_email_info
put_email_url = api_url_for('unconfirmed_email_add')
res = self.app.put_json(put_email_url, email_verifications[0], auth=self.user.auth)
self.user.reload()
assert_equal(res.json_body['status'], 'success')
assert_equal(self.user.emails[1], '[email protected]')
def test_remove_email(self):
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
self.app.get(url)
self.user.reload()
email_verifications = self.user.unconfirmed_email_info
remove_email_url = api_url_for('unconfirmed_email_remove')
remove_res = self.app.delete_json(remove_email_url, email_verifications[0], auth=self.user.auth)
self.user.reload()
assert_equal(remove_res.json_body['status'], 'success')
assert_equal(self.user.unconfirmed_email_info, [])
def test_add_expired_email(self):
# Do not return expired token and removes it from user.email_verifications
email = '[email protected]'
token = self.user.add_unconfirmed_email(email)
self.user.email_verifications[token]['expiration'] = timezone.now() - dt.timedelta(days=100)
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['email'], email)
self.user.clean_email_verifications(given_token=token)
unconfirmed_emails = self.user.unconfirmed_email_info
assert_equal(unconfirmed_emails, [])
assert_equal(self.user.email_verifications, {})
def test_clean_email_verifications(self):
        # Bad tokens are not returned and are removed from user.email_verifications
email = '[email protected]'
token = 'blahblahblah'
self.user.email_verifications[token] = {'expiration': timezone.now() + dt.timedelta(days=1),
'email': email,
'confirmed': False }
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['email'], email)
self.user.clean_email_verifications(given_token=token)
unconfirmed_emails = self.user.unconfirmed_email_info
assert_equal(unconfirmed_emails, [])
assert_equal(self.user.email_verifications, {})
def test_clean_email_verifications_when_email_verifications_is_an_empty_dict(self):
self.user.email_verifications = {}
self.user.save()
ret = self.user.clean_email_verifications()
assert_equal(ret, None)
assert_equal(self.user.email_verifications, {})
def test_add_invalid_email(self):
        # Emails containing illegal characters are rejected with a ValidationError
email = u'\u0000\u0008\u000b\u000c\u000e\u001f\ufffe\[email protected]'
# illegal_str = u'\u0000\u0008\u000b\u000c\u000e\u001f\ufffe\uffffHello'
# illegal_str += unichr(0xd800) + unichr(0xdbff) + ' World'
# email = '[email protected]'
with assert_raises(ValidationError):
self.user.add_unconfirmed_email(email)
def test_add_email_merge(self):
email = "[email protected]"
dupe = UserFactory(
username=email,
emails=[email]
)
dupe.save()
token = self.user.add_unconfirmed_email(email)
self.user.save()
self.user.reload()
assert_equal(self.user.email_verifications[token]['confirmed'], False)
url = '/confirm/{}/{}/?logout=1'.format(self.user._id, token)
self.app.get(url)
self.user.reload()
email_verifications = self.user.unconfirmed_email_info
put_email_url = api_url_for('unconfirmed_email_add')
res = self.app.put_json(put_email_url, email_verifications[0], auth=self.user.auth)
self.user.reload()
assert_equal(res.json_body['status'], 'success')
assert_equal(self.user.emails[1], '[email protected]')
def test_resend_confirmation_without_user_id(self):
email = '[email protected]'
url = api_url_for('resend_confirmation')
header = {'address': email, 'primary': False, 'confirmed': False}
res = self.app.put_json(url, {'email': header}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
assert_equal(res.json['message_long'], '"id" is required')
def test_resend_confirmation_without_email(self):
url = api_url_for('resend_confirmation')
res = self.app.put_json(url, {'id': self.user._id}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
    def test_resend_confirmation_does_not_work_for_primary_email(self):
email = '[email protected]'
url = api_url_for('resend_confirmation')
header = {'address': email, 'primary': True, 'confirmed': False}
res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
        assert_equal(res.json['message_long'], 'Cannnot resend confirmation for confirmed emails')  # sic: matches the server-side message text
    def test_resend_confirmation_does_not_work_for_confirmed_email(self):
email = '[email protected]'
url = api_url_for('resend_confirmation')
header = {'address': email, 'primary': False, 'confirmed': True}
res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
        assert_equal(res.json['message_long'], 'Cannnot resend confirmation for confirmed emails')  # sic: matches the server-side message text
@mock.patch('framework.auth.views.mails.send_mail')
def test_resend_confirmation_does_not_send_before_throttle_expires(self, send_mail):
email = '[email protected]'
self.user.save()
url = api_url_for('resend_confirmation')
header = {'address': email, 'primary': False, 'confirmed': False}
self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth)
assert_true(send_mail.called)
# 2nd call does not send email because throttle period has not expired
res = self.app.put_json(url, {'id': self.user._id, 'email': header}, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_confirm_email_clears_unclaimed_records_and_revokes_token(self):
unclaimed_user = UnconfirmedUserFactory()
# unclaimed user has been invited to a project.
referrer = UserFactory()
project = ProjectFactory(creator=referrer)
unclaimed_user.add_unclaimed_record(project, referrer, 'foo')
unclaimed_user.save()
# sanity check
assert_equal(len(unclaimed_user.email_verifications.keys()), 1)
# user goes to email confirmation link
token = unclaimed_user.get_confirmation_token(unclaimed_user.username)
url = web_url_for('confirm_email_get', uid=unclaimed_user._id, token=token)
res = self.app.get(url)
assert_equal(res.status_code, 302)
# unclaimed records and token are cleared
unclaimed_user.reload()
assert_equal(unclaimed_user.unclaimed_records, {})
assert_equal(len(unclaimed_user.email_verifications.keys()), 0)
def test_confirmation_link_registers_user(self):
user = User.create_unconfirmed('[email protected]', 'bicycle123', 'Brian May')
assert_false(user.is_registered) # sanity check
user.save()
confirmation_url = user.get_confirmation_url('[email protected]', external=False)
res = self.app.get(confirmation_url)
assert_equal(res.status_code, 302, 'redirects to settings page')
res = res.follow()
user.reload()
assert_true(user.is_registered)
class TestAuthLoginAndRegisterLogic(OsfTestCase):
def setUp(self):
super(TestAuthLoginAndRegisterLogic, self).setUp()
self.no_auth = Auth()
self.user_auth = AuthUserFactory()
self.auth = Auth(user=self.user_auth)
self.next_url = web_url_for('my_projects', _absolute=True)
self.invalid_campaign = 'invalid_campaign'
def test_osf_login_with_auth(self):
# login: user with auth
data = login_and_register_handler(self.auth)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_osf_login_without_auth(self):
# login: user without auth
data = login_and_register_handler(self.no_auth)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_osf_register_with_auth(self):
# register: user with auth
data = login_and_register_handler(self.auth, login=False)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_osf_register_without_auth(self):
# register: user without auth
data = login_and_register_handler(self.no_auth, login=False)
assert_equal(data.get('status_code'), http.OK)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_next_url_login_with_auth(self):
# next_url login: user with auth
data = login_and_register_handler(self.auth, next_url=self.next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), self.next_url)
def test_next_url_login_without_auth(self):
        # next_url login: user without auth
request.url = web_url_for('auth_login', next=self.next_url, _absolute=True)
data = login_and_register_handler(self.no_auth, next_url=self.next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), get_login_url(request.url))
def test_next_url_register_with_auth(self):
        # next_url register: user with auth
data = login_and_register_handler(self.auth, login=False, next_url=self.next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), self.next_url)
def test_next_url_register_without_auth(self):
        # next_url register: user without auth
data = login_and_register_handler(self.no_auth, login=False, next_url=self.next_url)
assert_equal(data.get('status_code'), http.OK)
assert_equal(data.get('next_url'), request.url)
def test_institution_login_and_register(self):
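        # institution login/register behavior is exercised by the four tests below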
pass
def test_institution_login_with_auth(self):
# institution login: user with auth
data = login_and_register_handler(self.auth, campaign='institution')
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_institution_login_without_auth(self):
# institution login: user without auth
data = login_and_register_handler(self.no_auth, campaign='institution')
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(
data.get('next_url'),
get_login_url(web_url_for('dashboard', _absolute=True), campaign='institution'))
    def test_institution_register_with_auth(self):
# institution register: user with auth
data = login_and_register_handler(self.auth, login=False, campaign='institution')
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), web_url_for('dashboard', _absolute=True))
def test_institution_register_without_auth(self):
# institution register: user without auth
data = login_and_register_handler(self.no_auth, login=False, campaign='institution')
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(
data.get('next_url'),
get_login_url(web_url_for('dashboard', _absolute=True), campaign='institution')
)
def test_campaign_login_with_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign login: user with auth
data = login_and_register_handler(self.auth, campaign=campaign)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), campaign_url_for(campaign))
def test_campaign_login_without_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign login: user without auth
data = login_and_register_handler(self.no_auth, campaign=campaign)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(
data.get('next_url'),
web_url_for('auth_register', campaign=campaign, next=campaign_url_for(campaign))
)
def test_campaign_register_with_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign register: user with auth
data = login_and_register_handler(self.auth, login=False, campaign=campaign)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), campaign_url_for(campaign))
def test_campaign_register_without_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign register: user without auth
data = login_and_register_handler(self.no_auth, login=False, campaign=campaign)
assert_equal(data.get('status_code'), http.OK)
if is_native_login(campaign):
# native campaign: prereg and erpc
assert_equal(data.get('next_url'), campaign_url_for(campaign))
elif is_proxy_login(campaign):
# proxy campaign: preprints and branded ones
assert_equal(
data.get('next_url'),
web_url_for('auth_login', next=campaign_url_for(campaign), _absolute=True)
)
def test_campaign_next_url_login_with_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign login: user with auth
next_url = campaign_url_for(campaign)
data = login_and_register_handler(self.auth, campaign=campaign, next_url=next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), next_url)
def test_campaign_next_url_login_without_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign login: user without auth
next_url = campaign_url_for(campaign)
data = login_and_register_handler(self.no_auth, campaign=campaign, next_url=next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(
data.get('next_url'),
web_url_for('auth_register', campaign=campaign, next=next_url)
)
def test_campaign_next_url_register_with_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign register: user with auth
next_url = campaign_url_for(campaign)
data = login_and_register_handler(self.auth, login=False, campaign=campaign, next_url=next_url)
assert_equal(data.get('status_code'), http.FOUND)
assert_equal(data.get('next_url'), next_url)
def test_campaign_next_url_register_without_auth(self):
for campaign in get_campaigns():
if is_institution_login(campaign):
continue
# campaign register: user without auth
next_url = campaign_url_for(campaign)
data = login_and_register_handler(self.no_auth, login=False, campaign=campaign, next_url=next_url)
assert_equal(data.get('status_code'), http.OK)
if is_native_login(campaign):
# native campaign: prereg and erpc
assert_equal(data.get('next_url'), next_url)
elif is_proxy_login(campaign):
# proxy campaign: preprints and branded ones
assert_equal(
data.get('next_url'),
                    web_url_for('auth_login', next=next_url, _absolute=True)
)
def test_invalid_campaign_login_without_auth(self):
data = login_and_register_handler(
self.no_auth,
login=True,
campaign=self.invalid_campaign,
next_url=self.next_url
)
redirect_url = web_url_for('auth_login', campaigns=None, next=self.next_url)
assert_equal(data['status_code'], http.FOUND)
assert_equal(data['next_url'], redirect_url)
assert_equal(data['campaign'], None)
def test_invalid_campaign_register_without_auth(self):
data = login_and_register_handler(
self.no_auth,
login=False,
campaign=self.invalid_campaign,
next_url=self.next_url
)
redirect_url = web_url_for('auth_register', campaigns=None, next=self.next_url)
assert_equal(data['status_code'], http.FOUND)
assert_equal(data['next_url'], redirect_url)
assert_equal(data['campaign'], None)
    # The following two tests handle the special case for `claim_user_registered`.
    # When an authenticated user clicks the claim confirmation link, there are two ways to trigger this flow:
    # 1. If the authenticated user is already a contributor to the project, OSF will ask the user to sign out
    #    by providing a "logout" link.
    # 2. If the authenticated user is not a contributor but decides not to claim contributorship under this account,
    #    OSF provides a "not <username>?" link for the user to log out.
    # Both links land the user on the register page with a "MUST LOGIN" push notification.
def test_register_logout_flag_with_auth(self):
        # when the user clicks the "logout" or "not <username>?" link, the first step is to log the user out
data = login_and_register_handler(self.auth, login=False, campaign=None, next_url=self.next_url, logout=True)
assert_equal(data.get('status_code'), 'auth_logout')
assert_equal(data.get('next_url'), self.next_url)
    def test_register_logout_flag_without_auth(self):
        # the second step is to land the user on the register page with the "MUST LOGIN" warning
data = login_and_register_handler(self.no_auth, login=False, campaign=None, next_url=self.next_url, logout=True)
assert_equal(data.get('status_code'), http.OK)
assert_equal(data.get('next_url'), self.next_url)
assert_true(data.get('must_login_warning'))
class TestAuthLogout(OsfTestCase):
def setUp(self):
super(TestAuthLogout, self).setUp()
self.goodbye_url = web_url_for('goodbye', _absolute=True)
self.redirect_url = web_url_for('forgot_password_get', _absolute=True)
self.valid_next_url = web_url_for('dashboard', _absolute=True)
self.invalid_next_url = 'http://localhost:1234/abcde'
self.auth_user = AuthUserFactory()
def tearDown(self):
super(TestAuthLogout, self).tearDown()
User.objects.all().delete()
assert_equal(User.objects.count(), 0)
def test_logout_with_valid_next_url_logged_in(self):
logout_url = web_url_for('auth_logout', _absolute=True, next=self.valid_next_url)
resp = self.app.get(logout_url, auth=self.auth_user.auth)
assert_equal(resp.status_code, http.FOUND)
assert_equal(cas.get_logout_url(logout_url), resp.headers['Location'])
def test_logout_with_valid_next_url_logged_out(self):
logout_url = web_url_for('auth_logout', _absolute=True, next=self.valid_next_url)
resp = self.app.get(logout_url, auth=None)
assert_equal(resp.status_code, http.FOUND)
assert_equal(self.valid_next_url, resp.headers['Location'])
def test_logout_with_invalid_next_url_logged_in(self):
logout_url = web_url_for('auth_logout', _absolute=True, next=self.invalid_next_url)
resp = self.app.get(logout_url, auth=self.auth_user.auth)
assert_equal(resp.status_code, http.FOUND)
assert_equal(cas.get_logout_url(self.goodbye_url), resp.headers['Location'])
def test_logout_with_invalid_next_url_logged_out(self):
logout_url = web_url_for('auth_logout', _absolute=True, next=self.invalid_next_url)
resp = self.app.get(logout_url, auth=None)
assert_equal(resp.status_code, http.FOUND)
assert_equal(cas.get_logout_url(self.goodbye_url), resp.headers['Location'])
def test_logout_with_redirect_url(self):
logout_url = web_url_for('auth_logout', _absolute=True, redirect_url=self.redirect_url)
resp = self.app.get(logout_url, auth=self.auth_user.auth)
assert_equal(resp.status_code, http.FOUND)
assert_equal(cas.get_logout_url(self.redirect_url), resp.headers['Location'])
def test_logout_with_no_parameter(self):
logout_url = web_url_for('auth_logout', _absolute=True)
resp = self.app.get(logout_url, auth=None)
assert_equal(resp.status_code, http.FOUND)
assert_equal(cas.get_logout_url(self.goodbye_url), resp.headers['Location'])
class TestExternalAuthViews(OsfTestCase):
def setUp(self):
super(TestExternalAuthViews, self).setUp()
name, email = fake.name(), fake.email()
self.provider_id = fake.ean()
external_identity = {
'service': {
self.provider_id: 'CREATE'
}
}
self.user = User.create_unconfirmed(
username=email,
password=str(fake.password()),
fullname=name,
external_identity=external_identity,
)
self.user.save()
self.auth = Auth(self.user)
def test_external_login_email_get_with_invalid_session(self):
url = web_url_for('external_login_email_get')
resp = self.app.get(url, expect_errors=True)
assert_equal(resp.status_code, 401)
def test_external_login_confirm_email_get_with_another_user_logged_in(self):
another_user = AuthUserFactory()
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service', destination='dashboard')
res = self.app.get(url, auth=another_user.auth)
assert_equal(res.status_code, 302, 'redirects to cas logout')
assert_in('/logout?service=', res.location)
assert_in(url, res.location)
def test_external_login_confirm_email_get_without_destination(self):
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service')
res = self.app.get(url, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, 400, 'bad request')
@mock.patch('website.mails.send_mail')
def test_external_login_confirm_email_get_create(self, mock_welcome):
assert_false(self.user.is_registered)
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service', destination='dashboard')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302, 'redirects to cas login')
assert_in('/login?service=', res.location)
assert_in('new=true', res.location)
assert_equal(mock_welcome.call_count, 1)
self.user.reload()
assert_equal(self.user.external_identity['service'][self.provider_id], 'VERIFIED')
assert_true(self.user.is_registered)
assert_true(self.user.has_usable_password())
@mock.patch('website.mails.send_mail')
def test_external_login_confirm_email_get_link(self, mock_link_confirm):
self.user.external_identity['service'][self.provider_id] = 'LINK'
self.user.save()
assert_false(self.user.is_registered)
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service', destination='dashboard')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302, 'redirects to cas login')
assert_in('/login?service=', res.location)
assert_not_in('new=true', res.location)
assert_equal(mock_link_confirm.call_count, 1)
self.user.reload()
assert_equal(self.user.external_identity['service'][self.provider_id], 'VERIFIED')
assert_true(self.user.is_registered)
assert_true(self.user.has_usable_password())
@mock.patch('website.mails.send_mail')
def test_external_login_confirm_email_get_duped_id(self, mock_confirm):
dupe_user = UserFactory(external_identity={'service': {self.provider_id: 'CREATE'}})
assert_equal(dupe_user.external_identity, self.user.external_identity)
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service', destination='dashboard')
res = self.app.get(url, auth=self.auth)
assert_equal(res.status_code, 302, 'redirects to cas login')
assert_in('/login?service=', res.location)
assert_equal(mock_confirm.call_count, 1)
self.user.reload()
dupe_user.reload()
assert_equal(self.user.external_identity['service'][self.provider_id], 'VERIFIED')
assert_equal(dupe_user.external_identity, {})
@mock.patch('website.mails.send_mail')
def test_external_login_confirm_email_get_duping_id(self, mock_confirm):
dupe_user = UserFactory(external_identity={'service': {self.provider_id: 'VERIFIED'}})
url = self.user.get_confirmation_url(self.user.username, external_id_provider='service', destination='dashboard')
res = self.app.get(url, auth=self.auth, expect_errors=True)
assert_equal(res.status_code, 403, 'only allows one user to link an id')
assert_equal(mock_confirm.call_count, 0)
self.user.reload()
dupe_user.reload()
assert_equal(dupe_user.external_identity['service'][self.provider_id], 'VERIFIED')
assert_equal(self.user.external_identity, {})
def test_ensure_external_identity_uniqueness_unverified(self):
dupe_user = UserFactory(external_identity={'service': {self.provider_id: 'CREATE'}})
assert_equal(dupe_user.external_identity, self.user.external_identity)
ensure_external_identity_uniqueness('service', self.provider_id, self.user)
dupe_user.reload()
self.user.reload()
assert_equal(dupe_user.external_identity, {})
assert_equal(self.user.external_identity, {'service': {self.provider_id: 'CREATE'}})
def test_ensure_external_identity_uniqueness_verified(self):
dupe_user = UserFactory(external_identity={'service': {self.provider_id: 'VERIFIED'}})
assert_equal(dupe_user.external_identity, {'service': {self.provider_id: 'VERIFIED'}})
assert_not_equal(dupe_user.external_identity, self.user.external_identity)
with assert_raises(ValidationError):
ensure_external_identity_uniqueness('service', self.provider_id, self.user)
dupe_user.reload()
self.user.reload()
assert_equal(dupe_user.external_identity, {'service': {self.provider_id: 'VERIFIED'}})
assert_equal(self.user.external_identity, {})
def test_ensure_external_identity_uniqueness_multiple(self):
dupe_user = UserFactory(external_identity={'service': {self.provider_id: 'CREATE'}})
assert_equal(dupe_user.external_identity, self.user.external_identity)
ensure_external_identity_uniqueness('service', self.provider_id)
dupe_user.reload()
self.user.reload()
assert_equal(dupe_user.external_identity, {})
assert_equal(self.user.external_identity, {})
# TODO: Use mock add-on
class TestAddonUserViews(OsfTestCase):
def setUp(self):
super(TestAddonUserViews, self).setUp()
self.user = AuthUserFactory()
def test_choose_addons_add(self):
"""Add add-ons; assert that add-ons are attached to project.
"""
url = '/api/v1/settings/addons/'
self.app.post_json(
url,
{'github': True},
auth=self.user.auth,
).maybe_follow()
self.user.reload()
assert_true(self.user.get_addon('github'))
def test_choose_addons_remove(self):
        # Add, then delete, add-ons; assert that the add-ons are no longer
        # attached to the user.
url = '/api/v1/settings/addons/'
self.app.post_json(
url,
{'github': True},
auth=self.user.auth,
).maybe_follow()
self.app.post_json(
url,
{'github': False},
auth=self.user.auth
).maybe_follow()
self.user.reload()
assert_false(self.user.get_addon('github'))
class TestConfigureMailingListViews(OsfTestCase):
@classmethod
def setUpClass(cls):
super(TestConfigureMailingListViews, cls).setUpClass()
cls._original_enable_email_subscriptions = settings.ENABLE_EMAIL_SUBSCRIPTIONS
settings.ENABLE_EMAIL_SUBSCRIPTIONS = True
def test_user_unsubscribe_and_subscribe_help_mailing_list(self):
user = AuthUserFactory()
url = api_url_for('user_choose_mailing_lists')
payload = {settings.OSF_HELP_LIST: False}
res = self.app.post_json(url, payload, auth=user.auth)
user.reload()
assert_false(user.osf_mailing_lists[settings.OSF_HELP_LIST])
payload = {settings.OSF_HELP_LIST: True}
res = self.app.post_json(url, payload, auth=user.auth)
user.reload()
assert_true(user.osf_mailing_lists[settings.OSF_HELP_LIST])
def test_get_notifications(self):
user = AuthUserFactory()
mailing_lists = dict(user.osf_mailing_lists.items() + user.mailchimp_mailing_lists.items())
url = api_url_for('user_notifications')
res = self.app.get(url, auth=user.auth)
assert_equal(mailing_lists, res.json['mailing_lists'])
def test_osf_help_mails_subscribe(self):
user = UserFactory()
user.osf_mailing_lists[settings.OSF_HELP_LIST] = False
user.save()
update_osf_help_mails_subscription(user, True)
assert_true(user.osf_mailing_lists[settings.OSF_HELP_LIST])
def test_osf_help_mails_unsubscribe(self):
user = UserFactory()
user.osf_mailing_lists[settings.OSF_HELP_LIST] = True
user.save()
update_osf_help_mails_subscription(user, False)
assert_false(user.osf_mailing_lists[settings.OSF_HELP_LIST])
@unittest.skipIf(settings.USE_CELERY, 'Subscription must happen synchronously for this test')
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
def test_user_choose_mailing_lists_updates_user_dict(self, mock_get_mailchimp_api):
user = AuthUserFactory()
list_name = 'OSF General'
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': 1, 'list_name': list_name}]}
list_id = mailchimp_utils.get_list_id_from_name(list_name)
payload = {settings.MAILCHIMP_GENERAL_LIST: True}
url = api_url_for('user_choose_mailing_lists')
res = self.app.post_json(url, payload, auth=user.auth)
user.reload()
        # check that user.mailchimp_mailing_lists is updated
assert_true(user.mailchimp_mailing_lists[settings.MAILCHIMP_GENERAL_LIST])
assert_equal(
user.mailchimp_mailing_lists[settings.MAILCHIMP_GENERAL_LIST],
payload[settings.MAILCHIMP_GENERAL_LIST]
)
# check that user is subscribed
mock_client.lists.subscribe.assert_called_with(id=list_id,
email={'email': user.username},
merge_vars={
'fname': user.given_name,
'lname': user.family_name,
},
double_optin=False,
update_existing=True)
def test_get_mailchimp_get_endpoint_returns_200(self):
url = api_url_for('mailchimp_get_endpoint')
res = self.app.get(url)
assert_equal(res.status_code, 200)
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
    def test_mailchimp_webhook_subscribe_action_updates_user(self, mock_get_mailchimp_api):
""" Test that 'subscribe' actions sent to the OSF via mailchimp
webhooks update the OSF database.
"""
list_id = '12345'
list_name = 'OSF General'
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}
# user is not subscribed to a list
user = AuthUserFactory()
user.mailchimp_mailing_lists = {'OSF General': False}
user.save()
# user subscribes and webhook sends request to OSF
data = {
'type': 'subscribe',
'data[list_id]': list_id,
'data[email]': user.username
}
url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
res = self.app.post(url,
data,
content_type="application/x-www-form-urlencoded",
auth=user.auth)
# user field is updated on the OSF
user.reload()
assert_true(user.mailchimp_mailing_lists[list_name])
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
def test_mailchimp_webhook_profile_action_does_not_change_user(self, mock_get_mailchimp_api):
""" Test that 'profile' actions sent to the OSF via mailchimp
webhooks do not cause any database changes.
"""
list_id = '12345'
list_name = 'OSF General'
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}
# user is subscribed to a list
user = AuthUserFactory()
user.mailchimp_mailing_lists = {'OSF General': True}
user.save()
# user hits subscribe again, which will update the user's existing info on mailchimp
# webhook sends request (when configured to update on changes made through the API)
data = {
'type': 'profile',
'data[list_id]': list_id,
'data[email]': user.username
}
url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
res = self.app.post(url,
data,
content_type="application/x-www-form-urlencoded",
auth=user.auth)
# user field does not change
user.reload()
assert_true(user.mailchimp_mailing_lists[list_name])
@mock.patch('website.mailchimp_utils.get_mailchimp_api')
def test_sync_data_from_mailchimp_unsubscribes_user(self, mock_get_mailchimp_api):
list_id = '12345'
list_name = 'OSF General'
mock_client = mock.MagicMock()
mock_get_mailchimp_api.return_value = mock_client
mock_client.lists.list.return_value = {'data': [{'id': list_id, 'name': list_name}]}
# user is subscribed to a list
user = AuthUserFactory()
user.mailchimp_mailing_lists = {'OSF General': True}
user.save()
# user unsubscribes through mailchimp and webhook sends request
data = {
'type': 'unsubscribe',
'data[list_id]': list_id,
'data[email]': user.username
}
url = api_url_for('sync_data_from_mailchimp') + '?key=' + settings.MAILCHIMP_WEBHOOK_SECRET_KEY
res = self.app.post(url,
data,
content_type="application/x-www-form-urlencoded",
auth=user.auth)
# user field is updated on the OSF
user.reload()
assert_false(user.mailchimp_mailing_lists[list_name])
def test_sync_data_from_mailchimp_fails_without_secret_key(self):
user = AuthUserFactory()
payload = {'values': {'type': 'unsubscribe',
'data': {'list_id': '12345',
'email': '[email protected]'}}}
url = api_url_for('sync_data_from_mailchimp')
res = self.app.post_json(url, payload, auth=user.auth, expect_errors=True)
assert_equal(res.status_code, http.UNAUTHORIZED)
@classmethod
def tearDownClass(cls):
super(TestConfigureMailingListViews, cls).tearDownClass()
settings.ENABLE_EMAIL_SUBSCRIPTIONS = cls._original_enable_email_subscriptions
# TODO: Move to OSF Storage
class TestFileViews(OsfTestCase):
def setUp(self):
super(TestFileViews, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user, is_public=True)
self.project.add_contributor(self.user)
self.project.save()
def test_files_get(self):
url = self.project.api_url_for('collect_file_trees')
res = self.app.get(url, auth=self.user.auth)
expected = _view_project(self.project, auth=Auth(user=self.user))
assert_equal(res.status_code, http.OK)
assert_equal(res.json['node'], expected['node'])
assert_in('tree_js', res.json)
assert_in('tree_css', res.json)
def test_grid_data(self):
url = self.project.api_url_for('grid_data')
res = self.app.get(url, auth=self.user.auth).maybe_follow()
assert_equal(res.status_code, http.OK)
expected = rubeus.to_hgrid(self.project, auth=Auth(self.user))
data = res.json['data']
assert_equal(len(data), len(expected))
class TestTagViews(OsfTestCase):
def setUp(self):
super(TestTagViews, self).setUp()
self.user = AuthUserFactory()
self.project = ProjectFactory(creator=self.user)
@unittest.skip('Tags endpoint disabled for now.')
def test_tag_get_returns_200(self):
url = web_url_for('project_tag', tag='foo')
res = self.app.get(url)
assert_equal(res.status_code, 200)
class TestReorderComponents(OsfTestCase):
def setUp(self):
super(TestReorderComponents, self).setUp()
self.creator = AuthUserFactory()
self.contrib = AuthUserFactory()
# Project is public
self.project = ProjectFactory.create(creator=self.creator, is_public=True)
self.project.add_contributor(self.contrib, auth=Auth(self.creator))
# subcomponent that only creator can see
self.public_component = NodeFactory(creator=self.creator, is_public=True)
self.private_component = NodeFactory(creator=self.creator, is_public=False)
NodeRelation.objects.create(parent=self.project, child=self.public_component)
NodeRelation.objects.create(parent=self.project, child=self.private_component)
self.project.save()
# https://github.com/CenterForOpenScience/openscienceframework.org/issues/489
def test_reorder_components_with_private_component(self):
# contrib tries to reorder components
payload = {
'new_list': [
'{0}'.format(self.private_component._id),
'{0}'.format(self.public_component._id),
]
}
url = self.project.api_url_for('project_reorder_components')
res = self.app.post_json(url, payload, auth=self.contrib.auth)
assert_equal(res.status_code, 200)
class TestWikiWidgetViews(OsfTestCase):
def setUp(self):
super(TestWikiWidgetViews, self).setUp()
# project with no home wiki page
self.project = ProjectFactory()
self.read_only_contrib = AuthUserFactory()
self.project.add_contributor(self.read_only_contrib, permissions='read')
self.noncontributor = AuthUserFactory()
# project with no home wiki content
self.project2 = ProjectFactory(creator=self.project.creator)
self.project2.add_contributor(self.read_only_contrib, permissions='read')
self.project2.update_node_wiki(name='home', content='', auth=Auth(self.project.creator))
def test_show_wiki_for_contributors_when_no_wiki_or_content(self):
assert_true(_should_show_wiki_widget(self.project, self.project.creator))
assert_true(_should_show_wiki_widget(self.project2, self.project.creator))
def test_show_wiki_is_false_for_read_contributors_when_no_wiki_or_content(self):
assert_false(_should_show_wiki_widget(self.project, self.read_only_contrib))
assert_false(_should_show_wiki_widget(self.project2, self.read_only_contrib))
def test_show_wiki_is_false_for_noncontributors_when_no_wiki_or_content(self):
assert_false(_should_show_wiki_widget(self.project, self.noncontributor))
        assert_false(_should_show_wiki_widget(self.project2, self.noncontributor))
class TestProjectCreation(OsfTestCase):
def setUp(self):
super(TestProjectCreation, self).setUp()
self.creator = AuthUserFactory()
self.url = api_url_for('project_new_post')
self.user1 = AuthUserFactory()
self.user2 = AuthUserFactory()
self.project = ProjectFactory(creator=self.user1)
self.project.add_contributor(self.user2, auth=Auth(self.user1))
self.project.save()
def tearDown(self):
super(TestProjectCreation, self).tearDown()
def test_needs_title(self):
res = self.app.post_json(self.url, {}, auth=self.creator.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_create_component_strips_html(self):
user = AuthUserFactory()
project = ProjectFactory(creator=user)
url = web_url_for('project_new_node', pid=project._id)
post_data = {'title': '<b>New <blink>Component</blink> Title</b>', 'category': ''}
        self.app.post(url, post_data, auth=user.auth).follow()
project.reload()
child = project.nodes[0]
# HTML has been stripped
assert_equal(child.title, 'New Component Title')
def test_strip_html_from_title(self):
payload = {
'title': 'no html <b>here</b>'
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
node = Node.load(res.json['projectUrl'].replace('/', ''))
assert_true(node)
assert_equal('no html here', node.title)
def test_only_needs_title(self):
payload = {
'title': 'Im a real title'
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
assert_equal(res.status_code, 201)
    def test_title_must_be_at_least_one_character(self):
payload = {
'title': ''
}
res = self.app.post_json(
self.url, payload, auth=self.creator.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_title_must_be_less_than_200(self):
payload = {
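            # 250 concatenated integers comfortably exceed the 200-character limit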
'title': ''.join([str(x) for x in xrange(0, 250)])
}
res = self.app.post_json(
self.url, payload, auth=self.creator.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_fails_to_create_project_with_whitespace_title(self):
payload = {
'title': ' '
}
res = self.app.post_json(
self.url, payload, auth=self.creator.auth, expect_errors=True)
assert_equal(res.status_code, 400)
def test_creates_a_project(self):
payload = {
'title': 'Im a real title'
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
assert_equal(res.status_code, 201)
node = Node.load(res.json['projectUrl'].replace('/', ''))
assert_true(node)
        assert_equal(node.title, 'Im a real title')
def test_create_component_add_contributors_admin(self):
url = web_url_for('project_new_node', pid=self.project._id)
post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
res = self.app.post(url, post_data, auth=self.user1.auth)
self.project.reload()
child = self.project.nodes[0]
assert_equal(child.title, 'New Component With Contributors Title')
assert_in(self.user1, child.contributors)
assert_in(self.user2, child.contributors)
# check redirect url
assert_in('/contributors/', res.location)
def test_create_component_with_contributors_read_write(self):
url = web_url_for('project_new_node', pid=self.project._id)
non_admin = AuthUserFactory()
self.project.add_contributor(non_admin, permissions=['read', 'write'])
self.project.save()
post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
res = self.app.post(url, post_data, auth=non_admin.auth)
self.project.reload()
child = self.project.nodes[0]
assert_equal(child.title, 'New Component With Contributors Title')
assert_in(non_admin, child.contributors)
assert_in(self.user1, child.contributors)
assert_in(self.user2, child.contributors)
assert_equal(child.get_permissions(non_admin), ['read', 'write', 'admin'])
# check redirect url
assert_in('/contributors/', res.location)
def test_create_component_with_contributors_read(self):
url = web_url_for('project_new_node', pid=self.project._id)
non_admin = AuthUserFactory()
self.project.add_contributor(non_admin, permissions=['read'])
self.project.save()
post_data = {'title': 'New Component With Contributors Title', 'category': '', 'inherit_contributors': True}
res = self.app.post(url, post_data, auth=non_admin.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_create_component_add_no_contributors(self):
url = web_url_for('project_new_node', pid=self.project._id)
post_data = {'title': 'New Component With Contributors Title', 'category': ''}
res = self.app.post(url, post_data, auth=self.user1.auth)
self.project.reload()
child = self.project.nodes[0]
assert_equal(child.title, 'New Component With Contributors Title')
assert_in(self.user1, child.contributors)
assert_not_in(self.user2, child.contributors)
# check redirect url
assert_not_in('/contributors/', res.location)
def test_new_project_returns_serialized_node_data(self):
payload = {
'title': 'Im a real title'
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
assert_equal(res.status_code, 201)
node = res.json['newNode']
assert_true(node)
assert_equal(node['title'], 'Im a real title')
def test_description_works(self):
payload = {
'title': 'Im a real title',
'description': 'I describe things!'
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
assert_equal(res.status_code, 201)
node = Node.load(res.json['projectUrl'].replace('/', ''))
assert_true(node)
        assert_equal(node.description, 'I describe things!')
def test_can_template(self):
other_node = ProjectFactory(creator=self.creator)
payload = {
'title': 'Im a real title',
'template': other_node._id
}
res = self.app.post_json(self.url, payload, auth=self.creator.auth)
assert_equal(res.status_code, 201)
node = Node.load(res.json['projectUrl'].replace('/', ''))
assert_true(node)
        assert_equal(node.template_node, other_node)
def test_project_before_template_no_addons(self):
project = ProjectFactory()
res = self.app.get(project.api_url_for('project_before_template'), auth=project.creator.auth)
assert_equal(res.json['prompts'], [])
def test_project_before_template_with_addons(self):
project = ProjectWithAddonFactory(addon='box')
res = self.app.get(project.api_url_for('project_before_template'), auth=project.creator.auth)
assert_in('Box', res.json['prompts'])
def test_project_new_from_template_non_user(self):
project = ProjectFactory()
url = api_url_for('project_new_from_template', nid=project._id)
res = self.app.post(url, auth=None)
assert_equal(res.status_code, 302)
res2 = res.follow(expect_errors=True)
assert_equal(res2.status_code, 301)
assert_equal(res2.request.path, '/login')
def test_project_new_from_template_public_non_contributor(self):
non_contributor = AuthUserFactory()
project = ProjectFactory(is_public=True)
url = api_url_for('project_new_from_template', nid=project._id)
res = self.app.post(url, auth=non_contributor.auth)
assert_equal(res.status_code, 201)
def test_project_new_from_template_contributor(self):
contributor = AuthUserFactory()
project = ProjectFactory(is_public=False)
project.add_contributor(contributor)
project.save()
url = api_url_for('project_new_from_template', nid=project._id)
res = self.app.post(url, auth=contributor.auth)
assert_equal(res.status_code, 201)
class TestUnconfirmedUserViews(OsfTestCase):
    def test_unconfirmed_user_profile_view_returns_400(self):
user = UnconfirmedUserFactory()
url = web_url_for('profile_view_id', uid=user._id)
res = self.app.get(url, expect_errors=True)
assert_equal(res.status_code, http.BAD_REQUEST)
class TestStaticFileViews(OsfTestCase):
def test_robots_dot_txt(self):
res = self.app.get('/robots.txt')
assert_equal(res.status_code, 200)
assert_in('User-agent', res)
assert_in('text/plain', res.headers['Content-Type'])
def test_favicon(self):
res = self.app.get('/favicon.ico')
assert_equal(res.status_code, 200)
assert_in('image/vnd.microsoft.icon', res.headers['Content-Type'])
def test_getting_started_page(self):
res = self.app.get('/getting-started/')
assert_equal(res.status_code, 302)
assert_equal(res.location, 'http://help.osf.io/')
def test_help_redirect(self):
res = self.app.get('/help/')
        assert_equal(res.status_code, 302)
class TestUserConfirmSignal(OsfTestCase):
def test_confirm_user_signal_called_when_user_claims_account(self):
unclaimed_user = UnconfirmedUserFactory()
# unclaimed user has been invited to a project.
referrer = UserFactory()
project = ProjectFactory(creator=referrer)
unclaimed_user.add_unclaimed_record(project, referrer, 'foo', email=fake.email())
unclaimed_user.save()
token = unclaimed_user.get_unclaimed_record(project._primary_key)['token']
with capture_signals() as mock_signals:
url = web_url_for('claim_user_form', pid=project._id, uid=unclaimed_user._id, token=token)
payload = {'username': unclaimed_user.username,
'password': 'password',
'password2': 'password'}
res = self.app.post(url, payload)
assert_equal(res.status_code, 302)
assert_equal(mock_signals.signals_sent(), set([auth.signals.user_confirmed]))
def test_confirm_user_signal_called_when_user_confirms_email(self):
unconfirmed_user = UnconfirmedUserFactory()
unconfirmed_user.save()
# user goes to email confirmation link
token = unconfirmed_user.get_confirmation_token(unconfirmed_user.username)
with capture_signals() as mock_signals:
url = web_url_for('confirm_email_get', uid=unconfirmed_user._id, token=token)
res = self.app.get(url)
assert_equal(res.status_code, 302)
assert_equal(mock_signals.signals_sent(), set([auth.signals.user_confirmed]))
# copied from tests/test_comments.py
class TestCommentViews(OsfTestCase):
def setUp(self):
super(TestCommentViews, self).setUp()
self.project = ProjectFactory(is_public=True)
self.user = AuthUserFactory()
self.project.add_contributor(self.user)
self.project.save()
self.user.save()
def test_view_project_comments_updates_user_comments_view_timestamp(self):
url = self.project.api_url_for('update_comments_timestamp')
res = self.app.put_json(url, {
'page': 'node',
'rootId': self.project._id
}, auth=self.user.auth)
self.user.reload()
user_timestamp = self.user.comments_viewed_timestamp[self.project._id]
view_timestamp = timezone.now()
assert_datetime_equal(user_timestamp, view_timestamp)
def test_confirm_non_contrib_viewers_dont_have_pid_in_comments_view_timestamp(self):
non_contributor = AuthUserFactory()
url = self.project.api_url_for('update_comments_timestamp')
res = self.app.put_json(url, {
'page': 'node',
'rootId': self.project._id
}, auth=self.user.auth)
non_contributor.reload()
assert_not_in(self.project._id, non_contributor.comments_viewed_timestamp)
def test_view_comments_updates_user_comments_view_timestamp_files(self):
osfstorage = self.project.get_addon('osfstorage')
root_node = osfstorage.get_root()
test_file = root_node.append_file('test_file')
test_file.create_version(self.user, {
'object': '06d80e',
'service': 'cloud',
osfstorage_settings.WATERBUTLER_RESOURCE: 'osf',
}, {
'size': 1337,
'contentType': 'img/png'
}).save()
url = self.project.api_url_for('update_comments_timestamp')
res = self.app.put_json(url, {
'page': 'files',
'rootId': test_file._id
}, auth=self.user.auth)
self.user.reload()
user_timestamp = self.user.comments_viewed_timestamp[test_file._id]
view_timestamp = timezone.now()
assert_datetime_equal(user_timestamp, view_timestamp)
# Regression test for https://openscience.atlassian.net/browse/OSF-5193
# moved from tests/test_comments.py
def test_find_unread_includes_edited_comments(self):
project = ProjectFactory()
user = AuthUserFactory()
project.add_contributor(user, save=True)
comment = CommentFactory(node=project, user=project.creator)
n_unread = Comment.find_n_unread(user=user, node=project, page='node')
assert n_unread == 1
url = project.api_url_for('update_comments_timestamp')
payload = {'page': 'node', 'rootId': project._id}
self.app.put_json(url, payload, auth=user.auth)
user.reload()
n_unread = Comment.find_n_unread(user=user, node=project, page='node')
assert n_unread == 0
# Edit previously read comment
comment.edit(
auth=Auth(project.creator),
content='edited',
save=True
)
n_unread = Comment.find_n_unread(user=user, node=project, page='node')
assert n_unread == 1
class TestResetPassword(OsfTestCase):
def setUp(self):
super(TestResetPassword, self).setUp()
self.user = AuthUserFactory()
self.another_user = AuthUserFactory()
self.osf_key_v2 = generate_verification_key(verification_type='password')
self.user.verification_key_v2 = self.osf_key_v2
self.user.verification_key = None
self.user.save()
self.get_url = web_url_for(
'reset_password_get',
uid=self.user._id,
token=self.osf_key_v2['token']
)
self.get_url_invalid_key = web_url_for(
'reset_password_get',
uid=self.user._id,
token=generate_verification_key()
)
self.get_url_invalid_user = web_url_for(
'reset_password_get',
uid=self.another_user._id,
token=self.osf_key_v2['token']
)
# successfully load reset password page
def test_reset_password_view_returns_200(self):
res = self.app.get(self.get_url)
assert_equal(res.status_code, 200)
# raise http 400 error
def test_reset_password_view_raises_400(self):
res = self.app.get(self.get_url_invalid_key, expect_errors=True)
assert_equal(res.status_code, 400)
res = self.app.get(self.get_url_invalid_user, expect_errors=True)
assert_equal(res.status_code, 400)
self.user.verification_key_v2['expires'] = timezone.now()
self.user.save()
res = self.app.get(self.get_url, expect_errors=True)
assert_equal(res.status_code, 400)
# successfully reset password
@mock.patch('framework.auth.cas.CasClient.service_validate')
def test_can_reset_password_if_form_success(self, mock_service_validate):
# load reset password page and submit email
res = self.app.get(self.get_url)
form = res.forms['resetPasswordForm']
form['password'] = 'newpassword'
form['password2'] = 'newpassword'
res = form.submit()
# check request URL is /resetpassword with username and new verification_key_v2 token
request_url_path = res.request.path
assert_in('resetpassword', request_url_path)
assert_in(self.user._id, request_url_path)
assert_not_in(self.user.verification_key_v2['token'], request_url_path)
# check verification_key_v2 for OSF is destroyed and verification_key for CAS is in place
self.user.reload()
assert_equal(self.user.verification_key_v2, {})
assert_not_equal(self.user.verification_key, None)
# check redirection to CAS login with username and the new verification_key(CAS)
assert_equal(res.status_code, 302)
location = res.headers.get('Location')
assert_true('login?service=' in location)
assert_true('username={}'.format(self.user.username) in location)
assert_true('verification_key={}'.format(self.user.verification_key) in location)
# check if password was updated
self.user.reload()
assert_true(self.user.check_password('newpassword'))
# check if verification_key is destroyed after service validation
mock_service_validate.return_value = cas.CasResponse(
authenticated=True,
user=self.user._id,
attributes={'accessToken': fake.md5()}
)
ticket = fake.md5()
service_url = 'http://accounts.osf.io/?ticket=' + ticket
cas.make_response_from_ticket(ticket, service_url)
self.user.reload()
assert_equal(self.user.verification_key, None)
# log users out before they land on reset password page
def test_reset_password_logs_out_user(self):
# visit reset password link while another user is logged in
res = self.app.get(self.get_url, auth=self.another_user.auth)
# check redirection to CAS logout
assert_equal(res.status_code, 302)
location = res.headers.get('Location')
assert_not_in('reauth', location)
assert_in('logout?service=', location)
assert_in('resetpassword', location)
@unittest.skip('Unskip when institution hiding code is reimplemented')
class TestIndexView(OsfTestCase):
def setUp(self):
super(TestIndexView, self).setUp()
self.inst_one = InstitutionFactory()
self.inst_two = InstitutionFactory()
self.inst_three = InstitutionFactory()
self.inst_four = InstitutionFactory()
self.inst_five = InstitutionFactory()
self.user = AuthUserFactory()
self.user.affiliated_institutions.add(self.inst_one)
self.user.affiliated_institutions.add(self.inst_two)
self.user.save()
# tests 5 affiliated, non-registered, public projects
for i in range(settings.INSTITUTION_DISPLAY_NODE_THRESHOLD):
node = ProjectFactory(creator=self.user, is_public=True)
node.affiliated_institutions.add(self.inst_one)
node.save()
# tests 4 affiliated, non-registered, public projects
for i in range(settings.INSTITUTION_DISPLAY_NODE_THRESHOLD - 1):
node = ProjectFactory(creator=self.user, is_public=True)
node.affiliated_institutions.add(self.inst_two)
node.save()
# tests 5 affiliated, registered, public projects
for i in range(settings.INSTITUTION_DISPLAY_NODE_THRESHOLD):
registration = RegistrationFactory(creator=self.user, is_public=True)
registration.affiliated_institutions.add(self.inst_three)
registration.save()
# tests 5 affiliated, non-registered public components
for i in range(settings.INSTITUTION_DISPLAY_NODE_THRESHOLD):
node = NodeFactory(creator=self.user, is_public=True)
node.affiliated_institutions.add(self.inst_four)
node.save()
# tests 5 affiliated, non-registered, private projects
for i in range(settings.INSTITUTION_DISPLAY_NODE_THRESHOLD):
node = ProjectFactory(creator=self.user)
node.affiliated_institutions.add(self.inst_five)
node.save()
def test_dashboard_institutions(self):
dashboard_institutions = index()['dashboard_institutions']
assert_equal(len(dashboard_institutions), 1)
assert_equal(dashboard_institutions[0]['id'], self.inst_one._id)
assert_not_equal(dashboard_institutions[0]['id'], self.inst_two._id)
assert_not_equal(dashboard_institutions[0]['id'], self.inst_three._id)
assert_not_equal(dashboard_institutions[0]['id'], self.inst_four._id)
assert_not_equal(dashboard_institutions[0]['id'], self.inst_five._id)
class TestResolveGuid(OsfTestCase):
def setUp(self):
super(TestResolveGuid, self).setUp()
def test_preprint_provider_without_domain(self):
provider = PreprintProviderFactory(domain='')
preprint = PreprintFactory(provider=provider)
url = web_url_for('resolve_guid', _guid=True, guid=preprint._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(
res.request.path,
'/{}/'.format(preprint._id)
)
def test_preprint_provider_with_domain_without_redirect(self):
domain = 'https://test.com/'
provider = PreprintProviderFactory(_id='test', domain=domain, domain_redirect_enabled=False)
preprint = PreprintFactory(provider=provider)
url = web_url_for('resolve_guid', _guid=True, guid=preprint._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(
res.request.path,
'/{}/'.format(preprint._id)
)
def test_preprint_provider_with_domain_with_redirect(self):
domain = 'https://test.com/'
provider = PreprintProviderFactory(_id='test', domain=domain, domain_redirect_enabled=True)
preprint = PreprintFactory(provider=provider)
url = web_url_for('resolve_guid', _guid=True, guid=preprint._id)
res = self.app.get(url)
assert_is_redirect(res)
assert_equal(res.status_code, 301)
assert_equal(
res.headers['location'],
'{}{}/'.format(domain, preprint._id)
)
assert_equal(
res.request.path,
'/{}/'.format(preprint._id)
)
def test_preprint_provider_with_osf_domain(self):
provider = PreprintProviderFactory(_id='osf', domain='https://osf.io/')
preprint = PreprintFactory(provider=provider)
url = web_url_for('resolve_guid', _guid=True, guid=preprint._id)
res = self.app.get(url)
assert_equal(res.status_code, 200)
assert_equal(
res.request.path,
'/{}/'.format(preprint._id)
)
class TestConfirmationViewBlockBingPreview(OsfTestCase):
def setUp(self):
super(TestConfirmationViewBlockBingPreview, self).setUp()
self.user_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) BingPreview/1.0b'
# reset password link should fail with BingPreview
def test_reset_password_get_returns_403(self):
user = UserFactory()
osf_key_v2 = generate_verification_key(verification_type='password')
user.verification_key_v2 = osf_key_v2
user.verification_key = None
user.save()
reset_password_get_url = web_url_for(
'reset_password_get',
uid=user._id,
token=osf_key_v2['token']
)
res = self.app.get(
reset_password_get_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# new user confirm account should fail with BingPreview
def test_confirm_email_get_new_user_returns_403(self):
user = User.create_unconfirmed('[email protected]', 'abCD12#$', 'Unconfirmed User')
user.save()
confirm_url = user.get_confirmation_url('[email protected]', external=False)
res = self.app.get(
confirm_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# confirmation for adding new email should fail with BingPreview
def test_confirm_email_add_email_returns_403(self):
user = UserFactory()
user.add_unconfirmed_email('[email protected]')
user.save()
confirm_url = user.get_confirmation_url('[email protected]', external=False) + '?logout=1'
res = self.app.get(
confirm_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# confirmation for merging accounts should fail with BingPreview
def test_confirm_email_merge_account_returns_403(self):
user = UserFactory()
user_to_be_merged = UserFactory()
user.add_unconfirmed_email(user_to_be_merged.username)
user.save()
confirm_url = user.get_confirmation_url(user_to_be_merged.username, external=False) + '?logout=1'
res = self.app.get(
confirm_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# confirmation for new user claiming contributor should fail with BingPreview
def test_claim_user_form_new_user(self):
referrer = AuthUserFactory()
project = ProjectFactory(creator=referrer, is_public=True)
given_name = fake.name()
given_email = fake.email()
user = project.add_unregistered_contributor(
fullname=given_name,
email=given_email,
auth=Auth(user=referrer)
)
project.save()
claim_url = user.get_claim_url(project._primary_key)
res = self.app.get(
claim_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# confirmation for existing user claiming contributor should fail with BingPreview
def test_claim_user_form_existing_user(self):
referrer = AuthUserFactory()
project = ProjectFactory(creator=referrer, is_public=True)
auth_user = AuthUserFactory()
pending_user = project.add_unregistered_contributor(
fullname=auth_user.fullname,
email=None,
auth=Auth(user=referrer)
)
project.save()
claim_url = pending_user.get_claim_url(project._primary_key)
res = self.app.get(
claim_url,
            auth=auth_user.auth,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# account creation confirmation for ORCiD login should fail with BingPreview
def test_external_login_confirm_email_get_create_user(self):
name, email = fake.name(), fake.email()
provider_id = fake.ean()
external_identity = {
'service': {
provider_id: 'CREATE'
}
}
user = User.create_unconfirmed(
username=email,
password=str(fake.password()),
fullname=name,
external_identity=external_identity,
)
user.save()
create_url = user.get_confirmation_url(
user.username,
external_id_provider='service',
destination='dashboard'
)
res = self.app.get(
create_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
# account linking confirmation for ORCiD login should fail with BingPreview
def test_external_login_confirm_email_get_link_user(self):
user = UserFactory()
provider_id = fake.ean()
user.external_identity = {
'service': {
provider_id: 'LINK'
}
}
user.add_unconfirmed_email(user.username, external_identity='service')
user.save()
link_url = user.get_confirmation_url(
user.username,
external_id_provider='service',
destination='dashboard'
)
res = self.app.get(
link_url,
expect_errors=True,
headers={
'User-Agent': self.user_agent,
}
)
assert_equal(res.status_code, 403)
if __name__ == '__main__':
unittest.main()
|
kenshay/ImageScript | refs/heads/master | ProgramData/SystemFiles/Python/share/doc/networkx-2.2/examples/drawing/plot_knuth_miles.py | 4 | #!/usr/bin/env python
"""
===========
Knuth Miles
===========
`miles_graph()` returns an undirected graph over the 128 US cities from
the datafile `miles_dat.txt`. The cities each have location and population
data. The edges are labeled with the distance between the two cities.
This example is described in Section 1.1 in Knuth's book (see [1]_ and [2]_).
References
----------
.. [1] Donald E. Knuth,
"The Stanford GraphBase: A Platform for Combinatorial Computing",
ACM Press, New York, 1993.
.. [2] http://www-cs-faculty.stanford.edu/~knuth/sgb.html
"""
# Author: Aric Hagberg ([email protected])
# Copyright (C) 2004-2018 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
import re
import sys
import matplotlib.pyplot as plt
import networkx as nx
def miles_graph():
""" Return the cites example graph in miles_dat.txt
from the Stanford GraphBase.
"""
# open file miles_dat.txt.gz (or miles_dat.txt)
import gzip
fh = gzip.open('knuth_miles.txt.gz', 'r')
G = nx.Graph()
G.position = {}
G.population = {}
cities = []
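    # Each city line ("name[y,x]population") is followed by rows of distances
    # to every earlier city; ``cities`` is kept newest-first so index ``i``
    # below walks back through previously read cities.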
    numfind = re.compile(r"^\d+")  # compiled once; a leading digit marks a distance row
    for line in fh.readlines():
        line = line.decode()
        if line.startswith("*"):  # skip comments
            continue
        if numfind.match(line):  # this line is distances
dist = line.split()
for d in dist:
G.add_edge(city, cities[i], weight=int(d))
i = i + 1
else: # this line is a city, position, population
i = 1
(city, coordpop) = line.split("[")
cities.insert(0, city)
(coord, pop) = coordpop.split("]")
(y, x) = coord.split(",")
G.add_node(city)
# assign position - flip x axis for matplotlib, shift origin
G.position[city] = (-int(x) + 7500, int(y) - 3000)
G.population[city] = float(pop) / 1000.0
return G
if __name__ == '__main__':
G = miles_graph()
print("Loaded miles_dat.txt containing 128 cities.")
print("digraph has %d nodes with %d edges"
% (nx.number_of_nodes(G), nx.number_of_edges(G)))
    # make a new graph of the cities, with an edge if less than 300 miles between them
H = nx.Graph()
for v in G:
H.add_node(v)
for (u, v, d) in G.edges(data=True):
if d['weight'] < 300:
H.add_edge(u, v)
# draw with matplotlib/pylab
plt.figure(figsize=(8, 8))
# with nodes colored by degree sized by population
node_color = [float(H.degree(v)) for v in H]
nx.draw(H, G.position,
node_size=[G.population[v] for v in H],
node_color=node_color,
with_labels=False)
# scale the axes equally
plt.xlim(-5000, 500)
plt.ylim(-2000, 3500)
plt.show()
|
pierluigiferrari/fcn8s_tensorflow | refs/heads/master | cityscapesscripts/helpers/__init__.py | 12133432 | |
nesdis/djongo | refs/heads/master | tests/django_tests/tests/v22/tests/swappable_models/__init__.py | 12133432 | |
michael-hartmann/snippets | refs/heads/master | euler/03.py | 1 | from math import sqrt
def isprime(n):
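    """Return True if n is prime, using trial division up to sqrt(n)."""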
    for m in range(2, int(sqrt(n)) + 1):
if n % m == 0:
return False
return True
if __name__ == "__main__":
factor = 600851475143
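    # Collect the prime divisors of factor found below sqrt(factor) and print
    # the largest (Project Euler #3). This suffices here because 600851475143
    # has no prime factor above its square root.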
primes = []
    for n in range(2, int(sqrt(factor)) + 1):
if (factor % n) == 0 and isprime(n):
primes.append(n)
print(max(primes))
|
rajsadho/django | refs/heads/master | django/forms/models.py | 46 | """
Helper functions for creating Form classes from Django models
and database field objects.
"""
from __future__ import unicode_literals
from collections import OrderedDict
from itertools import chain
from django.core.exceptions import (
NON_FIELD_ERRORS, FieldError, ImproperlyConfigured, ValidationError,
)
from django.forms.fields import ChoiceField, Field
from django.forms.forms import BaseForm, DeclarativeFieldsMetaclass
from django.forms.formsets import BaseFormSet, formset_factory
from django.forms.utils import ErrorList
from django.forms.widgets import (
HiddenInput, MultipleHiddenInput, SelectMultiple,
)
from django.utils import six
from django.utils.encoding import force_text, smart_text
from django.utils.text import capfirst, get_text_list
from django.utils.translation import ugettext, ugettext_lazy as _
__all__ = (
'ModelForm', 'BaseModelForm', 'model_to_dict', 'fields_for_model',
'ModelChoiceField', 'ModelMultipleChoiceField', 'ALL_FIELDS',
'BaseModelFormSet', 'modelformset_factory', 'BaseInlineFormSet',
'inlineformset_factory', 'modelform_factory',
)
ALL_FIELDS = '__all__'
def construct_instance(form, instance, fields=None, exclude=None):
"""
Constructs and returns a model instance from the bound ``form``'s
``cleaned_data``, but does not save the returned instance to the
database.
"""
from django.db import models
opts = instance._meta
cleaned_data = form.cleaned_data
file_field_list = []
for f in opts.fields:
if not f.editable or isinstance(f, models.AutoField) \
or f.name not in cleaned_data:
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
# Defer saving file-type fields until after the other fields, so a
# callable upload_to can use the values from other fields.
if isinstance(f, models.FileField):
file_field_list.append(f)
else:
f.save_form_data(instance, cleaned_data[f.name])
for f in file_field_list:
f.save_form_data(instance, cleaned_data[f.name])
return instance
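# Illustrative sketch, not part of Django itself: construct_instance() can be
# used directly to build an unsaved instance from a bound form, e.g. to tweak
# it before saving. ``form`` is any valid bound ModelForm and ``instance`` a
# model instance of the matching class.
def _example_construct_unsaved(form, instance):
    assert form.is_valid()  # populates form.cleaned_data
    unsaved = construct_instance(form, instance)
    # ``unsaved`` carries the form's values but nothing has hit the database.
    return unsaved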
# ModelForms #################################################################
def model_to_dict(instance, fields=None, exclude=None):
"""
Returns a dict containing the data in ``instance`` suitable for passing as
a Form's ``initial`` keyword argument.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned dict.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned dict, even if they are listed in
the ``fields`` argument.
"""
# avoid a circular import
from django.db.models.fields.related import ManyToManyField
opts = instance._meta
data = {}
for f in chain(opts.concrete_fields, opts.virtual_fields, opts.many_to_many):
if not getattr(f, 'editable', False):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if isinstance(f, ManyToManyField):
# If the object doesn't have a primary key yet, just use an empty
# list for its m2m fields. Calling f.value_from_object will raise
# an exception.
if instance.pk is None:
data[f.name] = []
else:
# MultipleChoiceWidget needs a list of pks, not object instances.
qs = f.value_from_object(instance)
if qs._result_cache is not None:
data[f.name] = [item.pk for item in qs]
else:
data[f.name] = list(qs.values_list('pk', flat=True))
else:
data[f.name] = f.value_from_object(instance)
return data
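# Illustrative sketch, not part of Django itself: model_to_dict() is commonly
# used to seed a form from an existing instance. ``form_class`` is any
# ModelForm subclass matching ``instance``'s model (hypothetical here).
def _example_prefill_form(form_class, instance):
    initial = model_to_dict(instance)
    return form_class(initial=initial)  # unbound form pre-filled with data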
def fields_for_model(model, fields=None, exclude=None, widgets=None,
formfield_callback=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
    Returns an ``OrderedDict`` containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``localized_fields`` is a list of names of fields which should be localized.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
field_list = []
ignored = []
opts = model._meta
# Avoid circular import
from django.db.models.fields import Field as ModelField
sortable_virtual_fields = [f for f in opts.virtual_fields
if isinstance(f, ModelField)]
for f in sorted(chain(opts.concrete_fields, sortable_virtual_fields, opts.many_to_many)):
if not getattr(f, 'editable', False):
continue
if fields is not None and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
kwargs = {}
if widgets and f.name in widgets:
kwargs['widget'] = widgets[f.name]
if localized_fields == ALL_FIELDS or (localized_fields and f.name in localized_fields):
kwargs['localize'] = True
if labels and f.name in labels:
kwargs['label'] = labels[f.name]
if help_texts and f.name in help_texts:
kwargs['help_text'] = help_texts[f.name]
if error_messages and f.name in error_messages:
kwargs['error_messages'] = error_messages[f.name]
if field_classes and f.name in field_classes:
kwargs['form_class'] = field_classes[f.name]
if formfield_callback is None:
formfield = f.formfield(**kwargs)
elif not callable(formfield_callback):
raise TypeError('formfield_callback must be a function or callable')
else:
formfield = formfield_callback(f, **kwargs)
if formfield:
field_list.append((f.name, formfield))
else:
ignored.append(f.name)
field_dict = OrderedDict(field_list)
if fields:
field_dict = OrderedDict(
[(f, field_dict.get(f)) for f in fields
if ((not exclude) or (exclude and f not in exclude)) and (f not in ignored)]
)
return field_dict
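# Illustrative sketch, not part of Django itself: fields_for_model() returns
# the same OrderedDict of form fields that ModelFormMetaclass builds, which is
# useful for assembling form classes by hand. The field names used below
# ('title', 'author') are hypothetical.
def _example_fields(model):
    return fields_for_model(
        model,
        fields=['title', 'author'],
        widgets={'title': HiddenInput},   # per-field widget override
        labels={'author': 'Written by'},  # per-field label override
    )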
class ModelFormOptions(object):
def __init__(self, options=None):
self.model = getattr(options, 'model', None)
self.fields = getattr(options, 'fields', None)
self.exclude = getattr(options, 'exclude', None)
self.widgets = getattr(options, 'widgets', None)
self.localized_fields = getattr(options, 'localized_fields', None)
self.labels = getattr(options, 'labels', None)
self.help_texts = getattr(options, 'help_texts', None)
self.error_messages = getattr(options, 'error_messages', None)
self.field_classes = getattr(options, 'field_classes', None)
class ModelFormMetaclass(DeclarativeFieldsMetaclass):
def __new__(mcs, name, bases, attrs):
formfield_callback = attrs.pop('formfield_callback', None)
new_class = super(ModelFormMetaclass, mcs).__new__(mcs, name, bases, attrs)
if bases == (BaseModelForm,):
return new_class
opts = new_class._meta = ModelFormOptions(getattr(new_class, 'Meta', None))
# We check if a string was passed to `fields` or `exclude`,
# which is likely to be a mistake where the user typed ('foo') instead
# of ('foo',)
for opt in ['fields', 'exclude', 'localized_fields']:
value = getattr(opts, opt)
if isinstance(value, six.string_types) and value != ALL_FIELDS:
msg = ("%(model)s.Meta.%(opt)s cannot be a string. "
"Did you mean to type: ('%(value)s',)?" % {
'model': new_class.__name__,
'opt': opt,
'value': value,
})
raise TypeError(msg)
if opts.model:
# If a model is defined, extract form fields from it.
if opts.fields is None and opts.exclude is None:
raise ImproperlyConfigured(
"Creating a ModelForm without either the 'fields' attribute "
"or the 'exclude' attribute is prohibited; form %s "
"needs updating." % name
)
if opts.fields == ALL_FIELDS:
# Sentinel for fields_for_model to indicate "get the list of
# fields from the model"
opts.fields = None
fields = fields_for_model(opts.model, opts.fields, opts.exclude,
opts.widgets, formfield_callback,
opts.localized_fields, opts.labels,
opts.help_texts, opts.error_messages,
opts.field_classes)
# make sure opts.fields doesn't specify an invalid field
none_model_fields = [k for k, v in six.iteritems(fields) if not v]
missing_fields = (set(none_model_fields) -
set(new_class.declared_fields.keys()))
if missing_fields:
message = 'Unknown field(s) (%s) specified for %s'
message = message % (', '.join(missing_fields),
opts.model.__name__)
raise FieldError(message)
# Override default model fields with any custom declared ones
# (plus, include all the other declared fields).
fields.update(new_class.declared_fields)
else:
fields = new_class.declared_fields
new_class.base_fields = fields
return new_class
class BaseModelForm(BaseForm):
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList, label_suffix=None,
empty_permitted=False, instance=None):
opts = self._meta
if opts.model is None:
raise ValueError('ModelForm has no model class specified.')
if instance is None:
# if we didn't get an instance, instantiate a new one
self.instance = opts.model()
object_data = {}
else:
self.instance = instance
object_data = model_to_dict(instance, opts.fields, opts.exclude)
# if initial was provided, it should override the values from instance
if initial is not None:
object_data.update(initial)
# self._validate_unique will be set to True by BaseModelForm.clean().
# It is False by default so overriding self.clean() and failing to call
# super will stop validate_unique from being called.
self._validate_unique = False
super(BaseModelForm, self).__init__(data, files, auto_id, prefix, object_data,
error_class, label_suffix, empty_permitted)
# Apply ``limit_choices_to`` to each field.
for field_name in self.fields:
formfield = self.fields[field_name]
if hasattr(formfield, 'queryset') and hasattr(formfield, 'get_limit_choices_to'):
limit_choices_to = formfield.get_limit_choices_to()
if limit_choices_to is not None:
formfield.queryset = formfield.queryset.complex_filter(limit_choices_to)
def _get_validation_exclusions(self):
"""
For backwards-compatibility, several types of fields need to be
excluded from model validation. See the following tickets for
details: #12507, #12521, #12553
"""
exclude = []
# Build up a list of fields that should be excluded from model field
# validation and unique checks.
for f in self.instance._meta.fields:
field = f.name
# Exclude fields that aren't on the form. The developer may be
# adding these values to the model after form validation.
if field not in self.fields:
exclude.append(f.name)
# Don't perform model validation on fields that were defined
# manually on the form and excluded via the ModelForm's Meta
# class. See #12901.
elif self._meta.fields and field not in self._meta.fields:
exclude.append(f.name)
elif self._meta.exclude and field in self._meta.exclude:
exclude.append(f.name)
# Exclude fields that failed form validation. There's no need for
# the model fields to validate them as well.
elif field in self._errors.keys():
exclude.append(f.name)
# Exclude empty fields that are not required by the form, if the
# underlying model field is required. This keeps the model field
# from raising a required error. Note: don't exclude the field from
# validation if the model field allows blanks. If it does, the blank
# value may be included in a unique check, so cannot be excluded
# from validation.
else:
form_field = self.fields[field]
field_value = self.cleaned_data.get(field)
if not f.blank and not form_field.required and field_value in form_field.empty_values:
exclude.append(f.name)
return exclude
def clean(self):
self._validate_unique = True
return self.cleaned_data
def _update_errors(self, errors):
# Override any validation error messages defined at the model level
# with those defined at the form level.
opts = self._meta
# Allow the model generated by construct_instance() to raise
# ValidationError and have them handled in the same way as others.
if hasattr(errors, 'error_dict'):
error_dict = errors.error_dict
else:
error_dict = {NON_FIELD_ERRORS: errors}
for field, messages in error_dict.items():
if (field == NON_FIELD_ERRORS and opts.error_messages and
NON_FIELD_ERRORS in opts.error_messages):
error_messages = opts.error_messages[NON_FIELD_ERRORS]
elif field in self.fields:
error_messages = self.fields[field].error_messages
else:
continue
for message in messages:
if (isinstance(message, ValidationError) and
message.code in error_messages):
message.message = error_messages[message.code]
self.add_error(None, errors)
def _post_clean(self):
opts = self._meta
exclude = self._get_validation_exclusions()
try:
self.instance = construct_instance(self, self.instance, opts.fields, exclude)
except ValidationError as e:
self._update_errors(e)
# Foreign Keys being used to represent inline relationships
# are excluded from basic field value validation. This is for two
# reasons: firstly, the value may not be supplied (#12507; the
# case of providing new values to the admin); secondly the
# object being referred to may not yet fully exist (#12749).
# However, these fields *must* be included in uniqueness checks,
# so this can't be part of _get_validation_exclusions().
for name, field in self.fields.items():
if isinstance(field, InlineForeignKeyField):
exclude.append(name)
try:
self.instance.full_clean(exclude=exclude, validate_unique=False)
except ValidationError as e:
self._update_errors(e)
# Validate uniqueness if needed.
if self._validate_unique:
self.validate_unique()
def validate_unique(self):
"""
Calls the instance's validate_unique() method and updates the form's
validation errors if any were raised.
"""
exclude = self._get_validation_exclusions()
try:
self.instance.validate_unique(exclude=exclude)
except ValidationError as e:
self._update_errors(e)
def _save_m2m(self):
"""
Save the many-to-many fields and generic relations for this form.
"""
cleaned_data = self.cleaned_data
exclude = self._meta.exclude
fields = self._meta.fields
opts = self.instance._meta
        # Note that for historical reasons we also want to include
# virtual_fields here. (GenericRelation was previously a fake
# m2m field).
for f in chain(opts.many_to_many, opts.virtual_fields):
if not hasattr(f, 'save_form_data'):
continue
if fields and f.name not in fields:
continue
if exclude and f.name in exclude:
continue
if f.name in cleaned_data:
f.save_form_data(self.instance, cleaned_data[f.name])
def save(self, commit=True):
"""
Save this form's self.instance object if commit=True. Otherwise, add
a save_m2m() method to the form which can be called after the instance
is saved manually at a later time. Return the model instance.
"""
if self.errors:
raise ValueError(
"The %s could not be %s because the data didn't validate." % (
self.instance._meta.object_name,
'created' if self.instance._state.adding else 'changed',
)
)
if commit:
# If committing, save the instance and the m2m data immediately.
self.instance.save()
self._save_m2m()
else:
# If not committing, add a method to the form to allow deferred
# saving of m2m data.
self.save_m2m = self._save_m2m
return self.instance
save.alters_data = True
class ModelForm(six.with_metaclass(ModelFormMetaclass, BaseModelForm)):
pass
def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
formfield_callback=None, widgets=None, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
field_classes=None):
"""
Returns a ModelForm containing form fields for the given model.
``fields`` is an optional list of field names. If provided, only the named
fields will be included in the returned fields. If omitted or '__all__',
all fields will be used.
``exclude`` is an optional list of field names. If provided, the named
fields will be excluded from the returned fields, even if they are listed
in the ``fields`` argument.
``widgets`` is a dictionary of model field names mapped to a widget.
``localized_fields`` is a list of names of fields which should be localized.
``formfield_callback`` is a callable that takes a model field and returns
a form field.
``labels`` is a dictionary of model field names mapped to a label.
``help_texts`` is a dictionary of model field names mapped to a help text.
``error_messages`` is a dictionary of model field names mapped to a
dictionary of error messages.
``field_classes`` is a dictionary of model field names mapped to a form
field class.
"""
# Create the inner Meta class. FIXME: ideally, we should be able to
# construct a ModelForm without creating and passing in a temporary
# inner class.
# Build up a list of attributes that the Meta object will have.
attrs = {'model': model}
if fields is not None:
attrs['fields'] = fields
if exclude is not None:
attrs['exclude'] = exclude
if widgets is not None:
attrs['widgets'] = widgets
if localized_fields is not None:
attrs['localized_fields'] = localized_fields
if labels is not None:
attrs['labels'] = labels
if help_texts is not None:
attrs['help_texts'] = help_texts
if error_messages is not None:
attrs['error_messages'] = error_messages
if field_classes is not None:
attrs['field_classes'] = field_classes
# If parent form class already has an inner Meta, the Meta we're
# creating needs to inherit from the parent's inner meta.
parent = (object,)
if hasattr(form, 'Meta'):
parent = (form.Meta, object)
Meta = type(str('Meta'), parent, attrs)
# Give this new form class a reasonable name.
class_name = model.__name__ + str('Form')
# Class attributes for the new form class.
form_class_attrs = {
'Meta': Meta,
'formfield_callback': formfield_callback
}
if (getattr(Meta, 'fields', None) is None and
getattr(Meta, 'exclude', None) is None):
raise ImproperlyConfigured(
"Calling modelform_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
# Instantiate type(form) in order to use the same metaclass as form.
return type(form)(class_name, (form,), form_class_attrs)
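# Illustrative sketch, not part of Django itself: building a ModelForm class
# at runtime. ``Book`` is a hypothetical model with 'title'/'author' fields.
def _example_modelform(Book):
    BookForm = modelform_factory(Book, fields=['title', 'author'])
    return BookForm()  # unbound form, ready for rendering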
# ModelFormSets ##############################################################
class BaseModelFormSet(BaseFormSet):
"""
A ``FormSet`` for editing a queryset and/or adding new objects to it.
"""
model = None
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
queryset=None, **kwargs):
self.queryset = queryset
self.initial_extra = kwargs.pop('initial', None)
defaults = {'data': data, 'files': files, 'auto_id': auto_id, 'prefix': prefix}
defaults.update(kwargs)
super(BaseModelFormSet, self).__init__(**defaults)
def initial_form_count(self):
"""Returns the number of forms that are required in this FormSet."""
if not (self.data or self.files):
return len(self.get_queryset())
return super(BaseModelFormSet, self).initial_form_count()
def _existing_object(self, pk):
if not hasattr(self, '_object_dict'):
self._object_dict = {o.pk: o for o in self.get_queryset()}
return self._object_dict.get(pk)
def _get_to_python(self, field):
"""
If the field is a related field, fetch the concrete field's (that
is, the ultimate pointed-to field's) to_python.
"""
while field.remote_field is not None:
field = field.remote_field.get_related_field()
return field.to_python
def _construct_form(self, i, **kwargs):
if self.is_bound and i < self.initial_form_count():
pk_key = "%s-%s" % (self.add_prefix(i), self.model._meta.pk.name)
pk = self.data[pk_key]
pk_field = self.model._meta.pk
to_python = self._get_to_python(pk_field)
pk = to_python(pk)
kwargs['instance'] = self._existing_object(pk)
if i < self.initial_form_count() and 'instance' not in kwargs:
kwargs['instance'] = self.get_queryset()[i]
if i >= self.initial_form_count() and self.initial_extra:
# Set initial values for extra forms
try:
kwargs['initial'] = self.initial_extra[i - self.initial_form_count()]
except IndexError:
pass
return super(BaseModelFormSet, self)._construct_form(i, **kwargs)
def get_queryset(self):
if not hasattr(self, '_queryset'):
if self.queryset is not None:
qs = self.queryset
else:
qs = self.model._default_manager.get_queryset()
# If the queryset isn't already ordered we need to add an
# artificial ordering here to make sure that all formsets
# constructed from this queryset have the same form order.
if not qs.ordered:
qs = qs.order_by(self.model._meta.pk.name)
# Removed queryset limiting here. As per discussion re: #13023
# on django-dev, max_num should not prevent existing
# related objects/inlines from being displayed.
self._queryset = qs
return self._queryset
def save_new(self, form, commit=True):
"""Saves and returns a new model instance for the given form."""
return form.save(commit=commit)
def save_existing(self, form, instance, commit=True):
"""Saves and returns an existing model instance for the given form."""
return form.save(commit=commit)
def delete_existing(self, obj, commit=True):
"""Deletes an existing model instance."""
if commit:
obj.delete()
def save(self, commit=True):
"""Saves model instances for every form, adding and changing instances
as necessary, and returns the list of instances.
"""
if not commit:
self.saved_forms = []
def save_m2m():
for form in self.saved_forms:
form.save_m2m()
self.save_m2m = save_m2m
return self.save_existing_objects(commit) + self.save_new_objects(commit)
save.alters_data = True
def clean(self):
self.validate_unique()
def validate_unique(self):
# Collect unique_checks and date_checks to run from all the forms.
all_unique_checks = set()
all_date_checks = set()
forms_to_delete = self.deleted_forms
valid_forms = [form for form in self.forms if form.is_valid() and form not in forms_to_delete]
for form in valid_forms:
exclude = form._get_validation_exclusions()
unique_checks, date_checks = form.instance._get_unique_checks(exclude=exclude)
all_unique_checks = all_unique_checks.union(set(unique_checks))
all_date_checks = all_date_checks.union(set(date_checks))
errors = []
# Do each of the unique checks (unique and unique_together)
for uclass, unique_check in all_unique_checks:
seen_data = set()
for form in valid_forms:
# get data for each field of each of unique_check
row_data = (form.cleaned_data[field]
for field in unique_check if field in form.cleaned_data)
# Reduce Model instances to their primary key values
row_data = tuple(d._get_pk_val() if hasattr(d, '_get_pk_val') else d
for d in row_data)
if row_data and None not in row_data:
# if we've already seen it then we have a uniqueness failure
if row_data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_unique_error_message(unique_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
for field in unique_check:
if field in form.cleaned_data:
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(row_data)
# iterate over each of the date checks now
for date_check in all_date_checks:
seen_data = set()
uclass, lookup, field, unique_for = date_check
for form in valid_forms:
# see if we have data for both fields
if (form.cleaned_data and form.cleaned_data[field] is not None
and form.cleaned_data[unique_for] is not None):
# if it's a date lookup we need to get the data for all the fields
if lookup == 'date':
date = form.cleaned_data[unique_for]
date_data = (date.year, date.month, date.day)
# otherwise it's just the attribute on the date/datetime
# object
else:
date_data = (getattr(form.cleaned_data[unique_for], lookup),)
data = (form.cleaned_data[field],) + date_data
# if we've already seen it then we have a uniqueness failure
if data in seen_data:
# poke error messages into the right places and mark
# the form as invalid
errors.append(self.get_date_error_message(date_check))
form._errors[NON_FIELD_ERRORS] = self.error_class([self.get_form_error()])
# remove the data from the cleaned_data dict since it was invalid
del form.cleaned_data[field]
# mark the data as seen
seen_data.add(data)
if errors:
raise ValidationError(errors)
def get_unique_error_message(self, unique_check):
if len(unique_check) == 1:
return ugettext("Please correct the duplicate data for %(field)s.") % {
"field": unique_check[0],
}
else:
return ugettext("Please correct the duplicate data for %(field)s, "
"which must be unique.") % {
"field": get_text_list(unique_check, six.text_type(_("and"))),
}
def get_date_error_message(self, date_check):
return ugettext("Please correct the duplicate data for %(field_name)s "
"which must be unique for the %(lookup)s in %(date_field)s.") % {
'field_name': date_check[2],
'date_field': date_check[3],
'lookup': six.text_type(date_check[1]),
}
def get_form_error(self):
return ugettext("Please correct the duplicate values below.")
def save_existing_objects(self, commit=True):
self.changed_objects = []
self.deleted_objects = []
if not self.initial_forms:
return []
saved_instances = []
forms_to_delete = self.deleted_forms
for form in self.initial_forms:
obj = form.instance
if form in forms_to_delete:
# If the pk is None, it means that the object can't be
# deleted again. Possible reason for this is that the
# object was already deleted from the DB. Refs #14877.
if obj.pk is None:
continue
self.deleted_objects.append(obj)
self.delete_existing(obj, commit=commit)
elif form.has_changed():
self.changed_objects.append((obj, form.changed_data))
saved_instances.append(self.save_existing(form, obj, commit=commit))
if not commit:
self.saved_forms.append(form)
return saved_instances
def save_new_objects(self, commit=True):
self.new_objects = []
for form in self.extra_forms:
if not form.has_changed():
continue
# If someone has marked an add form for deletion, don't save the
# object.
if self.can_delete and self._should_delete_form(form):
continue
self.new_objects.append(self.save_new(form, commit=commit))
if not commit:
self.saved_forms.append(form)
return self.new_objects
def add_fields(self, form, index):
"""Add a hidden field for the object's primary key."""
from django.db.models import AutoField, OneToOneField, ForeignKey
self._pk_field = pk = self.model._meta.pk
# If a pk isn't editable, then it won't be on the form, so we need to
# add it here so we can tell which object is which when we get the
# data back. Generally, pk.editable should be false, but for some
        # reason, the editable attribute of auto_created pk fields and
        # AutoFields is True, so check for that as well.
def pk_is_not_editable(pk):
return (
(not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or (
pk.remote_field and pk.remote_field.parent_link
and pk_is_not_editable(pk.remote_field.model._meta.pk)
)
)
if pk_is_not_editable(pk) or pk.name not in form.fields:
if form.is_bound:
# If we're adding the related instance, ignore its primary key
# as it could be an auto-generated default which isn't actually
# in the database.
pk_value = None if form.instance._state.adding else form.instance.pk
else:
try:
if index is not None:
pk_value = self.get_queryset()[index].pk
else:
pk_value = None
except IndexError:
pk_value = None
if isinstance(pk, OneToOneField) or isinstance(pk, ForeignKey):
qs = pk.remote_field.model._default_manager.get_queryset()
else:
qs = self.model._default_manager.get_queryset()
qs = qs.using(form.instance._state.db)
if form._meta.widgets:
widget = form._meta.widgets.get(self._pk_field.name, HiddenInput)
else:
widget = HiddenInput
form.fields[self._pk_field.name] = ModelChoiceField(qs, initial=pk_value, required=False, widget=widget)
super(BaseModelFormSet, self).add_fields(form, index)
def modelformset_factory(model, form=ModelForm, formfield_callback=None,
formset=BaseModelFormSet, extra=1, can_delete=False,
can_order=False, max_num=None, fields=None, exclude=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns a FormSet class for the given Django model class.
"""
meta = getattr(form, 'Meta', None)
if meta is None:
meta = type(str('Meta'), (object,), {})
if (getattr(meta, 'fields', fields) is None and
getattr(meta, 'exclude', exclude) is None):
raise ImproperlyConfigured(
"Calling modelformset_factory without defining 'fields' or "
"'exclude' explicitly is prohibited."
)
form = modelform_factory(model, form=form, fields=fields, exclude=exclude,
formfield_callback=formfield_callback,
widgets=widgets, localized_fields=localized_fields,
labels=labels, help_texts=help_texts,
error_messages=error_messages, field_classes=field_classes)
FormSet = formset_factory(form, formset, extra=extra, min_num=min_num, max_num=max_num,
can_order=can_order, can_delete=can_delete,
validate_min=validate_min, validate_max=validate_max)
FormSet.model = model
return FormSet
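# Illustrative sketch, not part of Django itself: a formset over an existing
# queryset. ``Author`` is a hypothetical model with a 'name' field.
def _example_modelformset(Author):
    AuthorFormSet = modelformset_factory(Author, fields=['name'], extra=1)
    return AuthorFormSet(queryset=Author.objects.all())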
# InlineFormSets #############################################################
class BaseInlineFormSet(BaseModelFormSet):
"""A formset for child objects related to a parent."""
def __init__(self, data=None, files=None, instance=None,
save_as_new=False, prefix=None, queryset=None, **kwargs):
if instance is None:
self.instance = self.fk.remote_field.model()
else:
self.instance = instance
self.save_as_new = save_as_new
if queryset is None:
queryset = self.model._default_manager
if self.instance.pk is not None:
qs = queryset.filter(**{self.fk.name: self.instance})
else:
qs = queryset.none()
super(BaseInlineFormSet, self).__init__(data, files, prefix=prefix,
queryset=qs, **kwargs)
def initial_form_count(self):
if self.save_as_new:
return 0
return super(BaseInlineFormSet, self).initial_form_count()
def _construct_form(self, i, **kwargs):
form = super(BaseInlineFormSet, self)._construct_form(i, **kwargs)
if self.save_as_new:
            # Remove the primary key from the form's data; we are only
            # creating new instances.
form.data[form.add_prefix(self._pk_field.name)] = None
# Remove the foreign key from the form's data
form.data[form.add_prefix(self.fk.name)] = None
# Set the fk value here so that the form can do its validation.
fk_value = self.instance.pk
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
fk_value = getattr(self.instance, self.fk.remote_field.field_name)
fk_value = getattr(fk_value, 'pk', fk_value)
setattr(form.instance, self.fk.get_attname(), fk_value)
return form
@classmethod
def get_default_prefix(cls):
return cls.fk.remote_field.get_accessor_name(model=cls.model).replace('+', '')
def save_new(self, form, commit=True):
# Ensure the latest copy of the related instance is present on each
# form (it may have been saved after the formset was originally
# instantiated).
setattr(form.instance, self.fk.name, self.instance)
# Use commit=False so we can assign the parent key afterwards, then
# save the object.
obj = form.save(commit=False)
pk_value = getattr(self.instance, self.fk.remote_field.field_name)
setattr(obj, self.fk.get_attname(), getattr(pk_value, 'pk', pk_value))
if commit:
obj.save()
# form.save_m2m() can be called via the formset later on if commit=False
if commit and hasattr(form, 'save_m2m'):
form.save_m2m()
return obj
def add_fields(self, form, index):
super(BaseInlineFormSet, self).add_fields(form, index)
if self._pk_field == self.fk:
name = self._pk_field.name
kwargs = {'pk_field': True}
else:
# The foreign key field might not be on the form, so we poke at the
# Model field to get the label, since we need that for error messages.
name = self.fk.name
kwargs = {
'label': getattr(form.fields.get(name), 'label', capfirst(self.fk.verbose_name))
}
if self.fk.remote_field.field_name != self.fk.remote_field.model._meta.pk.name:
kwargs['to_field'] = self.fk.remote_field.field_name
# If we're adding a new object, ignore a parent's auto-generated key
# as it will be regenerated on the save request.
if self.instance._state.adding:
if kwargs.get('to_field') is not None:
to_field = self.instance._meta.get_field(kwargs['to_field'])
else:
to_field = self.instance._meta.pk
if to_field.has_default():
setattr(self.instance, to_field.attname, None)
form.fields[name] = InlineForeignKeyField(self.instance, **kwargs)
# Add the generated field to form._meta.fields if it's defined to make
# sure validation isn't skipped on that field.
if form._meta.fields:
if isinstance(form._meta.fields, tuple):
form._meta.fields = list(form._meta.fields)
form._meta.fields.append(self.fk.name)
def get_unique_error_message(self, unique_check):
unique_check = [field for field in unique_check if field != self.fk.name]
return super(BaseInlineFormSet, self).get_unique_error_message(unique_check)
def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
"""
Finds and returns the ForeignKey from model to parent if there is one
(returns None if can_fail is True and no such field exists). If fk_name is
provided, assume it is the name of the ForeignKey field. Unless can_fail is
True, an exception is raised if there is no ForeignKey from model to
parent_model.
"""
# avoid circular import
from django.db.models import ForeignKey
opts = model._meta
if fk_name:
fks_to_parent = [f for f in opts.fields if f.name == fk_name]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
if not isinstance(fk, ForeignKey) or \
(fk.remote_field.model != parent_model and
fk.remote_field.model not in parent_model._meta.get_parent_list()):
raise ValueError(
"fk_name '%s' is not a ForeignKey to '%s'." % (fk_name, parent_model._meta.label)
)
elif len(fks_to_parent) == 0:
raise ValueError(
"'%s' has no field named '%s'." % (model._meta.label, fk_name)
)
else:
# Try to discover what the ForeignKey from model to parent_model is
fks_to_parent = [
f for f in opts.fields
if isinstance(f, ForeignKey)
and (f.remote_field.model == parent_model
or f.remote_field.model in parent_model._meta.get_parent_list())
]
if len(fks_to_parent) == 1:
fk = fks_to_parent[0]
elif len(fks_to_parent) == 0:
if can_fail:
return
raise ValueError(
"'%s' has no ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
else:
raise ValueError(
"'%s' has more than one ForeignKey to '%s'." % (
model._meta.label,
parent_model._meta.label,
)
)
return fk
def inlineformset_factory(parent_model, model, form=ModelForm,
formset=BaseInlineFormSet, fk_name=None,
fields=None, exclude=None, extra=3, can_order=False,
can_delete=True, max_num=None, formfield_callback=None,
widgets=None, validate_max=False, localized_fields=None,
labels=None, help_texts=None, error_messages=None,
min_num=None, validate_min=False, field_classes=None):
"""
Returns an ``InlineFormSet`` for the given kwargs.
You must provide ``fk_name`` if ``model`` has more than one ``ForeignKey``
to ``parent_model``.
"""
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
# enforce a max_num=1 when the foreign key to the parent model is unique.
if fk.unique:
max_num = 1
kwargs = {
'form': form,
'formfield_callback': formfield_callback,
'formset': formset,
'extra': extra,
'can_delete': can_delete,
'can_order': can_order,
'fields': fields,
'exclude': exclude,
'min_num': min_num,
'max_num': max_num,
'widgets': widgets,
'validate_min': validate_min,
'validate_max': validate_max,
'localized_fields': localized_fields,
'labels': labels,
'help_texts': help_texts,
'error_messages': error_messages,
'field_classes': field_classes,
}
FormSet = modelformset_factory(model, **kwargs)
FormSet.fk = fk
return FormSet
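# Illustrative sketch, not part of Django itself: editing child Books inline
# on a parent Author. Both models and ``author`` are hypothetical; the fk is
# discovered by _get_foreign_key() above (pass fk_name= if there are several).
def _example_inlineformset(Author, Book, author):
    BookFormSet = inlineformset_factory(Author, Book, fields=['title'])
    return BookFormSet(instance=author)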
# Fields #####################################################################
class InlineForeignKeyField(Field):
"""
    A basic integer field that validates that the given value matches the
    given parent instance in an inline.
"""
widget = HiddenInput
default_error_messages = {
'invalid_choice': _('The inline foreign key did not match the parent instance primary key.'),
}
def __init__(self, parent_instance, *args, **kwargs):
self.parent_instance = parent_instance
self.pk_field = kwargs.pop("pk_field", False)
self.to_field = kwargs.pop("to_field", None)
if self.parent_instance is not None:
if self.to_field:
kwargs["initial"] = getattr(self.parent_instance, self.to_field)
else:
kwargs["initial"] = self.parent_instance.pk
kwargs["required"] = False
super(InlineForeignKeyField, self).__init__(*args, **kwargs)
def clean(self, value):
if value in self.empty_values:
if self.pk_field:
return None
            # if there is no value, act as we did before.
return self.parent_instance
        # ensure that we compare the values as equal types.
if self.to_field:
orig = getattr(self.parent_instance, self.to_field)
else:
orig = self.parent_instance.pk
if force_text(value) != force_text(orig):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return self.parent_instance
def has_changed(self, initial, data):
return False
class ModelChoiceIterator(object):
def __init__(self, field):
self.field = field
self.queryset = field.queryset
def __iter__(self):
if self.field.empty_label is not None:
yield ("", self.field.empty_label)
for obj in self.queryset.iterator():
yield self.choice(obj)
def __len__(self):
return (len(self.queryset) +
(1 if self.field.empty_label is not None else 0))
def choice(self, obj):
return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
class ModelChoiceField(ChoiceField):
"""A ChoiceField whose choices are a model QuerySet."""
# This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation.
default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of'
' the available choices.'),
}
def __init__(self, queryset, empty_label="---------",
required=True, widget=None, label=None, initial=None,
help_text='', to_field_name=None, limit_choices_to=None,
*args, **kwargs):
if required and (initial is not None):
self.empty_label = None
else:
self.empty_label = empty_label
# Call Field instead of ChoiceField __init__() because we don't need
# ChoiceField.__init__().
Field.__init__(self, required, widget, label, initial, help_text,
*args, **kwargs)
self.queryset = queryset
self.limit_choices_to = limit_choices_to # limit the queryset later.
self.to_field_name = to_field_name
def get_limit_choices_to(self):
"""
Returns ``limit_choices_to`` for this form field.
If it is a callable, it will be invoked and the result will be
returned.
"""
if callable(self.limit_choices_to):
return self.limit_choices_to()
return self.limit_choices_to
def __deepcopy__(self, memo):
result = super(ChoiceField, self).__deepcopy__(memo)
# Need to force a new ModelChoiceIterator to be created, bug #11183
result.queryset = result.queryset
return result
def _get_queryset(self):
return self._queryset
def _set_queryset(self, queryset):
self._queryset = queryset
self.widget.choices = self.choices
queryset = property(_get_queryset, _set_queryset)
    # This method will be used to create object labels by the ModelChoiceIterator.
# Override it to customize the label.
def label_from_instance(self, obj):
"""
This method is used to convert objects into strings; it's used to
generate the labels for the choices presented by this object. Subclasses
can override this method to customize the display of the choices.
"""
return smart_text(obj)
def _get_choices(self):
# If self._choices is set, then somebody must have manually set
# the property self.choices. In this case, just return self._choices.
if hasattr(self, '_choices'):
return self._choices
# Otherwise, execute the QuerySet in self.queryset to determine the
# choices dynamically. Return a fresh ModelChoiceIterator that has not been
# consumed. Note that we're instantiating a new ModelChoiceIterator *each*
# time _get_choices() is called (and, thus, each time self.choices is
# accessed) so that we can ensure the QuerySet has not been consumed. This
# construct might look complicated but it allows for lazy evaluation of
# the queryset.
return ModelChoiceIterator(self)
choices = property(_get_choices, ChoiceField._set_choices)
def prepare_value(self, value):
if hasattr(value, '_meta'):
if self.to_field_name:
return value.serializable_value(self.to_field_name)
else:
return value.pk
return super(ModelChoiceField, self).prepare_value(value)
def to_python(self, value):
if value in self.empty_values:
return None
try:
key = self.to_field_name or 'pk'
value = self.queryset.get(**{key: value})
except (ValueError, TypeError, self.queryset.model.DoesNotExist):
raise ValidationError(self.error_messages['invalid_choice'], code='invalid_choice')
return value
def validate(self, value):
return Field.validate(self, value)
def has_changed(self, initial, data):
initial_value = initial if initial is not None else ''
data_value = data if data is not None else ''
return force_text(self.prepare_value(initial_value)) != force_text(data_value)
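# Illustrative sketch, not part of Django itself: the hook for customizing
# option labels is label_from_instance(), per the comment above. ``full_name``
# is a hypothetical attribute on the chosen model.
class _ExampleNamedChoiceField(ModelChoiceField):
    def label_from_instance(self, obj):
        return "%s (#%s)" % (obj.full_name, obj.pk)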
class ModelMultipleChoiceField(ModelChoiceField):
"""A MultipleChoiceField whose choices are a model QuerySet."""
widget = SelectMultiple
hidden_widget = MultipleHiddenInput
default_error_messages = {
'list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
' available choices.'),
'invalid_pk_value': _('"%(pk)s" is not a valid value for a primary key.')
}
def __init__(self, queryset, required=True, widget=None, label=None,
initial=None, help_text='', *args, **kwargs):
super(ModelMultipleChoiceField, self).__init__(queryset, None,
required, widget, label, initial, help_text, *args, **kwargs)
def to_python(self, value):
if not value:
return []
return list(self._check_values(value))
def clean(self, value):
if self.required and not value:
raise ValidationError(self.error_messages['required'], code='required')
elif not self.required and not value:
return self.queryset.none()
if not isinstance(value, (list, tuple)):
raise ValidationError(self.error_messages['list'], code='list')
qs = self._check_values(value)
# Since this overrides the inherited ModelChoiceField.clean
# we run custom validators here
self.run_validators(value)
return qs
def _check_values(self, value):
"""
Given a list of possible PK values, returns a QuerySet of the
corresponding objects. Raises a ValidationError if a given value is
invalid (not a valid PK, not in the queryset, etc.)
"""
key = self.to_field_name or 'pk'
# deduplicate given values to avoid creating many querysets or
        # requiring the database backend to deduplicate efficiently.
try:
value = frozenset(value)
except TypeError:
# list of lists isn't hashable, for example
raise ValidationError(
self.error_messages['list'],
code='list',
)
for pk in value:
try:
self.queryset.filter(**{key: pk})
except (ValueError, TypeError):
raise ValidationError(
self.error_messages['invalid_pk_value'],
code='invalid_pk_value',
params={'pk': pk},
)
qs = self.queryset.filter(**{'%s__in' % key: value})
pks = set(force_text(getattr(o, key)) for o in qs)
for val in value:
if force_text(val) not in pks:
raise ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': val},
)
return qs
def prepare_value(self, value):
if (hasattr(value, '__iter__') and
not isinstance(value, six.text_type) and
not hasattr(value, '_meta')):
return [super(ModelMultipleChoiceField, self).prepare_value(v) for v in value]
return super(ModelMultipleChoiceField, self).prepare_value(value)
def has_changed(self, initial, data):
if initial is None:
initial = []
if data is None:
data = []
if len(initial) != len(data):
return True
initial_set = set(force_text(value) for value in self.prepare_value(initial))
data_set = set(force_text(value) for value in data)
return data_set != initial_set
def modelform_defines_fields(form_class):
return (form_class is not None and (
hasattr(form_class, '_meta') and
(form_class._meta.fields is not None or
form_class._meta.exclude is not None)
))
|
snehasi/servo | refs/heads/master | tests/wpt/web-platform-tests/tools/pytest/testing/test_skipping.py | 165 | import pytest
import sys
from _pytest.skipping import MarkEvaluator, folded_skips, pytest_runtest_setup
from _pytest.runner import runtestprotocol
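# These tests exercise MarkEvaluator, the internal helper pytest uses to
# evaluate skipif/xfail marker arguments: string conditions are eval'd (with
# os, sys and the test config available), while plain booleans require an
# explicit reason= (see test_marked_skip_with_not_string below).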
class TestEvaluator:
def test_no_marker(self, testdir):
item = testdir.getitem("def test_func(): pass")
evalskipif = MarkEvaluator(item, 'skipif')
assert not evalskipif
assert not evalskipif.istrue()
def test_marked_no_args(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xyz
def test_func():
pass
""")
ev = MarkEvaluator(item, 'xyz')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == ""
assert not ev.get("run", False)
def test_marked_one_arg(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xyz("hasattr(os, 'sep')")
def test_func():
pass
""")
ev = MarkEvaluator(item, 'xyz')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "condition: hasattr(os, 'sep')"
@pytest.mark.skipif('sys.version_info[0] >= 3')
def test_marked_one_arg_unicode(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xyz(u"hasattr(os, 'sep')")
def test_func():
pass
""")
ev = MarkEvaluator(item, 'xyz')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "condition: hasattr(os, 'sep')"
def test_marked_one_arg_with_reason(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xyz("hasattr(os, 'sep')", attr=2, reason="hello world")
def test_func():
pass
""")
ev = MarkEvaluator(item, 'xyz')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "hello world"
assert ev.get("attr") == 2
def test_marked_one_arg_twice(self, testdir):
lines = [
'''@pytest.mark.skipif("not hasattr(os, 'murks')")''',
'''@pytest.mark.skipif("hasattr(os, 'murks')")'''
]
for i in range(0, 2):
item = testdir.getitem("""
import pytest
%s
%s
def test_func():
pass
""" % (lines[i], lines[(i+1) %2]))
ev = MarkEvaluator(item, 'skipif')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "condition: not hasattr(os, 'murks')"
def test_marked_one_arg_twice2(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.skipif("hasattr(os, 'murks')")
@pytest.mark.skipif("not hasattr(os, 'murks')")
def test_func():
pass
""")
ev = MarkEvaluator(item, 'skipif')
assert ev
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "condition: not hasattr(os, 'murks')"
def test_marked_skip_with_not_string(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.skipif(False)
def test_func():
pass
""")
ev = MarkEvaluator(item, 'skipif')
exc = pytest.raises(pytest.fail.Exception, ev.istrue)
assert """Failed: you need to specify reason=STRING when using booleans as conditions.""" in exc.value.msg
def test_skipif_class(self, testdir):
item, = testdir.getitems("""
import pytest
class TestClass:
pytestmark = pytest.mark.skipif("config._hackxyz")
def test_func(self):
pass
""")
item.config._hackxyz = 3
ev = MarkEvaluator(item, 'skipif')
assert ev.istrue()
expl = ev.getexplanation()
assert expl == "condition: config._hackxyz"
class TestXFail:
@pytest.mark.parametrize('strict', [True, False])
def test_xfail_simple(self, testdir, strict):
item = testdir.getitem("""
import pytest
@pytest.mark.xfail(strict=%s)
def test_func():
assert 0
""" % strict)
reports = runtestprotocol(item, log=False)
assert len(reports) == 3
callreport = reports[1]
assert callreport.skipped
assert callreport.wasxfail == ""
def test_xfail_xpassed(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xfail
def test_func():
assert 1
""")
reports = runtestprotocol(item, log=False)
assert len(reports) == 3
callreport = reports[1]
assert callreport.failed
assert callreport.wasxfail == ""
def test_xfail_run_anyway(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.xfail
def test_func():
assert 0
def test_func2():
pytest.xfail("hello")
""")
result = testdir.runpytest("--runxfail")
result.stdout.fnmatch_lines([
"*def test_func():*",
"*assert 0*",
"*1 failed*1 pass*",
])
def test_xfail_evalfalse_but_fails(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.xfail('False')
def test_func():
assert 0
""")
reports = runtestprotocol(item, log=False)
callreport = reports[1]
assert callreport.failed
assert not hasattr(callreport, "wasxfail")
assert 'xfail' in callreport.keywords
def test_xfail_not_report_default(self, testdir):
p = testdir.makepyfile(test_one="""
import pytest
@pytest.mark.xfail
def test_this():
assert 0
""")
testdir.runpytest(p, '-v')
#result.stdout.fnmatch_lines([
# "*HINT*use*-r*"
#])
def test_xfail_not_run_xfail_reporting(self, testdir):
p = testdir.makepyfile(test_one="""
import pytest
@pytest.mark.xfail(run=False, reason="noway")
def test_this():
assert 0
@pytest.mark.xfail("True", run=False)
def test_this_true():
assert 0
@pytest.mark.xfail("False", run=False, reason="huh")
def test_this_false():
assert 1
""")
result = testdir.runpytest(p, '--report=xfailed', )
result.stdout.fnmatch_lines([
"*test_one*test_this*",
"*NOTRUN*noway",
"*test_one*test_this_true*",
"*NOTRUN*condition:*True*",
"*1 passed*",
])
def test_xfail_not_run_no_setup_run(self, testdir):
p = testdir.makepyfile(test_one="""
import pytest
@pytest.mark.xfail(run=False, reason="hello")
def test_this():
assert 0
def setup_module(mod):
raise ValueError(42)
""")
result = testdir.runpytest(p, '--report=xfailed', )
result.stdout.fnmatch_lines([
"*test_one*test_this*",
"*NOTRUN*hello",
"*1 xfailed*",
])
def test_xfail_xpass(self, testdir):
p = testdir.makepyfile(test_one="""
import pytest
@pytest.mark.xfail
def test_that():
assert 1
""")
result = testdir.runpytest(p, '-rX')
result.stdout.fnmatch_lines([
"*XPASS*test_that*",
"*1 xpassed*"
])
assert result.ret == 0
def test_xfail_imperative(self, testdir):
p = testdir.makepyfile("""
import pytest
def test_this():
pytest.xfail("hello")
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*1 xfailed*",
])
result = testdir.runpytest(p, "-rx")
result.stdout.fnmatch_lines([
"*XFAIL*test_this*",
"*reason:*hello*",
])
result = testdir.runpytest(p, "--runxfail")
result.stdout.fnmatch_lines("*1 pass*")
def test_xfail_imperative_in_setup_function(self, testdir):
p = testdir.makepyfile("""
import pytest
def setup_function(function):
pytest.xfail("hello")
def test_this():
assert 0
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*1 xfailed*",
])
result = testdir.runpytest(p, "-rx")
result.stdout.fnmatch_lines([
"*XFAIL*test_this*",
"*reason:*hello*",
])
result = testdir.runpytest(p, "--runxfail")
result.stdout.fnmatch_lines("""
*def test_this*
*1 fail*
""")
def xtest_dynamic_xfail_set_during_setup(self, testdir):
p = testdir.makepyfile("""
import pytest
def setup_function(function):
pytest.mark.xfail(function)
def test_this():
assert 0
def test_that():
assert 1
""")
result = testdir.runpytest(p, '-rxX')
result.stdout.fnmatch_lines([
"*XFAIL*test_this*",
"*XPASS*test_that*",
])
def test_dynamic_xfail_no_run(self, testdir):
p = testdir.makepyfile("""
import pytest
def pytest_funcarg__arg(request):
request.applymarker(pytest.mark.xfail(run=False))
def test_this(arg):
assert 0
""")
result = testdir.runpytest(p, '-rxX')
result.stdout.fnmatch_lines([
"*XFAIL*test_this*",
"*NOTRUN*",
])
def test_dynamic_xfail_set_during_funcarg_setup(self, testdir):
p = testdir.makepyfile("""
import pytest
def pytest_funcarg__arg(request):
request.applymarker(pytest.mark.xfail)
def test_this2(arg):
assert 0
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*1 xfailed*",
])
@pytest.mark.parametrize('expected, actual, matchline',
[('TypeError', 'TypeError', "*1 xfailed*"),
('(AttributeError, TypeError)', 'TypeError', "*1 xfailed*"),
('TypeError', 'IndexError', "*1 failed*"),
('(AttributeError, TypeError)', 'IndexError', "*1 failed*"),
])
def test_xfail_raises(self, expected, actual, matchline, testdir):
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail(raises=%s)
def test_raises():
raise %s()
""" % (expected, actual))
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
matchline,
])
def test_strict_sanity(self, testdir):
"""sanity check for xfail(strict=True): a failing test should behave
exactly like a normal xfail.
"""
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail(reason='unsupported feature', strict=True)
def test_foo():
assert 0
""")
result = testdir.runpytest(p, '-rxX')
result.stdout.fnmatch_lines([
'*XFAIL*',
'*unsupported feature*',
])
assert result.ret == 0
@pytest.mark.parametrize('strict', [True, False])
def test_strict_xfail(self, testdir, strict):
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail(reason='unsupported feature', strict=%s)
def test_foo():
with open('foo_executed', 'w'): pass # make sure test executes
""" % strict)
result = testdir.runpytest(p, '-rxX')
if strict:
result.stdout.fnmatch_lines([
'*test_foo*',
'*XPASS(strict)*unsupported feature*',
])
else:
result.stdout.fnmatch_lines([
'*test_strict_xfail*',
'XPASS test_strict_xfail.py::test_foo unsupported feature',
])
assert result.ret == (1 if strict else 0)
assert testdir.tmpdir.join('foo_executed').isfile()
@pytest.mark.parametrize('strict', [True, False])
def test_strict_xfail_condition(self, testdir, strict):
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail(False, reason='unsupported feature', strict=%s)
def test_foo():
pass
""" % strict)
result = testdir.runpytest(p, '-rxX')
result.stdout.fnmatch_lines('*1 passed*')
assert result.ret == 0
@pytest.mark.parametrize('strict_val', ['true', 'false'])
def test_strict_xfail_default_from_file(self, testdir, strict_val):
testdir.makeini('''
[pytest]
xfail_strict = %s
''' % strict_val)
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail(reason='unsupported feature')
def test_foo():
pass
""")
result = testdir.runpytest(p, '-rxX')
strict = strict_val == 'true'
result.stdout.fnmatch_lines('*1 failed*' if strict else '*1 xpassed*')
assert result.ret == (1 if strict else 0)
class TestXFailwithSetupTeardown:
def test_failing_setup_issue9(self, testdir):
testdir.makepyfile("""
import pytest
def setup_function(func):
assert 0
@pytest.mark.xfail
def test_func():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*1 xfail*",
])
def test_failing_teardown_issue9(self, testdir):
testdir.makepyfile("""
import pytest
def teardown_function(func):
assert 0
@pytest.mark.xfail
def test_func():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*1 xfail*",
])
class TestSkip:
def test_skip_class(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip
class TestSomething(object):
def test_foo(self):
pass
def test_bar(self):
pass
def test_baz():
pass
""")
rec = testdir.inline_run()
rec.assertoutcome(skipped=2, passed=1)
def test_skips_on_false_string(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip('False')
def test_foo():
pass
""")
rec = testdir.inline_run()
rec.assertoutcome(skipped=1)
def test_arg_as_reason(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip('testing stuff')
def test_bar():
pass
""")
result = testdir.runpytest('-rs')
result.stdout.fnmatch_lines([
"*testing stuff*",
"*1 skipped*",
])
def test_skip_no_reason(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip
def test_foo():
pass
""")
result = testdir.runpytest('-rs')
result.stdout.fnmatch_lines([
"*unconditional skip*",
"*1 skipped*",
])
def test_skip_with_reason(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip(reason="for lolz")
def test_bar():
pass
""")
result = testdir.runpytest('-rs')
result.stdout.fnmatch_lines([
"*for lolz*",
"*1 skipped*",
])
def test_only_skips_marked_test(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skip
def test_foo():
pass
@pytest.mark.skip(reason="nothing in particular")
def test_bar():
pass
def test_baz():
assert True
""")
result = testdir.runpytest('-rs')
result.stdout.fnmatch_lines([
"*nothing in particular*",
"*1 passed*2 skipped*",
])
class TestSkipif:
def test_skipif_conditional(self, testdir):
item = testdir.getitem("""
import pytest
@pytest.mark.skipif("hasattr(os, 'sep')")
def test_func():
pass
""") # noqa
x = pytest.raises(pytest.skip.Exception, lambda:
pytest_runtest_setup(item))
assert x.value.msg == "condition: hasattr(os, 'sep')"
@pytest.mark.parametrize('params', [
'"hasattr(sys, \'platform\')"',
'True, reason="invalid platform"',
])
def test_skipif_reporting(self, testdir, params):
p = testdir.makepyfile(test_foo="""
import pytest
@pytest.mark.skipif(%(params)s)
def test_that():
assert 0
""" % dict(params=params))
result = testdir.runpytest(p, '-s', '-rs')
result.stdout.fnmatch_lines([
"*SKIP*1*test_foo.py*platform*",
"*1 skipped*"
])
assert result.ret == 0
@pytest.mark.parametrize('marker, msg1, msg2', [
('skipif', 'SKIP', 'skipped'),
('xfail', 'XPASS', 'xpassed'),
])
def test_skipif_reporting_multiple(self, testdir, marker, msg1, msg2):
testdir.makepyfile(test_foo="""
import pytest
@pytest.mark.{marker}(False, reason='first_condition')
@pytest.mark.{marker}(True, reason='second_condition')
def test_foobar():
assert 1
""".format(marker=marker))
result = testdir.runpytest('-s', '-rsxX')
result.stdout.fnmatch_lines([
"*{msg1}*test_foo.py*second_condition*".format(msg1=msg1),
"*1 {msg2}*".format(msg2=msg2),
])
assert result.ret == 0
def test_skip_not_report_default(testdir):
p = testdir.makepyfile(test_one="""
import pytest
def test_this():
pytest.skip("hello")
""")
result = testdir.runpytest(p, '-v')
result.stdout.fnmatch_lines([
#"*HINT*use*-r*",
"*1 skipped*",
])
def test_skipif_class(testdir):
p = testdir.makepyfile("""
import pytest
class TestClass:
pytestmark = pytest.mark.skipif("True")
def test_that(self):
assert 0
def test_though(self):
assert 0
""")
result = testdir.runpytest(p)
result.stdout.fnmatch_lines([
"*2 skipped*"
])
def test_skip_reasons_folding():
path = 'xyz'
lineno = 3
message = "justso"
longrepr = (path, lineno, message)
class X:
pass
ev1 = X()
ev1.when = "execute"
ev1.skipped = True
ev1.longrepr = longrepr
ev2 = X()
ev2.longrepr = longrepr
ev2.skipped = True
l = folded_skips([ev1, ev2])
assert len(l) == 1
    num, fspath, found_lineno, reason = l[0]
    assert num == 2
    assert fspath == path
    assert found_lineno == lineno
    assert reason == message
def test_skipped_reasons_functional(testdir):
testdir.makepyfile(
test_one="""
from conftest import doskip
def setup_function(func):
doskip()
def test_func():
pass
class TestClass:
def test_method(self):
doskip()
""",
test_two = """
from conftest import doskip
doskip()
""",
conftest = """
import pytest
def doskip():
pytest.skip('test')
"""
)
result = testdir.runpytest('--report=skipped')
result.stdout.fnmatch_lines([
"*SKIP*3*conftest.py:3: test",
])
assert result.ret == 0
def test_reportchars(testdir):
testdir.makepyfile("""
import pytest
def test_1():
assert 0
@pytest.mark.xfail
def test_2():
assert 0
@pytest.mark.xfail
def test_3():
pass
def test_4():
pytest.skip("four")
""")
result = testdir.runpytest("-rfxXs")
result.stdout.fnmatch_lines([
"FAIL*test_1*",
"XFAIL*test_2*",
"XPASS*test_3*",
"SKIP*four*",
])
def test_reportchars_error(testdir):
testdir.makepyfile(
conftest="""
def pytest_runtest_teardown():
assert 0
""",
test_simple="""
def test_foo():
pass
""")
result = testdir.runpytest('-rE')
result.stdout.fnmatch_lines([
'ERROR*test_foo*',
])
def test_reportchars_all(testdir):
testdir.makepyfile("""
import pytest
def test_1():
assert 0
@pytest.mark.xfail
def test_2():
assert 0
@pytest.mark.xfail
def test_3():
pass
def test_4():
pytest.skip("four")
""")
result = testdir.runpytest("-ra")
result.stdout.fnmatch_lines([
"FAIL*test_1*",
"SKIP*four*",
"XFAIL*test_2*",
"XPASS*test_3*",
])
def test_reportchars_all_error(testdir):
testdir.makepyfile(
conftest="""
def pytest_runtest_teardown():
assert 0
""",
test_simple="""
def test_foo():
pass
""")
result = testdir.runpytest('-ra')
result.stdout.fnmatch_lines([
'ERROR*test_foo*',
])
@pytest.mark.xfail("hasattr(sys, 'pypy_version_info')")
def test_errors_in_xfail_skip_expressions(testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skipif("asd")
def test_nameerror():
pass
@pytest.mark.xfail("syntax error")
def test_syntax():
pass
def test_func():
pass
""")
result = testdir.runpytest()
markline = " ^"
if sys.platform.startswith("java"):
# XXX report this to java
markline = "*" + markline[8:]
result.stdout.fnmatch_lines([
"*ERROR*test_nameerror*",
"*evaluating*skipif*expression*",
"*asd*",
"*ERROR*test_syntax*",
"*evaluating*xfail*expression*",
" syntax error",
markline,
"SyntaxError: invalid syntax",
"*1 pass*2 error*",
])
def test_xfail_skipif_with_globals(testdir):
testdir.makepyfile("""
import pytest
x = 3
@pytest.mark.skipif("x == 3")
def test_skip1():
pass
@pytest.mark.xfail("x == 3")
def test_boolean():
assert 0
""")
result = testdir.runpytest("-rsx")
result.stdout.fnmatch_lines([
"*SKIP*x == 3*",
"*XFAIL*test_boolean*",
"*x == 3*",
])
def test_direct_gives_error(testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skipif(True)
def test_skip1():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"*1 error*",
])
def test_default_markers(testdir):
result = testdir.runpytest("--markers")
result.stdout.fnmatch_lines([
"*skipif(*condition)*skip*",
"*xfail(*condition, reason=None, run=True, raises=None)*expected failure*",
])
def test_xfail_test_setup_exception(testdir):
testdir.makeconftest("""
def pytest_runtest_setup():
0 / 0
""")
p = testdir.makepyfile("""
import pytest
@pytest.mark.xfail
def test_func():
assert 0
""")
result = testdir.runpytest(p)
assert result.ret == 0
assert 'xfailed' in result.stdout.str()
assert 'xpassed' not in result.stdout.str()
def test_imperativeskip_on_xfail_test(testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.xfail
def test_that_fails():
assert 0
@pytest.mark.skipif("True")
def test_hello():
pass
""")
testdir.makeconftest("""
import pytest
def pytest_runtest_setup(item):
pytest.skip("abc")
""")
result = testdir.runpytest("-rsxX")
result.stdout.fnmatch_lines_random("""
*SKIP*abc*
*SKIP*condition: True*
*2 skipped*
""")
class TestBooleanCondition:
def test_skipif(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skipif(True, reason="True123")
def test_func1():
pass
@pytest.mark.skipif(False, reason="True123")
def test_func2():
pass
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines("""
*1 passed*1 skipped*
""")
def test_skipif_noreason(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.skipif(True)
def test_func():
pass
""")
result = testdir.runpytest("-rs")
result.stdout.fnmatch_lines("""
*1 error*
""")
def test_xfail(self, testdir):
testdir.makepyfile("""
import pytest
@pytest.mark.xfail(True, reason="True123")
def test_func():
assert 0
""")
result = testdir.runpytest("-rxs")
result.stdout.fnmatch_lines("""
*XFAIL*
*True123*
*1 xfail*
""")
def test_xfail_item(testdir):
# Ensure pytest.xfail works with non-Python Item
testdir.makeconftest("""
import pytest
class MyItem(pytest.Item):
nodeid = 'foo'
def runtest(self):
pytest.xfail("Expected Failure")
def pytest_collect_file(path, parent):
return MyItem("foo", parent)
""")
result = testdir.inline_run()
passed, skipped, failed = result.listoutcomes()
assert not failed
xfailed = [r for r in skipped if hasattr(r, 'wasxfail')]
assert xfailed
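# A hedged summary of the xfail marker semantics exercised by the tests above
# (not part of the original test module; the behaviours themselves are
# standard pytest):
#
#   @pytest.mark.xfail(strict=False)      # fail -> XFAIL, pass -> XPASS
#   @pytest.mark.xfail(strict=True)       # an unexpected pass fails the run
#   @pytest.mark.xfail(run=False)         # reported XFAIL/NOTRUN, never executed
#   @pytest.mark.xfail(raises=TypeError)  # only TypeError counts as the
#                                         # expected failure; others fail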
|
laperry1/android_external_chromium_org | refs/heads/cm-12.1 | tools/telemetry/telemetry/core/system_info.py | 58 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import gpu_info
class SystemInfo(object):
"""Provides low-level system information."""
def __init__(self, model_name, gpu_dict):
if (model_name == None) or (gpu_dict == None):
raise Exception("Missing model_name or gpu_dict argument")
self._model_name = model_name
self._gpu = gpu_info.GPUInfo.FromDict(gpu_dict)
@classmethod
def FromDict(cls, attrs):
"""Constructs a SystemInfo from a dictionary of attributes.
Attributes currently required to be present in the dictionary:
model_name (string): a platform-dependent string
describing the model of machine, or the empty string if not
supported.
gpu (object containing GPUInfo's required attributes)
"""
return cls(attrs["model_name"], attrs["gpu"])
@property
def model_name(self):
"""A string describing the machine model.
This is a highly platform-dependent value and not currently
specified for any machine type aside from Macs. On Mac OS, this
is the model identifier, reformatted slightly; for example,
'MacBookPro 10.1'."""
return self._model_name
@property
def gpu(self):
"""A GPUInfo object describing the graphics processor(s) on the system."""
return self._gpu
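# A minimal usage sketch (hedged: the attribute values are illustrative and
# the exact shape of the "gpu" dict is whatever gpu_info.GPUInfo.FromDict
# accepts, which is not shown in this file):
#
#   attrs = {"model_name": "MacBookPro 10.1", "gpu": {...}}
#   info = SystemInfo.FromDict(attrs)
#   info.model_name   # -> "MacBookPro 10.1"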
|
ghdk/networkx | refs/heads/master | networkx/classes/tests/test_graph.py | 31 | #!/usr/bin/env python
from nose.tools import *
import networkx
class BaseGraphTester(object):
""" Tests for data-structure independent graph class features."""
def test_contains(self):
G=self.K3
assert(1 in G )
assert(4 not in G )
assert('b' not in G )
assert([] not in G ) # no exception for nonhashable
assert({1:1} not in G) # no exception for nonhashable
def test_order(self):
G=self.K3
assert_equal(len(G),3)
assert_equal(G.order(),3)
assert_equal(G.number_of_nodes(),3)
def test_nodes_iter(self):
G=self.K3
assert_equal(sorted(G.nodes_iter()),self.k3nodes)
assert_equal(sorted(G.nodes_iter(data=True)),[(0,{}),(1,{}),(2,{})])
def test_nodes(self):
G=self.K3
assert_equal(sorted(G.nodes()),self.k3nodes)
assert_equal(sorted(G.nodes(data=True)),[(0,{}),(1,{}),(2,{})])
def test_has_node(self):
G=self.K3
assert(G.has_node(1))
assert(not G.has_node(4))
assert(not G.has_node([])) # no exception for nonhashable
assert(not G.has_node({1:1})) # no exception for nonhashable
def test_has_edge(self):
G=self.K3
assert_equal(G.has_edge(0,1),True)
assert_equal(G.has_edge(0,-1),False)
def test_neighbors(self):
G=self.K3
assert_equal(sorted(G.neighbors(0)),[1,2])
assert_raises((KeyError,networkx.NetworkXError), G.neighbors,-1)
def test_neighbors_iter(self):
G=self.K3
assert_equal(sorted(G.neighbors_iter(0)),[1,2])
assert_raises((KeyError,networkx.NetworkXError), G.neighbors_iter,-1)
def test_edges(self):
G=self.K3
assert_equal(sorted(G.edges()),[(0,1),(0,2),(1,2)])
assert_equal(sorted(G.edges(0)),[(0,1),(0,2)])
assert_raises((KeyError,networkx.NetworkXError), G.edges,-1)
def test_edges_iter(self):
G=self.K3
assert_equal(sorted(G.edges_iter()),[(0,1),(0,2),(1,2)])
assert_equal(sorted(G.edges_iter(0)),[(0,1),(0,2)])
f=lambda x:list(G.edges_iter(x))
assert_raises((KeyError,networkx.NetworkXError), f, -1)
def test_adjacency_list(self):
G=self.K3
assert_equal(G.adjacency_list(),[[1,2],[0,2],[0,1]])
def test_degree(self):
G=self.K3
assert_equal(list(G.degree().values()),[2,2,2])
assert_equal(G.degree(),{0:2,1:2,2:2})
assert_equal(G.degree(0),2)
assert_equal(G.degree([0]),{0:2})
assert_raises((KeyError,networkx.NetworkXError), G.degree,-1)
def test_weighted_degree(self):
G=self.Graph()
G.add_edge(1,2,weight=2)
G.add_edge(2,3,weight=3)
assert_equal(list(G.degree(weight='weight').values()),[2,5,3])
assert_equal(G.degree(weight='weight'),{1:2,2:5,3:3})
assert_equal(G.degree(1,weight='weight'),2)
assert_equal(G.degree([1],weight='weight'),{1:2})
def test_degree_iter(self):
G=self.K3
assert_equal(list(G.degree_iter()),[(0,2),(1,2),(2,2)])
assert_equal(dict(G.degree_iter()),{0:2,1:2,2:2})
assert_equal(list(G.degree_iter(0)),[(0,2)])
def test_size(self):
G=self.K3
assert_equal(G.size(),3)
assert_equal(G.number_of_edges(),3)
def test_add_star(self):
G=self.K3.copy()
nlist=[12,13,14,15]
G.add_star(nlist)
assert_equal(sorted(G.edges(nlist)),[(12,13),(12,14),(12,15)])
G=self.K3.copy()
G.add_star(nlist,weight=2.0)
assert_equal(sorted(G.edges(nlist,data=True)),\
[(12,13,{'weight':2.}),
(12,14,{'weight':2.}),
(12,15,{'weight':2.})])
def test_add_path(self):
G=self.K3.copy()
nlist=[12,13,14,15]
G.add_path(nlist)
assert_equal(sorted(G.edges(nlist)),[(12,13),(13,14),(14,15)])
G=self.K3.copy()
G.add_path(nlist,weight=2.0)
assert_equal(sorted(G.edges(nlist,data=True)),\
[(12,13,{'weight':2.}),
(13,14,{'weight':2.}),
(14,15,{'weight':2.})])
def test_add_cycle(self):
G=self.K3.copy()
nlist=[12,13,14,15]
oklists=[ [(12,13),(12,15),(13,14),(14,15)], \
[(12,13),(13,14),(14,15),(15,12)] ]
G.add_cycle(nlist)
assert_true(sorted(G.edges(nlist)) in oklists)
G=self.K3.copy()
oklists=[ [(12,13,{'weight':1.}),\
(12,15,{'weight':1.}),\
(13,14,{'weight':1.}),\
(14,15,{'weight':1.})], \
\
[(12,13,{'weight':1.}),\
(13,14,{'weight':1.}),\
(14,15,{'weight':1.}),\
(15,12,{'weight':1.})] \
]
G.add_cycle(nlist,weight=1.0)
assert_true(sorted(G.edges(nlist,data=True)) in oklists)
def test_nbunch_iter(self):
G=self.K3
assert_equal(list(G.nbunch_iter()),self.k3nodes) # all nodes
assert_equal(list(G.nbunch_iter(0)),[0]) # single node
assert_equal(list(G.nbunch_iter([0,1])),[0,1]) # sequence
# sequence with none in graph
assert_equal(list(G.nbunch_iter([-1])),[])
# string sequence with none in graph
assert_equal(list(G.nbunch_iter("foo")),[])
# node not in graph doesn't get caught upon creation of iterator
bunch=G.nbunch_iter(-1)
# but gets caught when iterator used
assert_raises(networkx.NetworkXError,list,bunch)
# unhashable doesn't get caught upon creation of iterator
bunch=G.nbunch_iter([0,1,2,{}])
# but gets caught when iterator hits the unhashable
assert_raises(networkx.NetworkXError,list,bunch)
def test_selfloop_degree(self):
G=self.Graph()
G.add_edge(1,1)
assert_equal(list(G.degree().values()),[2])
assert_equal(G.degree(),{1:2})
assert_equal(G.degree(1),2)
assert_equal(G.degree([1]),{1:2})
assert_equal(G.degree([1],weight='weight'),{1:2})
def test_selfloops(self):
G=self.K3.copy()
G.add_edge(0,0)
assert_equal(G.nodes_with_selfloops(),[0])
assert_equal(G.selfloop_edges(),[(0,0)])
assert_equal(G.number_of_selfloops(),1)
G.remove_edge(0,0)
G.add_edge(0,0)
G.remove_edges_from([(0,0)])
G.add_edge(1,1)
G.remove_node(1)
G.add_edge(0,0)
G.add_edge(1,1)
G.remove_nodes_from([0,1])
class BaseAttrGraphTester(BaseGraphTester):
""" Tests of graph class attribute features."""
def test_weighted_degree(self):
G=self.Graph()
G.add_edge(1,2,weight=2,other=3)
G.add_edge(2,3,weight=3,other=4)
assert_equal(list(G.degree(weight='weight').values()),[2,5,3])
assert_equal(G.degree(weight='weight'),{1:2,2:5,3:3})
assert_equal(G.degree(1,weight='weight'),2)
assert_equal(G.degree([1],weight='weight'),{1:2})
assert_equal(list(G.degree(weight='other').values()),[3,7,4])
assert_equal(G.degree(weight='other'),{1:3,2:7,3:4})
assert_equal(G.degree(1,weight='other'),3)
assert_equal(G.degree([1],weight='other'),{1:3})
def add_attributes(self,G):
G.graph['foo']=[]
G.node[0]['foo']=[]
G.remove_edge(1,2)
ll=[]
G.add_edge(1,2,foo=ll)
G.add_edge(2,1,foo=ll)
# attr_dict must be dict
assert_raises(networkx.NetworkXError,G.add_edge,0,1,attr_dict=[])
def test_name(self):
G=self.Graph(name='')
assert_equal(G.name,"")
G=self.Graph(name='test')
assert_equal(G.__str__(),"test")
assert_equal(G.name,"test")
def test_copy(self):
G=self.K3
self.add_attributes(G)
H=G.copy()
self.is_deepcopy(H,G)
H=G.__class__(G)
self.is_shallow_copy(H,G)
def test_copy_attr(self):
G=self.Graph(foo=[])
G.add_node(0,foo=[])
G.add_edge(1,2,foo=[])
H=G.copy()
self.is_deepcopy(H,G)
H=G.__class__(G) # just copy
self.is_shallow_copy(H,G)
def is_deepcopy(self,H,G):
self.graphs_equal(H,G)
self.different_attrdict(H,G)
self.deep_copy_attrdict(H,G)
def deep_copy_attrdict(self,H,G):
self.deepcopy_graph_attr(H,G)
self.deepcopy_node_attr(H,G)
self.deepcopy_edge_attr(H,G)
def deepcopy_graph_attr(self,H,G):
assert_equal(G.graph['foo'],H.graph['foo'])
G.graph['foo'].append(1)
assert_not_equal(G.graph['foo'],H.graph['foo'])
def deepcopy_node_attr(self,H,G):
assert_equal(G.node[0]['foo'],H.node[0]['foo'])
G.node[0]['foo'].append(1)
assert_not_equal(G.node[0]['foo'],H.node[0]['foo'])
def deepcopy_edge_attr(self,H,G):
assert_equal(G[1][2]['foo'],H[1][2]['foo'])
G[1][2]['foo'].append(1)
assert_not_equal(G[1][2]['foo'],H[1][2]['foo'])
def is_shallow_copy(self,H,G):
self.graphs_equal(H,G)
self.different_attrdict(H,G)
self.shallow_copy_attrdict(H,G)
def shallow_copy_attrdict(self,H,G):
self.shallow_copy_graph_attr(H,G)
self.shallow_copy_node_attr(H,G)
self.shallow_copy_edge_attr(H,G)
def shallow_copy_graph_attr(self,H,G):
assert_equal(G.graph['foo'],H.graph['foo'])
G.graph['foo'].append(1)
assert_equal(G.graph['foo'],H.graph['foo'])
def shallow_copy_node_attr(self,H,G):
assert_equal(G.node[0]['foo'],H.node[0]['foo'])
G.node[0]['foo'].append(1)
assert_equal(G.node[0]['foo'],H.node[0]['foo'])
def shallow_copy_edge_attr(self,H,G):
assert_equal(G[1][2]['foo'],H[1][2]['foo'])
G[1][2]['foo'].append(1)
assert_equal(G[1][2]['foo'],H[1][2]['foo'])
def same_attrdict(self, H, G):
old_foo=H[1][2]['foo']
H.add_edge(1,2,foo='baz')
assert_equal(G.edge,H.edge)
H.add_edge(1,2,foo=old_foo)
assert_equal(G.edge,H.edge)
old_foo=H.node[0]['foo']
H.node[0]['foo']='baz'
assert_equal(G.node,H.node)
H.node[0]['foo']=old_foo
assert_equal(G.node,H.node)
def different_attrdict(self, H, G):
old_foo=H[1][2]['foo']
H.add_edge(1,2,foo='baz')
assert_not_equal(G.edge,H.edge)
H.add_edge(1,2,foo=old_foo)
assert_equal(G.edge,H.edge)
old_foo=H.node[0]['foo']
H.node[0]['foo']='baz'
assert_not_equal(G.node,H.node)
H.node[0]['foo']=old_foo
assert_equal(G.node,H.node)
def graphs_equal(self,H,G):
assert_equal(G.adj,H.adj)
assert_equal(G.edge,H.edge)
assert_equal(G.node,H.node)
assert_equal(G.graph,H.graph)
assert_equal(G.name,H.name)
if not G.is_directed() and not H.is_directed():
assert_true(H.adj[1][2] is H.adj[2][1])
assert_true(G.adj[1][2] is G.adj[2][1])
else: # at least one is directed
if not G.is_directed():
G.pred=G.adj
G.succ=G.adj
if not H.is_directed():
H.pred=H.adj
H.succ=H.adj
assert_equal(G.pred,H.pred)
assert_equal(G.succ,H.succ)
assert_true(H.succ[1][2] is H.pred[2][1])
assert_true(G.succ[1][2] is G.pred[2][1])
def test_graph_attr(self):
G=self.K3
G.graph['foo']='bar'
assert_equal(G.graph['foo'], 'bar')
del G.graph['foo']
assert_equal(G.graph, {})
H=self.Graph(foo='bar')
assert_equal(H.graph['foo'], 'bar')
def test_node_attr(self):
G=self.K3
G.add_node(1,foo='bar')
assert_equal(G.nodes(), [0,1,2])
assert_equal(G.nodes(data=True), [(0,{}),(1,{'foo':'bar'}),(2,{})])
G.node[1]['foo']='baz'
assert_equal(G.nodes(data=True), [(0,{}),(1,{'foo':'baz'}),(2,{})])
def test_node_attr2(self):
G=self.K3
a={'foo':'bar'}
G.add_node(3,attr_dict=a)
assert_equal(G.nodes(), [0,1,2,3])
assert_equal(G.nodes(data=True),
[(0,{}),(1,{}),(2,{}),(3,{'foo':'bar'})])
def test_edge_attr(self):
G=self.Graph()
G.add_edge(1,2,foo='bar')
assert_equal(G.edges(data=True), [(1,2,{'foo':'bar'})])
assert_equal(G.edges(data='foo'), [(1,2,'bar')])
def test_edge_attr2(self):
G=self.Graph()
G.add_edges_from([(1,2),(3,4)],foo='foo')
assert_equal(sorted(G.edges(data=True)),
[(1,2,{'foo':'foo'}),(3,4,{'foo':'foo'})])
assert_equal(sorted(G.edges(data='foo')),
[(1,2,'foo'),(3,4,'foo')])
def test_edge_attr3(self):
G=self.Graph()
G.add_edges_from([(1,2,{'weight':32}),(3,4,{'weight':64})],foo='foo')
assert_equal(G.edges(data=True),
[(1,2,{'foo':'foo','weight':32}),\
(3,4,{'foo':'foo','weight':64})])
G.remove_edges_from([(1,2),(3,4)])
G.add_edge(1,2,data=7,spam='bar',bar='foo')
assert_equal(G.edges(data=True),
[(1,2,{'data':7,'spam':'bar','bar':'foo'})])
def test_edge_attr4(self):
G=self.Graph()
G.add_edge(1,2,data=7,spam='bar',bar='foo')
assert_equal(G.edges(data=True),
[(1,2,{'data':7,'spam':'bar','bar':'foo'})])
G[1][2]['data']=10 # OK to set data like this
assert_equal(G.edges(data=True),
[(1,2,{'data':10,'spam':'bar','bar':'foo'})])
G.edge[1][2]['data']=20 # another spelling, "edge"
assert_equal(G.edges(data=True),
[(1,2,{'data':20,'spam':'bar','bar':'foo'})])
G.edge[1][2]['listdata']=[20,200]
G.edge[1][2]['weight']=20
assert_equal(G.edges(data=True),
[(1,2,{'data':20,'spam':'bar',
'bar':'foo','listdata':[20,200],'weight':20})])
def test_attr_dict_not_dict(self):
# attr_dict must be dict
G=self.Graph()
edges=[(1,2)]
assert_raises(networkx.NetworkXError,G.add_edges_from,edges,
attr_dict=[])
def test_to_undirected(self):
G=self.K3
self.add_attributes(G)
H=networkx.Graph(G)
self.is_shallow_copy(H,G)
H=G.to_undirected()
self.is_deepcopy(H,G)
def test_to_directed(self):
G=self.K3
self.add_attributes(G)
H=networkx.DiGraph(G)
self.is_shallow_copy(H,G)
H=G.to_directed()
self.is_deepcopy(H,G)
def test_subgraph(self):
G=self.K3
self.add_attributes(G)
H=G.subgraph([0,1,2,5])
# assert_equal(H.name, 'Subgraph of ('+G.name+')')
H.name=G.name
self.graphs_equal(H,G)
self.same_attrdict(H,G)
self.shallow_copy_attrdict(H,G)
H=G.subgraph(0)
assert_equal(H.adj,{0:{}})
H=G.subgraph([])
assert_equal(H.adj,{})
assert_not_equal(G.adj,{})
def test_selfloops_attr(self):
G=self.K3.copy()
G.add_edge(0,0)
G.add_edge(1,1,weight=2)
assert_equal(G.selfloop_edges(data=True),
[(0,0,{}),(1,1,{'weight':2})])
assert_equal(G.selfloop_edges(data='weight'),
[(0,0,None),(1,1,2)])
class TestGraph(BaseAttrGraphTester):
"""Tests specific to dict-of-dict-of-dict graph data structure"""
def setUp(self):
self.Graph=networkx.Graph
# build dict-of-dict-of-dict K3
ed1,ed2,ed3 = ({},{},{})
self.k3adj={0: {1: ed1, 2: ed2},
1: {0: ed1, 2: ed3},
2: {0: ed2, 1: ed3}}
self.k3edges=[(0, 1), (0, 2), (1, 2)]
self.k3nodes=[0, 1, 2]
self.K3=self.Graph()
self.K3.adj=self.K3.edge=self.k3adj
self.K3.node={}
self.K3.node[0]={}
self.K3.node[1]={}
self.K3.node[2]={}
def test_data_input(self):
G=self.Graph(data={1:[2],2:[1]}, name="test")
assert_equal(G.name,"test")
assert_equal(sorted(G.adj.items()),[(1, {2: {}}), (2, {1: {}})])
G=self.Graph({1:[2],2:[1]}, name="test")
assert_equal(G.name,"test")
assert_equal(sorted(G.adj.items()),[(1, {2: {}}), (2, {1: {}})])
def test_adjacency_iter(self):
G=self.K3
assert_equal(dict(G.adjacency_iter()),
{0: {1: {}, 2: {}}, 1: {0: {}, 2: {}}, 2: {0: {}, 1: {}}})
def test_getitem(self):
G=self.K3
assert_equal(G[0],{1: {}, 2: {}})
assert_raises(KeyError, G.__getitem__, 'j')
assert_raises((TypeError,networkx.NetworkXError), G.__getitem__, ['A'])
def test_add_node(self):
G=self.Graph()
G.add_node(0)
assert_equal(G.adj,{0:{}})
# test add attributes
G.add_node(1,c='red')
G.add_node(2,{'c':'blue'})
G.add_node(3,{'c':'blue'},c='red')
assert_raises(networkx.NetworkXError, G.add_node, 4, [])
assert_raises(networkx.NetworkXError, G.add_node, 4, 4)
assert_equal(G.node[1]['c'],'red')
assert_equal(G.node[2]['c'],'blue')
assert_equal(G.node[3]['c'],'red')
# test updating attributes
G.add_node(1,c='blue')
G.add_node(2,{'c':'red'})
G.add_node(3,{'c':'red'},c='blue')
assert_equal(G.node[1]['c'],'blue')
assert_equal(G.node[2]['c'],'red')
assert_equal(G.node[3]['c'],'blue')
def test_add_nodes_from(self):
G=self.Graph()
G.add_nodes_from([0,1,2])
assert_equal(G.adj,{0:{},1:{},2:{}})
# test add attributes
G.add_nodes_from([0,1,2],c='red')
assert_equal(G.node[0]['c'],'red')
assert_equal(G.node[2]['c'],'red')
# test that attribute dicts are not the same
assert(G.node[0] is not G.node[1])
# test updating attributes
G.add_nodes_from([0,1,2],c='blue')
assert_equal(G.node[0]['c'],'blue')
assert_equal(G.node[2]['c'],'blue')
assert(G.node[0] is not G.node[1])
# test tuple input
H=self.Graph()
H.add_nodes_from(G.nodes(data=True))
assert_equal(H.node[0]['c'],'blue')
assert_equal(H.node[2]['c'],'blue')
assert(H.node[0] is not H.node[1])
# specific overrides general
H.add_nodes_from([0,(1,{'c':'green'}),(3,{'c':'cyan'})],c='red')
assert_equal(H.node[0]['c'],'red')
assert_equal(H.node[1]['c'],'green')
assert_equal(H.node[2]['c'],'blue')
assert_equal(H.node[3]['c'],'cyan')
def test_remove_node(self):
G=self.K3
G.remove_node(0)
assert_equal(G.adj,{1:{2:{}},2:{1:{}}})
assert_raises((KeyError,networkx.NetworkXError), G.remove_node,-1)
# generator here to implement list,set,string...
def test_remove_nodes_from(self):
G=self.K3
G.remove_nodes_from([0,1])
assert_equal(G.adj,{2:{}})
G.remove_nodes_from([-1]) # silent fail
def test_add_edge(self):
G=self.Graph()
G.add_edge(0,1)
assert_equal(G.adj,{0: {1: {}}, 1: {0: {}}})
G=self.Graph()
G.add_edge(*(0,1))
assert_equal(G.adj,{0: {1: {}}, 1: {0: {}}})
def test_add_edges_from(self):
G=self.Graph()
G.add_edges_from([(0,1),(0,2,{'weight':3})])
assert_equal(G.adj,{0: {1:{}, 2:{'weight':3}}, 1: {0:{}}, \
2:{0:{'weight':3}}})
G=self.Graph()
G.add_edges_from([(0,1),(0,2,{'weight':3}),(1,2,{'data':4})],data=2)
assert_equal(G.adj,{\
0: {1:{'data':2}, 2:{'weight':3,'data':2}}, \
1: {0:{'data':2}, 2:{'data':4}}, \
2: {0:{'weight':3,'data':2}, 1:{'data':4}} \
})
assert_raises(networkx.NetworkXError,
G.add_edges_from,[(0,)]) # too few in tuple
assert_raises(networkx.NetworkXError,
G.add_edges_from,[(0,1,2,3)]) # too many in tuple
assert_raises(TypeError, G.add_edges_from,[0]) # not a tuple
def test_remove_edge(self):
G=self.K3
G.remove_edge(0,1)
assert_equal(G.adj,{0:{2:{}},1:{2:{}},2:{0:{},1:{}}})
assert_raises((KeyError,networkx.NetworkXError), G.remove_edge,-1,0)
def test_remove_edges_from(self):
G=self.K3
G.remove_edges_from([(0,1)])
assert_equal(G.adj,{0:{2:{}},1:{2:{}},2:{0:{},1:{}}})
G.remove_edges_from([(0,0)]) # silent fail
def test_clear(self):
G=self.K3
G.clear()
assert_equal(G.adj,{})
def test_edges_data(self):
G=self.K3
assert_equal(sorted(G.edges(data=True)),[(0,1,{}),(0,2,{}),(1,2,{})])
assert_equal(sorted(G.edges(0,data=True)),[(0,1,{}),(0,2,{})])
assert_raises((KeyError,networkx.NetworkXError), G.edges,-1)
def test_get_edge_data(self):
G=self.K3
assert_equal(G.get_edge_data(0,1),{})
assert_equal(G[0][1],{})
assert_equal(G.get_edge_data(10,20),None)
assert_equal(G.get_edge_data(-1,0),None)
assert_equal(G.get_edge_data(-1,0,default=1),1)
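# For orientation, the adjacency layout these tests assert is the plain
# dict-of-dict-of-dict structure of networkx 1.x (a sketch mirroring the K3
# fixture built in TestGraph.setUp):
#
#   G = networkx.Graph()
#   G.add_edge(0, 1, weight=3)
#   G.adj   # -> {0: {1: {'weight': 3}}, 1: {0: {'weight': 3}}}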
|
zfil/ansible-modules-core | refs/heads/devel | files/unarchive.py | 49 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <[email protected]>
# (c) 2013, Dylan Martin <[email protected]>
# (c) 2015, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: unarchive
version_added: 1.4
short_description: Unpacks an archive after (optionally) copying it from the local machine.
extends_documentation_fragment: files
description:
     - The M(unarchive) module unpacks an archive. By default, it will copy the source file from the local system to the target before unpacking - set copy=no to unpack an archive which already exists on the target.
options:
src:
description:
- If copy=yes (default), local path to archive file to copy to the target server; can be absolute or relative. If copy=no, path on the target server to existing archive file to unpack.
- If copy=no and src contains ://, the remote machine will download the file from the url first. (version_added 2.0)
required: true
default: null
dest:
description:
- Remote absolute path where the archive should be unpacked
required: true
default: null
copy:
description:
- "If true, the file is copied from local 'master' to the target machine, otherwise, the plugin will look for src archive at the target machine."
required: false
choices: [ "yes", "no" ]
default: "yes"
creates:
description:
      - a filename; when it already exists, this step will B(not) be run.
required: no
default: null
version_added: "1.6"
list_files:
description:
- If set to True, return the list of files that are contained in the tarball.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "2.0"
author: "Dylan Martin (@pileofrogs)"
todo:
- detect changed/unchanged for .zip files
- handle common unarchive args, like preserve owner/timestamp etc...
notes:
- requires C(tar)/C(unzip) command on target host
- can handle I(gzip), I(bzip2) and I(xz) compressed as well as uncompressed tar files
- detects type of archive automatically
- uses tar's C(--diff arg) to calculate if changed or not. If this C(arg) is not
supported, it will always unpack the archive
- does not detect if a .zip file is different from destination - always unzips
- existing files/directories in the destination which are not in the archive
are not touched. This is the same behavior as a normal archive extraction
- existing files/directories in the destination which are not in the archive
are ignored for purposes of deciding if the archive should be unpacked or not
'''
EXAMPLES = '''
# Example from Ansible Playbooks
- unarchive: src=foo.tgz dest=/var/lib/foo
# Unarchive a file that is already on the remote machine
- unarchive: src=/tmp/foo.zip dest=/usr/local/bin copy=no
# Unarchive a file that needs to be downloaded (added in 2.0)
- unarchive: src=https://example.com/example.zip dest=/usr/local/bin copy=no
'''
import re
import os
from zipfile import ZipFile
# String from tar that shows the tar contents are different from the
# filesystem
DIFFERENCE_RE = re.compile(r': (.*) differs$')
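# Illustrative line from GNU tar's --diff/--compare output that the regex
# above is meant to match (the path is hypothetical):
#   "etc/foo.conf: Mode differs"   -> captures "Mode"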
# When downloading an archive, how much of the archive to download before
# saving to a tempfile (64k)
BUFSIZE = 65536
class UnarchiveError(Exception):
pass
# class to handle .zip files
class ZipArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
self.cmd_path = self.module.get_bin_path('unzip')
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
archive = ZipFile(self.src)
try:
self._files_in_archive = archive.namelist()
except:
raise UnarchiveError('Unable to list files in the archive')
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
return dict(unarchived=False)
def unarchive(self):
cmd = '%s -o "%s" -d "%s"' % (self.cmd_path, self.src, self.dest)
rc, out, err = self.module.run_command(cmd)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
cmd = '%s -l "%s"' % (self.cmd_path, self.src)
rc, out, err = self.module.run_command(cmd)
if rc == 0:
return True
return False
# class to handle gzipped tar files
class TgzArchive(object):
def __init__(self, src, dest, module):
self.src = src
self.dest = dest
self.module = module
# Prefer gtar (GNU tar) as it supports the compression options -zjJ
self.cmd_path = self.module.get_bin_path('gtar', None)
if not self.cmd_path:
# Fallback to tar
self.cmd_path = self.module.get_bin_path('tar')
self.zipflag = 'z'
self._files_in_archive = []
@property
def files_in_archive(self, force_refresh=False):
if self._files_in_archive and not force_refresh:
return self._files_in_archive
cmd = '%s -t%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
if rc != 0:
raise UnarchiveError('Unable to list files in the archive')
for filename in out.splitlines():
if filename:
self._files_in_archive.append(filename)
return self._files_in_archive
def is_unarchived(self, mode, owner, group):
cmd = '%s -C "%s" --diff -%sf "%s"' % (self.cmd_path, self.dest, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd)
unarchived = (rc == 0)
if not unarchived:
# Check whether the differences are in something that we're
# setting anyway
# What will be set
to_be_set = set()
for perm in (('Mode', mode), ('Gid', group), ('Uid', owner)):
if perm[1] is not None:
to_be_set.add(perm[0])
# What is different
changes = set()
if err:
# Assume changes if anything returned on stderr
# * Missing files are known to trigger this
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
for line in out.splitlines():
match = DIFFERENCE_RE.search(line)
if not match:
# Unknown tar output. Assume we have changes
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
changes.add(match.groups()[0])
if changes and changes.issubset(to_be_set):
unarchived = True
return dict(unarchived=unarchived, rc=rc, out=out, err=err, cmd=cmd)
def unarchive(self):
cmd = '%s -x%sf "%s"' % (self.cmd_path, self.zipflag, self.src)
rc, out, err = self.module.run_command(cmd, cwd=self.dest)
return dict(cmd=cmd, rc=rc, out=out, err=err)
def can_handle_archive(self):
if not self.cmd_path:
return False
try:
if self.files_in_archive:
return True
except UnarchiveError:
pass
# Errors and no files in archive assume that we weren't able to
# properly unarchive it
return False
# class to handle tar files that aren't compressed
class TarArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarArchive, self).__init__(src, dest, module)
self.zipflag = ''
# class to handle bzip2 compressed tar files
class TarBzipArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarBzipArchive, self).__init__(src, dest, module)
self.zipflag = 'j'
# class to handle xz compressed tar files
class TarXzArchive(TgzArchive):
def __init__(self, src, dest, module):
super(TarXzArchive, self).__init__(src, dest, module)
self.zipflag = 'J'
# try handlers in order and return the one that works or bail if none work
def pick_handler(src, dest, module):
handlers = [TgzArchive, ZipArchive, TarArchive, TarBzipArchive, TarXzArchive]
for handler in handlers:
obj = handler(src, dest, module)
if obj.can_handle_archive():
return obj
module.fail_json(msg='Failed to find handler to unarchive. Make sure the required command to extract the file is installed.')
def main():
module = AnsibleModule(
# not checking because of daisy chain to file module
argument_spec = dict(
src = dict(required=True),
original_basename = dict(required=False), # used to handle 'dest is a directory' via template, a slight hack
dest = dict(required=True),
copy = dict(default=True, type='bool'),
creates = dict(required=False),
list_files = dict(required=False, default=False, type='bool'),
),
add_file_common_args=True,
)
src = os.path.expanduser(module.params['src'])
dest = os.path.expanduser(module.params['dest'])
copy = module.params['copy']
file_args = module.load_file_common_arguments(module.params)
# did tar file arrive?
if not os.path.exists(src):
if copy:
module.fail_json(msg="Source '%s' failed to transfer" % src)
# If copy=false, and src= contains ://, try and download the file to a temp directory.
elif '://' in src:
tempdir = os.path.dirname(__file__)
package = os.path.join(tempdir, str(src.rsplit('/', 1)[1]))
try:
rsp, info = fetch_url(module, src)
f = open(package, 'w')
# Read 1kb at a time to save on ram
while True:
data = rsp.read(BUFSIZE)
if data == "":
break # End of file, break while loop
f.write(data)
f.close()
src = package
except Exception, e:
module.fail_json(msg="Failure downloading %s, %s" % (src, e))
else:
module.fail_json(msg="Source '%s' does not exist" % src)
if not os.access(src, os.R_OK):
module.fail_json(msg="Source '%s' not readable" % src)
# skip working with 0 size archives
try:
if os.path.getsize(src) == 0:
module.fail_json(msg="Invalid archive '%s', the file is 0 bytes" % src)
except Exception, e:
module.fail_json(msg="Source '%s' not readable" % src)
# is dest OK to receive tar file?
if not os.path.isdir(dest):
module.fail_json(msg="Destination '%s' is not a directory" % dest)
handler = pick_handler(src, dest, module)
res_args = dict(handler=handler.__class__.__name__, dest=dest, src=src)
# do we need to do unpack?
res_args['check_results'] = handler.is_unarchived(file_args['mode'],
file_args['owner'], file_args['group'])
if res_args['check_results']['unarchived']:
res_args['changed'] = False
else:
# do the unpack
try:
res_args['extract_results'] = handler.unarchive()
if res_args['extract_results']['rc'] != 0:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest), **res_args)
except IOError:
module.fail_json(msg="failed to unpack %s to %s" % (src, dest))
else:
res_args['changed'] = True
# do we need to change perms?
for filename in handler.files_in_archive:
file_args['path'] = os.path.join(dest, filename)
try:
res_args['changed'] = module.set_fs_attributes_if_different(file_args, res_args['changed'])
except (IOError, OSError), e:
module.fail_json(msg="Unexpected error when accessing exploded file: %s" % str(e))
if module.params['list_files']:
res_args['files'] = handler.files_in_archive
module.exit_json(**res_args)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
dcowden/cadquery-freecad-module | refs/heads/master | CadQuery/Libs/pyqode/core/modes/backspace.py | 7 | """
This module contains the smart backspace mode
"""
from pyqode.qt import QtCore, QtGui
from pyqode.core.api import Mode
class SmartBackSpaceMode(Mode):
""" Improves backspace behaviour.
When you press backspace and there are spaces on the left of the cursor,
those spaces will be deleted (at most tab_len spaces).
    Basically this turns backspace into Shift+Tab
"""
def on_state_changed(self, state):
if state:
self.editor.key_pressed.connect(self._on_key_pressed)
else:
self.editor.key_pressed.disconnect(self._on_key_pressed)
def _on_key_pressed(self, event):
no_modifiers = int(event.modifiers()) == QtCore.Qt.NoModifier
if event.key() == QtCore.Qt.Key_Backspace and no_modifiers:
if self.editor.textCursor().atBlockStart():
return
tab_len = self.editor.tab_length
tab_len = self.editor.textCursor().positionInBlock() % tab_len
if tab_len == 0:
tab_len = self.editor.tab_length
# count the number of spaces deletable, stop at tab len
spaces = 0
cursor = QtGui.QTextCursor(self.editor.textCursor())
while spaces < tab_len or cursor.atBlockStart():
pos = cursor.position()
cursor.movePosition(cursor.Left, cursor.KeepAnchor)
char = cursor.selectedText()
if char == " ":
spaces += 1
else:
break
cursor.setPosition(pos - 1)
cursor = self.editor.textCursor()
if spaces == 0:
return
cursor.beginEditBlock()
for _ in range(spaces):
cursor.deletePreviousChar()
cursor.endEditBlock()
self.editor.setTextCursor(cursor)
event.accept()
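# A minimal usage sketch (hedged: assumes the usual pyqode pattern of
# appending modes to a CodeEdit instance; CodeEdit comes from pyqode.core.api
# and is not defined in this module):
#
#   from pyqode.core.api import CodeEdit
#   editor = CodeEdit()
#   editor.modes.append(SmartBackSpaceMode())
#   # Backspace inside leading indentation now removes up to tab_length
#   # spaces at once, mirroring Shift+Tab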
|
ClementLancien/convertToEntrezGeneID | refs/heads/master | script/conversion/trembl.py | 1 | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 24 17:51:26 2017
@author: clancien
"""
try:
import ConfigParser
except ImportError:
import configparser as ConfigParser
import os
import logging
from logging.handlers import RotatingFileHandler
import sys
class TREMBL():
def __init__(self):
config = ConfigParser.ConfigParser()
config.readfp(open('../../configuration.ini','r'))
self.logFile = config.get('Error', 'logFile')
self.trembl = config.get('Download', 'gene2trembl')
self.protein = config.get('Convert', 'trEMBL')
self.logger=None
self.formatter=None
self.file_handler=None
#GeneID UniGene_cluster
self.path_exist()
self.init_log()
def path_exist(self):
""" Check if dir exist if not we create the path
string = dir/subdir/subsubdir
string.rsplit('/',1)[0]
==> return dir/subdir/ """
if not os.path.isdir(self.protein.rsplit('/',1)[0]):
os.makedirs(self.protein.rsplit('/', 1)[0])
def init_log(self):
        # create the logger object that we will use to write to the logs
        self.logger = logging.getLogger()
        # set the logger level to DEBUG so that it records everything
        self.logger.setLevel(logging.DEBUG)
        # create a formatter that will prepend the time and the level
        # of each message to everything written to the log
        self.formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
        # create a handler that redirects log writes to a file
        # in append mode, with 1 backup and a maximum size of 1MB
        self.file_handler = RotatingFileHandler(self.logFile, 'a', 1000000, 1)
        # set its level to DEBUG, tell it to use the formatter created
        # above, and add this handler to the logger
self.file_handler.setLevel(logging.DEBUG)
self.file_handler.setFormatter(self.formatter)
self.logger.addHandler(self.file_handler)
def get_trEMBL(self):
try:
with open(self.trembl,'r') as inputFile,\
open(self.protein, 'w') as outputFile:
ID=""
AC=[]
for line in inputFile:
if("AC" in line[:2]):
newLine=line[5:].split("; ")
for ac in newLine:
AC.append(ac)
if("GeneID" in line and "DR" in line):
newLine=line[5:].split("; ")
ID=newLine[1]
if("//" in line):
if (ID != ""):
for ac in AC:
outputFile.write(str(ID) + "\t" + str(ac.split(";")[0]) + "\n")
ID=""
AC=[]
except:
self.logger.warning("Error - trembl.py - gettrEMBL ")
self.logger.warning("Exception at the line : {}".format(sys.exc_info()[-1].tb_lineno))
self.logger.warning(sys.exc_info())
if __name__ == '__main__':
TREMBL().get_trEMBL()
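# Sketch of the UniProt flat-file lines that get_trEMBL consumes (the
# accession numbers and GeneID below are illustrative, not real data):
#
#   AC   Q9XYZ1; Q9XYZ2;
#   DR   GeneID; 12345; -.
#   //
#
# For such a record, one "GeneID<TAB>accession" row is written per accession.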
|
wwbcoop/wwb.cc | refs/heads/master | apps/models/migrations/0004_remove_project_published_date.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-10-27 15:37
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('models', '0003_auto_20171027_1526'),
]
operations = [
migrations.RemoveField(
model_name='project',
name='published_date',
),
]
|
dancingdan/tensorflow | refs/heads/master | tensorflow/contrib/distributions/python/kernel_tests/bijectors/weibull_test.py | 25 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib.distributions.python.ops.bijectors.weibull import Weibull
from tensorflow.python.ops.distributions.bijector_test_util import assert_bijective_and_finite
from tensorflow.python.ops.distributions.bijector_test_util import assert_scalar_congruency
from tensorflow.python.platform import test
class WeibullBijectorTest(test.TestCase):
"""Tests correctness of the weibull bijector."""
def testBijector(self):
with self.cached_session():
scale = 5.
concentration = 0.3
bijector = Weibull(
scale=scale, concentration=concentration,
validate_args=True)
self.assertEqual("weibull", bijector.name)
x = np.array([[[0.], [1.], [14.], [20.], [100.]]], dtype=np.float32)
# Weibull distribution
weibull_dist = stats.frechet_r(c=concentration, scale=scale)
y = weibull_dist.cdf(x).astype(np.float32)
self.assertAllClose(y, bijector.forward(x).eval())
self.assertAllClose(x, bijector.inverse(y).eval())
self.assertAllClose(
weibull_dist.logpdf(x),
bijector.forward_log_det_jacobian(x, event_ndims=0).eval())
self.assertAllClose(
-bijector.inverse_log_det_jacobian(y, event_ndims=0).eval(),
bijector.forward_log_det_jacobian(x, event_ndims=0).eval(),
rtol=1e-4,
atol=0.)
def testScalarCongruency(self):
with self.cached_session():
assert_scalar_congruency(
Weibull(scale=20., concentration=0.3),
lower_x=1., upper_x=100., rtol=0.02)
def testBijectiveAndFinite(self):
with self.cached_session():
bijector = Weibull(
scale=20., concentration=2., validate_args=True)
x = np.linspace(1., 8., num=10).astype(np.float32)
y = np.linspace(
-np.expm1(-1 / 400.),
-np.expm1(-16), num=10).astype(np.float32)
assert_bijective_and_finite(bijector, x, y, event_ndims=0, rtol=1e-3)
if __name__ == "__main__":
test.main()
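# For reference, the transform exercised above is the Weibull CDF and its
# inverse (k = concentration, s = scale):
#
#   forward(x) = 1 - exp(-(x / s) ** k)
#   inverse(y) = s * (-log(1 - y)) ** (1 / k)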
|
nooone/libgdx | refs/heads/master | extensions/gdx-freetype/jni/freetype-2.4.10/src/tools/docmaker/docmaker.py | 463 | #!/usr/bin/env python
#
# DocMaker (c) 2002, 2004, 2008 David Turner <[email protected]>
#
# This program is a re-write of the original DocMaker tool used
# to generate the API Reference of the FreeType font engine
# by converting in-source comments into structured HTML.
#
# This new version is capable of outputting XML data, as well
# as accepting more liberal formatting options.
#
# It also uses regular expression matching and substitution
# to speed things significantly.
#
from sources import *
from content import *
from utils import *
from formatter import *
from tohtml import *
import utils
import sys, os, time, string, glob, getopt
def usage():
print "\nDocMaker Usage information\n"
print " docmaker [options] file1 [file2 ...]\n"
print "using the following options:\n"
print " -h : print this page"
print " -t : set project title, as in '-t \"My Project\"'"
print " -o : set output directory, as in '-o mydir'"
print " -p : set documentation prefix, as in '-p ft2'"
print ""
print " --title : same as -t, as in '--title=\"My Project\"'"
print " --output : same as -o, as in '--output=mydir'"
print " --prefix : same as -p, as in '--prefix=ft2'"
def main( argv ):
"""main program loop"""
global output_dir
try:
opts, args = getopt.getopt( sys.argv[1:], \
"ht:o:p:", \
["help", "title=", "output=", "prefix="] )
except getopt.GetoptError:
usage()
sys.exit( 2 )
if args == []:
usage()
sys.exit( 1 )
# process options
#
project_title = "Project"
project_prefix = None
output_dir = None
for opt in opts:
if opt[0] in ( "-h", "--help" ):
usage()
sys.exit( 0 )
if opt[0] in ( "-t", "--title" ):
project_title = opt[1]
if opt[0] in ( "-o", "--output" ):
utils.output_dir = opt[1]
if opt[0] in ( "-p", "--prefix" ):
project_prefix = opt[1]
check_output()
# create context and processor
source_processor = SourceProcessor()
content_processor = ContentProcessor()
# retrieve the list of files to process
file_list = make_file_list( args )
for filename in file_list:
source_processor.parse_file( filename )
content_processor.parse_sources( source_processor )
# process sections
content_processor.finish()
formatter = HtmlFormatter( content_processor, project_title, project_prefix )
formatter.toc_dump()
formatter.index_dump()
formatter.section_dump_all()
# if called from the command line
#
if __name__ == '__main__':
main( sys.argv )
# eof
|
jkstrick/samba | refs/heads/master | source4/dsdb/tests/python/token_group.py | 26 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# test tokengroups attribute against internal token calculation
import optparse
import sys
import os
sys.path.insert(0, "bin/python")
import samba
from samba.tests.subunitrun import SubunitOptions, TestProgram
import samba.getopt as options
from samba.auth import system_session
from samba import ldb, dsdb
from samba.samdb import SamDB
from samba.auth import AuthContext
from samba.ndr import ndr_unpack
from samba import gensec
from samba.credentials import Credentials, DONT_USE_KERBEROS
from samba.dsdb import GTYPE_SECURITY_GLOBAL_GROUP, GTYPE_SECURITY_UNIVERSAL_GROUP
import samba.tests
from samba.tests import delete_force
from samba.auth import AUTH_SESSION_INFO_DEFAULT_GROUPS, AUTH_SESSION_INFO_AUTHENTICATED, AUTH_SESSION_INFO_SIMPLE_PRIVILEGES
parser = optparse.OptionParser("ldap.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
subunitopts = SubunitOptions(parser)
parser.add_option_group(subunitopts)
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_usage()
sys.exit(1)
url = args[0]
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)
def closure(vSet, wSet, aSet):
for edge in aSet:
start, end = edge
if start in wSet:
if end not in wSet and end in vSet:
wSet.add(end)
closure(vSet, wSet, aSet)
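# closure() grows wSet in place with every vertex of vSet reachable from the
# current wSet along the directed edges in aSet.  A tiny illustrative run
# (hypothetical values, not test data):
#
#   vSet = {"a", "b", "c"}; wSet = {"a"}; aSet = {("a", "b"), ("b", "c")}
#   closure(vSet, wSet, aSet)   # wSet becomes {"a", "b", "c"}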
class StaticTokenTest(samba.tests.TestCase):
def setUp(self):
super(StaticTokenTest, self).setUp()
self.ldb = SamDB(url, credentials=creds, session_info=system_session(lp), lp=lp)
self.base_dn = self.ldb.domain_dn()
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
self.user_sid_dn = "<SID=%s>" % str(ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["tokenGroups"][0]))
session_info_flags = ( AUTH_SESSION_INFO_DEFAULT_GROUPS |
AUTH_SESSION_INFO_AUTHENTICATED |
AUTH_SESSION_INFO_SIMPLE_PRIVILEGES)
session = samba.auth.user_session(self.ldb, lp_ctx=lp, dn=self.user_sid_dn,
session_info_flags=session_info_flags)
token = session.security_token
self.user_sids = []
for s in token.sids:
self.user_sids.append(str(s))
def test_rootDSE_tokenGroups(self):
"""Testing rootDSE tokengroups against internal calculation"""
if not url.startswith("ldap"):
self.fail(msg="This test is only valid on ldap")
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
print("Getting tokenGroups from rootDSE")
tokengroups = []
for sid in res[0]['tokenGroups']:
tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("tokengroups: %s" % tokengroups)
print("calculated : %s" % self.user_sids)
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against rootDSE tokenGroups")
def test_dn_tokenGroups(self):
print("Getting tokenGroups from user DN")
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
dn_tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(dn_tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user DN tokenGroups")
def test_pac_groups(self):
settings = {}
settings["lp_ctx"] = lp
settings["target_hostname"] = lp.get("netbios name")
gensec_client = gensec.Security.start_client(settings)
gensec_client.set_credentials(creds)
gensec_client.want_feature(gensec.FEATURE_SEAL)
gensec_client.start_mech_by_sasl_name("GSSAPI")
auth_context = AuthContext(lp_ctx=lp, ldb=self.ldb, methods=[])
gensec_server = gensec.Security.start_server(settings, auth_context)
machine_creds = Credentials()
machine_creds.guess(lp)
machine_creds.set_machine_account(lp)
gensec_server.set_credentials(machine_creds)
gensec_server.want_feature(gensec.FEATURE_SEAL)
gensec_server.start_mech_by_sasl_name("GSSAPI")
client_finished = False
server_finished = False
server_to_client = ""
# Run the actual call loop.
while client_finished == False and server_finished == False:
if not client_finished:
print "running client gensec_update"
(client_finished, client_to_server) = gensec_client.update(server_to_client)
if not server_finished:
print "running server gensec_update"
(server_finished, server_to_client) = gensec_server.update(client_to_server)
session = gensec_server.session_info()
token = session.security_token
pac_sids = []
for s in token.sids:
pac_sids.append(str(s))
sidset1 = set(pac_sids)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user PAC tokenGroups")
class DynamicTokenTest(samba.tests.TestCase):
def get_creds(self, target_username, target_password):
creds_tmp = Credentials()
creds_tmp.set_username(target_username)
creds_tmp.set_password(target_password)
creds_tmp.set_domain(creds.get_domain())
creds_tmp.set_realm(creds.get_realm())
creds_tmp.set_workstation(creds.get_workstation())
creds_tmp.set_gensec_features(creds_tmp.get_gensec_features()
| gensec.FEATURE_SEAL)
return creds_tmp
def get_ldb_connection(self, target_username, target_password):
creds_tmp = self.get_creds(target_username, target_password)
ldb_target = SamDB(url=url, credentials=creds_tmp, lp=lp)
return ldb_target
def setUp(self):
super(DynamicTokenTest, self).setUp()
self.admin_ldb = SamDB(url, credentials=creds, session_info=system_session(lp), lp=lp)
self.base_dn = self.admin_ldb.domain_dn()
self.test_user = "tokengroups_user1"
self.test_user_pass = "samba123@"
self.admin_ldb.newuser(self.test_user, self.test_user_pass)
self.test_group0 = "tokengroups_group0"
self.admin_ldb.newgroup(self.test_group0, grouptype=dsdb.GTYPE_SECURITY_DOMAIN_LOCAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group0, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group0_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group0, [self.test_user],
add_members_operation=True)
self.test_group1 = "tokengroups_group1"
self.admin_ldb.newgroup(self.test_group1, grouptype=dsdb.GTYPE_SECURITY_GLOBAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group1, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group1_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group1, [self.test_user],
add_members_operation=True)
self.test_group2 = "tokengroups_group2"
self.admin_ldb.newgroup(self.test_group2, grouptype=dsdb.GTYPE_SECURITY_UNIVERSAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group2, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group2_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group2, [self.test_user],
add_members_operation=True)
self.ldb = self.get_ldb_connection(self.test_user, self.test_user_pass)
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
self.user_sid_dn = "<SID=%s>" % str(ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["tokenGroups"][0]))
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=[])
self.assertEquals(len(res), 1)
self.test_user_dn = res[0].dn
session_info_flags = ( AUTH_SESSION_INFO_DEFAULT_GROUPS |
AUTH_SESSION_INFO_AUTHENTICATED |
AUTH_SESSION_INFO_SIMPLE_PRIVILEGES)
session = samba.auth.user_session(self.ldb, lp_ctx=lp, dn=self.user_sid_dn,
session_info_flags=session_info_flags)
token = session.security_token
self.user_sids = []
for s in token.sids:
self.user_sids.append(str(s))
def tearDown(self):
super(DynamicTokenTest, self).tearDown()
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_user, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group0, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group1, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group2, "cn=users", self.base_dn))
def test_rootDSE_tokenGroups(self):
"""Testing rootDSE tokengroups against internal calculation"""
if not url.startswith("ldap"):
self.fail(msg="This test is only valid on ldap")
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
print("Getting tokenGroups from rootDSE")
tokengroups = []
for sid in res[0]['tokenGroups']:
tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("tokengroups: %s" % tokengroups)
print("calculated : %s" % self.user_sids)
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against rootDSE tokenGroups")
def test_dn_tokenGroups(self):
print("Getting tokenGroups from user DN")
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
dn_tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(dn_tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user DN tokenGroups")
def test_pac_groups(self):
settings = {}
settings["lp_ctx"] = lp
settings["target_hostname"] = lp.get("netbios name")
gensec_client = gensec.Security.start_client(settings)
gensec_client.set_credentials(self.get_creds(self.test_user, self.test_user_pass))
gensec_client.want_feature(gensec.FEATURE_SEAL)
gensec_client.start_mech_by_sasl_name("GSSAPI")
auth_context = AuthContext(lp_ctx=lp, ldb=self.ldb, methods=[])
gensec_server = gensec.Security.start_server(settings, auth_context)
machine_creds = Credentials()
machine_creds.guess(lp)
machine_creds.set_machine_account(lp)
gensec_server.set_credentials(machine_creds)
gensec_server.want_feature(gensec.FEATURE_SEAL)
gensec_server.start_mech_by_sasl_name("GSSAPI")
client_finished = False
server_finished = False
server_to_client = ""
# Run the actual call loop.
while not client_finished and not server_finished:
if not client_finished:
print "running client gensec_update"
(client_finished, client_to_server) = gensec_client.update(server_to_client)
if not server_finished:
print "running server gensec_update"
(server_finished, server_to_client) = gensec_server.update(client_to_server)
session = gensec_server.session_info()
token = session.security_token
pac_sids = []
for s in token.sids:
pac_sids.append(str(s))
sidset1 = set(pac_sids)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user PAC tokenGroups")
def test_tokenGroups_manual(self):
# Manually run the tokenGroups algorithm from MS-ADTS 3.1.1.4.5.19 and MS-DRSR 4.1.8.3
# and compare the result
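# The closure() helper used below is defined earlier in this module; as a
# rough sketch (an assumption about its shape, not a verbatim copy), it
# transitively expands the seed set wSet along the membership edges in aSet:
#
#   def closure(vSet, wSet, aSet):
#       for start, end in aSet:
#           if start in wSet and end not in wSet:
#           wSet.add(end)
#               closure(vSet, wSet, aSet)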
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(|(objectclass=user)(objectclass=group))",
attrs=["memberOf"])
aSet = set()
aSetR = set()
vSet = set()
for obj in res:
if "memberOf" in obj:
for dn in obj["memberOf"]:
first = obj.dn.get_casefold()
second = ldb.Dn(self.admin_ldb, dn).get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(objectclass=user)",
attrs=["primaryGroupID"])
for obj in res:
if "primaryGroupID" in obj:
sid = "%s-%d" % (self.admin_ldb.get_domain_sid(), int(obj["primaryGroupID"][0]))
res2 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
first = obj.dn.get_casefold()
second = res2[0].dn.get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
wSet = set()
wSet.add(self.test_user_dn.get_casefold())
closure(vSet, wSet, aSet)
wSet.remove(self.test_user_dn.get_casefold())
tokenGroupsSet = set()
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
sid = ndr_unpack(samba.dcerpc.security.dom_sid, sid)
res3 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
tokenGroupsSet.add(res3[0].dn.get_casefold())
if len(wSet.difference(tokenGroupsSet)):
self.fail(msg="additional calculated: %s" % wSet.difference(tokenGroupsSet))
if len(tokenGroupsSet.difference(wSet)):
self.fail(msg="additional tokenGroups: %s" % tokenGroupsSet.difference(wSet))
def filtered_closure(self, wSet, filter_grouptype):
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(|(objectclass=user)(objectclass=group))",
attrs=["memberOf"])
aSet = set()
aSetR = set()
vSet = set()
for obj in res:
vSet.add(obj.dn.get_casefold())
if "memberOf" in obj:
for dn in obj["memberOf"]:
first = obj.dn.get_casefold()
second = ldb.Dn(self.admin_ldb, dn).get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(objectclass=user)",
attrs=["primaryGroupID"])
for obj in res:
if "primaryGroupID" in obj:
sid = "%s-%d" % (self.admin_ldb.get_domain_sid(), int(obj["primaryGroupID"][0]))
res2 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
first = obj.dn.get_casefold()
second = res2[0].dn.get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
uSet = set()
for v in vSet:
res_group = self.admin_ldb.search(base=v, scope=ldb.SCOPE_BASE,
attrs=["groupType"],
expression="objectClass=group")
if len(res_group) == 1:
if hex(int(res_group[0]["groupType"][0]) & 0x00000000FFFFFFFF) == hex(filter_grouptype):
uSet.add(v)
else:
uSet.add(v)
closure(uSet, wSet, aSet)
def test_tokenGroupsGlobalAndUniversal_manual(self):
# Manually run the tokenGroups algorithm from MS-ADTS 3.1.1.4.5.19 and MS-DRSR 4.1.8.3
# and compare the result
# The variable names come from MS-ADTS May 15, 2014
S = set()
S.add(self.test_user_dn.get_casefold())
self.filtered_closure(S, GTYPE_SECURITY_GLOBAL_GROUP)
T = set()
# Not really a SID, we do this on DNs...
for sid in S:
X = set()
X.add(sid)
self.filtered_closure(X, GTYPE_SECURITY_UNIVERSAL_GROUP)
T = T.union(X)
T.remove(self.test_user_dn.get_casefold())
tokenGroupsSet = set()
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroupsGlobalAndUniversal"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroupsGlobalAndUniversal']:
sid = ndr_unpack(samba.dcerpc.security.dom_sid, sid)
res3 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
tokenGroupsSet.add(res3[0].dn.get_casefold())
if len(T.difference(tokenGroupsSet)):
self.fail(msg="additional calculated: %s" % T.difference(tokenGroupsSet))
if len(tokenGroupsSet.difference(T)):
self.fail(msg="additional tokenGroupsGlobalAndUniversal: %s" % tokenGroupsSet.difference(T))
if not "://" in url:
if os.path.isfile(url):
url = "tdb://%s" % url
else:
url = "ldap://%s" % url
TestProgram(module=__name__, opts=subunitopts)
|
abhattad4/Digi-Menu | refs/heads/master | tests/mail/tests.py | 11 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import asyncore
import os
import shutil
import smtpd
import sys
import tempfile
import threading
from email.mime.text import MIMEText
from smtplib import SMTP, SMTPException
from ssl import SSLError
from django.core import mail
from django.core.mail import (
EmailMessage, EmailMultiAlternatives, mail_admins, mail_managers,
send_mail, send_mass_mail,
)
from django.core.mail.backends import console, dummy, filebased, locmem, smtp
from django.core.mail.message import BadHeaderError
from django.test import SimpleTestCase, override_settings
from django.utils.encoding import force_bytes, force_text
from django.utils.six import PY3, StringIO, binary_type
from django.utils.translation import ugettext_lazy
if PY3:
from email.utils import parseaddr
from email import message_from_bytes, message_from_binary_file
else:
from email.Utils import parseaddr
from email import (message_from_string as message_from_bytes,
message_from_file as message_from_binary_file)
class HeadersCheckMixin(object):
def assertMessageHasHeaders(self, message, headers):
"""
Check that :param message: has all :param headers: headers (an
illustrative usage follows this method).
:param message: can be an instance of an email.Message subclass or a
bytestring with the contents of an email message.
:param headers: should be a set of (header-name, header-value) tuples.
"""
if isinstance(message, binary_type):
message = message_from_bytes(message)
msg_headers = set(message.items())
self.assertTrue(headers.issubset(msg_headers), msg='Message is missing '
'the following headers: %s' % (headers - msg_headers),)
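# Illustrative usage of the mixin (values are hypothetical, mirroring the
# calls made in the tests below):
#   self.assertMessageHasHeaders(email.message(), {
#       ('Subject', 'Subject'),
#       ('From', '[email protected]'),
#   })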
class MailTests(HeadersCheckMixin, SimpleTestCase):
"""
Non-backend specific tests.
"""
def test_ascii(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], '[email protected]')
self.assertEqual(message['To'], '[email protected]')
def test_multiple_recipients(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'])
message = email.message()
self.assertEqual(message['Subject'], 'Subject')
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message['From'], '[email protected]')
self.assertEqual(message['To'], '[email protected], [email protected]')
def test_cc(self):
"""Regression test for #7722"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]'])
message = email.message()
self.assertEqual(message['Cc'], '[email protected]')
self.assertEqual(email.recipients(), ['[email protected]', '[email protected]'])
# Test multiple CC with multiple To
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'], cc=['[email protected]', '[email protected]'])
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]'])
# Testing with Bcc
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]', '[email protected]'], cc=['[email protected]', '[email protected]'], bcc=['[email protected]'])
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]'])
def test_reply_to(self):
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]'],
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected]')
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]', '[email protected]']
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected], [email protected]')
def test_recipients_as_tuple(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ('[email protected]', '[email protected]'), cc=('[email protected]', '[email protected]'), bcc=('[email protected]',))
message = email.message()
self.assertEqual(message['Cc'], '[email protected], [email protected]')
self.assertEqual(email.recipients(), ['[email protected]', '[email protected]', '[email protected]', '[email protected]', '[email protected]'])
def test_recipients_as_string(self):
with self.assertRaisesMessage(TypeError, '"to" argument must be a list or tuple'):
EmailMessage(to='[email protected]')
with self.assertRaisesMessage(TypeError, '"cc" argument must be a list or tuple'):
EmailMessage(cc='[email protected]')
with self.assertRaisesMessage(TypeError, '"bcc" argument must be a list or tuple'):
EmailMessage(bcc='[email protected]')
with self.assertRaisesMessage(TypeError, '"reply_to" argument must be a list or tuple'):
EmailMessage(reply_to='[email protected]')
def test_header_injection(self):
email = EmailMessage('Subject\nInjection Test', 'Content', '[email protected]', ['[email protected]'])
self.assertRaises(BadHeaderError, email.message)
email = EmailMessage(ugettext_lazy('Subject\nInjection Test'), 'Content', '[email protected]', ['[email protected]'])
self.assertRaises(BadHeaderError, email.message)
def test_space_continuation(self):
"""
Test for space continuation character in long (ASCII) subject headers (#7747)
"""
email = EmailMessage('Long subject lines that get wrapped should contain a space continuation character to get expected behavior in Outlook and Thunderbird', 'Content', '[email protected]', ['[email protected]'])
message = email.message()
# Note that in Python 3, maximum line length has increased from 76 to 78
self.assertEqual(message['Subject'].encode(), b'Long subject lines that get wrapped should contain a space continuation\n character to get expected behavior in Outlook and Thunderbird')
def test_message_header_overrides(self):
"""
Specifying dates or message-ids in the extra headers overrides the
default values (#9233)
"""
headers = {"date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
email = EmailMessage('subject', 'content', '[email protected]', ['[email protected]'], headers=headers)
self.assertMessageHasHeaders(email.message(), {
('Content-Transfer-Encoding', '7bit'),
('Content-Type', 'text/plain; charset="utf-8"'),
('From', '[email protected]'),
('MIME-Version', '1.0'),
('Message-ID', 'foo'),
('Subject', 'subject'),
('To', '[email protected]'),
('date', 'Fri, 09 Nov 2001 01:08:47 -0000'),
})
def test_from_header(self):
"""
Make sure we can manually set the From header (#9214)
"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
message = email.message()
self.assertEqual(message['From'], '[email protected]')
def test_to_header(self):
"""
Make sure we can manually set the To header (#17444)
"""
email = EmailMessage('Subject', 'Content', '[email protected]',
['[email protected]', '[email protected]'],
headers={'To': '[email protected]'})
message = email.message()
self.assertEqual(message['To'], '[email protected]')
self.assertEqual(email.to, ['[email protected]', '[email protected]'])
# If we don't set the To header manually, it should default to the `to` argument to the constructor
email = EmailMessage('Subject', 'Content', '[email protected]',
['[email protected]', '[email protected]'])
message = email.message()
self.assertEqual(message['To'], '[email protected], [email protected]')
self.assertEqual(email.to, ['[email protected]', '[email protected]'])
def test_reply_to_header(self):
"""
Specifying 'Reply-To' in headers should override reply_to.
"""
email = EmailMessage(
'Subject', 'Content', '[email protected]', ['[email protected]'],
reply_to=['[email protected]'], headers={'Reply-To': '[email protected]'},
)
message = email.message()
self.assertEqual(message['Reply-To'], '[email protected]')
def test_multiple_message_call(self):
"""
Regression for #13259 - Make sure that headers are not changed when
calling EmailMessage.message()
"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
message = email.message()
self.assertEqual(message['From'], '[email protected]')
message = email.message()
self.assertEqual(message['From'], '[email protected]')
def test_unicode_address_header(self):
"""
Regression for #11144 - When a to/from/cc header contains unicode,
make sure the email addresses are parsed correctly (especially with
regard to commas)
"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['"Firstname Sürname" <[email protected]>', '[email protected]'])
self.assertEqual(email.message()['To'], '=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>, [email protected]')
email = EmailMessage('Subject', 'Content', '[email protected]', ['"Sürname, Firstname" <[email protected]>', '[email protected]'])
self.assertEqual(email.message()['To'], '=?utf-8?q?S=C3=BCrname=2C_Firstname?= <[email protected]>, [email protected]')
def test_unicode_headers(self):
email = EmailMessage("Gżegżółka", "Content", "[email protected]", ["[email protected]"],
headers={"Sender": '"Firstname Sürname" <[email protected]>',
"Comments": 'My Sürname is non-ASCII'})
message = email.message()
self.assertEqual(message['Subject'], '=?utf-8?b?R8W8ZWfFvMOzxYJrYQ==?=')
self.assertEqual(message['Sender'], '=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>')
self.assertEqual(message['Comments'], '=?utf-8?q?My_S=C3=BCrname_is_non-ASCII?=')
def test_safe_mime_multipart(self):
"""
Make sure headers can be set with a different encoding than utf-8 in
SafeMIMEMultipart as well
"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
from_email, to = '[email protected]', '"Sürname, Firstname" <[email protected]>'
text_content = 'This is an important message.'
html_content = '<p>This is an <strong>important</strong> message.</p>'
msg = EmailMultiAlternatives('Message from Firstname Sürname', text_content, from_email, [to], headers=headers)
msg.attach_alternative(html_content, "text/html")
msg.encoding = 'iso-8859-1'
self.assertEqual(msg.message()['To'], '=?iso-8859-1?q?S=FCrname=2C_Firstname?= <[email protected]>')
self.assertEqual(msg.message()['Subject'], '=?iso-8859-1?q?Message_from_Firstname_S=FCrname?=')
def test_encoding(self):
"""
Regression for #12791 - Encode body correctly with other encodings
than utf-8
"""
email = EmailMessage('Subject', 'Firstname Sürname is a great guy.', '[email protected]', ['[email protected]'])
email.encoding = 'iso-8859-1'
message = email.message()
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]')})
self.assertEqual(message.get_payload(), 'Firstname S=FCrname is a great guy.')
# Make sure MIME attachments also works correctly with other encodings than utf-8
text_content = 'Firstname Sürname is a great guy.'
html_content = '<p>Firstname Sürname is a <strong>great</strong> guy.</p>'
msg = EmailMultiAlternatives('Subject', text_content, '[email protected]', ['[email protected]'])
msg.encoding = 'iso-8859-1'
msg.attach_alternative(html_content, "text/html")
payload0 = msg.message().get_payload(0)
self.assertMessageHasHeaders(payload0, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable')})
self.assertTrue(payload0.as_bytes().endswith(b'\n\nFirstname S=FCrname is a great guy.'))
payload1 = msg.message().get_payload(1)
self.assertMessageHasHeaders(payload1, {
('MIME-Version', '1.0'),
('Content-Type', 'text/html; charset="iso-8859-1"'),
('Content-Transfer-Encoding', 'quoted-printable')})
self.assertTrue(payload1.as_bytes().endswith(b'\n\n<p>Firstname S=FCrname is a <strong>great</strong> guy.</p>'))
def test_attachments(self):
"""Regression test for #9367"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
subject, from_email, to = 'hello', '[email protected]', '[email protected]'
text_content = 'This is an important message.'
html_content = '<p>This is an <strong>important</strong> message.</p>'
msg = EmailMultiAlternatives(subject, text_content, from_email, [to], headers=headers)
msg.attach_alternative(html_content, "text/html")
msg.attach("an attachment.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
msg_bytes = msg.message().as_bytes()
message = message_from_bytes(msg_bytes)
self.assertTrue(message.is_multipart())
self.assertEqual(message.get_content_type(), 'multipart/mixed')
self.assertEqual(message.get_default_type(), 'text/plain')
payload = message.get_payload()
self.assertEqual(payload[0].get_content_type(), 'multipart/alternative')
self.assertEqual(payload[1].get_content_type(), 'application/pdf')
def test_non_ascii_attachment_filename(self):
"""Regression test for #14964"""
headers = {"Date": "Fri, 09 Nov 2001 01:08:47 -0000", "Message-ID": "foo"}
subject, from_email, to = 'hello', '[email protected]', '[email protected]'
content = 'This is the message.'
msg = EmailMessage(subject, content, from_email, [to], headers=headers)
# Unicode in file name
msg.attach("une pièce jointe.pdf", b"%PDF-1.4.%...", mimetype="application/pdf")
msg_bytes = msg.message().as_bytes()
message = message_from_bytes(msg_bytes)
payload = message.get_payload()
self.assertEqual(payload[1].get_filename(), 'une pièce jointe.pdf')
def test_dummy_backend(self):
"""
Make sure that the dummy backend returns the correct number of sent messages
"""
connection = dummy.EmailBackend()
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
self.assertEqual(connection.send_messages([email, email, email]), 3)
def test_arbitrary_keyword(self):
"""
Make sure that get_connection() accepts arbitrary keyword arguments that
might be used with custom backends.
"""
c = mail.get_connection(fail_silently=True, foo='bar')
self.assertTrue(c.fail_silently)
def test_custom_backend(self):
"""Test custom backend defined in this suite."""
conn = mail.get_connection('mail.custombackend.EmailBackend')
self.assertTrue(hasattr(conn, 'test_outbox'))
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
conn.send_messages([email])
self.assertEqual(len(conn.test_outbox), 1)
def test_backend_arg(self):
"""Test backend argument of mail.get_connection()"""
self.assertIsInstance(mail.get_connection('django.core.mail.backends.smtp.EmailBackend'), smtp.EmailBackend)
self.assertIsInstance(mail.get_connection('django.core.mail.backends.locmem.EmailBackend'), locmem.EmailBackend)
self.assertIsInstance(mail.get_connection('django.core.mail.backends.dummy.EmailBackend'), dummy.EmailBackend)
self.assertIsInstance(mail.get_connection('django.core.mail.backends.console.EmailBackend'), console.EmailBackend)
tmp_dir = tempfile.mkdtemp()
try:
self.assertIsInstance(mail.get_connection('django.core.mail.backends.filebased.EmailBackend', file_path=tmp_dir), filebased.EmailBackend)
finally:
shutil.rmtree(tmp_dir)
self.assertIsInstance(mail.get_connection(), locmem.EmailBackend)
@override_settings(
EMAIL_BACKEND='django.core.mail.backends.locmem.EmailBackend',
ADMINS=[('nobody', '[email protected]')],
MANAGERS=[('nobody', '[email protected]')])
def test_connection_arg(self):
"""Test connection argument to send_mail(), et. al."""
mail.outbox = []
# Send using non-default connection
connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, 'Subject')
connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mass_mail([
('Subject1', 'Content1', '[email protected]', ['[email protected]']),
('Subject2', 'Content2', '[email protected]', ['[email protected]']),
], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 2)
self.assertEqual(connection.test_outbox[0].subject, 'Subject1')
self.assertEqual(connection.test_outbox[1].subject, 'Subject2')
connection = mail.get_connection('mail.custombackend.EmailBackend')
mail_admins('Admin message', 'Content', connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, '[Django] Admin message')
connection = mail.get_connection('mail.custombackend.EmailBackend')
mail_managers('Manager message', 'Content', connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 1)
self.assertEqual(connection.test_outbox[0].subject, '[Django] Manager message')
def test_dont_mangle_from_in_body(self):
# Regression for #13433 - Make sure that EmailMessage doesn't mangle
# 'From ' in message body.
email = EmailMessage('Subject', 'From the future', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
self.assertNotIn(b'>From the future', email.message().as_bytes())
def test_dont_base64_encode(self):
# Ticket #3472
# Shouldn't use Base64 encoding at all
msg = EmailMessage('Subject', 'UTF-8 encoded body', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
self.assertNotIn(b'Content-Transfer-Encoding: base64', msg.message().as_bytes())
# Ticket #11212
# Shouldn't use quoted printable, should detect it can represent content with 7 bit data
msg = EmailMessage('Subject', 'Body with only ASCII characters.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 7bit', s)
# Shouldn't use quoted printable, should detect it can represent content with 8 bit data
msg = EmailMessage('Subject', 'Body with latin characters: àáä.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 8bit', s)
msg = EmailMessage('Subject', 'Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
s = msg.message().as_bytes()
self.assertNotIn(b'Content-Transfer-Encoding: quoted-printable', s)
self.assertIn(b'Content-Transfer-Encoding: 8bit', s)
def test_dont_base64_encode_message_rfc822(self):
# Ticket #18967
# Shouldn't use base64 encoding for a child EmailMessage attachment.
# Create a child message first
child_msg = EmailMessage('Child Subject', 'Some body of child message', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
child_s = child_msg.message().as_string()
# Now create a parent
parent_msg = EmailMessage('Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
# Attach to parent as a string
parent_msg.attach(content=child_s, mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
# Feature test: try attaching email.Message object directly to the mail.
parent_msg = EmailMessage('Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
parent_msg.attach(content=child_msg.message(), mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
# Feature test: try attaching Django's EmailMessage object directly to the mail.
parent_msg = EmailMessage('Parent Subject', 'Some parent body', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
parent_msg.attach(content=child_msg, mimetype='message/rfc822')
parent_s = parent_msg.message().as_string()
# Verify that the child message header is not base64 encoded
self.assertIn(str('Child Subject'), parent_s)
class PythonGlobalState(SimpleTestCase):
"""
Tests for #12422 -- Django's handling (#2472/#11212) of utf-8 encoded text
parts shouldn't pollute the global email Python package charset registry when
django.mail.message is imported.
"""
def test_utf8(self):
txt = MIMEText('UTF-8 encoded body', 'plain', 'utf-8')
self.assertIn('Content-Transfer-Encoding: base64', txt.as_string())
def test_7bit(self):
txt = MIMEText('Body with only ASCII characters.', 'plain', 'utf-8')
self.assertIn('Content-Transfer-Encoding: base64', txt.as_string())
def test_8bit_latin(self):
txt = MIMEText('Body with latin characters: àáä.', 'plain', 'utf-8')
self.assertIn(str('Content-Transfer-Encoding: base64'), txt.as_string())
def test_8bit_non_latin(self):
txt = MIMEText('Body with non latin characters: А Б В Г Д Е Ж Ѕ З И І К Л М Н О П.', 'plain', 'utf-8')
self.assertIn(str('Content-Transfer-Encoding: base64'), txt.as_string())
class BaseEmailBackendTests(HeadersCheckMixin, object):
email_backend = None
def setUp(self):
self.settings_override = override_settings(EMAIL_BACKEND=self.email_backend)
self.settings_override.enable()
def tearDown(self):
self.settings_override.disable()
def assertStartsWith(self, first, second):
if not first.startswith(second):
self.longMessage = True
self.assertEqual(first[:len(second)], second, "First string doesn't start with the second.")
def get_mailbox_content(self):
raise NotImplementedError('subclasses of BaseEmailBackendTests must provide a get_mailbox_content() method')
def flush_mailbox(self):
raise NotImplementedError('subclasses of BaseEmailBackendTests may require a flush_mailbox() method')
def get_the_message(self):
mailbox = self.get_mailbox_content()
self.assertEqual(len(mailbox), 1,
"Expected exactly one message, got %d.\n%r" % (len(mailbox), [
m.as_string() for m in mailbox]))
return mailbox[0]
def test_send(self):
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "[email protected]")
self.assertEqual(message.get_all("to"), ["[email protected]"])
def test_send_unicode(self):
email = EmailMessage('Chère maman', 'Je t\'aime très fort', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email])
self.assertEqual(num_sent, 1)
message = self.get_the_message()
self.assertEqual(message["subject"], '=?utf-8?q?Ch=C3=A8re_maman?=')
self.assertEqual(force_text(message.get_payload(decode=True)), 'Je t\'aime très fort')
def test_send_many(self):
email1 = EmailMessage('Subject', 'Content1', '[email protected]', ['[email protected]'])
email2 = EmailMessage('Subject', 'Content2', '[email protected]', ['[email protected]'])
num_sent = mail.get_connection().send_messages([email1, email2])
self.assertEqual(num_sent, 2)
messages = self.get_mailbox_content()
self.assertEqual(len(messages), 2)
self.assertEqual(messages[0].get_payload(), "Content1")
self.assertEqual(messages[1].get_payload(), "Content2")
def test_send_verbose_name(self):
email = EmailMessage("Subject", "Content", '"Firstname Sürname" <[email protected]>',
["[email protected]"])
email.send()
message = self.get_the_message()
self.assertEqual(message["subject"], "Subject")
self.assertEqual(message.get_payload(), "Content")
self.assertEqual(message["from"], "=?utf-8?q?Firstname_S=C3=BCrname?= <[email protected]>")
def test_plaintext_send_mail(self):
"""
Test send_mail without the html_message
regression test for adding html_message parameter to send_mail()
"""
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'])
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertFalse(message.is_multipart())
self.assertEqual(message.get_payload(), 'Content')
self.assertEqual(message.get_content_type(), 'text/plain')
def test_html_send_mail(self):
"""Test html_message argument to send_mail"""
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(MANAGERS=[('nobody', '[email protected]')])
def test_html_mail_managers(self):
"""Test html_message argument to mail_managers"""
mail_managers('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(ADMINS=[('nobody', '[email protected]')])
def test_html_mail_admins(self):
"""Test html_message argument to mail_admins """
mail_admins('Subject', 'Content', html_message='HTML Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.assertEqual(message.get_all('to'), ['[email protected]'])
self.assertTrue(message.is_multipart())
self.assertEqual(len(message.get_payload()), 2)
self.assertEqual(message.get_payload(0).get_payload(), 'Content')
self.assertEqual(message.get_payload(0).get_content_type(), 'text/plain')
self.assertEqual(message.get_payload(1).get_payload(), 'HTML Content')
self.assertEqual(message.get_payload(1).get_content_type(), 'text/html')
@override_settings(
ADMINS=[('nobody', '[email protected]')],
MANAGERS=[('nobody', '[email protected]')])
def test_manager_and_admin_mail_prefix(self):
"""
String prefix + lazy translated subject = bad output
Regression for #13494
"""
mail_managers(ugettext_lazy('Subject'), 'Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
self.flush_mailbox()
mail_admins(ugettext_lazy('Subject'), 'Content')
message = self.get_the_message()
self.assertEqual(message.get('subject'), '[Django] Subject')
@override_settings(ADMINS=(), MANAGERS=())
def test_empty_admins(self):
"""
Test that mail_admins/mail_managers don't connect to the mail server
if there are no recipients (#9383)
"""
mail_admins('hi', 'there')
self.assertEqual(self.get_mailbox_content(), [])
mail_managers('hi', 'there')
self.assertEqual(self.get_mailbox_content(), [])
def test_message_cc_header(self):
"""
Regression test for #7722
"""
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], cc=['[email protected]'])
mail.get_connection().send_messages([email])
message = self.get_the_message()
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="utf-8"'),
('Content-Transfer-Encoding', '7bit'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]'),
('Cc', '[email protected]')})
self.assertIn('\nDate: ', message.as_string())
def test_idn_send(self):
"""
Regression test for #14301
"""
self.assertTrue(send_mail('Subject', 'Content', 'from@öäü.com', ['to@öäü.com']))
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
self.flush_mailbox()
m = EmailMessage('Subject', 'Content', 'from@öäü.com',
['to@öäü.com'], cc=['cc@öäü.com'])
m.send()
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
self.assertEqual(message.get('cc'), '[email protected]')
def test_recipient_without_domain(self):
"""
Regression test for #15042
"""
self.assertTrue(send_mail("Subject", "Content", "tester", ["django"]))
message = self.get_the_message()
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), "tester")
self.assertEqual(message.get('to'), "django")
def test_close_connection(self):
"""
Test that connection can be closed (even when not explicitly opened)
"""
conn = mail.get_connection(username='', password='')
try:
conn.close()
except Exception as e:
self.fail("close() unexpectedly raised an exception: %s" % e)
def test_use_as_contextmanager(self):
"""
Test that the connection can be used as a contextmanager.
"""
opened = [False]
closed = [False]
conn = mail.get_connection(username='', password='')
def open():
opened[0] = True
conn.open = open
def close():
closed[0] = True
conn.close = close
with conn as same_conn:
self.assertTrue(opened[0])
self.assertIs(same_conn, conn)
self.assertFalse(closed[0])
self.assertTrue(closed[0])
class LocmemBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.locmem.EmailBackend'
def get_mailbox_content(self):
return [m.message() for m in mail.outbox]
def flush_mailbox(self):
mail.outbox = []
def tearDown(self):
super(LocmemBackendTests, self).tearDown()
mail.outbox = []
def test_locmem_shared_messages(self):
"""
Make sure that the locmem backend populates the outbox.
"""
connection = locmem.EmailBackend()
connection2 = locmem.EmailBackend()
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
connection.send_messages([email])
connection2.send_messages([email])
self.assertEqual(len(mail.outbox), 2)
def test_validate_multiline_headers(self):
# Ticket #18861 - Validate emails when using the locmem backend
with self.assertRaises(BadHeaderError):
send_mail('Subject\nMultiline', 'Content', '[email protected]', ['[email protected]'])
class FileBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.filebased.EmailBackend'
def setUp(self):
super(FileBackendTests, self).setUp()
self.tmp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.tmp_dir)
self._settings_override = override_settings(EMAIL_FILE_PATH=self.tmp_dir)
self._settings_override.enable()
def tearDown(self):
self._settings_override.disable()
super(FileBackendTests, self).tearDown()
def flush_mailbox(self):
for filename in os.listdir(self.tmp_dir):
os.unlink(os.path.join(self.tmp_dir, filename))
def get_mailbox_content(self):
messages = []
for filename in os.listdir(self.tmp_dir):
with open(os.path.join(self.tmp_dir, filename), 'rb') as fp:
session = fp.read().split(force_bytes('\n' + ('-' * 79) + '\n', encoding='ascii'))
messages.extend(message_from_bytes(m) for m in session if m)
return messages
def test_file_sessions(self):
"""Make sure opening a connection creates a new file"""
msg = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'], headers={'From': '[email protected]'})
connection = mail.get_connection()
connection.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 1)
with open(os.path.join(self.tmp_dir, os.listdir(self.tmp_dir)[0]), 'rb') as fp:
message = message_from_binary_file(fp)
self.assertEqual(message.get_content_type(), 'text/plain')
self.assertEqual(message.get('subject'), 'Subject')
self.assertEqual(message.get('from'), '[email protected]')
self.assertEqual(message.get('to'), '[email protected]')
connection2 = mail.get_connection()
connection2.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 2)
connection.send_messages([msg])
self.assertEqual(len(os.listdir(self.tmp_dir)), 2)
msg.connection = mail.get_connection()
self.assertTrue(connection.open())
msg.send()
self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
msg.send()
self.assertEqual(len(os.listdir(self.tmp_dir)), 3)
connection.close()
class ConsoleBackendTests(BaseEmailBackendTests, SimpleTestCase):
email_backend = 'django.core.mail.backends.console.EmailBackend'
def setUp(self):
super(ConsoleBackendTests, self).setUp()
self.__stdout = sys.stdout
self.stream = sys.stdout = StringIO()
def tearDown(self):
del self.stream
sys.stdout = self.__stdout
del self.__stdout
super(ConsoleBackendTests, self).tearDown()
def flush_mailbox(self):
self.stream = sys.stdout = StringIO()
def get_mailbox_content(self):
messages = self.stream.getvalue().split(str('\n' + ('-' * 79) + '\n'))
return [message_from_bytes(force_bytes(m)) for m in messages if m]
def test_console_stream_kwarg(self):
"""
Test that the console backend can be pointed at an arbitrary stream.
"""
s = StringIO()
connection = mail.get_connection('django.core.mail.backends.console.EmailBackend', stream=s)
send_mail('Subject', 'Content', '[email protected]', ['[email protected]'], connection=connection)
message = force_bytes(s.getvalue().split('\n' + ('-' * 79) + '\n')[0])
self.assertMessageHasHeaders(message, {
('MIME-Version', '1.0'),
('Content-Type', 'text/plain; charset="utf-8"'),
('Content-Transfer-Encoding', '7bit'),
('Subject', 'Subject'),
('From', '[email protected]'),
('To', '[email protected]')})
self.assertIn(b'\nDate: ', message)
class FakeSMTPChannel(smtpd.SMTPChannel):
def collect_incoming_data(self, data):
try:
super(FakeSMTPChannel, self).collect_incoming_data(data)
except UnicodeDecodeError:
# ignore decode error in SSL/TLS connection tests as we only care
# whether the connection attempt was made
pass
class FakeSMTPServer(smtpd.SMTPServer, threading.Thread):
"""
Asyncore SMTP server wrapped into a thread. Based on DummyFTPServer from:
http://svn.python.org/view/python/branches/py3k/Lib/test/test_ftplib.py?revision=86061&view=markup
"""
channel_class = FakeSMTPChannel
def __init__(self, *args, **kwargs):
threading.Thread.__init__(self)
smtpd.SMTPServer.__init__(self, *args, **kwargs)
self._sink = []
self.active = False
self.active_lock = threading.Lock()
self.sink_lock = threading.Lock()
def process_message(self, peer, mailfrom, rcpttos, data):
if PY3:
data = data.encode('utf-8')
m = message_from_bytes(data)
maddr = parseaddr(m.get('from'))[1]
if mailfrom != maddr:
return "553 '%s' != '%s'" % (mailfrom, maddr)
with self.sink_lock:
self._sink.append(m)
def get_sink(self):
with self.sink_lock:
return self._sink[:]
def flush_sink(self):
with self.sink_lock:
self._sink[:] = []
def start(self):
assert not self.active
self.__flag = threading.Event()
threading.Thread.start(self)
self.__flag.wait()
def run(self):
self.active = True
self.__flag.set()
while self.active and asyncore.socket_map:
with self.active_lock:
asyncore.loop(timeout=0.1, count=1)
asyncore.close_all()
def stop(self):
if self.active:
self.active = False
self.join()
class SMTPBackendTestsBase(SimpleTestCase):
@classmethod
def setUpClass(cls):
super(SMTPBackendTestsBase, cls).setUpClass()
cls.server = FakeSMTPServer(('127.0.0.1', 0), None)
cls._settings_override = override_settings(
EMAIL_HOST="127.0.0.1",
EMAIL_PORT=cls.server.socket.getsockname()[1])
cls._settings_override.enable()
cls.server.start()
@classmethod
def tearDownClass(cls):
cls._settings_override.disable()
cls.server.stop()
super(SMTPBackendTestsBase, cls).tearDownClass()
class SMTPBackendTests(BaseEmailBackendTests, SMTPBackendTestsBase):
email_backend = 'django.core.mail.backends.smtp.EmailBackend'
def setUp(self):
super(SMTPBackendTests, self).setUp()
self.server.flush_sink()
def tearDown(self):
self.server.flush_sink()
super(SMTPBackendTests, self).tearDown()
def flush_mailbox(self):
self.server.flush_sink()
def get_mailbox_content(self):
return self.server.get_sink()
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_authentication_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.username, 'not empty username')
self.assertEqual(backend.password, 'not empty password')
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_authentication_override_settings(self):
backend = smtp.EmailBackend(username='username', password='password')
self.assertEqual(backend.username, 'username')
self.assertEqual(backend.password, 'password')
@override_settings(
EMAIL_HOST_USER="not empty username",
EMAIL_HOST_PASSWORD="not empty password")
def test_email_disabled_authentication(self):
backend = smtp.EmailBackend(username='', password='')
self.assertEqual(backend.username, '')
self.assertEqual(backend.password, '')
def test_auth_attempted(self):
"""
Test that opening the backend with non empty username/password tries
to authenticate against the SMTP server.
"""
backend = smtp.EmailBackend(
username='not empty username', password='not empty password')
try:
self.assertRaisesMessage(SMTPException,
'SMTP AUTH extension not supported by server.', backend.open)
finally:
backend.close()
def test_server_open(self):
"""
Test that open() tells us whether it opened a connection.
"""
backend = smtp.EmailBackend(username='', password='')
self.assertFalse(backend.connection)
opened = backend.open()
backend.close()
self.assertTrue(opened)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_use_settings(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_tls)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_override_settings(self):
backend = smtp.EmailBackend(use_tls=False)
self.assertFalse(backend.use_tls)
def test_email_tls_default_disabled(self):
backend = smtp.EmailBackend()
self.assertFalse(backend.use_tls)
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_use_settings(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_ssl)
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_override_settings(self):
backend = smtp.EmailBackend(use_ssl=False)
self.assertFalse(backend.use_ssl)
def test_email_ssl_default_disabled(self):
backend = smtp.EmailBackend()
self.assertFalse(backend.use_ssl)
@override_settings(EMAIL_SSL_CERTFILE='foo')
def test_email_ssl_certfile_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_certfile, 'foo')
@override_settings(EMAIL_SSL_CERTFILE='foo')
def test_email_ssl_certfile_override_settings(self):
backend = smtp.EmailBackend(ssl_certfile='bar')
self.assertEqual(backend.ssl_certfile, 'bar')
def test_email_ssl_certfile_default_disabled(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_certfile, None)
@override_settings(EMAIL_SSL_KEYFILE='foo')
def test_email_ssl_keyfile_use_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_keyfile, 'foo')
@override_settings(EMAIL_SSL_KEYFILE='foo')
def test_email_ssl_keyfile_override_settings(self):
backend = smtp.EmailBackend(ssl_keyfile='bar')
self.assertEqual(backend.ssl_keyfile, 'bar')
def test_email_ssl_keyfile_default_disabled(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.ssl_keyfile, None)
@override_settings(EMAIL_USE_TLS=True)
def test_email_tls_attempts_starttls(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_tls)
try:
self.assertRaisesMessage(SMTPException,
'STARTTLS extension not supported by server.', backend.open)
finally:
backend.close()
@override_settings(EMAIL_USE_SSL=True)
def test_email_ssl_attempts_ssl_connection(self):
backend = smtp.EmailBackend()
self.assertTrue(backend.use_ssl)
try:
self.assertRaises(SSLError, backend.open)
finally:
backend.close()
def test_connection_timeout_default(self):
"""Test that the connection's timeout value is None by default."""
connection = mail.get_connection('django.core.mail.backends.smtp.EmailBackend')
self.assertEqual(connection.timeout, None)
def test_connection_timeout_custom(self):
"""Test that the timeout parameter can be customized."""
class MyEmailBackend(smtp.EmailBackend):
def __init__(self, *args, **kwargs):
kwargs.setdefault('timeout', 42)
super(MyEmailBackend, self).__init__(*args, **kwargs)
myemailbackend = MyEmailBackend()
myemailbackend.open()
self.assertEqual(myemailbackend.timeout, 42)
self.assertEqual(myemailbackend.connection.timeout, 42)
myemailbackend.close()
@override_settings(EMAIL_TIMEOUT=10)
def test_email_timeout_override_settings(self):
backend = smtp.EmailBackend()
self.assertEqual(backend.timeout, 10)
def test_email_msg_uses_crlf(self):
"""#23063 -- Test that RFC-compliant messages are sent over SMTP."""
send = SMTP.send
try:
smtp_messages = []
def mock_send(self, s):
smtp_messages.append(s)
return send(self, s)
SMTP.send = mock_send
email = EmailMessage('Subject', 'Content', '[email protected]', ['[email protected]'])
mail.get_connection().send_messages([email])
# Find the actual message
msg = None
for i, m in enumerate(smtp_messages):
if m[:4] == 'data':
msg = smtp_messages[i + 1]
break
self.assertTrue(msg)
if PY3:
msg = msg.decode('utf-8')
# Ensure that the message only contains CRLF and not combinations of CRLF, LF, and CR.
msg = msg.replace('\r\n', '')
self.assertNotIn('\r', msg)
self.assertNotIn('\n', msg)
finally:
SMTP.send = send
class SMTPBackendStoppedServerTest(SMTPBackendTestsBase):
"""
This test requires a separate class, because it shuts down the
FakeSMTPServer started in setUpClass(). It cannot be restarted
("RuntimeError: threads can only be started once").
"""
def test_server_stopped(self):
"""
Test that closing the backend while the SMTP server is stopped doesn't
raise an exception.
"""
backend = smtp.EmailBackend(username='', password='')
backend.open()
self.server.stop()
try:
backend.close()
except Exception as e:
self.fail("close() unexpectedly raised an exception: %s" % e)
|
hcsturix74/django | refs/heads/master | tests/cache/liberal_backend.py | 446 | from django.core.cache.backends.locmem import LocMemCache
class LiberalKeyValidationMixin(object):
def validate_key(self, key):
pass
class CacheClass(LiberalKeyValidationMixin, LocMemCache):
pass
|
anthraxx/jenkins.debian.net | refs/heads/master | job-cfg/chroot-installation.yaml.py | 1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import os
from string import join
from yaml import load, dump
try:
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Loader, Dumper
base_distros = [
'jessie',
'stretch',
'buster',
'sid',
]
distro_upgrades = {
'jessie': 'stretch',
'stretch': 'buster',
'buster': 'sid',
}
# deb.debian.org runs mirror updates at 03:25, 09:25, 15:25 and 21:25 UTC and a run usually takes about 10 minutes...
trigger_times = {
'jessie': '30 16 1 */2 *',
'stretch': '30 10 7,28 * *',
'buster': '30 10 */3 * *',
'sid': '30 4 * * *',
}
all_targets = [
'gnome',
'kde',
'kde-full',
'cinnamon',
'lxde',
'lxqt',
'xfce',
'full_desktop',
'qt4',
'qt5',
'haskell',
'developer',
'debconf-video',
'education-tasks',
'education-menus',
'education-astronomy',
'education-chemistry',
'education-common',
'education-desktop-gnome',
'education-desktop-kde',
'education-desktop-lxde',
'education-desktop-lxqt',
'education-desktop-mate',
'education-desktop-other',
'education-desktop-xfce',
'education-development',
'education-electronics',
'education-geography',
'education-graphics',
'education-language',
'education-lang-da',
'education-lang-de',
'education-lang-es',
'education-lang-fr',
'education-lang-he',
'education-lang-it',
'education-lang-ja',
'education-lang-no',
'education-lang-se',
'education-lang-zh-tw',
'education-laptop',
'education-logic-games',
'education-ltsp-server',
'education-main-server',
'education-mathematics',
'education-misc',
'education-music',
'education-networked',
'education-physics',
'education-primaryschool',
'education-services',
'education-standalone',
'education-thin-client',
'education-thin-client-server',
'education-roaming-workstation',
'education-video',
'education-workstation',
'parl-desktop-eu',
'parl-desktop-strict',
'parl-desktop-world',
'design-desktop-animation',
'design-desktop-graphics',
'design-desktop-strict',
'design-desktop-web',
]
#
# not all packages are available in all distros
#
def is_target_in_distro(distro, target):
# education-ltsp-server and education-roaming-workstation are only available since stretch…
if distro == 'jessie' and target in ('education-ltsp-server', 'education-roaming-workstation'):
return False
# education-thin-client-server is obsolete since stretch…
elif distro in ('sid', 'buster', 'stretch') and target == 'education-thin-client-server':
return False
# education-services is obsolete since buster…
elif distro in ('sid', 'buster') and target == 'education-services':
return False
# lxqt is only available since stretch
elif distro == 'jessie' and target == 'lxqt':
return False
# education-lang-*, parl-desktop* and design-desktop* packages only exist since stretch
elif distro == 'jessie' and (target[:15] == 'education-lang-' or target[:12] == 'parl-desktop' or target[:14] == 'design-desktop'):
return False
# education-desktop-lxqt, education-primaryschool and education-video packages only exist since buster
elif distro in ('jessie', 'stretch') and target in ('education-desktop-lxqt', 'education-primaryschool', 'education-video'):
return False
return True
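# A few illustrative calls (derived from the rules above, not used by the
# job generation itself):
#   is_target_in_distro('jessie', 'lxqt')            -> False  (lxqt needs stretch or later)
#   is_target_in_distro('sid', 'education-services') -> False  (obsolete since buster)
#   is_target_in_distro('sid', 'gnome')              -> True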
#
# who gets mail for which target
#
def get_recipients(target):
if target == 'haskell':
return 'jenkins+debian-haskell [email protected] [email protected]'
elif target == 'gnome':
return 'jenkins+debian-qa [email protected] [email protected]'
elif target == 'cinnamon':
return 'jenkins+debian-cinnamon [email protected] [email protected]'
elif target == 'debconf-video':
return 'jenkins+debconf-video [email protected]'
elif target[:3] == 'kde' or target[:2] == 'qt':
return 'jenkins+debian-qa [email protected] [email protected]'
elif target[:10] == 'education-':
return 'jenkins+debian-edu [email protected]'
else:
return 'jenkins+debian-qa [email protected]'
#
# views for different targets
#
def get_view(target, distro):
if target == 'haskell':
return 'haskell'
elif target[:10] == 'education-':
if distro in ('jessie', 'stretch'):
return 'edu_stable'
else:
return 'edu_devel'
else:
return 'chroot-installation'
#
# special descriptions used for some targets
#
spoken_names = {
'gnome': 'GNOME',
'kde': 'KDE plasma desktop',
'kde-full': 'complete KDE desktop',
'cinnamon': 'Cinnamon',
'lxde': 'LXDE',
'lxqt': 'LXQT',
'xfce': 'Xfce',
'qt4': 'Qt4 cross-platform C++ application framework',
'qt5': 'Qt5 cross-platform C++ application framework',
'full_desktop': 'four desktop environments and the most commonly used applications and packages',
'haskell': 'all Haskell related packages',
'developer': 'four desktop environments and the most commonly used applications and packages - and the build depends for all of these',
'debconf-video': 'all packages relevant for the DebConf videoteam',
}
def get_spoken_name(target):
if target[:12] == 'parl-desktop':
return 'the Debian Parl metapackage '+target
elif target[:14] == 'design-desktop':
return 'the Debian Design metapackage '+target
elif target[:10] == 'education-':
return 'the Debian Edu metapackage '+target
elif target in spoken_names:
return spoken_names[target]
else:
return target
#
# nothing to edit below
#
#
# This structure contains the differences between the default and the upgrade
# jobs (an illustrative expansion example follows the list below)
#
jobspecs = [
{ 'j_ext': '',
'd_ext': '',
's_ext': '',
'dist_func': (lambda d: d),
'distfilter': (lambda d: tuple(set(d))),
'skiptaryet': (lambda t: False)
},
{ 'j_ext': '_upgrade_to_{dist2}',
'd_ext': ', then upgrade to {dist2}',
's_ext': ' {dist2}',
'dist_func': (lambda d: [{dist: {'dist2': distro_upgrades[dist]}} for dist in d]),
'distfilter': (lambda d: tuple(set(d) & set(distro_upgrades))),
'skiptaryet': (lambda t: False)
},
]
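# For illustration only (a hypothetical call, not part of the job generation):
# the upgrade jobspec's dist_func wraps each distro with the parameters the
# '..._upgrade_to_{dist2}' job templates expect:
#   jobspecs[1]['dist_func'](['jessie', 'stretch'])
#   -> [{'jessie': {'dist2': 'stretch'}}, {'stretch': {'dist2': 'buster'}}]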
# some functions first…
#
# return the list of targets, filtered to be those present in 'distro'
#
def get_targets_in_distro(distro):
return [t for t in all_targets if is_target_in_distro(distro, t)]
#
# given a target, returns a dict mapping each key to the list of distros that
# share it, so we can handle the edu packages having views that are distro
# dependent
#
# this groups all the distros that have matching views (an illustrative call
# follows the function below)
#
def get_dists_per_key(target,get_distro_key):
dists_per_key = {}
for distro in base_distros:
if is_target_in_distro(distro, target):
key = get_distro_key(distro)
if key not in dists_per_key.keys():
dists_per_key[key] = []
dists_per_key[key].append(distro)
return dists_per_key
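# An illustrative call (grouping by view name, as the install jobs do):
#   get_dists_per_key('education-common', lambda d: get_view('education-common', d))
#   -> {'edu_stable': ['jessie', 'stretch'], 'edu_devel': ['buster', 'sid']}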
# main…
data = []
jobs = []
data.append(
{ 'defaults': { 'builders': [{ 'shell': '{my_shell}'}],
'description': '{my_description}{do_not_edit}',
'logrotate': { 'artifactDaysToKeep': -1,
'artifactNumToKeep': -1,
'daysToKeep': 120,
'numToKeep': 150},
'name': 'chroot-installation',
'properties': [ { 'sidebar': { 'icon': '/userContent/images/debian-swirl-24x24.png',
'text': 'About jenkins.debian.net',
'url': 'https://jenkins.debian.net/userContent/about.html'}},
{ 'sidebar': { 'icon': '/userContent/images/debian-jenkins-24x24.png',
'text': 'All {my_view} jobs',
'url': 'https://jenkins.debian.net/view/{my_view}/'}},
{ 'sidebar': { 'icon': '/userContent/images/profitbricks-24x24.png',
'text': 'Sponsored by Profitbricks',
'url': 'http://www.profitbricks.co.uk'}},
{ 'priority-sorter': { 'priority': '{my_prio}'}},
{ 'throttle': { 'categories': [ 'chroot-installation'],
'enabled': True,
'max-per-node': 6,
'max-total': 6,
'option': 'category'}}],
'publishers': [ { 'trigger': { 'project': '{my_trigger}'}},
{ 'email-ext': { 'attach-build-log': False,
'body': 'See $BUILD_URL/console or just $BUILD_URL for more information.',
'first-failure': False,
'second-failure': True,
'failure': False,
'fixed': True,
'recipients': '{my_recipients}',
'subject': '$BUILD_STATUS: $JOB_NAME/$BUILD_NUMBER'}},
{ 'logparser': { 'parse-rules': '/srv/jenkins/logparse/chroot-installation.rules',
'unstable-on-warning': True,}},
{ 'naginator': { 'progressive-delay-increment': 5,
'progressive-delay-maximum': 15,
'max-failed-builds': 3,
'regular-expression': '^E: (Couldn.t download .*/Packages|Unable to fetch some archives|Failed getting release file|Failed getting release signature file)'}}],
'triggers': [{ 'timed': '{my_time}'}],
'wrappers': [{ 'timeout': { 'timeout': 360}}]}})
data.append(
{ 'job-template': { 'defaults': 'chroot-installation',
'name': '{name}_{dist}_{action}'}})
data.append(
{ 'job-template': { 'defaults': 'chroot-installation',
'name': '{name}_{dist}_install_{target}'}})
data.append(
{ 'job-template': { 'defaults': 'chroot-installation',
'name': '{name}_{dist}_{action}_upgrade_to_{dist2}'}})
data.append(
{ 'job-template': { 'defaults': 'chroot-installation',
'name': '{name}_{dist}_install_{target}_upgrade_to_{dist2}'}})
# maintenance jobs
maint_distros = []
for base_distro in sorted(base_distros):
dist2 = ''
if base_distro in distro_upgrades.values():
trigger = 'chroot-installation_{dist}_bootstrap'
for item in distro_upgrades.items():
if item[1]==base_distro and base_distro in distro_upgrades:
trigger = trigger+', chroot-installation_{dist}_bootstrap_upgrade_to_{dist2}'
dist2 = distro_upgrades[base_distro]
else:
trigger = 'chroot-installation_{dist}_bootstrap_upgrade_to_{dist2}'
dist2 = distro_upgrades[base_distro]
maint_distros.append({ base_distro: {
'my_time': trigger_times[base_distro],
'dist2': dist2,
'my_trigger': trigger}})
jobs.append({ '{name}_{dist}_{action}': {
'action': 'maintenance',
'dist': maint_distros,
                'my_description': 'Maintenance job for chroot-installation_{dist}_* jobs, do some cleanups and monitoring so that there is a predictable environment.',
'my_prio': '135',
'my_recipients': '[email protected]',
'my_shell': '/srv/jenkins/bin/maintenance.sh chroot-installation_{dist}',
'my_view': 'jenkins.d.n'}})
# bootstrap jobs
js_dists_trigs = [{},{},{}]
for trigs, dists in get_dists_per_key('bootstrap',(lambda d: tuple(sorted(get_targets_in_distro(d))))).items():
for jobindex, jobspec in enumerate(jobspecs):
js_dists = jobspec['distfilter'](dists)
if (js_dists):
js_disttrig = tuple((tuple(js_dists), trigs))
js_dists_trigs[jobindex][js_disttrig] = True
for jobindex, jobspec in enumerate(jobspecs):
jobs.extend([{ '{name}_{dist}_{action}'+jobspec['j_ext']: {
'action': 'bootstrap',
'dist': list(dists) if jobspec['j_ext'] == '' else
[{dist: {'dist2': distro_upgrades[dist]}} for dist in dists],
'my_trigger': join(['chroot-installation_{dist}_install_'+t+jobspec['j_ext']
for t in list(trigs)], ', '),
'my_description': 'Debootstrap {dist}'+jobspec['d_ext']+'.',
'my_prio': 131,
'my_time': '',
'my_recipients': get_recipients('bootstrap'),
'my_shell': '/srv/jenkins/bin/chroot-installation.sh {dist} none'+jobspec['s_ext'],
'my_view': get_view('bootstrap', None),
}}
for (dists, trigs) in js_dists_trigs[jobindex].keys()])
# now all the other jobs
targets_per_distview = [{},{},{}]
for target in sorted(all_targets):
for view, dists in get_dists_per_key(target,(lambda d: get_view(target, d))).items():
for jobindex, jobspec in enumerate(jobspecs):
if jobspec['skiptaryet'](target):
continue
js_dists = jobspec['distfilter'](dists)
if (js_dists):
distview = tuple((tuple(js_dists), view))
if distview not in targets_per_distview[jobindex].keys():
targets_per_distview[jobindex][distview] = []
targets_per_distview[jobindex][distview].append(target)
for jobindex, jobspec in enumerate(jobspecs):
jobs.extend([{ '{name}_{dist}_install_{target}'+jobspec['j_ext']: {
'dist': jobspec['dist_func'](list(dists)),
'target': [{t: {
'my_spokenname': get_spoken_name(t),
'my_recipients': get_recipients(t)}}
for t in dv_targs],
'my_description': 'Debootstrap {dist}, then install {my_spokenname}'+jobspec['d_ext']+'.',
'my_shell': '/srv/jenkins/bin/chroot-installation.sh {dist} {target}'+jobspec['s_ext'],
'my_view': view,
}}
for (dists, view), dv_targs in targets_per_distview[jobindex].items()])
data.append({'project': {
'name': 'chroot-installation',
'do_not_edit': '<br><br>Job configuration source is <a href="https://anonscm.debian.org/git/qa/jenkins.debian.net.git/tree/job-cfg/chroot-installation.yaml.py">chroot-installation.yaml.py</a>.',
'my_prio': '130',
'my_trigger': '',
'my_time': '',
'jobs': jobs}})
sys.stdout.write(dump(data, Dumper=Dumper))
|
pculka/SkyDrop | refs/heads/master | skydrop/utils/etc/test.py | 5 | #!/usr/bin/python
file = open("TEST.CSV", "r")
lines = file.read().split("\n")
file.close()
def print_bites(val):
s = ""
for i in range(8):
if (val & 1 << (7-i)):
s += "1"
else:
s += "."
return s
def print_bites2(val):
s = ""
for i in range(8):
if (i == 4):
s += " "
if (val & 1 << (7-i)):
s += "1"
else:
s += "."
return s
def cmp_r(a,b):
a, b = map(print_bites, [a, b])
for i in range(8):
if a[i] != b[7-i]:
return False
return True
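# each CSV line holds an "a;b" pair; cmp_r() above accepts the pair when b
# equals a with its bit order reversed, and mismatches are printed below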
for line in lines:
if (line == ""):
        break
a, b = map(int, line.split(";"))
if (not cmp_r(a, b)):
print "%03d %03d %s | %s" % (a, b, print_bites2(a), print_bites2(b))
|
sorenk/ansible | refs/heads/devel | lib/ansible/modules/network/avi/avi_virtualservice.py | 23 | #!/usr/bin/python
#
# @author: Gaurav Rastogi ([email protected])
# Eric Anderson ([email protected])
# module_check: supported
# Avi Version: 17.1.1
#
# Copyright: (c) 2017 Gaurav Rastogi, <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_virtualservice
author: Gaurav Rastogi ([email protected])
short_description: Module for setup of VirtualService Avi RESTful Object
description:
- This module is used to configure VirtualService object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.3"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
active_standby_se_tag:
description:
- This configuration only applies if the virtualservice is in legacy active standby ha mode and load distribution among active standby is enabled.
- This field is used to tag the virtualservice so that virtualservices with the same tag will share the same active serviceengine.
- Virtualservices with different tags will have different active serviceengines.
            - If one of the serviceengines in the serviceenginegroup fails, all virtualservices will end up using the same active serviceengine.
- Redistribution of the virtualservices can be either manual or automated when the failed serviceengine recovers.
- Redistribution is based on the auto redistribute property of the serviceenginegroup.
- Enum options - ACTIVE_STANDBY_SE_1, ACTIVE_STANDBY_SE_2.
- Default value when not specified in API or module is interpreted by Avi Controller as ACTIVE_STANDBY_SE_1.
analytics_policy:
description:
- Determines analytics settings for the application.
analytics_profile_ref:
description:
- Specifies settings related to analytics.
- It is a reference to an object of type analyticsprofile.
application_profile_ref:
description:
- Enable application layer specific features for the virtual service.
- It is a reference to an object of type applicationprofile.
auto_allocate_floating_ip:
description:
- Auto-allocate floating/elastic ip from the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
auto_allocate_ip:
description:
- Auto-allocate vip from the provided subnet.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
availability_zone:
description:
- Availability-zone to place the virtual service.
- Field deprecated in 17.1.1.
avi_allocated_fip:
description:
- (internal-use) fip allocated by avi in the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
avi_allocated_vip:
description:
- (internal-use) vip allocated by avi in the cloud infrastructure.
- Field deprecated in 17.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
bulk_sync_kvcache:
description:
- (this is a beta feature).
- Sync key-value cache to the new ses when vs is scaled out.
- For ex ssl sessions are stored using vs's key-value cache.
- When the vs is scaled out, the ssl session information is synced to the new se, allowing existing ssl sessions to be reused on the new se.
- Field introduced in 17.2.7, 18.1.1.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.6"
type: bool
client_auth:
description:
- Http authentication configuration for protected resources.
close_client_conn_on_config_update:
description:
- Close client connection on vs config update.
- Field introduced in 17.2.4.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.5"
type: bool
cloud_config_cksum:
description:
- Checksum of cloud configuration for vs.
- Internally set by cloud connector.
cloud_ref:
description:
- It is a reference to an object of type cloud.
cloud_type:
description:
- Enum options - cloud_none, cloud_vcenter, cloud_openstack, cloud_aws, cloud_vca, cloud_apic, cloud_mesos, cloud_linuxserver, cloud_docker_ucp,
- cloud_rancher, cloud_oshift_k8s, cloud_azure.
- Default value when not specified in API or module is interpreted by Avi Controller as CLOUD_NONE.
connections_rate_limit:
description:
- Rate limit the incoming connections to this virtual service.
content_rewrite:
description:
- Profile used to match and rewrite strings in request and/or response body.
created_by:
description:
- Creator name.
delay_fairness:
description:
- Select the algorithm for qos fairness.
- This determines how multiple virtual services sharing the same service engines will prioritize traffic over a congested network.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
description:
description:
- User defined description for the object.
discovered_network_ref:
description:
- (internal-use) discovered networks providing reachability for client facing virtual service ip.
- This field is deprecated.
- It is a reference to an object of type network.
- Field deprecated in 17.1.1.
discovered_networks:
description:
- (internal-use) discovered networks providing reachability for client facing virtual service ip.
- This field is used internally by avi, not editable by the user.
- Field deprecated in 17.1.1.
discovered_subnet:
description:
- (internal-use) discovered subnets providing reachability for client facing virtual service ip.
- This field is deprecated.
- Field deprecated in 17.1.1.
dns_info:
description:
- Service discovery specific data including fully qualified domain name, type and time-to-live of the dns record.
- Note that only one of fqdn and dns_info setting is allowed.
dns_policies:
description:
- Dns policies applied on the dns traffic of the virtual service.
- Field introduced in 17.1.1.
version_added: "2.4"
east_west_placement:
description:
- Force placement on all se's in service group (mesos mode only).
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
enable_autogw:
description:
- Response traffic to clients will be sent back to the source mac address of the connection, rather than statically sent to a default gateway.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
enable_rhi:
description:
- Enable route health injection using the bgp config in the vrf context.
type: bool
enable_rhi_snat:
description:
- Enable route health injection for source nat'ted floating ip address using the bgp config in the vrf context.
type: bool
enabled:
description:
- Enable or disable the virtual service.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
type: bool
error_page_profile_ref:
description:
- Error page profile to be used for this virtualservice.this profile is used to send the custom error page to the client generated by the proxy.
- It is a reference to an object of type errorpageprofile.
- Field introduced in 17.2.4.
version_added: "2.5"
floating_ip:
description:
- Floating ip to associate with this virtual service.
- Field deprecated in 17.1.1.
floating_subnet_uuid:
description:
- If auto_allocate_floating_ip is true and more than one floating-ip subnets exist, then the subnet for the floating ip address allocation.
- This field is applicable only if the virtualservice belongs to an openstack or aws cloud.
- In openstack or aws cloud it is required when auto_allocate_floating_ip is selected.
- Field deprecated in 17.1.1.
flow_dist:
description:
- Criteria for flow distribution among ses.
- Enum options - LOAD_AWARE, CONSISTENT_HASH_SOURCE_IP_ADDRESS, CONSISTENT_HASH_SOURCE_IP_ADDRESS_AND_PORT.
- Default value when not specified in API or module is interpreted by Avi Controller as LOAD_AWARE.
flow_label_type:
description:
- Criteria for flow labelling.
- Enum options - NO_LABEL, APPLICATION_LABEL, SERVICE_LABEL.
- Default value when not specified in API or module is interpreted by Avi Controller as NO_LABEL.
fqdn:
description:
- Dns resolvable, fully qualified domain name of the virtualservice.
- Only one of 'fqdn' and 'dns_info' configuration is allowed.
host_name_xlate:
description:
- Translate the host name sent to the servers to this value.
- Translate the host name sent from servers back to the value used by the client.
http_policies:
description:
- Http policies applied on the data traffic of the virtual service.
ign_pool_net_reach:
description:
- Ignore pool servers network reachability constraints for virtual service placement.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
ip_address:
description:
- Ip address of the virtual service.
- Field deprecated in 17.1.1.
ipam_network_subnet:
description:
- Subnet and/or network for allocating virtualservice ip by ipam provider module.
- Field deprecated in 17.1.1.
l4_policies:
description:
- L4 policies applied to the data traffic of the virtual service.
- Field introduced in 17.2.7.
version_added: "2.6"
limit_doser:
description:
- Limit potential dos attackers who exceed max_cps_per_client significantly to a fraction of max_cps_per_client for a while.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
max_cps_per_client:
description:
- Maximum connections per second per client ip.
- Allowed values are 10-1000.
- Special values are 0- 'unlimited'.
- Default value when not specified in API or module is interpreted by Avi Controller as 0.
microservice_ref:
description:
- Microservice representing the virtual service.
- It is a reference to an object of type microservice.
name:
description:
- Name for the virtual service.
required: true
network_profile_ref:
description:
- Determines network settings such as protocol, tcp or udp, and related options for the protocol.
- It is a reference to an object of type networkprofile.
network_ref:
description:
- Manually override the network on which the virtual service is placed.
- It is a reference to an object of type network.
- Field deprecated in 17.1.1.
network_security_policy_ref:
description:
- Network security policies for the virtual service.
- It is a reference to an object of type networksecuritypolicy.
nsx_securitygroup:
description:
- A list of nsx service groups representing the clients which can access the virtual ip of the virtual service.
- Field introduced in 17.1.1.
version_added: "2.4"
performance_limits:
description:
            - Optional settings that determine performance limits like max connections or bandwidth etc.
pool_group_ref:
description:
- The pool group is an object that contains pools.
- It is a reference to an object of type poolgroup.
pool_ref:
description:
- The pool is an object that contains destination servers and related attributes such as load-balancing and persistence.
- It is a reference to an object of type pool.
port_uuid:
description:
- (internal-use) network port assigned to the virtual service ip address.
- Field deprecated in 17.1.1.
remove_listening_port_on_vs_down:
description:
- Remove listening port if virtualservice is down.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
requests_rate_limit:
description:
- Rate limit the incoming requests to this virtual service.
scaleout_ecmp:
description:
- Disable re-distribution of flows across service engines for a virtual service.
- Enable if the network itself performs flow hashing with ecmp in environments such as gcp.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
se_group_ref:
description:
- The service engine group to use for this virtual service.
- Moving to a new se group is disruptive to existing connections for this vs.
- It is a reference to an object of type serviceenginegroup.
server_network_profile_ref:
description:
- Determines the network settings profile for the server side of tcp proxied connections.
- Leave blank to use the same settings as the client to vs side of the connection.
- It is a reference to an object of type networkprofile.
service_metadata:
description:
- Metadata pertaining to the service provided by this virtual service.
- In openshift/kubernetes environments, egress pod info is stored.
- Any user input to this field will be overwritten by avi vantage.
version_added: "2.4"
service_pool_select:
description:
- Select pool based on destination port.
services:
description:
- List of services defined for this virtual service.
sideband_profile:
description:
- Sideband configuration to be used for this virtualservice.it can be used for sending traffic to sideband vips for external inspection etc.
version_added: "2.4"
snat_ip:
description:
- Nat'ted floating source ip address(es) for upstream connection to servers.
sp_pool_refs:
description:
- Gslb pools used to manage site-persistence functionality.
- Each site-persistence pool contains the virtualservices in all the other sites, that is auto-generated by the gslb manager.
- This is a read-only field for the user.
- It is a reference to an object of type pool.
- Field introduced in 17.2.2.
version_added: "2.5"
ssl_key_and_certificate_refs:
description:
- Select or create one or two certificates, ec and/or rsa, that will be presented to ssl/tls terminated connections.
- It is a reference to an object of type sslkeyandcertificate.
ssl_profile_ref:
description:
- Determines the set of ssl versions and ciphers to accept for ssl/tls terminated connections.
- It is a reference to an object of type sslprofile.
ssl_sess_cache_avg_size:
description:
- Expected number of ssl session cache entries (may be exceeded).
- Allowed values are 1024-16383.
- Default value when not specified in API or module is interpreted by Avi Controller as 1024.
static_dns_records:
description:
- List of static dns records applied to this virtual service.
- These are static entries and no health monitoring is performed against the ip addresses.
subnet:
description:
- Subnet providing reachability for client facing virtual service ip.
- Field deprecated in 17.1.1.
subnet_uuid:
description:
- It represents subnet for the virtual service ip address allocation when auto_allocate_ip is true.it is only applicable in openstack or aws cloud.
- This field is required if auto_allocate_ip is true.
- Field deprecated in 17.1.1.
tenant_ref:
description:
- It is a reference to an object of type tenant.
traffic_clone_profile_ref:
description:
- Server network or list of servers for cloning traffic.
- It is a reference to an object of type trafficcloneprofile.
- Field introduced in 17.1.1.
version_added: "2.4"
traffic_enabled:
description:
- Knob to enable the virtual service traffic on its assigned service engines.
- This setting is effective only when the enabled flag is set to true.
- Field introduced in 17.2.8.
- Default value when not specified in API or module is interpreted by Avi Controller as True.
version_added: "2.6"
type: bool
type:
description:
- Specify if this is a normal virtual service, or if it is the parent or child of an sni-enabled virtual hosted virtual service.
- Enum options - VS_TYPE_NORMAL, VS_TYPE_VH_PARENT, VS_TYPE_VH_CHILD.
- Default value when not specified in API or module is interpreted by Avi Controller as VS_TYPE_NORMAL.
url:
description:
- Avi controller URL of the object.
use_bridge_ip_as_vip:
description:
- Use bridge ip as vip on each host in mesos deployments.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
type: bool
use_vip_as_snat:
description:
- Use the virtual ip as the snat ip for health monitoring and sending traffic to the backend servers instead of the service engine interface ip.
            - The caveat of enabling this option is that the virtualservice cannot be configured in an active-active ha mode.
- Dns based multi vip solution has to be used for ha & non-disruptive upgrade purposes.
- Field introduced in 17.1.9,17.2.3.
- Default value when not specified in API or module is interpreted by Avi Controller as False.
version_added: "2.5"
type: bool
uuid:
description:
- Uuid of the virtualservice.
vh_domain_name:
description:
- The exact name requested from the client's sni-enabled tls hello domain name field.
- If this is a match, the parent vs will forward the connection to this child vs.
vh_parent_vs_uuid:
description:
- Specifies the virtual service acting as virtual hosting (sni) parent.
vip:
description:
- List of virtual service ips.
            - While creating a 'shared vs', please use vsvip_ref to point to the shared entities.
- Field introduced in 17.1.1.
version_added: "2.4"
vrf_context_ref:
description:
- Virtual routing context that the virtual service is bound to.
- This is used to provide the isolation of the set of networks the application is attached to.
- It is a reference to an object of type vrfcontext.
vs_datascripts:
description:
- Datascripts applied on the data traffic of the virtual service.
vsvip_ref:
description:
- Mostly used during the creation of shared vs, this field refers to entities that can be shared across virtual services.
- It is a reference to an object of type vsvip.
- Field introduced in 17.1.1.
version_added: "2.4"
waf_policy_ref:
description:
- Waf policy for the virtual service.
- It is a reference to an object of type wafpolicy.
- Field introduced in 17.2.1.
version_added: "2.5"
weight:
description:
- The quality of service weight to assign to traffic transmitted from this virtual service.
- A higher weight will prioritize traffic versus other virtual services sharing the same service engines.
- Allowed values are 1-128.
- Default value when not specified in API or module is interpreted by Avi Controller as 1.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Create SSL Virtual Service using Pool testpool2
avi_virtualservice:
controller: 10.10.27.90
username: admin
password: AviNetworks123!
name: newtestvs
state: present
performance_limits:
max_concurrent_connections: 1000
services:
- port: 443
enable_ssl: true
- port: 80
ssl_profile_ref: '/api/sslprofile?name=System-Standard'
application_profile_ref: '/api/applicationprofile?name=System-Secure-HTTP'
ssl_key_and_certificate_refs:
- '/api/sslkeyandcertificate?name=System-Default-Cert'
ip_address:
addr: 10.90.131.103
type: V4
pool_ref: '/api/pool?name=testpool2'
"""
RETURN = '''
obj:
description: VirtualService (api/virtualservice) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
active_standby_se_tag=dict(type='str',),
analytics_policy=dict(type='dict',),
analytics_profile_ref=dict(type='str',),
application_profile_ref=dict(type='str',),
auto_allocate_floating_ip=dict(type='bool',),
auto_allocate_ip=dict(type='bool',),
availability_zone=dict(type='str',),
avi_allocated_fip=dict(type='bool',),
avi_allocated_vip=dict(type='bool',),
bulk_sync_kvcache=dict(type='bool',),
client_auth=dict(type='dict',),
close_client_conn_on_config_update=dict(type='bool',),
cloud_config_cksum=dict(type='str',),
cloud_ref=dict(type='str',),
cloud_type=dict(type='str',),
connections_rate_limit=dict(type='dict',),
content_rewrite=dict(type='dict',),
created_by=dict(type='str',),
delay_fairness=dict(type='bool',),
description=dict(type='str',),
discovered_network_ref=dict(type='list',),
discovered_networks=dict(type='list',),
discovered_subnet=dict(type='list',),
dns_info=dict(type='list',),
dns_policies=dict(type='list',),
east_west_placement=dict(type='bool',),
enable_autogw=dict(type='bool',),
enable_rhi=dict(type='bool',),
enable_rhi_snat=dict(type='bool',),
enabled=dict(type='bool',),
error_page_profile_ref=dict(type='str',),
floating_ip=dict(type='dict',),
floating_subnet_uuid=dict(type='str',),
flow_dist=dict(type='str',),
flow_label_type=dict(type='str',),
fqdn=dict(type='str',),
host_name_xlate=dict(type='str',),
http_policies=dict(type='list',),
ign_pool_net_reach=dict(type='bool',),
ip_address=dict(type='dict',),
ipam_network_subnet=dict(type='dict',),
l4_policies=dict(type='list',),
limit_doser=dict(type='bool',),
max_cps_per_client=dict(type='int',),
microservice_ref=dict(type='str',),
name=dict(type='str', required=True),
network_profile_ref=dict(type='str',),
network_ref=dict(type='str',),
network_security_policy_ref=dict(type='str',),
nsx_securitygroup=dict(type='list',),
performance_limits=dict(type='dict',),
pool_group_ref=dict(type='str',),
pool_ref=dict(type='str',),
port_uuid=dict(type='str',),
remove_listening_port_on_vs_down=dict(type='bool',),
requests_rate_limit=dict(type='dict',),
scaleout_ecmp=dict(type='bool',),
se_group_ref=dict(type='str',),
server_network_profile_ref=dict(type='str',),
service_metadata=dict(type='str',),
service_pool_select=dict(type='list',),
services=dict(type='list',),
sideband_profile=dict(type='dict',),
snat_ip=dict(type='list',),
sp_pool_refs=dict(type='list',),
ssl_key_and_certificate_refs=dict(type='list',),
ssl_profile_ref=dict(type='str',),
ssl_sess_cache_avg_size=dict(type='int',),
static_dns_records=dict(type='list',),
subnet=dict(type='dict',),
subnet_uuid=dict(type='str',),
tenant_ref=dict(type='str',),
traffic_clone_profile_ref=dict(type='str',),
traffic_enabled=dict(type='bool',),
type=dict(type='str',),
url=dict(type='str',),
use_bridge_ip_as_vip=dict(type='bool',),
use_vip_as_snat=dict(type='bool',),
uuid=dict(type='str',),
vh_domain_name=dict(type='list',),
vh_parent_vs_uuid=dict(type='str',),
vip=dict(type='list',),
vrf_context_ref=dict(type='str',),
vs_datascripts=dict(type='list',),
vsvip_ref=dict(type='str',),
waf_policy_ref=dict(type='str',),
weight=dict(type='int',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'virtualservice',
set([]))
if __name__ == '__main__':
main()
|
rouge8/regex2dfa | refs/heads/master | regex2dfa_build.py | 3 | from os.path import abspath, dirname, join
from cffi import FFI
regex2dfa_header = abspath(join(dirname(__file__), 'src', 'regex2dfa.h'))
ffi = FFI()
ffi.cdef(
"""
const char * cffi_regex2dfa(char *regex, uint32_t len);
void free(void *ptr);
"""
)
ffi.set_source(
'_regex2dfa',
"""
#include "%s"
extern "C" {
extern const char * cffi_regex2dfa(char *regex, uint32_t len) {
const std::string input_regex = std::string(regex, len);
std::string minimized_dfa;
regex2dfa::Regex2Dfa(input_regex, &minimized_dfa);
return strdup(minimized_dfa.c_str());
}
}
""" % regex2dfa_header,
source_extension='.cpp',
library_dirs=['.libs'],
libraries=['regex2dfa'],
extra_compile_args=[
'-O3',
'-fstack-protector-all',
'-D_FORTIFY_SOURCE',
'-fPIC',
],
)
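# Illustrative use once built (a sketch; the pattern below is made up):
#   from _regex2dfa import ffi, lib
#   pattern = b"a(b|c)*"
#   raw = lib.cffi_regex2dfa(pattern, len(pattern))
#   dfa = ffi.string(raw)  # the minimized DFA as bytes
#   lib.free(raw)          # the C side strdup's the buffer, so release it here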
if __name__ == '__main__':
ffi.compile()
|
deathslocus/jor1k-sysroot | refs/heads/master | fs/usr/lib/python2.7/UserDict.py | 358 | """A more or less complete user-defined wrapper around dictionary objects."""
class UserDict:
def __init__(self, dict=None, **kwargs):
self.data = {}
if dict is not None:
self.update(dict)
if len(kwargs):
self.update(kwargs)
def __repr__(self): return repr(self.data)
def __cmp__(self, dict):
if isinstance(dict, UserDict):
return cmp(self.data, dict.data)
else:
return cmp(self.data, dict)
__hash__ = None # Avoid Py3k warning
def __len__(self): return len(self.data)
def __getitem__(self, key):
if key in self.data:
return self.data[key]
if hasattr(self.__class__, "__missing__"):
return self.__class__.__missing__(self, key)
raise KeyError(key)
def __setitem__(self, key, item): self.data[key] = item
def __delitem__(self, key): del self.data[key]
def clear(self): self.data.clear()
def copy(self):
if self.__class__ is UserDict:
return UserDict(self.data.copy())
import copy
data = self.data
try:
self.data = {}
c = copy.copy(self)
finally:
self.data = data
c.update(self)
return c
def keys(self): return self.data.keys()
def items(self): return self.data.items()
def iteritems(self): return self.data.iteritems()
def iterkeys(self): return self.data.iterkeys()
def itervalues(self): return self.data.itervalues()
def values(self): return self.data.values()
def has_key(self, key): return key in self.data
def update(self, dict=None, **kwargs):
if dict is None:
pass
elif isinstance(dict, UserDict):
self.data.update(dict.data)
elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
self.data.update(dict)
else:
for k, v in dict.items():
self[k] = v
if len(kwargs):
self.data.update(kwargs)
def get(self, key, failobj=None):
if key not in self:
return failobj
return self[key]
def setdefault(self, key, failobj=None):
if key not in self:
self[key] = failobj
return self[key]
def pop(self, key, *args):
return self.data.pop(key, *args)
def popitem(self):
return self.data.popitem()
def __contains__(self, key):
return key in self.data
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
class IterableUserDict(UserDict):
def __iter__(self):
return iter(self.data)
import _abcoll
_abcoll.MutableMapping.register(IterableUserDict)
class DictMixin:
# Mixin defining all dictionary methods for classes that already have
# a minimum dictionary interface including getitem, setitem, delitem,
# and keys. Without knowledge of the subclass constructor, the mixin
# does not define __init__() or copy(). In addition to the four base
# methods, progressively more efficiency comes with defining
# __contains__(), __iter__(), and iteritems().
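    # For instance (a sketch, not part of the original module), a class that
    # keeps its data in a plain dict attribute needs only the four base
    # methods and inherits the rest of the mapping API from this mixin:
    #
    #     class AttrDict(DictMixin):
    #         def __init__(self): self._d = {}
    #         def __getitem__(self, key): return self._d[key]
    #         def __setitem__(self, key, value): self._d[key] = value
    #         def __delitem__(self, key): del self._d[key]
    #         def keys(self): return self._d.keys()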
# second level definitions support higher levels
def __iter__(self):
for k in self.keys():
yield k
def has_key(self, key):
try:
self[key]
except KeyError:
return False
return True
def __contains__(self, key):
return self.has_key(key)
# third level takes advantage of second level definitions
def iteritems(self):
for k in self:
yield (k, self[k])
def iterkeys(self):
return self.__iter__()
# fourth level uses definitions from lower levels
def itervalues(self):
for _, v in self.iteritems():
yield v
def values(self):
return [v for _, v in self.iteritems()]
def items(self):
return list(self.iteritems())
def clear(self):
for key in self.keys():
del self[key]
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
def pop(self, key, *args):
if len(args) > 1:
raise TypeError, "pop expected at most 2 arguments, got "\
+ repr(1 + len(args))
try:
value = self[key]
except KeyError:
if args:
return args[0]
raise
del self[key]
return value
def popitem(self):
try:
k, v = self.iteritems().next()
except StopIteration:
raise KeyError, 'container is empty'
del self[k]
return (k, v)
def update(self, other=None, **kwargs):
# Make progressively weaker assumptions about "other"
if other is None:
pass
elif hasattr(other, 'iteritems'): # iteritems saves memory and lookups
for k, v in other.iteritems():
self[k] = v
elif hasattr(other, 'keys'):
for k in other.keys():
self[k] = other[k]
else:
for k, v in other:
self[k] = v
if kwargs:
self.update(kwargs)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __repr__(self):
return repr(dict(self.iteritems()))
def __cmp__(self, other):
if other is None:
return 1
if isinstance(other, DictMixin):
other = dict(other.iteritems())
return cmp(dict(self.iteritems()), other)
def __len__(self):
return len(self.keys())
|
pqftgs/bge-netplay | refs/heads/master | netplay/builtin_tables.py | 1 | from . import packer
def define():
tabledef = packer.TableDef('_permission')
tabledef.define('uint16', 'id')
tabledef.define('uint8', 'state')
tabledef = packer.TableDef('_destroy')
tabledef.define('uint16', 'id')
tabledef = packer.TableDef('_GameObject')
tabledef.define('uint16', 'id')
tabledef.define('float', 'pos_x')
tabledef.define('float', 'pos_y')
tabledef.define('float', 'pos_z')
tabledef.define('float', 'rot_x')
tabledef.define('float', 'rot_y')
tabledef.define('float', 'rot_z')
tabledef.define('float', 'rot_w')
tabledef = packer.TableDef('_RigidGameObject', template=tabledef)
tabledef.define('float', 'lv_x')
tabledef.define('float', 'lv_y')
tabledef.define('float', 'lv_z')
tabledef.define('float', 'av_x')
tabledef.define('float', 'av_y')
tabledef.define('float', 'av_z') |
unnikrishnankgs/va | refs/heads/master | venv/lib/python3.5/site-packages/IPython/utils/ulinecache.py | 5 | """
This module has been deprecated since IPython 6.0.
Wrapper around linecache which decodes files to unicode according to PEP 263.
"""
import functools
import linecache
from warnings import warn
getline = linecache.getline
# getlines has to be looked up at runtime, because doctests monkeypatch it.
@functools.wraps(linecache.getlines)
def getlines(filename, module_globals=None):
"""
Deprecated since IPython 6.0
"""
warn(("`IPython.utils.ulinecache.getlines` is deprecated since"
" IPython 6.0 and will be removed in future versions."),
DeprecationWarning, stacklevel=2)
return linecache.getlines(filename, module_globals=module_globals)
|
Alberto-Beralix/Beralix | refs/heads/master | i386-squashfs-root/usr/lib/python2.7/dist-packages/aptdaemon/client.py | 1 | ../../../../share/pyshared/aptdaemon/client.py |
jburger424/MediaQueueHCI | refs/heads/dev | m-q-env/lib/python3.4/site-packages/setuptools/command/build_py.py | 207 | import os
import sys
import fnmatch
import textwrap
from distutils.command.build_py import build_py as _build_py
from distutils.util import convert_path
from glob import glob
try:
from setuptools.lib2to3_ex import Mixin2to3
except ImportError:
class Mixin2to3:
def run_2to3(self, files, doctests=True):
"do nothing"
class build_py(_build_py, Mixin2to3):
"""Enhanced 'build_py' command that includes data files with packages
The data files are specified via a 'package_data' argument to 'setup()'.
See 'setuptools.dist.Distribution' for more details.
Also, this version of the 'build_py' command allows you to specify both
'py_modules' and 'packages' in the same setup operation.
"""
def finalize_options(self):
_build_py.finalize_options(self)
self.package_data = self.distribution.package_data
self.exclude_package_data = self.distribution.exclude_package_data or {}
if 'data_files' in self.__dict__: del self.__dict__['data_files']
self.__updated_files = []
self.__doctests_2to3 = []
def run(self):
"""Build modules, packages, and copy data files to build directory"""
if not self.py_modules and not self.packages:
return
if self.py_modules:
self.build_modules()
if self.packages:
self.build_packages()
self.build_package_data()
self.run_2to3(self.__updated_files, False)
self.run_2to3(self.__updated_files, True)
self.run_2to3(self.__doctests_2to3, True)
# Only compile actual .py files, using our base class' idea of what our
# output files are.
self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
def __getattr__(self, attr):
if attr=='data_files': # lazily compute data files
self.data_files = files = self._get_data_files()
return files
return _build_py.__getattr__(self,attr)
def build_module(self, module, module_file, package):
outfile, copied = _build_py.build_module(self, module, module_file, package)
if copied:
self.__updated_files.append(outfile)
return outfile, copied
def _get_data_files(self):
"""Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
self.analyze_manifest()
data = []
for package in self.packages or ():
# Locate package source directory
src_dir = self.get_package_dir(package)
# Compute package build directory
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
# Length of path to strip from found files
plen = len(src_dir)+1
# Strip directory from globbed filenames
filenames = [
file[plen:] for file in self.find_data_files(package, src_dir)
]
data.append((package, src_dir, build_dir, filenames))
return data
def find_data_files(self, package, src_dir):
"""Return filenames for package's data files in 'src_dir'"""
globs = (self.package_data.get('', [])
+ self.package_data.get(package, []))
files = self.manifest_files.get(package, [])[:]
for pattern in globs:
# Each pattern has to be converted to a platform-specific path
files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
return self.exclude_data_files(package, src_dir, files)
def build_package_data(self):
"""Copy data files into build directory"""
for package, src_dir, build_dir, filenames in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
srcfile = os.path.join(src_dir, filename)
outf, copied = self.copy_file(srcfile, target)
srcfile = os.path.abspath(srcfile)
if copied and srcfile in self.distribution.convert_2to3_doctests:
self.__doctests_2to3.append(outf)
def analyze_manifest(self):
self.manifest_files = mf = {}
if not self.distribution.include_package_data:
return
src_dirs = {}
for package in self.packages or ():
# Locate package source directory
src_dirs[assert_relative(self.get_package_dir(package))] = package
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
for path in ei_cmd.filelist.files:
d,f = os.path.split(assert_relative(path))
prev = None
oldf = f
while d and d!=prev and d not in src_dirs:
prev = d
d, df = os.path.split(d)
f = os.path.join(df, f)
if d in src_dirs:
if path.endswith('.py') and f==oldf:
continue # it's a module, not data
mf.setdefault(src_dirs[d],[]).append(path)
def get_data_files(self): pass # kludge 2.4 for lazy computation
if sys.version<"2.4": # Python 2.4 already has this code
def get_outputs(self, include_bytecode=1):
"""Return complete list of files copied to the build directory
This includes both '.py' files and data files, as well as '.pyc'
and '.pyo' files if 'include_bytecode' is true. (This method is
needed for the 'install_lib' command to do its job properly, and to
generate a correct installation manifest.)
"""
return _build_py.get_outputs(self, include_bytecode) + [
os.path.join(build_dir, filename)
for package, src_dir, build_dir,filenames in self.data_files
for filename in filenames
]
def check_package(self, package, package_dir):
"""Check namespace packages' __init__ for declare_namespace"""
try:
return self.packages_checked[package]
except KeyError:
pass
init_py = _build_py.check_package(self, package, package_dir)
self.packages_checked[package] = init_py
if not init_py or not self.distribution.namespace_packages:
return init_py
for pkg in self.distribution.namespace_packages:
if pkg==package or pkg.startswith(package+'.'):
break
else:
return init_py
f = open(init_py,'rbU')
if 'declare_namespace'.encode() not in f.read():
from distutils import log
log.warn(
"WARNING: %s is a namespace package, but its __init__.py does\n"
"not declare_namespace(); setuptools 0.7 will REQUIRE this!\n"
'(See the setuptools manual under "Namespace Packages" for '
"details.)\n", package
)
f.close()
return init_py
def initialize_options(self):
self.packages_checked={}
_build_py.initialize_options(self)
def get_package_dir(self, package):
res = _build_py.get_package_dir(self, package)
if self.distribution.src_root is not None:
return os.path.join(self.distribution.src_root, res)
return res
def exclude_data_files(self, package, src_dir, files):
"""Filter filenames for package's data files in 'src_dir'"""
globs = (self.exclude_package_data.get('', [])
+ self.exclude_package_data.get(package, []))
bad = []
for pattern in globs:
bad.extend(
fnmatch.filter(
files, os.path.join(src_dir, convert_path(pattern))
)
)
bad = dict.fromkeys(bad)
seen = {}
return [
f for f in files if f not in bad
and f not in seen and seen.setdefault(f,1) # ditch dupes
]
def assert_relative(path):
if not os.path.isabs(path):
return path
from distutils.errors import DistutilsSetupError
msg = textwrap.dedent("""
Error: setup script specifies an absolute path:
%s
setup() arguments must *always* be /-separated paths relative to the
setup.py directory, *never* absolute paths.
""").lstrip() % path
raise DistutilsSetupError(msg)
|
denzow/ipymessenger | refs/heads/master | ipymessenger/consts.py | 1 | # -*- coding: utf-8 -*-
# coding:utf-8
# status
STAT_TABLE = range(4)
(STAT_ON, STAT_AFK, STAT_INVISIBLE, STAT_OFF) = STAT_TABLE
STAT_NAME = { STAT_ON : 'online', \
STAT_AFK : 'afk', \
STAT_INVISIBLE : 'invisible', \
STAT_OFF : 'offline', }
# misc
TCP_DATA_LEN = 0x5A8
TCP_RECV_PORT = 0x9D6D
UDP_DATA_LEN = 0x80000
DECRYPT_ERRMSG = 'Oops... Something went wrong when decrypting your message. Please refresh to require the latest public key.'
PROTOCOL_VERSION = 'Python版 Ver2.09'
# protocol (same with original ipmsg)
FILELIST_SEPARATOR = '\x07'
HOSTLIST_SEPARATOR = ''
HOSTLIST_DUMMY = ""
# header
class command_const(object):
IPMSG_VERSION = 0x0001
IPMSG_DEFAULT_PORT = 0x0979
# command
IPMSG_NOOPERATION = 0x00000000
IPMSG_BR_ENTRY = 0x00000001
IPMSG_BR_EXIT = 0x00000002
IPMSG_ANSENTRY = 0x00000003
IPMSG_BR_ABSENCE = 0x00000004
IPMSG_BR_ISGETLIST = 0x00000010
IPMSG_OKGETLIST = 0x00000011
IPMSG_GETLIST = 0x00000012
IPMSG_ANSLIST = 0x00000013
IPMSG_BR_ISGETLIST2 = 0x00000018
IPMSG_SENDMSG = 0x00000020
IPMSG_RECVMSG = 0x00000021
IPMSG_READMSG = 0x00000030
IPMSG_DELMSG = 0x00000031
IPMSG_ANSREADMSG = 0x00000032
IPMSG_GETINFO = 0x00000040
IPMSG_SENDINFO = 0x00000041
IPMSG_GETABSENCEINFO = 0x00000050
IPMSG_SENDABSENCEINFO = 0x00000051
IPMSG_GETFILEDATA = 0x00000060
IPMSG_RELEASEFILES = 0x00000061
IPMSG_GETDIRFILES = 0x00000062
IPMSG_GETPUBKEY = 0x00000072
IPMSG_ANSPUBKEY = 0x00000073
# option for all command
IPMSG_ABSENCEOPT = 0x00000100
IPMSG_SERVEROPT = 0x00000200
IPMSG_DIALUPOPT = 0x00010000
IPMSG_FILEATTACHOPT = 0x00200000
IPMSG_ENCRYPTOPT = 0x00400000
IPMSG_UTF8OPT = 0x00800000
# option for send command
IPMSG_SENDCHECKOPT = 0x00000100
IPMSG_SECRETOPT = 0x00000200
IPMSG_BROADCASTOPT = 0x00000400
IPMSG_MULTICASTOPT = 0x00000800
IPMSG_NOPOPUPOPT = 0x00001000
IPMSG_AUTORETOPT = 0x00002000
IPMSG_RETRYOPT = 0x00004000
IPMSG_PASSWORDOPT = 0x00008000
IPMSG_NOLOGOPT = 0x00020000
IPMSG_NEWMUTIOPT = 0x00040000
IPMSG_NOADDLISTOPT = 0x00080000
IPMSG_READCHECKOPT = 0x00100000
IPMSG_SECRETEXOPT = (IPMSG_READCHECKOPT|IPMSG_SECRETOPT)
# encryption flags for encrypt command
IPMSG_RSA_512 = 0x00000001
IPMSG_RSA_1024 = 0x00000002
IPMSG_RSA_2048 = 0x00000004
IPMSG_RC2_40 = 0x00001000
IPMSG_RC2_128 = 0x00004000
IPMSG_RC2_256 = 0x00008000
IPMSG_BLOWFISH_128 = 0x00020000
IPMSG_BLOWFISH_256 = 0x00040000
IPMSG_AES_128 = 0x00080000
IPMSG_SIGN_MD5 = 0x10000000
IPMSG_SIGN_SHA1 = 0x20000000
# compatibilty for Win beta version
IPMSG_RC2_40OLD = 0x00000010 # for beta1-4 only
IPMSG_RC2_128OLD = 0x00000040 # for beta1-4 only
IPMSG_BLOWFISH_128OLD = 0x00000400 # for beta1-4 only
IPMSG_RC2_40ALL = (IPMSG_RC2_40|IPMSG_RC2_40OLD)
IPMSG_RC2_128ALL = (IPMSG_RC2_128|IPMSG_RC2_128OLD)
IPMSG_BLOWFISH_128ALL = (IPMSG_BLOWFISH_128|IPMSG_BLOWFISH_128OLD)
# file types for fileattach command
IPMSG_FILE_REGULAR = 0x00000001
IPMSG_FILE_DIR = 0x00000002
IPMSG_FILE_RETPARENT = 0x00000003 # return parent directory
IPMSG_FILE_SYMLINK = 0x00000004
IPMSG_FILE_CDEV = 0x00000005 # for UNIX
IPMSG_FILE_BDEV = 0x00000006 # for UNIX
IPMSG_FILE_FIFO = 0x00000007 # for UNIX
IPMSG_FILE_RESFORK = 0x00000010 # for Mac
# file attribute options for fileattach command
IPMSG_FILE_RONLYOPT = 0x00000100
IPMSG_FILE_HIDDENOPT = 0x00001000
IPMSG_FILE_EXHIDDENOPT = 0x00002000 # for MacOS X
IPMSG_FILE_ARCHIVEOPT = 0x00004000
IPMSG_FILE_SYSTEMOPT = 0x00008000
# extend attribute types for fileattach command
IPMSG_FILE_UID = 0x00000001
IPMSG_FILE_USERNAME = 0x00000002 # uid by string
IPMSG_FILE_GID = 0x00000003
IPMSG_FILE_GROUPNAME = 0x00000004 # gid by string
IPMSG_FILE_PERM = 0x00000010 # for UNIX
IPMSG_FILE_MAJORNO = 0x00000011 # for UNIX devfile
IPMSG_FILE_MINORNO = 0x00000012 # for UNIX devfile
IPMSG_FILE_CTIME = 0x00000013 # for UNIX
IPMSG_FILE_MTIME = 0x00000014
IPMSG_FILE_ATIME = 0x00000015
IPMSG_FILE_CREATETIME = 0x00000016
IPMSG_FILE_CREATOR = 0x00000020 # for Mac
IPMSG_FILE_FILETYPE = 0x00000021 # for Mac
IPMSG_FILE_FINDERINFO = 0x00000022 # for Mac
IPMSG_FILE_ACL = 0x00000030
IPMSG_FILE_ALIASFNAME = 0x00000040 # alias fname
IPMSG_FILE_UNICODEFNAME = 0x00000041 # UNICODE fname
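    # A packet's command word is built by OR-ing a base command with option
    # flags, e.g. (illustrative):
    #   command = command_const.IPMSG_SENDMSG | command_const.IPMSG_SENDCHECKOPT | command_const.IPMSG_UTF8OPT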
|
fergalmoran/Chrome2Kindle | refs/heads/master | server/model/Hits.py | 1 | from google.appengine.ext import db
class SiteStats(db.Model):
source = db.StringProperty(required = True)
action = db.StringProperty(required = True)
headers = db.StringProperty(required = True)
timestamp = db.DateTimeProperty(auto_now_add = True)
|
dzbarsky/servo | refs/heads/master | tests/wpt/web-platform-tests/webdriver/navigation/get_from_http_test.py | 142 | import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
class GetFromHttpTest(base_test.WebDriverBaseTest):
def testGetUrlWithNoRedirectionOverHttp(self):
page = self.webserver.where_is('navigation/res/empty.html')
self.driver.get(page)
url = self.driver.current_url
self.assertEquals(page, url)
def testGetWillFollowTheLocationHeader(self):
page = self.webserver.where_is('navigation/redirect')
self.driver.get(page)
expected = self.webserver.where_is('navigation/res/empty.html')
url = self.driver.current_url
self.assertEquals(expected, url)
def testGetWillFollowMetaRefreshThatRefreshesInstantly(self):
page = self.webserver.where_is('navigation/res/instant-meta-redirect.html')
self.driver.get(page)
expected = self.webserver.where_is('navigation/res/empty.html')
url = self.driver.current_url
self.assertEquals(expected, url)
def testGetWillFollowMetaRefreshThatRefreshesAfterOneSecond(self):
page = self.webserver.where_is('navigation/res/1s-meta-redirect.html')
self.driver.get(page)
expected = self.webserver.where_is('navigation/res/empty.html')
url = self.driver.current_url
self.assertEquals(expected, url)
def testGetWillNotFollowMetaRefreshThatRefreshesAfterMoreThanOneSecond(self):
page = self.webserver.where_is('navigation/res/60s-meta-redirect.html')
self.driver.get(page)
url = self.driver.current_url
self.assertEquals(page, url)
def testGetFragmentInCurrentDocumentDoesNotReloadPage(self):
page = self.webserver.where_is("navigation/res/fragment.html")
fragment_page = "%s#%s" % (page, "fragment")
self.driver.get(page)
self.driver.execute_script("state = true")
self.driver.get(fragment_page)
self.assertEquals(True, self.driver.execute_script("return state"))
if __name__ == '__main__':
unittest.main()
|
cs591B1-Project/Social-Media-Impact-on-Stock-Market-and-Price | refs/heads/master | data/22 starbucks/sbGrapher.py | 14 | from ast import literal_eval
import matplotlib.pyplot as plt
p = [line.rstrip('\n') for line in open("positive.txt")]
n = [line.rstrip('\n') for line in open("negative.txt")]
a = [line.rstrip('\n') for line in open("all.txt")]
# stock closing prices from Nov 3 to Dec 3 (weekends/holidays given same price as previous closing)
s = [ 782.52002,
782.52002,
782.52002,
790.51001,
785.309998,
762.559998,
754.02002,
754.02002,
754.02002,
736.080017,
758.48999,
764.47998,
771.22998,
760.539978,
760.539978,
760.539978,
769.200012,
768.27002,
760.98999,
760.98999,
761.679993,
761.679993,
761.679993,
768.23999,
770.840027,
758.039978,
747.919983,
750.5,
750.5,
750.5]
# create numeric list from p
plist = []
for line in p:
plist.append(literal_eval(line))
# create numeric list from n
nlist = []
for line in n:
nlist.append(literal_eval(line))
# create numeric list from a
alist = []
for line in a:
alist.append(literal_eval(line))
# create plot
plt.plot(s, label='Closing stock prices', color='blue')
plt.plot(plist, label='# positive articles', color='orange')
plt.plot(nlist, label='# negative articles', color='green')
plt.plot(alist, label='Total articles', color='red')
plt.legend()
plt.show()
# close-up plot
plt.plot(s, label='Closing stock prices', color='blue')
plt.plot(plist, label='# positive articles', color='orange')
plt.legend()
plt.show()
#eof |
pbrunet/pythran | refs/heads/master | pythran/analyses/is_assigned.py | 4 | """ Gathers variables that have value modification in the given node. """
from pythran.passmanager import NodeAnalysis
from collections import defaultdict
import ast
class IsAssigned(NodeAnalysis):
"""
Gather variable that change in given node.
It doesn't check constness as it is use for integer so we don't care about
arguments effects as it is use by value.
"""
def __init__(self):
""" Basic initialiser. """
self.result = defaultdict(bool)
super(IsAssigned, self).__init__()
def visit_Name(self, node):
""" Stored variable have new value. """
if isinstance(node.ctx, ast.Store):
self.result[node.id] = True
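# Illustrative use from another pass (a sketch; assumes the usual pythran
# passmanager flow): `assigned = self.gather(IsAssigned, node)` returns the
# defaultdict, so `assigned['x']` is True iff `x` is stored to under `node`.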
|
Matt-Stammers/Python-Foundations | refs/heads/master | Simple Functions/Number_List_Power.py | 1 |
def powers_of_two(n):
    result_list = []
    for i in range(n+1):  # range already steps i from 0 to n; no manual counter needed
        result_list.append(2**i)
    return result_list
# or a much nicer way to do it:
def powers_of_two(n):
return [2**x for x in range(n+1)]
# powers of two, using an explicit accumulator loop:
def powers_of_two(n):
a = []
for i in range(0, n + 1):
a.append(2 ** i)
return a
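# quick check (illustrative): powers_of_two(4) == [1, 2, 4, 8, 16]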
|
fae92/TSML | refs/heads/master | __init__.py | 3 | __author__ = 'tianqilei'
|
pignacio/pignacio_scripts | refs/heads/develop | pignacio_scripts/pylint_extra/transforms/__init__.py | 12133432 | |
gkawamoto/yowsup | refs/heads/master | yowsup/layers/protocol_ib/protocolentities/clean_iq.py | 70 | from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from yowsup.layers.protocol_iq.protocolentities import IqProtocolEntity
class CleanIqProtocolEntity(IqProtocolEntity):
'''
<iq id="" type="set" to="self.domain" xmlns="urn:xmpp:whatsapp:dirty">
<clean type="{{dirty_type}}"></clean>
</iq>
'''
def __init__(self, cleanType, to, _id = None):
super(CleanIqProtocolEntity, self).__init__(
"urn:xmpp:whatsapp:dirty",
_id = _id,
_type = "set",
to = to
)
self.setProps(cleanType)
def setProps(self, cleanType):
self.cleanType = cleanType
def __str__(self):
out = super(CleanIqProtocolEntity, self).__str__()
out += "Clean Type: %s\n" % self.cleanType
return out
def toProtocolTreeNode(self):
node = super(CleanIqProtocolEntity, self).toProtocolTreeNode()
cleanNode = ProtocolTreeNode("clean", {"type": self.cleanType})
node.addChild(cleanNode)
return node
@staticmethod
def fromProtocolTreeNode(node):
entity = IqProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = CleanIqProtocolEntity
entity.setProps(node.getChild("clean").getAttributeValue("type"))
return entity |
Kamik423/uni_plan | refs/heads/master | plan/plan/lib/python3.4/site-packages/pip/_vendor/cachecontrol/filewrapper.py | 346 | from io import BytesIO
class CallbackFileWrapper(object):
"""
Small wrapper around a fp object which will tee everything read into a
buffer, and when that file is closed it will execute a callback with the
contents of that buffer.
All attributes are proxied to the underlying file object.
This class uses members with a double underscore (__) leading prefix so as
not to accidentally shadow an attribute.
"""
def __init__(self, fp, callback):
self.__buf = BytesIO()
self.__fp = fp
self.__callback = callback
def __getattr__(self, name):
        # The vagaries of garbage collection mean that self.__fp is
        # not always set. Using __getattribute__ with the mangled
        # private name[0] looks up the attribute value and raises an
        # AttributeError when it doesn't exist. This stops things from
        # infinitely recursing through getattr in the case where
        # self.__fp hasn't been set.
#
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
fp = self.__getattribute__('_CallbackFileWrapper__fp')
return getattr(fp, name)
def __is_fp_closed(self):
try:
return self.__fp.fp is None
except AttributeError:
pass
try:
return self.__fp.closed
except AttributeError:
pass
# We just don't cache it then.
# TODO: Add some logging here...
return False
def _close(self):
if self.__callback:
self.__callback(self.__buf.getvalue())
# We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter deadlocks
# because the callback is holding a reference to something which
# has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do its thing normally.
self.__callback = None
def read(self, amt=None):
data = self.__fp.read(amt)
self.__buf.write(data)
if self.__is_fp_closed():
self._close()
return data
def _safe_read(self, amt):
data = self.__fp._safe_read(amt)
if amt == 2 and data == b'\r\n':
# urllib executes this read to toss the CRLF at the end
# of the chunk.
return data
self.__buf.write(data)
if self.__is_fp_closed():
self._close()
return data
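# Illustrative usage (a sketch, not part of the original module): wrap an
# httplib-style response so the accumulated body can be handed to a cache.
# `resp` and `store_in_cache` are hypothetical; the callback fires once the
# underlying fp reports itself closed (httplib sets `fp = None` at EOF).
#
#     wrapped = CallbackFileWrapper(resp, store_in_cache)
#     body = wrapped.read()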
|
txm/make-good | refs/heads/master | django/contrib/localflavor/it/it_province.py | 406 | # -*- coding: utf-8 -*
PROVINCE_CHOICES = (
('AG', 'Agrigento'),
('AL', 'Alessandria'),
('AN', 'Ancona'),
('AO', 'Aosta'),
('AR', 'Arezzo'),
('AP', 'Ascoli Piceno'),
('AT', 'Asti'),
('AV', 'Avellino'),
('BA', 'Bari'),
('BT', 'Barletta-Andria-Trani'), # active starting from 2009
('BL', 'Belluno'),
('BN', 'Benevento'),
('BG', 'Bergamo'),
('BI', 'Biella'),
('BO', 'Bologna'),
('BZ', 'Bolzano/Bozen'),
('BS', 'Brescia'),
('BR', 'Brindisi'),
('CA', 'Cagliari'),
('CL', 'Caltanissetta'),
('CB', 'Campobasso'),
('CI', 'Carbonia-Iglesias'),
('CE', 'Caserta'),
('CT', 'Catania'),
('CZ', 'Catanzaro'),
('CH', 'Chieti'),
('CO', 'Como'),
('CS', 'Cosenza'),
('CR', 'Cremona'),
('KR', 'Crotone'),
('CN', 'Cuneo'),
('EN', 'Enna'),
('FM', 'Fermo'), # active starting from 2009
('FE', 'Ferrara'),
('FI', 'Firenze'),
('FG', 'Foggia'),
('FC', 'Forlì-Cesena'),
('FR', 'Frosinone'),
('GE', 'Genova'),
('GO', 'Gorizia'),
('GR', 'Grosseto'),
('IM', 'Imperia'),
('IS', 'Isernia'),
('SP', 'La Spezia'),
('AQ', u'L’Aquila'),
('LT', 'Latina'),
('LE', 'Lecce'),
('LC', 'Lecco'),
('LI', 'Livorno'),
('LO', 'Lodi'),
('LU', 'Lucca'),
('MC', 'Macerata'),
('MN', 'Mantova'),
('MS', 'Massa-Carrara'),
('MT', 'Matera'),
('VS', 'Medio Campidano'),
('ME', 'Messina'),
('MI', 'Milano'),
('MO', 'Modena'),
('MB', 'Monza e Brianza'), # active starting from 2009
('NA', 'Napoli'),
('NO', 'Novara'),
('NU', 'Nuoro'),
('OG', 'Ogliastra'),
('OT', 'Olbia-Tempio'),
('OR', 'Oristano'),
('PD', 'Padova'),
('PA', 'Palermo'),
('PR', 'Parma'),
('PV', 'Pavia'),
('PG', 'Perugia'),
('PU', 'Pesaro e Urbino'),
('PE', 'Pescara'),
('PC', 'Piacenza'),
('PI', 'Pisa'),
('PT', 'Pistoia'),
('PN', 'Pordenone'),
('PZ', 'Potenza'),
('PO', 'Prato'),
('RG', 'Ragusa'),
('RA', 'Ravenna'),
('RC', 'Reggio Calabria'),
('RE', 'Reggio Emilia'),
('RI', 'Rieti'),
('RN', 'Rimini'),
('RM', 'Roma'),
('RO', 'Rovigo'),
('SA', 'Salerno'),
('SS', 'Sassari'),
('SV', 'Savona'),
('SI', 'Siena'),
('SR', 'Siracusa'),
('SO', 'Sondrio'),
('TA', 'Taranto'),
('TE', 'Teramo'),
('TR', 'Terni'),
('TO', 'Torino'),
('TP', 'Trapani'),
('TN', 'Trento'),
('TV', 'Treviso'),
('TS', 'Trieste'),
('UD', 'Udine'),
('VA', 'Varese'),
('VE', 'Venezia'),
('VB', 'Verbano Cusio Ossola'),
('VC', 'Vercelli'),
('VR', 'Verona'),
('VV', 'Vibo Valentia'),
('VI', 'Vicenza'),
('VT', 'Viterbo'),
)
|
lundjordan/releasewarrior-2.0 | refs/heads/master | releasewarrior/test/test_helpers.py | 1 | import pytest
from releasewarrior.helpers import validate_graphid
@pytest.mark.parametrize("graphid,expected", [
('UZ1SzyoKQuCQWNw5DD3Wew', True),
('H8sEEXySSqSQdAcbaM8VjA', True),
('', False), # Empty
('H8sEEXySSqS$dAcbaM8VjA', False), # Invalid characters
('H8sEEXySSqSQdAcbaM8Vj', False), # Too short
("u'H8sEEXySSqSQdAcbaM8VjA", False), # Too long, releaserunner's unicode output
(u'UZ1SzyoKQuCQWNw5DD3Wew', True), # Releaserunner displays unicode output
])
def test_validate_graphid(graphid, expected):
assert validate_graphid(graphid) == expected
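# For reference, a validator consistent with the cases above can be a single
# regex match over a 22-character slug. This is only a sketch of the expected
# behaviour, not the actual implementation in releasewarrior.helpers:
#
# import re
#
# def _validate_graphid_sketch(graphid):
#     return bool(re.match(r'^[A-Za-z0-9_-]{22}$', graphid))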
|
NelleV/pyconfr-test | refs/heads/master | symposion/cms/models.py | 7 | import datetime
import os
import re
from django.conf import settings
from django.core.urlresolvers import reverse
from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import ugettext_lazy as _
from markitup.fields import MarkupField
from taggit.managers import TaggableManager
import reversion
from .managers import PublishedPageManager
class Page(models.Model):
STATUS_CHOICES = (
(1, _("Draft")),
(2, _("Public")),
)
title = models.CharField(max_length=100)
path = models.CharField(max_length=100, unique=True)
body = MarkupField()
status = models.IntegerField(choices=STATUS_CHOICES, default=2)
publish_date = models.DateTimeField(default=datetime.datetime.now)
created = models.DateTimeField(editable=False, default=datetime.datetime.now)
updated = models.DateTimeField(editable=False, default=datetime.datetime.now)
tags = TaggableManager(blank=True)
published = PublishedPageManager()
def __unicode__(self):
return self.title
@models.permalink
def get_absolute_url(self):
return ("cms_page", [self.path])
@property
def is_community(self):
return self.path.lower().startswith("community/")
def save(self, *args, **kwargs):
self.updated = datetime.datetime.now()
super(Page, self).save(*args, **kwargs)
def clean_fields(self, exclude=None):
super(Page, self).clean_fields(exclude)
if not re.match(settings.SYMPOSION_PAGE_REGEX, self.path):
raise ValidationError({"path": [_("Path can only contain letters, numbers and hyphens and end with /")]})
reversion.register(Page)
def generate_filename(instance, filename):
return filename
class File(models.Model):
file = models.FileField(upload_to=generate_filename)
created = models.DateTimeField(default=datetime.datetime.now)
def download_url(self):
return reverse("file_download", args=[self.pk, os.path.basename(self.file.name).lower()])
|
drexly/tonginBlobStore | refs/heads/master | lib/django/core/cache/backends/locmem.py | 586 | "Thread-safe in-memory cache backend."
import time
from contextlib import contextmanager
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
from django.utils.synch import RWLock
try:
from django.utils.six.moves import cPickle as pickle
except ImportError:
import pickle
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}
_expire_info = {}
_locks = {}
@contextmanager
def dummy():
"""A context manager that does nothing special."""
yield
class LocMemCache(BaseCache):
def __init__(self, name, params):
BaseCache.__init__(self, params)
self._cache = _caches.setdefault(name, {})
self._expire_info = _expire_info.setdefault(name, {})
self._lock = _locks.setdefault(name, RWLock())
def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
with self._lock.writer():
if self._has_expired(key):
self._set(key, pickled, timeout)
return True
return False
def get(self, key, default=None, version=None, acquire_lock=True):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = None
with (self._lock.reader() if acquire_lock else dummy()):
if not self._has_expired(key):
pickled = self._cache[key]
if pickled is not None:
try:
return pickle.loads(pickled)
except pickle.PickleError:
return default
with (self._lock.writer() if acquire_lock else dummy()):
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return default
def _set(self, key, value, timeout=DEFAULT_TIMEOUT):
if len(self._cache) >= self._max_entries:
self._cull()
self._cache[key] = value
self._expire_info[key] = self.get_backend_timeout(timeout)
def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
with self._lock.writer():
self._set(key, pickled, timeout)
def incr(self, key, delta=1, version=None):
with self._lock.writer():
value = self.get(key, version=version, acquire_lock=False)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
key = self.make_key(key, version=version)
pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
self._cache[key] = pickled
return new_value
def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.reader():
if not self._has_expired(key):
return True
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return False
def _has_expired(self, key):
exp = self._expire_info.get(key, -1)
if exp is None or exp > time.time():
return False
return True
def _cull(self):
if self._cull_frequency == 0:
self.clear()
else:
doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
for k in doomed:
self._delete(k)
def _delete(self, key):
try:
del self._cache[key]
except KeyError:
pass
try:
del self._expire_info[key]
except KeyError:
pass
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
self._delete(key)
def clear(self):
self._cache.clear()
self._expire_info.clear()
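# --- Illustrative usage (added; not part of Django) ---
# The backend is normally configured via settings.CACHES, but it can be
# exercised directly; 'demo' and the params below are placeholder values:
#
# cache = LocMemCache('demo', {'TIMEOUT': 60,
#                              'OPTIONS': {'MAX_ENTRIES': 300}})
# cache.set('greeting', 'hello')
# assert cache.get('greeting') == 'hello'
# cache.add('hits', 0)   # no-op (returns False) if the key already exists
# cache.incr('hits')     # returns 1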
|
AyoubZahid/odoo | refs/heads/9.0 | addons/utm/models/__init__.py | 78 | # -*- coding: utf-8 -*-
import utm
import ir_http
|
piyush82/icclab-rcb-web | refs/heads/master | virtualenv/lib/python2.7/site-packages/django/conf/locale/vi/formats.py | 237 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'\N\gà\y d \t\há\n\g n \nă\m Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = r'H:i:s \N\gà\y d \t\há\n\g n \nă\m Y'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'd-m-Y'
SHORT_DATETIME_FORMAT = 'H:i:s d-m-Y'
# FIRST_DAY_OF_WEEK =
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# DATE_INPUT_FORMATS =
# TIME_INPUT_FORMATS =
# DATETIME_INPUT_FORMATS =
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
# NUMBER_GROUPING =
|
MrSurly/micropython | refs/heads/master | tests/basics/int_big_xor2.py | 61 | # test + +
print( 97989513389222316022151446562729620153292831887555425160965597396
^ 23716683549865351578586448630079789776107310103486834795830390982)
print( 53817081128841898634258263553430908085326601592682411889506742059
^ 37042558948907407488299113387826240429667200950043601129661240876)
print( 26167512042587370698808974207700979337713004510730289760097826496
^ 98456276326770292376138852628141531773120376436197321310863125849)
print( 21085380307304977067262070503651827226504797285572981274069266136
^ 15928222825828272388778130358888206480162413547887287646273147570)
print( 40827393422334167255488276244226338235131323044408420081160772273
^ 63815443187857978125545555033672525708399848575557475462799643340)
print( 5181013159871685724135944379095645225188360725917119022722046448
^ 59734090450462480092384049604830976376887859531148103803093112493)
print( 283894311
^ 86526825689187217371383854139783231460931720533100376593106943447)
print( 40019818573920230246248826511203818792007462193311949166285967147
^ 9487909752)
# test - -
print( -97989513389222316022151446562729620153292831887555425160965597396
^ -23716683549865351578586448630079789776107310103486834795830390982)
print( -53817081128841898634258263553430908085326601592682411889506742059
^ -37042558948907407488299113387826240429667200950043601129661240876)
print( -26167512042587370698808974207700979337713004510730289760097826496
^ -98456276326770292376138852628141531773120376436197321310863125849)
print( -21085380307304977067262070503651827226504797285572981274069266136
^ -15928222825828272388778130358888206480162413547887287646273147570)
print( -40827393422334167255488276244226338235131323044408420081160772273
^ -63815443187857978125545555033672525708399848575557475462799643340)
print( -5181013159871685724135944379095645225188360725917119022722046448
^ -59734090450462480092384049604830976376887859531148103803093112493)
print( -283894311
^ -86526825689187217371383854139783231460931720533100376593106943447)
print( -40019818573920230246248826511203818792007462193311949166285967147
^ -9487909752)
|
oinopion/django | refs/heads/master | django/contrib/staticfiles/utils.py | 248 | import fnmatch
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
def matches_patterns(path, patterns=None):
"""
    Return True or False depending on whether the ``path`` matches
    any of the given glob-style ``patterns``.
"""
if patterns is None:
patterns = []
for pattern in patterns:
if fnmatch.fnmatchcase(path, pattern):
return True
return False
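# Illustrative behaviour (added note): patterns are matched with
# fnmatchcase, so matching is case-sensitive and '*' also crosses '/':
#
# >>> matches_patterns('admin/css/base.css', ['*.css'])
# True
# >>> matches_patterns('admin/js/core.JS', ['*.js'])
# False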
def get_files(storage, ignore_patterns=None, location=''):
"""
Recursively walk the storage directories yielding the paths
of all files that should be copied.
"""
if ignore_patterns is None:
ignore_patterns = []
directories, files = storage.listdir(location)
for fn in files:
if matches_patterns(fn, ignore_patterns):
continue
if location:
fn = os.path.join(location, fn)
yield fn
for dir in directories:
if matches_patterns(dir, ignore_patterns):
continue
if location:
dir = os.path.join(location, dir)
for fn in get_files(storage, ignore_patterns, dir):
yield fn
def check_settings(base_url=None):
"""
Checks if the staticfiles settings have sane values.
"""
if base_url is None:
base_url = settings.STATIC_URL
if not base_url:
raise ImproperlyConfigured(
"You're using the staticfiles app "
"without having set the required STATIC_URL setting.")
if settings.MEDIA_URL == base_url:
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
"settings must have different values")
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
(settings.MEDIA_ROOT == settings.STATIC_ROOT)):
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
"settings must have different values")
|
vladikr/nova_drafts | refs/heads/master | nova/api/metadata/__init__.py | 116 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
:mod:`nova.api.metadata` -- Nova Metadata Server
================================================
.. automodule:: nova.api.metadata
:platform: Unix
:synopsis: Metadata Server for Nova
"""
|
vipulroxx/sympy | refs/heads/master | sympy/functions/special/tests/test_bsplines.py | 83 | from sympy.functions import bspline_basis_set
from sympy.core.compatibility import range
from sympy import Piecewise, Interval
from sympy import symbols, Rational
x, y = symbols('x,y')
def test_basic_degree_0():
d = 0
knots = range(5)
splines = bspline_basis_set(d, knots, x)
for i in range(len(splines)):
assert splines[i] == Piecewise((1, Interval(i, i + 1)
.contains(x)), (0, True))
def test_basic_degree_1():
d = 1
knots = range(5)
splines = bspline_basis_set(d, knots, x)
assert splines[0] == Piecewise(
(x, Interval(0, 1, False, True).contains(x)),
(2 - x, Interval(1, 2).contains(x)), (0, True))
assert splines[1] == Piecewise(
(-1 + x, Interval(1, 2, False, True).contains(x)),
(3 - x, Interval(2, 3).contains(x)), (0, True))
assert splines[2] == Piecewise(
(-2 + x, Interval(2, 3, False, True).contains(x)),
(4 - x, Interval(3, 4).contains(x)), (0, True))
def test_basic_degree_2():
d = 2
knots = range(5)
splines = bspline_basis_set(d, knots, x)
b0 = Piecewise((x**2/2, Interval(0, 1, False, True).contains(x)),
(Rational(
-3, 2) + 3*x - x**2, Interval(1, 2, False, True).contains(x)),
(Rational(9, 2) - 3*x + x**2/2, Interval(2, 3).contains(x)), (0, True))
b1 = Piecewise(
(Rational(1, 2) - x + x**2/2, Interval(1, 2, False, True).contains(x)),
(Rational(
-11, 2) + 5*x - x**2, Interval(2, 3, False, True).contains(x)),
(8 - 4*x + x**2/2, Interval(3, 4).contains(x)), (0, True))
assert splines[0] == b0
assert splines[1] == b1
def test_basic_degree_3():
d = 3
knots = range(5)
splines = bspline_basis_set(d, knots, x)
b0 = Piecewise(
(x**3/6, Interval(0, 1, False, True).contains(x)),
(Rational(2, 3) - 2*x + 2*x**2 - x**3/2, Interval(1, 2,
False, True).contains(x)),
(Rational(-22, 3) + 10*x - 4*x**2 + x**3/2, Interval(2, 3,
False, True).contains(x)),
(Rational(32, 3) - 8*x + 2*x**2 - x**3/6, Interval(3, 4).contains(x)),
(0, True)
)
assert splines[0] == b0
def test_repeated_degree_1():
d = 1
knots = [0, 0, 1, 2, 2, 3, 4, 4]
splines = bspline_basis_set(d, knots, x)
assert splines[0] == Piecewise((1 - x, Interval(0, 1).contains(x)),
(0, True))
assert splines[1] == Piecewise(
(x, Interval(0, 1, False, True).contains(x)),
(2 - x, Interval(1, 2).contains(x)), (0, True))
assert splines[2] == Piecewise((-1 + x, Interval(1, 2).contains(x)
), (0, True))
assert splines[3] == Piecewise((3 - x, Interval(2, 3).contains(x)),
(0, True))
assert splines[4] == Piecewise(
(-2 + x, Interval(2, 3, False, True).contains(x)),
(4 - x, Interval(3, 4).contains(x)), (0, True))
assert splines[5] == Piecewise((-3 + x, Interval(3, 4).contains(x)
), (0, True))
|
jiahaoliang/group-based-policy | refs/heads/lbaasv2-mitaka-pull-request | gbpservice/tests/contrib/nfp_service/reference_configurator/api/config.py | 2 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Server Specific Configurations
server = {
'port': '8080',
'host': '0.0.0.0'
}
# Pecan Application Configurations
app = {
'root': 'root_controller.RootController',
'modules': ['v1'],
'debug': True,
'errors': {
404: '/error/404',
'__force_dict__': True
}
}
logging = {
'root': {'level': 'INFO', 'handlers': ['console']},
'loggers': {
'pecanlog': {'level': 'INFO',
'handlers': ['console'],
'propagate': False},
'pecan': {'level': 'INFO',
'handlers': ['console'],
'propagate': False},
'py.warnings': {'handlers': ['console']},
'__force_dict__': True
},
'handlers': {
'console': {
'level': 'INFO',
'class': 'logging.StreamHandler',
'formatter': 'color'
}
},
'formatters': {
'simple': {
'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
'[%(threadName)s] %(message)s')
},
'color': {
'()': 'pecan.log.ColorFormatter',
'format': ('%(asctime)s [%(padded_color_levelname)s] [%(name)s]'
'[%(threadName)s] %(message)s'),
'__force_dict__': True
}
}
}
# Custom Configurations must be in Python dictionary format::
#
# foo = {'bar':'baz'}
#
# All configurations are accessible at::
# pecan.conf
|
shantanugoel/email-actions | refs/heads/master | email_actions/plugins/email.py | 1 | import logging
from smtplib import SMTP, SMTPHeloError, SMTPAuthenticationError, \
SMTPNotSupportedError, SMTPException, SMTPRecipientsRefused,\
SMTPSenderRefused, SMTPDataError
from email_actions.config import read_config_plugin
PLUGIN_NAME = 'email'
def email_notify(filter_name, msg_from, msg_to, msg_subject, msg_content):
plugin_cfg = read_config_plugin(filter_name, PLUGIN_NAME)
params = {
'host': None,
'port': 25,
'username': None,
'password': None,
'secure': False
}
for key in plugin_cfg.keys():
params[key] = plugin_cfg[key]
if not params['host']:
logging.error('Missing mandatory host config for email')
return
client = SMTP(params['host'], params['port'])
send_mail = True
    if params['username'] and params['password']:
if params['secure']:
client.starttls()
try:
client.login(params['username'], params['password'])
except (SMTPHeloError, SMTPAuthenticationError, SMTPNotSupportedError,
SMTPException) as e:
send_mail = False
logging.error('Error logging in to SMTP server: %s' % (e))
if send_mail:
# TODO: Form message properly
try:
client.sendmail(msg_from, msg_to, msg_content)
except (SMTPHeloError, SMTPNotSupportedError, SMTPRecipientsRefused,
SMTPSenderRefused, SMTPDataError) as e:
logging.error('Error sending email: %s' % (e))
client.quit()
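# Illustrative filter configuration (added as a sketch): these are the keys
# consumed by email_notify above; the surrounding file structure is an
# assumption and may not match email_actions' actual config schema.
#
# filters:
#   my_filter:
#     actions:
#       email:
#         host: smtp.example.com
#         port: 587
#         username: user@example.com
#         password: secret
#         secure: true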
|
lyndsysimon/pywik | refs/heads/primary | tests/__init__.py | 1 | import json
import unittest
import responses
class PywikTestCase(unittest.TestCase):
def add_fake_response(self, module, method, value, http_method='get', status=200):
responses.add(
getattr(responses, http_method.upper()),
'http://example.test/?'
'module={module}'
'&method={module}.{method}'
'&format=json'
'&token_auth=abc'.format(
module=module,
method=method
),
match_querystring=True,
body=json.dumps(value),
content_type='application/json',
            status=status,
) |
pa-siirja/shopcode | refs/heads/master | shopcode/core/tests.py | 24123 | from django.test import TestCase
# Create your tests here.
|
chris-wood/SCoNet | refs/heads/master | ns-3-dev/doc/tutorial-pt-br/pickle-to-xml.py | 392 | #!/usr/bin/python
# output xml format:
# <pages>
# <page url="xx"><prev url="yyy">zzz</prev><next url="hhh">lll</next><fragment>file.frag</fragment></page>
# ...
# </pages>
import pickle
import os
import codecs
def dump_pickles(out, dirname, filename, path):
f = open(os.path.join(dirname, filename), 'r')
data = pickle.load(f)
fragment_file = codecs.open(data['current_page_name'] + '.frag', mode='w', encoding='utf-8')
fragment_file.write(data['body'])
fragment_file.close()
out.write(' <page url="%s">\n' % path)
out.write(' <fragment>%s.frag</fragment>\n' % data['current_page_name'])
if data['prev'] is not None:
out.write(' <prev url="%s">%s</prev>\n' %
(os.path.normpath(os.path.join(path, data['prev']['link'])),
data['prev']['title']))
if data['next'] is not None:
out.write(' <next url="%s">%s</next>\n' %
(os.path.normpath(os.path.join(path, data['next']['link'])),
data['next']['title']))
out.write(' </page>\n')
f.close()
if data['next'] is not None:
next_path = os.path.normpath(os.path.join(path, data['next']['link']))
next_filename = os.path.basename(next_path) + '.fpickle'
dump_pickles(out, dirname, next_filename, next_path)
return
import sys
sys.stdout.write('<pages>\n')
dump_pickles(sys.stdout, os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1]), '/')
sys.stdout.write('</pages>')
|
flomotlik/formica | refs/heads/master | formica/stack_waiter.py | 1 | import sys
import time
from datetime import datetime
import boto3
import logging
from texttable import Texttable
EVENT_TABLE_HEADERS = ["Timestamp", "Status", "Type", "Logical ID", "Status reason"]
TABLE_COLUMN_SIZE = [28, 24, 30, 30, 50]
SUCCESSFUL_STATES = ["CREATE_COMPLETE", "UPDATE_COMPLETE", "DELETE_COMPLETE"]
FAILED_STATES = [
"CREATE_FAILED",
"DELETE_FAILED",
"ROLLBACK_FAILED",
"ROLLBACK_COMPLETE",
"UPDATE_FAILED",
"UPDATE_ROLLBACK_FAILED",
"UPDATE_ROLLBACK_COMPLETE",
]
logger = logging.getLogger(__name__)
SLEEP_TIME = 5
cf = boto3.client("cloudformation")
class StackWaiter:
def __init__(self, stack, timeout=0):
self.stack = stack
self.timeout = timeout
def wait(self, last_event):
header_printed = False
finished = False
canceled = False
start = datetime.now()
while not finished:
stack_events = cf.describe_stack_events(StackName=self.stack)["StackEvents"]
index = next((i for i, v in enumerate(stack_events) if v["EventId"] == last_event))
last_event = stack_events[0]["EventId"]
new_events = stack_events[0:index]
if new_events:
if not header_printed:
self.print_header()
header_printed = True
self.print_events(new_events)
stack_status = self.stack_status()
if stack_status in SUCCESSFUL_STATES:
finished = True
logger.info("Stack Status Successful: {}".format(stack_status))
elif stack_status in FAILED_STATES:
logger.info("Stack Status Failed: {}".format(stack_status))
sys.exit(1)
elif not canceled and self.timeout > 0 and (datetime.now() - start).seconds > (self.timeout * 60):
logger.info("Timeout of {} minute(s) reached. Canceling Update.".format(self.timeout))
canceled = True
cf.cancel_update_stack(StackName=self.stack)
else:
time.sleep(SLEEP_TIME)
def stack_status(self):
return cf.describe_stacks(StackName=self.stack)["Stacks"][0]["StackStatus"]
def __create_table(self):
table = Texttable()
table.set_cols_width(TABLE_COLUMN_SIZE)
return table
def print_header(self):
if self.timeout > 0:
logger.info("Timeout set to {} minute(s)".format(self.timeout))
table = self.__create_table()
table.add_rows([EVENT_TABLE_HEADERS])
table.set_deco(Texttable.BORDER | Texttable.VLINES)
logger.info(table.draw())
def print_events(self, events):
table = self.__create_table()
table.set_deco(0)
for event in reversed(events):
table.add_row(
[
event["Timestamp"].strftime("%Y-%m-%d %H:%M:%S %Z%z"),
event["ResourceStatus"],
event["ResourceType"],
event["LogicalResourceId"],
event.get("ResourceStatusReason", ""),
]
)
logger.info(table.draw())
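# Minimal usage sketch (added; the stack name is a placeholder). The caller
# records the most recent event id before starting an update, then waits:
#
# events = cf.describe_stack_events(StackName='my-stack')['StackEvents']
# StackWaiter('my-stack', timeout=10).wait(events[0]['EventId'])
# # returns on a successful terminal state, exits(1) on a failed one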
|
cherez/youtube-dl | refs/heads/master | youtube_dl/extractor/orf.py | 111 | # coding: utf-8
from __future__ import unicode_literals
import json
import re
import calendar
import datetime
from .common import InfoExtractor
from ..utils import (
HEADRequest,
unified_strdate,
ExtractorError,
strip_jsonp,
int_or_none,
float_or_none,
determine_ext,
remove_end,
)
class ORFTVthekIE(InfoExtractor):
IE_NAME = 'orf:tvthek'
IE_DESC = 'ORF TVthek'
_VALID_URL = r'https?://tvthek\.orf\.at/(?:programs/.+?/episodes|topics?/.+?|program/[^/]+)/(?P<id>\d+)'
_TESTS = [{
'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389',
'playlist': [{
'md5': '2942210346ed779588f428a92db88712',
'info_dict': {
'id': '8896777',
'ext': 'mp4',
'title': 'Aufgetischt: Mit der Steirischen Tafelrunde',
'description': 'md5:c1272f0245537812d4e36419c207b67d',
'duration': 2668,
'upload_date': '20141208',
},
}],
'skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256',
'playlist': [{
'md5': '68f543909aea49d621dfc7703a11cfaf',
'info_dict': {
'id': '7982259',
'ext': 'mp4',
'title': 'Best of Ingrid Thurnher',
'upload_date': '20140527',
'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".',
}
}],
'_skip': 'Blocked outside of Austria / Germany',
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
data_json = self._search_regex(
r'initializeAdworx\((.+?)\);\n', webpage, 'video info')
all_data = json.loads(data_json)
def get_segments(all_data):
for data in all_data:
if data['name'] in (
'Tracker::EPISODE_DETAIL_PAGE_OVER_PROGRAM',
'Tracker::EPISODE_DETAIL_PAGE_OVER_TOPIC'):
return data['values']['segments']
sdata = get_segments(all_data)
if not sdata:
raise ExtractorError('Unable to extract segments')
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
entries = []
for sd in sdata:
video_id = sd['id']
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['playlist_item_array']['sources']]
# Check for geoblocking.
# There is a property is_geoprotection, but that's always false
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._sort_formats(formats)
upload_date = unified_strdate(sd['created_date'])
entries.append({
'_type': 'video',
'id': video_id,
'title': sd['header'],
'formats': formats,
'description': sd.get('description'),
'duration': int(sd['duration_in_seconds']),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
class ORFOE1IE(InfoExtractor):
IE_NAME = 'orf:oe1'
IE_DESC = 'Radio Österreich 1'
_VALID_URL = r'http://oe1\.orf\.at/(?:programm/|konsole.*?#\?track_id=)(?P<id>[0-9]+)'
# Audios on ORF radio are only available for 7 days, so we can't add tests.
_TEST = {
'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211',
'only_matching': True,
}
def _real_extract(self, url):
show_id = self._match_id(url)
data = self._download_json(
'http://oe1.orf.at/programm/%s/konsole' % show_id,
show_id
)
timestamp = datetime.datetime.strptime('%s %s' % (
data['item']['day_label'],
data['item']['time']
), '%d.%m.%Y %H:%M')
unix_timestamp = calendar.timegm(timestamp.utctimetuple())
return {
'id': show_id,
'title': data['item']['title'],
'url': data['item']['url_stream'],
'ext': 'mp3',
'description': data['item'].get('info'),
'timestamp': unix_timestamp
}
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
_VALID_URL = r'http://fm4\.orf\.at/7tage/?#(?P<date>[0-9]+)/(?P<show>\w+)'
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
show_date = mobj.group('date')
show_id = mobj.group('show')
data = self._download_json(
'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id),
show_id
)
def extract_entry_dict(info, title, subtitle):
return {
'id': info['loopStreamId'].replace('.mp3', ''),
'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'],
'title': title,
'description': subtitle,
'duration': (info['end'] - info['start']) / 1000,
'timestamp': info['start'] / 1000,
'ext': 'mp3'
}
entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']]
return {
'_type': 'playlist',
'id': show_id,
'title': data['title'],
'description': data['subtitle'],
'entries': entries
}
class ORFIPTVIE(InfoExtractor):
IE_NAME = 'orf:iptv'
IE_DESC = 'iptv.ORF.at'
_VALID_URL = r'http://iptv\.orf\.at/(?:#/)?stories/(?P<id>\d+)'
_TEST = {
'url': 'http://iptv.orf.at/stories/2275236/',
'md5': 'c8b22af4718a4b4af58342529453e3e5',
'info_dict': {
'id': '350612',
'ext': 'flv',
'title': 'Weitere Evakuierungen um Vulkan Calbuco',
'description': 'md5:d689c959bdbcf04efeddedbf2299d633',
'duration': 68.197,
'thumbnail': 're:^https?://.*\.jpg$',
'upload_date': '20150425',
},
}
def _real_extract(self, url):
story_id = self._match_id(url)
webpage = self._download_webpage(
'http://iptv.orf.at/stories/%s' % story_id, story_id)
video_id = self._search_regex(
r'data-video(?:id)?="(\d+)"', webpage, 'video id')
data = self._download_json(
'http://bits.orf.at/filehandler/static-api/json/current/data.json?file=%s' % video_id,
video_id)[0]
duration = float_or_none(data['duration'], 1000)
video = data['sources']['default']
load_balancer_url = video['loadBalancerUrl']
abr = int_or_none(video.get('audioBitrate'))
vbr = int_or_none(video.get('bitrate'))
fps = int_or_none(video.get('videoFps'))
width = int_or_none(video.get('videoWidth'))
height = int_or_none(video.get('videoHeight'))
thumbnail = video.get('preview')
rendition = self._download_json(
load_balancer_url, video_id, transform_source=strip_jsonp)
f = {
'abr': abr,
'vbr': vbr,
'fps': fps,
'width': width,
'height': height,
}
formats = []
for format_id, format_url in rendition['redirect'].items():
if format_id == 'rtmp':
ff = f.copy()
ff.update({
'url': format_url,
'format_id': format_id,
})
formats.append(ff)
elif determine_ext(format_url) == 'f4m':
formats.extend(self._extract_f4m_formats(
format_url, video_id, f4m_id=format_id))
elif determine_ext(format_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', m3u8_id=format_id))
else:
continue
self._sort_formats(formats)
title = remove_end(self._og_search_title(webpage), ' - iptv.ORF.at')
description = self._og_search_description(webpage)
upload_date = unified_strdate(self._html_search_meta(
'dc.date', webpage, 'upload date'))
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'thumbnail': thumbnail,
'upload_date': upload_date,
'formats': formats,
}
|
adlius/osf.io | refs/heads/develop | osf_tests/conftest.py | 6 | import pytest
from framework.celery_tasks.handlers import handlers as celery_handlers
from framework.django.handlers import handlers as django_handlers
from framework.flask import rm_handlers
from website.app import init_app
from website.project.signals import contributor_added
from website.project.views.contributor import notify_added_contributor
# NOTE: autouse so that ADDONS_REQUESTED gets set on website.settings
@pytest.fixture(autouse=True, scope='session')
def app():
try:
test_app = init_app(routes=True, set_backends=False)
except AssertionError: # Routes have already been set up
test_app = init_app(routes=False, set_backends=False)
rm_handlers(test_app, django_handlers)
rm_handlers(test_app, celery_handlers)
test_app.testing = True
return test_app
@pytest.fixture(autouse=True, scope='session')
def app_init():
init_app(routes=False, set_backends=False)
@pytest.yield_fixture()
def request_context(app):
context = app.test_request_context(headers={
'Remote-Addr': '146.9.219.56',
'User-Agent': 'Mozilla/5.0 (X11; U; SunOS sun4u; en-US; rv:0.9.4.1) Gecko/20020518 Netscape6/6.2.3'
})
context.push()
yield context
context.pop()
DISCONNECTED_SIGNALS = {
# disconnect notify_add_contributor so that add_contributor does not send "fake" emails in tests
contributor_added: [notify_added_contributor]
}
@pytest.fixture(autouse=True)
def disconnected_signals():
for signal in DISCONNECTED_SIGNALS:
for receiver in DISCONNECTED_SIGNALS[signal]:
signal.disconnect(receiver)
|
ukanga/SickRage | refs/heads/master | lib/twilio/rest/resources/trunking/trunks.py | 24 | from .. import NextGenInstanceResource, NextGenListResource
class Trunk(NextGenInstanceResource):
"""
A Trunk resource.
    See the `SIP Trunking API reference
    <https://www.twilio.com/docs/sip-trunking/rest/trunks>`_
for more information.
.. attribute:: sid
The unique ID for this Trunk.
"""
def delete(self):
"""
Deletes a Trunk.
"""
return self.parent.delete_instance(self.name)
def update(self, **kwargs):
"""
Updates a Trunk.
"""
return self.parent.update_instance(self.name, **kwargs)
class Trunks(NextGenListResource):
""" A list of Trunk resources """
name = "Trunks"
instance = Trunk
key = "trunks"
def list(self, **kwargs):
"""
Retrieve the list of Trunk resources.
:param Page: The subset of results that needs to be fetched
:param PageSize: The size of the Page that needs to be fetched
"""
return super(Trunks, self).list(**kwargs)
def create(self, **kwargs):
"""
Creates a Trunk.
"""
return self.create_instance(kwargs)
def update(self, sid, body):
"""
Updates a Trunk.
:param sid: A human readable 34 character unique identifier
:param body: Request body
"""
return self.update_instance(sid, body)
def delete(self, sid):
"""
Deletes a Trunk.
:param sid: A human readable 34 character unique identifier
"""
return self.delete_instance(sid)
|
wilvk/ansible | refs/heads/devel | lib/ansible/modules/network/eos/eos_eapi.py | 10 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: eos_eapi
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Manage and configure Arista EOS eAPI.
requirements:
- "EOS v4.12 or greater"
description:
- Use to enable or disable eAPI access, and set the port and state
of http, https, local_http and unix-socket servers.
- When enabling eAPI access the default is to enable HTTP on port
80, enable HTTPS on port 443, disable local HTTP, and disable
Unix socket server. Use the options listed below to override the
default configuration.
- Requires EOS v4.12 or greater.
extends_documentation_fragment: eos
options:
http:
description:
- The C(http) argument controls the operating state of the HTTP
transport protocol when eAPI is present in the running-config.
When the value is set to True, the HTTP protocol is enabled and
when the value is set to False, the HTTP protocol is disabled.
By default, when eAPI is first configured, the HTTP protocol is
disabled.
required: false
default: no
choices: ['yes', 'no']
aliases: ['enable_http']
http_port:
description:
- Configures the HTTP port that will listen for connections when
the HTTP transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
required: false
default: 80
https:
description:
- The C(https) argument controls the operating state of the HTTPS
transport protocol when eAPI is present in the running-config.
When the value is set to True, the HTTPS protocol is enabled and
when the value is set to False, the HTTPS protocol is disabled.
By default, when eAPI is first configured, the HTTPS protocol is
enabled.
required: false
default: yes
choices: ['yes', 'no']
aliases: ['enable_https']
https_port:
description:
      - Configures the HTTPS port that will listen for connections when
        the HTTPS transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
required: false
default: 443
local_http:
description:
- The C(local_http) argument controls the operating state of the
local HTTP transport protocol when eAPI is present in the
running-config. When the value is set to True, the HTTP protocol
is enabled and restricted to connections from localhost only. When
the value is set to False, the HTTP local protocol is disabled.
      - Note this value is independent of the C(http) argument
required: false
default: false
choices: ['yes', 'no']
aliases: ['enable_local_http']
local_http_port:
description:
      - Configures the local HTTP port that will listen for connections
        when the local HTTP transport protocol is enabled. This argument accepts
integer values in the valid range of 1 to 65535.
required: false
default: 8080
socket:
description:
- The C(socket) argument controls the operating state of the UNIX
Domain Socket used to receive eAPI requests. When the value
of this argument is set to True, the UDS will listen for eAPI
requests. When the value is set to False, the UDS will not be
available to handle requests. By default when eAPI is first
configured, the UDS is disabled.
required: false
default: false
choices: ['yes', 'no']
aliases: ['enable_socket']
vrf:
description:
- The C(vrf) argument will configure eAPI to listen for connections
in the specified VRF. By default, eAPI transports will listen
for connections in the global table. This value requires the
VRF to already be created otherwise the task will fail.
required: false
default: default
version_added: "2.2"
config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
required: false
      default: null
version_added: "2.2"
state:
description:
- The C(state) argument controls the operational state of eAPI
on the remote device. When this argument is set to C(started),
eAPI is enabled to receive requests and when this argument is
C(stopped), eAPI is disabled and will not receive requests.
required: false
default: started
choices: ['started', 'stopped']
"""
EXAMPLES = """
- name: Enable eAPI access with default configuration
eos_eapi:
state: started
- name: Enable eAPI with no HTTP, HTTPS at port 9443, local HTTP at port 80, and socket enabled
eos_eapi:
state: started
http: false
https_port: 9443
local_http: yes
local_http_port: 80
socket: yes
- name: Shutdown eAPI access
eos_eapi:
state: stopped
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- management api http-commands
- protocol http port 81
- no protocol https
urls:
description: Hash of URL endpoints eAPI is listening on per interface
returned: when eAPI is started
type: dict
sample: {'Management1': ['http://172.26.10.1:80']}
session_name:
description: The EOS config session name used to load the configuration
returned: when changed is True
type: str
sample: ansible_1479315771
"""
import re
import time
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.eos.eos import run_commands, load_config
from ansible.module_utils.six import iteritems
from ansible.module_utils.network.eos.eos import eos_argument_spec, check_args
def check_transport(module):
transport = module.params['transport']
provider_transport = (module.params['provider'] or {}).get('transport')
if 'eapi' in (transport, provider_transport):
module.fail_json(msg='eos_eapi module is only supported over cli transport')
def validate_http_port(value, module):
if not 1 <= value <= 65535:
module.fail_json(msg='http_port must be between 1 and 65535')
def validate_https_port(value, module):
if not 1 <= value <= 65535:
        module.fail_json(msg='https_port must be between 1 and 65535')
def validate_local_http_port(value, module):
if not 1 <= value <= 65535:
        module.fail_json(msg='local_http_port must be between 1 and 65535')
def validate_vrf(value, module):
out = run_commands(module, ['show vrf'])
configured_vrfs = re.findall(r'^\s+(\w+)(?=\s)', out[0], re.M)
configured_vrfs.append('default')
if value not in configured_vrfs:
module.fail_json(msg='vrf `%s` is not configured on the system' % value)
def map_obj_to_commands(updates, module, warnings):
commands = list()
want, have = updates
def needs_update(x):
return want.get(x) is not None and (want.get(x) != have.get(x))
def add(cmd):
if 'management api http-commands' not in commands:
commands.insert(0, 'management api http-commands')
commands.append(cmd)
if any((needs_update('http'), needs_update('http_port'))):
if want['http'] is False:
add('no protocol http')
else:
if have['http'] is False and want['http'] in (False, None):
warnings.append('protocol http is not enabled, not configuring http port value')
else:
port = want['http_port'] or 80
add('protocol http port %s' % port)
if any((needs_update('https'), needs_update('https_port'))):
if want['https'] is False:
add('no protocol https')
else:
if have['https'] is False and want['https'] in (False, None):
warnings.append('protocol https is not enabled, not configuring https port value')
else:
port = want['https_port'] or 443
add('protocol https port %s' % port)
if any((needs_update('local_http'), needs_update('local_http_port'))):
if want['local_http'] is False:
add('no protocol http localhost')
else:
if have['local_http'] is False and want['local_http'] in (False, None):
warnings.append('protocol local_http is not enabled, not configuring local_http port value')
else:
port = want['local_http_port'] or 8080
add('protocol http localhost port %s' % port)
    if needs_update('socket'):
if want['socket'] is False:
add('no protocol unix-socket')
else:
add('protocol unix-socket')
if needs_update('state') and not needs_update('vrf'):
if want['state'] == 'stopped':
add('shutdown')
elif want['state'] == 'started':
add('no shutdown')
if needs_update('vrf'):
add('vrf %s' % want['vrf'])
# switching operational vrfs here
# need to add the desired state as well
if want['state'] == 'stopped':
add('shutdown')
elif want['state'] == 'started':
add('no shutdown')
return commands
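# Illustrative translation (added note; values are hypothetical): with
# have reporting http disabled/https enabled and want asking for http on
# port 81 and https off, the function above yields roughly:
#
#   ['management api http-commands',
#    'protocol http port 81',
#    'no protocol https']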
def parse_state(data):
if data[0]['enabled']:
return 'started'
else:
return 'stopped'
def map_config_to_obj(module):
out = run_commands(module, ['show management api http-commands | json'])
return {
'http': out[0]['httpServer']['configured'],
'http_port': out[0]['httpServer']['port'],
'https': out[0]['httpsServer']['configured'],
'https_port': out[0]['httpsServer']['port'],
'local_http': out[0]['localHttpServer']['configured'],
'local_http_port': out[0]['localHttpServer']['port'],
'socket': out[0]['unixSocketServer']['configured'],
'vrf': out[0]['vrf'],
'state': parse_state(out)
}
def map_params_to_obj(module):
obj = {
'http': module.params['http'],
'http_port': module.params['http_port'],
'https': module.params['https'],
'https_port': module.params['https_port'],
'local_http': module.params['local_http'],
'local_http_port': module.params['local_http_port'],
'socket': module.params['socket'],
'vrf': module.params['vrf'],
'state': module.params['state']
}
for key, value in iteritems(obj):
if value:
validator = globals().get('validate_%s' % key)
if validator:
validator(value, module)
return obj
def verify_state(updates, module):
want, have = updates
invalid_state = [('http', 'httpServer'),
('https', 'httpsServer'),
('local_http', 'localHttpServer'),
('socket', 'unixSocketServer')]
timeout = module.params['timeout'] or 30
state = module.params['state']
while invalid_state:
out = run_commands(module, ['show management api http-commands | json'])
for index, item in enumerate(invalid_state):
want_key, eapi_key = item
if want[want_key] is not None:
if want[want_key] == out[0][eapi_key]['running']:
del invalid_state[index]
elif state == 'stopped':
if not out[0][eapi_key]['running']:
del invalid_state[index]
else:
del invalid_state[index]
time.sleep(1)
timeout -= 1
if timeout == 0:
module.fail_json(msg='timeout expired before eapi running state changed')
def collect_facts(module, result):
out = run_commands(module, ['show management api http-commands | json'])
facts = dict(eos_eapi_urls=dict())
for each in out[0]['urls']:
intf, url = each.split(' : ')
key = str(intf).strip()
if key not in facts['eos_eapi_urls']:
facts['eos_eapi_urls'][key] = list()
facts['eos_eapi_urls'][key].append(str(url).strip())
result['ansible_facts'] = facts
def main():
""" main entry point for module execution
"""
argument_spec = dict(
http=dict(aliases=['enable_http'], type='bool'),
http_port=dict(type='int'),
https=dict(aliases=['enable_https'], type='bool'),
https_port=dict(type='int'),
local_http=dict(aliases=['enable_local_http'], type='bool'),
local_http_port=dict(type='int'),
socket=dict(aliases=['enable_socket'], type='bool'),
vrf=dict(default='default'),
config=dict(),
state=dict(default='started', choices=['stopped', 'started']),
)
argument_spec.update(eos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
check_transport(module)
result = {'changed': False}
warnings = list()
if module.params['config']:
warnings.append('config parameter is no longer necessary and will be ignored')
want = map_params_to_obj(module)
have = map_config_to_obj(module)
commands = map_obj_to_commands((want, have), module, warnings)
result['commands'] = commands
if commands:
commit = not module.check_mode
response = load_config(module, commands, commit=commit)
if response.get('diff') and module._diff:
result['diff'] = {'prepared': response.get('diff')}
result['session_name'] = response.get('session')
result['changed'] = True
if result['changed']:
verify_state((want, have), module)
collect_facts(module, result)
if warnings:
result['warnings'] = warnings
module.exit_json(**result)
if __name__ == '__main__':
main()
|
jfburkhart/shyft | refs/heads/master | shyft/tests/test_concat_data_repository.py | 1 | from __future__ import print_function
import unittest
from os import path
from shyft import shyftdata_dir
from shyft import api
from shyft.repository.netcdf.concat_data_repository import ConcatDataRepository
#from shyft.repository.netcdf.concant_data_repository import ConcatDataRepositoryError
from shapely.geometry import box
#import netCDF4
import numpy as np
class ConcatDataRepositoryTestCase(unittest.TestCase):
"""
TODO: This test needs to be written, along with utility that generates concat from sources
"""
@property
def _epsg_bbox(self):
"""A slice of test-data located in shyft-data repository/arome."""
EPSG = 32632
x0 = 436100.0 # lower left
y0 = 6823000.0 # lower right
nx = 74
ny = 24
dx = 1000.0
dy = 1000.0
return EPSG, ([x0, x0 + nx*dx, x0 + nx*dx, x0], [y0, y0, y0 + ny*dy, y0 + ny*dy]), box(x0, y0, x0 + dx*nx, y0 + dy*ny)
def test_transform_functions_fixed_interval(self):
"""
test the _transform_raw function.
"""
return
# TODO: add concat file in shyft-data, then implement the tests
EPSG, bbox, bpoly = self._epsg_bbox
# Period start
t0 = api.YMDhms(2015, 8, 24, 0)
date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day, t0.hour)
utc = api.Calendar() # No offset gives Utc
        f1 = path.join(shyftdata_dir, "repository", "arome_data_repository", f"arome_metcoop_red_default2_5km_{date_str}_diff_time_unit.nc")
ar1 = ConcatDataRepository(epsg=EPSG, filename=f1)
np_raw_array = np.array(
[ # 0 # 1 # 2 # 3
[1.0, 2.0, 3.0, 4.0],
[1.1, 2.1, 3.1, 4.1],
[1.2, 2.2, 3.2, 4.2],
[1.4, 2.5, 3.6, 4.7]
], dtype=np.float64
)
raw_values = {
'wind_speed': (np_raw_array, 'wind_speed', 'm/s'),
'rel_hum': (np_raw_array, 'relative_humidity_2m', '?'),
'temperature': (273.15 + np_raw_array, 'air_temperature_2m', 'K'),
'radiation': (3600.0*np_raw_array, 'integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time', 'W s/m2'),
'prepitation_acc': (np_raw_array, 'precipitation_amount_acc', 'Mg/m^2'),
'prepitation': (np_raw_array, 'precipitation_amount', 'mm')
}
raw_time = np.array([0, 3600, 7200, 10800], dtype=np.int64)
rd = ar1._transform_raw(raw_values, raw_time)
ta3 = api.TimeAxis(api.time(0), api.time(3600), 3)
ta4 = api.TimeAxis(api.time(0), api.time(3600), 4)
e_precip_acc = np.array(
[ # 0 # 1 # 2 # 3
[100.0, 100.0, 100.0, 100.0],
[100.0, 100.0, 100.0, 100.0],
[200.0, 300.0, 400.0, 500.0],
], dtype=np.float64
)
e_precip = np.array(
[ # 0 # 1 # 2 # 3
[1.1, 2.1, 3.1, 4.1],
[1.2, 2.2, 3.2, 4.2],
[1.4, 2.5, 3.6, 4.7]
], dtype=np.float64
)
e_rad = np.array(
[ # 0 # 1 # 2 # 3
[0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1],
[0.2, 0.3, 0.4, 0.5],
], dtype=np.float64
)
e = {
'wind_speed': (np_raw_array, ta4),
'rel_hum': (np_raw_array, ta4),
'temperature': (np_raw_array, ta4),
'radiation': (e_rad, ta3),
'prepitation_acc': (e_precip_acc, ta3),
'prepitation': (e_precip, ta3)
}
self.assertIsNotNone(rd)
for k, r in rd.items():
self.assertTrue(k in e)
self.assertEqual(r[1], e[k][1], "expect correct time-axis")
self.assertTrue(np.allclose(r[0], e[k][0]), "expect exact correct values")
def test_transform_functions_variable_interval(self):
"""
test the _transform_raw function.
"""
return
# TODO: add concat file in shyft-data, then implement the tests
EPSG, bbox, bpoly = self._epsg_bbox
# Period start
n_hours = 30
t0 = api.YMDhms(2015, 8, 24, 0)
date_str = "{}{:02}{:02}_{:02}".format(t0.year, t0.month, t0.day, t0.hour)
utc = api.Calendar() # No offset gives Utc
base_dir = path.join(shyftdata_dir, "repository", "arome_data_repository")
f1 = "arome_metcoop_red_default2_5km_{}_diff_time_unit.nc".format(date_str)
ar1 = ConcatDataRepository(EPSG, base_dir, filename=f1)
np_raw_array = np.array(
[ # 0 # 1 # 2 # 3
[1.0, 2.0, 3.0, 4.0],
[1.1, 2.1, 3.1, 4.1],
[1.2, 2.2, 3.2, 4.2],
[1.4, 2.5, 3.6, 4.7]
], dtype=np.float64
)
raw_values = {
'wind_speed': (np_raw_array, 'wind_speed', 'm/s'),
'rel_hum': (np_raw_array, 'relative_humidity_2m', '?'),
'temperature': (273.15 + np_raw_array, 'air_temperature_2m', 'K'),
'radiation': (3600.0*np_raw_array, 'integral_of_surface_downwelling_shortwave_flux_in_air_wrt_time', 'W s/m2'),
'prepitation_acc': (np_raw_array, 'precipitation_amount_acc', 'Mg/m^2'),
'prepitation': (np_raw_array, 'precipitation_amount', 'mm')
}
raw_time = np.array([0, 3600, 7200, 7200+2*3600], dtype=np.int64) # last step is 2 hours!
rd = ar1._transform_raw(raw_values, raw_time)
ta3 = api.TimeAxis(api.UtcTimeVector(raw_time[:-1]), api.ConcatData(int(raw_time[-1])))
ta4 = api.TimeAxis(api.UtcTimeVector(raw_time), api.ConcatData(int(raw_time[-1]+2*3600))) # assume last step is also 2 hours
e_precip_acc = np.array(
[ # 0 # 1 # 2 # 3
[100.0, 100.0, 100.0, 100.0],
[100.0, 100.0, 100.0, 100.0],
[100.0, 150.0, 200.0, 250.0],
], dtype=np.float64
)
e_precip = np.array(
[ # 0 # 1 # 2 # 3
[1.1, 2.1, 3.1, 4.1],
[1.2, 2.2, 3.2, 4.2],
[1.4, 2.5, 3.6, 4.7]
], dtype=np.float64
)
e_rad = np.array(
[ # 0 # 1 # 2 # 3
[0.1, 0.1, 0.1, 0.1],
[0.1, 0.1, 0.1, 0.1],
[0.1, 0.15, 0.2, 0.25],
], dtype=np.float64
)
e = {
'wind_speed': (np_raw_array, ta4),
'rel_hum': (np_raw_array, ta4),
'temperature': (np_raw_array, ta4),
'radiation': (e_rad, ta3),
'prepitation_acc': (e_precip_acc, ta3),
'prepitation': (e_precip, ta3)
}
self.assertIsNotNone(rd)
for k, r in rd.items():
self.assertTrue(k in e)
self.assertEqual(r[1], e[k][1], "expect correct time-axis")
self.assertTrue(np.allclose(r[0], e[k][0]), "expect exact correct values")
if __name__ == "__main__":
unittest.main()
|
DiamondLightSource/auto_tomo_calibration-experimental | refs/heads/master | measure_resolution/find_resolution.py | 1 | import numpy as np
import pylab as pl
from scipy.ndimage.filters import median_filter
from skimage.filter import threshold_otsu
from skimage import io, exposure
import os
import fit_data
from scipy import misc
from math import ceil
def save_data(filename, data):
import cPickle
print("Saving data")
f = open(filename, 'w')
cPickle.dump(data, f)
f.close()
def create_dir(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def fit_and_visualize(image, folder_name, r1, r2, window_size):
"""
Takes in the region of interest, which is a 2D image.
Modulation is calculated for the lines further away
from the touch point. It is used for normalizing MTF.
Intensity_left/right store the intensities of the left and right
spheres, which are the mean pixel values over a narrow strip
along every sphere.
"""
# Denoise using a median filter
if window_size != 0:
denoised = median_filter(image, window_size)
else:
denoised = image
# Save the images containing the gaps
misc.imsave(folder_name + "touch_img.png", denoised)
misc.imsave(folder_name + "touch_img.tif", denoised)
# Calculate average sphere intensity. They are segmented using
# thresholding and the pixels are averaged
left = denoised[:, 0:image.shape[1] / 2. - 10]
right = denoised[:, image.shape[1] / 2. + 10:image.shape[1]]
thresh_l = threshold_otsu(left)
thresh_r = threshold_otsu(right)
sphere_pixels_l = []
for y in range(left.shape[1]):
for x in range(left.shape[0]):
pixel = left[x, y]
if pixel > thresh_l:
sphere_pixels_l.append(pixel)
sphere_pixels_r = []
for y in range(right.shape[1]):
for x in range(right.shape[0]):
pixel = right[x, y]
if pixel > thresh_r:
sphere_pixels_r.append(pixel)
intensity_left = np.mean(sphere_pixels_l)
intensity_right = np.mean(sphere_pixels_r)
# This is the modulation at a high separation, used
# to normalize the MTF values between 0% and 100%
low_freq_left = (intensity_left - np.min(denoised)) /\
(intensity_left + np.min(denoised))
low_freq_right = (intensity_right - np.min(denoised)) /\
(intensity_right + np.min(denoised))
gap = []
mtf_cleft = []
mtf_cright = []
# Take values only to the middle of the image
# since the problem is symmetric on the other side
for i in np.arange(0., image.shape[0] / 2.):
Xdata = []
Ydata = []
gapX = []
gapY = []
distance = dist_between_spheres(r1, r2, i, image.shape[0] / 2.)
signal = [pixel for pixel in denoised[i, :]]
gap_signal = []
for j in np.arange(0., image.shape[1]):
# Used to plot line on the image
Xdata.append(j)
Ydata.append(i)
            # If we are in the region where the spheres are separated,
            # store these values to plot the gap
if image.shape[1] / 2. + distance / 2. > j > image.shape[1] / 2. - distance / 2.:
gapX.append(j)
gapY.append(i)
# Take the region around the gap, which later on will be used
# to define the intensity at the gap between the spheres.
# The width of the gap is not exact
if image.shape[1] / 2. + distance + 10 > j > image.shape[1] / 2. - distance - 10:
gap_signal.append(denoised[i, j])
# Check if the gap still exists
if gap_signal:
            # If the signal minimum is at least as high as the minimum in
            # the gap, the contrast must be lost in the centre - stop
if distance < 10:
if np.min(signal) >= np.min(gap_signal):
mtf = 100 * modulation(np.min(gap_signal), intensity_left, distance) / low_freq_left
# PLOT THE REGION AROUND THE MIDDLE OF THE CURVE
# PLOT THE LINE PROFILE
                    # Do this only if the MTF is above the threshold;
                    # below that, the gap is unresolved and the Gaussian
                    # width starts to spread out, which ruins the results
if mtf >= 1:
# FIT A GAUSSIAN
amp = -np.min(gap_signal)
centre = np.mean(np.argwhere(np.min(gap_signal) == gap_signal))
sigma = distance / 6.
offset = np.max(gap_signal)
guess_params = [amp, centre, sigma, offset]
Xfit, Yfit, fwhm, fit_centre = fit_data.GaussConst(gap_signal, guess_params)
ymax = np.max(denoised)
ymin = np.min(denoised)
data = np.array([range(len(gap_signal)), gap_signal]).T
pl.plot(data[:,0],
data[:,1], 'bo')
pl.plot(Xfit, Yfit)
pl.title("Analytical {0} / Fitted dist {1} / Contrast {2} ".format(round(distance, 2), round(fwhm, 2), round(mtf,2)))
pl.ylim(ymin, ymax)
# PLOT THE ANALYTICAL WIDTH
pl.plot(np.repeat(fit_centre - distance / 2., len(Yfit)),
np.arange(len(Yfit)), 'r-')
pl.plot(np.repeat(fit_centre + distance / 2., len(Yfit)),
np.arange(len(Yfit)), 'r-', label = "Analytical")
pl.legend()
pl.savefig(folder_name + 'results%i.png' % i)
pl.close('all')
# Store the values of the gap width for every value
# of contrast
gap.append(distance)
mtf = 100 * modulation(np.min(gap_signal), intensity_left, distance) / low_freq_left
mtf_cleft.append(mtf)
mtf = 100 * modulation(np.min(gap_signal), intensity_right, distance) / low_freq_right
mtf_cright.append(mtf)
############# LEFT SPHERE #########################
pl.gca().invert_xaxis()
pl.plot(gap, mtf_cleft, 'r', label="left sphere data")
pl.xlabel("Width in pixels")
pl.ylabel("MTF %")
pl.tight_layout()
# Save data points
save_data(folder_name + 'gap_width.npy', gap)
save_data(folder_name + 'mtf_cleft.npy', mtf_cleft)
f = open(folder_name + 'gap_width.txt', 'w')
for i in range(len(gap)):
f.write(repr(gap[i]) + '\n')
f.close()
f = open(folder_name + 'mtf_cleft.txt', 'w')
for i in range(len(mtf_cleft)):
f.write(repr(mtf_cleft[i]) + '\n')
f.close()
pl.savefig(folder_name + 'mtf_left.png')
pl.close('all')
############### RIGHT SPHERE #####################
pl.gca().invert_xaxis()
    pl.plot(gap, mtf_cright, 'r', label="right sphere data")
pl.xlabel("Width in pixels")
pl.ylabel("MTF %")
# Save data points
save_data(folder_name + 'mtf_cright.npy', mtf_cright)
f = open(folder_name + 'mtf_cright.txt', 'w')
for i in range(len(mtf_cright)):
f.write(repr(mtf_cright[i]) + '\n')
f.close()
pl.savefig(folder_name + 'mtf_right.png')
pl.close('all')
def dist_between_spheres(r1, r2, Y, C):
"""
Calculate distance between the spheres using
geometry. Read report to see how it is done.
"""
h = C - Y
d1 = np.sqrt(r1**2 - h**2)
d2 = np.sqrt(r2**2 - h**2)
dist = r1 - d1 + r2 - d2
return dist
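# Illustrative check (hypothetical values, not from the original report):
# the gap follows from the chord geometry d_i = sqrt(r_i**2 - h**2), so for
# two touching spheres of radius 50 sampled 30 px above the centre row,
# dist = 2 * (50 - sqrt(50**2 - 30**2)) = 20 px.
def _demo_dist_between_spheres():
    r, C = 50., 100.
    return dist_between_spheres(r, r, C - 30., C)  # -> 20.0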
def modulation(minima, contrast, distance):
"""
modulation(contrast) = (Imax - Imin) / (Imax + Imin)
"""
numerator = contrast - minima
denominator = contrast + minima
return numerator / denominator
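# Quick numeric check (hypothetical intensities): a sphere intensity of 200
# against a gap minimum of 50 gives (200 - 50) / (200 + 50) = 0.6. Note that
# the 'distance' argument does not enter the formula; it is kept only to
# match the call sites above.
def _demo_modulation():
    return modulation(50., 200., 0.)  # -> 0.6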
def modulus(vect):
"""
Get the modulus of a vector
"""
return np.sqrt(vect[0]**2 + vect[1]**2 + vect[2]**2)
def distance_3D(c1, c2):
"""
Calculate the distance between two points
"""
return np.sqrt((c1[0] - c2[0]) ** 2 + (c1[1] - c2[1]) ** 2 + (c1[2] - c2[2]) ** 2)
def vector_3D(pt1, pt2, t):
    """
    Compute the 3D line equation in parametric form, with the
    direction normalised so that t advances one pixel per unit:
    (x,y,z) = (x1,y1,z1) + t * (x2-x1, y2-y1, z2-z1) / |pt2 - pt1|
    """
x1, y1, z1 = pt1
x2, y2, z2 = pt2
    # named 'length' to avoid shadowing the modulus() helper defined above
    length = np.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2 + (z1 - z2) ** 2)
    x = x1 + (x2 - x1) / length * t
    y = y1 + (y2 - y1) / length * t
    z = z1 + (z2 - z1) / length * t
return [x, y, z]
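# Sanity check (hypothetical endpoints): because the direction is
# normalised, t advances in steps of one pixel along the line, so t = 5
# on a 3-4-5 line lands exactly on the far endpoint.
def _demo_vector_3D():
    return vector_3D((0., 0., 0.), (3., 4., 0.), 5.)  # -> [3.0, 4.0, 0.0]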
def vector_perpendicular_3D(pt1, pt2, which, Z, Sx):
"""
    Returns a vector S perpendicular to the line
    between pt1 and pt2 AND such that it lies in the
    x-y plane at height Z
'which' describes through which point to draw it (pt1 or pt2)
Sx describes the position along the perpendicular vector.
"""
v = ((pt2[0] - pt1[0]), (pt2[1] - pt1[1]), (pt2[2] - pt1[2]))
if which == 1:
Sx, Sy = (pt1[0] - v[1] / np.sqrt(v[0]**2 + v[1]**2) * Sx,
pt1[1] + v[0] / np.sqrt(v[0]**2 + v[1]**2) * Sx)
Sz = pt1[2]
elif which == 2:
Sx, Sy = (pt2[0] - v[1] / np.sqrt(v[0]**2 + v[1]**2) * Sx,
pt2[1] + v[0] / np.sqrt(v[0]**2 + v[1]**2) * Sx)
Sz = pt2[2]
return [Sx, Sy, Sz + Z]
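# Sanity check (hypothetical points): for a line along +x, the in-plane
# offset returned is purely along y, i.e. perpendicular to the line.
def _demo_vector_perpendicular_3D():
    # expected result: [0.0, 5.0, 0.0]
    return vector_perpendicular_3D((0., 0., 0.), (10., 0., 0.), 1, 0., 5.)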
def vector_perpendicular_ct_pt(pt1, pt2, r1, Sx):
"""
Vector must be perpendicular to the one
connecting the centres of the spheres, v1, and
to the vector perpendicular to v1, that goes
throught he point of contact
"""
v = ((pt2[0] - pt1[0]), (pt2[1] - pt1[1]), (pt2[2] - pt1[2]))
ct_pt = vector_3D(pt1, pt2, r1)
perp_v_in_xy = np.array(vector_perpendicular_3D(ct_pt, pt2, 1, 0, -1)) -\
np.array(vector_perpendicular_3D(ct_pt, pt2, 1, 0, 1))
vect = np.cross(v, perp_v_in_xy)
mod_vect = modulus(vect)
x = ct_pt[0] + vect[0] / mod_vect * Sx
y = ct_pt[1] + vect[1] / mod_vect * Sx
z = ct_pt[2] + vect[2] / mod_vect * Sx
return [x, y, z]
# Find perpendicular vector components
# if np.isinf(1. / np.sqrt(v[0]**2 + v[2]**2)):
# v1 = np.array([ct_pt[0],
# ct_pt[1] - v[2] / np.sqrt(v[1]**2 + v[2]**2) * Sx,
# ct_pt[2] + v[1] / np.sqrt(v[1]**2 + v[2]**2) * Sx])
#
# elif np.isinf(1. / np.sqrt(v[1]**2 + v[2]**2)):
# v1 = np.array([ct_pt[0] - v[2] / np.sqrt(v[0]**2 + v[2]**2) * Sx,
# ct_pt[1],
# ct_pt[2] + v[0] / np.sqrt(v[0]**2 + v[2]**2) * Sx])
# else:
# v1 = np.array([ct_pt[0] - v[2] / np.sqrt(v[0]**2 + v[2]**2) * Sx,
# ct_pt[1] - v[2] / np.sqrt(v[1]**2 + v[2]**2) * Sx,
# ct_pt[2] + v[0] / np.sqrt(v[0]**2 + v[2]**2) * Sx])
#
# # Add them to get the final vector
# vector_sum = v1 + v2
#
# return v1
# v1 = (0, 0, 0)
# v2 = (5, 0, 5)
#
# vector1 = [vector_3D(v1, v2, i) for i in range(5)]
#
# vector2 = [vector_perpendicular_ct_pt(v1, v2, 1, i) for i in np.arange(5)]
#
# print vector1
# print vector2
def project_onto_plane(vect):
"""
Return vector projection onto the xy plane
"""
x, y, z = vect
return (x, y, 0.)
def find_contact_3D(centroids, radius, tol = 20.):
"""
Arrays of all the centroids and all radii
tol defines the error tolerance between radii distance
Check all centre pairs and determine,
based on their radii, if they are in contact or not
"""
touch_pts = []
centres = []
radii = []
N = len(centroids)
for i in range(N - 1):
for j in range(i + 1, N):
c1 = centroids[i]
c2 = centroids[j]
r1 = radius[i]
r2 = radius[j]
D = r1 + r2
L = distance_3D(c1, c2)
print ""
print "Difference between radii sum and centre distance is", abs(D - L)
print "Distance is ", L
print "Radii sum is ", D
print ""
if abs(D - L) <= tol:
touch_pt = vector_3D(c1, c2, r1)
touch_pts.append(touch_pt)
centres.append((c1, c2))
radii.append((r1, r2))
return centres, touch_pts, radii
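# Illustrative check (hypothetical centroids): spheres of radius 5 with
# centres 10 px apart are exactly in contact, so a single touch point is
# reported midway between the centres.
def _demo_find_contact_3D():
    centres, touch_pts, radii = find_contact_3D(
        [(0., 0., 0.), (10., 0., 0.)], [5., 5.])
    return touch_pts  # -> [[5.0, 0.0, 0.0]]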
def sample_rate(P2, P1):
"""
When we have to loop through pixels at
an angle, the angle needs to be taken
into account. This calculates the change in distance
depending on where the vector is pointing
"""
v = np.array([P1[0] - P2[0], P1[1] - P2[1], P1[2] - P2[2]])
normal = np.array([0,0,1])
projection = np.cross(normal, np.cross(v,normal))
c = np.dot(v, projection) / modulus(projection) / modulus(v)
return 1. / c
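# Numeric check (hypothetical endpoints): a line at 45 degrees to the x-y
# plane has cos(A) = 1/sqrt(2), so the per-step distance is sqrt(2) px.
def _demo_sample_rate():
    return sample_rate((0., 0., 0.), (1., 0., 1.))  # -> ~1.414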
def get_slice(P1, P2, name):
"""
Get slice through centre for analysis
"""
centre_dist = distance_3D(P1, P2)
sampling = sample_rate(P1, P2) - 1
    plot_img = np.zeros((np.int(np.round(centre_dist / 2. + 1, 0)),
                         np.int(np.round(centre_dist + 1, 0))))
Xrange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
Trange = np.linspace(0., centre_dist,
centre_dist * 2.)
for time in Trange:
# Go up along the line
pt = vector_3D(P1, P2, time + sampling)
interpolated = trilinear(name, pt)
for X in Xrange:
# Get along the X direction for every height
x, y, z = vector_perpendicular_3D(pt, P2, 1, 0, X)
# pixel_value = interpolation(x, y, img)
pixel_value = interpolated([x, y, z])
plot_img[X + centre_dist / 4., time] = pixel_value
return plot_img
def get_slice_perpendicular(P1, P2, r1, name):
"""
Finds a vector between the centres.
Takes the point on the vector that is on the contact point.
Finds a vector going through the contact point that is also
perpendicular to the line connecting the centres.
The slice is then reconstructed from several images
"""
# time goes along the vector between P1 and P2
# since it might be at an angle, I can't loop in 1
# pixel increments - this will miss certain slices. Therefore,
# I need to loop through by 1/cosA, where A is angle between
# the xy plane and vector P1->P2
centre_dist = distance_3D(P1, P2)
perp1 = vector_perpendicular_ct_pt(P1, P2, r1, centre_dist /4.)
perp2 = vector_perpendicular_ct_pt(P1, P2, r1, -centre_dist /4.)
sampling = sample_rate(perp1, perp2) - 1
plot_img = np.zeros((np.int(np.round(centre_dist / 2. + 1, 0)), np.int(np.round(centre_dist / 2. + 1, 0))))
Xrange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
Trange = np.linspace(-centre_dist / 4., centre_dist /4.,
centre_dist / 2. + 1)
for time in Trange:
# Go up along the line
pt = vector_perpendicular_ct_pt(P1, P2, r1, time + sampling)
interpolated = trilinear(name, pt)
for X in Xrange:
# Get along the X direction for every height
x, y, z = vector_perpendicular_3D(pt, P2, 1, 0, X)
# pixel_value = interpolation(x, y, img)
pixel_value = interpolated([x, y, z])
plot_img[X + centre_dist / 4., time + centre_dist / 4.] = pixel_value
return plot_img
def check_alignment(image, r1, r2):
"""
    Take a particular line through the image and check
if the spheres were properly aligned in the z direction.
It happens to be off by a pixel or two sometimes
"""
distance = dist_between_spheres(r1, r2, image.shape[0] / 2. + 10, image.shape[0] / 2.)
gap_signal = []
denoised = median_filter(image.copy(), 3)
for j in np.arange(0., image.shape[1]):
# Take the region around the gap, which later on will be used
# to define the intensity at the gap between the spheres.
# The width of the gap is not exact
if image.shape[1] / 2. + distance + 5 > j > image.shape[1] / 2. - distance - 5:
gap_signal.append(denoised[image.shape[0] / 2. + 10, j])
centre = np.mean(np.argwhere(np.min(gap_signal) == gap_signal))
print centre
print len(gap_signal) / 2.
print
    return abs(centre - len(gap_signal) / 2.) <= 1.5
def interpolation(x, y, img):
"""
http://en.wikipedia.org/wiki/Bilinear_interpolation
"""
from math import floor, ceil
x1 = ceil(x)
x2 = floor(x)
y1 = ceil(y)
y2 = floor(y)
Q11 = (x1, y1)
Q12 = (x1, y2)
Q21 = (x2, y1)
Q22 = (x2, y2)
f11 = img[Q11[0], Q11[1]]
f12 = img[Q12[0], Q12[1]]
f21 = img[Q21[0], Q21[1]]
f22 = img[Q22[0], Q22[1]]
try:
pixel_value = 1 / ((x2 - x1) * (y2 - y1)) * (f11 * (x2 - x) * (y2 - y) +
f21 * (x - x1) * (y2 - y) +
f12 * (x2 - x) * (y - y1) +
f22 * (x - x1) * (y - y1))
    except ZeroDivisionError:
        # integer coordinates give x1 == x2 (or y1 == y2); fall back to
        # a plain average of the four neighbouring pixels
        pixel_value = np.mean([f11, f12, f21, f22])
return pixel_value
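# Quick check on a synthetic 2x2 neighbourhood: sampling the exact centre
# of four pixels returns their mean. Integer coordinates instead hit the
# fallback branch, since x1 == x2 there.
def _demo_interpolation():
    img = np.array([[0., 1.], [2., 3.]])
    return interpolation(0.5, 0.5, img)  # -> 1.5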
def trilinear(name, pt):
"""
Trilinear interpolation
http://docs.scipy.org/doc/scipy-dev/reference/generated/
scipy.interpolate.RegularGridInterpolator.html
"""
from scipy.interpolate import RegularGridInterpolator
input_file = name % int(np.floor(pt[2]))
data0 = io.imread(input_file)
input_file = name % int(np.ceil(pt[2]))
data1 = io.imread(input_file)
xdim = data0.shape[0]
ydim = data0.shape[1]
zdim = 2
empty_arr = np.empty((xdim, ydim, zdim))
empty_arr[:, :, 0] = data0
empty_arr[:, :, 1] = data1
x = np.linspace(0, xdim - 1, xdim)
y = np.linspace(0, ydim - 1, ydim)
z = np.linspace(int(np.floor(pt[2])), int(np.ceil(pt[2])), zdim)
interp = RegularGridInterpolator((x, y, z), empty_arr)
return interp
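# Usage sketch (hypothetical file pattern): 'name' is assumed to be a
# printf-style template for the slice filenames, with the z index filled
# in by trilinear() itself:
#
#   interp = trilinear('recon_slice_%05i.tif', (120.3, 88.7, 45.2))
#   value = interp([120.3, 88.7, 45.2])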
# def load_up_images(pt1, pt2, name):
# """
# Stack up the region of interest from image slices
# """
# zstart = int(np.min(pt1[2], pt2[2]))
# zend = int(np.max(pt1[2], pt2[2]))
#
# xdim =
# ydim
#
# input_file = name % zstart
# data = io.imread(input_file)
#
# zrange = np.linspace(zstart, zend, zend-zstart)
# store_ROI = np.empty((data.shape[0]))
#
# for i in zrange:
# input_file = name % i
# data = io.imread(input_file)
#
# return
def touch_lines_3D(pt1, pt2, folder_name, name, r1, r2, window_size):
"""
Goes along lines in the region between
two points.
Used for obtaining the widths of the gaussian fitted
to the gap between spheres
"""
# Create an array to store the slanted image slices
# used for plotting
L = distance_3D(pt1, pt2)
D = r1 + r2
print ""
print "Difference between radii sum and centre distance is", abs(D - L)
print "Distance is ", L
print "Radii sum is ", D
print ""
create_dir(folder_name + "plots/")
perpendicular_slice = get_slice_perpendicular(pt1, pt2, r1, name)
misc.imsave(folder_name + "perp_slice.tif", perpendicular_slice)
print "saving the perpendicular slice"
ROI = get_slice(pt1, pt2, name)
fit_and_visualize(ROI, folder_name + "plots/", r1, r2, window_size)
print "saving the slice for MTF"
return
# import cPickle
# import pylab as pl
#
# f = open("/dls/tmp/jjl36382/50867/plots/(793.0, 1143.07, 801.86),(682.61, 1141.0, 1410.12)/plots/gap_width.npy", 'r')
# x1 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50873/plots/(796.04, 1146.95, 806.3),(685.0, 1143.98, 1414.78)/plots/gap_width.npy", 'r')
# x2 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50880/plots/(798.04, 1147.99, 811.83),(685.0, 1143.0, 1418.03)/plots/gap_width.npy", 'r')
# x3 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50867/plots/(793.0, 1143.07, 801.86),(682.61, 1141.0, 1410.12)/plots/mtf_cleft.npy", 'r')
# y1 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50873/plots/(796.04, 1146.95, 806.3),(685.0, 1143.98, 1414.78)/plots/mtf_cleft.npy", 'r')
# y2 = cPickle.load(f)
# f.close()
# f = open("/dls/tmp/jjl36382/50880/plots/(798.04, 1147.99, 811.83),(685.0, 1143.0, 1418.03)/plots/mtf_cleft.npy", 'r')
# y3 = cPickle.load(f)
# f.close()
#
# pl.plot(x1, y1, 'r', label = "53keV")
# pl.plot(x3, y3, 'g', label = "75keV")
# pl.plot(x2, y2, 'b', label = "130keV")
#
#
# pl.xlabel("Distance between spheres (pixels)")
# pl.ylabel("MTF %")
# pl.legend()
# pl.gca().invert_xaxis()
# pl.savefig("./median_0.tif")
# pl.show()
# #
|
suncycheng/intellij-community | refs/heads/master | python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_1/_pkg0_1_1_0/_pkg0_1_1_0_0/_mod0_1_1_0_0_1.py | 30 | name0_1_1_0_0_1_0 = None
name0_1_1_0_0_1_1 = None
name0_1_1_0_0_1_2 = None
name0_1_1_0_0_1_3 = None
name0_1_1_0_0_1_4 = None |
kidmaple/CoolWall | refs/heads/nios2 | user/python/Tools/i18n/pygettext.py | 3 | #! /usr/bin/env python
# Originally written by Barry Warsaw <[email protected]>
#
# minimally patched to make it even more xgettext compatible
# by Peter Funk <[email protected]>
# for selftesting
try:
import fintl
_ = fintl.gettext
except ImportError:
def _(s): return s
__doc__ = _("""pygettext -- Python equivalent of xgettext(1)
Many systems (Solaris, Linux, Gnu) provide extensive tools that ease the
internationalization of C programs. Most of these tools are independent of
the programming language and can be used from within Python programs. Martin
von Loewis' work[1] helps considerably in this regard.
There's one problem though; xgettext is the program that scans source code
looking for message strings, but it groks only C (or C++). Python introduces
a few wrinkles, such as dual quoting characters, triple quoted strings, and
raw strings. xgettext understands none of this.
Enter pygettext, which uses Python's standard tokenize module to scan Python
source code, generating .pot files identical to what GNU xgettext[2] generates
for C and C++ code. From there, the standard GNU tools can be used.
A word about marking Python strings as candidates for translation. GNU
xgettext recognizes the following keywords: gettext, dgettext, dcgettext, and
gettext_noop. But those can be a lot of text to include all over your code.
C and C++ have a trick: they use the C preprocessor. Most internationalized C
source includes a #define for gettext() to _() so that what has to be written
in the source is much less. Thus these are both translatable strings:
gettext("Translatable String")
_("Translatable String")
Python of course has no preprocessor so this doesn't work so well. Thus,
pygettext searches only for _() by default, but see the -k/--keyword flag
below for how to augment this.
[1] http://www.python.org/workshops/1997-10/proceedings/loewis.html
[2] http://www.gnu.org/software/gettext/gettext.html
NOTE: pygettext attempts to be option and feature compatible with GNU xgettext
where ever possible. However some options are still missing or are not fully
implemented. Also, xgettext's use of command line switches with option
arguments is broken, and in these cases, pygettext just defines additional
switches.
Usage: pygettext [options] inputfile ...
Options:
-a
--extract-all
Extract all strings
-d name
--default-domain=name
Rename the default output file from messages.pot to name.pot
-E
--escape
replace non-ASCII characters with octal escape sequences.
-h
--help
print this help message and exit
-k word
--keyword=word
Keywords to look for in addition to the default set, which are:
%(DEFAULTKEYWORDS)s
You can have multiple -k flags on the command line.
-K
--no-default-keywords
Disable the default set of keywords (see above). Any keywords
explicitly added with the -k/--keyword option are still recognized.
--no-location
Do not write filename/lineno location comments.
-n
--add-location
Write filename/lineno location comments indicating where each
extracted string is found in the source. These lines appear before
each msgid. The style of comments is controlled by the -S/--style
option. This is the default.
-S stylename
--style stylename
Specify which style to use for location comments. Two styles are
supported:
Solaris # File: filename, line: line-number
GNU #: filename:line
The style name is case insensitive. GNU style is the default.
-o filename
--output=filename
Rename the default output file from messages.pot to filename. If
filename is `-' then the output is sent to standard out.
-p dir
--output-dir=dir
Output files will be placed in directory dir.
-v
--verbose
Print the names of the files being processed.
-V
--version
Print the version of pygettext and exit.
-w columns
--width=columns
Set width of output to columns.
-x filename
--exclude-file=filename
        Specify a file that contains a list of strings that are not to be
extracted from the input files. Each string to be excluded must
appear on a line by itself in the file.
If `inputfile' is -, standard input is read.
""")
import os
import sys
import time
import getopt
import tokenize
__version__ = '1.1'
default_keywords = ['_']
DEFAULTKEYWORDS = ', '.join(default_keywords)
EMPTYSTRING = ''
# The normal pot-file header. msgmerge and EMACS' po-mode work better if
# it's there.
pot_header = _('''\
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR ORGANIZATION
# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
#
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\\n"
"PO-Revision-Date: %(time)s\\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\\n"
"Language-Team: LANGUAGE <[email protected]>\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset=CHARSET\\n"
"Content-Transfer-Encoding: ENCODING\\n"
"Generated-By: pygettext.py %(version)s\\n"
''')
def usage(code, msg=''):
print __doc__ % globals()
if msg:
print msg
sys.exit(code)
escapes = []
def make_escapes(pass_iso8859):
global escapes
if pass_iso8859:
# Allow iso-8859 characters to pass through so that e.g. 'msgid
# "Höhe"' would result not result in 'msgid "H\366he"'. Otherwise we
# escape any character outside the 32..126 range.
mod = 128
else:
mod = 256
for i in range(256):
if 32 <= (i % mod) <= 126:
escapes.append(chr(i))
else:
escapes.append("\\%03o" % i)
escapes[ord('\\')] = '\\\\'
escapes[ord('\t')] = '\\t'
escapes[ord('\r')] = '\\r'
escapes[ord('\n')] = '\\n'
escapes[ord('\"')] = '\\"'
def escape(s):
global escapes
s = list(s)
for i in range(len(s)):
s[i] = escapes[ord(s[i])]
return EMPTYSTRING.join(s)
def safe_eval(s):
# unwrap quotes, safely
return eval(s, {'__builtins__':{}}, {})
def normalize(s):
# This converts the various Python string types into a format that is
# appropriate for .po files, namely much closer to C style.
lines = s.split('\n')
if len(lines) == 1:
s = '"' + escape(s) + '"'
else:
if not lines[-1]:
del lines[-1]
lines[-1] = lines[-1] + '\n'
for i in range(len(lines)):
lines[i] = escape(lines[i])
lineterm = '\\n"\n"'
s = '""\n"' + lineterm.join(lines) + '"'
return s
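# Illustrative example (assumes make_escapes() has already populated the
# escape table): a multi-line message comes out in the usual C-style
# .po continuation form:
#
#   normalize('ab\ncd\n')  ->  '""\n"ab\\n"\n"cd\\n"'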
class TokenEater:
def __init__(self, options):
self.__options = options
self.__messages = {}
self.__state = self.__waiting
self.__data = []
self.__lineno = -1
def __call__(self, ttype, tstring, stup, etup, line):
# dispatch
self.__state(ttype, tstring, stup[0])
def __waiting(self, ttype, tstring, lineno):
if ttype == tokenize.NAME and tstring in self.__options.keywords:
self.__state = self.__keywordseen
def __keywordseen(self, ttype, tstring, lineno):
if ttype == tokenize.OP and tstring == '(':
self.__data = []
self.__lineno = lineno
self.__state = self.__openseen
else:
self.__state = self.__waiting
def __openseen(self, ttype, tstring, lineno):
if ttype == tokenize.OP and tstring == ')':
# We've seen the last of the translatable strings. Record the
# line number of the first line of the strings and update the list
# of messages seen. Reset state for the next batch. If there
# were no strings inside _(), then just ignore this entry.
if self.__data:
msg = EMPTYSTRING.join(self.__data)
                if msg not in self.__options.toexclude:
entry = (self.__curfile, self.__lineno)
linenos = self.__messages.get(msg)
if linenos is None:
self.__messages[msg] = [entry]
else:
linenos.append(entry)
self.__state = self.__waiting
elif ttype == tokenize.STRING:
self.__data.append(safe_eval(tstring))
        # TBD: should we warn if we see anything else?
def set_filename(self, filename):
self.__curfile = filename
def write(self, fp):
options = self.__options
timestamp = time.ctime(time.time())
# common header
try:
sys.stdout = fp
# The time stamp in the header doesn't have the same format
# as that generated by xgettext...
print pot_header % {'time': timestamp, 'version': __version__}
for k, v in self.__messages.items():
if not options.writelocations:
pass
# location comments are different b/w Solaris and GNU:
elif options.locationstyle == options.SOLARIS:
for filename, lineno in v:
d = {'filename': filename, 'lineno': lineno}
print _('# File: %(filename)s, line: %(lineno)d') % d
elif options.locationstyle == options.GNU:
# fit as many locations on one line, as long as the
                # resulting line length doesn't exceed 'options.width'
locline = '#:'
for filename, lineno in v:
d = {'filename': filename, 'lineno': lineno}
s = _(' %(filename)s:%(lineno)d') % d
if len(locline) + len(s) <= options.width:
locline = locline + s
else:
print locline
locline = "#:" + s
if len(locline) > 2:
print locline
# TBD: sorting, normalizing
print 'msgid', normalize(k)
print 'msgstr ""\n'
finally:
sys.stdout = sys.__stdout__
def main():
global default_keywords
try:
opts, args = getopt.getopt(
sys.argv[1:],
'ad:Ehk:Kno:p:S:Vvw:x:',
['extract-all', 'default-domain', 'escape', 'help',
'keyword=', 'no-default-keywords',
'add-location', 'no-location', 'output=', 'output-dir=',
'style=', 'verbose', 'version', 'width=', 'exclude-file=',
])
except getopt.error, msg:
usage(1, msg)
# for holding option values
class Options:
# constants
GNU = 1
SOLARIS = 2
# defaults
extractall = 0 # FIXME: currently this option has no effect at all.
escape = 0
keywords = []
outpath = ''
outfile = 'messages.pot'
writelocations = 1
locationstyle = GNU
verbose = 0
width = 78
excludefilename = ''
options = Options()
locations = {'gnu' : options.GNU,
'solaris' : options.SOLARIS,
}
# parse options
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-a', '--extract-all'):
options.extractall = 1
elif opt in ('-d', '--default-domain'):
options.outfile = arg + '.pot'
elif opt in ('-E', '--escape'):
options.escape = 1
elif opt in ('-k', '--keyword'):
options.keywords.append(arg)
elif opt in ('-K', '--no-default-keywords'):
default_keywords = []
elif opt in ('-n', '--add-location'):
options.writelocations = 1
elif opt in ('--no-location',):
options.writelocations = 0
elif opt in ('-S', '--style'):
options.locationstyle = locations.get(arg.lower())
if options.locationstyle is None:
usage(1, _('Invalid value for --style: %s') % arg)
elif opt in ('-o', '--output'):
options.outfile = arg
elif opt in ('-p', '--output-dir'):
options.outpath = arg
elif opt in ('-v', '--verbose'):
options.verbose = 1
elif opt in ('-V', '--version'):
print _('pygettext.py (xgettext for Python) %s') % __version__
sys.exit(0)
elif opt in ('-w', '--width'):
try:
options.width = int(arg)
except ValueError:
usage(1, _('--width argument must be an integer: %s') % arg)
elif opt in ('-x', '--exclude-file'):
options.excludefilename = arg
# calculate escapes
make_escapes(options.escape)
# calculate all keywords
options.keywords.extend(default_keywords)
# initialize list of strings to exclude
if options.excludefilename:
try:
fp = open(options.excludefilename)
options.toexclude = fp.readlines()
fp.close()
except IOError:
sys.stderr.write(_("Can't read --exclude-file: %s") %
options.excludefilename)
sys.exit(1)
else:
options.toexclude = []
# slurp through all the files
eater = TokenEater(options)
for filename in args:
if filename == '-':
if options.verbose:
print _('Reading standard input')
fp = sys.stdin
closep = 0
else:
if options.verbose:
print _('Working on %s') % filename
fp = open(filename)
closep = 1
try:
eater.set_filename(filename)
tokenize.tokenize(fp.readline, eater)
finally:
if closep:
fp.close()
# write the output
if options.outfile == '-':
fp = sys.stdout
closep = 0
else:
if options.outpath:
options.outfile = os.path.join(options.outpath, options.outfile)
fp = open(options.outfile, 'w')
closep = 1
try:
eater.write(fp)
finally:
if closep:
fp.close()
if __name__ == '__main__':
main()
# some more test strings
_(u'a unicode string')
|
santazhang/simple-rpc | refs/heads/master | pylib/simplerpc/server.py | 1 | import traceback
from simplerpc import _pyrpc
from simplerpc.marshal import Marshal
class MarshalWrap(object):
def __init__(self, f, input_types, output_types):
# f input: input_marshal (id only), f output: output_marshal (id only)
self.f = f
self.input_types = input_types
self.output_types = output_types
# def __del__(self):
# print "properly cleaned up!"
def __call__(self, input_marshal_id):
input_m = Marshal(id=input_marshal_id, should_release=False)
input_values = []
for input_ty in self.input_types:
input_values += input_m.read_obj(input_ty),
try:
output = self.f(*input_values)
except:
traceback.print_exc()
raise
if len(self.output_types) == 0:
# void rpc
return 0 # mark as a NULL reply
output_m = Marshal(should_release=False) # C++ code will release the marshal object
if len(self.output_types) == 1:
# single return value
output_m.write_obj(output, self.output_types[0])
else:
# multiple return values
for i in range(len(self.output_types)):
output_m.write_obj(output[i], self.output_types[i])
return output_m.id
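# Usage sketch (hypothetical, not part of the library): wrap a plain
# callable so the C++ core can invoke it through marshal ids. The type
# strings below are placeholders and must match the service's real IDL
# declarations.
#
#   add_handler = MarshalWrap(lambda a, b: a + b,
#                             ['rpc::i32', 'rpc::i32'],  # input types (assumed)
#                             ['rpc::i32'])              # output type (assumed)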
class Server(object):
def __init__(self, n_threads=1):
self.id = _pyrpc.init_server(n_threads)
self.func_ids = {} # rpc_id => func_ptr
def __del__(self):
all_rpc_ids = self.func_ids.keys()
for rpc_id in all_rpc_ids:
self.unreg(rpc_id)
_pyrpc.fini_server(self.id)
def __reg_func__(self, rpc_id, func, input_types, output_types):
rpc_func = MarshalWrap(func, input_types, output_types)
ret = _pyrpc.server_reg(self.id, rpc_id, rpc_func)
if ret != 0:
_pyrpc.helper_decr_ref(rpc_func)
else:
self.func_ids[rpc_id] = rpc_func
return ret
def enable_udp(self):
_pyrpc.server_enable_udp(self.id)
def reg_svc(self, svc):
svc.__reg_to__(self)
def unreg(self, rpc_id):
_pyrpc.server_unreg(self.id, rpc_id)
rpc_func = self.func_ids[rpc_id]
del self.func_ids[rpc_id]
_pyrpc.helper_decr_ref(rpc_func)
def start(self, addr):
return _pyrpc.server_start(self.id, addr)
|
myRisk/dynamicDNA | refs/heads/master | setuptools-15.2/setuptools/tests/test_build_ext.py | 151 | import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExt:
def test_get_ext_filename(self):
"""
Setuptools needs to give back the same
result as distutils, even if the fullname
is not in ext_map.
"""
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
conan-io/conan | refs/heads/develop | conans/test/integration/conan_v2/conanfile/test_package_method.py | 1 | import textwrap
from conans.test.utils.conan_v2_tests import ConanV2ModeTestCase
class ConanfileSourceTestCase(ConanV2ModeTestCase):
""" Conan v2: 'self.info' is not available in 'package()' """
def test_info_not_in_package(self):
# self.info is not available in 'package'
t = self.get_client()
conanfile = textwrap.dedent("""
from conans import ConanFile
class Recipe(ConanFile):
def package(self):
self.info.header_only()
""")
t.save({'conanfile.py': conanfile})
t.run('create . name/version@ -s os=Linux', assert_error=True)
self.assertIn("Conan v2 incompatible: 'self.info' access in package() method is deprecated", t.out)
|
Dandandan/wikiprogramming | refs/heads/master | jsrepl/build/extern/python/closured/lib/python2.7/encodings/iso8859_10.py | 593 | """ Python Character Mapping Codec iso8859_10 generated from 'MAPPINGS/ISO8859/8859-10.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-10',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u0112' # 0xA2 -> LATIN CAPITAL LETTER E WITH MACRON
u'\u0122' # 0xA3 -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u012a' # 0xA4 -> LATIN CAPITAL LETTER I WITH MACRON
u'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
u'\u0136' # 0xA6 -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\xa7' # 0xA7 -> SECTION SIGN
u'\u013b' # 0xA8 -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\u0110' # 0xA9 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u0160' # 0xAA -> LATIN CAPITAL LETTER S WITH CARON
u'\u0166' # 0xAB -> LATIN CAPITAL LETTER T WITH STROKE
u'\u017d' # 0xAC -> LATIN CAPITAL LETTER Z WITH CARON
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u016a' # 0xAE -> LATIN CAPITAL LETTER U WITH MACRON
u'\u014a' # 0xAF -> LATIN CAPITAL LETTER ENG
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
u'\u0113' # 0xB2 -> LATIN SMALL LETTER E WITH MACRON
u'\u0123' # 0xB3 -> LATIN SMALL LETTER G WITH CEDILLA
u'\u012b' # 0xB4 -> LATIN SMALL LETTER I WITH MACRON
u'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
u'\u0137' # 0xB6 -> LATIN SMALL LETTER K WITH CEDILLA
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\u013c' # 0xB8 -> LATIN SMALL LETTER L WITH CEDILLA
u'\u0111' # 0xB9 -> LATIN SMALL LETTER D WITH STROKE
u'\u0161' # 0xBA -> LATIN SMALL LETTER S WITH CARON
u'\u0167' # 0xBB -> LATIN SMALL LETTER T WITH STROKE
u'\u017e' # 0xBC -> LATIN SMALL LETTER Z WITH CARON
u'\u2015' # 0xBD -> HORIZONTAL BAR
u'\u016b' # 0xBE -> LATIN SMALL LETTER U WITH MACRON
u'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
u'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
u'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\u0168' # 0xD7 -> LATIN CAPITAL LETTER U WITH TILDE
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
u'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
u'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\u0169' # 0xF7 -> LATIN SMALL LETTER U WITH TILDE
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
u'\u0138' # 0xFF -> LATIN SMALL LETTER KRA
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
|
silviuapostu/project-euler-solutions | refs/heads/master | 25.py | 1 | # first Fibonacci number with n digits. Don't waste memory by storing the whole
# Fibonacci sequence, but only the last three members, since each new
# term needs just the sum of the two before it
def n_digit_fibo(n=1000):
last_fibs = [1, 1, 2]
ix = 3 # index of Fibo number we're about to test
while last_fibs[-1] / 10**(n-1) < 1:
last_fibs[-1] = last_fibs[-2] + last_fibs[-3]
last_fibs[-3] = last_fibs[-2]
last_fibs[-2] = last_fibs[-1]
ix += 1
return ix-1
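# Example usage: with this 1-based indexing the first 3-digit Fibonacci
# number is F(12) = 144, and n = 1000 reproduces the Project Euler answer.
if __name__ == '__main__':
    print n_digit_fibo(3)  # -> 12
    print n_digit_fibo()   # -> 4782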
|
yogiben/Sublime-AdvancedNewFile | refs/heads/master | advanced_new_file/commands/helper_commands.py | 4 | import sublime
import sublime_plugin
class AnfReplaceCommand(sublime_plugin.TextCommand):
def run(self, edit, content):
self.view.replace(edit, sublime.Region(0, self.view.size()), content)
class AdvancedNewFileCommand(sublime_plugin.WindowCommand):
def run(self, is_python=False, initial_path=None,
rename=False, rename_file=None):
args = {}
if rename:
args["is_python"] = is_python
args["initial_path"] = initial_path
args["rename_file"] = rename_file
self.window.run_command("advanced_new_file_move", args)
else:
args["is_python"] = is_python
args["initial_path"] = initial_path
self.window.run_command("advanced_new_file_new", args)
class AnfRemoveRegionContentAndRegionCommand(sublime_plugin.TextCommand):
def run(self, edit, region_key):
regions = self.view.get_regions(region_key)
for region in regions:
self.view.erase(edit, region)
self.view.erase_regions(region_key)
|
theju/f1oracle | refs/heads/master | race/context_processors.py | 1 | from .models import Driver, Constructor, Race, \
OverallDriverPredictionHistory, OverallConstructorPredictionHistory, \
OverallDriverPrediction, OverallConstructorPrediction, \
RaceDriverPrediction, RaceConstructorPrediction
import datetime
def race_context_processor(request):
if not request.user.is_authenticated():
return {
"today": datetime.date.today()
}
drivers = Driver.objects.all()
constructors = Constructor.objects.all()
races = Race.objects.all()
driver_predictions = OverallDriverPredictionHistory.objects.filter(user=request.user).count()
constructor_predictions = OverallConstructorPredictionHistory.objects.filter(user=request.user).count()
num_tries_remaining = {"driver": 3 - driver_predictions,
"constructor": 3 - constructor_predictions}
    # These querysets do not depend on the individual race, so fetch each
    # of them once; extending inside a loop over races only duplicated them
    race_driver_predictions = list(RaceDriverPrediction.objects.filter(user=request.user))
    race_constructor_predictions = list(RaceConstructorPrediction.objects.filter(user=request.user))
try:
overall_driver_prediction = OverallDriverPrediction.objects.get(user=request.user)
except OverallDriverPrediction.DoesNotExist:
overall_driver_prediction = None
try:
overall_constructor_prediction = OverallConstructorPrediction.objects.get(user=request.user)
except OverallConstructorPrediction.DoesNotExist:
overall_constructor_prediction = None
return {
"races": races,
"drivers": drivers,
"constructors": constructors,
"num_tries_remaining": num_tries_remaining,
"overall_driver_prediction": overall_driver_prediction,
"overall_constructor_prediction": overall_constructor_prediction,
"race_driver_predictions": race_driver_predictions,
"race_constructor_predictions": race_constructor_predictions,
"today": datetime.date.today()
}
|
MarvinBertin/Theano-Lights | refs/heads/master | models/draw_at_lstm1.py | 6 | import theano
import theano.tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d
from theano.tensor.shared_randomstreams import RandomStreams
import numpy as np
import scipy.io
import time
import sys
import logging
import copy
from toolbox import *
from modelbase import *
class Draw_at_lstm1(ModelULBase):
'''
Draw with Attention and LSTM (Scan version)
'''
def __init__(self, data, hp):
super(Draw_at_lstm1, self).__init__(self.__class__.__name__, data, hp)
self.sample_steps = True
self.n_h = 256
self.n_t = 32
self.n_zpt = 100
self.n_z = self.n_t * self.n_zpt
self.gates = 4
self.params = Parameters()
n_x = self.data['n_x']
n_h = self.n_h
n_z = self.n_z
n_zpt = self.n_zpt
n_t = self.n_t
gates = self.gates
scale = hp.init_scale
# Attention
read_n = 3
self.reader = AttentionDraw(self.data['shape_x'][0], self.data['shape_x'][1], read_n)
write_n = 3
self.writer = AttentionDraw(self.data['shape_x'][0], self.data['shape_x'][1], write_n)
if hp.load_model and os.path.isfile(self.filename):
self.params.load(self.filename)
else:
with self.params:
AR = shared_normal((n_h, self.reader.n_att_params), scale=scale)
AW_l = shared_normal((n_h, self.writer.n_att_params), scale=scale)
AW_w = shared_normal((n_h, self.writer.N**2), scale=scale)
baw_l = shared_zeros((self.writer.n_att_params,))
baw_w = shared_zeros((self.writer.N**2,))
W1 = shared_normal((self.reader.N**2 * 2 + n_h, n_h*gates), scale=scale)
W11 = shared_normal((n_h, n_h*gates), scale=scale)
W4 = shared_normal((n_zpt, n_h*gates), scale=scale)
W44 = shared_normal((n_h, n_h*gates), scale=scale)
b1 = shared_zeros((n_h*gates,))
b4 = shared_zeros((n_h*gates,))
b10_h = shared_zeros((n_h,))
b40_h = shared_zeros((n_h,))
b10_c = shared_zeros((n_h,))
b40_c = shared_zeros((n_h,))
W2 = shared_normal((n_h, n_zpt), scale=scale)
W3 = shared_normal((n_h, n_zpt), scale=scale)
b2 = shared_zeros((n_zpt,))
b3 = shared_zeros((n_zpt,))
ex0 = shared_zeros((n_x,))
def lstm(X, h, c, W, U, b, t):
g_on = T.dot(X,W) + T.dot(h,U) + b
i_on = T.nnet.sigmoid(g_on[:,:n_h])
f_on = T.nnet.sigmoid(g_on[:,n_h:2*n_h])
o_on = T.nnet.sigmoid(g_on[:,2*n_h:3*n_h])
c = f_on * c + i_on * T.tanh(g_on[:,3*n_h:])
h = o_on * T.tanh(c)
return h, c
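            # Gate layout assumed in the stacked weight matrices, as read
            # off the slices above: [input | forget | output | candidate],
            # each block n_h wide (gates = 4)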
def attreader(x, x_e, h_decoder, t, p):
l = T.dot(h_decoder, p.AR)
rx = self.reader.read(x, l)
rx_e = self.reader.read(x_e, l)
return concatenate([rx, rx_e, h_decoder], axis=1)
def attwriter(h_decoder, t, p):
w = T.dot(h_decoder, p.AW_w) + p.baw_w
l = T.dot(h_decoder, p.AW_l) + p.baw_l
c_update = self.writer.write(w, l)
return c_update
# Encoder
p = self.params
frnn = lstm
#x = binomial(self.X)
x = self.X
input_size = x.shape[0]
outputs_info = [T.zeros((input_size, p.ex0.shape[0])) + p.ex0,
0.0,
T.zeros((input_size, p.b10_h.shape[0])) + p.b10_h,
T.zeros((input_size, p.b40_h.shape[0])) + p.b40_h,
T.zeros((input_size, p.b10_c.shape[0])) + p.b10_c,
T.zeros((input_size, p.b40_c.shape[0])) + p.b40_c]
eps = srnd.normal((n_t, input_size, n_zpt), dtype=theano.config.floatX)
def stepFull(t, ex, log_qpz, h_encoder, h_decoder, c_encoder, c_decoder, x, eps):
x_e = x - T.nnet.sigmoid(ex)
r_x = attreader(x, x_e, h_decoder, t, p)
h_encoder, c_encoder = frnn(r_x, h_encoder, c_encoder, p.W1, p.W11, p.b1, t)
mu_encoder_t = T.dot(h_encoder, p.W2) + p.b2
log_sigma_encoder_t = 0.5*(T.dot(h_encoder, p.W3) + p.b3)
log_qpz += -0.5* T.sum(1 + 2*log_sigma_encoder_t - mu_encoder_t**2 - T.exp(2*log_sigma_encoder_t))
z = mu_encoder_t + eps[t]*T.exp(log_sigma_encoder_t)
h_decoder, c_decoder = frnn(z, h_decoder, c_decoder, p.W4, p.W44, p.b4, t)
ex += attwriter(h_decoder, t, p)
return ex, log_qpz, h_encoder, h_decoder, c_encoder, c_decoder
[lex, llog_qpz, _, _, _, _], _ = theano.scan(stepFull, n_steps=n_t, sequences=[T.arange(n_t)], non_sequences=[x, eps], outputs_info=outputs_info)
ex = lex[-1]
log_qpz = llog_qpz[-1]
pxz = T.nnet.sigmoid(ex)
log_pxz = T.nnet.binary_crossentropy(pxz, x).sum()
cost = log_pxz + log_qpz
# Generate
z = self.Z.reshape((-1, n_t, n_zpt), ndim=3)
input_size = z.shape[0]
outputs_info = [T.zeros((input_size, p.ex0.shape[0])) + p.ex0,
T.zeros((input_size, p.b40_h.shape[0])) + p.b40_h,
T.zeros((input_size, p.b40_c.shape[0])) + p.b40_c]
def stepGenerate(t, s_ex, s_h_decoder_h, s_h_decoder_c):
s_h_decoder_h, s_h_decoder_c = frnn(z[:,t,:], s_h_decoder_h, s_h_decoder_c, p.W4, p.W44, p.b4, t)
s_ex += attwriter(s_h_decoder_h, t, p)
return s_ex, s_h_decoder_h, s_h_decoder_c
[s_ex, _, _], _ = theano.scan(stepGenerate, n_steps=n_t, sequences=[T.arange(n_t)], outputs_info=outputs_info)
if self.sample_steps:
a_pxz = T.zeros((n_t + 1, input_size, n_x))
for t in xrange(n_t):
a_pxz = T.set_subtensor(a_pxz[t,:,:], T.nnet.sigmoid(s_ex[t]))
else:
a_pxz = T.zeros((1, input_size, n_x))
a_pxz = T.set_subtensor(a_pxz[-1,:,:], T.nnet.sigmoid(s_ex[-1]))
self.compile(log_pxz, log_qpz, cost, a_pxz)
|
melaniebeck/GZExpress | refs/heads/master | analysis/GZX_paper_figures.py | 1 | #!/usr/bin/env python -W ignore::DeprecationWarning
from __future__ import division
from simulation import Simulation
from astropy.table import Table, join, vstack
from argparse import ArgumentParser
import numpy as np
import pdb, sys
from datetime import *
import cPickle, glob
import swap
from GZX_SWAP_evaluation import generate_SWAP_eval_report, \
calculate_confusion_matrix, \
GZ2_label_SMOOTH_NOT
from GZX_paper_figure_functions import *
###############################################################################
# MAIN
###############################################################################
def main():
"""
This script makes ALL THE MUTHAFUCKIN FIGURES FOR MAH PAYPAH.
1. VOLUNTEER PROBABILITIES
NAME
plot_user_probabilities()
REQUIRES
swap bureau file and # of users to plot
2. VOTE DISTRIBUTIONS COMPARED TO GZ2
NAME
plot_vote_distributions()
REQUIRES
gz2_metadata and simulation to compare to
3. BASELINE SWAP SIMULATION COMPARED TO GZ2
NAME
plot_GZX_baseline()
REQUIRES
baseline simulation, evaluation ascii file for baseline run,
gz2_retired (cumulative retired subjects in GZ2)
NOTES
this plots the retired subject rate AND the corresponding
quality metrics ON THE SAME AXES
The eval file and the GZ2 retired subjects file must be
created in separate script: (generate_SWAP_eval_report)
4. VARIATIONS IN SWAP
NAME
plot_GZX_evaluation_spread()
plot_GZX_cumulative_retirement_spread()
REQUIRES
three simulations to compare (for spread in retirement)
three evaluation files to compare (for spread in eval)
NOTES
the eval files have to be created with generate_SWAP_eval_report
5. SWAP AND GZ2 DISAGREE
swap_gets_it_wrong()
6. MONEYPLOT
MONEYPLOT()
7. 1D MORPHOLOGY DISTRIBUTIONS
NAME
plot_morph_params_1D()
REQUIRES
"""
make_volunteer_probabilties_plot = False
make_subject_trajectory_plot = False
make_vote_distributions_plot = False
make_baseline_simulation_plot = False
make_swap_variations_plot = False
make_swap_gets_it_wrong_plot = False
make_moneyplot = True
make_morph_distributions_plot = False
make_roc_curves = False
calculate_GX_human_effort = False
survey = 'GZ2_sup_PLPD5_p5_flipfeature2b'
dir_tertiary = 'tertiary_simulation_output'
dir_sim_machine = 'sims_Machine/redo_with_correct_ell_morphs'
dir_sim_swap = 'sims_SWAP/S_PLPD5_p5_ff_norand/'
# Load up some GZ2 data
# -----------------------------------------------
gz2_metadata = Table.read('metadata_ground_truth_labels.fits')
if 'GZ2_deb_combo' not in gz2_metadata.colnames:
gz2_metadata['GZ2_raw_combo'] = GZ2_label_SMOOTH_NOT(bigfuckingtable,type='raw')
gz2_metadata.write('metadata_ground_truth_labels.fits', overwrite=True)
gz2_metadata['zooid'] = gz2_metadata['SDSS_id']
gz2_metadata['id'] = gz2_metadata['asset_id']
F = open('GZ2_cumulative_retired_subjects_expert.pickle','r')
gz2_cum_sub_retired = cPickle.load(F)
morph = Table.read("metadata_ground_truth_labels.fits")
    # pdb.set_trace()
# Load up BASELINE simulation
# ------------------------------------------------------
mid_name = 'sup_PLPD5_p5_flipfeature2b_norandom2'
#stuff = generate_SWAP_eval_report(mid_sim, gz2_metadata, outname=mid_name+'_raw_combo',
# write_file=True, gz_kind='raw_combo')
mid_eval2 = Table.read('{0}/GZX_evaluation_{1}.txt'.format(dir_tertiary,
mid_name+'_raw_combo'),
format='ascii')
mid_sim = Simulation(config='configfiles/update_sup_PLPD5_p5_flipfeature2b_norandom2.config',
directory=dir_sim_swap,
variety='feat_or_not')
""" MAKE VOLUNTEER PROBABILTIES PLOT """
if make_volunteer_probabilties_plot:
# Load up the SWAP Simulation AGENT BUREAU
picklename = '{0}/{1}_bureau.pickle'.format(dir_sim_swap,survey)
bureau = swap.read_pickle(picklename, 'bureau')
plot_user_probabilities(bureau, 200)
if make_subject_trajectory_plot:
# Load up the SWAP Simulation AGENT BUREAU
picklename = '{0}/{1}_collection.pickle'.format(dir_sim_swap,survey)
collection = swap.read_pickle(picklename, 'collection')
plot_subject_trajectories(collection, 200)
""" MAKE BASELINE SIMULATION PLOT """
if make_baseline_simulation_plot:
# BASELINE fig requires BASELINE Simulation,
# evaluation output for that sim,
# cumulative retirement for GZ2
plot_GZX_baseline(mid_sim, mid_eval2, gz2_cum_sub_retired)
""" MAKE MONEY PLOT """
if make_moneyplot:
outfile = '{}/{}'.format(dir_sim_machine,survey)
# this file made by analaze_GZX_simulation.py
filename = glob.glob('{}*_combo_analysis*.pickle'.format(outfile))
F = open(filename[0], 'rb')
combo_run = cPickle.load(F)
F.close()
# Load up the Machine bureau
F = open('{0}/{1}_MLbureau.pickle'.format(dir_sim_machine, survey),'rb')
MLbureau = cPickle.load(F)
F.close()
MONEYPLOT(92, mid_sim, mid_eval2, gz2_cum_sub_retired, combo_run, MLbureau, outfile=outfile)
""" MORPH DISTRIBUTIONS """
if make_morph_distributions_plot:
# Plotting FEAT vs NOT, FALSE POS & FALSE NEGs, RETIRED vs NOT RETIRED
# to do all that, need files that were created.... GZX_SWAP_eval?
filename = glob.glob('{}/*_machine_retired_subjects.fits'.format(dir_sim_machine))
machine_retired = Table.read(filename[0])
#machine_not_retired = Table.read('tertiary_simulation_output/{}_machine_not_retired_subjects.fits'.format(outfile))
plot_morph_params_1D(machine_retired, gz2_metadata, outfile=dir_sim_machine)
""" MAKE SWAP GETS IT WRONG PLOT """
if make_swap_gets_it_wrong_plot:
# Compare SWAP-retired subjects to various parameters in the GZ2 Main Catalog
bigfuckingtable = Table.read('../SpaceWarps/analysis/GZ2ASSETS_NAIR_MORPH_MAIN.fits')
gz2_bigfuckingtable = join(gz2_metadata, bigfuckingtable, keys='id')
all_retired = mid_sim.fetchCatalog(mid_sim.retiredFileList[-1])
gz2_baseline = join(gz2_bigfuckingtable, all_retired, keys='zooid')
tps2, fps2, tns2, fns2 = calculate_confusion_matrix(gz2_baseline[gz2_baseline['P']>0.3],
gz2_baseline[gz2_baseline['P']<0.3],
smooth_or_not=False, gz_kind='raw_combo')
correct = vstack([tps2, tns2])
#print len(correct)
swap_gets_it_wrong(fps2, fns2, correct)
""" MAKE VOTE DISTRIBUTION PLOT """
if make_vote_distributions_plot:
# Requires the Vote Distributions for GZ2 and those from the Simulation
plot_vote_distributions(gz2_metadata, mid_sim)
if calculate_GX_human_effort:
mlbureaufile = 'sims_Machine/redo_with_correct_ell_morphs/GZ2_sup_PLPD5_p5_flipfeature2b_MLbureau.pickle'
MLbureau = swap.read_pickle(mlbureaufile,'bureau')
machine_meta = 'sims_Machine/redo_with_correct_ell_morphs/GZ2_sup_PLPD5_p5_flipfeature2b_metadata.pickle'
all_subjects = swap.read_pickle(machine_meta, 'metadata').subjects
#subjects = all_subjects[all_subjects['retired_date']!='2016-09-10']
mclass = all_subjects[all_subjects['MLsample']=='mclas']
swaps = all_subjects[(all_subjects['MLsample']=='train') |
(all_subjects['MLsample']=='valid')]
catalog = mid_sim.fetchCatalog(mid_sim.retiredFileList[-1])
catalog['SDSS_id'] = catalog['zooid']
# How many machine-retired subjects would have been retired by SWAP anyway?
#swap_mach_retired = join(catalog, mclass, keys='SDSS_id')
swap_retired = join(catalog, swaps, keys='SDSS_id')
# Assume that only Human Effort came from training sample
effort = np.sum(swap_retired['Nclass'])
print "Human effort for GZX:", effort
# LOOK AT MOST IMPORTANT FEATURES FOR MACHINE
machine = MLbureau.member['RF_accuracy']
trainhist = machine.traininghistory
models = trainhist['Model']
for i, model in enumerate(models):
if i==0:
feature_importances = model.feature_importances_
else:
feature_importances = np.vstack([feature_importances,
model.feature_importances_])
labels = ['M$_{20}$', '$C$', '$1-b/a$', '$A$', '$G$']
fi = feature_importances
avg, std = [], []
for i in range(5):
avg.append(np.mean(fi[:,i]))
std.append(np.std(fi[:,i]))
avg = np.array(avg)
std = np.array(std)
labels = np.array(labels)
sort_indices = np.argsort(avg)
ind = np.arange(len(labels))
#pdb.set_trace()
fig = plt.figure(figsize=(11,8))
ax = fig.add_subplot(111)
rects1 = ax.bar(ind, avg[sort_indices], yerr=std[sort_indices],
color='red', edgecolor='black',
capsize=5, align='center')
ax.set_ylabel('Feature Importance')
ax.set_xticks(ind)
ax.set_xticklabels(labels[sort_indices])
ax.set_ylim(0, 0.45)
ax.set_yticks([0., .1, .2, .3, .4])
plt.savefig('RF_feature_importance_4paper.pdf', bbox_inches='tight')
plt.show()
#pdb.set_trace()
if make_roc_curves:
candidateFileList = mid_sim.fetchFileList(kind='candidate')
"""
# SWAP situation at ~30 days into simulation
candidates1 = mid_sim.fetchCatalog(candidateFileList[30])
rejected1 = mid_sim.fetchCatalog(mid_sim.rejectedFileList[30])
swap_subjects1 = np.concatenate([candidates1, rejected1])
subjects1 = join(gz2_metadata, swap_subjects1, keys='zooid')
# SWAP situation at ~60 days into simualtion
candidates2 = mid_sim.fetchCatalog(candidateFileList[60])
rejected2 = mid_sim.fetchCatalog(mid_sim.rejectedFileList[60])
swap_subjects2 = np.concatenate([candidates2, rejected2])
subjects2 = join(gz2_metadata, swap_subjects2, keys='zooid')
"""
# SWAP situation at the end of the siulation
candidates3 = mid_sim.fetchCatalog(candidateFileList[-1])
rejected3 = mid_sim.fetchCatalog(mid_sim.rejectedFileList[-1])
swap_subjects3 = np.concatenate([candidates3, rejected3])
subjects3 = join(gz2_metadata, swap_subjects3, keys='zooid')
subject_sets = [subjects3]#subjects1, subjects2,
plot_roc_curve(subject_sets, smooth_or_not=False, gz_kind='raw_combo', swap=True, outname=None)
""" MAKE SWAP VARIATIONS PLOT(S) """
if make_swap_variations_plot:
#"""
# Load up simulations varying subject PRIOR
# -------------------------------------------------------
low_p = 'sup_PLPD5_p2_flipfeature2_norand'
high_p = 'sup_PLPD5_p8_flipfeature2_norand'
p35 = 'sup_PLPD5_p35_flipfeature2_norand'
low_p_eval2 = Table.read('tertiary_simulation_output/GZX_evaluation_{0}.txt'.format(low_p+'_raw_combo'), format='ascii')
high_p_eval2 = Table.read('tertiary_simulation_output/GZX_evaluation_{0}.txt'.format(high_p+'_raw_combo'), format='ascii')
#p35_eval2 = Table.read('tertiary_simulation_output/GZX_evaluation_{0}.txt'.format(p35+'_raw_combo'), format='ascii')
low_p_sim = Simulation(config='configfiles/update_sup_PLPD5_p2_flipfeature2_norand.config',
directory='sims_SWAP/S_PLPD5_p2_ff_norand/',
variety='feat_or_not')
high_p_sim = Simulation(config='configfiles/update_sup_PLPD5_p8_flipfeature2_norand.config',
directory='sims_SWAP/S_PLPD5_p8_ff_norand/',
variety='feat_or_not')
#p35_sim = Simulation(config='configfiles/update_sup_PLPD5_p35_flipfeature2_norand.config',
# directory='sims_SWAP/S_PLPD5_p35_ff_norand/',
# variety='feat_or_not')
#"""
# Load up simulations for varying user PL/PD
# -------------------------------------------------------
low_plpd = 'sup_PLPD4_p5_flipfeature2_norand'
high_plpd = 'sup_PLPD6_p5_flipfeature2_norand'
low_plpd_eval2 = Table.read('tertiary_simulation_output/GZX_evaluation_{0}.txt'.format(low_plpd+'_raw_combo'), format='ascii')
high_plpd_eval2 = Table.read('tertiary_simulation_output/GZX_evaluation_{0}.txt'.format(high_plpd+'_raw_combo'), format='ascii')
low_plpd_sim = Simulation(config='configfiles/update_sup_PLPD4_p5_flipfeature2_norand.config',
directory='sims_SWAP/S_PLPD4_p5_ff_norand/',
variety='feat_or_not')
high_plpd_sim = Simulation(config='configfiles/update_sup_PLPD6_p5_flipfeature2_norand.config',
directory='sims_SWAP/S_PLPD6_p5_ff_norand/',
variety='feat_or_not')
#"""
# VARY PRIOR
fig = plt.figure(figsize=(11,16))
plt.rc('text', usetex=True)
gs = gridspec.GridSpec(2,1)
gs.update(wspace=0.05, hspace=0.01)
ax = fig.add_subplot(gs[0])
plot_GZX_evaluation_spread(92, low_p_eval2, mid_eval2, high_p_eval2,
outfile='compare_PLPD_4paper', ax=ax)
ax2 = fig.add_subplot(gs[1])
plot_GZX_cumulative_retirement_spread(92, low_p_sim, mid_sim, high_p_sim,
gz2_cum_sub_retired,
outfile='compare_prior_4paper', ax=ax2)
    fig.suptitle(r'$0.2 \le \mathrm{Subject~Prior} \le 0.8$', fontsize=30)
gs.tight_layout(fig, rect=[0, 0, 1, 0.97])
plt.savefig('GZX_eval_and_retirement_prior_spread_4paper_v2.pdf')
plt.show()
plt.close()
#"""
# -----------------------------------------------------------
# VARY PLPD
fig = plt.figure(figsize=(11,16))
plt.rc('text', usetex=True)
gs = gridspec.GridSpec(2,1)
gs.update(wspace=0.01, hspace=0.01)
ax = fig.add_subplot(gs[0])
plot_GZX_evaluation_spread(92, low_plpd_eval2, mid_eval2, high_plpd_eval2,
outfile='compare_PLPD_4paper', ax=ax)
ax2 = fig.add_subplot(gs[1])
plot_GZX_cumulative_retirement_spread(92, low_plpd_sim, mid_sim, high_plpd_sim,
gz2_cum_sub_retired,
outfile='compare_prior_4paper', ax=ax2)
fig.suptitle(r'$(0.4, 0.4) \le \mathrm{Confusion~Matrix} \le (0.6, 0.6)$', fontsize=30)
gs.tight_layout(fig, rect=[0, 0, 1, 0.97])
plt.savefig('GZX_eval_and_retirement_PLPD_spread_4paper_v2.pdf')
plt.show()
plt.close()
#"""
# These were created in order to compare the ORDER in which subjects
# were classified in different SWAP runs
#baseline_retired = mid_sim.fetchRetiredSubjectsByDate()
#low_plpd_retired = low_plpd_sim.fetchRetiredSubjectsByDate()
#high_plpd_retired = high_plpd_sim.fetchRetiredSubjectsByDate()
if __name__ == '__main__':
main() |
wtanaka/beam | refs/heads/master | sdks/python/apache_beam/io/gcp/internal/clients/bigquery/__init__.py | 25 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Common imports for generated bigquery client library."""
# pylint:disable=wildcard-import
import pkgutil
# Protect against environments where apitools library is not available.
# pylint: disable=wrong-import-order, wrong-import-position
try:
from apitools.base.py import *
from apache_beam.io.gcp.internal.clients.bigquery.bigquery_v2_client import *
from apache_beam.io.gcp.internal.clients.bigquery.bigquery_v2_messages import *
except ImportError:
pass
# pylint: enable=wrong-import-order, wrong-import-position
__path__ = pkgutil.extend_path(__path__, __name__)
|
mdaniel/intellij-community | refs/heads/master | python/testData/folding/collapseExpandDocCommentsTokenType.py | 11 | class Class:
"""Class doc
string"""
def __init__(self):
"""Docstring in init method
Addition information
"""
pass
def params(a, v):
"""
:param a:
:param v:
"""
pass
def empty():
"""this is doc string
this is the second line of it
"""
pass |
leiferikb/bitpop | refs/heads/master | src/tools/cr/cr/actions/gdb.py | 103 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import cr
class GdbDebugger(cr.Debugger):
"""An implementation of cr.Debugger that launches gdb."""
DETECTED = cr.Config('DETECTED')
@property
def enabled(self):
return (cr.LinuxPlatform.GetInstance().is_active and
self.DETECTED.Find('CR_GDB'))
def Invoke(self, targets, arguments):
for target in targets:
with target:
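        # The {CR_*} placeholders below are cr configuration variables,
        # expanded by cr.Host.Execute at launch time (CR_GDB is detected in
        # ClassInit below).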
cr.Host.Execute(
'{CR_GDB}', '--eval-command=run', '--args',
'{CR_BINARY}',
'{CR_RUN_ARGUMENTS}',
*arguments
)
def Attach(self, targets, arguments):
raise NotImplementedError('Attach not currently supported for gdb.')
@classmethod
def ClassInit(cls):
# Attempt to find a valid gdb on the path.
gdb_binaries = cr.Host.SearchPath('gdb')
if gdb_binaries:
cls.DETECTED.Set(CR_GDB=gdb_binaries[0])
|
uranusjr/django | refs/heads/master | django/db/backends/base/__init__.py | 12133432 | |
obi1kenobi/pyre | refs/heads/master | RemoteControlCollection/__init__.py | 12133432 | |
mattmccarthy11/vidly-development | refs/heads/master | mediadrop/bk/templates/__init__.py | 12133432 | |
caphrim007/ansible-modules-extras | refs/heads/devel | packaging/os/layman.py | 61 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Jakub Jirutka <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import shutil
from os import path
DOCUMENTATION = '''
---
module: layman
author: "Jakub Jirutka (@jirutka)"
version_added: "1.6"
short_description: Manage Gentoo overlays
description:
  - Uses Layman to manage additional repositories for the Portage package manager on Gentoo Linux.
    Please note that Layman must be installed on a managed node prior to using this module.
requirements:
- "python >= 2.6"
- layman python module
options:
name:
description:
- The overlay id to install, synchronize, or uninstall.
Use 'ALL' to sync all of the installed overlays (can be used only when C(state=updated)).
required: true
list_url:
description:
      - A URL of an alternative overlays list that defines the overlay to install.
        This list will be fetched and saved under C(${overlay_defs}/${name}.xml), where
        C(overlay_defs) is read from Layman's configuration.
required: false
state:
description:
- Whether to install (C(present)), sync (C(updated)), or uninstall (C(absent)) the overlay.
required: false
default: present
choices: [present, absent, updated]
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be
set to C(no) when no other option exists. Prior to 1.9.3 the code
defaulted to C(no).
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.9.3'
'''
EXAMPLES = '''
# Install the overlay 'mozilla' which is on the central overlays list.
- layman: name=mozilla
# Install the overlay 'cvut' from the specified alternative list.
- layman: name=cvut list_url=http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml
# Update (sync) the overlay 'cvut', or install if not installed yet.
- layman: name=cvut list_url=http://raw.github.com/cvut/gentoo-overlay/master/overlay.xml state=updated
# Update (sync) all of the installed overlays.
- layman: name=ALL state=updated
# Uninstall the overlay 'cvut'.
- layman: name=cvut state=absent
'''
USERAGENT = 'ansible-httpget'
try:
from layman.api import LaymanAPI
from layman.config import BareConfig
HAS_LAYMAN_API = True
except ImportError:
HAS_LAYMAN_API = False
class ModuleError(Exception): pass
def init_layman(config=None):
'''Returns the initialized ``LaymanAPI``.
:param config: the layman's configuration to use (optional)
'''
if config is None:
config = BareConfig(read_configfile=True, quietness=1)
return LaymanAPI(config)
def download_url(module, url, dest):
'''
:param url: the URL to download
:param dest: the absolute path of where to save the downloaded content to;
it must be writable and not a directory
:raises ModuleError
'''
# Hack to add params in the form that fetch_url expects
module.params['http_agent'] = USERAGENT
response, info = fetch_url(module, url)
if info['status'] != 200:
raise ModuleError("Failed to get %s: %s" % (url, info['msg']))
try:
with open(dest, 'w') as f:
shutil.copyfileobj(response, f)
except IOError, e:
raise ModuleError("Failed to write: %s" % str(e))
def install_overlay(module, name, list_url=None):
'''Installs the overlay repository. If not on the central overlays list,
then :list_url of an alternative list must be provided. The list will be
    fetched and saved under ``${overlay_defs}/${name}.xml`` (the location of
    ``overlay_defs`` is read from Layman's configuration).
:param name: the overlay id
:param list_url: the URL of the remote repositories list to look for the overlay
definition (optional, default: None)
:returns: True if the overlay was installed, or False if already exists
(i.e. nothing has changed)
:raises ModuleError
'''
# read Layman configuration
layman_conf = BareConfig(read_configfile=True)
layman = init_layman(layman_conf)
if layman.is_installed(name):
return False
if module.check_mode:
mymsg = 'Would add layman repo \'' + name + '\''
module.exit_json(changed=True, msg=mymsg)
if not layman.is_repo(name):
if not list_url:
raise ModuleError("Overlay '%s' is not on the list of known " \
"overlays and URL of the remote list was not provided." % name)
overlay_defs = layman_conf.get_option('overlay_defs')
dest = path.join(overlay_defs, name + '.xml')
download_url(module, list_url, dest)
# reload config
layman = init_layman()
if not layman.add_repos(name):
raise ModuleError(layman.get_errors())
return True
def uninstall_overlay(module, name):
'''Uninstalls the given overlay repository from the system.
:param name: the overlay id to uninstall
:returns: True if the overlay was uninstalled, or False if doesn't exist
(i.e. nothing has changed)
:raises ModuleError
'''
layman = init_layman()
if not layman.is_installed(name):
return False
if module.check_mode:
mymsg = 'Would remove layman repo \'' + name + '\''
module.exit_json(changed=True, msg=mymsg)
layman.delete_repos(name)
if layman.get_errors(): raise ModuleError(layman.get_errors())
return True
def sync_overlay(name):
'''Synchronizes the specified overlay repository.
:param name: the overlay repository id to sync
:raises ModuleError
'''
layman = init_layman()
if not layman.sync(name):
messages = [ str(item[1]) for item in layman.sync_results[2] ]
raise ModuleError(messages)
def sync_overlays():
'''Synchronize all of the installed overlays.
:raises ModuleError
'''
layman = init_layman()
for name in layman.get_installed():
sync_overlay(name)
def main():
# define module
module = AnsibleModule(
argument_spec = dict(
name = dict(required=True),
list_url = dict(aliases=['url']),
state = dict(default="present", choices=['present', 'absent', 'updated']),
validate_certs = dict(required=False, default=True, type='bool'),
),
supports_check_mode=True
)
if not HAS_LAYMAN_API:
module.fail_json(msg='Layman is not installed')
state, name, url = (module.params[key] for key in ['state', 'name', 'list_url'])
changed = False
try:
if state == 'present':
changed = install_overlay(module, name, url)
elif state == 'updated':
if name == 'ALL':
sync_overlays()
elif install_overlay(module, name, url):
changed = True
else:
sync_overlay(name)
else:
changed = uninstall_overlay(module, name)
except ModuleError, e:
module.fail_json(msg=e.message)
else:
module.exit_json(changed=changed, name=name)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
|
peacekeeper/indy-sdk | refs/heads/master | wrappers/python/tests/pool/test_open_pool_ledger.py | 2 | import pytest
from indy import pool, error
@pytest.mark.parametrize(
"pool_genesis_txn_count, pool_config",
[(2, None), (3, None), (4, None), (4, '{"timeout": 20}')])
@pytest.mark.asyncio
async def test_open_pool_ledger_works(pool_handle):
pass
@pytest.mark.asyncio
async def test_open_pool_ledger_works_for_twice(pool_name, pool_config, pool_handle):
with pytest.raises(error.PoolLedgerInvalidPoolHandle):
await pool.open_pool_ledger(pool_name, pool_config)
@pytest.mark.asyncio
async def test_open_pool_ledger_works_for_incompatible_protocol_version(pool_ledger_config, pool_name,
protocol_version):
await pool.set_protocol_version(1)
with pytest.raises(error.PoolIncompatibleProtocolVersion):
await pool.open_pool_ledger(pool_name, None)
await pool.set_protocol_version(protocol_version)
|
BJDev95/ardupilot | refs/heads/master | mk/PX4/Tools/genmsg/test/test_genmsg_command_line.py | 216 | # Software License Agreement (BSD License)
#
# Copyright (c) 2011, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
def test_includepath_to_dict():
from genmsg.command_line import includepath_to_dict
assert {} == includepath_to_dict([])
assert {'std_msgs': [ 'foo' ]} == includepath_to_dict(['std_msgs:foo'])
assert {'std_msgs': [ 'foo' ], 'bar_msgs': [ 'baz:colon' ]} == includepath_to_dict(['std_msgs:foo', 'bar_msgs:baz:colon'])
|
Hipo/django-sloop | refs/heads/master | test_app/urls.py | 1 | """test_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
try:
from django.urls import include
except ImportError:
from django.conf.urls import include
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/devices/', include('django_sloop.urls')),
]
|
vitan/django | refs/heads/master | django/contrib/auth/forms.py | 3 | from __future__ import unicode_literals
from django import forms
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.hashers import (
UNUSABLE_PASSWORD_PREFIX, identify_hasher,
)
from django.contrib.auth.models import User
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.core.mail import EmailMultiAlternatives
from django.forms.utils import flatatt
from django.template import loader
from django.utils.encoding import force_bytes
from django.utils.html import format_html, format_html_join
from django.utils.http import urlsafe_base64_encode
from django.utils.safestring import mark_safe
from django.utils.text import capfirst
from django.utils.translation import ugettext, ugettext_lazy as _
class ReadOnlyPasswordHashWidget(forms.Widget):
def render(self, name, value, attrs):
encoded = value
final_attrs = self.build_attrs(attrs)
if not encoded or encoded.startswith(UNUSABLE_PASSWORD_PREFIX):
summary = mark_safe("<strong>%s</strong>" % ugettext("No password set."))
else:
try:
hasher = identify_hasher(encoded)
except ValueError:
summary = mark_safe("<strong>%s</strong>" % ugettext(
"Invalid password format or unknown hashing algorithm."))
else:
summary = format_html_join('',
"<strong>{}</strong>: {} ",
((ugettext(key), value)
for key, value in hasher.safe_summary(encoded).items())
)
return format_html("<div{}>{}</div>", flatatt(final_attrs), summary)
class ReadOnlyPasswordHashField(forms.Field):
widget = ReadOnlyPasswordHashWidget
def __init__(self, *args, **kwargs):
kwargs.setdefault("required", False)
super(ReadOnlyPasswordHashField, self).__init__(*args, **kwargs)
def bound_data(self, data, initial):
# Always return initial because the widget doesn't
# render an input field.
return initial
def has_changed(self, initial, data):
return False
class UserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
password1 = forms.CharField(label=_("Password"),
widget=forms.PasswordInput)
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."))
class Meta:
model = User
fields = ("username",)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
password = ReadOnlyPasswordHashField(label=_("Password"),
help_text=_("Raw passwords are not stored, so there is no way to see "
"this user's password, but you can change the password "
"using <a href=\"password/\">this form</a>."))
class Meta:
model = User
fields = '__all__'
def __init__(self, *args, **kwargs):
super(UserChangeForm, self).__init__(*args, **kwargs)
f = self.fields.get('user_permissions', None)
if f is not None:
f.queryset = f.queryset.select_related('content_type')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class AuthenticationForm(forms.Form):
"""
Base class for authenticating users. Extend this to get a form that accepts
username/password logins.
"""
username = forms.CharField(max_length=254)
password = forms.CharField(label=_("Password"), widget=forms.PasswordInput)
error_messages = {
'invalid_login': _("Please enter a correct %(username)s and password. "
"Note that both fields may be case-sensitive."),
'inactive': _("This account is inactive."),
}
def __init__(self, request=None, *args, **kwargs):
"""
The 'request' parameter is set for custom auth use by subclasses.
The form data comes in via the standard 'data' kwarg.
"""
self.request = request
self.user_cache = None
super(AuthenticationForm, self).__init__(*args, **kwargs)
# Set the label for the "username" field.
UserModel = get_user_model()
self.username_field = UserModel._meta.get_field(UserModel.USERNAME_FIELD)
if self.fields['username'].label is None:
self.fields['username'].label = capfirst(self.username_field.verbose_name)
def clean(self):
username = self.cleaned_data.get('username')
password = self.cleaned_data.get('password')
if username and password:
self.user_cache = authenticate(username=username,
password=password)
if self.user_cache is None:
raise forms.ValidationError(
self.error_messages['invalid_login'],
code='invalid_login',
params={'username': self.username_field.verbose_name},
)
else:
self.confirm_login_allowed(self.user_cache)
return self.cleaned_data
def confirm_login_allowed(self, user):
"""
Controls whether the given User may log in. This is a policy setting,
independent of end-user authentication. This default behavior is to
allow login by active users, and reject login by inactive users.
If the given user cannot log in, this method should raise a
``forms.ValidationError``.
If the given user may log in, this method should return None.
"""
if not user.is_active:
raise forms.ValidationError(
self.error_messages['inactive'],
code='inactive',
)
def get_user_id(self):
if self.user_cache:
return self.user_cache.id
return None
def get_user(self):
return self.user_cache
class PasswordResetForm(forms.Form):
email = forms.EmailField(label=_("Email"), max_length=254)
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
"""
Sends a django.core.mail.EmailMultiAlternatives to `to_email`.
"""
subject = loader.render_to_string(subject_template_name, context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
body = loader.render_to_string(email_template_name, context)
email_message = EmailMultiAlternatives(subject, body, from_email, [to_email])
if html_email_template_name is not None:
html_email = loader.render_to_string(html_email_template_name, context)
email_message.attach_alternative(html_email, 'text/html')
email_message.send()
def get_users(self, email):
"""Given an email, return matching user(s) who should receive a reset.
This allows subclasses to more easily customize the default policies
that prevent inactive users and users with unusable passwords from
resetting their password.
"""
active_users = get_user_model()._default_manager.filter(
email__iexact=email, is_active=True)
return (u for u in active_users if u.has_usable_password())
def save(self, domain_override=None,
subject_template_name='registration/password_reset_subject.txt',
email_template_name='registration/password_reset_email.html',
use_https=False, token_generator=default_token_generator,
from_email=None, request=None, html_email_template_name=None):
"""
        Generates a one-use only link for resetting the password and sends
        it to the user.
"""
email = self.cleaned_data["email"]
for user in self.get_users(email):
if not domain_override:
current_site = get_current_site(request)
site_name = current_site.name
domain = current_site.domain
else:
site_name = domain = domain_override
context = {
'email': user.email,
'domain': domain,
'site_name': site_name,
'uid': urlsafe_base64_encode(force_bytes(user.pk)),
'user': user,
'token': token_generator.make_token(user),
'protocol': 'https' if use_https else 'http',
}
self.send_mail(subject_template_name, email_template_name,
context, from_email, user.email,
html_email_template_name=html_email_template_name)
class SetPasswordForm(forms.Form):
"""
    A form that lets a user set their password without entering the old
password
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
new_password1 = forms.CharField(label=_("New password"),
widget=forms.PasswordInput)
new_password2 = forms.CharField(label=_("New password confirmation"),
widget=forms.PasswordInput)
def __init__(self, user, *args, **kwargs):
self.user = user
super(SetPasswordForm, self).__init__(*args, **kwargs)
def clean_new_password2(self):
password1 = self.cleaned_data.get('new_password1')
password2 = self.cleaned_data.get('new_password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
self.user.set_password(self.cleaned_data['new_password1'])
if commit:
self.user.save()
return self.user
class PasswordChangeForm(SetPasswordForm):
"""
A form that lets a user change their password by entering their old
password.
"""
error_messages = dict(SetPasswordForm.error_messages, **{
'password_incorrect': _("Your old password was entered incorrectly. "
"Please enter it again."),
})
old_password = forms.CharField(label=_("Old password"),
widget=forms.PasswordInput)
field_order = ['old_password', 'new_password1', 'new_password2']
def clean_old_password(self):
"""
Validates that the old_password field is correct.
"""
old_password = self.cleaned_data["old_password"]
if not self.user.check_password(old_password):
raise forms.ValidationError(
self.error_messages['password_incorrect'],
code='password_incorrect',
)
return old_password
class AdminPasswordChangeForm(forms.Form):
"""
A form used to change the password of a user in the admin interface.
"""
error_messages = {
'password_mismatch': _("The two password fields didn't match."),
}
required_css_class = 'required'
password1 = forms.CharField(
label=_("Password"),
widget=forms.PasswordInput,
)
password2 = forms.CharField(
label=_("Password (again)"),
widget=forms.PasswordInput,
help_text=_("Enter the same password as above, for verification."),
)
def __init__(self, user, *args, **kwargs):
self.user = user
super(AdminPasswordChangeForm, self).__init__(*args, **kwargs)
def clean_password2(self):
password1 = self.cleaned_data.get('password1')
password2 = self.cleaned_data.get('password2')
if password1 and password2:
if password1 != password2:
raise forms.ValidationError(
self.error_messages['password_mismatch'],
code='password_mismatch',
)
return password2
def save(self, commit=True):
"""
Saves the new password.
"""
self.user.set_password(self.cleaned_data["password1"])
if commit:
self.user.save()
return self.user
def _get_changed_data(self):
data = super(AdminPasswordChangeForm, self).changed_data
for name in self.fields.keys():
if name not in data:
return []
return ['password']
changed_data = property(_get_changed_data)
|
jjs0sbw/CSPLN | refs/heads/master | apps/scaffolding/win/web2py/gluon/streamer.py | 15 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
| This file is part of the web2py Web Framework
| Copyrighted by Massimo Di Pierro <[email protected]>
| License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
Facilities to handle file streaming
------------------------------------
"""
import os
import stat
import time
import re
import errno
import rewrite
from gluon.http import HTTP
from gluon.contenttype import contenttype
regex_start_range = re.compile(r'\d+(?=\-)')
regex_stop_range = re.compile(r'(?<=\-)\d+')
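# e.g. for the header "Range: bytes=500-999", regex_start_range extracts
# '500' and regex_stop_range extracts '999'.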
DEFAULT_CHUNK_SIZE = 64 * 1024
def streamer(stream, chunk_size=DEFAULT_CHUNK_SIZE, bytes=None):
offset = 0
while bytes is None or offset < bytes:
        if bytes is not None and bytes - offset < chunk_size:
chunk_size = bytes - offset
data = stream.read(chunk_size)
length = len(data)
if not length:
break
else:
yield data
if length < chunk_size:
break
offset += length
stream.close()
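
# A minimal usage sketch (the path and consumer are hypothetical):
#
#     fp = open('/tmp/example.bin', 'rb')
#     for chunk in streamer(fp, chunk_size=DEFAULT_CHUNK_SIZE):
#         consume(chunk)  # e.g. write the chunk to a socket or response body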
def stream_file_or_304_or_206(
static_file,
chunk_size=DEFAULT_CHUNK_SIZE,
request=None,
headers={},
status=200,
error_message=None
):
if error_message is None:
error_message = rewrite.THREAD_LOCAL.routes.error_message % 'invalid request'
try:
open = file # this makes no sense but without it GAE cannot open files
fp = open(static_file)
except IOError, e:
if e[0] == errno.EISDIR:
raise HTTP(403, error_message, web2py_error='file is a directory')
elif e[0] == errno.EACCES:
raise HTTP(403, error_message, web2py_error='inaccessible file')
else:
raise HTTP(404, error_message, web2py_error='invalid file')
else:
fp.close()
stat_file = os.stat(static_file)
fsize = stat_file[stat.ST_SIZE]
modified = stat_file[stat.ST_MTIME]
mtime = time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(modified))
headers.setdefault('Content-Type', contenttype(static_file))
headers.setdefault('Last-Modified', mtime)
headers.setdefault('Pragma', 'cache')
headers.setdefault('Cache-Control', 'private')
    # if this is a normal response and not a response to an error page
if status == 200:
if request and request.env.http_if_modified_since == mtime:
raise HTTP(304, **{'Content-Type': headers['Content-Type']})
elif request and request.env.http_range:
start_items = regex_start_range.findall(request.env.http_range)
if not start_items:
start_items = [0]
stop_items = regex_stop_range.findall(request.env.http_range)
if not stop_items or int(stop_items[0]) > fsize - 1:
stop_items = [fsize - 1]
part = (int(start_items[0]), int(stop_items[0]), fsize)
bytes = part[1] - part[0] + 1
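            # e.g. "Range: bytes=0-499" on a 1000-byte file gives
            # part = (0, 499, 1000) and bytes = 500.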
try:
stream = open(static_file, 'rb')
except IOError, e:
if e[0] in (errno.EISDIR, errno.EACCES):
raise HTTP(403)
else:
raise HTTP(404)
stream.seek(part[0])
headers['Content-Range'] = 'bytes %i-%i/%i' % part
headers['Content-Length'] = '%i' % bytes
status = 206
# in all the other cases (not 304, not 206, but 200 or error page)
if status != 206:
enc = request.env.http_accept_encoding
        if enc and 'gzip' in enc and 'Content-Encoding' not in headers:
gzipped = static_file + '.gz'
if os.path.isfile(gzipped) and os.path.getmtime(gzipped) >= modified:
static_file = gzipped
fsize = os.path.getsize(gzipped)
headers['Content-Encoding'] = 'gzip'
headers['Vary'] = 'Accept-Encoding'
try:
stream = open(static_file, 'rb')
except IOError, e:
        # this had better not happen when returning an error page ;-)
if e[0] in (errno.EISDIR, errno.EACCES):
raise HTTP(403)
else:
raise HTTP(404)
headers['Content-Length'] = fsize
bytes = None
if request and request.env.web2py_use_wsgi_file_wrapper:
wrapped = request.env.wsgi_file_wrapper(stream, chunk_size)
else:
wrapped = streamer(stream, chunk_size=chunk_size, bytes=bytes)
raise HTTP(status, wrapped, **headers)
|
epheo/shaddock | refs/heads/master | setup.py | 1 | #!/usr/bin/env python
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr'],
pbr=True)
|
javierTerry/odoo | refs/heads/8.0 | addons/website_mail_group/models/mail_group.py | 321 | # -*- coding: utf-8 -*-
from openerp.osv import osv
from openerp import tools
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.addons.website.models.website import slug
class MailGroup(osv.Model):
_inherit = 'mail.group'
def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None):
res = super(MailGroup, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context)
group = self.browse(cr, uid, id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
headers = {}
if res.get('headers'):
try:
headers = eval(res['headers'])
except Exception:
pass
headers.update({
'List-Archive': '<%s/groups/%s>' % (base_url, slug(group)),
'List-Subscribe': '<%s/groups>' % (base_url),
'List-Unsubscribe': '<%s/groups?unsubscribe>' % (base_url,),
})
res['headers'] = repr(headers)
return res
class MailMail(osv.Model):
_inherit = 'mail.mail'
def send_get_mail_body(self, cr, uid, mail, partner=None, context=None):
""" Short-circuit parent method for mail groups, replace the default
footer with one appropriate for mailing-lists."""
if mail.model == 'mail.group' and mail.res_id:
# no super() call on purpose, no private links that could be quoted!
group = self.pool['mail.group'].browse(cr, uid, mail.res_id, context=context)
base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url')
vals = {
'maillist': _('Mailing-List'),
'post_to': _('Post to'),
'unsub': _('Unsubscribe'),
'mailto': 'mailto:%s@%s' % (group.alias_name, group.alias_domain),
'group_url': '%s/groups/%s' % (base_url, slug(group)),
'unsub_url': '%s/groups?unsubscribe' % (base_url,),
}
footer = """_______________________________________________
%(maillist)s: %(group_url)s
%(post_to)s: %(mailto)s
%(unsub)s: %(unsub_url)s
""" % vals
body = tools.append_content_to_html(mail.body, footer, container_tag='div')
return body
else:
return super(MailMail, self).send_get_mail_body(cr, uid, mail,
partner=partner,
context=context)
|
arpheno/django-rest-framework | refs/heads/master | rest_framework/serializers.py | 30 | """
Serializers and ModelSerializers are similar to Forms and ModelForms.
Unlike forms, they are not constrained to dealing with HTML output, and
form encoded input.
Serialization in REST framework is a two-phase process:
1. Serializers marshal between complex types like model instances, and
python primitives.
2. The process of marshalling between python primitives and request and
response content is handled by parsers and renderers.
"""
from __future__ import unicode_literals
from django.db import models
from django.db.models.fields import Field as DjangoModelField
from django.db.models.fields import FieldDoesNotExist
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from rest_framework.compat import DurationField as ModelDurationField
from rest_framework.compat import postgres_fields, unicode_to_repr
from rest_framework.utils import model_meta
from rest_framework.utils.field_mapping import (
ClassLookupDict, get_field_kwargs, get_nested_relation_kwargs,
get_relation_kwargs, get_url_kwargs
)
from rest_framework.utils.serializer_helpers import (
BindingDict, BoundField, NestedBoundField, ReturnDict, ReturnList
)
from rest_framework.validators import (
UniqueForDateValidator, UniqueForMonthValidator, UniqueForYearValidator,
UniqueTogetherValidator
)
# Note: We do the following so that users of the framework can use this style:
#
# example_field = serializers.CharField(...)
#
# This helps keep the separation between model fields, form fields, and
# serializer fields more explicit.
from rest_framework.fields import * # NOQA # isort:skip
from rest_framework.relations import * # NOQA # isort:skip
# We assume that 'validators' are intended for the child serializer,
# rather than the parent serializer.
LIST_SERIALIZER_KWARGS = (
'read_only', 'write_only', 'required', 'default', 'initial', 'source',
'label', 'help_text', 'style', 'error_messages', 'allow_empty',
'instance', 'data', 'partial', 'context', 'allow_null'
)
# BaseSerializer
# --------------
class BaseSerializer(Field):
"""
The BaseSerializer class provides a minimal class which may be used
for writing custom serializer implementations.
Note that we strongly restrict the ordering of operations/properties
that may be used on the serializer in order to enforce correct usage.
In particular, if a `data=` argument is passed then:
.is_valid() - Available.
.initial_data - Available.
.validated_data - Only available after calling `is_valid()`
.errors - Only available after calling `is_valid()`
.data - Only available after calling `is_valid()`
If a `data=` argument is not passed then:
.is_valid() - Not available.
.initial_data - Not available.
.validated_data - Not available.
.errors - Not available.
.data - Available.
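
    A sketched usage flow, assuming a hypothetical concrete subclass
    `CommentSerializer` and a `request` object in scope:

        serializer = CommentSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        instance = serializer.save()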
"""
def __init__(self, instance=None, data=empty, **kwargs):
self.instance = instance
if data is not empty:
self.initial_data = data
self.partial = kwargs.pop('partial', False)
self._context = kwargs.pop('context', {})
kwargs.pop('many', None)
super(BaseSerializer, self).__init__(**kwargs)
def __new__(cls, *args, **kwargs):
# We override this method in order to automagically create
# `ListSerializer` classes instead when `many=True` is set.
if kwargs.pop('many', False):
return cls.many_init(*args, **kwargs)
return super(BaseSerializer, cls).__new__(cls, *args, **kwargs)
@classmethod
def many_init(cls, *args, **kwargs):
"""
This method implements the creation of a `ListSerializer` parent
class when `many=True` is used. You can customize it if you need to
control which keyword arguments are passed to the parent, and
which are passed to the child.
Note that we're over-cautious in passing most arguments to both parent
and child classes in order to try to cover the general case. If you're
overriding this method you'll probably want something much simpler, eg:
@classmethod
def many_init(cls, *args, **kwargs):
kwargs['child'] = cls()
return CustomListSerializer(*args, **kwargs)
"""
child_serializer = cls(*args, **kwargs)
list_kwargs = {'child': child_serializer}
list_kwargs.update(dict([
(key, value) for key, value in kwargs.items()
if key in LIST_SERIALIZER_KWARGS
]))
meta = getattr(cls, 'Meta', None)
list_serializer_class = getattr(meta, 'list_serializer_class', ListSerializer)
return list_serializer_class(*args, **list_kwargs)
def to_internal_value(self, data):
raise NotImplementedError('`to_internal_value()` must be implemented.')
def to_representation(self, instance):
raise NotImplementedError('`to_representation()` must be implemented.')
def update(self, instance, validated_data):
raise NotImplementedError('`update()` must be implemented.')
def create(self, validated_data):
raise NotImplementedError('`create()` must be implemented.')
def save(self, **kwargs):
assert not hasattr(self, 'save_object'), (
'Serializer `%s.%s` has old-style version 2 `.save_object()` '
'that is no longer compatible with REST framework 3. '
'Use the new-style `.create()` and `.update()` methods instead.' %
(self.__class__.__module__, self.__class__.__name__)
)
assert hasattr(self, '_errors'), (
'You must call `.is_valid()` before calling `.save()`.'
)
assert not self.errors, (
'You cannot call `.save()` on a serializer with invalid data.'
)
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
validated_data = dict(
list(self.validated_data.items()) +
list(kwargs.items())
)
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
def is_valid(self, raise_exception=False):
assert not hasattr(self, 'restore_object'), (
'Serializer `%s.%s` has old-style version 2 `.restore_object()` '
'that is no longer compatible with REST framework 3. '
'Use the new-style `.create()` and `.update()` methods instead.' %
(self.__class__.__module__, self.__class__.__name__)
)
assert hasattr(self, 'initial_data'), (
'Cannot call `.is_valid()` as no `data=` keyword argument was '
'passed when instantiating the serializer instance.'
)
if not hasattr(self, '_validated_data'):
try:
self._validated_data = self.run_validation(self.initial_data)
except ValidationError as exc:
self._validated_data = {}
self._errors = exc.detail
else:
self._errors = {}
if self._errors and raise_exception:
raise ValidationError(self.errors)
return not bool(self._errors)
@property
def data(self):
if hasattr(self, 'initial_data') and not hasattr(self, '_validated_data'):
msg = (
'When a serializer is passed a `data` keyword argument you '
'must call `.is_valid()` before attempting to access the '
'serialized `.data` representation.\n'
'You should either call `.is_valid()` first, '
'or access `.initial_data` instead.'
)
raise AssertionError(msg)
if not hasattr(self, '_data'):
if self.instance is not None and not getattr(self, '_errors', None):
self._data = self.to_representation(self.instance)
elif hasattr(self, '_validated_data') and not getattr(self, '_errors', None):
self._data = self.to_representation(self.validated_data)
else:
self._data = self.get_initial()
return self._data
@property
def errors(self):
if not hasattr(self, '_errors'):
msg = 'You must call `.is_valid()` before accessing `.errors`.'
raise AssertionError(msg)
return self._errors
@property
def validated_data(self):
if not hasattr(self, '_validated_data'):
msg = 'You must call `.is_valid()` before accessing `.validated_data`.'
raise AssertionError(msg)
return self._validated_data
# Serializer & ListSerializer classes
# -----------------------------------
class SerializerMetaclass(type):
"""
This metaclass sets a dictionary named `_declared_fields` on the class.
Any instances of `Field` included as attributes on either the class
    or on any of its superclasses will be included in the
`_declared_fields` dictionary.
"""
@classmethod
def _get_declared_fields(cls, bases, attrs):
fields = [(field_name, attrs.pop(field_name))
for field_name, obj in list(attrs.items())
if isinstance(obj, Field)]
fields.sort(key=lambda x: x[1]._creation_counter)
# If this class is subclassing another Serializer, add that Serializer's
# fields. Note that we loop over the bases in *reverse*. This is necessary
# in order to maintain the correct order of fields.
for base in reversed(bases):
if hasattr(base, '_declared_fields'):
fields = list(base._declared_fields.items()) + fields
return OrderedDict(fields)
def __new__(cls, name, bases, attrs):
attrs['_declared_fields'] = cls._get_declared_fields(bases, attrs)
return super(SerializerMetaclass, cls).__new__(cls, name, bases, attrs)
def get_validation_error_detail(exc):
assert isinstance(exc, (ValidationError, DjangoValidationError))
if isinstance(exc, DjangoValidationError):
# Normally you should raise `serializers.ValidationError`
# inside your codebase, but we handle Django's validation
# exception class as well for simpler compat.
# Eg. Calling Model.clean() explicitly inside Serializer.validate()
return {
api_settings.NON_FIELD_ERRORS_KEY: list(exc.messages)
}
elif isinstance(exc.detail, dict):
# If errors may be a dict we use the standard {key: list of values}.
# Here we ensure that all the values are *lists* of errors.
return dict([
(key, value if isinstance(value, list) else [value])
for key, value in exc.detail.items()
])
elif isinstance(exc.detail, list):
# Errors raised as a list are non-field errors.
return {
api_settings.NON_FIELD_ERRORS_KEY: exc.detail
}
# Errors raised as a string are non-field errors.
return {
api_settings.NON_FIELD_ERRORS_KEY: [exc.detail]
}
@six.add_metaclass(SerializerMetaclass)
class Serializer(BaseSerializer):
default_error_messages = {
'invalid': _('Invalid data. Expected a dictionary, but got {datatype}.')
}
@property
def fields(self):
"""
A dictionary of {field_name: field_instance}.
"""
# `fields` is evaluated lazily. We do this to ensure that we don't
# have issues importing modules that use ModelSerializers as fields,
# even if Django's app-loading stage has not yet run.
if not hasattr(self, '_fields'):
self._fields = BindingDict(self)
for key, value in self.get_fields().items():
self._fields[key] = value
return self._fields
@cached_property
def _writable_fields(self):
return [
field for field in self.fields.values()
if (not field.read_only) or (field.default is not empty)
]
@cached_property
def _readable_fields(self):
return [
field for field in self.fields.values()
if not field.write_only
]
def get_fields(self):
"""
Returns a dictionary of {field_name: field_instance}.
"""
# Every new serializer is created with a clone of the field instances.
# This allows users to dynamically modify the fields on a serializer
# instance without affecting every other serializer class.
return copy.deepcopy(self._declared_fields)
def get_validators(self):
"""
Returns a list of validator callables.
"""
# Used by the lazily-evaluated `validators` property.
meta = getattr(self, 'Meta', None)
validators = getattr(meta, 'validators', None)
return validators[:] if validators else []
def get_initial(self):
if hasattr(self, 'initial_data'):
return OrderedDict([
(field_name, field.get_value(self.initial_data))
for field_name, field in self.fields.items()
if (field.get_value(self.initial_data) is not empty) and
not field.read_only
])
return OrderedDict([
(field.field_name, field.get_initial())
for field in self.fields.values()
if not field.read_only
])
def get_value(self, dictionary):
# We override the default field access in order to support
# nested HTML forms.
if html.is_html_input(dictionary):
return html.parse_html_dict(dictionary, prefix=self.field_name) or empty
return dictionary.get(self.field_name, empty)
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
        be coerced into an error dictionary with a 'non_field_errors' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=get_validation_error_detail(exc))
return value
def to_internal_value(self, data):
"""
Dict of native values <- Dict of primitive datatypes.
"""
if not isinstance(data, dict):
message = self.error_messages['invalid'].format(
datatype=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
})
ret = OrderedDict()
errors = OrderedDict()
fields = self._writable_fields
for field in fields:
validate_method = getattr(self, 'validate_' + field.field_name, None)
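            # e.g. a `validate_email` method defined on the serializer would
            # be picked up here and run after the field's own validation.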
primitive_value = field.get_value(data)
try:
validated_value = field.run_validation(primitive_value)
if validate_method is not None:
validated_value = validate_method(validated_value)
except ValidationError as exc:
errors[field.field_name] = exc.detail
except DjangoValidationError as exc:
errors[field.field_name] = list(exc.messages)
except SkipField:
pass
else:
set_value(ret, field.source_attrs, validated_value)
if errors:
raise ValidationError(errors)
return ret
def to_representation(self, instance):
"""
Object instance -> Dict of primitive datatypes.
"""
ret = OrderedDict()
fields = self._readable_fields
for field in fields:
try:
attribute = field.get_attribute(instance)
except SkipField:
continue
if attribute is None:
# We skip `to_representation` for `None` values so that
# fields do not have to explicitly deal with that case.
ret[field.field_name] = None
else:
ret[field.field_name] = field.to_representation(attribute)
return ret
def validate(self, attrs):
return attrs
def __repr__(self):
return unicode_to_repr(representation.serializer_repr(self, indent=1))
# The following are used for accessing `BoundField` instances on the
# serializer, for the purposes of presenting a form-like API onto the
# field values and field errors.
def __iter__(self):
for field in self.fields.values():
yield self[field.field_name]
def __getitem__(self, key):
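        # e.g. serializer['email'] returns a BoundField wrapping the field,
        # its current value and any validation error, for form-style
        # rendering in templates.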
field = self.fields[key]
value = self.data.get(key)
error = self.errors.get(key) if hasattr(self, '_errors') else None
if isinstance(field, Serializer):
return NestedBoundField(field, value, error)
return BoundField(field, value, error)
# Include a backlink to the serializer class on return objects.
# Allows renderers such as HTMLFormRenderer to get the full field info.
@property
def data(self):
ret = super(Serializer, self).data
return ReturnDict(ret, serializer=self)
@property
def errors(self):
ret = super(Serializer, self).errors
return ReturnDict(ret, serializer=self)
# There's some replication of `ListField` here,
# but that's probably better than obfuscating the call hierarchy.
class ListSerializer(BaseSerializer):
child = None
many = True
default_error_messages = {
'not_a_list': _('Expected a list of items but got type "{input_type}".'),
'empty': _('This list may not be empty.')
}
def __init__(self, *args, **kwargs):
self.child = kwargs.pop('child', copy.deepcopy(self.child))
self.allow_empty = kwargs.pop('allow_empty', True)
assert self.child is not None, '`child` is a required argument.'
assert not inspect.isclass(self.child), '`child` has not been instantiated.'
super(ListSerializer, self).__init__(*args, **kwargs)
self.child.bind(field_name='', parent=self)
def get_initial(self):
if hasattr(self, 'initial_data'):
return self.to_representation(self.initial_data)
return []
def get_value(self, dictionary):
"""
Given the input dictionary, return the field value.
"""
# We override the default field access in order to support
# lists in HTML forms.
if html.is_html_input(dictionary):
return html.parse_html_list(dictionary, prefix=self.field_name)
return dictionary.get(self.field_name, empty)
def run_validation(self, data=empty):
"""
We override the default `run_validation`, because the validation
performed by validators and the `.validate()` method should
        be coerced into an error dictionary with a 'non_field_errors' key.
"""
(is_empty_value, data) = self.validate_empty_values(data)
if is_empty_value:
return data
value = self.to_internal_value(data)
try:
self.run_validators(value)
value = self.validate(value)
assert value is not None, '.validate() should return the validated data'
except (ValidationError, DjangoValidationError) as exc:
raise ValidationError(detail=get_validation_error_detail(exc))
return value
def to_internal_value(self, data):
"""
List of dicts of native values <- List of dicts of primitive datatypes.
"""
if html.is_html_input(data):
data = html.parse_html_list(data)
if not isinstance(data, list):
message = self.error_messages['not_a_list'].format(
input_type=type(data).__name__
)
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
})
if not self.allow_empty and len(data) == 0:
message = self.error_messages['empty']
raise ValidationError({
api_settings.NON_FIELD_ERRORS_KEY: [message]
})
ret = []
errors = []
for item in data:
try:
validated = self.child.run_validation(item)
except ValidationError as exc:
errors.append(exc.detail)
else:
ret.append(validated)
errors.append({})
if any(errors):
raise ValidationError(errors)
return ret
def to_representation(self, data):
"""
List of object instances -> List of dicts of primitive datatypes.
"""
# Dealing with nested relationships, data can be a Manager,
        # so first get a queryset from the Manager if needed
iterable = data.all() if isinstance(data, models.Manager) else data
return [
self.child.to_representation(item) for item in iterable
]
def validate(self, attrs):
return attrs
def update(self, instance, validated_data):
raise NotImplementedError(
"Serializers with many=True do not support multiple update by "
"default, only multiple create. For updates it is unclear how to "
"deal with insertions and deletions. If you need to support "
"multiple update, use a `ListSerializer` class and override "
"`.update()` so you can specify the behavior exactly."
)
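
        # A sketch of the multiple-update override described above, assuming
        # each incoming item carries the instance primary key under 'id':
        #
        #     class BookListSerializer(ListSerializer):
        #         def update(self, instance, validated_data):
        #             book_mapping = {book.id: book for book in instance}
        #             data_mapping = {item['id']: item for item in validated_data}
        #             ret = []
        #             for book_id, data in data_mapping.items():
        #                 book = book_mapping.get(book_id)
        #                 if book is None:
        #                     ret.append(self.child.create(data))
        #                 else:
        #                     ret.append(self.child.update(book, data))
        #             return ret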
def create(self, validated_data):
return [
self.child.create(attrs) for attrs in validated_data
]
def save(self, **kwargs):
"""
Save and return a list of object instances.
"""
# Guard against incorrect use of `serializer.save(commit=False)`
assert 'commit' not in kwargs, (
"'commit' is not a valid keyword argument to the 'save()' method. "
"If you need to access data before committing to the database then "
"inspect 'serializer.validated_data' instead. "
"You can also pass additional keyword arguments to 'save()' if you "
"need to set extra attributes on the saved model instance. "
"For example: 'serializer.save(owner=request.user)'.'"
)
validated_data = [
dict(list(attrs.items()) + list(kwargs.items()))
for attrs in self.validated_data
]
if self.instance is not None:
self.instance = self.update(self.instance, validated_data)
assert self.instance is not None, (
'`update()` did not return an object instance.'
)
else:
self.instance = self.create(validated_data)
assert self.instance is not None, (
'`create()` did not return an object instance.'
)
return self.instance
def __repr__(self):
return unicode_to_repr(representation.list_repr(self, indent=1))
# Include a backlink to the serializer class on return objects.
# Allows renderers such as HTMLFormRenderer to get the full field info.
@property
def data(self):
ret = super(ListSerializer, self).data
return ReturnList(ret, serializer=self)
@property
def errors(self):
ret = super(ListSerializer, self).errors
if isinstance(ret, dict):
return ReturnDict(ret, serializer=self)
return ReturnList(ret, serializer=self)
# ModelSerializer & HyperlinkedModelSerializer
# --------------------------------------------
def raise_errors_on_nested_writes(method_name, serializer, validated_data):
"""
Give explicit errors when users attempt to pass writable nested data.
If we don't do this explicitly they'd get a less helpful error when
calling `.save()` on the serializer.
We don't *automatically* support these sorts of nested writes because
there are too many ambiguities to define a default behavior.
Eg. Suppose we have a `UserSerializer` with a nested profile. How should
we handle the case of an update, where the `profile` relationship does
not exist? Any of the following might be valid:
* Raise an application error.
* Silently ignore the nested part of the update.
* Automatically create a profile instance.
"""
# Ensure we don't have a writable nested field. For example:
#
# class UserSerializer(ModelSerializer):
# ...
# profile = ProfileSerializer()
assert not any(
isinstance(field, BaseSerializer) and
(key in validated_data) and
isinstance(validated_data[key], (list, dict))
for key, field in serializer.fields.items()
), (
        'The `.{method_name}()` method does not support writable nested '
'fields by default.\nWrite an explicit `.{method_name}()` method for '
'serializer `{module}.{class_name}`, or set `read_only=True` on '
'nested serializer fields.'.format(
method_name=method_name,
module=serializer.__class__.__module__,
class_name=serializer.__class__.__name__
)
)
# Ensure we don't have a writable dotted-source field. For example:
#
# class UserSerializer(ModelSerializer):
# ...
# address = serializer.CharField('profile.address')
assert not any(
'.' in field.source and
(key in validated_data) and
isinstance(validated_data[key], (list, dict))
for key, field in serializer.fields.items()
), (
'The `.{method_name}()` method does not support writable dotted-source '
'fields by default.\nWrite an explicit `.{method_name}()` method for '
'serializer `{module}.{class_name}`, or set `read_only=True` on '
'dotted-source serializer fields.'.format(
method_name=method_name,
module=serializer.__class__.__module__,
class_name=serializer.__class__.__name__
)
)
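# Illustrative sketch (hypothetical serializers, not part of the original
# file): either remedy suggested in the messages above avoids these
# assertions, e.g. marking the nested serializer read-only:
#
#     class UserSerializer(ModelSerializer):
#         profile = ProfileSerializer(read_only=True)
#
#         class Meta:
#             model = User
#             fields = ('username', 'profile')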
class ModelSerializer(Serializer):
"""
A `ModelSerializer` is just a regular `Serializer`, except that:
* A set of default fields are automatically populated.
* A set of default validators are automatically populated.
* Default `.create()` and `.update()` implementations are provided.
The process of automatically determining a set of serializer fields
based on the model fields is reasonably complex, but you almost certainly
don't need to dig into the implementation.
If the `ModelSerializer` class *doesn't* generate the set of fields that
you need you should either declare the extra/differing fields explicitly on
the serializer class, or simply use a `Serializer` class.
"""
serializer_field_mapping = {
models.AutoField: IntegerField,
models.BigIntegerField: IntegerField,
models.BooleanField: BooleanField,
models.CharField: CharField,
models.CommaSeparatedIntegerField: CharField,
models.DateField: DateField,
models.DateTimeField: DateTimeField,
models.DecimalField: DecimalField,
models.EmailField: EmailField,
models.Field: ModelField,
models.FileField: FileField,
models.FloatField: FloatField,
models.ImageField: ImageField,
models.IntegerField: IntegerField,
models.NullBooleanField: NullBooleanField,
models.PositiveIntegerField: IntegerField,
models.PositiveSmallIntegerField: IntegerField,
models.SlugField: SlugField,
models.SmallIntegerField: IntegerField,
models.TextField: CharField,
models.TimeField: TimeField,
models.URLField: URLField,
models.GenericIPAddressField: IPAddressField,
models.FilePathField: FilePathField,
}
if ModelDurationField is not None:
serializer_field_mapping[ModelDurationField] = DurationField
serializer_related_field = PrimaryKeyRelatedField
serializer_url_field = HyperlinkedIdentityField
serializer_choice_field = ChoiceField
# The field name for hyperlinked identity fields. Defaults to 'url'.
# You can modify this using the API setting.
#
    # Note that if you instead need to modify this on a per-serializer basis,
# you'll also need to ensure you update the `create` method on any generic
# views, to correctly handle the 'Location' response header for
# "HTTP 201 Created" responses.
url_field_name = api_settings.URL_FIELD_NAME
# Default `create` and `update` behavior...
def create(self, validated_data):
"""
We have a bit of extra checking around this in order to provide
descriptive messages when something goes wrong, but this method is
essentially just:
return ExampleModel.objects.create(**validated_data)
If there are many to many fields present on the instance then they
cannot be set until the model is instantiated, in which case the
implementation is like so:
example_relationship = validated_data.pop('example_relationship')
instance = ExampleModel.objects.create(**validated_data)
instance.example_relationship = example_relationship
return instance
The default implementation also does not handle nested relationships.
If you want to support writable nested relationships you'll need
to write an explicit `.create()` method.
"""
raise_errors_on_nested_writes('create', self, validated_data)
ModelClass = self.Meta.model
# Remove many-to-many relationships from validated_data.
# They are not valid arguments to the default `.create()` method,
# as they require that the instance has already been saved.
info = model_meta.get_field_info(ModelClass)
many_to_many = {}
for field_name, relation_info in info.relations.items():
if relation_info.to_many and (field_name in validated_data):
many_to_many[field_name] = validated_data.pop(field_name)
try:
instance = ModelClass.objects.create(**validated_data)
except TypeError as exc:
msg = (
'Got a `TypeError` when calling `%s.objects.create()`. '
'This may be because you have a writable field on the '
'serializer class that is not a valid argument to '
'`%s.objects.create()`. You may need to make the field '
'read-only, or override the %s.create() method to handle '
'this correctly.\nOriginal exception text was: %s.' %
(
ModelClass.__name__,
ModelClass.__name__,
self.__class__.__name__,
exc
)
)
raise TypeError(msg)
# Save many-to-many relationships after the instance is created.
if many_to_many:
for field_name, value in many_to_many.items():
setattr(instance, field_name, value)
return instance
def update(self, instance, validated_data):
raise_errors_on_nested_writes('update', self, validated_data)
# Simply set each attribute on the instance, and then save it.
# Note that unlike `.create()` we don't need to treat many-to-many
# relationships as being a special case. During updates we already
# have an instance pk for the relationships to be associated with.
for attr, value in validated_data.items():
setattr(instance, attr, value)
instance.save()
return instance
# Determine the fields to apply...
def get_fields(self):
"""
Return the dict of field names -> field instances that should be
used for `self.fields` when instantiating the serializer.
"""
assert hasattr(self, 'Meta'), (
'Class {serializer_class} missing "Meta" attribute'.format(
serializer_class=self.__class__.__name__
)
)
assert hasattr(self.Meta, 'model'), (
'Class {serializer_class} missing "Meta.model" attribute'.format(
serializer_class=self.__class__.__name__
)
)
if model_meta.is_abstract_model(self.Meta.model):
raise ValueError(
'Cannot use ModelSerializer with Abstract Models.'
)
declared_fields = copy.deepcopy(self._declared_fields)
model = getattr(self.Meta, 'model')
depth = getattr(self.Meta, 'depth', 0)
if depth is not None:
assert depth >= 0, "'depth' may not be negative."
assert depth <= 10, "'depth' may not be greater than 10."
# Retrieve metadata about fields & relationships on the model class.
info = model_meta.get_field_info(model)
field_names = self.get_field_names(declared_fields, info)
# Determine any extra field arguments and hidden fields that
# should be included
extra_kwargs = self.get_extra_kwargs()
extra_kwargs, hidden_fields = self.get_uniqueness_extra_kwargs(
field_names, declared_fields, extra_kwargs
)
# Determine the fields that should be included on the serializer.
fields = OrderedDict()
for field_name in field_names:
# If the field is explicitly declared on the class then use that.
if field_name in declared_fields:
fields[field_name] = declared_fields[field_name]
continue
# Determine the serializer field class and keyword arguments.
field_class, field_kwargs = self.build_field(
field_name, info, model, depth
)
# Include any kwargs defined in `Meta.extra_kwargs`
extra_field_kwargs = extra_kwargs.get(field_name, {})
field_kwargs = self.include_extra_kwargs(
field_kwargs, extra_field_kwargs
)
# Create the serializer field.
fields[field_name] = field_class(**field_kwargs)
# Add in any hidden fields.
fields.update(hidden_fields)
return fields
# Methods for determining the set of field names to include...
def get_field_names(self, declared_fields, info):
"""
Returns the list of all field names that should be created when
instantiating this serializer class. This is based on the default
set of fields, but also takes into account the `Meta.fields` or
`Meta.exclude` options if they have been specified.
"""
fields = getattr(self.Meta, 'fields', None)
exclude = getattr(self.Meta, 'exclude', None)
if fields and not isinstance(fields, (list, tuple)):
raise TypeError(
'The `fields` option must be a list or tuple. Got %s.' %
type(fields).__name__
)
if exclude and not isinstance(exclude, (list, tuple)):
raise TypeError(
'The `exclude` option must be a list or tuple. Got %s.' %
type(exclude).__name__
)
assert not (fields and exclude), (
"Cannot set both 'fields' and 'exclude' options on "
"serializer {serializer_class}.".format(
serializer_class=self.__class__.__name__
)
)
if fields is not None:
# Ensure that all declared fields have also been included in the
# `Meta.fields` option.
            # Do not require any fields that are declared on a parent class,
# in order to allow serializer subclasses to only include
# a subset of fields.
required_field_names = set(declared_fields)
for cls in self.__class__.__bases__:
required_field_names -= set(getattr(cls, '_declared_fields', []))
for field_name in required_field_names:
assert field_name in fields, (
"The field '{field_name}' was declared on serializer "
"{serializer_class}, but has not been included in the "
"'fields' option.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
return fields
# Use the default set of field names if `Meta.fields` is not specified.
fields = self.get_default_field_names(declared_fields, info)
if exclude is not None:
# If `Meta.exclude` is included, then remove those fields.
for field_name in exclude:
assert field_name in fields, (
"The field '{field_name}' was included on serializer "
"{serializer_class} in the 'exclude' option, but does "
"not match any model field.".format(
field_name=field_name,
serializer_class=self.__class__.__name__
)
)
fields.remove(field_name)
return fields
def get_default_field_names(self, declared_fields, model_info):
"""
Return the default list of field names that will be used if the
`Meta.fields` option is not specified.
"""
return (
[model_info.pk.name] +
list(declared_fields.keys()) +
list(model_info.fields.keys()) +
list(model_info.forward_relations.keys())
)
# Methods for constructing serializer fields...
def build_field(self, field_name, info, model_class, nested_depth):
"""
Return a two tuple of (cls, kwargs) to build a serializer field with.
"""
if field_name in info.fields_and_pk:
model_field = info.fields_and_pk[field_name]
return self.build_standard_field(field_name, model_field)
elif field_name in info.relations:
relation_info = info.relations[field_name]
if not nested_depth:
return self.build_relational_field(field_name, relation_info)
else:
return self.build_nested_field(field_name, relation_info, nested_depth)
elif hasattr(model_class, field_name):
return self.build_property_field(field_name, model_class)
elif field_name == self.url_field_name:
return self.build_url_field(field_name, model_class)
return self.build_unknown_field(field_name, model_class)
def build_standard_field(self, field_name, model_field):
"""
Create regular model fields.
"""
field_mapping = ClassLookupDict(self.serializer_field_mapping)
field_class = field_mapping[model_field]
field_kwargs = get_field_kwargs(field_name, model_field)
if 'choices' in field_kwargs:
# Fields with choices get coerced into `ChoiceField`
# instead of using their regular typed field.
field_class = self.serializer_choice_field
# Some model fields may introduce kwargs that would not be valid
# for the choice field. We need to strip these out.
# Eg. models.DecimalField(max_digits=3, decimal_places=1, choices=DECIMAL_CHOICES)
valid_kwargs = set((
'read_only', 'write_only',
'required', 'default', 'initial', 'source',
'label', 'help_text', 'style',
'error_messages', 'validators', 'allow_null', 'allow_blank',
'choices'
))
for key in list(field_kwargs.keys()):
if key not in valid_kwargs:
field_kwargs.pop(key)
if not issubclass(field_class, ModelField):
# `model_field` is only valid for the fallback case of
# `ModelField`, which is used when no other typed field
# matched to the model field.
field_kwargs.pop('model_field', None)
if not issubclass(field_class, CharField) and not issubclass(field_class, ChoiceField):
# `allow_blank` is only valid for textual fields.
field_kwargs.pop('allow_blank', None)
if postgres_fields and isinstance(model_field, postgres_fields.ArrayField):
# Populate the `child` argument on `ListField` instances generated
            # for the PostgreSQL-specific `ArrayField`.
child_model_field = model_field.base_field
child_field_class, child_field_kwargs = self.build_standard_field(
'child', child_model_field
)
field_kwargs['child'] = child_field_class(**child_field_kwargs)
return field_class, field_kwargs
def build_relational_field(self, field_name, relation_info):
"""
Create fields for forward and reverse relationships.
"""
field_class = self.serializer_related_field
field_kwargs = get_relation_kwargs(field_name, relation_info)
# `view_name` is only valid for hyperlinked relationships.
if not issubclass(field_class, HyperlinkedRelatedField):
field_kwargs.pop('view_name', None)
return field_class, field_kwargs
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(ModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
def build_property_field(self, field_name, model_class):
"""
Create a read only field for model methods and properties.
"""
field_class = ReadOnlyField
field_kwargs = {}
return field_class, field_kwargs
def build_url_field(self, field_name, model_class):
"""
Create a field representing the object's own URL.
"""
field_class = self.serializer_url_field
field_kwargs = get_url_kwargs(model_class)
return field_class, field_kwargs
def build_unknown_field(self, field_name, model_class):
"""
Raise an error on any unknown fields.
"""
raise ImproperlyConfigured(
'Field name `%s` is not valid for model `%s`.' %
(field_name, model_class.__name__)
)
def include_extra_kwargs(self, kwargs, extra_kwargs):
"""
Include any 'extra_kwargs' that have been included for this field,
possibly removing any incompatible existing keyword arguments.
"""
if extra_kwargs.get('read_only', False):
for attr in [
'required', 'default', 'allow_blank', 'allow_null',
'min_length', 'max_length', 'min_value', 'max_value',
'validators', 'queryset'
]:
kwargs.pop(attr, None)
if extra_kwargs.get('default') and kwargs.get('required') is False:
kwargs.pop('required')
if extra_kwargs.get('read_only', kwargs.get('read_only', False)):
extra_kwargs.pop('required', None) # Read only fields should always omit the 'required' argument.
kwargs.update(extra_kwargs)
return kwargs
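    # Illustrative sketch (hypothetical values): merging
    #     kwargs = {'required': True, 'max_length': 100}
    # with
    #     extra_kwargs = {'read_only': True}
    # first strips the now-incompatible 'required' and 'max_length'
    # arguments, leaving just {'read_only': True}.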
# Methods for determining additional keyword arguments to apply...
def get_extra_kwargs(self):
"""
Return a dictionary mapping field names to a dictionary of
additional keyword arguments.
"""
extra_kwargs = getattr(self.Meta, 'extra_kwargs', {})
read_only_fields = getattr(self.Meta, 'read_only_fields', None)
if read_only_fields is not None:
for field_name in read_only_fields:
kwargs = extra_kwargs.get(field_name, {})
kwargs['read_only'] = True
extra_kwargs[field_name] = kwargs
return extra_kwargs
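    # Illustrative sketch (hypothetical Meta options): declaring
    #     read_only_fields = ('account_name',)
    # is folded into the returned dict as
    #     {'account_name': {'read_only': True}}
    # merged on top of any existing `extra_kwargs` entry for that field.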
def get_uniqueness_extra_kwargs(self, field_names, declared_fields, extra_kwargs):
"""
Return any additional field options that need to be included as a
result of uniqueness constraints on the model. This is returned as
a two-tuple of:
('dict of updated extra kwargs', 'mapping of hidden fields')
"""
model = getattr(self.Meta, 'model')
model_fields = self._get_model_fields(
field_names, declared_fields, extra_kwargs
)
# Determine if we need any additional `HiddenField` or extra keyword
# arguments to deal with `unique_for` dates that are required to
# be in the input data in order to validate it.
unique_constraint_names = set()
for model_field in model_fields.values():
# Include each of the `unique_for_*` field names.
unique_constraint_names |= set([
model_field.unique_for_date,
model_field.unique_for_month,
model_field.unique_for_year
])
unique_constraint_names -= set([None])
# Include each of the `unique_together` field names,
# so long as all the field names are included on the serializer.
for parent_class in [model] + list(model._meta.parents.keys()):
for unique_together_list in parent_class._meta.unique_together:
if set(field_names).issuperset(set(unique_together_list)):
unique_constraint_names |= set(unique_together_list)
# Now we have all the field names that have uniqueness constraints
# applied, we can add the extra 'required=...' or 'default=...'
# arguments that are appropriate to these fields, or add a `HiddenField` for it.
hidden_fields = {}
uniqueness_extra_kwargs = {}
for unique_constraint_name in unique_constraint_names:
            # Get the model field that is referred to.
unique_constraint_field = model._meta.get_field(unique_constraint_name)
if getattr(unique_constraint_field, 'auto_now_add', None):
default = CreateOnlyDefault(timezone.now)
elif getattr(unique_constraint_field, 'auto_now', None):
default = timezone.now
elif unique_constraint_field.has_default():
default = unique_constraint_field.default
else:
default = empty
if unique_constraint_name in model_fields:
# The corresponding field is present in the serializer
if default is empty:
uniqueness_extra_kwargs[unique_constraint_name] = {'required': True}
else:
uniqueness_extra_kwargs[unique_constraint_name] = {'default': default}
elif default is not empty:
                # The corresponding field is not present in the
# serializer. We have a default to use for it, so
# add in a hidden field that populates it.
hidden_fields[unique_constraint_name] = HiddenField(default=default)
# Update `extra_kwargs` with any new options.
for key, value in uniqueness_extra_kwargs.items():
if key in extra_kwargs:
extra_kwargs[key].update(value)
else:
extra_kwargs[key] = value
return extra_kwargs, hidden_fields
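    # Illustrative sketch (hypothetical model): given
    #     slug = models.SlugField(unique_for_date='published')
    # the 'published' field name is collected above. If that field is on the
    # serializer it becomes required (or gains the model default); if it is
    # absent but has a usable default, a `HiddenField(default=...)` is
    # generated for it instead.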
def _get_model_fields(self, field_names, declared_fields, extra_kwargs):
"""
Returns all the model fields that are being mapped to by fields
on the serializer class.
Returned as a dict of 'model field name' -> 'model field'.
Used internally by `get_uniqueness_field_options`.
"""
model = getattr(self.Meta, 'model')
model_fields = {}
for field_name in field_names:
if field_name in declared_fields:
# If the field is declared on the serializer
field = declared_fields[field_name]
source = field.source or field_name
else:
try:
source = extra_kwargs[field_name]['source']
except KeyError:
source = field_name
if '.' in source or source == '*':
# Model fields will always have a simple source mapping,
# they can't be nested attribute lookups.
continue
try:
field = model._meta.get_field(source)
if isinstance(field, DjangoModelField):
model_fields[source] = field
except FieldDoesNotExist:
pass
return model_fields
# Determine the validators to apply...
def get_validators(self):
"""
Determine the set of validators to use when instantiating serializer.
"""
# If the validators have been declared explicitly then use that.
validators = getattr(getattr(self, 'Meta', None), 'validators', None)
if validators is not None:
return validators[:]
# Otherwise use the default set of validators.
return (
self.get_unique_together_validators() +
self.get_unique_for_date_validators()
)
def get_unique_together_validators(self):
"""
        Determine a default set of validators for any unique_together constraints.
"""
model_class_inheritance_tree = (
[self.Meta.model] +
list(self.Meta.model._meta.parents.keys())
)
        # The field names we're passing through here only include fields
# which may map onto a model field. Any dotted field name lookups
# cannot map to a field, and must be a traversal, so we're not
# including those.
field_names = set([
field.source for field in self.fields.values()
if (field.source != '*') and ('.' not in field.source)
])
        # Note that we make sure to check `unique_together` both on the
        # base model class and on any parent classes.
validators = []
for parent_class in model_class_inheritance_tree:
for unique_together in parent_class._meta.unique_together:
if field_names.issuperset(set(unique_together)):
validator = UniqueTogetherValidator(
queryset=parent_class._default_manager,
fields=unique_together
)
validators.append(validator)
return validators
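    # Illustrative sketch (hypothetical model): with
    #     unique_together = (('list', 'position'),)
    # and both field names present on the serializer, the loop above yields
    # a single UniqueTogetherValidator(queryset=Model._default_manager,
    # fields=('list', 'position')).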
def get_unique_for_date_validators(self):
"""
        Determine a default set of validators for the following constraints:
* unique_for_date
* unique_for_month
* unique_for_year
"""
info = model_meta.get_field_info(self.Meta.model)
default_manager = self.Meta.model._default_manager
field_names = [field.source for field in self.fields.values()]
validators = []
for field_name, field in info.fields_and_pk.items():
if field.unique_for_date and field_name in field_names:
validator = UniqueForDateValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_date
)
validators.append(validator)
if field.unique_for_month and field_name in field_names:
validator = UniqueForMonthValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_month
)
validators.append(validator)
if field.unique_for_year and field_name in field_names:
validator = UniqueForYearValidator(
queryset=default_manager,
field=field_name,
date_field=field.unique_for_year
)
validators.append(validator)
return validators
if hasattr(models, 'UUIDField'):
ModelSerializer.serializer_field_mapping[models.UUIDField] = UUIDField
# IPAddressField is deprecated in Django
if hasattr(models, 'IPAddressField'):
ModelSerializer.serializer_field_mapping[models.IPAddressField] = IPAddressField
if postgres_fields:
class CharMappingField(DictField):
child = CharField(allow_blank=True)
ModelSerializer.serializer_field_mapping[postgres_fields.HStoreField] = CharMappingField
ModelSerializer.serializer_field_mapping[postgres_fields.ArrayField] = ListField
class HyperlinkedModelSerializer(ModelSerializer):
"""
A type of `ModelSerializer` that uses hyperlinked relationships instead
of primary key relationships. Specifically:
* A 'url' field is included instead of the 'id' field.
* Relationships to other instances are hyperlinks, instead of primary keys.
"""
serializer_related_field = HyperlinkedRelatedField
def get_default_field_names(self, declared_fields, model_info):
"""
Return the default list of field names that will be used if the
`Meta.fields` option is not specified.
"""
return (
[self.url_field_name] +
list(declared_fields.keys()) +
list(model_info.fields.keys()) +
list(model_info.forward_relations.keys())
)
def build_nested_field(self, field_name, relation_info, nested_depth):
"""
Create nested fields for forward and reverse relationships.
"""
class NestedSerializer(HyperlinkedModelSerializer):
class Meta:
model = relation_info.related_model
depth = nested_depth - 1
field_class = NestedSerializer
field_kwargs = get_nested_relation_kwargs(relation_info)
return field_class, field_kwargs
|
unicef/un-partner-portal | refs/heads/develop | backend/unpp_api/apps/partner/migrations/0034_auto_20171114_0917.py | 1 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-11-14 09:17
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('partner', '0033_partnerotherinfo_org_logo_thumbnail'),
]
operations = [
migrations.AlterField(
model_name='partnermailingaddress',
name='org_email',
field=models.EmailField(blank=True, max_length=254, null=True),
),
migrations.AlterField(
model_name='partnerotherinfo',
name='info_to_share',
field=models.CharField(blank=True, max_length=200, null=True),
),
]
|
bright-sparks/chromium-spacewalk | refs/heads/master | tools/idl_parser/idl_node.py | 125 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
#
# IDL Node
#
# IDL Node defines the IDLAttribute and IDLNode objects which are constructed
# by the parser as it processes the various 'productions'. The IDLAttribute
# objects are assigned to the IDLNode's property dictionary instead of being
# applied as children of the IDLNodes, so they do not exist in the final tree.
# The AST of IDLNodes is the output from the parsing stage and will be used
# as the source data by the various generators.
#
#
# CopyToList
#
# Takes an input item, list, or None, and returns a new list of that set.
def CopyToList(item):
# If the item is 'Empty' make it an empty list
if not item:
item = []
# If the item is not a list
if type(item) is not type([]):
item = [item]
# Make a copy we can modify
return list(item)
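#
# Illustrative behavior (hypothetical values, not from the original file):
#
#   CopyToList(None)    # -> []
#   CopyToList(5)       # -> [5]
#   CopyToList([1, 2])  # -> a new, independent copy of [1, 2]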
# IDLSearch
#
# A simple visitor object used when traversing the AST. Subclasses override
# the Enter/Exit hooks to collect information; `depth` tracks the current
# tree depth during traversal.
#
class IDLSearch(object):
def __init__(self):
self.depth = 0
def Enter(self, node):
pass
def Exit(self, node):
pass
# IDLAttribute
#
# A temporary object used by the parsing process to hold an Extended Attribute
# which will be passed as a child to a standard IDLNode.
#
class IDLAttribute(object):
def __init__(self, name, value):
self._cls = 'Property'
self.name = name
self.value = value
def __str__(self):
return '%s=%s' % (self.name, self.value)
def GetClass(self):
return self._cls
#
# IDLNode
#
# This class implements the AST tree, providing the associations between
# parents and children. It also contains a namepsace and propertynode to
# allow for look-ups. IDLNode is derived from IDLRelease, so it is
# version aware.
#
class IDLNode(object):
def __init__(self, cls, filename, lineno, pos, children=None):
self._cls = cls
self._properties = {
'ERRORS' : [],
'WARNINGS': [],
'FILENAME': filename,
'LINENO' : lineno,
'POSSITION' : pos,
}
self._children = []
self._parent = None
self.AddChildren(children)
#
#
#
# Return a string representation of this node
def __str__(self):
name = self.GetProperty('NAME','')
return '%s(%s)' % (self._cls, name)
def GetLogLine(self, msg):
filename, lineno = self.GetFileAndLine()
return '%s(%d) : %s\n' % (filename, lineno, msg)
# Log an error for this object
def Error(self, msg):
self.GetProperty('ERRORS').append(msg)
sys.stderr.write(self.GetLogLine('error: ' + msg))
# Log a warning for this object
def Warning(self, msg):
self.GetProperty('WARNINGS').append(msg)
sys.stdout.write(self.GetLogLine('warning:' + msg))
# Return file and line number for where node was defined
def GetFileAndLine(self):
return self.GetProperty('FILENAME'), self.GetProperty('LINENO')
def GetClass(self):
return self._cls
def GetName(self):
return self.GetProperty('NAME')
def GetParent(self):
return self._parent
def Traverse(self, search, filter_nodes):
if self._cls in filter_nodes:
return ''
search.Enter(self)
search.depth += 1
for child in self._children:
child.Traverse(search, filter_nodes)
search.depth -= 1
search.Exit(self)
def Tree(self, filter_nodes=None, accept_props=None):
class DumpTreeSearch(IDLSearch):
def __init__(self, props):
IDLSearch.__init__(self)
self.out = []
self.props = props
def Enter(self, node):
tab = ''.rjust(self.depth * 2)
self.out.append(tab + str(node))
if self.props:
proplist = []
for key, value in node.GetProperties().iteritems():
if key in self.props:
proplist.append(tab + ' %s: %s' % (key, str(value)))
if proplist:
self.out.append(tab + ' PROPERTIES')
self.out.extend(proplist)
    if filter_nodes is None:
filter_nodes = ['Comment', 'Copyright']
search = DumpTreeSearch(accept_props)
self.Traverse(search, filter_nodes)
return search.out
#
# Search related functions
#
# Check if node is of a given type
def IsA(self, *typelist):
if self._cls in typelist:
return True
return False
# Get a list of all children
def GetChildren(self):
return self._children
def GetListOf(self, *keys):
out = []
for child in self.GetChildren():
if child.GetClass() in keys:
out.append(child)
return out
def GetOneOf(self, *keys):
out = self.GetListOf(*keys)
if out:
return out[0]
return None
def AddChildren(self, children):
children = CopyToList(children)
for child in children:
if not child:
continue
if type(child) == IDLAttribute:
self.SetProperty(child.name, child.value)
continue
if type(child) == IDLNode:
child._parent = self
self._children.append(child)
continue
raise RuntimeError('Adding child of type %s.\n' % type(child).__name__)
#
# Property Functions
#
def SetProperty(self, name, val):
self._properties[name] = val
def GetProperty(self, name, default=None):
return self._properties.get(name, default)
def GetProperties(self):
return self._properties
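# Minimal usage sketch (hypothetical values; the parser normally constructs
# these while processing productions):
#
#   name = IDLAttribute('NAME', 'Foo')
#   node = IDLNode('Interface', 'test.idl', 12, 0, children=[name])
#   assert node.GetName() == 'Foo'
#   for line in node.Tree():
#     print line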
|
draperjames/bokeh | refs/heads/master | bokeh/sampledata/autompg.py | 13 | """
"""
from __future__ import absolute_import
from bokeh.util.dependencies import import_required
pd = import_required('pandas',
'autompg sample data requires Pandas (http://pandas.pydata.org) to be installed')
from os.path import dirname, join
autompg = pd.read_csv(join(dirname(__file__), 'auto-mpg.csv'))
|
simshadows/discord-mentionbot | refs/heads/master | mentionbot/servermodules/dynamicchannels.py | 1 | import sys
import asyncio
import threading
from copy import deepcopy
import re
import traceback
import concurrent
import textwrap
import collections
import discord
from .. import utils, errors, cmd
from ..servermodule import ServerModule, registered
from ..enums import PrivilegeLevel
from ..attributedictwrapper import AttributeDictWrapper
@registered
class DynamicChannels(ServerModule):
MODULE_NAME = "Dynamic Channels"
MODULE_SHORT_DESCRIPTION = "Allows users to create temporary channels. (NOT YET FUNCTIONAL.)"
RECOMMENDED_CMD_NAMES = ["dchannel", "dchannels", "dynamicchannels"]
_SECRET_TOKEN = utils.SecretToken()
_cmdd = {}
_HELP_SUMMARY = """
`{modhelp}` - Temporary channels.
"""
_default_settings = {
"default channels": [],
"channel timeout": 10,
"max active temp channels": 5,
"bot flairs": [],
"max stored last opened": 10,
"last opened": [], # List of channel IDs
}
_re_non_alnum_or_dash = re.compile("[^-0-9a-zA-Z]")
async def _initialize(self, resources):
self._res = resources
self._client = self._res.client
self._server = self._res.server
self._default_role = self._server.default_role
self._default_channels = None
self._channel_timeout = None # Channel timeout in seconds.
self._max_active_temp_channels = None # If <0, then there's no limit.
self._bot_flairs = None
self._max_stored_last_opened = None
self._last_opened = None
await self._load_settings()
self._scheduler = ChannelCloseScheduler(self._client, self._server, self)
loop = asyncio.get_event_loop()
await self._res.start_nonreturning_coro(self._scheduler.run())
self._res.suppress_autokill(True)
return
@property
def bot_flairs(self):
return self._bot_flairs
async def _load_settings(self):
settings_dict = self._res.get_settings(default=self._default_settings)
settings = AttributeDictWrapper(settings_dict, self._default_settings)
self._default_channels = []
default_ch_data = settings.get("default channels")
for item in default_ch_data:
if not "id" in item:
continue
ch_id = item["id"]
if not isinstance(ch_id, str):
continue
ch = self._client.search_for_channel(ch_id, serverrestriction=self._server)
if not ch is None:
# Missed channels are simply skipped.
self._default_channels.append(ch)
self._channel_timeout = settings.get("channel timeout", accept_if=lambda x: 100000 >= x > 0)
self._max_active_temp_channels = settings.get("max active temp channels", accept_if=lambda x: x <= 100000)
def no_issues_in_bot_flairs(x):
if not isinstance(x, list):
return False
for i in x:
if not isinstance(i, str):
return False
return True
self._bot_flairs = settings.get("bot flairs", accept_if=no_issues_in_bot_flairs)
self._max_stored_last_opened = settings.get("max stored last opened", accept_if=lambda x: 200 >= x > 0)
self._last_opened = []
last_opened_data = settings.get("last opened")
for ch_id in last_opened_data:
if not isinstance(ch_id, str):
continue
ch = self._client.search_for_channel(ch_id, serverrestriction=self._server)
if not ch is None:
# Missed channels are simply skipped.
self._last_opened.append(ch)
await settings.report_if_changed(self._client, self, server=self._server)
self._save_settings()
return
def _save_settings(self):
settings = {}
settings["channel timeout"] = self._channel_timeout
settings["max active temp channels"] = self._max_active_temp_channels
settings["bot flairs"] = self._bot_flairs
default_channels = []
for ch in self._default_channels:
save_object = {}
save_object["id"] = ch.id
save_object["name"] = ch.name
default_channels.append(save_object)
settings["default channels"] = default_channels
settings["max stored last opened"] = self._max_stored_last_opened
settings["last opened"] = [x.id for x in self._last_opened]
self._res.save_settings(settings)
return
async def msg_preprocessor(self, content, msg, default_cmd_prefix):
if content.startswith("+++"):
content = default_cmd_prefix + self._res.module_cmd_aliases[0] + " open " + content[3:]
elif content.startswith("++"):
content = default_cmd_prefix + self._res.module_cmd_aliases[0] + " search " + content[2:]
return content
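   # Illustrative expansion (assuming a hypothetical default prefix "/" and
   # "dchannel" as the first module alias):
   #   "+++lounge" -> "/dchannel open lounge"
   #   "++lounge"  -> "/dchannel search lounge"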
async def process_cmd(self, substr, msg, privilege_level):
if substr == "":
substr = "status"
return await super(DynamicChannels, self).process_cmd(substr, msg, privilege_level)
async def on_message(self, msg, privilege_level):
if self._name_is_default_channel(msg.channel.name):
try:
self._scheduler.unschedule_closure(msg.channel)
except KeyError:
pass
else:
self._scheduler.schedule_closure(msg.channel, self._channel_timeout)
return
@cmd.add(_cmdd, "search")
async def _cmdf_search(self, substr, msg, privilege_level):
"""
`++` - See a list of the last opened channels.
`++[string]` - Search list of hidden channels.
"""
ch_name = utils.convert_to_legal_channel_name(substr)
if len(ch_name) == 0:
buf = "**Last {} channels opened:**"
buf2 = ""
listed = 0
for ch in self._last_opened:
if listed == self._max_stored_last_opened:
break
buf2 += "\n" + ch.name
listed += 1
if listed == 0:
buf = "No channels were recently opened."
elif listed == 1:
buf = "**Last channel opened:**" + buf2
else:
buf = buf.format(str(listed)) + buf2
await self._client.send_msg(msg, buf)
return
available_channels = []
for ch in self._server.channels:
if self._name_is_default_channel(ch.name) or (ch.type != discord.ChannelType.text):
continue
if ch_name in ch.name:
available_channels.append(ch)
buf = None
if len(available_channels) == 0:
buf = "No channels meet the search criteria."
else:
buf = "**The following channels are available for re-opening:**"
for ch in available_channels:
buf += "\n" + ch.name
buf += "\n\nReopen a channel with the command `+++[channel name]`."
await self._client.send_msg(msg, buf)
return
@cmd.add(_cmdd, "open", "create")
async def _cmdf_open(self, substr, msg, privilege_level):
"""`+++[string]` - Create/unhide channel."""
if len(self._scheduler.get_scheduled()) >= self._max_active_temp_channels >= 0:
buf = "No more than {}".format(str(self._max_active_temp_channels))
buf += " active temporary channels are allowed."
await self._client.send_msg(msg.channel, buf)
raise errors.OperationAborted
ch_name = substr.strip().replace(" ", "-").lower()
await self._chopen_name_check(msg, ch_name)
try:
await utils.open_channel(self._client, ch_name, self._server, self._bot_flairs)
except discord.errors.Forbidden:
await self._client.send_msg(msg.channel, "Bot is not allowed to open that.")
raise errors.OperationAborted
ch = self._client.search_for_channel_by_name(ch_name, self._server)
self._scheduler.schedule_closure(ch, self._channel_timeout)
buf = "Channel opened by <@{}>.".format(msg.author.id)
buf += " Closing after {} minutes of inactivity.".format(str(self._channel_timeout))
await self._client.send_msg(ch, buf)
await self._client.send_msg(msg, "Channel <#{}> successfully opened.".format(ch.id))
self._add_to_last_opened(ch)
return
@cmd.add(_cmdd, "status", "admin", "s", "stat", "settings", default=True)
@cmd.category("Status")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_status(self, substr, msg, privilege_level):
"""`{cmd}` - A summary of the module's settings."""
buf = textwrap.dedent("""\
**Timeout**: {timeout}
**Max Active**: {max_active}
**Max Stored Last-Opened Channels**: {max_stored_last_opened}
**Bot flairs**: {bot_flairs}
**Default Channels**:
{default_channels}
""").strip()
format_kwargs = {
"timeout": str(self._channel_timeout) + " minutes",
"max_active": None, # Placeholder
"max_stored_last_opened": str(self._max_stored_last_opened),
"bot_flairs": None, # Placeholder
"default_channels": None, # Placeholder
}
if self._max_active_temp_channels < 0:
format_kwargs["max_active"] = "unlimited channels"
else:
format_kwargs["max_active"] = str(self._max_active_temp_channels) + " channels"
bot_flairs = ""
if len(self._bot_flairs) == 0:
bot_flairs = "*(none)*"
else:
for flair_name in self._bot_flairs:
bot_flairs += flair_name + ", "
bot_flairs = bot_flairs[:-2]
format_kwargs["bot_flairs"] = bot_flairs
default_channels = ""
if len(self._default_channels) == 0:
default_channels = "*(none)*"
else:
for ch in self._default_channels:
default_channels += "\n<#{0}> (ID: {0})".format(ch.id)
default_channels = default_channels[1:]
format_kwargs["default_channels"] = default_channels
buf = buf.format(**format_kwargs)
await self._client.send_msg(msg, buf)
return
@cmd.add(_cmdd, "scheduled")
@cmd.category("Status")
   async def _cmdf_scheduled(self, substr, msg, privilege_level):
"""
`{cmd}` - View what's scheduled for closure.
      This command gives you the time for closure for each channel, rounded UP to the nearest minute (e.g. 20 minutes and 1 second rounds up to 21 minutes).
"""
scheduled = self._scheduler.get_scheduled()
if len(scheduled) == 0:
await self._client.send_msg(msg, "No channels are scheduled.")
else:
buf = "**The following channels are currently scheduled:**"
for (ch, timeout) in self._scheduler.get_scheduled():
buf += "\n<#" + ch.id + "> in " + str(timeout) + " minutes"
await self._client.send_msg(msg, buf)
return
@cmd.add(_cmdd, "clearscheduled")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
   async def _cmdf_clearscheduled(self, substr, msg, privilege_level):
"""
`{cmd}` - Reset the scheduled-for-closure list.
Each temporary channel has an associated countdown timer which is reset every time a message is sent into the channel.
This command will clear all currently scheduled temporary channels AND stop their countdown timers temporarily.
Sending a message into the channel again, however, will start the channel's countdown timer again.
"""
self._scheduler.unschedule_all()
await self._client.send_msg(msg, "Scheduled closure list is cleared.")
return
@cmd.add(_cmdd, "addbotflair")
@cmd.category("Admin - Designated Bot Flairs")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_addbotflair(self, substr, msg, privilege_level):
"""`{cmd} [flair name]` - Add a bot flair, identified by name."""
if len(substr) == 0:
raise errors.InvalidCommandArgumentsError
elif len(utils.flair_names_to_object(self._server, [substr])) == 0:
await self._client.send_msg(msg, "`{}` is not an existing flair.".format(substr))
raise errors.OperationAborted
if not substr in self._bot_flairs:
self._bot_flairs.append(substr)
await self._client.send_msg(msg, "`{}` added as a bot flair.".format(substr))
else:
await self._client.send_msg(msg, "`{}` is already a bot flair.".format(substr))
self._save_settings()
return
@cmd.add(_cmdd, "removebotflair")
@cmd.category("Admin - Designated Bot Flairs")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
   async def _cmdf_removebotflair(self, substr, msg, privilege_level):
"""`{cmd} [flair name]` - Remove a bot flair, identified by name."""
try:
self._bot_flairs.remove(substr)
await self._client.send_msg(msg, "`{}` removed as a bot flair.".format(substr))
except ValueError:
await self._client.send_msg(msg, "`{}` is not a bot flair.".format(substr))
self._save_settings()
return
@cmd.add(_cmdd, "adddefault")
@cmd.category("Admin - Adding/Removing default channels")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_adddefault(self, substr, msg, privilege_level):
"""`{cmd} [channel]` - Add a default channel."""
new_default = None
if len(substr) == 0:
new_default = msg.channel
else:
new_default = self._client.search_for_channel(substr, serverrestriction=self._server)
try:
self._scheduler.unschedule_closure(new_default)
except KeyError:
pass
if new_default is None:
await self._client.send_msg(msg, "Error: Channel not found.")
elif new_default in self._default_channels:
await self._client.send_msg(msg, "Error: <#{}> is already in default channels.".format(new_default.id))
else:
self._default_channels.append(new_default)
self._save_settings()
await self._client.send_msg(msg, "<#{}> successfully added to default list.".format(new_default.id))
return
@cmd.add(_cmdd, "removedefault")
@cmd.category("Admin - Adding/Removing default channels")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_removedefault(self, substr, msg, privilege_level):
"""`{cmd} [channel]` - Remove a default channel."""
to_remove = None
if len(substr) == 0:
to_remove = msg.channel
else:
to_remove = self._client.search_for_channel(substr, serverrestriction=self._server)
if to_remove is None:
await self._client.send_msg(msg, "Error: Channel not found.")
elif to_remove in self._default_channels:
self._default_channels.remove(to_remove)
self._save_settings()
await self._client.send_msg(msg, "<#{}> successfully removed from default list.".format(to_remove.id))
else:
await self._client.send_msg(msg, "Error: Channel is not default.")
return
@cmd.add(_cmdd, "settimeout")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_settimeout(self, substr, msg, privilege_level):
"""
`{cmd} [int]` - Set the channel closure timeout, in minutes.
More specifically, this is the time it takes for a channel to close after the last message that was sent into it.
"""
try:
new_timeout = int(substr)
except ValueError:
await self._client.send_msg(msg, "Error: Must enter an integer.")
raise errors.OperationAborted
if new_timeout < 1:
await self._client.send_msg(msg, "Error: Timeout must be >0 minutes.")
raise errors.OperationAborted
elif new_timeout > 100000:
await self._client.send_msg(msg, "Error: Timeout too large.")
raise errors.OperationAborted
self._channel_timeout = new_timeout
self._save_settings()
await self._client.send_msg(msg, "Timeout set to {} minutes.".format(str(new_timeout)))
return
@cmd.add(_cmdd, "setmaxactive")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
async def _cmdf_setmaxactive(self, substr, msg, privilege_level):
"""
`{cmd} [int]` - Set the maximum active channels.
`{cmd} -1` - Set maximum active channels to unlimited.
"""
try:
new_value = int(substr)
except ValueError:
await self._client.send_msg(msg, "Error: Must enter an integer.")
raise errors.OperationAborted
set_to_message = None
      # Validate the *new* value, not the current setting.
      if new_value < 0:
         set_to_message = "unlimited"
      elif new_value > 100000:
         await self._client.send_msg(msg, "Error: Max active channels is too large.")
         raise errors.OperationAborted
      else:
         set_to_message = str(new_value)
self._max_active_temp_channels = new_value
self._save_settings()
await self._client.send_msg(msg, "Max active channels set to {}.".format(set_to_message))
return
@cmd.add(_cmdd, "setmaxlastopened")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
   async def _cmdf_setmaxlastopened(self, substr, msg, privilege_level):
"""
`{cmd} [int]` - Set the maximum stored last opened channels.
This is to limit the number of channels that appear when you use the open channel command with no arguments.
"""
try:
new_value = int(substr)
except ValueError:
await self._client.send_msg(msg, "Error: Must enter an integer.")
raise errors.OperationAborted
if new_value < 1:
await self._client.send_msg(msg, "Error: Value must be >0.")
raise errors.OperationAborted
elif new_value > 100000:
await self._client.send_msg(msg, "Error: Value too large.")
raise errors.OperationAborted
self._max_stored_last_opened = new_value
self._save_settings()
await self._client.send_msg(msg, "Max stored last opened channels set to {}.".format(str(new_value)))
return
@cmd.add(_cmdd, "clearlastopened")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.ADMIN)
   async def _cmdf_clearlastopened(self, substr, msg, privilege_level):
"""
`{cmd} [int]` - Clear the list of last opened channels.
"""
old_elements = len(self._last_opened)
self._last_opened = []
await self._client.send_msg(msg, "{} removed from the last opened list.".format(str(old_elements)))
return
@cmd.add(_cmdd, "permissionrecovery")
@cmd.category("Admin - Misc")
@cmd.minimum_privilege(PrivilegeLevel.BOT_OWNER)
   async def _cmdf_permissionrecovery(self, substr, msg, privilege_level):
"""
`{cmd} [blacklisted channels]` - Re-write bot flair permissions.
This command simply re-writes full access permissions to all temporary channels, but ignores default channels and channels listed by `[blacklisted channels]`.
Requires that the bot **temporarily be given full admin rights** to the server before running this command.
      `[blacklisted channels]` is simply a space-delimited string of channel mentions or channel IDs.
e.g. `{cmd} 123 456 789` will blacklist channels with the IDs `123`, `456`, and `789` from having their permissions overwritten.
This command is used if changes to underlying APIs without the bot being updated have caused issues with this module's functionality.
"""
client = self._client
server = self._server
args = substr.split()
to_ignore = set()
for arg in args:
ch = client.search_for_channel(arg, serverrestriction=server)
if ch is None:
buf = "Error: No channel matching `{}` has been found.".format(arg)
buf += "\nOperation aborted. No changes have been made."
await client.send_msg(msg, buf)
raise errors.OperationAborted
to_ignore.add(ch.id)
for ch in self._default_channels:
to_ignore.add(ch.id)
buf = "Writing permissions. This operation is not pipelined, so this will take a while."
await client.send_msg(msg, buf)
print(buf)
total_channels = 0
ignored = 0
non_text_skipped = 0
      # Named `error_counts` rather than `errors` to avoid shadowing the
      # imported `errors` module used above for `errors.OperationAborted`.
      error_counts = collections.defaultdict(lambda: 0)  # error_counts[error_name] = count
success = 0
for ch in server.channels:
total_channels += 1
if ch.id in to_ignore:
print("Ignoring #" + ch.name)
ignored += 1
continue
elif ch.type != discord.ChannelType.text:
print("Non-text channel #" + ch.name)
non_text_skipped += 1
continue
try:
await utils.ensure_bot_permissions(client, ch, self._bot_flairs)
success += 1
except Exception as e:
name = type(e).__name__
print("Error for channel #" + ch.name)
print(traceback.format_exc())
            error_counts[name] += 1
buf = textwrap.dedent("""\
Operation complete.
**Statistics**
**Total channels seen**: {total_channels}
**Ignored by blacklist or default**: {ignored}
**Non-text channels skipped**: {non_text_skipped}
**Successful permission changes**: {success}
**Errors**:
{errors}
""").strip()
buf2 = ""
      for (error_name, count) in error_counts.items():
buf2 += " {} x{}\n".format(error_name, str(count))
if len(buf2) == 0:
buf2 = " No errors."
else:
buf2 = buf2[:-1]
new_kwargs = {
"total_channels": str(total_channels),
"ignored": str(ignored),
"non_text_skipped": str(non_text_skipped),
"success": str(success),
"errors": buf2
}
await client.send_msg(msg, buf.format(**new_kwargs))
return
async def _chopen_name_check(self, msg, ch_name):
if len(ch_name) < 2:
await self._client.send_msg(msg, "Channel name must be at least 2 characters long.")
raise errors.OperationAborted
elif (ch_name[:1] == "-") or self._re_non_alnum_or_dash.search(ch_name):
await self._client.send_msg(msg, "`{}` is an illegal channel name.".format(ch_name))
raise errors.OperationAborted
elif len(ch_name) > 100:
await self._client.send_msg(msg, "Channel name can't be larger than 100 characters.")
raise errors.OperationAborted
elif self._name_is_default_channel(ch_name):
await self._client.send_msg(msg, "Can't open a default channel.")
raise errors.OperationAborted
return
def _name_is_default_channel(self, ch_name):
for ch in self._default_channels:
if ch.name == ch_name:
return True
return False
def _add_to_last_opened(self, channel):
if not channel in self._last_opened:
self._last_opened.insert(0, channel)
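         # Slice-delete trims the list to the configured cap, keeping the
         # most recently opened channels at the front.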
del self._last_opened[self._max_stored_last_opened:]
assert len(self._last_opened) <= self._max_stored_last_opened
self._save_settings()
return
# # Generator, yields all temporary channels.
# def _gen_temp_channels(self):
# for channel in self._server.channels:
# if channel in self._default_channels:
# continue
# yield channel
class ChannelCloseScheduler:
def __init__(self, client, server, module):
self._client = client
self._server = server
self._module = module
      self._scheduled = {}  # Maps channel -> minutes until closure
return
def schedule_closure(self, channel, timeout_min):
self._scheduled[channel] = timeout_min + 1
return
def unschedule_closure(self, channel):
del self._scheduled[channel]
return
def unschedule_all(self):
self._scheduled = {}
return
def get_scheduled(self):
return self._scheduled.items()
# Run this indefinitely.
async def run(self):
while True:
try:
print(">>>>>>>>>>>>>>>>>>> DYNAMICCHANNELS TICK!!!")
            to_close = []
            for (ch, timeout_min) in self._scheduled.items():
               if timeout_min <= 1:
                  to_close.append(ch)
               else:
                  self._scheduled[ch] = timeout_min - 1
            # Keys are channel objects, so no name lookup is needed here.
            for ch in to_close:
               del self._scheduled[ch]
               try:
                  await utils.close_channel(self._client, ch, self._module.bot_flairs)
               except discord.errors.Forbidden:
                  print("!!!!!!!! FAILED TO CLOSE #{}.".format(ch.name))
               except:
                  print(traceback.format_exc())
await asyncio.sleep(60)
except concurrent.futures.CancelledError:
raise # Allow the coroutine to be cancelled.
|
GRArmstrong/invenio-inspire-ops | refs/heads/prod | modules/bibdocfile/lib/bibdocfile_webinterface.py | 1 | ## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import cgi
import os
import time
import shutil
from invenio.config import \
CFG_ACCESS_CONTROL_LEVEL_SITE, \
CFG_SITE_LANG, \
CFG_TMPSHAREDDIR, \
CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_WEBSUBMIT_STORAGEDIR, \
CFG_SITE_RECORD, \
CFG_INSPIRE_SITE
from invenio.bibdocfile_config import CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_DOCTYPES, \
CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_MISC, \
CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_RESTRICTIONS, \
CFG_BIBDOCFILE_ICON_SUBFORMAT_RE
from invenio import webinterface_handler_config as apache
from invenio.access_control_config import VIEWRESTRCOLL
from invenio.access_control_mailcookie import mail_cookie_create_authorize_action
from invenio.access_control_engine import acc_authorize_action
from invenio.access_control_admin import acc_is_role
from invenio.webpage import page, pageheaderonly, \
pagefooteronly, warning_page, write_warning
from invenio.webuser import getUid, page_not_authorized, collect_user_info, isUserSuperAdmin, \
isGuestUser
from invenio import webjournal_utils
from invenio.webinterface_handler import wash_urlargd, WebInterfaceDirectory
from invenio.urlutils import make_canonical_urlargd, redirect_to_url
from invenio.messages import gettext_set_language
from invenio.search_engine import \
guess_primary_collection_of_a_record, get_colID, record_exists, \
create_navtrail_links, check_user_can_view_record, record_empty, \
is_user_owner_of_record
from invenio.bibdocfile import BibRecDocs, normalize_format, file_strip_ext, \
stream_restricted_icon, BibDoc, InvenioBibDocFileError, \
get_subformat_from_format
from invenio.errorlib import register_exception
from invenio.websearchadminlib import get_detailed_page_tabs
import invenio.template
bibdocfile_templates = invenio.template.load('bibdocfile')
webstyle_templates = invenio.template.load('webstyle')
websubmit_templates = invenio.template.load('websubmit')
websearch_templates = invenio.template.load('websearch')
from invenio.bibdocfile_managedocfiles import \
create_file_upload_interface, \
get_upload_file_interface_javascript, \
get_upload_file_interface_css, \
move_uploaded_files_to_storage
class WebInterfaceFilesPages(WebInterfaceDirectory):
def __init__(self, recid):
self.recid = recid
def _lookup(self, component, path):
# after /<CFG_SITE_RECORD>/<recid>/files/ every part is used as the file
# name
filename = component
def getfile(req, form):
args = wash_urlargd(form, bibdocfile_templates.files_default_urlargd)
ln = args['ln']
_ = gettext_set_language(ln)
uid = getUid(req)
user_info = collect_user_info(req)
verbose = args['verbose']
if verbose >= 1 and not isUserSuperAdmin(user_info):
# Only SuperUser can see all the details!
verbose = 0
if uid == -1 or CFG_ACCESS_CONTROL_LEVEL_SITE > 1:
return page_not_authorized(req, "/%s/%s" % (CFG_SITE_RECORD, self.recid),
navmenuid='submit')
if record_exists(self.recid) < 1:
msg = "<p>%s</p>" % _("Requested record does not seem to exist.")
return warning_page(msg, req, ln)
if record_empty(self.recid):
msg = "<p>%s</p>" % _("Requested record does not seem to have been integrated.")
return warning_page(msg, req, ln)
(auth_code, auth_message) = check_user_can_view_record(user_info, self.recid)
if auth_code and user_info['email'] == 'guest':
if webjournal_utils.is_recid_in_released_issue(self.recid):
# We can serve the file
pass
else:
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : ln, 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target, norobot=True)
elif auth_code:
if webjournal_utils.is_recid_in_released_issue(self.recid):
# We can serve the file
pass
else:
return page_not_authorized(req, "../", \
text = auth_message)
readonly = CFG_ACCESS_CONTROL_LEVEL_SITE == 1
# From now on: either the user provided a specific file
# name (and a possible version), or we return a list of
# all the available files. In no case are the docids
# visible.
try:
bibarchive = BibRecDocs(self.recid)
except InvenioBibDocFileError:
register_exception(req=req, alert_admin=True)
msg = "<p>%s</p><p>%s</p>" % (
_("The system has encountered an error in retrieving the list of files for this document."),
_("The error has been logged and will be taken in consideration as soon as possible."))
return warning_page(msg, req, ln)
if bibarchive.deleted_p():
req.status = apache.HTTP_GONE
return warning_page(_("Requested record does not seem to exist."), req, ln)
docname = ''
docformat = ''
version = ''
warn = ''
if filename:
# We know the complete file name, guess which docid it
# refers to
## TODO: Change the extension system according to ext.py from setlink
## and have a uniform extension mechanism...
docname = file_strip_ext(filename)
docformat = filename[len(docname):]
if docformat and docformat[0] != '.':
docformat = '.' + docformat
if args['subformat']:
docformat += ';%s' % args['subformat']
else:
docname = args['docname']
if not docformat:
docformat = args['format']
if args['subformat']:
docformat += ';%s' % args['subformat']
if not version:
version = args['version']
## Download as attachment
is_download = False
if args['download']:
is_download = True
# version could be either empty, or all or an integer
try:
int(version)
except ValueError:
if version != 'all':
version = ''
display_hidden = isUserSuperAdmin(user_info)
if version != 'all':
# search this filename in the complete list of files
for doc in bibarchive.list_bibdocs():
if docname == bibarchive.get_docname(doc.id):
try:
try:
docfile = doc.get_file(docformat, version)
except InvenioBibDocFileError, msg:
req.status = apache.HTTP_NOT_FOUND
if not CFG_INSPIRE_SITE and req.headers_in.get('referer'):
## There must be a broken link somewhere.
## Maybe it's good to alert the admin
register_exception(req=req, alert_admin=True)
warn += write_warning(_("The format %s does not exist for the given version: %s") % (cgi.escape(docformat), cgi.escape(str(msg))))
break
(auth_code, auth_message) = docfile.is_restricted(user_info)
if auth_code != 0 and not is_user_owner_of_record(user_info, self.recid):
if CFG_BIBDOCFILE_ICON_SUBFORMAT_RE.match(get_subformat_from_format(docformat)):
return stream_restricted_icon(req)
if user_info['email'] == 'guest':
cookie = mail_cookie_create_authorize_action('viewrestrdoc', {'status' : docfile.get_status()})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : ln, 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
redirect_to_url(req, target)
else:
req.status = apache.HTTP_UNAUTHORIZED
warn += write_warning(_("This file is restricted: ") + str(auth_message))
break
if not docfile.hidden_p():
if not readonly:
ip = str(req.remote_ip)
doc.register_download(ip, docfile.get_version(), docformat, uid)
try:
return docfile.stream(req, download=is_download)
except InvenioBibDocFileError, msg:
register_exception(req=req, alert_admin=True)
req.status = apache.HTTP_INTERNAL_SERVER_ERROR
warn += write_warning(_("An error has happened in trying to stream the request file."))
else:
req.status = apache.HTTP_UNAUTHORIZED
warn += write_warning(_("The requested file is hidden and can not be accessed."))
except InvenioBibDocFileError, msg:
register_exception(req=req, alert_admin=True)
if docname and docformat and not warn:
req.status = apache.HTTP_NOT_FOUND
warn += write_warning(_("Requested file does not seem to exist."))
# filelist = bibarchive.display("", version, ln=ln, verbose=verbose, display_hidden=display_hidden)
filelist = bibdocfile_templates.tmpl_display_bibrecdocs(bibarchive, "", version, ln=ln, verbose=verbose, display_hidden=display_hidden)
t = warn + bibdocfile_templates.tmpl_filelist(
ln=ln,
filelist=filelist)
cc = guess_primary_collection_of_a_record(self.recid)
unordered_tabs = get_detailed_page_tabs(get_colID(cc), self.recid, ln)
ordered_tabs_id = [(tab_id, values['order']) for (tab_id, values) in unordered_tabs.iteritems()]
ordered_tabs_id.sort(lambda x, y: cmp(x[1], y[1]))
link_ln = ''
if ln != CFG_SITE_LANG:
link_ln = '?ln=%s' % ln
tabs = [(unordered_tabs[tab_id]['label'], \
'%s/%s/%s/%s%s' % (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, tab_id, link_ln), \
tab_id == 'files',
unordered_tabs[tab_id]['enabled']) \
for (tab_id, dummy_order) in ordered_tabs_id
if unordered_tabs[tab_id]['visible'] == True]
top = webstyle_templates.detailed_record_container_top(self.recid,
tabs,
args['ln'])
bottom = webstyle_templates.detailed_record_container_bottom(self.recid,
tabs,
args['ln'])
title, description, keywords = websearch_templates.tmpl_record_page_header_content(req, self.recid, args['ln'])
return pageheaderonly(title=title,
navtrail=create_navtrail_links(cc=cc, aas=0, ln=ln) + \
''' > <a class="navtrail" href="%s/%s/%s">%s</a>
> %s''' % \
(CFG_SITE_URL, CFG_SITE_RECORD, self.recid, title, _("Access to Fulltext")),
description=description,
keywords=keywords,
uid=uid,
language=ln,
req=req,
navmenuid='search',
navtrail_append_title_p=0) + \
websearch_templates.tmpl_search_pagestart(ln) + \
top + t + bottom + \
websearch_templates.tmpl_search_pageend(ln) + \
pagefooteronly(language=ln, req=req)
return getfile, []
def __call__(self, req, form):
"""Called in case of URLs like /CFG_SITE_RECORD/123/files without
trailing slash.
"""
args = wash_urlargd(form, bibdocfile_templates.files_default_urlargd)
ln = args['ln']
link_ln = ''
if ln != CFG_SITE_LANG:
link_ln = '?ln=%s' % ln
return redirect_to_url(req, '%s/%s/%s/files/%s' % (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, link_ln))
def bibdocfile_legacy_getfile(req, form):
""" Handle legacy /getfile.py URLs """
args = wash_urlargd(form, {
'recid': (int, 0),
'docid': (int, 0),
'version': (str, ''),
'name': (str, ''),
'format': (str, ''),
'ln' : (str, CFG_SITE_LANG)
})
_ = gettext_set_language(args['ln'])
def _getfile_py(req, recid=0, docid=0, version="", name="", docformat="", ln=CFG_SITE_LANG):
if not recid:
## Let's obtain the recid from the docid
if docid:
try:
bibdoc = BibDoc(docid=docid)
recid = bibdoc.bibrec_links[0]["recid"]
except InvenioBibDocFileError:
return warning_page(_("An error has happened in trying to retrieve the requested file."), req, ln)
else:
return warning_page(_('Not enough information to retrieve the document'), req, ln)
else:
brd = BibRecDocs(recid)
if not name and docid:
## Let's obtain the name from the docid
try:
name = brd.get_docname(docid)
except InvenioBibDocFileError:
return warning_page(_("An error has happened in trying to retrieving the requested file."), req, ln)
docformat = normalize_format(docformat)
        redirect_to_url(req, '%s/%s/%s/files/%s%s?ln=%s%s' % (CFG_SITE_URL, CFG_SITE_RECORD, recid, name, docformat, ln, version and '&version=%s' % version or ''), apache.HTTP_MOVED_PERMANENTLY)
return _getfile_py(req, **args)
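# Illustrative note (hypothetical values): with the washing and redirect above,
# a legacy request such as
#   /getfile.py?recid=123&name=thesis&format=.pdf
# is permanently redirected to the modern URL
#   <CFG_SITE_URL>/<CFG_SITE_RECORD>/123/files/thesis.pdf?ln=<ln>&version=<n>
# where the version parameter is only appended when one was requested.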
# --------------------------------------------------
class WebInterfaceManageDocFilesPages(WebInterfaceDirectory):
_exports = ['', 'managedocfiles', 'managedocfilesasync']
def managedocfiles(self, req, form):
"""
Display admin interface to manage files of a record
"""
argd = wash_urlargd(form, {
'ln': (str, ''),
'access': (str, ''),
'recid': (int, None),
'do': (int, 0),
'cancel': (str, None),
})
_ = gettext_set_language(argd['ln'])
uid = getUid(req)
user_info = collect_user_info(req)
# Check authorization
(auth_code, auth_msg) = acc_authorize_action(req,
'runbibdocfile')
if auth_code and user_info['email'] == 'guest':
# Ask to login
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'ln' : argd['ln'],
'referer' : CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target)
elif auth_code:
return page_not_authorized(req, referer="/%s/managedocfiles" % CFG_SITE_RECORD,
uid=uid, text=auth_msg,
ln=argd['ln'],
navmenuid="admin")
# Prepare navtrail
navtrail = '''<a class="navtrail" href="%(CFG_SITE_URL)s/help/admin">Admin Area</a> > %(manage_files)s''' \
% {'CFG_SITE_URL': CFG_SITE_URL,
'manage_files': _("Manage Document Files")}
body = ''
if argd['do'] != 0 and not argd['cancel']:
# Apply modifications
working_dir = os.path.join(CFG_TMPSHAREDDIR,
'websubmit_upload_interface_config_' + str(uid),
argd['access'])
if not os.path.isdir(working_dir):
# We accessed the url without preliminary steps
# (we did not upload a file)
# Our working dir does not exist
# Display the file manager
argd['do'] = 0
else:
move_uploaded_files_to_storage(working_dir=working_dir,
recid=argd['recid'],
icon_sizes=['180>', '700>'],
create_icon_doctypes=['*'],
force_file_revision=False)
# Clean temporary directory
shutil.rmtree(working_dir)
# Confirm modifications
body += '<p style="color:#0f0">%s</p>' % \
(_('Your modifications to record #%i have been submitted') % argd['recid'])
elif argd['cancel']:
# Clean temporary directory
working_dir = os.path.join(CFG_TMPSHAREDDIR,
'websubmit_upload_interface_config_' + str(uid),
argd['access'])
shutil.rmtree(working_dir)
body += '<p style="color:#c00">%s</p>' % \
(_('Your modifications to record #%i have been cancelled') % argd['recid'])
if not argd['recid'] or argd['do'] != 0:
body += '''
<form method="post" action="%(CFG_SITE_URL)s/%(CFG_SITE_RECORD)s/managedocfiles">
<label for="recid">%(edit_record)s:</label>
<input type="text" name="recid" id="recid" />
<input type="submit" value="%(edit)s" class="adminbutton" />
</form>
''' % {'edit': _('Edit'),
'edit_record': _('Edit record'),
'CFG_SITE_URL': CFG_SITE_URL,
'CFG_SITE_RECORD': CFG_SITE_RECORD}
access = time.strftime('%Y%m%d_%H%M%S')
if argd['recid'] and argd['do'] == 0:
# Displaying interface to manage files
# Prepare navtrail
title, dummy_description, dummy_keywords = websearch_templates.tmpl_record_page_header_content(req, argd['recid'],
argd['ln'])
navtrail = '''<a class="navtrail" href="%(CFG_SITE_URL)s/help/admin">Admin Area</a> >
<a class="navtrail" href="%(CFG_SITE_URL)s/%(CFG_SITE_RECORD)s/managedocfiles">%(manage_files)s</a> >
%(record)s: %(title)s
''' \
% {'CFG_SITE_URL': CFG_SITE_URL,
'title': title,
'manage_files': _("Document File Manager"),
'record': _("Record #%i") % argd['recid'],
'CFG_SITE_RECORD': CFG_SITE_RECORD}
body += create_file_upload_interface(\
recid=argd['recid'],
ln=argd['ln'],
uid=uid,
sbm_access=access,
display_hidden_files=True,
restrictions_and_desc=CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_RESTRICTIONS,
doctypes_and_desc=CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_DOCTYPES,
**CFG_BIBDOCFILE_DOCUMENT_FILE_MANAGER_MISC)[1]
body += '''<br />
<form method="post" action="%(CFG_SITE_URL)s/%(CFG_SITE_RECORD)s/managedocfiles">
<input type="hidden" name="recid" value="%(recid)s" />
<input type="hidden" name="do" value="1" />
<input type="hidden" name="access" value="%(access)s" />
<input type="hidden" name="ln" value="%(ln)s" />
<div style="font-size:small">
<input type="submit" name="cancel" value="%(cancel_changes)s" />
<input type="submit" onclick="user_must_confirm_before_leaving_page=false;return true;" class="adminbutton" name="submit" id="applyChanges" value="%(apply_changes)s" />
</div></form>''' % \
{'apply_changes': _("Apply changes"),
'cancel_changes': _("Cancel all changes"),
'recid': argd['recid'],
'access': access,
'ln': argd['ln'],
'CFG_SITE_URL': CFG_SITE_URL,
'CFG_SITE_RECORD': CFG_SITE_RECORD}
body += websubmit_templates.tmpl_page_do_not_leave_submission_js(argd['ln'], enabled=True)
return page(title = _("Document File Manager") + (argd['recid'] and (': ' + _("Record #%i") % argd['recid']) or ''),
navtrail=navtrail,
navtrail_append_title_p=0,
metaheaderadd = get_upload_file_interface_javascript(form_url_params='?access='+access) + \
get_upload_file_interface_css(),
body = body,
uid = uid,
language=argd['ln'],
req=req,
navmenuid='admin')
def managedocfilesasync(self, req, form):
"Upload file and returns upload interface"
argd = wash_urlargd(form, {
'ln': (str, ''),
'recid': (int, 1),
'doctype': (str, ''),
'access': (str, ''),
'indir': (str, ''),
})
user_info = collect_user_info(req)
include_headers = False
# User submitted either through WebSubmit, or admin interface.
if form.has_key('doctype') and form.has_key('indir') \
and form.has_key('access'):
# Submitted through WebSubmit. Check rights
include_headers = True
working_dir = os.path.join(CFG_WEBSUBMIT_STORAGEDIR,
argd['indir'], argd['doctype'],
argd['access'])
try:
assert(working_dir == os.path.abspath(working_dir))
except AssertionError:
raise apache.SERVER_RETURN(apache.HTTP_UNAUTHORIZED)
try:
# Retrieve recid from working_dir, safer.
recid_fd = file(os.path.join(working_dir, 'SN'))
recid = int(recid_fd.read())
recid_fd.close()
except:
recid = ""
try:
act_fd = file(os.path.join(working_dir, 'act'))
action = act_fd.read()
act_fd.close()
except:
action = ""
# Is user authorized to perform this action?
auth_code = acc_authorize_action(user_info,
"submit",
authorized_if_no_roles=not isGuestUser(getUid(req)),
doctype=argd['doctype'],
act=action)[0]
if auth_code and not acc_is_role("submit", doctype=argd['doctype'], act=action):
# There is NO authorization plugged. User should have access
auth_code = 0
else:
# User must be allowed to attach files
auth_code = acc_authorize_action(user_info, 'runbibdocfile')[0]
recid = argd['recid']
if auth_code:
raise apache.SERVER_RETURN(apache.HTTP_UNAUTHORIZED)
return create_file_upload_interface(recid=recid,
ln=argd['ln'],
print_outside_form_tag=False,
print_envelope=False,
form=form,
include_headers=include_headers,
sbm_indir=argd['indir'],
sbm_access=argd['access'],
sbm_doctype=argd['doctype'],
uid=user_info['uid'])[1]
__call__ = managedocfiles
|
kimimj/scrapy | refs/heads/master | scrapy/core/downloader/__init__.py | 85 | from __future__ import absolute_import
import random
import warnings
from time import time
from datetime import datetime
from collections import deque
import six
from twisted.internet import reactor, defer, task
from scrapy.utils.defer import mustbe_deferred
from scrapy.utils.httpobj import urlparse_cached
from scrapy.resolver import dnscache
from scrapy import signals
from .middleware import DownloaderMiddlewareManager
from .handlers import DownloadHandlers
class Slot(object):
"""Downloader slot"""
def __init__(self, concurrency, delay, randomize_delay):
self.concurrency = concurrency
self.delay = delay
self.randomize_delay = randomize_delay
self.active = set()
self.queue = deque()
self.transferring = set()
self.lastseen = 0
self.latercall = None
def free_transfer_slots(self):
return self.concurrency - len(self.transferring)
def download_delay(self):
if self.randomize_delay:
return random.uniform(0.5 * self.delay, 1.5 * self.delay)
return self.delay
def close(self):
if self.latercall and self.latercall.active():
self.latercall.cancel()
def __repr__(self):
cls_name = self.__class__.__name__
return "%s(concurrency=%r, delay=%0.2f, randomize_delay=%r)" % (
cls_name, self.concurrency, self.delay, self.randomize_delay)
def __str__(self):
return (
"<downloader.Slot concurrency=%r delay=%0.2f randomize_delay=%r "
"len(active)=%d len(queue)=%d len(transferring)=%d lastseen=%s>" % (
self.concurrency, self.delay, self.randomize_delay,
len(self.active), len(self.queue), len(self.transferring),
datetime.fromtimestamp(self.lastseen).isoformat()
)
)
def _get_concurrency_delay(concurrency, spider, settings):
delay = settings.getfloat('DOWNLOAD_DELAY')
if hasattr(spider, 'DOWNLOAD_DELAY'):
warnings.warn("%s.DOWNLOAD_DELAY attribute is deprecated, use %s.download_delay instead" %
(type(spider).__name__, type(spider).__name__))
delay = spider.DOWNLOAD_DELAY
if hasattr(spider, 'download_delay'):
delay = spider.download_delay
if hasattr(spider, 'max_concurrent_requests'):
concurrency = spider.max_concurrent_requests
return concurrency, delay
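# Illustrative sketch (not part of the original module): the helper above lets
# per-spider attributes override the project settings. Assuming a hypothetical
# spider class,
#
#   class MySpider(Spider):
#       download_delay = 2.5          # overrides settings['DOWNLOAD_DELAY']
#       max_concurrent_requests = 4   # overrides the concurrency passed in
#
#   conc, delay = _get_concurrency_delay(8, MySpider(), settings)
#   # -> conc == 4, delay == 2.5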
class Downloader(object):
def __init__(self, crawler):
self.settings = crawler.settings
self.signals = crawler.signals
self.slots = {}
self.active = set()
self.handlers = DownloadHandlers(crawler)
self.total_concurrency = self.settings.getint('CONCURRENT_REQUESTS')
self.domain_concurrency = self.settings.getint('CONCURRENT_REQUESTS_PER_DOMAIN')
self.ip_concurrency = self.settings.getint('CONCURRENT_REQUESTS_PER_IP')
self.randomize_delay = self.settings.getbool('RANDOMIZE_DOWNLOAD_DELAY')
self.middleware = DownloaderMiddlewareManager.from_crawler(crawler)
self._slot_gc_loop = task.LoopingCall(self._slot_gc)
self._slot_gc_loop.start(60)
def fetch(self, request, spider):
def _deactivate(response):
self.active.remove(request)
return response
self.active.add(request)
dfd = self.middleware.download(self._enqueue_request, request, spider)
return dfd.addBoth(_deactivate)
def needs_backout(self):
return len(self.active) >= self.total_concurrency
def _get_slot(self, request, spider):
key = self._get_slot_key(request, spider)
if key not in self.slots:
conc = self.ip_concurrency if self.ip_concurrency else self.domain_concurrency
conc, delay = _get_concurrency_delay(conc, spider, self.settings)
self.slots[key] = Slot(conc, delay, self.randomize_delay)
return key, self.slots[key]
def _get_slot_key(self, request, spider):
if 'download_slot' in request.meta:
return request.meta['download_slot']
key = urlparse_cached(request).hostname or ''
if self.ip_concurrency:
key = dnscache.get(key, key)
return key
def _enqueue_request(self, request, spider):
key, slot = self._get_slot(request, spider)
request.meta['download_slot'] = key
def _deactivate(response):
slot.active.remove(request)
return response
slot.active.add(request)
deferred = defer.Deferred().addBoth(_deactivate)
slot.queue.append((request, deferred))
self._process_queue(spider, slot)
return deferred
def _process_queue(self, spider, slot):
if slot.latercall and slot.latercall.active():
return
# Delay queue processing if a download_delay is configured
now = time()
delay = slot.download_delay()
if delay:
penalty = delay - now + slot.lastseen
if penalty > 0:
slot.latercall = reactor.callLater(penalty, self._process_queue, spider, slot)
return
# Process enqueued requests if there are free slots to transfer for this slot
while slot.queue and slot.free_transfer_slots() > 0:
slot.lastseen = now
request, deferred = slot.queue.popleft()
dfd = self._download(slot, request, spider)
dfd.chainDeferred(deferred)
# prevent burst if inter-request delays were configured
if delay:
self._process_queue(spider, slot)
break
def _download(self, slot, request, spider):
# The order is very important for the following deferreds. Do not change!
# 1. Create the download deferred
dfd = mustbe_deferred(self.handlers.download_request, request, spider)
# 2. Notify response_downloaded listeners about the recent download
# before querying queue for next request
def _downloaded(response):
self.signals.send_catch_log(signal=signals.response_downloaded,
response=response,
request=request,
spider=spider)
return response
dfd.addCallback(_downloaded)
# 3. After response arrives, remove the request from transferring
# state to free up the transferring slot so it can be used by the
# following requests (perhaps those which came from the downloader
# middleware itself)
slot.transferring.add(request)
def finish_transferring(_):
slot.transferring.remove(request)
self._process_queue(spider, slot)
return _
return dfd.addBoth(finish_transferring)
def close(self):
self._slot_gc_loop.stop()
for slot in six.itervalues(self.slots):
slot.close()
def _slot_gc(self, age=60):
mintime = time() - age
for key, slot in self.slots.items():
if not slot.active and slot.lastseen + slot.delay < mintime:
self.slots.pop(key).close()
|
bottompawn/kbengine | refs/heads/master | kbe/res/scripts/common/Lib/test/test_plistlib.py | 78 | # Copyright (C) 2003-2013 Python Software Foundation
import unittest
import plistlib
import os
import datetime
import codecs
import binascii
import collections
import struct
from test import support
from io import BytesIO
ALL_FORMATS=(plistlib.FMT_XML, plistlib.FMT_BINARY)
# The testdata is generated using Mac/Tools/plistlib_generate_testdata.py
# (which uses PyObjC to control the Cocoa classes for generating plists)
TESTDATA={
plistlib.FMT_XML: binascii.a2b_base64(b'''
PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPCFET0NU
WVBFIHBsaXN0IFBVQkxJQyAiLS8vQXBwbGUvL0RURCBQTElTVCAxLjAvL0VO
IiAiaHR0cDovL3d3dy5hcHBsZS5jb20vRFREcy9Qcm9wZXJ0eUxpc3QtMS4w
LmR0ZCI+CjxwbGlzdCB2ZXJzaW9uPSIxLjAiPgo8ZGljdD4KCTxrZXk+YUJp
Z0ludDwva2V5PgoJPGludGVnZXI+OTIyMzM3MjAzNjg1NDc3NTc2NDwvaW50
ZWdlcj4KCTxrZXk+YUJpZ0ludDI8L2tleT4KCTxpbnRlZ2VyPjkyMjMzNzIw
MzY4NTQ3NzU4NTI8L2ludGVnZXI+Cgk8a2V5PmFEYXRlPC9rZXk+Cgk8ZGF0
ZT4yMDA0LTEwLTI2VDEwOjMzOjMzWjwvZGF0ZT4KCTxrZXk+YURpY3Q8L2tl
eT4KCTxkaWN0PgoJCTxrZXk+YUZhbHNlVmFsdWU8L2tleT4KCQk8ZmFsc2Uv
PgoJCTxrZXk+YVRydWVWYWx1ZTwva2V5PgoJCTx0cnVlLz4KCQk8a2V5PmFV
bmljb2RlVmFsdWU8L2tleT4KCQk8c3RyaW5nPk3DpHNzaWcsIE1hw588L3N0
cmluZz4KCQk8a2V5PmFub3RoZXJTdHJpbmc8L2tleT4KCQk8c3RyaW5nPiZs
dDtoZWxsbyAmYW1wOyAnaGknIHRoZXJlISZndDs8L3N0cmluZz4KCQk8a2V5
PmRlZXBlckRpY3Q8L2tleT4KCQk8ZGljdD4KCQkJPGtleT5hPC9rZXk+CgkJ
CTxpbnRlZ2VyPjE3PC9pbnRlZ2VyPgoJCQk8a2V5PmI8L2tleT4KCQkJPHJl
YWw+MzIuNTwvcmVhbD4KCQkJPGtleT5jPC9rZXk+CgkJCTxhcnJheT4KCQkJ
CTxpbnRlZ2VyPjE8L2ludGVnZXI+CgkJCQk8aW50ZWdlcj4yPC9pbnRlZ2Vy
PgoJCQkJPHN0cmluZz50ZXh0PC9zdHJpbmc+CgkJCTwvYXJyYXk+CgkJPC9k
aWN0PgoJPC9kaWN0PgoJPGtleT5hRmxvYXQ8L2tleT4KCTxyZWFsPjAuNTwv
cmVhbD4KCTxrZXk+YUxpc3Q8L2tleT4KCTxhcnJheT4KCQk8c3RyaW5nPkE8
L3N0cmluZz4KCQk8c3RyaW5nPkI8L3N0cmluZz4KCQk8aW50ZWdlcj4xMjwv
aW50ZWdlcj4KCQk8cmVhbD4zMi41PC9yZWFsPgoJCTxhcnJheT4KCQkJPGlu
dGVnZXI+MTwvaW50ZWdlcj4KCQkJPGludGVnZXI+MjwvaW50ZWdlcj4KCQkJ
PGludGVnZXI+MzwvaW50ZWdlcj4KCQk8L2FycmF5PgoJPC9hcnJheT4KCTxr
ZXk+YU5lZ2F0aXZlQmlnSW50PC9rZXk+Cgk8aW50ZWdlcj4tODAwMDAwMDAw
MDA8L2ludGVnZXI+Cgk8a2V5PmFOZWdhdGl2ZUludDwva2V5PgoJPGludGVn
ZXI+LTU8L2ludGVnZXI+Cgk8a2V5PmFTdHJpbmc8L2tleT4KCTxzdHJpbmc+
RG9vZGFoPC9zdHJpbmc+Cgk8a2V5PmFuRW1wdHlEaWN0PC9rZXk+Cgk8ZGlj
dC8+Cgk8a2V5PmFuRW1wdHlMaXN0PC9rZXk+Cgk8YXJyYXkvPgoJPGtleT5h
bkludDwva2V5PgoJPGludGVnZXI+NzI4PC9pbnRlZ2VyPgoJPGtleT5uZXN0
ZWREYXRhPC9rZXk+Cgk8YXJyYXk+CgkJPGRhdGE+CgkJUEd4dmRITWdiMlln
WW1sdVlYSjVJR2QxYm1zK0FBRUNBenhzYjNSeklHOW1JR0pwYm1GeWVTQm5k
VzVyCgkJUGdBQkFnTThiRzkwY3lCdlppQmlhVzVoY25rZ1ozVnVhejRBQVFJ
RFBHeHZkSE1nYjJZZ1ltbHVZWEo1CgkJSUdkMWJtcytBQUVDQXp4c2IzUnpJ
RzltSUdKcGJtRnllU0JuZFc1clBnQUJBZ004Ykc5MGN5QnZaaUJpCgkJYVc1
aGNua2daM1Z1YXo0QUFRSURQR3h2ZEhNZ2IyWWdZbWx1WVhKNUlHZDFibXMr
QUFFQ0F6eHNiM1J6CgkJSUc5bUlHSnBibUZ5ZVNCbmRXNXJQZ0FCQWdNOGJH
OTBjeUJ2WmlCaWFXNWhjbmtnWjNWdWF6NEFBUUlECgkJUEd4dmRITWdiMlln
WW1sdVlYSjVJR2QxYm1zK0FBRUNBdz09CgkJPC9kYXRhPgoJPC9hcnJheT4K
CTxrZXk+c29tZURhdGE8L2tleT4KCTxkYXRhPgoJUEdKcGJtRnllU0JuZFc1
clBnPT0KCTwvZGF0YT4KCTxrZXk+c29tZU1vcmVEYXRhPC9rZXk+Cgk8ZGF0
YT4KCVBHeHZkSE1nYjJZZ1ltbHVZWEo1SUdkMWJtcytBQUVDQXp4c2IzUnpJ
RzltSUdKcGJtRnllU0JuZFc1clBnQUJBZ004CgliRzkwY3lCdlppQmlhVzVo
Y25rZ1ozVnVhejRBQVFJRFBHeHZkSE1nYjJZZ1ltbHVZWEo1SUdkMWJtcytB
QUVDQXp4cwoJYjNSeklHOW1JR0pwYm1GeWVTQm5kVzVyUGdBQkFnTThiRzkw
Y3lCdlppQmlhVzVoY25rZ1ozVnVhejRBQVFJRFBHeHYKCWRITWdiMllnWW1s
dVlYSjVJR2QxYm1zK0FBRUNBenhzYjNSeklHOW1JR0pwYm1GeWVTQm5kVzVy
UGdBQkFnTThiRzkwCgljeUJ2WmlCaWFXNWhjbmtnWjNWdWF6NEFBUUlEUEd4
dmRITWdiMllnWW1sdVlYSjVJR2QxYm1zK0FBRUNBdz09Cgk8L2RhdGE+Cgk8
a2V5PsOFYmVucmFhPC9rZXk+Cgk8c3RyaW5nPlRoYXQgd2FzIGEgdW5pY29k
ZSBrZXkuPC9zdHJpbmc+CjwvZGljdD4KPC9wbGlzdD4K'''),
plistlib.FMT_BINARY: binascii.a2b_base64(b'''
YnBsaXN0MDDfEBABAgMEBQYHCAkKCwwNDg8QERITFCgpLzAxMjM0NTc2OFdh
QmlnSW50WGFCaWdJbnQyVWFEYXRlVWFEaWN0VmFGbG9hdFVhTGlzdF8QD2FO
ZWdhdGl2ZUJpZ0ludFxhTmVnYXRpdmVJbnRXYVN0cmluZ1thbkVtcHR5RGlj
dFthbkVtcHR5TGlzdFVhbkludFpuZXN0ZWREYXRhWHNvbWVEYXRhXHNvbWVN
b3JlRGF0YWcAxQBiAGUAbgByAGEAYRN/////////1BQAAAAAAAAAAIAAAAAA
AAAsM0GcuX30AAAA1RUWFxgZGhscHR5bYUZhbHNlVmFsdWVaYVRydWVWYWx1
ZV1hVW5pY29kZVZhbHVlXWFub3RoZXJTdHJpbmdaZGVlcGVyRGljdAgJawBN
AOQAcwBzAGkAZwAsACAATQBhAN9fEBU8aGVsbG8gJiAnaGknIHRoZXJlIT7T
HyAhIiMkUWFRYlFjEBEjQEBAAAAAAACjJSYnEAEQAlR0ZXh0Iz/gAAAAAAAA
pSorLCMtUUFRQhAMoyUmLhADE////+1foOAAE//////////7VkRvb2RhaNCg
EQLYoTZPEPo8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmlu
YXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBv
ZiBiaW5hcnkgZ3Vuaz4AAQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxs
b3RzIG9mIGJpbmFyeSBndW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4A
AQIDPGxvdHMgb2YgYmluYXJ5IGd1bms+AAECAzxsb3RzIG9mIGJpbmFyeSBn
dW5rPgABAgM8bG90cyBvZiBiaW5hcnkgZ3Vuaz4AAQIDTTxiaW5hcnkgZ3Vu
az5fEBdUaGF0IHdhcyBhIHVuaWNvZGUga2V5LgAIACsAMwA8AEIASABPAFUA
ZwB0AHwAiACUAJoApQCuALsAygDTAOQA7QD4AQQBDwEdASsBNgE3ATgBTwFn
AW4BcAFyAXQBdgF/AYMBhQGHAYwBlQGbAZ0BnwGhAaUBpwGwAbkBwAHBAcIB
xQHHAsQC0gAAAAAAAAIBAAAAAAAAADkAAAAAAAAAAAAAAAAAAALs'''),
}
class TestPlistlib(unittest.TestCase):
def tearDown(self):
try:
os.unlink(support.TESTFN)
except:
pass
def _create(self, fmt=None):
pl = dict(
aString="Doodah",
aList=["A", "B", 12, 32.5, [1, 2, 3]],
aFloat = 0.5,
anInt = 728,
aBigInt = 2 ** 63 - 44,
aBigInt2 = 2 ** 63 + 44,
aNegativeInt = -5,
aNegativeBigInt = -80000000000,
aDict=dict(
anotherString="<hello & 'hi' there!>",
aUnicodeValue='M\xe4ssig, Ma\xdf',
aTrueValue=True,
aFalseValue=False,
deeperDict=dict(a=17, b=32.5, c=[1, 2, "text"]),
),
someData = b"<binary gunk>",
someMoreData = b"<lots of binary gunk>\0\1\2\3" * 10,
nestedData = [b"<lots of binary gunk>\0\1\2\3" * 10],
aDate = datetime.datetime(2004, 10, 26, 10, 33, 33),
anEmptyDict = dict(),
anEmptyList = list()
)
pl['\xc5benraa'] = "That was a unicode key."
return pl
def test_create(self):
pl = self._create()
self.assertEqual(pl["aString"], "Doodah")
self.assertEqual(pl["aDict"]["aFalseValue"], False)
def test_io(self):
pl = self._create()
with open(support.TESTFN, 'wb') as fp:
plistlib.dump(pl, fp)
with open(support.TESTFN, 'rb') as fp:
pl2 = plistlib.load(fp)
self.assertEqual(dict(pl), dict(pl2))
self.assertRaises(AttributeError, plistlib.dump, pl, 'filename')
self.assertRaises(AttributeError, plistlib.load, 'filename')
def test_invalid_type(self):
pl = [ object() ]
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
self.assertRaises(TypeError, plistlib.dumps, pl, fmt=fmt)
def test_int(self):
for pl in [0, 2**8-1, 2**8, 2**16-1, 2**16, 2**32-1, 2**32,
2**63-1, 2**64-1, 1, -2**63]:
for fmt in ALL_FORMATS:
with self.subTest(pl=pl, fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertIsInstance(pl2, int)
self.assertEqual(pl, pl2)
data2 = plistlib.dumps(pl2, fmt=fmt)
self.assertEqual(data, data2)
for fmt in ALL_FORMATS:
for pl in (2 ** 64 + 1, 2 ** 127-1, -2**64, -2 ** 127):
with self.subTest(pl=pl, fmt=fmt):
self.assertRaises(OverflowError, plistlib.dumps,
pl, fmt=fmt)
def test_bytes(self):
pl = self._create()
data = plistlib.dumps(pl)
pl2 = plistlib.loads(data)
self.assertNotIsInstance(pl, plistlib._InternalDict)
self.assertEqual(dict(pl), dict(pl2))
data2 = plistlib.dumps(pl2)
self.assertEqual(data, data2)
def test_indentation_array(self):
data = [[[[[[[[{'test': b'aaaaaa'}]]]]]]]]
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_indentation_dict(self):
data = {'1': {'2': {'3': {'4': {'5': {'6': {'7': {'8': {'9': b'aaaaaa'}}}}}}}}}
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_indentation_dict_mix(self):
data = {'1': {'2': [{'3': [[[[[{'test': b'aaaaaa'}]]]]]}]}}
self.assertEqual(plistlib.loads(plistlib.dumps(data)), data)
def test_appleformatting(self):
for use_builtin_types in (True, False):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt, use_builtin_types=use_builtin_types):
pl = plistlib.loads(TESTDATA[fmt],
use_builtin_types=use_builtin_types)
data = plistlib.dumps(pl, fmt=fmt)
self.assertEqual(data, TESTDATA[fmt],
"generated data was not identical to Apple's output")
def test_appleformattingfromliteral(self):
self.maxDiff = None
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
pl = self._create(fmt=fmt)
pl2 = plistlib.loads(TESTDATA[fmt], fmt=fmt)
self.assertEqual(dict(pl), dict(pl2),
"generated data was not identical to Apple's output")
pl2 = plistlib.loads(TESTDATA[fmt])
self.assertEqual(dict(pl), dict(pl2),
"generated data was not identical to Apple's output")
def test_bytesio(self):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
b = BytesIO()
pl = self._create(fmt=fmt)
plistlib.dump(pl, b, fmt=fmt)
pl2 = plistlib.load(BytesIO(b.getvalue()), fmt=fmt)
self.assertEqual(dict(pl), dict(pl2))
pl2 = plistlib.load(BytesIO(b.getvalue()))
self.assertEqual(dict(pl), dict(pl2))
def test_keysort_bytesio(self):
pl = collections.OrderedDict()
pl['b'] = 1
pl['a'] = 2
pl['c'] = 3
for fmt in ALL_FORMATS:
for sort_keys in (False, True):
with self.subTest(fmt=fmt, sort_keys=sort_keys):
b = BytesIO()
plistlib.dump(pl, b, fmt=fmt, sort_keys=sort_keys)
pl2 = plistlib.load(BytesIO(b.getvalue()),
dict_type=collections.OrderedDict)
self.assertEqual(dict(pl), dict(pl2))
if sort_keys:
self.assertEqual(list(pl2.keys()), ['a', 'b', 'c'])
else:
self.assertEqual(list(pl2.keys()), ['b', 'a', 'c'])
def test_keysort(self):
pl = collections.OrderedDict()
pl['b'] = 1
pl['a'] = 2
pl['c'] = 3
for fmt in ALL_FORMATS:
for sort_keys in (False, True):
with self.subTest(fmt=fmt, sort_keys=sort_keys):
data = plistlib.dumps(pl, fmt=fmt, sort_keys=sort_keys)
pl2 = plistlib.loads(data, dict_type=collections.OrderedDict)
self.assertEqual(dict(pl), dict(pl2))
if sort_keys:
self.assertEqual(list(pl2.keys()), ['a', 'b', 'c'])
else:
self.assertEqual(list(pl2.keys()), ['b', 'a', 'c'])
def test_keys_no_string(self):
pl = { 42: 'aNumber' }
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
self.assertRaises(TypeError, plistlib.dumps, pl, fmt=fmt)
b = BytesIO()
self.assertRaises(TypeError, plistlib.dump, pl, b, fmt=fmt)
def test_skipkeys(self):
pl = {
42: 'aNumber',
'snake': 'aWord',
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(
pl, fmt=fmt, skipkeys=True, sort_keys=False)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {'snake': 'aWord'})
fp = BytesIO()
plistlib.dump(
pl, fp, fmt=fmt, skipkeys=True, sort_keys=False)
data = fp.getvalue()
                pl2 = plistlib.loads(data)
self.assertEqual(pl2, {'snake': 'aWord'})
def test_tuple_members(self):
pl = {
'first': (1, 2),
'second': (1, 2),
'third': (3, 4),
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_list_members(self):
pl = {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': [1, 2],
'second': [1, 2],
'third': [3, 4],
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_dict_members(self):
pl = {
'first': {'a': 1},
'second': {'a': 1},
'third': {'b': 2 },
}
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
data = plistlib.dumps(pl, fmt=fmt)
pl2 = plistlib.loads(data)
self.assertEqual(pl2, {
'first': {'a': 1},
'second': {'a': 1},
'third': {'b': 2 },
})
self.assertIsNot(pl2['first'], pl2['second'])
def test_controlcharacters(self):
for i in range(128):
c = chr(i)
testString = "string containing %s" % c
if i >= 32 or c in "\r\n\t":
# \r, \n and \t are the only legal control chars in XML
plistlib.dumps(testString, fmt=plistlib.FMT_XML)
else:
self.assertRaises(ValueError,
plistlib.dumps,
testString)
def test_nondictroot(self):
for fmt in ALL_FORMATS:
with self.subTest(fmt=fmt):
test1 = "abc"
test2 = [1, 2, 3, "abc"]
result1 = plistlib.loads(plistlib.dumps(test1, fmt=fmt))
result2 = plistlib.loads(plistlib.dumps(test2, fmt=fmt))
self.assertEqual(test1, result1)
self.assertEqual(test2, result2)
def test_invalidarray(self):
for i in ["<key>key inside an array</key>",
"<key>key inside an array2</key><real>3</real>",
"<true/><key>key inside an array3</key>"]:
self.assertRaises(ValueError, plistlib.loads,
("<plist><array>%s</array></plist>"%i).encode())
def test_invaliddict(self):
for i in ["<key><true/>k</key><string>compound key</string>",
"<key>single key</key>",
"<string>missing key</string>",
"<key>k1</key><string>v1</string><real>5.3</real>"
"<key>k1</key><key>k2</key><string>double key</string>"]:
self.assertRaises(ValueError, plistlib.loads,
("<plist><dict>%s</dict></plist>"%i).encode())
self.assertRaises(ValueError, plistlib.loads,
("<plist><array><dict>%s</dict></array></plist>"%i).encode())
def test_invalidinteger(self):
self.assertRaises(ValueError, plistlib.loads,
b"<plist><integer>not integer</integer></plist>")
def test_invalidreal(self):
self.assertRaises(ValueError, plistlib.loads,
b"<plist><integer>not real</integer></plist>")
def test_xml_encodings(self):
base = TESTDATA[plistlib.FMT_XML]
for xml_encoding, encoding, bom in [
(b'utf-8', 'utf-8', codecs.BOM_UTF8),
(b'utf-16', 'utf-16-le', codecs.BOM_UTF16_LE),
(b'utf-16', 'utf-16-be', codecs.BOM_UTF16_BE),
# Expat does not support UTF-32
#(b'utf-32', 'utf-32-le', codecs.BOM_UTF32_LE),
#(b'utf-32', 'utf-32-be', codecs.BOM_UTF32_BE),
]:
pl = self._create(fmt=plistlib.FMT_XML)
with self.subTest(encoding=encoding):
data = base.replace(b'UTF-8', xml_encoding)
data = bom + data.decode('utf-8').encode(encoding)
pl2 = plistlib.loads(data)
self.assertEqual(dict(pl), dict(pl2))
def test_nonstandard_refs_size(self):
# Issue #21538: Refs and offsets are 24-bit integers
data = (b'bplist00'
b'\xd1\x00\x00\x01\x00\x00\x02QaQb'
b'\x00\x00\x08\x00\x00\x0f\x00\x00\x11'
b'\x00\x00\x00\x00\x00\x00'
b'\x03\x03'
b'\x00\x00\x00\x00\x00\x00\x00\x03'
b'\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x13')
self.assertEqual(plistlib.loads(data), {'a': 'b'})
class TestPlistlibDeprecated(unittest.TestCase):
def test_io_deprecated(self):
pl_in = {
'key': 42,
'sub': {
'key': 9,
'alt': 'value',
'data': b'buffer',
}
}
pl_out = plistlib._InternalDict({
'key': 42,
'sub': plistlib._InternalDict({
'key': 9,
'alt': 'value',
'data': plistlib.Data(b'buffer'),
})
})
self.addCleanup(support.unlink, support.TESTFN)
with self.assertWarns(DeprecationWarning):
plistlib.writePlist(pl_in, support.TESTFN)
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlist(support.TESTFN)
self.assertEqual(pl_out, pl2)
os.unlink(support.TESTFN)
with open(support.TESTFN, 'wb') as fp:
with self.assertWarns(DeprecationWarning):
plistlib.writePlist(pl_in, fp)
with open(support.TESTFN, 'rb') as fp:
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlist(fp)
self.assertEqual(pl_out, pl2)
def test_bytes_deprecated(self):
pl = {
'key': 42,
'sub': {
'key': 9,
'alt': 'value',
'data': b'buffer',
}
}
with self.assertWarns(DeprecationWarning):
data = plistlib.writePlistToBytes(pl)
with self.assertWarns(DeprecationWarning):
pl2 = plistlib.readPlistFromBytes(data)
self.assertIsInstance(pl2, plistlib._InternalDict)
self.assertEqual(pl2, plistlib._InternalDict(
key=42,
sub=plistlib._InternalDict(
key=9,
alt='value',
data=plistlib.Data(b'buffer'),
)
))
with self.assertWarns(DeprecationWarning):
data2 = plistlib.writePlistToBytes(pl2)
self.assertEqual(data, data2)
def test_dataobject_deprecated(self):
in_data = { 'key': plistlib.Data(b'hello') }
out_data = { 'key': b'hello' }
buf = plistlib.dumps(in_data)
cur = plistlib.loads(buf)
self.assertEqual(cur, out_data)
self.assertNotEqual(cur, in_data)
cur = plistlib.loads(buf, use_builtin_types=False)
self.assertNotEqual(cur, out_data)
self.assertEqual(cur, in_data)
with self.assertWarns(DeprecationWarning):
cur = plistlib.readPlistFromBytes(buf)
self.assertNotEqual(cur, out_data)
self.assertEqual(cur, in_data)
def test_main():
support.run_unittest(TestPlistlib, TestPlistlibDeprecated)
if __name__ == '__main__':
test_main()
|
cloudera/hue | refs/heads/master | apps/spark/setup.py | 31 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
from hueversion import VERSION
setup(
name = "spark",
version = VERSION,
author = "Hue",
url = 'http://github.com/cloudera/hue',
description = "Web UI for submitting Spark applications",
packages = find_packages('src'),
package_dir = {'': 'src'},
install_requires = ['setuptools', 'desktop'],
entry_points = { 'desktop.sdk.application': 'spark=spark' },
)
|
johncollins/metric-learn | refs/heads/master | examples/wine_comparison.py | 1 | import numpy as np
import os
# Read the CSV file, in which the class label is the first column,
# and load it into a numpy array (everything is numeric)
data = np.genfromtxt(os.path.join('data', 'wine', 'wine.csv'), skip_header=True, delimiter=',')
y, X = data[:,0], data[:,1:]
#X = (X - np.mean(X, axis=0)) / np.std(X, axis=0)
# For comparison, do classification using the standard KNN classifier
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier(n_neighbors=3)
knn.fit(X, y)
score = knn.score(X, y)
print 'K Nearest Neighbors Score = %f' % score
# Now we'll learn a metric on the same dataset and see if we can improve
import sys
sys.path.append('..')
from metric_learn.itml.ItmlAlgorithm import ItmlAlgorithm
from metric_learn.ParameterizedKNeighborsClassifier import ParameterizedKNeighborsClassifier
itml_alg = ItmlAlgorithm(X, y, parameters={'constant_factor': 1})
itml = itml_alg.get_metric()
knn = ParameterizedKNeighborsClassifier(M=itml.get_M(), n_neighbors=3)
knn.fit(X, y)
print 'Metric-learned KNN Score = %f' % knn.score(X, y)
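# Note (sketch, not part of the original example): both scores above are
# computed on the training data, so they measure fit rather than
# generalization. A held-out evaluation would look roughly like this; the
# train_test_split import path depends on the scikit-learn version
# (sklearn.model_selection in newer releases):
#
# from sklearn.cross_validation import train_test_split
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
# knn.fit(X_train, y_train)
# print 'Held-out score = %f' % knn.score(X_test, y_test)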
|
BeATz-UnKNoWN/python-for-android | refs/heads/master | python3-alpha/python3-src/Doc/includes/mp_newtype.py | 48 | #
# This module shows how to use arbitrary callables with a subclass of
# `BaseManager`.
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
from multiprocessing import freeze_support
from multiprocessing.managers import BaseManager, BaseProxy
import operator
##
class Foo:
def f(self):
print('you called Foo.f()')
def g(self):
print('you called Foo.g()')
def _h(self):
print('you called Foo._h()')
# A simple generator function
def baz():
for i in range(10):
yield i*i
# Proxy type for generator objects
class GeneratorProxy(BaseProxy):
    _exposed_ = ('__next__',)
    def __iter__(self):
        return self
    def __next__(self):
        return self._callmethod('__next__')
# Function to return the operator module
def get_operator_module():
return operator
##
class MyManager(BaseManager):
pass
# register the Foo class; make `f()` and `g()` accessible via proxy
MyManager.register('Foo1', Foo)
# register the Foo class; make `g()` and `_h()` accessible via proxy
MyManager.register('Foo2', Foo, exposed=('g', '_h'))
# register the generator function baz; use `GeneratorProxy` to make proxies
MyManager.register('baz', baz, proxytype=GeneratorProxy)
# register get_operator_module(); make public functions accessible via proxy
MyManager.register('operator', get_operator_module)
##
def test():
manager = MyManager()
manager.start()
print('-' * 20)
f1 = manager.Foo1()
f1.f()
f1.g()
assert not hasattr(f1, '_h')
assert sorted(f1._exposed_) == sorted(['f', 'g'])
print('-' * 20)
f2 = manager.Foo2()
f2.g()
f2._h()
assert not hasattr(f2, 'f')
assert sorted(f2._exposed_) == sorted(['g', '_h'])
print('-' * 20)
it = manager.baz()
for i in it:
print('<%d>' % i, end=' ')
print()
print('-' * 20)
op = manager.operator()
print('op.add(23, 45) =', op.add(23, 45))
print('op.pow(2, 94) =', op.pow(2, 94))
    # operator.getslice() and operator.repeat() do not exist in Python 3;
    # use the getitem() and mul() equivalents instead.
    print('op.getitem(list(range(10)), 2) =', op.getitem(list(range(10)), 2))
    print('op.mul(list(range(5)), 3) =', op.mul(list(range(5)), 3))
print('op._exposed_ =', op._exposed_)
##
if __name__ == '__main__':
freeze_support()
test()
|
entropy1337/infernal-twin | refs/heads/master | Modules/build/pip/build/lib.linux-i686-2.7/pip/_vendor/progress/__init__.py | 916 | # Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division
from collections import deque
from datetime import timedelta
from math import ceil
from sys import stderr
from time import time
__version__ = '1.2'
class Infinite(object):
file = stderr
sma_window = 10
def __init__(self, *args, **kwargs):
self.index = 0
self.start_ts = time()
self._ts = self.start_ts
self._dt = deque(maxlen=self.sma_window)
for key, val in kwargs.items():
setattr(self, key, val)
def __getitem__(self, key):
if key.startswith('_'):
return None
return getattr(self, key, None)
@property
def avg(self):
return sum(self._dt) / len(self._dt) if self._dt else 0
@property
def elapsed(self):
return int(time() - self.start_ts)
@property
def elapsed_td(self):
return timedelta(seconds=self.elapsed)
def update(self):
pass
def start(self):
pass
def finish(self):
pass
def next(self, n=1):
if n > 0:
now = time()
dt = (now - self._ts) / n
self._dt.append(dt)
self._ts = now
self.index = self.index + n
self.update()
def iter(self, it):
for x in it:
yield x
self.next()
self.finish()
class Progress(Infinite):
def __init__(self, *args, **kwargs):
super(Progress, self).__init__(*args, **kwargs)
self.max = kwargs.get('max', 100)
@property
def eta(self):
return int(ceil(self.avg * self.remaining))
@property
def eta_td(self):
return timedelta(seconds=self.eta)
@property
def percent(self):
return self.progress * 100
@property
def progress(self):
return min(1, self.index / self.max)
@property
def remaining(self):
return max(self.max - self.index, 0)
def start(self):
self.update()
def goto(self, index):
incr = index - self.index
self.next(incr)
def iter(self, it):
try:
self.max = len(it)
except TypeError:
pass
for x in it:
yield x
self.next()
self.finish()
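# Usage sketch (comment only, not part of the module). Assuming a task with a
# known number of steps:
#
#   bar = Progress(max=1000)
#   for _ in range(1000):
#       do_work()          # hypothetical unit of work
#       bar.next()         # updates index, the sliding-window average and eta
#   bar.finish()
#
# Infinite works the same way minus max/eta; both accept keyword overrides
# such as Progress(max=10, sma_window=5) via the **kwargs loop in __init__.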
|
cyberden/CouchPotatoServer | refs/heads/develop | libs/rsa/_version133.py | 177 | """RSA module
Module for calculating large primes, and RSA encryption, decryption,
signing and verification. Includes generating public and private keys.
WARNING: this code implements the mathematics of RSA. It is not suitable for
real-world secure cryptography purposes. It has not been reviewed by a security
expert. It does not include padding of data. There are many ways in which the
output of this module, when used without any modification, can be successfully
attacked.
"""
__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer"
__date__ = "2010-02-05"
__version__ = '1.3.3'
# NOTE: Python's modulo can return negative numbers. We compensate for
# this behaviour using the abs() function
from cPickle import dumps, loads
import base64
import math
import os
import random
import sys
import types
import zlib
from rsa._compat import byte
# Display a warning that this insecure version is imported.
import warnings
warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
% __name__)
def gcd(p, q):
"""Returns the greatest common divisor of p and q
>>> gcd(42, 6)
6
"""
if p<q: return gcd(q, p)
if q == 0: return p
return gcd(q, abs(p%q))
def bytes2int(bytes):
"""Converts a list of bytes or a string to an integer
    >>> (128*256 + 64)*256 + 15
8405007
>>> l = [128, 64, 15]
>>> bytes2int(l)
8405007
"""
if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
raise TypeError("You must pass a string or a list")
# Convert byte stream to integer
integer = 0
for byte in bytes:
integer *= 256
if type(byte) is types.StringType: byte = ord(byte)
integer += byte
return integer
def int2bytes(number):
"""Converts a number to a string of bytes
>>> bytes2int(int2bytes(123456789))
123456789
"""
if not (type(number) is types.LongType or type(number) is types.IntType):
raise TypeError("You must pass a long or an int")
string = ""
while number > 0:
string = "%s%s" % (byte(number & 0xFF), string)
number /= 256
return string
def fast_exponentiation(a, p, n):
"""Calculates r = a^p mod n
"""
result = a % n
remainders = []
while p != 1:
remainders.append(p & 1)
p = p >> 1
while remainders:
rem = remainders.pop()
result = ((a ** rem) * result ** 2) % n
return result
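# Worked check (comment only): fast_exponentiation is right-to-left
# square-and-multiply, so it agrees with Python's builtin 3-argument pow:
#   fast_exponentiation(3, 5, 7) == pow(3, 5, 7) == 3 ** 5 % 7 == 5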
def read_random_int(nbits):
"""Reads a random integer of approximately nbits bits rounded up
to whole bytes"""
nbytes = ceil(nbits/8.)
randomdata = os.urandom(nbytes)
return bytes2int(randomdata)
def ceil(x):
"""ceil(x) -> int(math.ceil(x))"""
return int(math.ceil(x))
def randint(minvalue, maxvalue):
"""Returns a random integer x with minvalue <= x <= maxvalue"""
# Safety - get a lot of random data even if the range is fairly
# small
min_nbits = 32
# The range of the random numbers we need to generate
range = maxvalue - minvalue
# Which is this number of bytes
rangebytes = ceil(math.log(range, 2) / 8.)
# Convert to bits, but make sure it's always at least min_nbits*2
rangebits = max(rangebytes * 8, min_nbits * 2)
# Take a random number of bits between min_nbits and rangebits
nbits = random.randint(min_nbits, rangebits)
return (read_random_int(nbits) % range) + minvalue
def fermat_little_theorem(p):
"""Returns 1 if p may be prime, and something else if p definitely
is not prime"""
a = randint(1, p-1)
return fast_exponentiation(a, p-1, p)
def jacobi(a, b):
"""Calculates the value of the Jacobi symbol (a/b)
"""
if a % b == 0:
return 0
result = 1
while a > 1:
if a & 1:
if ((a-1)*(b-1) >> 2) & 1:
result = -result
b, a = a, b % a
else:
if ((b ** 2 - 1) >> 3) & 1:
result = -result
a = a >> 1
return result
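# Worked example (comment only): jacobi(5, 21) == 1, matching the product of
# Legendre symbols (5/3)*(5/7) = (-1)*(-1) = 1 for the composite modulus 21.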
def jacobi_witness(x, n):
"""Returns False if n is an Euler pseudo-prime with base x, and
True otherwise.
"""
j = jacobi(x, n) % n
f = fast_exponentiation(x, (n-1)/2, n)
if j == f: return False
return True
def randomized_primality_testing(n, k):
"""Calculates whether n is composite (which is always correct) or
prime (which is incorrect with error probability 2**-k)
    Returns False if the number is composite, and True if it's
probably prime.
"""
q = 0.5 # Property of the jacobi_witness function
# t = int(math.ceil(k / math.log(1/q, 2)))
t = ceil(k / math.log(1/q, 2))
for i in range(t+1):
x = randint(1, n-1)
if jacobi_witness(x, n): return False
return True
def is_prime(number):
"""Returns True if the number is prime, and False otherwise.
>>> is_prime(42)
0
>>> is_prime(41)
1
"""
"""
if not fermat_little_theorem(number) == 1:
# Not prime, according to Fermat's little theorem
return False
"""
if randomized_primality_testing(number, 5):
# Prime, according to Jacobi
return True
# Not prime
return False
def getprime(nbits):
"""Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
other words: nbits is rounded up to whole bytes.
>>> p = getprime(8)
>>> is_prime(p-1)
0
>>> is_prime(p)
1
>>> is_prime(p+1)
0
"""
nbytes = int(math.ceil(nbits/8.))
while True:
integer = read_random_int(nbits)
# Make sure it's odd
integer |= 1
# Test for primeness
if is_prime(integer): break
# Retry if not prime
return integer
def are_relatively_prime(a, b):
"""Returns True if a and b are relatively prime, and False if they
are not.
>>> are_relatively_prime(2, 3)
1
>>> are_relatively_prime(2, 4)
0
"""
d = gcd(a, b)
return (d == 1)
def find_p_q(nbits):
"""Returns a tuple of two different primes of nbits bits"""
p = getprime(nbits)
while True:
q = getprime(nbits)
if not q == p: break
return (p, q)
def extended_euclid_gcd(a, b):
"""Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb
"""
if b == 0:
return (a, 1, 0)
q = abs(a % b)
r = long(a / b)
(d, k, l) = extended_euclid_gcd(b, q)
return (d, l, k - l*r)
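# Worked example (comment only): extended_euclid_gcd(120, 23) returns
# (1, -9, 47), and indeed gcd(120, 23) == 1 == (-9)*120 + 47*23.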
# Main function: calculate encryption and decryption keys
def calculate_keys(p, q, nbits):
"""Calculates an encryption and a decryption key for p and q, and
returns them as a tuple (e, d)"""
n = p * q
phi_n = (p-1) * (q-1)
while True:
# Make sure e has enough bits so we ensure "wrapping" through
# modulo n
e = getprime(max(8, nbits/2))
if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
(d, i, j) = extended_euclid_gcd(e, phi_n)
if not d == 1:
raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
if not (e * i) % phi_n == 1:
raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
return (e, i)
def gen_keys(nbits):
"""Generate RSA keys of nbits bits. Returns (p, q, e, d).
Note: this can take a long time, depending on the key size.
"""
while True:
(p, q) = find_p_q(nbits)
(e, d) = calculate_keys(p, q, nbits)
# For some reason, d is sometimes negative. We don't know how
# to fix it (yet), so we keep trying until everything is shiny
if d > 0: break
return (p, q, e, d)
def gen_pubpriv_keys(nbits):
"""Generates public and private keys, and returns them as (pub,
priv).
The public key consists of a dict {e: ..., , n: ....). The private
key consists of a dict {d: ...., p: ...., q: ....).
"""
(p, q, e, d) = gen_keys(nbits)
return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
def encrypt_int(message, ekey, n):
"""Encrypts a message using encryption key 'ekey', working modulo
n"""
if type(message) is types.IntType:
return encrypt_int(long(message), ekey, n)
if not type(message) is types.LongType:
raise TypeError("You must pass a long or an int")
if message > 0 and \
math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)):
raise OverflowError("The message is too long")
return fast_exponentiation(message, ekey, n)
def decrypt_int(cyphertext, dkey, n):
"""Decrypts a cypher text using the decryption key 'dkey', working
modulo n"""
return encrypt_int(cyphertext, dkey, n)
def sign_int(message, dkey, n):
"""Signs 'message' using key 'dkey', working modulo n"""
return decrypt_int(message, dkey, n)
def verify_int(signed, ekey, n):
"""verifies 'signed' using key 'ekey', working modulo n"""
return encrypt_int(signed, ekey, n)
def picklechops(chops):
"""Pickles and base64encodes it's argument chops"""
value = zlib.compress(dumps(chops))
encoded = base64.encodestring(value)
return encoded.strip()
def unpicklechops(string):
"""base64decodes and unpickes it's argument string into chops"""
return loads(zlib.decompress(base64.decodestring(string)))
def chopstring(message, key, n, funcref):
"""Splits 'message' into chops that are at most as long as n,
converts these into integers, and calls funcref(integer, key, n)
for each chop.
Used by 'encrypt' and 'sign'.
"""
msglen = len(message)
mbits = msglen * 8
nbits = int(math.floor(math.log(n, 2)))
nbytes = nbits / 8
blocks = msglen / nbytes
if msglen % nbytes > 0:
blocks += 1
cypher = []
for bindex in range(blocks):
offset = bindex * nbytes
block = message[offset:offset+nbytes]
value = bytes2int(block)
cypher.append(funcref(value, key, n))
return picklechops(cypher)
def gluechops(chops, key, n, funcref):
"""Glues chops back together into a string. calls
funcref(integer, key, n) for each chop.
Used by 'decrypt' and 'verify'.
"""
message = ""
chops = unpicklechops(chops)
for cpart in chops:
mpart = funcref(cpart, key, n)
message += int2bytes(mpart)
return message
def encrypt(message, key):
"""Encrypts a string 'message' with the public key 'key'"""
return chopstring(message, key['e'], key['n'], encrypt_int)
def sign(message, key):
"""Signs a string 'message' with the private key 'key'"""
return chopstring(message, key['d'], key['p']*key['q'], decrypt_int)
def decrypt(cypher, key):
"""Decrypts a cypher with the private key 'key'"""
return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
def verify(cypher, key):
"""Verifies a cypher with the public key 'key'"""
return gluechops(cypher, key['e'], key['n'], encrypt_int)
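# End-to-end sketch (comment only; the key size and message are illustrative,
# and per the module warning this code is unsuitable for real cryptography):
#
#   (pub, priv) = gen_pubpriv_keys(64)
#   crypto = encrypt('hello', pub)
#   assert decrypt(crypto, priv) == 'hello'
#   signature = sign('hello', priv)
#   assert verify(signature, pub) == 'hello'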
# Do doctest if we're not imported
if __name__ == "__main__":
import doctest
doctest.testmod()
__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"]
|
vipulkanade/EventbriteDjango | refs/heads/master | lib/python2.7/encodings/iso2022_jp_2.py | 816 | #
# iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_2')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='iso2022_jp_2',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
|