| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int64 3–1.05M |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.bigip_config import Parameters
from library.bigip_config import ModuleManager
from library.bigip_config import ArgumentSpec
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_config import Parameters
from ansible.modules.network.f5.bigip_config import ModuleManager
from ansible.modules.network.f5.bigip_config import ArgumentSpec
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
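# Hedged sketch (added for illustration, not part of the original suite):
# load_fixture caches each parsed fixture under its absolute path, so repeated
# calls return the same in-memory object. The fixture name is hypothetical.
def _example_fixture_cache_usage():
    first = load_fixture('example_fixture.json')   # first call reads from disk
    second = load_fixture('example_fixture.json')  # second call hits fixture_data
    assert first is second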
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
save='yes',
reset='yes',
merge_content='asdasd',
verify='no',
server='localhost',
user='admin',
password='password'
)
p = Parameters(params=args)
assert p.save == 'yes'
assert p.reset == 'yes'
assert p.merge_content == 'asdasd'
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_run_single_command(self, *args):
set_module_args(dict(
save='yes',
reset='yes',
merge_content='asdasd',
verify='no',
server='localhost',
user='admin',
password='password'
))
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.exit_json = Mock(return_value=True)
mm.reset_device = Mock(return_value='reset output')
mm.upload_to_device = Mock(return_value=True)
mm.move_on_device = Mock(return_value=True)
mm.merge_on_device = Mock(return_value='merge output')
mm.remove_temporary_file = Mock(return_value=True)
mm.save_on_device = Mock(return_value='save output')
results = mm.exec_module()
assert results['changed'] is True
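# Hedged sketch (not part of this suite): set_module_args, as commonly
# implemented in Ansible test utilities, serializes the args dict into the
# private basic._ANSIBLE_ARGS buffer that AnsibleModule reads at construction
# time. A minimal stand-alone equivalent, under that assumption:
def _example_set_module_args(args):
    from ansible.module_utils import basic
    from ansible.module_utils._text import to_bytes
    basic._ANSIBLE_ARGS = to_bytes(json.dumps({'ANSIBLE_MODULE_ARGS': args}))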
| ravibhure/ansible | test/units/modules/network/f5/test_bigip_config.py | Python | gpl-3.0 | 3,507 |
"""This module contains the general information for BiosVfPOSTErrorPause ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class BiosVfPOSTErrorPauseConsts:
VP_POSTERROR_PAUSE_DISABLED = "Disabled"
VP_POSTERROR_PAUSE_ENABLED = "Enabled"
_VP_POSTERROR_PAUSE_DISABLED = "disabled"
_VP_POSTERROR_PAUSE_ENABLED = "enabled"
VP_POSTERROR_PAUSE_PLATFORM_DEFAULT = "platform-default"
class BiosVfPOSTErrorPause(ManagedObject):
"""This is BiosVfPOSTErrorPause class."""
consts = BiosVfPOSTErrorPauseConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("BiosVfPOSTErrorPause", "biosVfPOSTErrorPause", "POST-error-pause", VersionMeta.Version151f, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"modular": MoMeta("BiosVfPOSTErrorPause", "biosVfPOSTErrorPause", "POST-error-pause", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin", "read-only", "user"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"])
}
prop_meta = {
"classic": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_post_error_pause": MoPropertyMeta("vp_post_error_pause", "vpPOSTErrorPause", "string", VersionMeta.Version151f, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version151f, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
},
"modular": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_post_error_pause": MoPropertyMeta("vp_post_error_pause", "vpPOSTErrorPause", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version2013e, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
},
}
prop_map = {
"classic": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpPOSTErrorPause": "vp_post_error_pause",
"childAction": "child_action",
},
"modular": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpPOSTErrorPause": "vp_post_error_pause",
"childAction": "child_action",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.status = None
self.vp_post_error_pause = None
self.child_action = None
ManagedObject.__init__(self, "BiosVfPOSTErrorPause", parent_mo_or_dn, **kwargs)
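# Hedged usage sketch (not part of the upstream SDK file): constructing this
# MO under a parent DN and selecting one of the allowed const values. The DN
# string is a plausible-looking placeholder, not taken from a live IMC.
def _example_post_error_pause():
    mo = BiosVfPOSTErrorPause(parent_mo_or_dn="sys/rack-unit-1/bios/bios-settings")
    mo.vp_post_error_pause = BiosVfPOSTErrorPauseConsts.VP_POSTERROR_PAUSE_ENABLED
    return mo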
| ragupta-git/ImcSdk | imcsdk/mometa/bios/BiosVfPOSTErrorPause.py | Python | apache-2.0 | 3,763 |
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ZapWalletTXesTest (BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
self.is_network_split=False
self.sync_all()
def run_test (self):
print("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(101)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 10000)
txid0 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
txid1 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
txid2 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 11)
txid3 = self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
tx0 = self.nodes[0].gettransaction(txid0)
assert_equal(tx0['txid'], txid0) #tx0 must be available (confirmed)
tx1 = self.nodes[0].gettransaction(txid1)
assert_equal(tx1['txid'], txid1) #tx1 must be available (confirmed)
tx2 = self.nodes[0].gettransaction(txid2)
assert_equal(tx2['txid'], txid2) #tx2 must be available (unconfirmed)
tx3 = self.nodes[0].gettransaction(txid3)
assert_equal(tx3['txid'], txid3) #tx3 must be available (unconfirmed)
#restart bitcoind
self.nodes[0].stop()
bitcoind_processes[0].wait()
self.nodes[0] = start_node(0,self.options.tmpdir)
tx3 = self.nodes[0].gettransaction(txid3)
assert_equal(tx3['txid'], txid3) #tx must be available (unconfirmed)
self.nodes[0].stop()
bitcoind_processes[0].wait()
#restart bitcoind with zapwallettxes
self.nodes[0] = start_node(0,self.options.tmpdir, ["-zapwallettxes=1"])
assert_raises(JSONRPCException, self.nodes[0].gettransaction, txid3)
#there must be an exception because the unconfirmed wallet tx (txid3) must be gone by now
tx0 = self.nodes[0].gettransaction(txid0)
assert_equal(tx0['txid'], txid0) #tx0 (confirmed) must still be available because it was confirmed
if __name__ == '__main__':
ZapWalletTXesTest ().main ()
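# Hedged sketch (added for illustration): assert_raises from
# test_framework.util calls fn(*args) and succeeds only if the given
# exception type is raised. A hand-rolled equivalent of the check used above:
def _assert_raises_sketch(exc_type, fn, *args):
    try:
        fn(*args)
    except exc_type:
        return
    raise AssertionError("%s was not raised" % exc_type.__name__)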
| goldcoin/goldcoin | qa/rpc-tests/zapwallettxes.py | Python | mit | 2,879 |
"""Calendar backend definition."""
from importlib import import_module
from modoboa.lib.cryptutils import decrypt
class CalendarBackend(object):
"""Base backend class."""
def __init__(self, calendar=None):
"""Default constructor."""
self.calendar = calendar
def create_event(self, event):
"""Create a new event."""
raise NotImplementedError
def get_event(self, uid):
"""Retrieve an even using its uid."""
raise NotImplementedError
def get_events(self, start, end):
"""Retrieve a list of event."""
raise NotImplementedError
def delete_event(self, uid):
"""Delete an event using its uid."""
raise NotImplementedError
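# Hedged sketch (not part of the package): a minimal in-memory backend showing
# the contract subclasses are expected to fulfil. Events are assumed to be
# dicts carrying 'uid', 'start' and 'end' keys, purely for illustration. Saved
# as modoboa_radicale/backends/memory.py, get_backend("memory") below would
# resolve it through the "Memory" + "Backend" naming convention.
class MemoryBackend(CalendarBackend):
    """Toy backend storing events in a dict keyed by uid."""

    def __init__(self, calendar=None):
        super(MemoryBackend, self).__init__(calendar)
        self._events = {}

    def create_event(self, event):
        self._events[event["uid"]] = event
        return event["uid"]

    def get_event(self, uid):
        return self._events[uid]

    def get_events(self, start, end):
        return [evt for evt in self._events.values()
                if start <= evt["start"] and evt["end"] <= end]

    def delete_event(self, uid):
        self._events.pop(uid, None)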
def get_backend(name, *args, **kwargs):
"""Return a backend instance."""
module = import_module("modoboa_radicale.backends.{}".format(name))
return getattr(
module, "{}Backend".format(name.capitalize()))(*args, **kwargs)
def get_backend_from_request(name, request, calendar=None):
"""Return a backend instance from a request."""
password = decrypt(request.session["password"])
return get_backend(
name, request.user.email, password, calendar=calendar)
| modoboa/modoboa-radicale | modoboa_radicale/backends/__init__.py | Python | mit | 1,224 |
>>> s = 'How might I make alternating caps in python?'
>>> ''.join([l.upper() if i % 2 != 0 else l for i, l in enumerate(s)])
'HOw mIgHt I mAkE AlTeRnAtInG CaPs iN PyThOn?'
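>>> # Hedged variant (added for illustration): alternate case over letters only,
>>> # so spaces and punctuation don't consume a position in the pattern.
>>> from itertools import count
>>> c = count()
>>> ''.join(l.upper() if l.isalpha() and next(c) % 2 else l for l in s)
'HOw MiGhT I MaKe AlTeRnAtInG cApS iN pYtHoN?'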
| bandarji/lekhan | python/reddit/altcaps.py | Python | apache-2.0 | 173 |
# -*- coding: utf-8 -*-
from south.v2 import DataMigration
class Migration(DataMigration):
depends_on = (
('avocado', '0036_initialize_indexable'),
)
def forwards(self, orm):
"Perform a 'safe' load using Avocado's backup utilities."
from avocado.core import backup
backup.safe_load(u'0002_avocado_metadata', backup_path=None,
using='default')
def backwards(self, orm):
"No backwards migration applicable."
pass
| chop-dbhi/varify | varify/migrations/0002_avocado_metadata_migration.py | Python | bsd-2-clause | 490 |
import logging
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives
from django.core.urlresolvers import reverse
from django.template.loader import render_to_string
from django.utils import timezone
from djblets.siteconfig.models import SiteConfiguration
from reviewboard.accounts.signals import user_registered
from reviewboard.reviews.models import ReviewRequest, Review
from reviewboard.reviews.signals import review_request_published, \
review_published, reply_published
from reviewboard.reviews.views import build_diff_comment_fragments
def review_request_published_cb(sender, user, review_request, changedesc,
**kwargs):
"""
Listens to the ``review_request_published`` signal and sends an
email if this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_review_request(user, review_request, changedesc)
def review_published_cb(sender, user, review, **kwargs):
"""
Listens to the ``review_published`` signal and sends an email if
this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_review(user, review)
def reply_published_cb(sender, user, reply, **kwargs):
"""
Listens to the ``reply_published`` signal and sends an email if
this type of notification is enabled (through
``mail_send_review_mail`` site configuration).
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_review_mail"):
mail_reply(user, reply)
def user_registered_cb(user, **kwargs):
"""
Listens for new user registrations and sends a new user registration
e-mail to administrators, if enabled.
"""
siteconfig = SiteConfiguration.objects.get_current()
if siteconfig.get("mail_send_new_user_mail"):
mail_new_user(user)
def connect_signals():
review_request_published.connect(review_request_published_cb,
sender=ReviewRequest)
review_published.connect(review_published_cb, sender=Review)
reply_published.connect(reply_published_cb, sender=Review)
user_registered.connect(user_registered_cb)
def build_email_address(fullname, email):
if not fullname:
return email
else:
return u'"%s" <%s>' % (fullname, email)
def get_email_address_for_user(u):
return build_email_address(u.get_full_name(), u.email)
def get_email_addresses_for_group(g):
if g.mailing_list:
if g.mailing_list.find(",") == -1:
# The mailing list field has only one e-mail address in it,
# so we can just use that and the group's display name.
return [u'"%s" <%s>' % (g.display_name, g.mailing_list)]
else:
# The mailing list field has multiple e-mail addresses in it.
# We don't know which one should have the group's display name
# attached to it, so just return their custom list as-is.
return g.mailing_list.split(',')
else:
return [get_email_address_for_user(u)
for u in g.users.filter(is_active=True)]
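# Hedged sketch (illustration only, hypothetical group and addresses): the two
# mailing-list branches above. A single address gets the group's display name;
# a comma-separated list is returned as-is.
def _example_group_addresses():
    class FakeGroup(object):
        display_name = 'Reviewers'
        mailing_list = 'dev@example.com,qa@example.com'
    assert get_email_addresses_for_group(FakeGroup()) == [
        'dev@example.com', 'qa@example.com']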
class SpiffyEmailMessage(EmailMultiAlternatives):
"""An EmailMessage subclass with improved header and message ID support.
This also knows about several headers (standard and variations),
including Sender/X-Sender, In-Reply-To/References, and Reply-To.
The generated Message-ID header from the e-mail can be accessed
through the :py:attr:`message_id` attribute after the e-mail is sent.
"""
def __init__(self, subject, text_body, html_body, from_email, sender,
to, cc, in_reply_to, headers={}):
headers = headers.copy()
if sender:
headers['Sender'] = sender
headers['X-Sender'] = sender
if in_reply_to:
headers['In-Reply-To'] = in_reply_to
headers['References'] = in_reply_to
headers['Reply-To'] = from_email
# Mark the mail as 'auto-generated' (according to RFC 3834) to
# hopefully avoid auto replies.
headers['Auto-Submitted'] = 'auto-generated'
headers['From'] = from_email
super(SpiffyEmailMessage, self).__init__(subject, text_body,
settings.DEFAULT_FROM_EMAIL,
to, headers=headers)
self.cc = cc or []
self.message_id = None
self.attach_alternative(html_body, "text/html")
def message(self):
msg = super(SpiffyEmailMessage, self).message()
self.message_id = msg['Message-ID']
return msg
def recipients(self):
"""Returns a list of all recipients of the e-mail. """
return self.to + self.bcc + self.cc
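# Hedged sketch (not part of Review Board): constructing a SpiffyEmailMessage
# directly. All addresses and the message ID are placeholders; send() is not
# called here.
def _example_spiffy_message():
    return SpiffyEmailMessage(
        subject='Re: Review Request 42: Example',
        text_body='plain-text body',
        html_body='<p>HTML body</p>',
        from_email='submitter@example.com',
        sender=None,  # the Sender header is only set when it differs from From
        to=['reviewer@example.com'],
        cc=[],
        in_reply_to='<original-message-id@example.com>',
    )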
def send_review_mail(user, review_request, subject, in_reply_to,
extra_recipients, text_template_name,
html_template_name, context={}):
"""
Formats and sends an e-mail, adding the current domain and the review
request to the template context. Returns the resulting message ID.
"""
current_site = Site.objects.get_current()
from_email = get_email_address_for_user(user)
recipients = set()
to_field = set()
if from_email:
recipients.add(from_email)
if review_request.submitter.is_active:
recipients.add(get_email_address_for_user(review_request.submitter))
for u in review_request.target_people.filter(is_active=True):
recipients.add(get_email_address_for_user(u))
to_field.add(get_email_address_for_user(u))
for group in review_request.target_groups.all():
for address in get_email_addresses_for_group(group):
recipients.add(address)
for profile in review_request.starred_by.all():
if profile.user.is_active:
recipients.add(get_email_address_for_user(profile.user))
if extra_recipients:
for recipient in extra_recipients:
if recipient.is_active:
recipients.add(get_email_address_for_user(recipient))
siteconfig = current_site.config.get()
domain_method = siteconfig.get("site_domain_method")
context['user'] = user
context['domain'] = current_site.domain
context['domain_method'] = domain_method
context['review_request'] = review_request
if review_request.local_site:
context['local_site_name'] = review_request.local_site.name
text_body = render_to_string(text_template_name, context)
html_body = render_to_string(html_template_name, context)
# Set the Cc field only when the To field (i.e. target people) is
# non-empty, so that To holds the reviewers and Cc holds all the
# other recipients.
if to_field:
cc_field = recipients.symmetric_difference(to_field)
else:
to_field = recipients
cc_field = set()
base_url = '%s://%s' % (domain_method, current_site.domain)
headers = {
'X-ReviewBoard-URL': base_url,
'X-ReviewRequest-URL': base_url + review_request.get_absolute_url(),
'X-ReviewGroup': ', '.join(group.name for group in \
review_request.target_groups.all())
}
sender = None
if settings.DEFAULT_FROM_EMAIL:
sender = build_email_address(user.get_full_name(),
settings.DEFAULT_FROM_EMAIL)
if sender == from_email:
# RFC 2822 states that we should only include Sender if the
# two are not equal.
sender = None
message = SpiffyEmailMessage(subject.strip(), text_body, html_body,
from_email, sender, list(to_field),
list(cc_field), in_reply_to, headers)
try:
message.send()
except Exception as e:
logging.error("Error sending e-mail notification with subject '%s' on "
"behalf of '%s' to '%s': %s",
subject.strip(),
from_email,
','.join(list(to_field) + list(cc_field)),
e,
exc_info=1)
return message.message_id
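# Hedged sketch (illustration only): the To/Cc split performed above. When
# explicit reviewers exist, Cc receives everyone else via
# symmetric_difference; otherwise all recipients end up in To.
def _example_to_cc_split():
    recipients = set(['a@example.com', 'b@example.com', 'c@example.com'])
    to_field = set(['a@example.com'])
    cc_field = recipients.symmetric_difference(to_field)
    assert cc_field == set(['b@example.com', 'c@example.com'])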
def mail_review_request(user, review_request, changedesc=None):
"""
Send an e-mail representing the supplied review request.
The "changedesc" argument is an optional ChangeDescription showing
what changed in a review request, possibly with explanatory text from
the submitter. This is created when saving a draft on a public review
request, and will be None when publishing initially. This is used by
the template to add contextual (updated) flags to inform people what
changed.
"""
# If the review request is not yet public or has been discarded, don't send
# any mail.
if not review_request.public or review_request.status == 'D':
return
subject = u"Review Request %d: %s" % (review_request.id, review_request.summary)
reply_message_id = None
if review_request.email_message_id:
# Fancy quoted "replies"
subject = "Re: " + subject
reply_message_id = review_request.email_message_id
extra_recipients = review_request.participants
else:
extra_recipients = None
extra_context = {}
if changedesc:
extra_context['change_text'] = changedesc.text
extra_context['changes'] = changedesc.fields_changed
review_request.time_emailed = timezone.now()
review_request.email_message_id = \
send_review_mail(user, review_request, subject, reply_message_id,
extra_recipients,
'notifications/review_request_email.txt',
'notifications/review_request_email.html',
extra_context)
review_request.save()
def mail_review(user, review):
"""Sends an e-mail representing the supplied review."""
review_request = review.review_request
if not review_request.public:
return
review.ordered_comments = \
review.comments.order_by('filediff', 'first_line')
extra_context = {
'user': user,
'review': review,
}
has_error, extra_context['comment_entries'] = \
build_diff_comment_fragments(
review.ordered_comments, extra_context,
"notifications/email_diff_comment_fragment.html")
review.email_message_id = \
send_review_mail(user,
review_request,
u"Re: Review Request %d: %s" % (review_request.id, review_request.summary),
review_request.email_message_id,
None,
'notifications/review_email.txt',
'notifications/review_email.html',
extra_context)
review.time_emailed = timezone.now()
review.save()
def mail_reply(user, reply):
"""
Sends an e-mail representing the supplied reply to a review.
"""
review = reply.base_reply_to
review_request = review.review_request
if not review_request.public:
return
extra_context = {
'user': user,
'review': review,
'reply': reply,
}
has_error, extra_context['comment_entries'] = \
build_diff_comment_fragments(
reply.comments.order_by('filediff', 'first_line'),
extra_context,
"notifications/email_diff_comment_fragment.html")
reply.email_message_id = \
send_review_mail(user,
review_request,
u"Re: Review Request %d: %s" % (review_request.id, review_request.summary),
review.email_message_id,
review.participants,
'notifications/reply_email.txt',
'notifications/reply_email.html',
extra_context)
reply.time_emailed = timezone.now()
reply.save()
def mail_new_user(user):
"""Sends an e-mail to administrators for newly registered users."""
current_site = Site.objects.get_current()
siteconfig = current_site.config.get()
domain_method = siteconfig.get("site_domain_method")
subject = "New Review Board user registration for %s" % user.username
from_email = get_email_address_for_user(user)
context = {
'domain': current_site.domain,
'domain_method': domain_method,
'user': user,
'user_url': reverse('admin:auth_user_change', args=(user.id,))
}
text_message = render_to_string('notifications/new_user_email.txt', context)
html_message = render_to_string('notifications/new_user_email.html',
context)
message = SpiffyEmailMessage(subject.strip(), text_message, html_message,
settings.SERVER_EMAIL, settings.SERVER_EMAIL,
[build_email_address(*a)
for a in settings.ADMINS], None, None)
try:
message.send()
except Exception as e:
logging.error("Error sending e-mail notification with subject '%s' on "
"behalf of '%s' to admin: %s",
subject.strip(), from_email, e, exc_info=1)
| atagar/ReviewBoard | reviewboard/notifications/email.py | Python | mit | 13,727 |
# -*- coding: utf-8 -*-
"""
Unit tests for instructor.api methods.
"""
import datetime
import ddt
import functools
import random
import pytz
import io
import json
import shutil
import tempfile
from urllib import quote
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from django.http import HttpRequest, HttpResponse
from django.test import RequestFactory, TestCase
from django.test.utils import override_settings
from django.utils.timezone import utc
from django.utils.translation import ugettext as _
from mock import Mock, patch
from nose.tools import raises
from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import UsageKey
from course_modes.models import CourseMode
from courseware.models import StudentModule
from courseware.tests.factories import StaffFactory, InstructorFactory, BetaTesterFactory, UserProfileFactory
from courseware.tests.helpers import LoginEnrollmentTestCase
from django_comment_common.models import FORUM_ROLE_COMMUNITY_TA
from django_comment_common.utils import seed_permissions_roles
from microsite_configuration import microsite
from shoppingcart.models import (
RegistrationCodeRedemption, Order, CouponRedemption,
PaidCourseRegistration, Coupon, Invoice, CourseRegistrationCode, CourseRegistrationCodeInvoiceItem,
InvoiceTransaction)
from shoppingcart.pdf import PDFInvoice
from student.models import (
CourseEnrollment, CourseEnrollmentAllowed, NonExistentCourseError,
ManualEnrollmentAudit, UNENROLLED_TO_ENROLLED, ENROLLED_TO_UNENROLLED,
ALLOWEDTOENROLL_TO_UNENROLLED, ENROLLED_TO_ENROLLED, UNENROLLED_TO_ALLOWEDTOENROLL,
UNENROLLED_TO_UNENROLLED, ALLOWEDTOENROLL_TO_ENROLLED
)
from student.tests.factories import UserFactory, CourseModeFactory, AdminFactory
from student.roles import CourseBetaTesterRole, CourseSalesAdminRole, CourseFinanceAdminRole, CourseInstructorRole
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.fields import Date
from courseware.models import StudentFieldOverride
import instructor_task.api
import instructor.views.api
from instructor.views.api import require_finance_admin
from instructor.tests.utils import FakeContentTask, FakeEmail, FakeEmailInfo
from instructor.views.api import _split_input_list, common_exceptions_400, generate_unique_password
from instructor_task.api_helper import AlreadyRunningError
from certificates.tests.factories import GeneratedCertificateFactory
from certificates.models import CertificateStatuses
from openedx.core.djangoapps.course_groups.cohorts import set_course_cohort_settings
from .test_tools import msk_from_problem_urlname
DATE_FIELD = Date()
EXPECTED_CSV_HEADER = (
'"code","redeem_code_url","course_id","company_name","created_by","redeemed_by","invoice_id","purchaser",'
'"customer_reference_number","internal_reference"'
)
EXPECTED_COUPON_CSV_HEADER = '"Coupon Code","Course Id","% Discount","Description","Expiration Date",' \
'"Is Active","Code Redeemed Count","Total Discounted Seats","Total Discounted Amount"'
# ddt data for test cases involving reports
REPORTS_DATA = (
{
'report_type': 'grade',
'instructor_api_endpoint': 'calculate_grades_csv',
'task_api_endpoint': 'instructor_task.api.submit_calculate_grades_csv',
'extra_instructor_api_kwargs': {}
},
{
'report_type': 'enrolled learner profile',
'instructor_api_endpoint': 'get_students_features',
'task_api_endpoint': 'instructor_task.api.submit_calculate_students_features_csv',
'extra_instructor_api_kwargs': {'csv': '/csv'}
},
{
'report_type': 'detailed enrollment',
'instructor_api_endpoint': 'get_enrollment_report',
'task_api_endpoint': 'instructor_task.api.submit_detailed_enrollment_features_csv',
'extra_instructor_api_kwargs': {}
},
{
'report_type': 'enrollment',
'instructor_api_endpoint': 'get_students_who_may_enroll',
'task_api_endpoint': 'instructor_task.api.submit_calculate_may_enroll_csv',
'extra_instructor_api_kwargs': {},
},
{
'report_type': 'proctored exam results',
'instructor_api_endpoint': 'get_proctored_exam_results',
'task_api_endpoint': 'instructor_task.api.submit_proctored_exam_results_report',
'extra_instructor_api_kwargs': {},
},
{
'report_type': 'problem responses',
'instructor_api_endpoint': 'get_problem_responses',
'task_api_endpoint': 'instructor_task.api.submit_calculate_problem_responses_csv',
'extra_instructor_api_kwargs': {},
}
)
# ddt data for test cases involving executive summary report
EXECUTIVE_SUMMARY_DATA = (
{
'report_type': 'executive summary',
'instructor_api_endpoint': 'get_exec_summary_report',
'task_api_endpoint': 'instructor_task.api.submit_executive_summary_report',
'extra_instructor_api_kwargs': {}
},
)
@common_exceptions_400
def view_success(request): # pylint: disable=unused-argument
"A dummy view for testing that returns a simple HTTP response"
return HttpResponse('success')
@common_exceptions_400
def view_user_doesnotexist(request): # pylint: disable=unused-argument
"A dummy view that raises a User.DoesNotExist exception"
raise User.DoesNotExist()
@common_exceptions_400
def view_alreadyrunningerror(request): # pylint: disable=unused-argument
"A dummy view that raises an AlreadyRunningError exception"
raise AlreadyRunningError()
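# Hedged sketch (not edx-platform code): the general shape of a decorator
# like common_exceptions_400, which maps known exceptions to HTTP 400
# responses so views fail cleanly instead of raising.
def _example_exceptions_to_400(view_func):
    @functools.wraps(view_func)
    def wrapped(request, *args, **kwargs):
        try:
            return view_func(request, *args, **kwargs)
        except User.DoesNotExist:
            return HttpResponse('User does not exist', status=400)
        except AlreadyRunningError:
            return HttpResponse('Task is already running', status=400)
    return wrapped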
@attr('shard_1')
class TestCommonExceptions400(TestCase):
"""
Testing the common_exceptions_400 decorator.
"""
def setUp(self):
super(TestCommonExceptions400, self).setUp()
self.request = Mock(spec=HttpRequest)
self.request.META = {}
def test_happy_path(self):
resp = view_success(self.request)
self.assertEqual(resp.status_code, 200)
def test_user_doesnotexist(self):
self.request.is_ajax.return_value = False
resp = view_user_doesnotexist(self.request) # pylint: disable=assignment-from-no-return
self.assertEqual(resp.status_code, 400)
self.assertIn("User does not exist", resp.content)
def test_user_doesnotexist_ajax(self):
self.request.is_ajax.return_value = True
resp = view_user_doesnotexist(self.request) # pylint: disable=assignment-from-no-return
self.assertEqual(resp.status_code, 400)
result = json.loads(resp.content)
self.assertIn("User does not exist", result["error"])
def test_alreadyrunningerror(self):
self.request.is_ajax.return_value = False
resp = view_alreadyrunningerror(self.request) # pylint: disable=assignment-from-no-return
self.assertEqual(resp.status_code, 400)
self.assertIn("Task is already running", resp.content)
def test_alreadyrunningerror_ajax(self):
self.request.is_ajax.return_value = True
resp = view_alreadyrunningerror(self.request) # pylint: disable=assignment-from-no-return
self.assertEqual(resp.status_code, 400)
result = json.loads(resp.content)
self.assertIn("Task is already running", result["error"])
@attr('shard_1')
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message', autospec=True))
@patch.dict(settings.FEATURES, {'ENABLE_INSTRUCTOR_EMAIL': True, 'REQUIRE_COURSE_EMAIL_AUTH': False})
class TestInstructorAPIDenyLevels(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Ensure that users cannot access endpoints they shouldn't be able to.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPIDenyLevels, cls).setUpClass()
cls.course = CourseFactory.create()
cls.problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-problem-urlname'
)
cls.problem_urlname = cls.problem_location.to_deprecated_string()
def setUp(self):
super(TestInstructorAPIDenyLevels, self).setUp()
self.user = UserFactory.create()
CourseEnrollment.enroll(self.user, self.course.id)
_module = StudentModule.objects.create(
student=self.user,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 10}),
)
# Endpoints that only Staff or Instructors can access
self.staff_level_endpoints = [
('students_update_enrollment',
{'identifiers': '[email protected]', 'action': 'enroll'}),
('get_grading_config', {}),
('get_students_features', {}),
('get_student_progress_url', {'unique_student_identifier': self.user.username}),
('reset_student_attempts',
{'problem_to_reset': self.problem_urlname, 'unique_student_identifier': self.user.email}),
('update_forum_role_membership',
{'unique_student_identifier': self.user.email, 'rolename': 'Moderator', 'action': 'allow'}),
('list_forum_members', {'rolename': FORUM_ROLE_COMMUNITY_TA}),
('send_email', {'send_to': 'staff', 'subject': 'test', 'message': 'asdf'}),
('list_instructor_tasks', {}),
('list_background_email_tasks', {}),
('list_report_downloads', {}),
('list_financial_report_downloads', {}),
('calculate_grades_csv', {}),
('get_students_features', {}),
('get_enrollment_report', {}),
('get_students_who_may_enroll', {}),
('get_exec_summary_report', {}),
('get_proctored_exam_results', {}),
('get_problem_responses', {}),
]
# Endpoints that only Instructors can access
self.instructor_level_endpoints = [
('bulk_beta_modify_access', {'identifiers': '[email protected]', 'action': 'add'}),
('modify_access', {'unique_student_identifier': self.user.email, 'rolename': 'beta', 'action': 'allow'}),
('list_course_role_members', {'rolename': 'beta'}),
('rescore_problem',
{'problem_to_reset': self.problem_urlname, 'unique_student_identifier': self.user.email}),
]
def _access_endpoint(self, endpoint, args, status_code, msg):
"""
Asserts that accessing the given `endpoint` gets a response of `status_code`.
endpoint: string, endpoint for instructor dash API
args: dict, kwargs for `reverse` call
status_code: expected HTTP status code response
msg: message to display if assertion fails.
"""
url = reverse(endpoint, kwargs={'course_id': self.course.id.to_deprecated_string()})
if endpoint in ['send_email', 'students_update_enrollment', 'bulk_beta_modify_access']:
response = self.client.post(url, args)
else:
response = self.client.get(url, args)
self.assertEqual(
response.status_code,
status_code,
msg=msg
)
def test_student_level(self):
"""
Ensure that an enrolled student can't access staff or instructor endpoints.
"""
self.client.login(username=self.user.username, password='test')
for endpoint, args in self.staff_level_endpoints:
self._access_endpoint(
endpoint,
args,
403,
"Student should not be allowed to access endpoint " + endpoint
)
for endpoint, args in self.instructor_level_endpoints:
self._access_endpoint(
endpoint,
args,
403,
"Student should not be allowed to access endpoint " + endpoint
)
def _access_problem_responses_endpoint(self, msg):
"""
Access endpoint for problem responses report, ensuring that
UsageKey.from_string returns a problem key that the endpoint
can work with.
msg: message to display if assertion fails.
"""
mock_problem_key = Mock(return_value=u'')
mock_problem_key.course_key = self.course.id
with patch.object(UsageKey, 'from_string') as patched_method:
patched_method.return_value = mock_problem_key
self._access_endpoint('get_problem_responses', {}, 200, msg)
def test_staff_level(self):
"""
Ensure that a staff member can't access instructor endpoints.
"""
staff_member = StaffFactory(course_key=self.course.id)
CourseEnrollment.enroll(staff_member, self.course.id)
CourseFinanceAdminRole(self.course.id).add_users(staff_member)
self.client.login(username=staff_member.username, password='test')
# Try to promote to forums admin - not working
# update_forum_role(self.course.id, staff_member, FORUM_ROLE_ADMINISTRATOR, 'allow')
for endpoint, args in self.staff_level_endpoints:
# TODO: make these work
if endpoint in ['update_forum_role_membership', 'list_forum_members']:
continue
elif endpoint == 'get_problem_responses':
self._access_problem_responses_endpoint(
"Staff member should be allowed to access endpoint " + endpoint
)
continue
self._access_endpoint(
endpoint,
args,
200,
"Staff member should be allowed to access endpoint " + endpoint
)
for endpoint, args in self.instructor_level_endpoints:
self._access_endpoint(
endpoint,
args,
403,
"Staff member should not be allowed to access endpoint " + endpoint
)
def test_instructor_level(self):
"""
Ensure that an instructor member can access all endpoints.
"""
inst = InstructorFactory(course_key=self.course.id)
CourseEnrollment.enroll(inst, self.course.id)
CourseFinanceAdminRole(self.course.id).add_users(inst)
self.client.login(username=inst.username, password='test')
for endpoint, args in self.staff_level_endpoints:
# TODO: make these work
if endpoint in ['update_forum_role_membership']:
continue
elif endpoint == 'get_problem_responses':
self._access_problem_responses_endpoint(
"Instructor should be allowed to access endpoint " + endpoint
)
continue
self._access_endpoint(
endpoint,
args,
200,
"Instructor should be allowed to access endpoint " + endpoint
)
for endpoint, args in self.instructor_level_endpoints:
# TODO: make this work
if endpoint in ['rescore_problem']:
continue
self._access_endpoint(
endpoint,
args,
200,
"Instructor should be allowed to access endpoint " + endpoint
)
@attr('shard_1')
@patch.dict(settings.FEATURES, {'ALLOW_AUTOMATED_SIGNUPS': True})
class TestInstructorAPIBulkAccountCreationAndEnrollment(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test bulk account creation and enrollment from a CSV file
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPIBulkAccountCreationAndEnrollment, cls).setUpClass()
cls.course = CourseFactory.create()
# Create a course with mode 'audit'
cls.audit_course = CourseFactory.create()
CourseModeFactory(course_id=cls.audit_course.id, mode_slug=CourseMode.AUDIT)
cls.url = reverse(
'register_and_enroll_students', kwargs={'course_id': unicode(cls.course.id)}
)
cls.audit_course_url = reverse(
'register_and_enroll_students', kwargs={'course_id': unicode(cls.audit_course.id)}
)
def setUp(self):
super(TestInstructorAPIBulkAccountCreationAndEnrollment, self).setUp()
# Create a course with mode 'honor' and with price
self.white_label_course = CourseFactory.create()
self.white_label_course_mode = CourseModeFactory(
course_id=self.white_label_course.id,
mode_slug=CourseMode.HONOR,
min_price=10,
suggested_prices='10',
)
self.white_label_course_url = reverse(
'register_and_enroll_students', kwargs={'course_id': unicode(self.white_label_course.id)}
)
self.request = RequestFactory().request()
self.instructor = InstructorFactory(course_key=self.course.id)
self.audit_course_instructor = InstructorFactory(course_key=self.audit_course.id)
self.white_label_course_instructor = InstructorFactory(course_key=self.white_label_course.id)
self.client.login(username=self.instructor.username, password='test')
self.not_enrolled_student = UserFactory(
username='NotEnrolledStudent',
email='[email protected]',
first_name='NotEnrolled',
last_name='Student'
)
@patch('instructor.views.api.log.info')
def test_account_creation_and_enrollment_with_csv(self, info_log):
"""
Happy path test to create a single new user
"""
csv_content = "[email protected],test_student_1,tester1,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# test the log for the email that is sent to the newly created user.
info_log.assert_called_with('email sent to new created user at %s', '[email protected]')
@patch('instructor.views.api.log.info')
def test_account_creation_and_enrollment_with_csv_with_blank_lines(self, info_log):
"""
Happy path test to create a single new user from a CSV containing blank lines
"""
csv_content = "\[email protected],test_student_1,tester1,USA\n\n"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# test the log for the email that is sent to the newly created user.
info_log.assert_called_with('email sent to new created user at %s', '[email protected]')
@patch('instructor.views.api.log.info')
def test_email_and_username_already_exist(self, info_log):
"""
If the email address and username already exist
and the user is enrolled in the course, do nothing (in particular, no email is sent)
"""
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_1,tester2,US"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# test the log message for the already-existing user (no enrollment email is sent).
info_log.assert_called_with(
u"user already exists with username '%s' and email '%s'",
'test_student_1',
'[email protected]'
)
def test_file_upload_type_not_csv(self):
"""
Try uploading some non-CSV file and verify that it is rejected
"""
uploaded_file = SimpleUploadedFile("temp.jpg", io.BytesIO(b"some initial binary data: \x00\x01").read())
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['general_errors']), 0)
self.assertEquals(data['general_errors'][0]['response'], 'Make sure that the file you upload is in CSV format with no extraneous characters or rows.')
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
def test_bad_file_upload_type(self):
"""
Try uploading a file with a .csv extension but unreadable binary content and verify that it is rejected
"""
uploaded_file = SimpleUploadedFile("temp.csv", io.BytesIO(b"some initial binary data: \x00\x01").read())
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['general_errors']), 0)
self.assertEquals(data['general_errors'][0]['response'], 'Could not read uploaded file.')
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
def test_insufficient_data(self):
"""
Try uploading a CSV file which does not have the exact four columns of data
"""
csv_content = "[email protected],test_student_1\n"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 1)
self.assertEquals(data['general_errors'][0]['response'], 'Data in row #1 must have exactly four columns: email, username, full name, and country')
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
def test_invalid_email_in_csv(self):
"""
Test failure case of a poorly formatted email field
"""
csv_content = "test_student.example.com,test_student_1,tester1,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
data = json.loads(response.content)
self.assertEqual(response.status_code, 200)
self.assertNotEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
self.assertEquals(data['row_errors'][0]['response'], 'Invalid email {0}.'.format('test_student.example.com'))
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
@patch('instructor.views.api.log.info')
def test_csv_user_exist_and_not_enrolled(self, info_log):
"""
If the email address and username already exist
and the user is not enrolled in the course, enroll them and move on to the next row.
"""
csv_content = "[email protected],NotEnrolledStudent,tester1,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
info_log.assert_called_with(
u'user %s enrolled in the course %s',
u'NotEnrolledStudent',
self.course.id
)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
def test_user_with_already_existing_email_in_csv(self):
"""
If the email address already exists, but the username is different,
assume it is the correct user and just register the user in the course.
"""
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_2,tester2,US"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
warning_message = 'An account with email {email} exists but the provided username {username} ' \
'is different. Enrolling anyway with {email}.'.format(email='[email protected]', username='test_student_2')
self.assertNotEquals(len(data['warnings']), 0)
self.assertEquals(data['warnings'][0]['response'], warning_message)
user = User.objects.get(email='[email protected]')
self.assertTrue(CourseEnrollment.is_enrolled(user, self.course.id))
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
def test_user_with_already_existing_username_in_csv(self):
"""
If the username already exists (but not the email),
assume it is a different user and fail to create the new account.
"""
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_1,tester2,US"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['row_errors']), 0)
self.assertEquals(data['row_errors'][0]['response'], 'Username {user} already exists.'.format(user='test_student_1'))
def test_csv_file_not_attached(self):
"""
Test when the user does not attach a file
"""
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_1,tester2,US"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'file_not_found': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['general_errors']), 0)
self.assertEquals(data['general_errors'][0]['response'], 'File is not attached.')
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
def test_raising_exception_in_auto_registration_and_enrollment_case(self):
"""
Test that exceptions are handled well
"""
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_1,tester2,US"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
with patch('instructor.views.api.create_manual_course_enrollment') as mock:
mock.side_effect = NonExistentCourseError()
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['row_errors']), 0)
self.assertEquals(data['row_errors'][0]['response'], 'NonExistentCourseError')
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
def test_generate_unique_password(self):
"""
generate_unique_password should generate a unique password string that excludes certain characters.
"""
password = generate_unique_password([], 12)
self.assertEquals(len(password), 12)
for letter in password:
self.assertNotIn(letter, 'aAeEiIoOuU1l')
def test_users_created_and_enrolled_successfully_if_others_fail(self):
csv_content = "[email protected],test_student_1,tester1,USA\n" \
"[email protected],test_student_1,tester3,CA\n" \
"[email protected],test_student_2,tester2,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertNotEquals(len(data['row_errors']), 0)
self.assertEquals(data['row_errors'][0]['response'], 'Username {user} already exists.'.format(user='test_student_1'))
self.assertTrue(User.objects.filter(username='test_student_1', email='[email protected]').exists())
self.assertTrue(User.objects.filter(username='test_student_2', email='[email protected]').exists())
self.assertFalse(User.objects.filter(email='[email protected]').exists())
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 2)
@patch.object(instructor.views.api, 'generate_random_string',
Mock(side_effect=['first', 'first', 'second']))
def test_generate_unique_password_no_reuse(self):
"""
generate_unique_password should generate a unique password string that hasn't been generated before.
"""
generated_password = ['first']
password = generate_unique_password(generated_password, 12)
self.assertNotEquals(password, 'first')
@patch.dict(settings.FEATURES, {'ALLOW_AUTOMATED_SIGNUPS': False})
def test_allow_automated_signups_flag_not_set(self):
csv_content = "[email protected],test_student_1,tester1,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.url, {'students_list': uploaded_file})
self.assertEquals(response.status_code, 403)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
@patch.dict(settings.FEATURES, {'ALLOW_AUTOMATED_SIGNUPS': True})
def test_audit_enrollment_mode(self):
"""
Test that enrollment mode for audit courses (paid courses) is 'audit'.
"""
# Login Audit Course instructor
self.client.login(username=self.audit_course_instructor.username, password='test')
csv_content = "[email protected],test_student_wl,Test Student,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.audit_course_url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# Verify enrollment modes to be 'audit'
for enrollment in manual_enrollments:
self.assertEqual(enrollment.enrollment.mode, CourseMode.AUDIT)
@patch.dict(settings.FEATURES, {'ALLOW_AUTOMATED_SIGNUPS': True})
def test_honor_enrollment_mode(self):
"""
Test that enrollment mode for unpaid honor courses is 'honor'.
"""
# Remove white label course price
self.white_label_course_mode.min_price = 0
self.white_label_course_mode.suggested_prices = ''
self.white_label_course_mode.save() # pylint: disable=no-member
# Login Audit Course instructor
self.client.login(username=self.white_label_course_instructor.username, password='test')
csv_content = "[email protected],test_student_wl,Test Student,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.white_label_course_url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# Verify enrollment modes to be 'honor'
for enrollment in manual_enrollments:
self.assertEqual(enrollment.enrollment.mode, CourseMode.HONOR)
@patch.dict(settings.FEATURES, {'ALLOW_AUTOMATED_SIGNUPS': True})
def test_default_shopping_cart_enrollment_mode_for_white_label(self):
"""
Test that enrollment mode for white label courses (paid courses) is DEFAULT_SHOPPINGCART_MODE_SLUG.
"""
# Login white label course instructor
self.client.login(username=self.white_label_course_instructor.username, password='test')
csv_content = "[email protected],test_student_wl,Test Student,USA"
uploaded_file = SimpleUploadedFile("temp.csv", csv_content)
response = self.client.post(self.white_label_course_url, {'students_list': uploaded_file})
self.assertEqual(response.status_code, 200)
data = json.loads(response.content)
self.assertEquals(len(data['row_errors']), 0)
self.assertEquals(len(data['warnings']), 0)
self.assertEquals(len(data['general_errors']), 0)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
# Verify enrollment modes to be CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG
for enrollment in manual_enrollments:
self.assertEqual(enrollment.enrollment.mode, CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG)
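# Hedged sketch (not the real helper): the contract exercised by the
# generate_unique_password tests in the class above: fixed length, no
# ambiguous characters, and no reuse of previously generated values.
def _example_unique_password(generated, length=12):
    alphabet = 'bcdfghjkmnpqrstvwxyzBCDFGHJKMNPQRSTVWXYZ23456789'
    while True:
        candidate = ''.join(random.choice(alphabet) for _ in range(length))
        if candidate not in generated:
            generated.append(candidate)
            return candidate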
@attr('shard_1')
@ddt.ddt
class TestInstructorAPIEnrollment(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test enrollment modification endpoint.
This test does NOT exhaustively test state changes; that is the
job of test_enrollment. This tests the response and the action switch.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPIEnrollment, cls).setUpClass()
cls.course = CourseFactory.create()
# Email URL values
cls.site_name = microsite.get_value(
'SITE_NAME',
settings.SITE_NAME
)
cls.about_path = '/courses/{}/about'.format(cls.course.id)
cls.course_path = '/courses/{}/'.format(cls.course.id)
def setUp(self):
super(TestInstructorAPIEnrollment, self).setUp()
self.request = RequestFactory().request()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.enrolled_student = UserFactory(username='EnrolledStudent', first_name='Enrolled', last_name='Student')
CourseEnrollment.enroll(
self.enrolled_student,
self.course.id
)
self.notenrolled_student = UserFactory(username='NotEnrolledStudent', first_name='NotEnrolled',
last_name='Student')
# Create invited, but not registered, user
cea = CourseEnrollmentAllowed(email='[email protected]', course_id=self.course.id)
cea.save()
self.allowed_email = '[email protected]'
self.notregistered_email = '[email protected]'
self.assertEqual(User.objects.filter(email=self.notregistered_email).count(), 0)
# uncomment to enable printing of large diffs
# from failed assertions in the event of a test failure.
# (comment because pylint C0103(invalid-name))
# self.maxDiff = None
def test_missing_params(self):
""" Test missing all query parameters. """
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url)
self.assertEqual(response.status_code, 400)
def test_bad_action(self):
""" Test with an invalid action. """
action = 'robot-not-an-action'
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.enrolled_student.email, 'action': action})
self.assertEqual(response.status_code, 400)
def test_invalid_email(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': 'percivaloctavius@', 'action': 'enroll', 'email_students': False})
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "enroll",
'auto_enroll': False,
"results": [
{
"identifier": 'percivaloctavius@',
"invalidIdentifier": True,
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_invalid_username(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url,
{'identifiers': 'percivaloctavius', 'action': 'enroll', 'email_students': False})
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "enroll",
'auto_enroll': False,
"results": [
{
"identifier": 'percivaloctavius',
"invalidIdentifier": True,
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_enroll_with_username(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.username, 'action': 'enroll',
'email_students': False})
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "enroll",
'auto_enroll': False,
"results": [
{
"identifier": self.notenrolled_student.username,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": False,
},
"after": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_enroll_without_email(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.email, 'action': 'enroll',
'email_students': False})
print "type(self.notenrolled_student.email): {}".format(type(self.notenrolled_student.email))
self.assertEqual(response.status_code, 200)
# test that the user is now enrolled
user = User.objects.get(email=self.notenrolled_student.email)
self.assertTrue(CourseEnrollment.is_enrolled(user, self.course.id))
# test the response data
expected = {
"action": "enroll",
"auto_enroll": False,
"results": [
{
"identifier": self.notenrolled_student.email,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": False,
},
"after": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
@ddt.data('http', 'https')
def test_enroll_with_email(self, protocol):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notenrolled_student.email, 'action': 'enroll', 'email_students': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
print "type(self.notenrolled_student.email): {}".format(type(self.notenrolled_student.email))
self.assertEqual(response.status_code, 200)
# test that the user is now enrolled
user = User.objects.get(email=self.notenrolled_student.email)
self.assertTrue(CourseEnrollment.is_enrolled(user, self.course.id))
# test the response data
expected = {
"action": "enroll",
"auto_enroll": False,
"results": [
{
"identifier": self.notenrolled_student.email,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": False,
},
"after": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": False,
}
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
u'You have been enrolled in {}'.format(self.course.display_name)
)
self.assertEqual(
mail.outbox[0].body,
"Dear NotEnrolled Student\n\nYou have been enrolled in {} "
"at edx.org by a member of the course staff. "
"The course should now appear on your edx.org dashboard.\n\n"
"To start accessing course materials, please visit "
"{proto}://{site}{course_path}\n\n----\n"
"This email was automatically sent from edx.org to NotEnrolled Student".format(
self.course.display_name,
proto=protocol, site=self.site_name, course_path=self.course_path
)
)
@ddt.data('http', 'https')
def test_enroll_with_email_not_registered(self, protocol):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ALLOWEDTOENROLL)
self.assertEqual(response.status_code, 200)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
u'You have been invited to register for {}'.format(self.course.display_name)
)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {} at edx.org by a member of the course staff.\n\n"
"To finish your registration, please visit {proto}://{site}/register and fill out the "
"registration form making sure to use [email protected] in the E-mail field.\n"
"Once you have registered and activated your account, "
"visit {proto}://{site}{about_path} to join the course.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
self.course.display_name, proto=protocol, site=self.site_name, about_path=self.about_path
)
)
@ddt.data('http', 'https')
@patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
def test_enroll_email_not_registered_mktgsite(self, protocol):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ALLOWEDTOENROLL)
self.assertEqual(response.status_code, 200)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {display_name}"
" at edx.org by a member of the course staff.\n\n"
"To finish your registration, please visit {proto}://{site}/register and fill out the registration form "
"making sure to use [email protected] in the E-mail field.\n"
"You can then enroll in {display_name}.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
display_name=self.course.display_name, proto=protocol, site=self.site_name
)
)
@ddt.data('http', 'https')
def test_enroll_with_email_not_registered_autoenroll(self, protocol):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': True,
'auto_enroll': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
print "type(self.notregistered_email): {}".format(type(self.notregistered_email))
self.assertEqual(response.status_code, 200)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
u'You have been invited to register for {}'.format(self.course.display_name)
)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ALLOWEDTOENROLL)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {display_name}"
" at edx.org by a member of the course staff.\n\n"
"To finish your registration, please visit {proto}://{site}/register and fill out the registration form "
"making sure to use [email protected] in the E-mail field.\n"
"Once you have registered and activated your account,"
" you will see {display_name} listed on your dashboard.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
proto=protocol, site=self.site_name, display_name=self.course.display_name
)
)
def test_unenroll_without_email(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.enrolled_student.email, 'action': 'unenroll',
'email_students': False})
print "type(self.enrolled_student.email): {}".format(type(self.enrolled_student.email))
self.assertEqual(response.status_code, 200)
# test that the user is now unenrolled
user = User.objects.get(email=self.enrolled_student.email)
self.assertFalse(CourseEnrollment.is_enrolled(user, self.course.id))
# test the response data
expected = {
"action": "unenroll",
"auto_enroll": False,
"results": [
{
"identifier": self.enrolled_student.email,
"before": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": False,
},
"after": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, ENROLLED_TO_UNENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
def test_unenroll_with_email(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.enrolled_student.email, 'action': 'unenroll',
'email_students': True})
print "type(self.enrolled_student.email): {}".format(type(self.enrolled_student.email))
self.assertEqual(response.status_code, 200)
# test that the user is now unenrolled
user = User.objects.get(email=self.enrolled_student.email)
self.assertFalse(CourseEnrollment.is_enrolled(user, self.course.id))
# test the response data
expected = {
"action": "unenroll",
"auto_enroll": False,
"results": [
{
"identifier": self.enrolled_student.email,
"before": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": False,
},
"after": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, ENROLLED_TO_UNENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been un-enrolled from {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
"Dear Enrolled Student\n\nYou have been un-enrolled in {display_name} "
"at edx.org by a member of the course staff. "
"The course will no longer appear on your edx.org dashboard.\n\n"
"Your other courses have not been affected.\n\n----\n"
"This email was automatically sent from edx.org to Enrolled Student".format(
display_name=self.course.display_name,
)
)
def test_unenroll_with_email_allowed_student(self):
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url,
{'identifiers': self.allowed_email, 'action': 'unenroll', 'email_students': True})
print "type(self.allowed_email): {}".format(type(self.allowed_email))
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "unenroll",
"auto_enroll": False,
"results": [
{
"identifier": self.allowed_email,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": False,
"allowed": True,
},
"after": {
"enrollment": False,
"auto_enroll": False,
"user": False,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, ALLOWEDTOENROLL_TO_UNENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been un-enrolled from {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
"Dear Student,\n\nYou have been un-enrolled from course {display_name} by a member of the course staff. "
"Please disregard the invitation previously sent.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
display_name=self.course.display_name,
)
)
@ddt.data('http', 'https')
@patch('instructor.enrollment.uses_shib')
def test_enroll_with_email_not_registered_with_shib(self, protocol, mock_uses_shib):
mock_uses_shib.return_value = True
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
self.assertEqual(response.status_code, 200)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been invited to register for {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {display_name} at edx.org by a member of the course staff.\n\n"
"To access the course visit {proto}://{site}{about_path} and register for the course.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
proto=protocol, site=self.site_name, about_path=self.about_path,
display_name=self.course.display_name,
)
)
@patch('instructor.enrollment.uses_shib')
@patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
def test_enroll_email_not_registered_shib_mktgsite(self, mock_uses_shib):
# Try with marketing site enabled and shib on
mock_uses_shib.return_value = True
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
# Try with marketing site enabled
with patch.dict('django.conf.settings.FEATURES', {'ENABLE_MKTG_SITE': True}):
response = self.client.post(url, {'identifiers': self.notregistered_email, 'action': 'enroll',
'email_students': True})
self.assertEqual(response.status_code, 200)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {} at edx.org by a member of the course staff.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
self.course.display_name,
)
)
@ddt.data('http', 'https')
@patch('instructor.enrollment.uses_shib')
def test_enroll_with_email_not_registered_with_shib_autoenroll(self, protocol, mock_uses_shib):
mock_uses_shib.return_value = True
url = reverse('students_update_enrollment', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': True,
'auto_enroll': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
print "type(self.notregistered_email): {}".format(type(self.notregistered_email))
self.assertEqual(response.status_code, 200)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been invited to register for {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
"Dear student,\n\nYou have been invited to join {display_name}"
" at edx.org by a member of the course staff.\n\n"
"To access the course visit {proto}://{site}{course_path} and login.\n\n----\n"
"This email was automatically sent from edx.org to [email protected]".format(
display_name=self.course.display_name,
proto=protocol, site=self.site_name, course_path=self.course_path
)
)
def test_enroll_already_enrolled_student(self):
"""
Ensure that already enrolled "verified" students cannot be downgraded
to "honor"
"""
course_enrollment = CourseEnrollment.objects.get(
user=self.enrolled_student, course_id=self.course.id
)
# make this enrollment "verified"
course_enrollment.mode = u'verified'
course_enrollment.save()
self.assertEqual(course_enrollment.mode, u'verified')
# now re-enroll the student through the instructor dash
self._change_student_enrollment(self.enrolled_student, self.course, 'enroll')
# affirm that the student is still in "verified" mode
course_enrollment = CourseEnrollment.objects.get(
user=self.enrolled_student, course_id=self.course.id
)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, ENROLLED_TO_ENROLLED)
self.assertEqual(course_enrollment.mode, u"verified")
def create_paid_course(self):
"""
Create a course with a paid course mode.
"""
paid_course = CourseFactory.create()
CourseModeFactory.create(course_id=paid_course.id, min_price=50, mode_slug=CourseMode.HONOR)
CourseInstructorRole(paid_course.id).add_users(self.instructor)
return paid_course
def test_reason_field_should_not_be_empty(self):
"""
Test that the reason field must not be empty when
manually enrolling students in paid courses.
"""
paid_course = self.create_paid_course()
url = reverse('students_update_enrollment', kwargs={'course_id': paid_course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': False,
'auto_enroll': False}
response = self.client.post(url, params)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 0)
# test the response data
expected = {
"action": "enroll",
"auto_enroll": False,
"results": [
{
"error": True
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_unenrolled_allowed_to_enroll_user(self):
"""
Test enrolling a user who is initially only allowed to enroll, then enrolling them again once registered.
"""
paid_course = self.create_paid_course()
url = reverse('students_update_enrollment', kwargs={'course_id': paid_course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': False,
'auto_enroll': False, 'reason': 'testing..'}
response = self.client.post(url, params)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_ALLOWEDTOENROLL)
self.assertEqual(response.status_code, 200)
# now register the user
UserFactory(email=self.notregistered_email)
url = reverse('students_update_enrollment', kwargs={'course_id': paid_course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'enroll', 'email_students': False,
'auto_enroll': False, 'reason': 'testing'}
response = self.client.post(url, params)
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 2)
self.assertEqual(manual_enrollments[1].state_transition, ALLOWEDTOENROLL_TO_ENROLLED)
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "enroll",
"auto_enroll": False,
"results": [
{
"identifier": self.notregistered_email,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": True,
"allowed": True,
},
"after": {
"enrollment": True,
"auto_enroll": False,
"user": True,
"allowed": True,
}
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_unenrolled_already_not_enrolled_user(self):
"""
Test unenrolling a user who is not enrolled in the course.
"""
paid_course = self.create_paid_course()
course_enrollment = CourseEnrollment.objects.filter(
user__email=self.notregistered_email, course_id=paid_course.id
)
self.assertEqual(course_enrollment.count(), 0)
url = reverse('students_update_enrollment', kwargs={'course_id': paid_course.id.to_deprecated_string()})
params = {'identifiers': self.notregistered_email, 'action': 'unenroll', 'email_students': False,
'auto_enroll': False, 'reason': 'testing'}
response = self.client.post(url, params)
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "unenroll",
"auto_enroll": False,
"results": [
{
"identifier": self.notregistered_email,
"before": {
"enrollment": False,
"auto_enroll": False,
"user": False,
"allowed": False,
},
"after": {
"enrollment": False,
"auto_enroll": False,
"user": False,
"allowed": False,
}
}
]
}
manual_enrollments = ManualEnrollmentAudit.objects.all()
self.assertEqual(manual_enrollments.count(), 1)
self.assertEqual(manual_enrollments[0].state_transition, UNENROLLED_TO_UNENROLLED)
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_unenroll_and_enroll_verified(self):
"""
Test that unenrolling and enrolling a student from a verified track
results in that student being in the default track
"""
course_enrollment = CourseEnrollment.objects.get(
user=self.enrolled_student, course_id=self.course.id
)
# upgrade enrollment
course_enrollment.mode = u'verified'
course_enrollment.save()
self.assertEqual(course_enrollment.mode, u'verified')
self._change_student_enrollment(self.enrolled_student, self.course, 'unenroll')
self._change_student_enrollment(self.enrolled_student, self.course, 'enroll')
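# Re-enrolling through the instructor dash should place the student in the default track, not back in 'verified'.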
course_enrollment = CourseEnrollment.objects.get(
user=self.enrolled_student, course_id=self.course.id
)
self.assertEqual(course_enrollment.mode, CourseMode.DEFAULT_MODE_SLUG)
def _change_student_enrollment(self, user, course, action):
"""
Helper function that posts to 'students_update_enrollment' to change
a student's enrollment
"""
url = reverse(
'students_update_enrollment',
kwargs={'course_id': course.id.to_deprecated_string()},
)
params = {
'identifiers': user.email,
'action': action,
'email_students': True,
'reason': 'change user enrollment'
}
response = self.client.post(url, params)
self.assertEqual(response.status_code, 200)
return response
@attr('shard_1')
@ddt.ddt
class TestInstructorAPIBulkBetaEnrollment(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test bulk beta modify access endpoint.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPIBulkBetaEnrollment, cls).setUpClass()
cls.course = CourseFactory.create()
# Email URL values
cls.site_name = microsite.get_value(
'SITE_NAME',
settings.SITE_NAME
)
cls.about_path = '/courses/{}/about'.format(cls.course.id)
cls.course_path = '/courses/{}/'.format(cls.course.id)
def setUp(self):
super(TestInstructorAPIBulkBetaEnrollment, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.beta_tester = BetaTesterFactory(course_key=self.course.id)
CourseEnrollment.enroll(
self.beta_tester,
self.course.id
)
self.assertTrue(CourseBetaTesterRole(self.course.id).has_user(self.beta_tester))
self.notenrolled_student = UserFactory(username='NotEnrolledStudent')
self.notregistered_email = '[email protected]'
self.assertEqual(User.objects.filter(email=self.notregistered_email).count(), 0)
self.request = RequestFactory().request()
# uncomment to enable printing of large diffs
# from failed assertions in the event of a test failure.
# (commented out because of pylint C0103(invalid-name))
# self.maxDiff = None
def test_missing_params(self):
""" Test missing all query parameters. """
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url)
self.assertEqual(response.status_code, 400)
def test_bad_action(self):
""" Test with an invalid action. """
action = 'robot-not-an-action'
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.beta_tester.email, 'action': action})
self.assertEqual(response.status_code, 400)
def add_notenrolled(self, response, identifier):
"""
Test Helper Method (not a test, called by other tests)
Takes a client response from a call to bulk_beta_modify_access with 'email_students': False,
and the student identifier (email or username) given as 'identifiers' in the request.
Asserts the response returns cleanly, that the student was added as a beta tester, and the
response properly contains their identifier, 'error': False, and 'userDoesNotExist': False.
Additionally asserts no email was sent.
"""
self.assertEqual(response.status_code, 200)
self.assertTrue(CourseBetaTesterRole(self.course.id).has_user(self.notenrolled_student))
# test the response data
expected = {
"action": "add",
"results": [
{
"identifier": identifier,
"error": False,
"userDoesNotExist": False
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
def test_add_notenrolled_email(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.email, 'action': 'add', 'email_students': False})
self.add_notenrolled(response, self.notenrolled_student.email)
self.assertFalse(CourseEnrollment.is_enrolled(self.notenrolled_student, self.course.id))
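# With auto_enroll=True, adding a beta tester also enrolls them in the course.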
def test_add_notenrolled_email_autoenroll(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.email, 'action': 'add', 'email_students': False, 'auto_enroll': True})
self.add_notenrolled(response, self.notenrolled_student.email)
self.assertTrue(CourseEnrollment.is_enrolled(self.notenrolled_student, self.course.id))
def test_add_notenrolled_username(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.username, 'action': 'add', 'email_students': False})
self.add_notenrolled(response, self.notenrolled_student.username)
self.assertFalse(CourseEnrollment.is_enrolled(self.notenrolled_student, self.course.id))
def test_add_notenrolled_username_autoenroll(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.username, 'action': 'add', 'email_students': False, 'auto_enroll': True})
self.add_notenrolled(response, self.notenrolled_student.username)
self.assertTrue(CourseEnrollment.is_enrolled(self.notenrolled_student, self.course.id))
@ddt.data('http', 'https')
def test_add_notenrolled_with_email(self, protocol):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notenrolled_student.email, 'action': 'add', 'email_students': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
self.assertEqual(response.status_code, 200)
self.assertTrue(CourseBetaTesterRole(self.course.id).has_user(self.notenrolled_student))
# test the response data
expected = {
"action": "add",
"results": [
{
"identifier": self.notenrolled_student.email,
"error": False,
"userDoesNotExist": False
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been invited to a beta test for {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
u"Dear {student_name}\n\nYou have been invited to be a beta tester "
"for {display_name} at edx.org by a member of the course staff.\n\n"
"Visit {proto}://{site}{about_path} to join "
"the course and begin the beta test.\n\n----\n"
"This email was automatically sent from edx.org to {student_email}".format(
display_name=self.course.display_name,
student_name=self.notenrolled_student.profile.name,
student_email=self.notenrolled_student.email,
proto=protocol,
site=self.site_name,
about_path=self.about_path
)
)
@ddt.data('http', 'https')
def test_add_notenrolled_with_email_autoenroll(self, protocol):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
params = {'identifiers': self.notenrolled_student.email, 'action': 'add', 'email_students': True,
'auto_enroll': True}
environ = {'wsgi.url_scheme': protocol}
response = self.client.post(url, params, **environ)
self.assertEqual(response.status_code, 200)
self.assertTrue(CourseBetaTesterRole(self.course.id).has_user(self.notenrolled_student))
# test the response data
expected = {
"action": "add",
"results": [
{
"identifier": self.notenrolled_student.email,
"error": False,
"userDoesNotExist": False
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
'You have been invited to a beta test for {display_name}'.format(display_name=self.course.display_name)
)
self.assertEqual(
mail.outbox[0].body,
u"Dear {student_name}\n\nYou have been invited to be a beta tester "
"for {display_name} at edx.org by a member of the course staff.\n\n"
"To start accessing course materials, please visit "
"{proto}://{site}{course_path}\n\n----\n"
"This email was automatically sent from edx.org to {student_email}".format(
display_name=self.course.display_name,
student_name=self.notenrolled_student.profile.name,
student_email=self.notenrolled_student.email,
proto=protocol,
site=self.site_name,
course_path=self.course_path
)
)
@patch.dict(settings.FEATURES, {'ENABLE_MKTG_SITE': True})
def test_add_notenrolled_email_mktgsite(self):
# Try with marketing site enabled
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {'identifiers': self.notenrolled_student.email, 'action': 'add', 'email_students': True})
self.assertEqual(response.status_code, 200)
self.assertEqual(
mail.outbox[0].body,
u"Dear {}\n\nYou have been invited to be a beta tester "
"for {} at edx.org by a member of the course staff.\n\n"
"Visit edx.org to enroll in the course and begin the beta test.\n\n----\n"
"This email was automatically sent from edx.org to {}".format(
self.notenrolled_student.profile.name,
self.course.display_name,
self.notenrolled_student.email,
)
)
def test_enroll_with_email_not_registered(self):
# User doesn't exist
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url,
{'identifiers': self.notregistered_email, 'action': 'add', 'email_students': True,
'reason': 'testing'})
self.assertEqual(response.status_code, 200)
# test the response data
expected = {
"action": "add",
"results": [
{
"identifier": self.notregistered_email,
"error": True,
"userDoesNotExist": True
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
def test_remove_without_email(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url,
{'identifiers': self.beta_tester.email, 'action': 'remove', 'email_students': False,
'reason': 'testing'})
self.assertEqual(response.status_code, 200)
# Works around a caching bug which supposedly can't happen in prod. The instance here is not ==
# the instance fetched from the email above, which had its cache cleared
if hasattr(self.beta_tester, '_roles'):
del self.beta_tester._roles
self.assertFalse(CourseBetaTesterRole(self.course.id).has_user(self.beta_tester))
# test the response data
expected = {
"action": "remove",
"results": [
{
"identifier": self.beta_tester.email,
"error": False,
"userDoesNotExist": False
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 0)
def test_remove_with_email(self):
url = reverse('bulk_beta_modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url,
{'identifiers': self.beta_tester.email, 'action': 'remove', 'email_students': True,
'reason': 'testing'})
self.assertEqual(response.status_code, 200)
# Works around a caching bug which supposedly can't happen in prod. The instance here is not ==
# the instance fetched from the email above, which had its cache cleared
if hasattr(self.beta_tester, '_roles'):
del self.beta_tester._roles
self.assertFalse(CourseBetaTesterRole(self.course.id).has_user(self.beta_tester))
# test the response data
expected = {
"action": "remove",
"results": [
{
"identifier": self.beta_tester.email,
"error": False,
"userDoesNotExist": False
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
# Check the outbox
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
mail.outbox[0].subject,
u'You have been removed from a beta test for {display_name}'.format(display_name=self.course.display_name,)
)
self.assertEqual(
mail.outbox[0].body,
"Dear {full_name}\n\nYou have been removed as a beta tester for "
"{display_name} at edx.org by a member of the course staff. "
"The course will remain on your dashboard, but you will no longer "
"be part of the beta testing group.\n\n"
"Your other courses have not been affected.\n\n----\n"
"This email was automatically sent from edx.org to {email_address}".format(
display_name=self.course.display_name,
full_name=self.beta_tester.profile.name,
email_address=self.beta_tester.email
)
)
@attr('shard_1')
class TestInstructorAPILevelsAccess(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test endpoints whereby instructors can change permissions
of other users.
This test does NOT test whether the actions had an effect on the
database; that is the job of test_access.
This tests the response and action switch.
Actually, modify_access does not have a very meaningful
response yet, so only the status code is tested.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPILevelsAccess, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(TestInstructorAPILevelsAccess, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.other_instructor = InstructorFactory(course_key=self.course.id)
self.other_staff = StaffFactory(course_key=self.course.id)
self.other_user = UserFactory()
def test_modify_access_noparams(self):
""" Test missing all query parameters. """
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
def test_modify_access_bad_action(self):
""" Test with an invalid action parameter. """
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.email,
'rolename': 'staff',
'action': 'robot-not-an-action',
})
self.assertEqual(response.status_code, 400)
def test_modify_access_bad_role(self):
""" Test with an invalid action parameter. """
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.email,
'rolename': 'robot-not-a-roll',
'action': 'revoke',
})
self.assertEqual(response.status_code, 400)
def test_modify_access_allow(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_user.email,
'rolename': 'staff',
'action': 'allow',
})
self.assertEqual(response.status_code, 200)
def test_modify_access_allow_with_uname(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_instructor.username,
'rolename': 'staff',
'action': 'allow',
})
self.assertEqual(response.status_code, 200)
def test_modify_access_revoke(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.email,
'rolename': 'staff',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
def test_modify_access_revoke_with_username(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.username,
'rolename': 'staff',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
def test_modify_access_with_fake_user(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': 'GandalfTheGrey',
'rolename': 'staff',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
expected = {
'unique_student_identifier': 'GandalfTheGrey',
'userDoesNotExist': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_modify_access_with_inactive_user(self):
self.other_user.is_active = False
self.other_user.save() # pylint: disable=no-member
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_user.username,
'rolename': 'beta',
'action': 'allow',
})
self.assertEqual(response.status_code, 200)
expected = {
'unique_student_identifier': self.other_user.username,
'inactiveUser': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_modify_access_revoke_not_allowed(self):
""" Test revoking access that a user does not have. """
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.other_staff.email,
'rolename': 'instructor',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
def test_modify_access_revoke_self(self):
"""
Test that instructors cannot revoke their own instructor privileges.
"""
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.instructor.email,
'rolename': 'instructor',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
# check response content
expected = {
'unique_student_identifier': self.instructor.username,
'rolename': 'instructor',
'action': 'revoke',
'removingSelfAsInstructor': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_list_course_role_members_noparams(self):
""" Test missing all query parameters. """
url = reverse('list_course_role_members', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
def test_list_course_role_members_bad_rolename(self):
""" Test with an invalid rolename parameter. """
url = reverse('list_course_role_members', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'rolename': 'robot-not-a-rolename',
})
self.assertEqual(response.status_code, 400)
def test_list_course_role_members_staff(self):
url = reverse('list_course_role_members', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'rolename': 'staff',
})
self.assertEqual(response.status_code, 200)
# check response content
expected = {
'course_id': self.course.id.to_deprecated_string(),
'staff': [
{
'username': self.other_staff.username,
'email': self.other_staff.email,
'first_name': self.other_staff.first_name,
'last_name': self.other_staff.last_name,
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_list_course_role_members_beta(self):
url = reverse('list_course_role_members', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'rolename': 'beta',
})
self.assertEqual(response.status_code, 200)
# check response content
expected = {
'course_id': self.course.id.to_deprecated_string(),
'beta': []
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected)
def test_update_forum_role_membership(self):
"""
Test update forum role membership with user's email and username.
"""
# Seed forum roles for course.
seed_permissions_roles(self.course.id)
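# Exercise every combination of identifier type (email/username), forum role, and action.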
for user in [self.instructor, self.other_user]:
for identifier_attr in [user.email, user.username]:
for rolename in ["Administrator", "Moderator", "Community TA"]:
for action in ["allow", "revoke"]:
self.assert_update_forum_role_membership(user, identifier_attr, rolename, action)
def assert_update_forum_role_membership(self, current_user, identifier, rolename, action):
"""
Test update forum role membership.
Update the forum role using the given identifier, rolename and action, then verify the result.
"""
url = reverse('update_forum_role_membership', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(
url,
{
'unique_student_identifier': identifier,
'rolename': rolename,
'action': action,
}
)
# Status code should be 200.
self.assertEqual(response.status_code, 200)
user_roles = current_user.roles.filter(course_id=self.course.id).values_list("name", flat=True)
if action == 'allow':
self.assertIn(rolename, user_roles)
elif action == 'revoke':
self.assertNotIn(rolename, user_roles)
@attr('shard_1')
@ddt.ddt
@patch.dict('django.conf.settings.FEATURES', {'ENABLE_PAID_COURSE_REGISTRATION': True})
class TestInstructorAPILevelsDataDump(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test endpoints that show data without side effects.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPILevelsDataDump, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(TestInstructorAPILevelsDataDump, self).setUp()
self.course_mode = CourseMode(course_id=self.course.id,
mode_slug="honor",
mode_display_name="honor cert",
min_price=40)
self.course_mode.save()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.cart = Order.get_cart_for_user(self.instructor)
self.coupon_code = 'abcde'
self.coupon = Coupon(code=self.coupon_code, description='testing code', course_id=self.course.id,
percentage_discount=10, created_by=self.instructor, is_active=True)
self.coupon.save()
# Create testing invoice 1
self.sale_invoice_1 = Invoice.objects.create(
total_amount=1234.32, company_name='Test1', company_contact_name='TestName', company_contact_email='[email protected]',
recipient_name='Testw', recipient_email='[email protected]', customer_reference_number='2Fwe23S',
internal_reference="A", course_id=self.course.id, is_valid=True
)
self.invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
invoice=self.sale_invoice_1,
qty=1,
unit_price=1234.32,
course_id=self.course.id
)
self.students = [UserFactory() for _ in xrange(6)]
for student in self.students:
CourseEnrollment.enroll(student, self.course.id)
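# 'Students who may enroll' includes both enrolled students and invited-but-unenrolled users.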
self.students_who_may_enroll = self.students + [UserFactory() for _ in range(5)]
for student in self.students_who_may_enroll:
CourseEnrollmentAllowed.objects.create(
email=student.email, course_id=self.course.id
)
def register_with_redemption_code(self, user, code):
"""
Enroll a user using a registration code.
"""
redeem_url = reverse('register_code_redemption', args=[code])
self.client.login(username=user.username, password='test')
response = self.client.get(redeem_url)
self.assertEqual(response.status_code, 200)
# check button text
self.assertIn('Activate Course Enrollment', response.content)
response = self.client.post(redeem_url)
self.assertEqual(response.status_code, 200)
def test_invalidate_sale_record(self):
"""
Test the sale invalidation scenario.
"""
for i in range(2):
course_registration_code = CourseRegistrationCode(
code='sale_invoice{}'.format(i),
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
course_registration_code.save()
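# A valid invoice can be invalidated once; repeating the request should be rejected.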
data = {'invoice_number': self.sale_invoice_1.id, 'event_type': "invalidate"}
url = reverse('sale_validation', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.assert_request_status_code(200, url, method="POST", data=data)
# Now try to fetch data for a nonexistent invoice number
test_data_1 = {'invoice_number': 100, 'event_type': "invalidate"}
self.assert_request_status_code(404, url, method="POST", data=test_data_1)
# Now invalidate the same invoice number again and expect a 400 Bad Request
response = self.assert_request_status_code(400, url, method="POST", data=data)
self.assertIn("The sale associated with this invoice has already been invalidated.", response.content)
# now re_validate the invoice number
data['event_type'] = "re_validate"
self.assert_request_status_code(200, url, method="POST", data=data)
# Now re_validate the same active invoice number and expect a 400 Bad Request
response = self.assert_request_status_code(400, url, method="POST", data=data)
self.assertIn("This invoice is already active.", response.content)
test_data_2 = {'invoice_number': self.sale_invoice_1.id}
response = self.assert_request_status_code(400, url, method="POST", data=test_data_2)
self.assertIn("Missing required event_type parameter", response.content)
test_data_3 = {'event_type': "re_validate"}
response = self.assert_request_status_code(400, url, method="POST", data=test_data_3)
self.assertIn("Missing required invoice_number parameter", response.content)
# submitting invalid invoice number
data['invoice_number'] = 'testing'
response = self.assert_request_status_code(400, url, method="POST", data=data)
self.assertIn("invoice_number must be an integer, {value} provided".format(value=data['invoice_number']), response.content)
def test_get_sale_order_records_features_csv(self):
"""
Test that the response from get_sale_order_records is in csv format.
"""
# add the coupon code for the course
coupon = Coupon(
code='test_code', description='test_description', course_id=self.course.id,
percentage_discount='10', created_by=self.instructor, is_active=True
)
coupon.save()
self.cart.order_type = 'business'
self.cart.save()
self.cart.add_billing_details(company_name='Test Company', company_contact_name='Test',
company_contact_email='test@123', recipient_name='R1',
recipient_email='', customer_reference_number='PO#23')
paid_course_reg_item = PaidCourseRegistration.add_to_order(
self.cart,
self.course.id,
mode_slug=CourseMode.HONOR
)
# update the quantity of the cart item paid_course_reg_item
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'), {'ItemId': paid_course_reg_item.id, 'qty': '4'})
self.assertEqual(resp.status_code, 200)
# apply the coupon code to the item in the cart
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': coupon.code})
self.assertEqual(resp.status_code, 200)
self.cart.purchase()
# get the updated item
item = self.cart.orderitem_set.all().select_subclasses()[0]
# get the redeemed coupon information
coupon_redemption = CouponRedemption.objects.select_related('coupon').filter(order=self.cart)
sale_order_url = reverse('get_sale_order_records', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(sale_order_url)
self.assertEqual(response['Content-Type'], 'text/csv')
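# The '36' below is the discounted unit price: 10% coupon off the $40 honor mode.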
self.assertIn('36', response.content.split('\r\n')[1])
self.assertIn(str(item.unit_cost), response.content.split('\r\n')[1],)
self.assertIn(str(item.list_price), response.content.split('\r\n')[1],)
self.assertIn(item.status, response.content.split('\r\n')[1],)
self.assertIn(coupon_redemption[0].coupon.code, response.content.split('\r\n')[1],)
def test_coupon_redeem_count_in_ecommerce_section(self):
"""
Test the coupon redeem count shown in the instructor dashboard e-commerce section.
"""
# add the coupon code for the course
coupon = Coupon(
code='test_code', description='test_description', course_id=self.course.id,
percentage_discount='10', created_by=self.instructor, is_active=True
)
coupon.save()
# Coupon Redeem Count only visible for Financial Admins.
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
PaidCourseRegistration.add_to_order(self.cart, self.course.id)
# apply the coupon code to the item in the cart
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': coupon.code})
self.assertEqual(resp.status_code, 200)
# URL for instructor dashboard
instructor_dashboard = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
# visit the instructor dashboard page and
# check that the coupon redeem count should be 0
resp = self.client.get(instructor_dashboard)
self.assertEqual(resp.status_code, 200)
self.assertIn('Number Redeemed', resp.content)
self.assertIn('<td>0</td>', resp.content)
# now make the payment of your cart items
self.cart.purchase()
# visit the instructor dashboard page and
# check that the coupon redeem count should be 1
resp = self.client.get(instructor_dashboard)
self.assertEqual(resp.status_code, 200)
self.assertIn('Number Redeemed', resp.content)
self.assertIn('<td>1</td>', resp.content)
def test_get_sale_records_features_csv(self):
"""
Test that the response from get_sale_records is in csv format.
"""
for i in range(2):
course_registration_code = CourseRegistrationCode(
code='sale_invoice{}'.format(i),
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
course_registration_code.save()
url = reverse(
'get_sale_records',
kwargs={'course_id': self.course.id.to_deprecated_string()}
)
response = self.client.get(url + '/csv', {})
self.assertEqual(response['Content-Type'], 'text/csv')
def test_get_sale_records_features_json(self):
"""
Test that the response from get_sale_records is in json format.
"""
for i in range(5):
course_registration_code = CourseRegistrationCode(
code='sale_invoice{}'.format(i),
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
course_registration_code.save()
url = reverse('get_sale_records', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('sale', res_json)
for res in res_json['sale']:
self.validate_sale_records_response(
res,
course_registration_code,
self.sale_invoice_1,
0,
invoice_item=self.invoice_item
)
def test_get_sale_records_features_with_multiple_invoices(self):
"""
Test that the response from get_sale_records is in json format for multiple invoices
"""
for i in range(5):
course_registration_code = CourseRegistrationCode(
code='qwerty{}'.format(i),
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
course_registration_code.save()
# Create test invoice 2
sale_invoice_2 = Invoice.objects.create(
total_amount=1234.32, company_name='Test1', company_contact_name='TestName', company_contact_email='[email protected]',
recipient_name='Testw_2', recipient_email='[email protected]', customer_reference_number='2Fwe23S',
internal_reference="B", course_id=self.course.id
)
invoice_item_2 = CourseRegistrationCodeInvoiceItem.objects.create(
invoice=sale_invoice_2,
qty=1,
unit_price=1234.32,
course_id=self.course.id
)
for i in range(5):
course_registration_code = CourseRegistrationCode(
code='xyzmn{}'.format(i), course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor, invoice=sale_invoice_2, invoice_item=invoice_item_2, mode_slug='honor'
)
course_registration_code.save()
url = reverse('get_sale_records', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('sale', res_json)
self.validate_sale_records_response(
res_json['sale'][0],
course_registration_code,
self.sale_invoice_1,
0,
invoice_item=self.invoice_item
)
self.validate_sale_records_response(
res_json['sale'][1],
course_registration_code,
sale_invoice_2,
0,
invoice_item=invoice_item_2
)
def validate_sale_records_response(self, res, course_registration_code, invoice, used_codes, invoice_item):
"""
Validate sale record attribute values against the response object.
"""
self.assertEqual(res['total_amount'], invoice.total_amount)
self.assertEqual(res['recipient_email'], invoice.recipient_email)
self.assertEqual(res['recipient_name'], invoice.recipient_name)
self.assertEqual(res['company_name'], invoice.company_name)
self.assertEqual(res['company_contact_name'], invoice.company_contact_name)
self.assertEqual(res['company_contact_email'], invoice.company_contact_email)
self.assertEqual(res['internal_reference'], invoice.internal_reference)
self.assertEqual(res['customer_reference_number'], invoice.customer_reference_number)
self.assertEqual(res['invoice_number'], invoice.id)
self.assertEqual(res['created_by'], course_registration_code.created_by.username)
self.assertEqual(res['course_id'], invoice_item.course_id.to_deprecated_string())
self.assertEqual(res['total_used_codes'], used_codes)
self.assertEqual(res['total_codes'], 5)
def test_get_problem_responses_invalid_location(self):
"""
Test whether get_problem_responses returns an appropriate status
message when users submit an invalid problem location.
"""
url = reverse(
'get_problem_responses',
kwargs={'course_id': unicode(self.course.id)}
)
problem_location = ''
response = self.client.get(url, {'problem_location': problem_location})
res_json = json.loads(response.content)
self.assertEqual(res_json, 'Could not find problem with this location.')
def valid_problem_location(test): # pylint: disable=no-self-argument
"""
Decorator for tests that target the get_problem_responses endpoint and
need to pretend the user submitted a valid problem location.
"""
@functools.wraps(test)
def wrapper(self, *args, **kwargs):
"""
Run `test` method, ensuring that UsageKey.from_string returns a
problem key that the get_problem_responses endpoint can
work with.
"""
mock_problem_key = Mock(return_value=u'')
mock_problem_key.course_key = self.course.id
with patch.object(UsageKey, 'from_string') as patched_method:
patched_method.return_value = mock_problem_key
test(self, *args, **kwargs)
return wrapper
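# Tests below apply @valid_problem_location so the endpoint receives a parseable problem key.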
@valid_problem_location
def test_get_problem_responses_successful(self):
"""
Test whether get_problem_responses returns an appropriate status
message if CSV generation was started successfully.
"""
url = reverse(
'get_problem_responses',
kwargs={'course_id': unicode(self.course.id)}
)
problem_location = ''
response = self.client.get(url, {'problem_location': problem_location})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
status = res_json['status']
self.assertIn('is being created', status)
self.assertNotIn('already in progress', status)
@valid_problem_location
def test_get_problem_responses_already_running(self):
"""
Test whether get_problem_responses returns an appropriate status
message if CSV generation is already in progress.
"""
url = reverse(
'get_problem_responses',
kwargs={'course_id': unicode(self.course.id)}
)
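# Simulate a problem-responses CSV task that is already running.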
with patch('instructor_task.api.submit_calculate_problem_responses_csv') as submit_task_function:
error = AlreadyRunningError()
submit_task_function.side_effect = error
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
self.assertIn('already in progress', res_json['status'])
def test_get_students_features(self):
"""
Test that a minimal set of information is formatted
correctly in the response to get_students_features.
"""
url = reverse('get_students_features', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('students', res_json)
for student in self.students:
student_json = [
x for x in res_json['students']
if x['username'] == student.username
][0]
self.assertEqual(student_json['username'], student.username)
self.assertEqual(student_json['email'], student.email)
@ddt.data(True, False)
def test_get_students_features_cohorted(self, is_cohorted):
"""
Test that get_students_features includes cohort info when the course is
cohorted, and does not when the course is not cohorted.
"""
url = reverse('get_students_features', kwargs={'course_id': unicode(self.course.id)})
set_course_cohort_settings(self.course.id, is_cohorted=is_cohorted)
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertEqual('cohort' in res_json['feature_names'], is_cohorted)
@ddt.data(True, False)
def test_get_students_features_teams(self, has_teams):
"""
Test that get_students_features includes team info when the course
has teams enabled, and does not when the course does not.
"""
if has_teams:
self.course = CourseFactory.create(teams_configuration={
'max_size': 2, 'topics': [{'topic-id': 'topic', 'name': 'Topic', 'description': 'A Topic'}]
})
course_instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=course_instructor.username, password='test')
url = reverse('get_students_features', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertEqual('team' in res_json['feature_names'], has_teams)
def test_get_students_who_may_enroll(self):
"""
Test whether get_students_who_may_enroll returns an appropriate
status message when users request a CSV file of students who
may enroll in a course.
"""
url = reverse(
'get_students_who_may_enroll',
kwargs={'course_id': unicode(self.course.id)}
)
# Successful case:
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
self.assertNotIn('currently being created', res_json['status'])
# CSV generation already in progress:
with patch('instructor_task.api.submit_calculate_may_enroll_csv') as submit_task_function:
error = AlreadyRunningError()
submit_task_function.side_effect = error
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
self.assertIn('currently being created', res_json['status'])
def test_get_student_exam_results(self):
"""
Test whether get_proctored_exam_results returns an appropriate
status message when users request a CSV file.
"""
url = reverse(
'get_proctored_exam_results',
kwargs={'course_id': unicode(self.course.id)}
)
# Successful case:
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
self.assertNotIn('currently being created', res_json['status'])
# CSV generation already in progress:
with patch('instructor_task.api.submit_proctored_exam_results_report') as submit_task_function:
error = AlreadyRunningError()
submit_task_function.side_effect = error
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('status', res_json)
self.assertIn('currently being created', res_json['status'])
def test_access_course_finance_admin_with_invalid_course_key(self):
"""
        Test that require_finance_admin returns a 404 when the course key is
        invalid, before generating a detailed enrollment report.
"""
func = Mock()
decorated_func = require_finance_admin(func)
request = self.mock_request()
response = decorated_func(request, 'invalid_course_key')
self.assertEqual(response.status_code, 404)
self.assertFalse(func.called)
def mock_request(self):
"""
        Return a mock request with the instructor as the requesting user.
"""
request = Mock()
request.user = self.instructor
return request
def test_access_course_finance_admin_with_valid_course_key(self):
"""
        Test that a valid course key still returns a 403 when the user
        does not have the finance admin role.
"""
func = Mock()
decorated_func = require_finance_admin(func)
request = self.mock_request()
response = decorated_func(request, 'valid/course/key')
self.assertEqual(response.status_code, 403)
self.assertFalse(func.called)
    def test_add_user_to_finance_admin_role_with_valid_course(self):
        """
        Test that the decorated function is called once the user has been
        granted finance admin rights.
"""
func = Mock()
decorated_func = require_finance_admin(func)
request = self.mock_request()
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
decorated_func(request, self.course.id.to_deprecated_string())
self.assertTrue(func.called)
def test_enrollment_report_features_csv(self):
"""
        Test generating the enrollment report after enrolling users and
        admin staff via registration codes.
"""
InvoiceTransaction.objects.create(
invoice=self.sale_invoice_1,
amount=self.sale_invoice_1.total_amount,
status='completed',
created_by=self.instructor,
last_modified_by=self.instructor
)
course_registration_code = CourseRegistrationCode.objects.create(
code='abcde',
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
admin_user = AdminFactory()
admin_cart = Order.get_cart_for_user(admin_user)
PaidCourseRegistration.add_to_order(admin_cart, self.course.id)
admin_cart.purchase()
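        # The admin purchase above gives the report a purchase-based
        # enrollment alongside the code-based enrollment created below.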
        # create a new user/student, enroll them in the course using a
        # registration code, and then validate the generated detailed
        # enrollment report
test_user = UserFactory()
self.register_with_redemption_code(test_user, course_registration_code.code)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
UserProfileFactory.create(user=self.students[0], meta='{"company": "asdasda"}')
self.client.login(username=self.instructor.username, password='test')
url = reverse('get_enrollment_report', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertIn('The detailed enrollment report is being created.', response.content)
def test_bulk_purchase_detailed_report(self):
"""
        Test generating the detailed enrollment report:
        1. Purchase registration codes.
        2. Enroll users via registration codes.
        3. Validate the generated enrollment report.
"""
paid_course_reg_item = PaidCourseRegistration.add_to_order(self.cart, self.course.id)
# update the quantity of the cart item paid_course_reg_item
resp = self.client.post(reverse('shoppingcart.views.update_user_cart'),
{'ItemId': paid_course_reg_item.id, 'qty': '4'})
self.assertEqual(resp.status_code, 200)
# apply the coupon code to the item in the cart
resp = self.client.post(reverse('shoppingcart.views.use_code'), {'code': self.coupon_code})
self.assertEqual(resp.status_code, 200)
self.cart.purchase()
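        # Purchasing the bulk cart should generate registration codes tied to
        # this order (presumably one per seat); the first is redeemed below.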
course_reg_codes = CourseRegistrationCode.objects.filter(order=self.cart)
self.register_with_redemption_code(self.instructor, course_reg_codes[0].code)
test_user = UserFactory()
test_user_cart = Order.get_cart_for_user(test_user)
PaidCourseRegistration.add_to_order(test_user_cart, self.course.id)
test_user_cart.purchase()
InvoiceTransaction.objects.create(
invoice=self.sale_invoice_1,
amount=-self.sale_invoice_1.total_amount,
status='refunded',
created_by=self.instructor,
last_modified_by=self.instructor
)
course_registration_code = CourseRegistrationCode.objects.create(
code='abcde',
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
test_user1 = UserFactory()
self.register_with_redemption_code(test_user1, course_registration_code.code)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
self.client.login(username=self.instructor.username, password='test')
url = reverse('get_enrollment_report', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertIn('The detailed enrollment report is being created.', response.content)
def test_create_registration_code_without_invoice_and_order(self):
"""
        Test generating the detailed enrollment report using a registration
        code that was created outside the invoice and bulk-purchase flows.
"""
course_registration_code = CourseRegistrationCode.objects.create(
code='abcde',
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
mode_slug='honor'
)
test_user1 = UserFactory()
self.register_with_redemption_code(test_user1, course_registration_code.code)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
self.client.login(username=self.instructor.username, password='test')
url = reverse('get_enrollment_report', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertIn('The detailed enrollment report is being created.', response.content)
def test_invoice_payment_is_still_pending_for_registration_codes(self):
"""
        Test generating the enrollment report when a user enrolls in the
        course using a registration code whose invoice has not been paid yet.
"""
course_registration_code = CourseRegistrationCode.objects.create(
code='abcde',
course_id=self.course.id.to_deprecated_string(),
created_by=self.instructor,
invoice=self.sale_invoice_1,
invoice_item=self.invoice_item,
mode_slug='honor'
)
test_user1 = UserFactory()
self.register_with_redemption_code(test_user1, course_registration_code.code)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
self.client.login(username=self.instructor.username, password='test')
url = reverse('get_enrollment_report', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertIn('The detailed enrollment report is being created.', response.content)
@patch.object(instructor.views.api, 'anonymous_id_for_user', Mock(return_value='42'))
@patch.object(instructor.views.api, 'unique_id_for_user', Mock(return_value='41'))
def test_get_anon_ids(self):
"""
Test the CSV output for the anonymized user ids.
"""
url = reverse('get_anon_ids', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertEqual(response['Content-Type'], 'text/csv')
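        # With anonymous_id_for_user and unique_id_for_user patched to
        # constants above, every data row should contain "41" and "42".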
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(
'"User ID","Anonymized User ID","Course Specific Anonymized User ID"'
'\n"{user_id}","41","42"\n'.format(user_id=self.students[0].id)
))
self.assertTrue(
body.endswith('"{user_id}","41","42"\n'.format(user_id=self.students[-1].id))
)
def test_list_report_downloads(self):
url = reverse('list_report_downloads', kwargs={'course_id': self.course.id.to_deprecated_string()})
with patch('instructor_task.models.LocalFSReportStore.links_for') as mock_links_for:
mock_links_for.return_value = [
('mock_file_name_1', 'https://1.mock.url'),
('mock_file_name_2', 'https://2.mock.url'),
]
response = self.client.get(url, {})
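        # Each (name, url) pair from the report store should be wrapped in a
        # descriptor with the file name, URL, and a pre-rendered download link.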
expected_response = {
"downloads": [
{
"url": "https://1.mock.url",
"link": "<a href=\"https://1.mock.url\">mock_file_name_1</a>",
"name": "mock_file_name_1"
},
{
"url": "https://2.mock.url",
"link": "<a href=\"https://2.mock.url\">mock_file_name_2</a>",
"name": "mock_file_name_2"
}
]
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected_response)
@ddt.data(*REPORTS_DATA)
@ddt.unpack
@valid_problem_location
def test_calculate_report_csv_success(self, report_type, instructor_api_endpoint, task_api_endpoint, extra_instructor_api_kwargs):
kwargs = {'course_id': unicode(self.course.id)}
kwargs.update(extra_instructor_api_kwargs)
url = reverse(instructor_api_endpoint, kwargs=kwargs)
success_status = "The {report_type} report is being created.".format(report_type=report_type)
if report_type == 'problem responses':
with patch(task_api_endpoint):
response = self.client.get(url, {'problem_location': ''})
self.assertIn(success_status, response.content)
else:
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
with patch(task_api_endpoint):
response = self.client.get(url, {})
self.assertIn(success_status, response.content)
@ddt.data(*EXECUTIVE_SUMMARY_DATA)
@ddt.unpack
def test_executive_summary_report_success(
self,
report_type,
instructor_api_endpoint,
task_api_endpoint,
extra_instructor_api_kwargs
):
kwargs = {'course_id': unicode(self.course.id)}
kwargs.update(extra_instructor_api_kwargs)
url = reverse(instructor_api_endpoint, kwargs=kwargs)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
with patch(task_api_endpoint):
response = self.client.get(url, {})
success_status = "The {report_type} report is being created." \
" To view the status of the report, see Pending" \
" Instructor Tasks" \
" below".format(report_type=report_type)
self.assertIn(success_status, response.content)
@ddt.data(*EXECUTIVE_SUMMARY_DATA)
@ddt.unpack
def test_executive_summary_report_already_running(
self,
report_type,
instructor_api_endpoint,
task_api_endpoint,
extra_instructor_api_kwargs
):
kwargs = {'course_id': unicode(self.course.id)}
kwargs.update(extra_instructor_api_kwargs)
url = reverse(instructor_api_endpoint, kwargs=kwargs)
CourseFinanceAdminRole(self.course.id).add_users(self.instructor)
with patch(task_api_endpoint) as mock:
mock.side_effect = AlreadyRunningError()
response = self.client.get(url, {})
already_running_status = "The {report_type} report is currently being created." \
" To view the status of the report, see Pending Instructor Tasks below." \
" You will be able to download the report" \
" when it is" \
" complete.".format(report_type=report_type)
self.assertIn(already_running_status, response.content)
def test_get_student_progress_url(self):
""" Test that progress_url is in the successful response. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
url += "?unique_student_identifier={}".format(
quote(self.students[0].email.encode("utf-8"))
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
res_json = json.loads(response.content)
self.assertIn('progress_url', res_json)
def test_get_student_progress_url_from_uname(self):
""" Test that progress_url is in the successful response. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
url += "?unique_student_identifier={}".format(
quote(self.students[0].username.encode("utf-8"))
)
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
res_json = json.loads(response.content)
self.assertIn('progress_url', res_json)
def test_get_student_progress_url_noparams(self):
""" Test that the endpoint 404's without the required query params. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
def test_get_student_progress_url_nostudent(self):
""" Test that the endpoint 400's when requesting an unknown email. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertEqual(response.status_code, 400)
@attr('shard_1')
class TestInstructorAPIRegradeTask(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test endpoints whereby instructors can change student grades.
This includes resetting attempts and starting rescore tasks.
    This test does NOT check whether the actions had an effect on the
    database; that is the job of the task tests and test_enrollment.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorAPIRegradeTask, cls).setUpClass()
cls.course = CourseFactory.create()
cls.problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-problem-urlname'
)
cls.problem_urlname = cls.problem_location.to_deprecated_string()
def setUp(self):
super(TestInstructorAPIRegradeTask, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.module_to_reset = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 10}),
)
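        # The module starts with 10 recorded attempts; the reset and delete
        # endpoints exercised below should zero out or remove this state.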
    def test_reset_student_attempts_delete_all(self):
        """ Make sure no one can delete all students' state on a problem. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'all_students': True,
'delete_module': True,
})
self.assertEqual(response.status_code, 400)
def test_reset_student_attempts_single(self):
""" Test reset single student attempts. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
# make sure problem attempts have been reset.
changed_module = StudentModule.objects.get(pk=self.module_to_reset.pk)
self.assertEqual(
json.loads(changed_module.state)['attempts'],
0
)
# mock out the function which should be called to execute the action.
@patch.object(instructor_task.api, 'submit_reset_problem_attempts_for_all_students')
def test_reset_student_attempts_all(self, act):
""" Test reset all student attempts. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'all_students': True,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
def test_reset_student_attempts_missingmodule(self):
""" Test reset for non-existant problem. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': 'robot-not-a-real-module',
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 400)
def test_reset_student_attempts_delete(self):
""" Test delete single student state. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'unique_student_identifier': self.student.email,
'delete_module': True,
})
self.assertEqual(response.status_code, 200)
# make sure the module has been deleted
self.assertEqual(
StudentModule.objects.filter(
student=self.module_to_reset.student,
course_id=self.module_to_reset.course_id,
# module_id=self.module_to_reset.module_id,
).count(),
0
)
def test_reset_student_attempts_nonsense(self):
""" Test failure with both unique_student_identifier and all_students. """
url = reverse('reset_student_attempts', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'unique_student_identifier': self.student.email,
'all_students': True,
})
self.assertEqual(response.status_code, 400)
@patch.object(instructor_task.api, 'submit_rescore_problem_for_student')
def test_rescore_problem_single(self, act):
""" Test rescoring of a single student. """
url = reverse('rescore_problem', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
@patch.object(instructor_task.api, 'submit_rescore_problem_for_student')
def test_rescore_problem_single_from_uname(self, act):
""" Test rescoring of a single student. """
url = reverse('rescore_problem', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'unique_student_identifier': self.student.username,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
@patch.object(instructor_task.api, 'submit_rescore_problem_for_all_students')
def test_rescore_problem_all(self, act):
""" Test rescoring for all students. """
url = reverse('rescore_problem', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'problem_to_reset': self.problem_urlname,
'all_students': True,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
@patch.dict(settings.FEATURES, {'ENTRANCE_EXAMS': True})
def test_course_has_entrance_exam_in_student_attempts_reset(self):
""" Test course has entrance exam id set while resetting attempts"""
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'all_students': True,
'delete_module': False,
})
self.assertEqual(response.status_code, 400)
@patch.dict(settings.FEATURES, {'ENTRANCE_EXAMS': True})
def test_rescore_entrance_exam_with_invalid_exam(self):
""" Test course has entrance exam id set while re-scoring. """
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 400)
@attr('shard_1')
@patch.dict(settings.FEATURES, {'ENTRANCE_EXAMS': True})
class TestEntranceExamInstructorAPIRegradeTask(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test endpoints whereby instructors can rescore student grades,
reset student attempts and delete state for entrance exam.
"""
@classmethod
def setUpClass(cls):
super(TestEntranceExamInstructorAPIRegradeTask, cls).setUpClass()
cls.course = CourseFactory.create(
org='test_org',
course='test_course',
run='test_run',
entrance_exam_id='i4x://{}/{}/chapter/Entrance_exam'.format('test_org', 'test_course')
)
cls.course_with_invalid_ee = CourseFactory.create(entrance_exam_id='invalid_exam')
with cls.store.bulk_operations(cls.course.id, emit_signals=False):
cls.entrance_exam = ItemFactory.create(
parent=cls.course,
category='chapter',
display_name='Entrance exam'
)
subsection = ItemFactory.create(
parent=cls.entrance_exam,
category='sequential',
display_name='Subsection 1'
)
vertical = ItemFactory.create(
parent=subsection,
category='vertical',
display_name='Vertical 1'
)
cls.ee_problem_1 = ItemFactory.create(
parent=vertical,
category="problem",
display_name="Exam Problem - Problem 1"
)
cls.ee_problem_2 = ItemFactory.create(
parent=vertical,
category="problem",
display_name="Exam Problem - Problem 2"
)
def setUp(self):
super(TestEntranceExamInstructorAPIRegradeTask, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
# Add instructor to invalid ee course
CourseInstructorRole(self.course_with_invalid_ee.id).add_users(self.instructor)
self.client.login(username=self.instructor.username, password='test')
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
ee_module_to_reset1 = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.ee_problem_1.location,
state=json.dumps({'attempts': 10, 'done': True}),
)
ee_module_to_reset2 = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.ee_problem_2.location,
state=json.dumps({'attempts': 10, 'done': True}),
)
self.ee_modules = [ee_module_to_reset1.module_state_key, ee_module_to_reset2.module_state_key]
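        # Both entrance exam problems start with 10 attempts and done=True;
        # the reset and delete tests below operate on this module list.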
    def test_reset_entrance_exam_student_attempts_delete_all(self):
        """ Make sure no one can delete all students' state on the entrance exam. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'all_students': True,
'delete_module': True,
})
self.assertEqual(response.status_code, 400)
def test_reset_entrance_exam_student_attempts_single(self):
""" Test reset single student attempts for entrance exam. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
# make sure problem attempts have been reset.
changed_modules = StudentModule.objects.filter(module_state_key__in=self.ee_modules)
for changed_module in changed_modules:
self.assertEqual(
json.loads(changed_module.state)['attempts'],
0
)
# mock out the function which should be called to execute the action.
@patch.object(instructor_task.api, 'submit_reset_problem_attempts_in_entrance_exam')
def test_reset_entrance_exam_all_student_attempts(self, act):
""" Test reset all student attempts for entrance exam. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'all_students': True,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
def test_reset_student_attempts_invalid_entrance_exam(self):
""" Test reset for invalid entrance exam. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course_with_invalid_ee.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 400)
    def test_entrance_exam_student_delete_state(self):
""" Test delete single student entrance exam state. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
'delete_module': True,
})
self.assertEqual(response.status_code, 200)
# make sure the module has been deleted
changed_modules = StudentModule.objects.filter(module_state_key__in=self.ee_modules)
self.assertEqual(changed_modules.count(), 0)
def test_entrance_exam_delete_state_with_staff(self):
""" Test entrance exam delete state failure with staff access. """
self.client.logout()
staff_user = StaffFactory(course_key=self.course.id)
self.client.login(username=staff_user.username, password='test')
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
'delete_module': True,
})
self.assertEqual(response.status_code, 403)
def test_entrance_exam_reset_student_attempts_nonsense(self):
""" Test failure with both unique_student_identifier and all_students. """
url = reverse('reset_student_attempts_for_entrance_exam',
kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
'all_students': True,
})
self.assertEqual(response.status_code, 400)
@patch.object(instructor_task.api, 'submit_rescore_entrance_exam_for_student')
def test_rescore_entrance_exam_single_student(self, act):
""" Test re-scoring of entrance exam for single student. """
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
self.assertTrue(act.called)
    def test_rescore_entrance_exam_all_students(self):
""" Test rescoring for all students. """
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'all_students': True,
})
self.assertEqual(response.status_code, 200)
    def test_rescore_entrance_exam_all_students_and_single(self):
""" Test re-scoring with both all students and single student parameters. """
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
'all_students': True,
})
self.assertEqual(response.status_code, 400)
def test_rescore_entrance_exam_with_invalid_exam(self):
""" Test re-scoring of entrance exam with invalid exam. """
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course_with_invalid_ee.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 400)
def test_list_entrance_exam_instructor_tasks_student(self):
""" Test list task history for entrance exam AND student. """
# create a re-score entrance exam task
url = reverse('rescore_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
url = reverse('list_entrance_exam_instructor_tasks', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
# check response
tasks = json.loads(response.content)['tasks']
self.assertEqual(len(tasks), 1)
self.assertEqual(tasks[0]['status'], _('Complete'))
    def test_list_entrance_exam_instructor_tasks_all_students(self):
        """ Test listing entrance exam task history for all students. """
url = reverse('list_entrance_exam_instructor_tasks', kwargs={'course_id': unicode(self.course.id)})
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
# check response
tasks = json.loads(response.content)['tasks']
self.assertEqual(len(tasks), 0)
def test_list_entrance_exam_instructor_with_invalid_exam_key(self):
""" Test list task history for entrance exam failure if course has invalid exam. """
url = reverse('list_entrance_exam_instructor_tasks',
kwargs={'course_id': unicode(self.course_with_invalid_ee.id)})
response = self.client.get(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 400)
def test_skip_entrance_exam_student(self):
""" Test skip entrance exam api for student. """
        # mark the student as allowed to skip the entrance exam
url = reverse('mark_student_can_skip_entrance_exam', kwargs={'course_id': unicode(self.course.id)})
response = self.client.post(url, {
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
# check response
message = _('This student (%s) will skip the entrance exam.') % self.student.email
self.assertContains(response, message)
# post again with same student
response = self.client.post(url, {
'unique_student_identifier': self.student.email,
})
# This time response message should be different
message = _('This student (%s) is already allowed to skip the entrance exam.') % self.student.email
self.assertContains(response, message)
@attr('shard_1')
@patch('bulk_email.models.html_to_text', Mock(return_value='Mocking CourseEmail.text_message', autospec=True))
@patch.dict(settings.FEATURES, {'ENABLE_INSTRUCTOR_EMAIL': True, 'REQUIRE_COURSE_EMAIL_AUTH': False})
class TestInstructorSendEmail(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
    Checks that only instructors have access to email endpoints, that these
    endpoints are only accessible for courses that actually exist, and that
    they accept only valid email messages.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorSendEmail, cls).setUpClass()
cls.course = CourseFactory.create()
test_subject = u'\u1234 test subject'
test_message = u'\u6824 test message'
cls.full_test_message = {
'send_to': 'staff',
'subject': test_subject,
'message': test_message,
}
def setUp(self):
super(TestInstructorSendEmail, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
def test_send_email_as_logged_in_instructor(self):
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, self.full_test_message)
self.assertEqual(response.status_code, 200)
def test_send_email_but_not_logged_in(self):
self.client.logout()
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, self.full_test_message)
self.assertEqual(response.status_code, 403)
def test_send_email_but_not_staff(self):
self.client.logout()
student = UserFactory()
self.client.login(username=student.username, password='test')
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, self.full_test_message)
self.assertEqual(response.status_code, 403)
def test_send_email_but_course_not_exist(self):
url = reverse('send_email', kwargs={'course_id': 'GarbageCourse/DNE/NoTerm'})
response = self.client.post(url, self.full_test_message)
self.assertNotEqual(response.status_code, 200)
def test_send_email_no_sendto(self):
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {
'subject': 'test subject',
'message': 'test message',
})
self.assertEqual(response.status_code, 400)
def test_send_email_no_subject(self):
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {
'send_to': 'staff',
'message': 'test message',
})
self.assertEqual(response.status_code, 400)
def test_send_email_no_message(self):
url = reverse('send_email', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url, {
'send_to': 'staff',
'subject': 'test subject',
})
self.assertEqual(response.status_code, 400)
class MockCompletionInfo(object):
"""Mock for get_task_completion_info"""
times_called = 0
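    # Alternating between success and failure lets the task-list tests
    # exercise both status-rendering paths with a single mock.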
def mock_get_task_completion_info(self, *args): # pylint: disable=unused-argument
"""Mock for get_task_completion_info"""
self.times_called += 1
if self.times_called % 2 == 0:
return True, 'Task Completed'
return False, 'Task Errored In Some Way'
@attr('shard_1')
class TestInstructorAPITaskLists(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test instructor task list endpoint.
"""
class FakeTask(object):
""" Fake task object """
FEATURES = [
'task_type',
'task_input',
'task_id',
'requester',
'task_state',
'created',
'status',
'task_message',
'duration_sec'
]
def __init__(self, completion):
for feature in self.FEATURES:
setattr(self, feature, 'expected')
# created needs to be a datetime
self.created = datetime.datetime(2013, 10, 25, 11, 42, 35)
# set 'status' and 'task_message' attrs
success, task_message = completion()
if success:
self.status = "Complete"
else:
self.status = "Incomplete"
self.task_message = task_message
# Set 'task_output' attr, which will be parsed to the 'duration_sec' attr.
self.task_output = '{"duration_ms": 1035000}'
self.duration_sec = 1035000 / 1000.0
def make_invalid_output(self):
"""Munge task_output to be invalid json"""
self.task_output = 'HI MY NAME IS INVALID JSON'
# This should be given the value of 'unknown' if the task output
# can't be properly parsed
self.duration_sec = 'unknown'
def to_dict(self):
""" Convert fake task to dictionary representation. """
attr_dict = {key: getattr(self, key) for key in self.FEATURES}
attr_dict['created'] = attr_dict['created'].isoformat()
return attr_dict
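        # to_dict() mirrors the per-task JSON returned by the endpoint, so
        # tests can compare serialized tasks with assertDictEqual directly.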
@classmethod
def setUpClass(cls):
super(TestInstructorAPITaskLists, cls).setUpClass()
cls.course = CourseFactory.create(
entrance_exam_id='i4x://{}/{}/chapter/Entrance_exam'.format('test_org', 'test_course')
)
cls.problem_location = msk_from_problem_urlname(
cls.course.id,
'robot-some-problem-urlname'
)
cls.problem_urlname = cls.problem_location.to_deprecated_string()
def setUp(self):
super(TestInstructorAPITaskLists, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.student = UserFactory()
CourseEnrollment.enroll(self.student, self.course.id)
self.module = StudentModule.objects.create(
student=self.student,
course_id=self.course.id,
module_state_key=self.problem_location,
state=json.dumps({'attempts': 10}),
)
mock_factory = MockCompletionInfo()
self.tasks = [self.FakeTask(mock_factory.mock_get_task_completion_info) for _ in xrange(7)]
self.tasks[-1].make_invalid_output()
@patch.object(instructor_task.api, 'get_running_instructor_tasks')
def test_list_instructor_tasks_running(self, act):
""" Test list of all running tasks. """
act.return_value = self.tasks
url = reverse('list_instructor_tasks', kwargs={'course_id': self.course.id.to_deprecated_string()})
mock_factory = MockCompletionInfo()
with patch('instructor.views.instructor_task_helpers.get_task_completion_info') as mock_completion_info:
mock_completion_info.side_effect = mock_factory.mock_get_task_completion_info
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
# check response
self.assertTrue(act.called)
expected_tasks = [ftask.to_dict() for ftask in self.tasks]
actual_tasks = json.loads(response.content)['tasks']
for exp_task, act_task in zip(expected_tasks, actual_tasks):
self.assertDictEqual(exp_task, act_task)
self.assertEqual(actual_tasks, expected_tasks)
@patch.object(instructor_task.api, 'get_instructor_task_history')
def test_list_background_email_tasks(self, act):
"""Test list of background email tasks."""
act.return_value = self.tasks
url = reverse('list_background_email_tasks', kwargs={'course_id': self.course.id.to_deprecated_string()})
mock_factory = MockCompletionInfo()
with patch('instructor.views.instructor_task_helpers.get_task_completion_info') as mock_completion_info:
mock_completion_info.side_effect = mock_factory.mock_get_task_completion_info
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
# check response
self.assertTrue(act.called)
expected_tasks = [ftask.to_dict() for ftask in self.tasks]
actual_tasks = json.loads(response.content)['tasks']
for exp_task, act_task in zip(expected_tasks, actual_tasks):
self.assertDictEqual(exp_task, act_task)
self.assertEqual(actual_tasks, expected_tasks)
@patch.object(instructor_task.api, 'get_instructor_task_history')
def test_list_instructor_tasks_problem(self, act):
""" Test list task history for problem. """
act.return_value = self.tasks
url = reverse('list_instructor_tasks', kwargs={'course_id': self.course.id.to_deprecated_string()})
mock_factory = MockCompletionInfo()
with patch('instructor.views.instructor_task_helpers.get_task_completion_info') as mock_completion_info:
mock_completion_info.side_effect = mock_factory.mock_get_task_completion_info
response = self.client.get(url, {
'problem_location_str': self.problem_urlname,
})
self.assertEqual(response.status_code, 200)
# check response
self.assertTrue(act.called)
expected_tasks = [ftask.to_dict() for ftask in self.tasks]
actual_tasks = json.loads(response.content)['tasks']
for exp_task, act_task in zip(expected_tasks, actual_tasks):
self.assertDictEqual(exp_task, act_task)
self.assertEqual(actual_tasks, expected_tasks)
@patch.object(instructor_task.api, 'get_instructor_task_history')
def test_list_instructor_tasks_problem_student(self, act):
""" Test list task history for problem AND student. """
act.return_value = self.tasks
url = reverse('list_instructor_tasks', kwargs={'course_id': self.course.id.to_deprecated_string()})
mock_factory = MockCompletionInfo()
with patch('instructor.views.instructor_task_helpers.get_task_completion_info') as mock_completion_info:
mock_completion_info.side_effect = mock_factory.mock_get_task_completion_info
response = self.client.get(url, {
'problem_location_str': self.problem_urlname,
'unique_student_identifier': self.student.email,
})
self.assertEqual(response.status_code, 200)
# check response
self.assertTrue(act.called)
expected_tasks = [ftask.to_dict() for ftask in self.tasks]
actual_tasks = json.loads(response.content)['tasks']
for exp_task, act_task in zip(expected_tasks, actual_tasks):
self.assertDictEqual(exp_task, act_task)
self.assertEqual(actual_tasks, expected_tasks)
@attr('shard_1')
@patch.object(instructor_task.api, 'get_instructor_task_history', autospec=True)
class TestInstructorEmailContentList(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Test the instructor email content history endpoint.
"""
@classmethod
def setUpClass(cls):
super(TestInstructorEmailContentList, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(TestInstructorEmailContentList, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
self.tasks = {}
self.emails = {}
self.emails_info = {}
def setup_fake_email_info(self, num_emails, with_failures=False):
""" Initialize the specified number of fake emails """
for email_id in range(num_emails):
num_sent = random.randint(1, 15401)
if with_failures:
failed = random.randint(1, 15401)
else:
failed = 0
self.tasks[email_id] = FakeContentTask(email_id, num_sent, failed, 'expected')
self.emails[email_id] = FakeEmail(email_id)
self.emails_info[email_id] = FakeEmailInfo(self.emails[email_id], num_sent, failed)
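        # FakeContentTask, FakeEmail, and FakeEmailInfo (fakes defined
        # elsewhere in this module) stand in for the task history entry, the
        # CourseEmail row, and its serialized response form, respectively.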
def get_matching_mock_email(self, **kwargs):
""" Returns the matching mock emails for the given id """
email_id = kwargs.get('id', 0)
return self.emails[email_id]
def get_email_content_response(self, num_emails, task_history_request, with_failures=False):
""" Calls the list_email_content endpoint and returns the repsonse """
self.setup_fake_email_info(num_emails, with_failures)
task_history_request.return_value = self.tasks.values()
url = reverse('list_email_content', kwargs={'course_id': self.course.id.to_deprecated_string()})
with patch('instructor.views.api.CourseEmail.objects.get') as mock_email_info:
mock_email_info.side_effect = self.get_matching_mock_email
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
return response
def check_emails_sent(self, num_emails, task_history_request, with_failures=False):
""" Tests sending emails with or without failures """
response = self.get_email_content_response(num_emails, task_history_request, with_failures)
self.assertTrue(task_history_request.called)
expected_email_info = [email_info.to_dict() for email_info in self.emails_info.values()]
actual_email_info = json.loads(response.content)['emails']
self.assertEqual(len(actual_email_info), num_emails)
for exp_email, act_email in zip(expected_email_info, actual_email_info):
self.assertDictEqual(exp_email, act_email)
self.assertEqual(expected_email_info, actual_email_info)
def test_content_list_one_email(self, task_history_request):
""" Test listing of bulk emails when email list has one email """
response = self.get_email_content_response(1, task_history_request)
self.assertTrue(task_history_request.called)
email_info = json.loads(response.content)['emails']
# Emails list should have one email
self.assertEqual(len(email_info), 1)
# Email content should be what's expected
expected_message = self.emails[0].html_message
returned_email_info = email_info[0]
received_message = returned_email_info[u'email'][u'html_message']
self.assertEqual(expected_message, received_message)
def test_content_list_no_emails(self, task_history_request):
""" Test listing of bulk emails when email list empty """
response = self.get_email_content_response(0, task_history_request)
self.assertTrue(task_history_request.called)
email_info = json.loads(response.content)['emails']
# Emails list should be empty
self.assertEqual(len(email_info), 0)
def test_content_list_email_content_many(self, task_history_request):
""" Test listing of bulk emails sent large amount of emails """
self.check_emails_sent(50, task_history_request)
def test_list_email_content_error(self, task_history_request):
""" Test handling of error retrieving email """
invalid_task = FakeContentTask(0, 0, 0, 'test')
invalid_task.make_invalid_input()
task_history_request.return_value = [invalid_task]
url = reverse('list_email_content', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
self.assertTrue(task_history_request.called)
returned_email_info = json.loads(response.content)['emails']
self.assertEqual(len(returned_email_info), 1)
returned_info = returned_email_info[0]
for info in ['created', 'sent_to', 'email', 'number_sent', 'requester']:
self.assertEqual(returned_info[info], None)
def test_list_email_with_failure(self, task_history_request):
""" Test the handling of email task that had failures """
self.check_emails_sent(1, task_history_request, True)
def test_list_many_emails_with_failures(self, task_history_request):
""" Test the handling of many emails with failures """
self.check_emails_sent(50, task_history_request, True)
def test_list_email_with_no_successes(self, task_history_request):
task_info = FakeContentTask(0, 0, 10, 'expected')
email = FakeEmail(0)
email_info = FakeEmailInfo(email, 0, 10)
task_history_request.return_value = [task_info]
url = reverse('list_email_content', kwargs={'course_id': self.course.id.to_deprecated_string()})
with patch('instructor.views.api.CourseEmail.objects.get') as mock_email_info:
mock_email_info.return_value = email
response = self.client.get(url, {})
self.assertEqual(response.status_code, 200)
self.assertTrue(task_history_request.called)
returned_info_list = json.loads(response.content)['emails']
self.assertEqual(len(returned_info_list), 1)
returned_info = returned_info_list[0]
expected_info = email_info.to_dict()
self.assertDictEqual(expected_info, returned_info)
@attr('shard_1')
class TestInstructorAPIHelpers(TestCase):
""" Test helpers for instructor.api """
def test_split_input_list(self):
strings = []
lists = []
strings.append(
"[email protected], [email protected]\[email protected]\r [email protected]\r, [email protected]")
lists.append(['[email protected]', '[email protected]', '[email protected]', '[email protected]',
'[email protected]'])
for (stng, lst) in zip(strings, lists):
self.assertEqual(_split_input_list(stng), lst)
def test_split_input_list_unicode(self):
self.assertEqual(_split_input_list('[email protected], [email protected]'),
['[email protected]', '[email protected]'])
self.assertEqual(_split_input_list(u'[email protected], [email protected]'),
['[email protected]', '[email protected]'])
self.assertEqual(_split_input_list(u'[email protected], [email protected]'),
[u'[email protected]', '[email protected]'])
scary_unistuff = unichr(40960) + u'abcd' + unichr(1972)
self.assertEqual(_split_input_list(scary_unistuff), [scary_unistuff])
def test_msk_from_problem_urlname(self):
course_id = SlashSeparatedCourseKey('MITx', '6.002x', '2013_Spring')
name = 'L2Node1'
output = 'i4x://MITx/6.002x/problem/L2Node1'
self.assertEqual(msk_from_problem_urlname(course_id, name).to_deprecated_string(), output)
@raises(ValueError)
def test_msk_from_problem_urlname_error(self):
args = ('notagoodcourse', 'L2Node1')
msk_from_problem_urlname(*args)
def get_extended_due(course, unit, user):
"""
Gets the overridden due date for the given user on the given unit. Returns
`None` if there is no override set.
"""
try:
override = StudentFieldOverride.objects.get(
course_id=course.id,
student=user,
location=unit.location,
field='due'
)
return DATE_FIELD.from_json(json.loads(override.value))
except StudentFieldOverride.DoesNotExist:
return None
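# Usage sketch: after a successful `change_due_date` call for a student on a
# unit (see TestDueDateExtensions below), get_extended_due returns the
# overridden datetime, e.g. datetime.datetime(2013, 12, 30, 0, 0, tzinfo=utc);
# with no override in place it returns None.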
@attr('shard_1')
class TestDueDateExtensions(SharedModuleStoreTestCase, LoginEnrollmentTestCase):
"""
    Test due date extensions.
"""
@classmethod
def setUpClass(cls):
super(TestDueDateExtensions, cls).setUpClass()
cls.course = CourseFactory.create()
cls.due = datetime.datetime(2010, 5, 12, 2, 42, tzinfo=utc)
with cls.store.bulk_operations(cls.course.id, emit_signals=False):
cls.week1 = ItemFactory.create(due=cls.due)
cls.week2 = ItemFactory.create(due=cls.due)
cls.week3 = ItemFactory.create() # No due date
cls.course.children = [
cls.week1.location.to_deprecated_string(),
cls.week2.location.to_deprecated_string(),
cls.week3.location.to_deprecated_string()
]
cls.homework = ItemFactory.create(
parent_location=cls.week1.location,
due=cls.due
)
cls.week1.children = [cls.homework.location.to_deprecated_string()]
def setUp(self):
"""
Fixtures.
"""
super(TestDueDateExtensions, self).setUp()
user1 = UserFactory.create()
StudentModule(
state='{}',
student_id=user1.id,
course_id=self.course.id,
module_state_key=self.week1.location).save()
StudentModule(
state='{}',
student_id=user1.id,
course_id=self.course.id,
module_state_key=self.week2.location).save()
StudentModule(
state='{}',
student_id=user1.id,
course_id=self.course.id,
module_state_key=self.week3.location).save()
StudentModule(
state='{}',
student_id=user1.id,
course_id=self.course.id,
module_state_key=self.homework.location).save()
user2 = UserFactory.create()
StudentModule(
state='{}',
student_id=user2.id,
course_id=self.course.id,
module_state_key=self.week1.location).save()
StudentModule(
state='{}',
student_id=user2.id,
course_id=self.course.id,
module_state_key=self.homework.location).save()
user3 = UserFactory.create()
StudentModule(
state='{}',
student_id=user3.id,
course_id=self.course.id,
module_state_key=self.week1.location).save()
StudentModule(
state='{}',
student_id=user3.id,
course_id=self.course.id,
module_state_key=self.homework.location).save()
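        # user1 has state on week1, week2, week3, and the homework; user2 and
        # user3 have state only on week1 and the homework.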
self.user1 = user1
self.user2 = user2
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
def test_change_due_date(self):
url = reverse('change_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
'due_datetime': '12/30/2013 00:00'
})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(datetime.datetime(2013, 12, 30, 0, 0, tzinfo=utc),
get_extended_due(self.course, self.week1, self.user1))
def test_change_to_invalid_due_date(self):
url = reverse('change_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
'due_datetime': '01/01/2009 00:00'
})
self.assertEqual(response.status_code, 400, response.content)
self.assertEqual(
None,
get_extended_due(self.course, self.week1, self.user1)
)
def test_change_nonexistent_due_date(self):
url = reverse('change_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week3.location.to_deprecated_string(),
'due_datetime': '12/30/2013 00:00'
})
self.assertEqual(response.status_code, 400, response.content)
self.assertEqual(
None,
get_extended_due(self.course, self.week3, self.user1)
)
def test_reset_date(self):
self.test_change_due_date()
url = reverse('reset_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(
None,
get_extended_due(self.course, self.week1, self.user1)
)
def test_reset_nonexistent_extension(self):
url = reverse('reset_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
})
self.assertEqual(response.status_code, 400, response.content)
@SharedModuleStoreTestCase.modifies_courseware
def test_reset_extension_to_deleted_date(self):
"""
Test that we can delete a due date extension after deleting the normal
due date, without causing an error.
"""
self.test_change_due_date()
self.week1.due = None
self.week1 = self.store.update_item(self.week1, self.user1.id)
# Now, week1's normal due date is deleted but the extension still exists.
url = reverse('reset_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(
None,
get_extended_due(self.course, self.week1, self.user1)
)
def test_show_unit_extensions(self):
self.test_change_due_date()
url = reverse('show_unit_extensions',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {'url': self.week1.location.to_deprecated_string()})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(json.loads(response.content), {
u'data': [{u'Extended Due Date': u'2013-12-30 00:00',
u'Full Name': self.user1.profile.name,
u'Username': self.user1.username}],
u'header': [u'Username', u'Full Name', u'Extended Due Date'],
u'title': u'Users with due date extensions for %s' %
self.week1.display_name})
def test_show_student_extensions(self):
self.test_change_due_date()
url = reverse('show_student_extensions',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {'student': self.user1.username})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(json.loads(response.content), {
u'data': [{u'Extended Due Date': u'2013-12-30 00:00',
u'Unit': self.week1.display_name}],
u'header': [u'Unit', u'Extended Due Date'],
u'title': u'Due date extensions for %s (%s)' % (
self.user1.profile.name, self.user1.username)})
@attr('shard_1')
class TestCourseIssuedCertificatesData(SharedModuleStoreTestCase):
"""
Test data dumps for issued certificates.
"""
@classmethod
def setUpClass(cls):
super(TestCourseIssuedCertificatesData, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(TestCourseIssuedCertificatesData, self).setUp()
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
def generate_certificate(self, course_id, mode, status):
"""
        Generate a test certificate for a newly created user.
"""
test_user = UserFactory()
GeneratedCertificateFactory.create(
user=test_user,
course_id=course_id,
mode=mode,
status=status
)
def test_certificates_features_against_status(self):
"""
        Test that only certificates with status 'downloadable' appear in the response.
"""
url = reverse('get_issued_certificates', kwargs={'course_id': unicode(self.course.id)})
        # first, generate certificates in 'honor' mode with status 'generating';
        # these should NOT appear in the response
certificate_count = 3
for __ in xrange(certificate_count):
self.generate_certificate(course_id=self.course.id, mode='honor', status=CertificateStatuses.generating)
response = self.client.get(url)
res_json = json.loads(response.content)
self.assertIn('certificates', res_json)
self.assertEqual(len(res_json['certificates']), 0)
# Certificates with status 'downloadable' should be in response.
self.generate_certificate(course_id=self.course.id, mode='honor', status=CertificateStatuses.downloadable)
response = self.client.get(url)
res_json = json.loads(response.content)
self.assertIn('certificates', res_json)
self.assertEqual(len(res_json['certificates']), 1)
def test_certificates_features_group_by_mode(self):
"""
        Test certificate CSV features by mode. Certificates should be grouped by 'mode' in the response.
"""
url = reverse('get_issued_certificates', kwargs={'course_id': unicode(self.course.id)})
        # first, generate downloadable certificates in 'honor' mode
certificate_count = 3
for __ in xrange(certificate_count):
self.generate_certificate(course_id=self.course.id, mode='honor', status=CertificateStatuses.downloadable)
response = self.client.get(url)
res_json = json.loads(response.content)
self.assertIn('certificates', res_json)
self.assertEqual(len(res_json['certificates']), 1)
        # retrieve the first entry from the list; it should aggregate the 3 'honor' certificates.
certificate = res_json['certificates'][0]
self.assertEqual(certificate.get('total_issued_certificate'), 3)
self.assertEqual(certificate.get('mode'), 'honor')
self.assertEqual(certificate.get('course_id'), str(self.course.id))
# Now generating downloadable certificates with 'verified' mode
for __ in xrange(certificate_count):
self.generate_certificate(
course_id=self.course.id,
mode='verified',
status=CertificateStatuses.downloadable
)
response = self.client.get(url)
res_json = json.loads(response.content)
self.assertIn('certificates', res_json)
        # there should now be two entries, one per mode ('honor' and 'verified').
self.assertEqual(len(res_json['certificates']), 2)
# retrieve the second certificate from the list
certificate = res_json['certificates'][1]
self.assertEqual(certificate.get('total_issued_certificate'), 3)
self.assertEqual(certificate.get('mode'), 'verified')
def test_certificates_features_csv(self):
"""
Test for certificate csv features.
"""
url = reverse('get_issued_certificates', kwargs={'course_id': unicode(self.course.id)})
url += '?csv=true'
        # first, generate downloadable certificates in 'honor' mode
certificate_count = 3
for __ in xrange(certificate_count):
self.generate_certificate(course_id=self.course.id, mode='honor', status=CertificateStatuses.downloadable)
current_date = datetime.date.today().strftime("%B %d, %Y")
response = self.client.get(url)
self.assertEqual(response['Content-Type'], 'text/csv')
self.assertEqual(response['Content-Disposition'], 'attachment; filename={0}'.format('issued_certificates.csv'))
self.assertEqual(
response.content.strip(),
'"CourseID","Certificate Type","Total Certificates Issued","Date Report Run"\r\n"'
+ str(self.course.id) + '","honor","3","' + current_date + '"'
)
@attr('shard_1')
@override_settings(REGISTRATION_CODE_LENGTH=8)
class TestCourseRegistrationCodes(SharedModuleStoreTestCase):
"""
Test data dumps for E-commerce Course Registration Codes.
"""
@classmethod
def setUpClass(cls):
super(TestCourseRegistrationCodes, cls).setUpClass()
cls.course = CourseFactory.create()
cls.url = reverse(
'generate_registration_codes',
kwargs={'course_id': cls.course.id.to_deprecated_string()}
)
def setUp(self):
"""
Fixtures.
"""
super(TestCourseRegistrationCodes, self).setUp()
CourseModeFactory.create(course_id=self.course.id, min_price=50)
self.instructor = InstructorFactory(course_key=self.course.id)
self.client.login(username=self.instructor.username, password='test')
CourseSalesAdminRole(self.course.id).add_users(self.instructor)
data = {
'total_registration_codes': 12, 'company_name': 'Test Group', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street',
'address_line_2': '', 'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(self.url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
        for __ in range(5):
order = Order(user=self.instructor, status='purchased')
order.save()
        # Mark the first five registration codes as spent (redeemed).
        for i in range(1, 6):
registration_code_redemption = RegistrationCodeRedemption(
registration_code_id=i,
redeemed_by=self.instructor
)
registration_code_redemption.save()
@override_settings(FINANCE_EMAIL='[email protected]')
def test_finance_email_in_recipient_list_when_generating_registration_codes(self):
"""
Test to verify that the invoice will also be sent to the FINANCE_EMAIL when
generating registration codes
"""
url_reg_code = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 5, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 121.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': 'True'
}
response = self.client.post(url_reg_code, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
        # The FINANCE_EMAIL is appended at the very end of the recipient list when
        # generating registration codes, so inspect the last message in mail.outbox
self.assertEqual(mail.outbox[-1].to[0], '[email protected]')
def test_user_invoice_copy_preference(self):
"""
Test to remember user invoice copy preference
"""
url_reg_code = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 5, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 121.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': 'True'
}
        # the user's invoice copy preference is saved in the user preference model
response = self.client.post(url_reg_code, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
# get user invoice copy preference.
url_user_invoice_preference = reverse('get_user_invoice_preference',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url_user_invoice_preference, data)
result = json.loads(response.content)
self.assertEqual(result['invoice_copy'], True)
# updating the user invoice copy preference during code generation flow
data['invoice'] = ''
response = self.client.post(url_reg_code, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
# get user invoice copy preference.
url_user_invoice_preference = reverse('get_user_invoice_preference',
kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.post(url_user_invoice_preference, data)
result = json.loads(response.content)
self.assertEqual(result['invoice_copy'], False)
def test_generate_course_registration_codes_csv(self):
"""
Test to generate a response of all the generated course registration codes
"""
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 15, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 17)
def test_generate_course_registration_with_redeem_url_codes_csv(self):
"""
        Test that the generated course registration codes CSV includes a redeem URL for each code
"""
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 15, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 17)
rows = body.split('\n')
        for row in rows[1:]:
            if row:
                row_data = row.split(',')
                code = row_data[0].replace('"', '')
                self.assertTrue(row_data[1].startswith('"http')
                                and row_data[1].endswith('/shoppingcart/register/redeem/{0}/"'.format(code)))
@patch.object(instructor.views.api, 'random_code_generator',
Mock(side_effect=['first', 'second', 'third', 'fourth']))
def test_generate_course_registration_codes_matching_existing_coupon_code(self):
"""
Test the generated course registration code is already in the Coupon Table
"""
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
coupon = Coupon(code='first', course_id=self.course.id.to_deprecated_string(), created_by=self.instructor)
coupon.save()
data = {
'total_registration_codes': 3, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 5) # 1 for headers, 1 for new line at the end and 3 for the actual data
@patch.object(instructor.views.api, 'random_code_generator',
Mock(side_effect=['first', 'first', 'second', 'third']))
def test_generate_course_registration_codes_integrity_error(self):
"""
Test for the Integrity error against the generated code
"""
url = reverse('generate_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {
'total_registration_codes': 2, 'company_name': 'Test Group', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
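        # 2 codes requested: header + 2 data rows + trailing newline = 4 lines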
self.assertEqual(len(body.split('\n')), 4)
def test_spent_course_registration_codes_csv(self):
"""
Test to generate a response of all the spent course registration codes
"""
url = reverse('spent_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'spent_company_name': ''}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
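        # 5 codes were redeemed in setUp: header + 5 rows + trailing newline = 7 lines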
self.assertEqual(len(body.split('\n')), 7)
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 9, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'unit_price': 122.45, 'company_contact_email': '[email protected]', 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(generate_code_url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
for i in range(9):
order = Order(user=self.instructor, status='purchased')
order.save()
# Spent(used) Registration Codes
        for i in range(13, 22):
registration_code_redemption = RegistrationCodeRedemption(
registration_code_id=i,
redeemed_by=self.instructor
)
registration_code_redemption.save()
data = {'spent_company_name': 'Group Alpha'}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 11)
def test_active_course_registration_codes_csv(self):
"""
Test to generate a response of all the active course registration codes
"""
url = reverse('active_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'active_company_name': ''}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 9)
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 9, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(generate_code_url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
data = {'active_company_name': 'Group Alpha'}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 11)
def test_get_all_course_registration_codes_csv(self):
"""
Test to generate a response of all the course registration codes
"""
url = reverse(
'get_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {'download_company_name': ''}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 14)
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 9, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
response = self.client.post(generate_code_url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
data = {'download_company_name': 'Group Alpha'}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
self.assertEqual(len(body.split('\n')), 11)
def test_pdf_file_throws_exception(self):
"""
        Test that registration code generation still succeeds when the
        mocked PDF invoice generation raises an exception.
"""
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 9, 'company_name': 'Group Alpha', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': ''
}
with patch.object(PDFInvoice, 'generate_pdf', side_effect=Exception):
response = self.client.post(generate_code_url, data)
self.assertEqual(response.status_code, 200, response.content)
def test_get_codes_with_sale_invoice(self):
"""
        Test downloading registration codes that were generated with a sale invoice
"""
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 5.5, 'company_name': 'Group Invoice', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 122.45, 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': True
}
response = self.client.post(generate_code_url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 200, response.content)
url = reverse('get_registration_codes',
kwargs={'course_id': self.course.id.to_deprecated_string()})
data = {'download_company_name': 'Group Invoice'}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200, response.content)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_CSV_HEADER))
def test_with_invalid_unit_price(self):
"""
        Test that an invalid unit price is rejected with a 400 response
"""
generate_code_url = reverse(
'generate_registration_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
data = {
'total_registration_codes': 10, 'company_name': 'Group Invoice', 'company_contact_name': '[email protected]',
'company_contact_email': '[email protected]', 'unit_price': 'invalid', 'recipient_name': 'Test123',
'recipient_email': '[email protected]', 'address_line_1': 'Portland Street', 'address_line_2': '',
'address_line_3': '', 'city': '', 'state': '', 'zip': '', 'country': '',
'customer_reference_number': '123A23F', 'internal_reference': '', 'invoice': True
}
response = self.client.post(generate_code_url, data, **{'HTTP_HOST': 'localhost'})
self.assertEqual(response.status_code, 400, response.content)
self.assertIn('Could not parse amount as', response.content)
def test_get_historical_coupon_codes(self):
"""
        Test to download a response of all the coupon codes, both with and without expiration dates
"""
get_coupon_code_url = reverse(
'get_coupon_codes', kwargs={'course_id': self.course.id.to_deprecated_string()}
)
for i in range(10):
coupon = Coupon(
code='test_code{0}'.format(i), description='test_description', course_id=self.course.id,
percentage_discount='{0}'.format(i), created_by=self.instructor, is_active=True
)
coupon.save()
        # now create coupons with expiration dates
for i in range(5):
coupon = Coupon(
code='coupon{0}'.format(i), description='test_description', course_id=self.course.id,
percentage_discount='{0}'.format(i), created_by=self.instructor, is_active=True,
expiration_date=datetime.datetime.now(pytz.UTC) + datetime.timedelta(days=2)
)
coupon.save()
response = self.client.get(get_coupon_code_url)
self.assertEqual(response.status_code, 200, response.content)
# filter all the coupons
for coupon in Coupon.objects.all():
self.assertIn(
'"{coupon_code}","{course_id}","{discount}","{description}","{expiration_date}","{is_active}",'
'"{code_redeemed_count}","{total_discounted_seats}","{total_discounted_amount}"'.format(
coupon_code=coupon.code,
course_id=coupon.course_id,
discount=coupon.percentage_discount,
description=coupon.description,
expiration_date=coupon.display_expiry_date,
is_active=coupon.is_active,
code_redeemed_count="0",
total_discounted_seats="0",
total_discounted_amount="0",
), response.content
)
self.assertEqual(response['Content-Type'], 'text/csv')
body = response.content.replace('\r', '')
self.assertTrue(body.startswith(EXPECTED_COUPON_CSV_HEADER))
@attr('shard_1')
class TestBulkCohorting(SharedModuleStoreTestCase):
"""
Test adding users to cohorts in bulk via CSV upload.
"""
@classmethod
def setUpClass(cls):
super(TestBulkCohorting, cls).setUpClass()
cls.course = CourseFactory.create()
def setUp(self):
super(TestBulkCohorting, self).setUp()
self.staff_user = StaffFactory(course_key=self.course.id)
self.non_staff_user = UserFactory.create()
self.tempdir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, self.tempdir)
def call_add_users_to_cohorts(self, csv_data, suffix='.csv', method='POST'):
"""
Call `add_users_to_cohorts` with a file generated from `csv_data`.
"""
        # this temporary file will be removed by the addCleanup(shutil.rmtree, ...) handler registered in setUp
__, file_name = tempfile.mkstemp(suffix=suffix, dir=self.tempdir)
with open(file_name, 'w') as file_pointer:
file_pointer.write(csv_data.encode('utf-8'))
with open(file_name, 'r') as file_pointer:
url = reverse('add_users_to_cohorts', kwargs={'course_id': unicode(self.course.id)})
if method == 'POST':
return self.client.post(url, {'uploaded-file': file_pointer})
elif method == 'GET':
return self.client.get(url, {'uploaded-file': file_pointer})
def expect_error_on_file_content(self, file_content, error, file_suffix='.csv'):
"""
Verify that we get the error we expect for a given file input.
"""
self.client.login(username=self.staff_user.username, password='test')
response = self.call_add_users_to_cohorts(file_content, suffix=file_suffix)
self.assertEqual(response.status_code, 400)
result = json.loads(response.content)
self.assertEqual(result['error'], error)
def verify_success_on_file_content(self, file_content, mock_store_upload, mock_cohort_task):
"""
        Verify that `add_users_to_cohorts` successfully validates the
file content, uploads the input file, and triggers the
background task.
"""
mock_store_upload.return_value = (None, 'fake_file_name.csv')
self.client.login(username=self.staff_user.username, password='test')
response = self.call_add_users_to_cohorts(file_content)
self.assertEqual(response.status_code, 204)
self.assertTrue(mock_store_upload.called)
self.assertTrue(mock_cohort_task.called)
def test_no_cohort_field(self):
"""
Verify that we get a descriptive verification error when we haven't
included a cohort field in the uploaded CSV.
"""
self.expect_error_on_file_content(
'username,email\n', "The file must contain a 'cohort' column containing cohort names."
)
def test_no_username_or_email_field(self):
"""
Verify that we get a descriptive verification error when we haven't
included a username or email field in the uploaded CSV.
"""
self.expect_error_on_file_content(
'cohort\n', "The file must contain a 'username' column, an 'email' column, or both."
)
def test_empty_csv(self):
"""
Verify that we get a descriptive verification error when we haven't
included any data in the uploaded CSV.
"""
self.expect_error_on_file_content(
'', "The file must contain a 'cohort' column containing cohort names."
)
def test_wrong_extension(self):
"""
Verify that we get a descriptive verification error when we haven't
uploaded a file with a '.csv' extension.
"""
self.expect_error_on_file_content(
'', "The file must end with the extension '.csv'.", file_suffix='.notcsv'
)
def test_non_staff_no_access(self):
"""
Verify that we can't access the view when we aren't a staff user.
"""
self.client.login(username=self.non_staff_user.username, password='test')
response = self.call_add_users_to_cohorts('')
self.assertEqual(response.status_code, 403)
def test_post_only(self):
"""
Verify that we can't call the view when we aren't using POST.
"""
self.client.login(username=self.staff_user.username, password='test')
response = self.call_add_users_to_cohorts('', method='GET')
self.assertEqual(response.status_code, 405)
@patch('instructor.views.api.instructor_task.api.submit_cohort_students')
@patch('instructor.views.api.store_uploaded_file')
def test_success_username(self, mock_store_upload, mock_cohort_task):
"""
Verify that we store the input CSV and call a background task when
the CSV has username and cohort columns.
"""
self.verify_success_on_file_content(
'username,cohort\nfoo_username,bar_cohort', mock_store_upload, mock_cohort_task
)
@patch('instructor.views.api.instructor_task.api.submit_cohort_students')
@patch('instructor.views.api.store_uploaded_file')
def test_success_email(self, mock_store_upload, mock_cohort_task):
"""
Verify that we store the input CSV and call the cohorting background
task when the CSV has email and cohort columns.
"""
self.verify_success_on_file_content(
'email,cohort\nfoo_email,bar_cohort', mock_store_upload, mock_cohort_task
)
@patch('instructor.views.api.instructor_task.api.submit_cohort_students')
@patch('instructor.views.api.store_uploaded_file')
def test_success_username_and_email(self, mock_store_upload, mock_cohort_task):
"""
Verify that we store the input CSV and call the cohorting background
task when the CSV has username, email and cohort columns.
"""
self.verify_success_on_file_content(
'username,email,cohort\nfoo_username,bar_email,baz_cohort', mock_store_upload, mock_cohort_task
)
@patch('instructor.views.api.instructor_task.api.submit_cohort_students')
@patch('instructor.views.api.store_uploaded_file')
def test_success_carriage_return(self, mock_store_upload, mock_cohort_task):
"""
Verify that we store the input CSV and call the cohorting background
task when lines in the CSV are delimited by carriage returns.
"""
self.verify_success_on_file_content(
'username,email,cohort\rfoo_username,bar_email,baz_cohort', mock_store_upload, mock_cohort_task
)
@patch('instructor.views.api.instructor_task.api.submit_cohort_students')
@patch('instructor.views.api.store_uploaded_file')
def test_success_carriage_return_line_feed(self, mock_store_upload, mock_cohort_task):
"""
Verify that we store the input CSV and call the cohorting background
task when lines in the CSV are delimited by carriage returns and line
feeds.
"""
self.verify_success_on_file_content(
'username,email,cohort\r\nfoo_username,bar_email,baz_cohort', mock_store_upload, mock_cohort_task
)
| IndonesiaX/edx-platform | lms/djangoapps/instructor/tests/test_api.py | Python | agpl-3.0 | 212,156 |
import re
def test_phones_on_home_page(app):
contact_from_home_page = app.contact.get_contact_list()[0]
contact_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
assert contact_from_home_page.all_phones_from_home_page == merge_phones_like_on_home_page(contact_from_edit_page)
def test_phones_on_contact_view_page(app):
contact_from_view_page = app.contact.get_contact_from_view_page(0)
contact_from_edit_page = app.contact.get_contact_info_from_edit_page(0)
assert contact_from_view_page.homephone == contact_from_edit_page.homephone
assert contact_from_view_page.workphone == contact_from_edit_page.workphone
assert contact_from_view_page.mobilephone == contact_from_edit_page.mobilephone
def clear(s):
return re.sub("[() -]", "", s)
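
# Reproduce the home page's phone rendering: strip formatting characters,
# drop missing/empty numbers, and join the rest with newlines.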
def merge_phones_like_on_home_page(contact):
    phones = [contact.homephone, contact.workphone, contact.mobilephone]
    cleared = [clear(phone) for phone in phones if phone is not None]
    return "\n".join(phone for phone in cleared if phone != "")
 | volkodav1985/volkodavpython | test 1/test_phones.py | Python | apache-2.0 | 1,116 |
'''
HashDb2
Usage:
hashdb2 -h | --help
hashdb2 --version
hashdb2 hash [-f|-q|-n] DATABASE -- INPUTS...
hashdb2 comp [-f|-q|-n] [-steb0cid] [--separator SEP] ((--lhs-db LHSDB [(--lhs-path LHSPATH [--lhs-update])]) | --lhs-path LHSPATH) [(--rhs-db RHSDB ([--rhs-path RHSPATH [--rhs-update]])) | --rhs-path RHSPATH] -- COMMAND...
Options:
hash Create/Update DATABASE with INPUTS
comp Compare inputs, executing COMMAND for each result according to the special arguments provided to COMMAND
-f, --full Generate/Compare complete hash
-q, --quick Generate/Compare quick hash
-n, --none Do not generate/compare hashes [default]
-s, --size Compare using size
-t, --time Compare using modification time
-e, --extension Compare using file extension
-b, --basename Compare using basename
-0, --skip-empty Skip empty files
-c, --echo Echo command before execution
-i, --ignore-errors Ignore errors when executing command
-d, --dry-run Echo command but do not run it
--lhs-db LHSDB Left database input
--lhs-update Update left database as required
--lhs-path LHSPATH Left sub-path
--rhs-db RHSDB Right database input
--rhs-update Update right database as required
--rhs-path RHSPATH Right sub-path
--separator SEP Separator used in GROUP results [default: ,]
DATABASE Name of the database to create/update
INPUTS List files/folders to add to DATABASE
COMMAND Command which is executed according to matched groups
The following values within command have special meaning:
{LHS}
{LHS} {RHS}
{LHS} {RHSGROUP}
{LHSGROUP}
{LHSGROUP} {RHS}
{LHSGROUP} {RHSGROUP}
{LHSONLY}
{LHSONLYGROUP}
{RHS}
{RHSGROUP}
{RHSONLY}
{RHSONLYGROUP}
{DUPE}
{DUPEGROUP}
{UNIQUE}
{UNIQUEGROUP}
LHS and RHS specifies the input
The GROUP suffix combines items into a list using the separator specified by --separator.
The ONLY suffix finds elements which have no match
Use DUPE to get inputs which have duplicates (not valid with rhs)
Use UNIQUE to get inputs which are unique (not valid with rhs)
In addition, each of the special names above can be suffixed with one of the following:
dirpath full folder path
basename full basename including extension
ext file extension (including the '.')
name basename, excluding the extension
drive drive
dirpathnodrive full folder path, without the drive
fullpath full path
eg:
{LHS:basename}
{RHSGROUP:dirpath}
'''
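
# Illustrative invocations (paths and database names are hypothetical),
# following the usage grammar documented above:
#   hashdb2 hash -q files.db -- ~/Documents ~/Pictures
#   hashdb2 comp -q --lhs-db files.db --rhs-path /mnt/backup -- echo {LHS} {RHS}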
from docopt import docopt
import pkg_resources # part of setuptools
version = pkg_resources.require("hashdb2")[0].version
import sys
def main(argv=None, fcapture=None):
if argv is None:
argv = sys.argv
else:
argv = [__file__] + argv
global __doc__
arguments = docopt(__doc__, argv[1:], version='HashDb2 ' + version)
print(arguments)
if arguments['hash']:
from .command_hash import command_hash
command_hash(arguments)
elif arguments['comp']:
from .command_comp import command_comp
command_comp(arguments, fcapture=fcapture)
if __name__ == '__main__':
main()
| WHenderson/HashDb | hashdb2/command_line.py | Python | apache-2.0 | 4,335 |
from pyparsing import *
class VmiPlGrammar:
ParserElement.setDefaultWhitespaceChars(" ")
@staticmethod
def parse(input_string):
tab = Suppress('\t')
newline = Suppress('\n')
ob = Suppress('(')
cb = Suppress(')')
ocb = Suppress('{')
ccb = Suppress('}')
comma = Suppress(',')
colon = Suppress(':')
hash_symbol = '#'
hex_indicator = Suppress('0x')
va = hex_indicator + Word(hexnums, exact=8)
hex_value = hex_indicator + Word(hexnums)
number = Word(nums)
size = number
offset = number
mac_address = Word(alphanums + ':')
filepath = Word(alphanums + '/' + '_')
stream_id = Combine(hash_symbol + Word(alphanums))
readable_register = oneOf('''EAX EBX ECX EDX ESI EDI ESP EBP EFLAGS CS SS DS
ES FS GS TR LDTR GDTR IDTR CR0 CR2 CR3 CR4''')
vmcs_field = oneOf('''VIRTUAL_PROCESSOR_ID VM_EXIT_REASON VM_EXIT_INTR_INFO
VM_EXIT_INTR_ERROR_CODE IDT_VECTORING_INFO_FIELD
IDT_VECTORING_ERROR_CODE EXIT_QUALIFICATION''')
proc_field = oneOf('''PID NAME PATH PGDP''')
config_item = (tab + Group('ProcessListHead' + colon + va |
'TasksOffset' + colon + number |
'PIDOffset' + colon + number |
'ProcessNameOffset' + colon + number |
'MMStructOffset' + colon + number |
'ExeFileOffset' + colon + number |
'DEntryOffset' + colon + number |
'ParentOffset' + colon + number |
'DNameOffset' + colon + number |
'PGDOffset' + colon + number |
'SyscallDispatcherAddress' + colon + va |
'SyscallInterruptNumber' + colon + number |
'SyscallNumberLocation' + colon + readable_register)
+ newline)
configuration = Group(Group('Configuration') + ocb + newline +
OneOrMore(config_item) + ccb
+ newline).setResultsName('configuration')
vmcs_field_item = comma + vmcs_field
vmcs_fields = vmcs_field + ZeroOrMore(vmcs_field_item)
proc_field_item = comma + proc_field
proc_fields = proc_field + ZeroOrMore(proc_field_item)
data_probe_name = tab + ("ReadRegister" + ob + readable_register + comma |
"ReadMemory" + ob + va + comma + size + comma |
"ReadMemoryAt" + ob + readable_register + comma
+ size + comma + offset + comma |
"ReadVMCS" + ob + vmcs_fields + comma |
"ProcessList" + ob + proc_fields + comma)
probe_output = (stream_id | filepath)
data_probe = Group( data_probe_name
+ probe_output + cb
+ newline)
filter_data_probe = tab + data_probe
filter_name = Group("RegisterHasValue" + ob + readable_register + comma
+ hex_value + cb |
"ValueAtAddressIs" + ob + va + comma + hex_value
+ cb).setResultsName('filter_name')
filter_block = (ocb + newline
+ OneOrMore(filter_data_probe).setResultsName('data_probes')
+ tab + ccb)
filters = Group(tab + filter_name + filter_block + newline)
evt_probe_name = Group("CRWrite" + ob + number + cb |
"IDTRWrite" |
"GDTRWrite" |
"IDTWrite" |
"GDTWrite" |
"MSRWrite" |
"OnResume" |
"ReadAt" + ob + va + cb |
"WriteAt" + ob + va + cb |
"ExecuteAt" + ob + va + cb |
"OnExternalRequest" + ob + number + cb |
"Syscall" + Optional(ob + number + cb)).setResultsName('probe_name')
evt_probe_block = (ocb + newline
+ ZeroOrMore(filters).setResultsName('filters')
+ ZeroOrMore(data_probe).setResultsName('data_probes')
+ ccb)
event_probe = Group(evt_probe_name + evt_probe_block) + newline
str_probe_name = ("CaptureNetwork" + ob + mac_address + comma |
"CaptureKeyboardInput" + ob)
stream_probe = Group(str_probe_name + probe_output + cb + newline)
probes = (ZeroOrMore(event_probe).setResultsName('event_probes')
+ ZeroOrMore(stream_probe).setResultsName('stream_probes'))
script = ZeroOrMore(configuration) + probes
script.parseWithTabs()
return script.parseString(input_string)
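
# Minimal usage sketch (the script file name is hypothetical):
#   parse_results = VmiPlGrammar.parse(open('probes.vmipl').read())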
| FlorianWestphal/VMI-PL | front_end/vmipl/vmipl_grammar.py | Python | mit | 3,942 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models
class IrConfigParameter(models.Model):
_inherit = 'ir.config_parameter'
def init(self, force=False):
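        """On a forced init, point the builtin OpenERP OAuth provider's client_id at this database's uuid."""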
super(IrConfigParameter, self).init(force=force)
if force:
            oauth_oe = self.env.ref('auth_oauth.provider_openerp', raise_if_not_found=False)
if not oauth_oe:
return
dbuuid = self.sudo().get_param('database.uuid')
oauth_oe.write({'client_id': dbuuid})
| richard-willowit/odoo | addons/auth_oauth/models/ir_config_parameter.py | Python | gpl-3.0 | 540 |
import numpy as np
import pandas as pd
# from matplotlib.pyplot import plot,show,draw
import scipy.io
import sys
sys.path.append("../")
from functions import *
from pylab import *
from sklearn.decomposition import PCA
import _pickle as cPickle
import matplotlib.cm as cm
import os
###############################################################################################################
# TO LOAD
###############################################################################################################
store = pd.HDFStore("../../figures/figures_articles/figure6/determinant_corr.h5", 'r')
det_all = store['det_all']
shufflings = store['shufflings']
store.close()
data_directory = '/mnt/DataGuillaume/MergedData/'
datasets = np.loadtxt(data_directory+'datasets_ThalHpc.list', delimiter = '\n', dtype = str, comments = '#')
# WHICH NEURONS
space = pd.read_hdf("../../figures/figures_articles/figure1/space.hdf5")
burst = pd.HDFStore("/mnt/DataGuillaume/MergedData/BURSTINESS.h5")['w']
burst = burst.loc[space.index]
hd_index = space.index.values[space['hd'] == 1]
neurontoplot = [np.intersect1d(hd_index, space.index.values[space['cluster'] == 1])[0],
burst.loc[space.index.values[space['cluster'] == 0]].sort_values('sws').index[3],
burst.sort_values('sws').index.values[-20]]
firing_rate = pd.read_hdf("/mnt/DataGuillaume/MergedData/FIRING_RATE_ALL.h5")
fr_index = firing_rate.index.values[((firing_rate >= 1.0).sum(1) == 3).values]
# SWR MODULATION
swr_mod, swr_ses = loadSWRMod('/mnt/DataGuillaume/MergedData/SWR_THAL_corr.pickle', datasets, return_index=True)
nbins = 400
binsize = 5
times = np.arange(0, binsize*(nbins+1), binsize) - (nbins*binsize)/2
swr = pd.DataFrame( columns = swr_ses,
index = times,
data = gaussFilt(swr_mod, (5,)).transpose())
swr = swr.loc[-500:500]
# AUTOCORR FAST
store_autocorr = pd.HDFStore("/mnt/DataGuillaume/MergedData/AUTOCORR_ALL.h5")
autocorr_wak = store_autocorr['wake'].loc[0.5:]
autocorr_rem = store_autocorr['rem'].loc[0.5:]
autocorr_sws = store_autocorr['sws'].loc[0.5:]
autocorr_wak = autocorr_wak.rolling(window = 20, win_type = 'gaussian', center = True, min_periods = 1).mean(std = 3.0)
autocorr_rem = autocorr_rem.rolling(window = 20, win_type = 'gaussian', center = True, min_periods = 1).mean(std = 3.0)
autocorr_sws = autocorr_sws.rolling(window = 20, win_type = 'gaussian', center = True, min_periods = 1).mean(std = 3.0)
autocorr_wak = autocorr_wak[2:20]
autocorr_rem = autocorr_rem[2:20]
autocorr_sws = autocorr_sws[2:20]
neurons = np.intersect1d(swr.dropna(1).columns.values, autocorr_sws.dropna(1).columns.values)
neurons = np.intersect1d(neurons, fr_index)
X = np.copy(swr[neurons].values.T)
Y = np.copy(np.vstack((autocorr_wak[neurons].values,autocorr_rem[neurons].values, autocorr_sws[neurons].values))).T
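# z-score each neuron's concatenated autocorrelograms (rows) before PCA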
Y = Y - Y.mean(1)[:,np.newaxis]
Y = Y / Y.std(1)[:,np.newaxis]
pca_swr = PCA(n_components=10).fit(X)
pca_aut = PCA(n_components=10).fit(Y)
pc_swr = pca_swr.transform(X)
pc_aut = pca_aut.transform(Y)
All = np.hstack((pc_swr, pc_aut))
corr = np.corrcoef(All.T)
# shuffle control: permute neuron rows of X and Y independently to destroy their pairing
Xs = np.copy(X)
Ys = np.copy(Y)
np.random.shuffle(Xs)
np.random.shuffle(Ys)
pc_swr_sh = PCA(n_components=10).fit_transform(Xs)
pc_aut_sh = PCA(n_components=10).fit_transform(Ys)
Alls = np.hstack((pc_swr_sh, pc_aut_sh))
corrsh = np.corrcoef(Alls.T)
# Hellinger distance
store = pd.HDFStore("../../figures/figures_articles/figure6/score_hellinger.h5", 'r')
HL = store['HL']
HLS = store['HLS']
store.close()
# XGB score
mean_score = pd.read_hdf(data_directory+'SCORE_XGB.h5')
###############################################################################################################
# PLOT
###############################################################################################################
def figsize(scale):
fig_width_pt = 483.69687 # Get this from LaTeX using \the\textwidth
inches_per_pt = 1.0/72.27 # Convert pt to inch
golden_mean = (np.sqrt(5.0)-1.0)/2.0 # Aesthetic ratio (you could change this)
fig_width = fig_width_pt*inches_per_pt*scale # width in inches
fig_height = fig_width*golden_mean*0.9 # height in inches
fig_size = [fig_width,fig_height]
return fig_size
def simpleaxis(ax):
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
# ax.xaxis.set_tick_params(size=6)
# ax.yaxis.set_tick_params(size=6)
def noaxis(ax):
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.spines['bottom'].set_visible(False)
ax.get_xaxis().tick_bottom()
ax.get_yaxis().tick_left()
ax.set_xticks([])
ax.set_yticks([])
# ax.xaxis.set_tick_params(size=6)
# ax.yaxis.set_tick_params(size=6)
import matplotlib as mpl
from mpl_toolkits.axes_grid1 import make_axes_locatable
# mpl.use("pdf")
pdf_with_latex = { # setup matplotlib to use latex for output
"pgf.texsystem": "pdflatex", # change this if using xetex or lautex
# "text.usetex": True, # use LaTeX to write all text
# "font.family": "serif",
"font.serif": [], # blank entries should cause plots to inherit fonts from the document
"font.sans-serif": [],
"font.monospace": [],
"axes.labelsize": 8, # LaTeX default is 10pt font.
"font.size": 7,
"legend.fontsize": 7, # Make the legend/label fonts a little smaller
"xtick.labelsize": 7,
"ytick.labelsize": 7,
"pgf.preamble": [
r"\usepackage[utf8x]{inputenc}", # use utf8 fonts becasue your computer can handle it :)
r"\usepackage[T1]{fontenc}", # plots will be generated using this preamble
],
"lines.markeredgewidth" : 0.2,
"axes.linewidth" : 0.8,
"ytick.major.size" : 1.5,
"xtick.major.size" : 1.5
}
mpl.rcParams.update(pdf_with_latex)
import matplotlib.gridspec as gridspec
from matplotlib.pyplot import *
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import matplotlib.cm as cmx
import matplotlib.colors as colors
# colors = ['#444b6e', '#708b75', '#9ab87a']
fig = figure(figsize = figsize(1.0))
gs = gridspec.GridSpec(2,3, wspace = 0.4, hspace = 0.5, width_ratios = [1,0.8,1])
#########################################################################
# A. Exemple
#########################################################################
labels = ['1\nHD', '2\nNon-bursty', '3\nBursty']
titles = ['Wake', 'REM', 'SWS']
viridis = get_cmap('viridis')
cNorm = colors.Normalize(vmin=burst['sws'].min(), vmax = burst['sws'].max())
scalarMap = cmx.ScalarMappable(norm=cNorm, cmap = viridis)
color_ex = ['red', scalarMap.to_rgba(burst.loc[neurontoplot[1], 'sws']), scalarMap.to_rgba(burst.loc[neurontoplot[2], 'sws'])]
# gsA = gridspec.GridSpecFromSubplotSpec(3,1,subplot_spec=gs[:,0], height_ratios = [1,0.2,0.2], hspace = 0.4, wspace = 0.1)
# SWR EXAMPLES
subplot(gs[0,0])
simpleaxis(gca())
for i,n in enumerate(neurontoplot):
plot(swr[n], color = color_ex[i], linewidth = 1.5)
xlabel("Time from SWRs (ms)")
ylabel("SWR modulation (z)")
locator_params(axis='y', nbins=4)
gca().text(-0.15, 1.05, "A", transform = gca().transAxes, fontsize = 9)
########################################################################
# B. SCORE CLASSIFICATION
########################################################################
subplot(gs[1,0])
simpleaxis(gca())
gca().text(-0.15, 1.05, "B", transform = gca().transAxes, fontsize = 9)
xlabel("Nuclei")
ylabel("Classification score")
title("SWR classification")
tmp = mean_score[('score', 'swr', 'mean')]
tmp2 = mean_score[('shuffle', 'swr', 'mean')]
tmp3 = (tmp-tmp2)/(1-tmp2)
tmp3 = tmp3.sort_values(ascending = False)
order = tmp3.index.values
# bar(np.arange(len(tmp)), tmp2.values, linewidth = 1, color = 'none', edgecolor = 'black', linestyle = '--')
bar(np.arange(len(tmp3)), tmp3.values, yerr = mean_score.loc[order,('score','swr','sem')],
linewidth = 1, color = 'none', edgecolor = 'black')
xticks(np.arange(mean_score.shape[0]), order)
# axhline(1/8, linestyle = '--', color = 'black', linewidth = 0.5)
# yticks([0, 0.2,0.4], [0, 20,40])
#########################################################################
# C. SCORE for the three neurons
#########################################################################
gsB = gridspec.GridSpecFromSubplotSpec(2,1,subplot_spec=gs[0,1], hspace = 0.2, wspace = 0.1)
store = pd.HDFStore("../../figures/figures_articles/figure6/example_proba.h5", 'r')
proba_aut = store["proba_aut"]
proba_swr = store["proba_swr"]
store.close()
order2 = ['CM']+list(order)
# score SWR
subplot(gsB[0,0])
simpleaxis(gca())
ylabel("p/SWR")
# title("p(Nucleus/SWR)")
title("Classifier")
gca().text(-0.15, 1.10, "C", transform = gca().transAxes, fontsize = 9)
ct = 0
for i,n in enumerate(neurontoplot):
bar(np.arange(len(proba_swr.index))+ct, proba_swr.loc[order2,n].values, width = 0.2, color = color_ex[i])
ct += 0.21
xticks(np.arange(len(order2)),[])
# score AUTO
subplot(gsB[1,0])
simpleaxis(gca())
xlabel("Nuclei")
ylabel("p/Autocorr.")
# title("p(Nucleus/Autocorr.)")
ct = 0
for i,n in enumerate(neurontoplot):
bar(np.arange(len(proba_aut.index))+ct, proba_aut.loc[order2,n].values, width = 0.2, color = color_ex[i])
ct += 0.21
xticks(np.arange(len(order2)), order2, rotation = 90)
#########################################################################
# D. Hellinger distance
#########################################################################
subplot(gs[0,2])
simpleaxis(gca())
gca().text(-0.15, 1.05, "D", transform = gca().transAxes, fontsize = 9)
# title()
xlabel("Hellinger Distance (a.u.)")
hist(HLS.mean(), 10, color = 'black', weights = np.ones(HLS.shape[1])/float(HLS.shape[1]))
axvline(HL.mean(), color = 'red')
ylabel("Probability (%)")
yticks([0, 0.1, 0.2], ['0', '10', '20'])
gca().text(HL.mean()+0.01, gca().get_ylim()[1], "p<0.001",fontsize = 7, ha = 'center', color = 'red')
# cax = inset_axes(axbig, "20%", "20%",
# bbox_to_anchor=(0, 2.5, 1, 1),
# bbox_transform=axbig.transData,
# loc = 'lower left')
########################################################################
# E. PCA + MATRIX
########################################################################
gsA = gridspec.GridSpecFromSubplotSpec(4,2,subplot_spec=gs[1,1], hspace = 0.1, wspace = 0.5, height_ratios = [1,1,0.2,1])
# EXEMPLE PCA SWR
subplot(gsA[0,:])
simpleaxis(gca())
gca().spines['bottom'].set_visible(False)
gca().set_xticks([])
axhline(0, linewidth = 0.5, color = 'black')
for i, n in enumerate(neurontoplot):
idx = np.where(n == neurons)[0][0]
scatter(np.arange(pc_swr.shape[1])+i*0.2, pc_swr[idx], 2, color = color_ex[i])
for j in np.arange(pc_swr.shape[1]):
plot([j+i*0.2, j+i*0.2],[0, pc_swr[idx][j]], linewidth = 1.2, color = color_ex[i])
ylabel("PCA weights")
gca().yaxis.set_label_coords(-0.2,0.1)
# title("PCA")
gca().text(-0.15, 1.05, "E", transform = gca().transAxes, fontsize = 9)
gca().text(0.15, 1.05, "SWR", transform = gca().transAxes, fontsize = 7)
# EXEMPLE PCA AUTOCORR
gsAA = gridspec.GridSpecFromSubplotSpec(1,1,subplot_spec=gsA[1,:])#, hspace = 0.1, height_ratios = [1,0.4])
ax1 = subplot(gsAA[0,:])
# ax2 = subplot(gsAA[1,:], sharex = ax1)
simpleaxis(ax1)
# simpleaxis(ax2)
ax1.spines['bottom'].set_visible(False)
# ax2.spines['bottom'].set_visible(False)
# ax1.set_ylabel("PC")
ax1.set_xticks([])
# ax2.set_xticks([])
ax1.axhline(0, linewidth = 0.5, color = 'black')
for i, n in enumerate(neurontoplot):
idx = np.where(n == neurons)[0][0]
ax1.scatter(np.arange(pc_aut.shape[1])+i*0.2, pc_aut[idx], 2, color = color_ex[i])
# ax2.scatter(np.arange(pc_aut.shape[1])+i*0.2, pc_aut[idx], 2, color = color_ex[i])
for j in np.arange(pc_aut.shape[1]):
ax1.plot([j+i*0.2, j+i*0.2], [0, pc_aut[idx][j]], linewidth = 1.2, color = color_ex[i])
# ax2.plot([j+i*0.2, j+i*0.2], [0, pc_aut[idx][j]], linewidth = 1.2, color = color_ex[i])
idx = [np.where(n == neurons)[0][0] for n in neurontoplot]
# ax1.set_ylim(pc_aut[idx,1:].min()-8.0, pc_aut[idx,1:].max()+8.0)
# ax2.set_ylim(pc_aut[idx,0].min()-2.0, pc_aut[idx,0].max()+2.0)
# ax1.set_yticks([0])
# ax2.set_yticks([-260])
# d = .005 # how big to make the diagonal lines in axes coordinates
# arguments to pass to plot, just so we don't keep repeating them
# kwargs = dict(transform=ax1.transAxes, color='k', clip_on=False, linewidth = 1)
# ax1.plot((-d, +d), (-d, +d), **kwargs) # top-left diagonal
# kwargs.update(transform=ax2.transAxes) # switch to the bottom axes
# ax2.plot((-d, +d), (1 - d, 1 + d), **kwargs) # bottom-left diagonal
ax1.text(0.15, 0.95, "Autocorr.", transform = ax1.transAxes, fontsize = 7)
# title("PCA")
# MATRIX CORRELATION
subplot(gsA[-1,0])
noaxis(gca())
vmin = np.minimum(corr[0:10,10:].min(), corrsh[0:10,10:].min())
vmax = np.maximum(corr[0:10,10:].max(), corrsh[0:10,10:].max())
imshow(corr[0:10,10:], vmin = vmin, vmax = vmax)
ylabel("SWR")
xlabel("Autocorr.")
# title("Correlation", fontsize = 8)
# gca().text(-0.25, 1.15, "B", transform = gca().transAxes, fontsize = 9)
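# total correlation 1 - det(corr) is 0 for uncorrelated components and grows toward 1 with shared structure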
gca().text(0.02, -0.7, r"$\rho^{2} = $"+str(np.round(1-np.linalg.det(corr),2)), transform = gca().transAxes, fontsize = 7)
# MATRIX SHUFFLED
subplot(gsA[-1,1])
noaxis(gca())
imshow(corrsh[0:10,10:], vmin = vmin, vmax = vmax)
title("Shuffle", fontsize = 8, pad = 0.3)
ylabel("SWR")
xlabel("Autocorr.")
gca().text(0.02, -0.7, r"$\rho^{2} = $"+str(np.round(1-np.linalg.det(corrsh),2)), transform = gca().transAxes, fontsize = 7)
#########################################################################
# F. SHUFFLING + CORR
#########################################################################
subplot(gs[1,2])
simpleaxis(gca())
axvline(1-det_all['all'], color = 'red')
hist(1-shufflings['all'], 100, color = 'black', weights = np.ones(len(shufflings['all']))/len(shufflings['all']))
xlabel(r"Total correlation $\rho^{2}$")
ylabel("Probability (%)")
yticks([0,0.02,0.04], ['0','2','4'])
gca().text(-0.15, 1.05, "F", transform = gca().transAxes, fontsize = 9)
gca().text(1-det_all['all']-0.05, gca().get_ylim()[1], "p<0.001",fontsize = 7, ha = 'center', color = 'red')
subplots_adjust(top = 0.93, bottom = 0.1, right = 0.96, left = 0.08)
savefig("../../figures/figures_articles/figart_6.pdf", dpi = 900, facecolor = 'white')
os.system("evince ../../figures/figures_articles/figart_6.pdf &")
| gviejo/ThalamusPhysio | python/figure_article/main_article_fig_6.py | Python | gpl-3.0 | 14,479 |
import pandas as pd
import networkx as nx
import sys
import os
class GraphCombiner(object):
"""docstring for GraphCombiner"""
def __init__(self, handle):
super(GraphCombiner, self).__init__()
self.handle = handle
self.G = nx.read_gpickle('{0} Initialized Graph.pkl'.format(handle))
self.current_g = None
def run(self):
for f in os.listdir('edges'):
print(f)
            if f.endswith('.pkl'):
f = 'edges/{0}'.format(f)
self.current_g = nx.read_gpickle(f)
self.copy_nodes_and_edges()
nx.write_gpickle(self.G, '{0} Full Complement Graph.pkl'.format(self.handle))
print('{0} Nodes'.format(len(self.G.nodes())))
print('{0} Edges'.format(len(self.G.edges())))
def copy_nodes_and_edges(self):
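        """Copy every node and edge, with attributes, from the current chunk graph into the master graph."""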
for n, d in self.current_g.nodes(data=True):
            self.G.add_node(n, **d)
if len(self.current_g.edges()) > 0:
for n1, n2, d in self.current_g.edges(data=True):
                self.G.add_edge(n1, n2, **d)
if __name__ == '__main__':
handle = sys.argv[1]
gc = GraphCombiner(handle)
gc.run()
| ericmjl/influenza-reassortment-detector | graph_combiner.py | Python | mit | 1,214 |
# -*- coding: utf-8 -*-
import subprocess
import sys
from distutils.cmd import Command
from setuptools import setup
try:
from babel import __version__
except SyntaxError as exc:
sys.stderr.write("Unable to import Babel (%s). Are you running a supported version of Python?\n" % exc)
sys.exit(1)
class import_cldr(Command):
description = 'imports and converts the CLDR data'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
subprocess.check_call([sys.executable, 'scripts/download_import_cldr.py'])
setup(
name='Babel',
version=__version__,
description='Internationalization utilities',
long_description="""A collection of tools for internationalizing Python applications.""",
author='Armin Ronacher',
author_email='[email protected]',
license='BSD',
url='http://babel.pocoo.org/',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules',
],
python_requires='>=3.6',
packages=['babel', 'babel.messages', 'babel.localtime'],
include_package_data=True,
install_requires=[
# This version identifier is currently necessary as
# pytz otherwise does not install on pip 1.4 or
# higher.
'pytz>=2015.7',
],
cmdclass={'import_cldr': import_cldr},
zip_safe=False,
# Note when adding extractors: builtin extractors we also want to
# work if packages are not installed to simplify testing. If you
# add an extractor here also manually add it to the "extract"
# function in babel.messages.extract.
entry_points="""
[console_scripts]
pybabel = babel.messages.frontend:main
[distutils.commands]
compile_catalog = babel.messages.frontend:compile_catalog
extract_messages = babel.messages.frontend:extract_messages
init_catalog = babel.messages.frontend:init_catalog
update_catalog = babel.messages.frontend:update_catalog
[distutils.setup_keywords]
message_extractors = babel.messages.frontend:check_message_extractors
[babel.checkers]
num_plurals = babel.messages.checkers:num_plurals
python_format = babel.messages.checkers:python_format
[babel.extractors]
ignore = babel.messages.extract:extract_nothing
python = babel.messages.extract:extract_python
javascript = babel.messages.extract:extract_javascript
"""
)
| mitsuhiko/babel | setup.py | Python | bsd-3-clause | 3,111 |
#!/usr/bin/env python
# coding=utf-8
"""
Sub-string divisibility
Problem 43
The number, 1406357289, is a 0 to 9 pandigital number because it is made up of
each of the digits 0 to 9 in some order, but it also has a rather interesting
sub-string divisibility property.
Let d1 be the 1st digit, d2 be the 2nd digit, and so on. In this way, we note
the following:
d2d3d4=406 is divisible by 2
d3d4d5=063 is divisible by 3
d4d5d6=635 is divisible by 5
d5d6d7=357 is divisible by 7
d6d7d8=572 is divisible by 11
d7d8d9=728 is divisible by 13
d8d9d10=289 is divisible by 17
Find the sum of all 0 to 9 pandigital numbers with this property.
"""
from __future__ import print_function
from pe024_lexicographic_permutations import permutations
from pe032_pandigital_products import int_seq
def _sub_divisibility(s):
primes = [2, 3, 5, 7, 11, 13, 17]
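    # with 1-based n, s[n:n + 3] is d(n+1)d(n+2)d(n+3), which must be divisible by the n-th prime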
for n, i in enumerate(primes, 1):
if int_seq(s[n:n + 3]) % i != 0:
return False
return True
if __name__ == '__main__':
total = 0
for i in permutations('0123456789'):
if _sub_divisibility(i):
print('>', int_seq(i))
total += int_seq(i)
print(total) # 16695334890
| openqt/algorithms | projecteuler/ac/old/pe043_sub_string_divisibility.py | Python | gpl-3.0 | 1,213 |
# coding: utf-8
# In[1]:
import csv
from scipy.misc import imread, imsave
import cv2
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import MaxPooling2D
import pickle
import json
# In[3]:
X_train = []
y_train = []
drives = ['vector79_run_1.pkl']
for drive in drives:
with open(drive, 'rb') as f:
data = pickle.load(f)
X_train.extend(data['images'])
y_train.extend(data['steering_throttle'].astype(np.float64))
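        # augment with horizontally flipped frames and negated steering angles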
X_train.extend(np.array([np.fliplr(x) for x in data['images']]))
y_train.extend(np.negative(data['steering_throttle'].astype(np.float64)))
X_train = np.array(X_train)
y_train = np.array(y_train)[:,[0]]
# In[4]:
print(X_train.shape, y_train.shape)
def create_model():
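    """Steering-angle regression CNN: input normalization, four conv/pool blocks,
    three dropout-regularized dense layers, and a single linear output."""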
model = Sequential()
# preprocess
model.add(Lambda(lambda x: x/255.0 - 0.5, input_shape=(80, 320, 3)))
# conv1 layer
model.add(Convolution2D(32, (5, 5)))
model.add(MaxPooling2D((2, 2)))
model.add(Activation('relu'))
# conv2 layer
model.add(Convolution2D(64, (5, 5)))
model.add(MaxPooling2D((3, 3)))
model.add(Activation('relu'))
# conv3 layer
model.add(Convolution2D(128, (3, 3)))
model.add(MaxPooling2D((2, 2)))
model.add(Activation('relu'))
# conv4 layer
model.add(Convolution2D(128, (3, 3)))
model.add(MaxPooling2D((2, 2)))
model.add(Activation('relu'))
#add fully connected layers
model.add(Flatten()) #Flatten input image
# fc1
model.add(Dense(1024))
model.add(Dropout(0.5))
model.add(Activation('relu'))
# fc2
model.add(Dense(128))
model.add(Dropout(0.5))
model.add(Activation('relu'))
    # fc3
model.add(Dense(64))
model.add(Dropout(0.5))
model.add(Activation('relu'))
model.add(Dense(1)) #output layer with 1 regression value
model.compile(loss="mse", optimizer="adam")
return model
# In[29]:
#create the model and save it as json
model = create_model()
with open("model.json", "w") as f:
json.dump(model.to_json(), f)
# # In[6]:
# #loading the model
# from keras.models import model_from_json
# with open("model.json") as f:
# model = model_from_json(json.load(f))
# model.compile(loss="mse", optimizer="adam")
model.load_weights("model.h5")
# In[34]:
history = []
for i in range(30):
h = model.fit(X_train, y_train, shuffle=True, epochs=3, validation_split=.2, batch_size=64)
history.append(h)
model.save("data_model_tranfered_3_{}.h5".format(i))
print("DONE GO RACE")
# # In[19]:
# #create small test set to transfer to pi for testing
# small_data = {
# "X": X_train[1500:1520].tolist(),
# "y": y_train[1500:1520].tolist()
# }
# # In[20]:
# with open('small_data.json', 'w') as f:
# json.dump(small_data, f)
# # In[22]:
# #inference
# img = X_train[1550]
# actual = y_train[1550]
# print(actual)
# plt.imshow(img)
# # In[23]:
# model.predict(np.array([img]))
# # In[24]:
# 0.04938826 * 45
# # In[ ]:
| DrClick/ARCRacing | ros_system_ws/src/vector79/scripts/transfer_learn.py | Python | mit | 3,149 |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.7.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class AuthenticationV1Api(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def create_token_review(self, body, **kwargs):
"""
create a TokenReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_token_review(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1TokenReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1TokenReview
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_token_review_with_http_info(body, **kwargs)
else:
(data) = self.create_token_review_with_http_info(body, **kwargs)
return data
def create_token_review_with_http_info(self, body, **kwargs):
"""
create a TokenReview
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_token_review_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param V1TokenReview body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1TokenReview
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body', 'pretty']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_token_review" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_token_review`")
collection_formats = {}
resource_path = '/apis/authentication.k8s.io/v1/tokenreviews'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'pretty' in params:
query_params['pretty'] = params['pretty']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1TokenReview',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_api_resources(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_api_resources_with_http_info(**kwargs)
else:
(data) = self.get_api_resources_with_http_info(**kwargs)
return data
def get_api_resources_with_http_info(self, **kwargs):
"""
get available resources
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_api_resources_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:return: V1APIResourceList
If the method is called asynchronously,
returns the request thread.
"""
all_params = []
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_api_resources" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/apis/authentication.k8s.io/v1/'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
# Authentication setting
auth_settings = ['BearerToken']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='V1APIResourceList',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
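# Usage sketch (illustrative, not part of the generated code): reviewing a
# bearer token. V1TokenReview is the model referenced in the docstrings above;
# the exact constructor arguments shown here are an assumption.
#
#   from kubernetes import client
#   api = client.AuthenticationV1Api()
#   body = client.V1TokenReview(spec=client.V1TokenReviewSpec(token="<token>"))
#   review = api.create_token_review(body)
#   print(review.status.authenticated)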
| djkonro/client-python | kubernetes/client/apis/authentication_v1_api.py | Python | apache-2.0 | 9,608 |
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
import decimal
import re
from decimal import Decimal
from typing import Any, Optional, Tuple, Union
from mbstrdecoder import MultiByteStrDecoder
from typepy import Integer, RealNumber, TypeConversionError
decimal.setcontext(decimal.Context(prec=60, rounding=decimal.ROUND_HALF_DOWN))
_ansi_escape = re.compile(r"(\x9b|\x1b\[)[0-?]*[ -\/]*[@-~]", re.IGNORECASE)
def get_integer_digit(value) -> int:
float_type = RealNumber(value)
try:
abs_value = abs(float_type.convert())
except TypeConversionError:
try:
abs_value = abs(Integer(value).convert())
except TypeConversionError:
raise ValueError(f"the value must be a number: value='{value}' type='{type(value)}'")
return len(str(abs_value))
if abs_value.is_zero():
return 1
try:
return len(str(abs_value.quantize(Decimal("1."), rounding=decimal.ROUND_DOWN)))
except decimal.InvalidOperation:
return len(str(abs_value))
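# Examples:
#   get_integer_digit(4567.89)  -> 4  (integer part "4567")
#   get_integer_digit("-0.001") -> 1  (integer part "0")
#   get_integer_digit(0)        -> 1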
class DigitCalculator:
REGEXP_COMMON_LOG = re.compile(r"[\d\.]+[eE]\-\d+")
REGEXP_SPLIT = re.compile(r"[eE]\-")
def get_decimal_places(self, value: Union[str, float, int, Decimal]) -> int:
if Integer(value).is_type():
return 0
float_digit_len = 0
abs_value = abs(float(value))
text_value = str(abs_value)
float_text = "0"
if text_value.find(".") != -1:
float_text = text_value.split(".")[1]
float_digit_len = len(float_text)
elif self.REGEXP_COMMON_LOG.search(text_value):
float_text = self.REGEXP_SPLIT.split(text_value)[1]
float_digit_len = int(float_text)
return float_digit_len
_digit_calculator = DigitCalculator()
def get_number_of_digit(
value: Any, max_decimal_places: int = 99
) -> Tuple[Optional[int], Optional[int]]:
try:
integer_digits = get_integer_digit(value)
except (ValueError, TypeError, OverflowError):
return (None, None)
try:
decimal_places: Optional[int] = min(
_digit_calculator.get_decimal_places(value), max_decimal_places
)
except (ValueError, TypeError):
decimal_places = None
return (integer_digits, decimal_places)
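# Example: get_number_of_digit("827.11") -> (3, 2);
# a non-numeric value such as get_number_of_digit(None) -> (None, None).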
def is_multibyte_str(text) -> bool:
from typepy import StrictLevel, String
if not String(text, strict_level=StrictLevel.MIN).is_type():
return False
try:
unicode_text = MultiByteStrDecoder(text).unicode_str
except ValueError:
return False
try:
unicode_text.encode("ascii")
except UnicodeEncodeError:
return True
return False
def _validate_eaaw(east_asian_ambiguous_width: int) -> None:
if east_asian_ambiguous_width in (1, 2):
return
raise ValueError(
"invalid east_asian_ambiguous_width: expected=1 or 2, actual={}".format(
east_asian_ambiguous_width
)
)
def strip_ansi_escape(unicode_str: str) -> str:
return _ansi_escape.sub("", unicode_str)
def calc_ascii_char_width(unicode_str: str, east_asian_ambiguous_width: int = 1) -> int:
import unicodedata
width = 0
for char in unicode_str:
char_width = unicodedata.east_asian_width(char)
if char_width in "WF":
width += 2
elif char_width == "A":
_validate_eaaw(east_asian_ambiguous_width)
width += east_asian_ambiguous_width
else:
width += 1
return width
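# Examples:
#   calc_ascii_char_width("abc") -> 3
#   calc_ascii_char_width("あいう") -> 6  (full-width characters count as 2 columns)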
| thombashi/DataProperty | dataproperty/_function.py | Python | mit | 3,554 |
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from hashlib import md5
import logging
import os
import platform
import re
import signal
import socket
import sys
import time
import types
import urllib2
import uuid
# 3p
import simplejson as json
import yaml # noqa, let's guess, probably imported somewhere
from tornado import ioloop
try:
from yaml import CLoader as yLoader
from yaml import CDumper as yDumper
except ImportError:
# On source install C Extensions might have not been built
from yaml import Loader as yLoader # noqa, imported from here elsewhere
from yaml import Dumper as yDumper # noqa, imported from here elsewhere
# These classes are now in utils/, they are just here for compatibility reasons,
# if a user actually uses them in a custom check
# If you're this user, please use utils.pidfile or utils.platform instead
# FIXME: remove them at a point (6.x)
from utils.dockerutil import DockerUtil
from utils.pidfile import PidFile # noqa, see ^^^
from utils.platform import Platform
from utils.proxy import get_proxy
from utils.subprocess_output import get_subprocess_output
VALID_HOSTNAME_RFC_1123_PATTERN = re.compile(r"^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$")
MAX_HOSTNAME_LEN = 255
COLON_NON_WIN_PATH = re.compile(':(?!\\\\)')
log = logging.getLogger(__name__)
NumericTypes = (float, int, long)
def plural(count):
if count == 1:
return ""
return "s"
def get_tornado_ioloop():
return ioloop.IOLoop.current()
def get_uuid():
# Generate a unique name that will stay constant between
# invocations, such as platform.node() + uuid.getnode()
# Use uuid5, which does not depend on the clock and is
# recommended over uuid3.
# This is important to be able to identify a server even if
# its drives have been wiped clean.
# Note that this is not foolproof but we can reconcile servers
# on the back-end if need be, based on mac addresses.
return uuid.uuid5(uuid.NAMESPACE_DNS, platform.node() + str(uuid.getnode())).hex
def get_os():
"Human-friendly OS name"
if sys.platform == 'darwin':
return 'mac'
elif sys.platform.find('freebsd') != -1:
return 'freebsd'
elif sys.platform.find('linux') != -1:
return 'linux'
elif sys.platform.find('win32') != -1:
return 'windows'
elif sys.platform.find('sunos') != -1:
return 'solaris'
else:
return sys.platform
def headers(agentConfig):
# Build the request headers
return {
'User-Agent': 'Datadog Agent/%s' % agentConfig['version'],
'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'text/html, */*',
}
def windows_friendly_colon_split(config_string):
'''
Perform a split by ':' on the config_string
without splitting on the start of windows path
'''
if Platform.is_win32():
# will split on path/to/module.py:blabla but not on C:\\path
return COLON_NON_WIN_PATH.split(config_string)
else:
return config_string.split(':')
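# e.g. on win32, 'C:\\checks.d\\foo.py:instance' -> ['C:\\checks.d\\foo.py', 'instance'];
# the drive-letter colon (followed by a backslash) is not a split point.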
def cast_metric_val(val):
# ensure that the metric value is a numeric type
if not isinstance(val, NumericTypes):
        # Try the int conversion first because we want to preserve
        # whether the value is an int or a float. If neither works,
        # raise a ValueError to be handled elsewhere.
for cast in [int, float]:
try:
val = cast(val)
return val
except ValueError:
continue
raise ValueError
return val
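# Examples: cast_metric_val("3") -> 3, cast_metric_val("3.5") -> 3.5;
# a non-numeric value such as cast_metric_val("n/a") raises ValueError.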
_IDS = {}
def get_next_id(name):
global _IDS
current_id = _IDS.get(name, 0)
current_id += 1
_IDS[name] = current_id
return current_id
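# e.g. get_next_id("transaction") -> 1, then 2, ... (a per-name monotonic counter)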
def is_valid_hostname(hostname):
if hostname.lower() in set([
'localhost',
'localhost.localdomain',
'localhost6.localdomain6',
'ip6-localhost',
]):
log.warning("Hostname: %s is local" % hostname)
return False
if len(hostname) > MAX_HOSTNAME_LEN:
log.warning("Hostname: %s is too long (max length is %s characters)" % (hostname, MAX_HOSTNAME_LEN))
return False
if VALID_HOSTNAME_RFC_1123_PATTERN.match(hostname) is None:
log.warning("Hostname: %s is not complying with RFC 1123" % hostname)
return False
return True
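# e.g. is_valid_hostname("web-01.example.com") -> True;
#      is_valid_hostname("localhost") -> False (local names are rejected)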
def check_yaml(conf_path):
with open(conf_path) as f:
check_config = yaml.load(f.read(), Loader=yLoader)
assert 'init_config' in check_config, "No 'init_config' section found"
assert 'instances' in check_config, "No 'instances' section found"
valid_instances = True
if check_config['instances'] is None or not isinstance(check_config['instances'], list):
valid_instances = False
else:
for i in check_config['instances']:
if not isinstance(i, dict):
valid_instances = False
break
if not valid_instances:
raise Exception('You need to have at least one instance defined in the YAML file for this check')
else:
return check_config
def get_hostname(config=None):
"""
Get the canonical host name this agent should identify as. This is
the authoritative source of the host name for the agent.
Tries, in order:
* agent config (datadog.conf, "hostname:")
* 'hostname -f' (on unix)
* socket.gethostname()
"""
hostname = None
# first, try the config
if config is None:
from config import get_config
config = get_config(parse_args=True)
config_hostname = config.get('hostname')
if config_hostname and is_valid_hostname(config_hostname):
return config_hostname
# Try to get GCE instance name
if hostname is None:
gce_hostname = GCE.get_hostname(config)
if gce_hostname is not None:
if is_valid_hostname(gce_hostname):
return gce_hostname
# Try to get the docker hostname
docker_util = DockerUtil()
if hostname is None and docker_util.is_dockerized():
docker_hostname = docker_util.get_hostname()
if docker_hostname is not None and is_valid_hostname(docker_hostname):
return docker_hostname
# then move on to os-specific detection
if hostname is None:
def _get_hostname_unix():
try:
# try fqdn
out, _, rtcode = get_subprocess_output(['/bin/hostname', '-f'], log)
if rtcode == 0:
return out.strip()
except Exception:
return None
os_name = get_os()
if os_name in ['mac', 'freebsd', 'linux', 'solaris']:
unix_hostname = _get_hostname_unix()
if unix_hostname and is_valid_hostname(unix_hostname):
hostname = unix_hostname
# if we have an ec2 default hostname, see if there's an instance-id available
if (Platform.is_ecs_instance()) or (hostname is not None and True in [hostname.lower().startswith(p) for p in [u'ip-', u'domu']]):
instanceid = EC2.get_instance_id(config)
if instanceid:
hostname = instanceid
# fall back on socket.gethostname(), socket.getfqdn() is too unreliable
if hostname is None:
try:
socket_hostname = socket.gethostname()
except socket.error:
socket_hostname = None
if socket_hostname and is_valid_hostname(socket_hostname):
hostname = socket_hostname
if hostname is None:
log.critical('Unable to reliably determine host name. You can define one in datadog.conf or in your hosts file')
raise Exception('Unable to reliably determine host name. You can define one in datadog.conf or in your hosts file')
else:
return hostname
class GCE(object):
URL = "http://169.254.169.254/computeMetadata/v1/?recursive=true"
TIMEOUT = 0.1 # second
SOURCE_TYPE_NAME = 'google cloud platform'
metadata = None
EXCLUDED_ATTRIBUTES = ["kube-env", "startup-script", "sshKeys", "user-data",
"cli-cert", "ipsec-cert", "ssl-cert"]
@staticmethod
def _get_metadata(agentConfig):
if GCE.metadata is not None:
return GCE.metadata
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
GCE.metadata = {}
return GCE.metadata
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(GCE.TIMEOUT)
except Exception:
pass
try:
opener = urllib2.build_opener()
opener.addheaders = [('X-Google-Metadata-Request','True')]
GCE.metadata = json.loads(opener.open(GCE.URL).read().strip())
except Exception:
GCE.metadata = {}
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return GCE.metadata
@staticmethod
def get_tags(agentConfig):
if not agentConfig['collect_instance_metadata']:
return None
try:
host_metadata = GCE._get_metadata(agentConfig)
tags = []
for key, value in host_metadata['instance'].get('attributes', {}).iteritems():
if key in GCE.EXCLUDED_ATTRIBUTES:
continue
tags.append("%s:%s" % (key, value))
tags.extend(host_metadata['instance'].get('tags', []))
tags.append('zone:%s' % host_metadata['instance']['zone'].split('/')[-1])
tags.append('instance-type:%s' % host_metadata['instance']['machineType'].split('/')[-1])
tags.append('internal-hostname:%s' % host_metadata['instance']['hostname'])
tags.append('instance-id:%s' % host_metadata['instance']['id'])
tags.append('project:%s' % host_metadata['project']['projectId'])
tags.append('numeric_project_id:%s' % host_metadata['project']['numericProjectId'])
GCE.metadata['hostname'] = host_metadata['instance']['hostname'].split('.')[0]
return tags
except Exception:
return None
@staticmethod
def get_hostname(agentConfig):
try:
host_metadata = GCE._get_metadata(agentConfig)
hostname = host_metadata['instance']['hostname']
if agentConfig.get('gce_updated_hostname'):
return hostname
else:
return hostname.split('.')[0]
except Exception:
return None
@staticmethod
def get_host_aliases(agentConfig):
try:
host_metadata = GCE._get_metadata(agentConfig)
project_id = host_metadata['project']['projectId']
instance_name = host_metadata['instance']['hostname'].split('.')[0]
return ['%s.%s' % (instance_name, project_id)]
except Exception:
return None
class EC2(object):
"""Retrieve EC2 metadata
"""
EC2_METADATA_HOST = "http://169.254.169.254"
METADATA_URL_BASE = EC2_METADATA_HOST + "/latest/meta-data"
INSTANCE_IDENTITY_URL = EC2_METADATA_HOST + "/latest/dynamic/instance-identity/document"
TIMEOUT = 0.1 # second
metadata = {}
class NoIAMRole(Exception):
"""
Instance has no associated IAM role.
"""
pass
@staticmethod
def get_iam_role():
"""
Retrieve instance's IAM role.
Raise `NoIAMRole` when unavailable.
"""
try:
return urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/").read().strip()
except urllib2.HTTPError as err:
if err.code == 404:
raise EC2.NoIAMRole()
raise
@staticmethod
def get_tags(agentConfig):
"""
Retrieve AWS EC2 tags.
"""
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
return []
EC2_tags = []
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(EC2.TIMEOUT)
except Exception:
pass
try:
iam_role = EC2.get_iam_role()
iam_params = json.loads(urllib2.urlopen(EC2.METADATA_URL_BASE + "/iam/security-credentials/" + unicode(iam_role)).read().strip())
instance_identity = json.loads(urllib2.urlopen(EC2.INSTANCE_IDENTITY_URL).read().strip())
region = instance_identity['region']
import boto.ec2
proxy_settings = get_proxy(agentConfig) or {}
connection = boto.ec2.connect_to_region(
region,
aws_access_key_id=iam_params['AccessKeyId'],
aws_secret_access_key=iam_params['SecretAccessKey'],
security_token=iam_params['Token'],
proxy=proxy_settings.get('host'), proxy_port=proxy_settings.get('port'),
proxy_user=proxy_settings.get('user'), proxy_pass=proxy_settings.get('password')
)
tag_object = connection.get_all_tags({'resource-id': EC2.metadata['instance-id']})
EC2_tags = [u"%s:%s" % (tag.name, tag.value) for tag in tag_object]
if agentConfig.get('collect_security_groups') and EC2.metadata.get('security-groups'):
EC2_tags.append(u"security-group-name:{0}".format(EC2.metadata.get('security-groups')))
except EC2.NoIAMRole:
log.warning(
u"Unable to retrieve AWS EC2 custom tags: "
u"an IAM role associated with the instance is required"
)
except Exception:
log.exception("Problem retrieving custom EC2 tags")
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return EC2_tags
@staticmethod
def get_metadata(agentConfig):
"""Use the ec2 http service to introspect the instance. This adds latency if not running on EC2
"""
# >>> import urllib2
# >>> urllib2.urlopen('http://169.254.169.254/latest/', timeout=1).read()
# 'meta-data\nuser-data'
# >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data', timeout=1).read()
# 'ami-id\nami-launch-index\nami-manifest-path\nhostname\ninstance-id\nlocal-ipv4\npublic-keys/\nreservation-id\nsecurity-groups'
# >>> urllib2.urlopen('http://169.254.169.254/latest/meta-data/instance-id', timeout=1).read()
# 'i-deadbeef'
# Every call may add TIMEOUT seconds in latency so don't abuse this call
# python 2.4 does not support an explicit timeout argument so force it here
# Rather than monkey-patching urllib2, just lower the timeout globally for these calls
if not agentConfig['collect_instance_metadata']:
log.info("Instance metadata collection is disabled. Not collecting it.")
return {}
socket_to = None
try:
socket_to = socket.getdefaulttimeout()
socket.setdefaulttimeout(EC2.TIMEOUT)
except Exception:
pass
for k in ('instance-id', 'hostname', 'local-hostname', 'public-hostname', 'ami-id', 'local-ipv4', 'public-keys/', 'public-ipv4', 'reservation-id', 'security-groups'):
try:
v = urllib2.urlopen(EC2.METADATA_URL_BASE + "/" + unicode(k)).read().strip()
assert type(v) in (types.StringType, types.UnicodeType) and len(v) > 0, "%s is not a string" % v
EC2.metadata[k.rstrip('/')] = v
except Exception:
pass
try:
if socket_to is None:
socket_to = 3
socket.setdefaulttimeout(socket_to)
except Exception:
pass
return EC2.metadata
@staticmethod
def get_instance_id(agentConfig):
try:
return EC2.get_metadata(agentConfig).get("instance-id", None)
except Exception:
return None
class Watchdog(object):
"""Simple signal-based watchdog that will scuttle the current process
if it has not been reset every N seconds, or if the processes exceeds
a specified memory threshold.
Can only be invoked once per process, so don't use with multiple threads.
If you instantiate more than one, you're also asking for trouble.
"""
def __init__(self, duration, max_mem_mb = None):
import resource
#Set the duration
self._duration = int(duration)
signal.signal(signal.SIGALRM, Watchdog.self_destruct)
# cap memory usage
if max_mem_mb is not None:
self._max_mem_kb = 1024 * max_mem_mb
max_mem_bytes = 1024 * self._max_mem_kb
resource.setrlimit(resource.RLIMIT_AS, (max_mem_bytes, max_mem_bytes))
self.memory_limit_enabled = True
else:
self.memory_limit_enabled = False
@staticmethod
def self_destruct(signum, frame):
try:
import traceback
log.error("Self-destructing...")
log.error(traceback.format_exc())
finally:
os.kill(os.getpid(), signal.SIGKILL)
def reset(self):
# self destruct if using too much memory, as tornado will swallow MemoryErrors
if self.memory_limit_enabled:
mem_usage_kb = int(os.popen('ps -p %d -o %s | tail -1' % (os.getpid(), 'rss')).read())
if mem_usage_kb > (0.95 * self._max_mem_kb):
Watchdog.self_destruct(signal.SIGKILL, sys._getframe(0))
log.debug("Resetting watchdog for %d" % self._duration)
signal.alarm(self._duration)
class LaconicFilter(logging.Filter):
"""
Filters messages, only print them once while keeping memory under control
"""
LACONIC_MEM_LIMIT = 1024
def __init__(self, name=""):
logging.Filter.__init__(self, name)
self.hashed_messages = {}
def hash(self, msg):
return md5(msg).hexdigest()
def filter(self, record):
try:
h = self.hash(record.getMessage())
if h in self.hashed_messages:
return 0
else:
# Don't blow up our memory
if len(self.hashed_messages) >= LaconicFilter.LACONIC_MEM_LIMIT:
self.hashed_messages.clear()
self.hashed_messages[h] = True
return 1
except Exception:
return 1
class Timer(object):
""" Helper class """
def __init__(self):
self.start()
def _now(self):
return time.time()
def start(self):
self.started = self._now()
self.last = self.started
return self
def step(self):
now = self._now()
step = now - self.last
self.last = now
return step
def total(self, as_sec=True):
return self._now() - self.started
"""
Iterable Recipes
"""
def chunks(iterable, chunk_size):
"""Generate sequences of `chunk_size` elements from `iterable`."""
iterable = iter(iterable)
while True:
chunk = [None] * chunk_size
count = 0
try:
for _ in range(chunk_size):
chunk[count] = iterable.next()
count += 1
yield chunk[:count]
except StopIteration:
if count:
yield chunk[:count]
break
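# e.g. list(chunks(range(5), 2)) -> [[0, 1], [2, 3], [4]]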
| c960657/dd-agent | util.py | Python | bsd-3-clause | 19,873 |
def fibonacci(num):
    """Yield the Fibonacci numbers F_0 through F_num (for num >= 1)."""
    previous, current, n = 0, 1, num - 1
    yield previous
    yield current
    # Count down with an explicit `n > 0` test: a bare `while n:` would loop
    # forever when num < 1, since n starts negative in that case.
    while n > 0:
        previous, current, n = current, previous + current, n - 1
        yield current
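# Example: list(fibonacci(5)) -> [0, 1, 1, 2, 3, 5]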
| PavlovVitaly/python__homework_ITMO | task_07_01.py | Python | gpl-3.0 | 200 |
"""
Utility functions for transcripts.
++++++++++++++++++++++++++++++++++
"""
import copy
import logging
import os
from functools import wraps
import requests
import simplejson as json
import six
from django.conf import settings
from lxml import etree
from opaque_keys.edx.locator import BundleDefinitionLocator
from pysrt import SubRipFile, SubRipItem, SubRipTime
from pysrt.srtexc import Error
from six import text_type
from six.moves import range, zip
from six.moves.html_parser import HTMLParser
from openedx.core.djangolib import blockstore_cache
from openedx.core.lib import blockstore_api
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.exceptions import NotFoundError
from .bumper_utils import get_bumper_settings
try:
from edxval import api as edxval_api
except ImportError:
edxval_api = None
log = logging.getLogger(__name__)
NON_EXISTENT_TRANSCRIPT = 'non_existent_dummy_file_name'
class TranscriptException(Exception):
pass
class TranscriptsGenerationException(Exception):
pass
class GetTranscriptsFromYouTubeException(Exception):
pass
class TranscriptsRequestValidationException(Exception):
pass
def exception_decorator(func):
"""
Generate NotFoundError for TranscriptsGenerationException, UnicodeDecodeError.
Args:
`func`: Input function
Returns:
'wrapper': Decorated function
"""
@wraps(func)
def wrapper(*args, **kwds):
try:
return func(*args, **kwds)
except (TranscriptsGenerationException, UnicodeDecodeError) as ex:
log.exception(text_type(ex))
raise NotFoundError
return wrapper
def generate_subs(speed, source_speed, source_subs):
"""
Generate transcripts from one speed to another speed.
Args:
`speed`: float, for this speed subtitles will be generated,
`source_speed`: float, speed of source_subs
`source_subs`: dict, existing subtitles for speed `source_speed`.
Returns:
`subs`: dict, actual subtitles.
"""
if speed == source_speed:
return source_subs
coefficient = 1.0 * speed / source_speed
subs = {
'start': [
int(round(timestamp * coefficient)) for
timestamp in source_subs['start']
],
'end': [
int(round(timestamp * coefficient)) for
timestamp in source_subs['end']
],
'text': source_subs['text']}
return subs
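# Example: re-timing subtitles from speed 1.0 to 0.5 halves every timestamp:
#   generate_subs(0.5, 1, {'start': [1000], 'end': [2000], 'text': ['hi']})
#   -> {'start': [500], 'end': [1000], 'text': ['hi']}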
def save_to_store(content, name, mime_type, location):
"""
Save named content to store by location.
Returns location of saved content.
"""
content_location = Transcript.asset_location(location, name)
content = StaticContent(content_location, name, mime_type, content)
contentstore().save(content)
return content_location
def save_subs_to_store(subs, subs_id, item, language='en'):
"""
Save transcripts into `StaticContent`.
Args:
`subs_id`: str, subtitles id
`item`: video module instance
`language`: two chars str ('uk'), language of translation of transcripts
Returns: location of saved subtitles.
"""
filedata = json.dumps(subs, indent=2).encode('utf-8')
filename = subs_filename(subs_id, language)
return save_to_store(filedata, filename, 'application/json', item.location)
def youtube_video_transcript_name(youtube_text_api):
"""
Get the transcript name from available transcripts of video
with respect to language from youtube server
"""
utf8_parser = etree.XMLParser(encoding='utf-8')
transcripts_param = {'type': 'list', 'v': youtube_text_api['params']['v']}
lang = youtube_text_api['params']['lang']
# get list of transcripts of specific video
# url-form
# http://video.google.com/timedtext?type=list&v={VideoId}
youtube_response = requests.get('http://' + youtube_text_api['url'], params=transcripts_param)
if youtube_response.status_code == 200 and youtube_response.text:
youtube_data = etree.fromstring(youtube_response.text.encode('utf-8'), parser=utf8_parser)
# iterate all transcripts information from youtube server
for element in youtube_data:
# search specific language code such as 'en' in transcripts info list
if element.tag == 'track' and element.get('lang_code', '') == lang:
return element.get('name')
return None
def get_transcripts_from_youtube(youtube_id, settings, i18n, youtube_transcript_name=''):
"""
Gets transcripts from youtube for youtube_id.
Parses only utf-8 encoded transcripts.
Other encodings are not supported at the moment.
Returns (status, transcripts): bool, dict.
"""
_ = i18n.ugettext
utf8_parser = etree.XMLParser(encoding='utf-8')
youtube_text_api = copy.deepcopy(settings.YOUTUBE['TEXT_API'])
youtube_text_api['params']['v'] = youtube_id
# if the transcript name is not empty on youtube server we have to pass
# name param in url in order to get transcript
# example http://video.google.com/timedtext?lang=en&v={VideoId}&name={transcript_name}
youtube_transcript_name = youtube_video_transcript_name(youtube_text_api)
if youtube_transcript_name:
youtube_text_api['params']['name'] = youtube_transcript_name
data = requests.get('http://' + youtube_text_api['url'], params=youtube_text_api['params'])
if data.status_code != 200 or not data.text:
msg = _("Can't receive transcripts from Youtube for {youtube_id}. Status code: {status_code}.").format(
youtube_id=youtube_id,
status_code=data.status_code
)
raise GetTranscriptsFromYouTubeException(msg)
sub_starts, sub_ends, sub_texts = [], [], []
xmltree = etree.fromstring(data.content, parser=utf8_parser)
for element in xmltree:
if element.tag == "text":
start = float(element.get("start"))
duration = float(element.get("dur", 0)) # dur is not mandatory
text = element.text
end = start + duration
if text:
# Start and end should be ints representing the millisecond timestamp.
sub_starts.append(int(start * 1000))
sub_ends.append(int((end + 0.0001) * 1000))
sub_texts.append(text.replace('\n', ' '))
return {'start': sub_starts, 'end': sub_ends, 'text': sub_texts}
def download_youtube_subs(youtube_id, video_descriptor, settings):
"""
Download transcripts from Youtube.
Args:
youtube_id: str, actual youtube_id of the video.
video_descriptor: video descriptor instance.
We save transcripts for 1.0 speed, as for other speed conversion is done on front-end.
Returns:
Serialized sjson transcript content, if transcripts were successfully downloaded and saved.
Raises:
GetTranscriptsFromYouTubeException, if fails.
"""
i18n = video_descriptor.runtime.service(video_descriptor, "i18n")
_ = i18n.ugettext
subs = get_transcripts_from_youtube(youtube_id, settings, i18n)
return json.dumps(subs, indent=2)
def remove_subs_from_store(subs_id, item, lang='en'):
"""
Remove from store, if transcripts content exists.
"""
filename = subs_filename(subs_id, lang)
Transcript.delete_asset(item.location, filename)
def generate_subs_from_source(speed_subs, subs_type, subs_filedata, item, language='en'):
"""Generate transcripts from source files (like SubRip format, etc.)
and save them to assets for `item` module.
We expect, that speed of source subs equal to 1
:param speed_subs: dictionary {speed: sub_id, ...}
:param subs_type: type of source subs: "srt", ...
    :param subs_filedata: unicode, content of source subs.
:param item: module object.
:param language: str, language of translation of transcripts
:returns: True, if all subs are generated and saved successfully.
"""
_ = item.runtime.service(item, "i18n").ugettext
if subs_type.lower() != 'srt':
raise TranscriptsGenerationException(_("We support only SubRip (*.srt) transcripts format."))
try:
srt_subs_obj = SubRipFile.from_string(subs_filedata)
except Exception as ex:
msg = _("Something wrong with SubRip transcripts file during parsing. Inner message is {error_message}").format(
error_message=text_type(ex)
)
raise TranscriptsGenerationException(msg)
if not srt_subs_obj:
raise TranscriptsGenerationException(_("Something wrong with SubRip transcripts file during parsing."))
sub_starts = []
sub_ends = []
sub_texts = []
for sub in srt_subs_obj:
sub_starts.append(sub.start.ordinal)
sub_ends.append(sub.end.ordinal)
sub_texts.append(sub.text.replace('\n', ' '))
subs = {
'start': sub_starts,
'end': sub_ends,
'text': sub_texts}
for speed, subs_id in six.iteritems(speed_subs):
save_subs_to_store(
generate_subs(speed, 1, subs),
subs_id,
item,
language
)
return subs
def generate_srt_from_sjson(sjson_subs, speed):
"""Generate transcripts with speed = 1.0 from sjson to SubRip (*.srt).
:param sjson_subs: "sjson" subs.
:param speed: speed of `sjson_subs`.
:returns: "srt" subs.
"""
output = ''
equal_len = len(sjson_subs['start']) == len(sjson_subs['end']) == len(sjson_subs['text'])
if not equal_len:
return output
sjson_speed_1 = generate_subs(speed, 1, sjson_subs)
for i in range(len(sjson_speed_1['start'])):
item = SubRipItem(
index=i,
start=SubRipTime(milliseconds=sjson_speed_1['start'][i]),
end=SubRipTime(milliseconds=sjson_speed_1['end'][i]),
text=sjson_speed_1['text'][i]
)
output += (six.text_type(item))
output += '\n'
return output
def generate_sjson_from_srt(srt_subs):
"""
Generate transcripts from sjson to SubRip (*.srt).
Arguments:
srt_subs(SubRip): "SRT" subs object
Returns:
Subs converted to "SJSON" format.
"""
sub_starts = []
sub_ends = []
sub_texts = []
for sub in srt_subs:
sub_starts.append(sub.start.ordinal)
sub_ends.append(sub.end.ordinal)
sub_texts.append(sub.text.replace('\n', ' '))
sjson_subs = {
'start': sub_starts,
'end': sub_ends,
'text': sub_texts
}
return sjson_subs
def copy_or_rename_transcript(new_name, old_name, item, delete_old=False, user=None):
"""
Renames `old_name` transcript file in storage to `new_name`.
If `old_name` is not found in storage, raises `NotFoundError`.
If `delete_old` is True, removes `old_name` files from storage.
"""
filename = u'subs_{0}.srt.sjson'.format(old_name)
content_location = StaticContent.compute_location(item.location.course_key, filename)
transcripts = contentstore().find(content_location).data.decode('utf-8')
save_subs_to_store(json.loads(transcripts), new_name, item)
item.sub = new_name
item.save_with_metadata(user)
if delete_old:
remove_subs_from_store(old_name, item)
def get_html5_ids(html5_sources):
"""
    Helper method to parse the ids out of HTML5 sources
    NOTE: This assumes that '/' does not appear in the filename
"""
html5_ids = [x.split('/')[-1].rsplit('.', 1)[0] for x in html5_sources]
return html5_ids
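# e.g. get_html5_ids(['https://cdn.example.com/video.mp4']) -> ['video']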
def manage_video_subtitles_save(item, user, old_metadata=None, generate_translation=False):
"""
Does some specific things, that can be done only on save.
Video player item has some video fields: HTML5 ones and Youtube one.
If value of `sub` field of `new_item` is cleared, transcripts should be removed.
`item` is video module instance with updated values of fields,
but actually have not been saved to store yet.
`old_metadata` contains old values of XFields.
# 1.
If value of `sub` field of `new_item` is different from values of video fields of `new_item`,
and `new_item.sub` file is present, then code in this function creates copies of
`new_item.sub` file with new names. That names are equal to values of video fields of `new_item`
After that `sub` field of `new_item` is changed to one of values of video fields.
This whole action ensures that after user changes video fields, proper `sub` files, corresponding
to new values of video fields, will be presented in system.
# 2. convert /static/filename.srt to filename.srt in self.transcripts.
(it is done to allow user to enter both /static/filename.srt and filename.srt)
# 3. Generate transcripts translation only when user clicks `save` button, not while switching tabs.
a) delete sjson translation for those languages, which were removed from `item.transcripts`.
Note: we are not deleting old SRT files to give user more flexibility.
b) For all SRT files in`item.transcripts` regenerate new SJSON files.
(To avoid confusing situation if you attempt to correct a translation by uploading
a new version of the SRT file with same name).
"""
_ = item.runtime.service(item, "i18n").ugettext
# # 1.
# html5_ids = get_html5_ids(item.html5_sources)
# # Youtube transcript source should always have a higher priority than html5 sources. Appending
# # `youtube_id_1_0` at the end helps achieve this when we read transcripts list.
# possible_video_id_list = html5_ids + [item.youtube_id_1_0]
# sub_name = item.sub
# for video_id in possible_video_id_list:
# if not video_id:
# continue
# if not sub_name:
# remove_subs_from_store(video_id, item)
# continue
# # copy_or_rename_transcript changes item.sub of module
# try:
# # updates item.sub with `video_id`, if it is successful.
# copy_or_rename_transcript(video_id, sub_name, item, user=user)
# except NotFoundError:
# # subtitles file `sub_name` is not presented in the system. Nothing to copy or rename.
# log.debug(
# "Copying %s file content to %s name is failed, "
# "original file does not exist.",
# sub_name, video_id
# )
# 2.
if generate_translation:
for lang, filename in item.transcripts.items():
item.transcripts[lang] = os.path.split(filename)[-1]
# 3.
if generate_translation:
old_langs = set(old_metadata.get('transcripts', {})) if old_metadata else set()
new_langs = set(item.transcripts)
html5_ids = get_html5_ids(item.html5_sources)
possible_video_id_list = html5_ids + [item.youtube_id_1_0]
for lang in old_langs.difference(new_langs): # 3a
for video_id in possible_video_id_list:
if video_id:
remove_subs_from_store(video_id, item, lang)
reraised_message = ''
for lang in new_langs: # 3b
try:
generate_sjson_for_all_speeds(
item,
item.transcripts[lang],
{speed: subs_id for subs_id, speed in six.iteritems(youtube_speed_dict(item))},
lang,
)
            except TranscriptException as ex:
                # Keep generating the remaining languages, but remember the
                # error so it can be re-raised once the loop finishes.
                reraised_message += ' ' + text_type(ex)
if reraised_message:
item.save_with_metadata(user)
raise TranscriptException(reraised_message)
def youtube_speed_dict(item):
"""
Returns {speed: youtube_ids, ...} dict for existing youtube_ids
"""
yt_ids = [item.youtube_id_0_75, item.youtube_id_1_0, item.youtube_id_1_25, item.youtube_id_1_5]
yt_speeds = [0.75, 1.00, 1.25, 1.50]
youtube_ids = {p[0]: p[1] for p in zip(yt_ids, yt_speeds) if p[0]}
return youtube_ids
def subs_filename(subs_id, lang='en'):
"""
Generate proper filename for storage.
"""
if lang == 'en':
return u'subs_{0}.srt.sjson'.format(subs_id)
else:
return u'{0}_subs_{1}.srt.sjson'.format(lang, subs_id)
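# e.g. subs_filename('OEoXaMPEzfM')       -> u'subs_OEoXaMPEzfM.srt.sjson'
#      subs_filename('OEoXaMPEzfM', 'uk') -> u'uk_subs_OEoXaMPEzfM.srt.sjson'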
def generate_sjson_for_all_speeds(item, user_filename, result_subs_dict, lang):
"""
Generates sjson from srt for given lang.
`item` is module object.
"""
_ = item.runtime.service(item, "i18n").ugettext
try:
srt_transcripts = contentstore().find(Transcript.asset_location(item.location, user_filename))
except NotFoundError as ex:
raise TranscriptException(_("{exception_message}: Can't find uploaded transcripts: {user_filename}").format(
exception_message=text_type(ex),
user_filename=user_filename
))
if not lang:
lang = item.transcript_language
# Used utf-8-sig encoding type instead of utf-8 to remove BOM(Byte Order Mark), e.g. U+FEFF
generate_subs_from_source(
result_subs_dict,
os.path.splitext(user_filename)[1][1:],
srt_transcripts.data.decode('utf-8-sig'),
item,
lang
)
def get_or_create_sjson(item, transcripts):
"""
Get sjson if already exists, otherwise generate it.
Generate sjson with subs_id name, from user uploaded srt.
Subs_id is extracted from srt filename, which was set by user.
Args:
        transcripts (dict): dictionary of (language: file) pairs.
Raises:
TranscriptException: when srt subtitles do not exist,
and exceptions from generate_subs_from_source.
`item` is module object.
"""
user_filename = transcripts[item.transcript_language]
user_subs_id = os.path.splitext(user_filename)[0]
source_subs_id, result_subs_dict = user_subs_id, {1.0: user_subs_id}
try:
sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data
except NotFoundError: # generating sjson from srt
generate_sjson_for_all_speeds(item, user_filename, result_subs_dict, item.transcript_language)
sjson_transcript = Transcript.asset(item.location, source_subs_id, item.transcript_language).data
return sjson_transcript
def get_video_ids_info(edx_video_id, youtube_id_1_0, html5_sources):
"""
Returns list internal or external video ids.
Arguments:
edx_video_id (unicode): edx_video_id
youtube_id_1_0 (unicode): youtube id
html5_sources (list): html5 video ids
Returns:
tuple: external or internal, video ids list
"""
clean = lambda item: item.strip() if isinstance(item, six.string_types) else item
external = not bool(clean(edx_video_id))
video_ids = [edx_video_id, youtube_id_1_0] + get_html5_ids(html5_sources)
# video_ids cleanup
video_ids = [item for item in video_ids if bool(clean(item))]
return external, video_ids
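# e.g. get_video_ids_info('', 'OEoXaMPEzfM', ['https://cdn.example.com/v.mp4'])
#      -> (True, ['OEoXaMPEzfM', 'v'])  # empty edx_video_id makes the video "external"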
def clean_video_id(edx_video_id):
"""
Cleans an edx video ID.
Arguments:
edx_video_id(unicode): edx-val's video identifier
"""
return edx_video_id and edx_video_id.strip()
def get_video_transcript_content(edx_video_id, language_code):
"""
Gets video transcript content, only if the corresponding feature flag is enabled for the given `course_id`.
Arguments:
language_code(unicode): Language code of the requested transcript
edx_video_id(unicode): edx-val's video identifier
Returns:
A dict containing transcript's file name and its sjson content.
"""
transcript = None
edx_video_id = clean_video_id(edx_video_id)
if edxval_api and edx_video_id:
transcript = edxval_api.get_video_transcript_data(edx_video_id, language_code)
return transcript
def get_available_transcript_languages(edx_video_id):
"""
Gets available transcript languages for a video.
Arguments:
edx_video_id(unicode): edx-val's video identifier
Returns:
A list containing distinct transcript language codes against all the passed video ids.
"""
available_languages = []
edx_video_id = clean_video_id(edx_video_id)
if edxval_api and edx_video_id:
available_languages = edxval_api.get_available_transcript_languages(video_id=edx_video_id)
return available_languages
def convert_video_transcript(file_name, content, output_format):
"""
Convert video transcript into desired format
Arguments:
file_name: name of transcript file along with its extension
content: transcript content stream
output_format: the format in which transcript will be converted
Returns:
A dict containing the new transcript filename and the content converted into desired format.
"""
name_and_extension = os.path.splitext(file_name)
basename, input_format = name_and_extension[0], name_and_extension[1][1:]
filename = u'{base_name}.{ext}'.format(base_name=basename, ext=output_format)
converted_transcript = Transcript.convert(content, input_format=input_format, output_format=output_format)
return dict(filename=filename, content=converted_transcript)
class Transcript(object):
"""
Container for transcript methods.
"""
SRT = u'srt'
TXT = u'txt'
SJSON = u'sjson'
mime_types = {
SRT: u'application/x-subrip; charset=utf-8',
TXT: u'text/plain; charset=utf-8',
SJSON: u'application/json',
}
@staticmethod
def convert(content, input_format, output_format):
"""
Convert transcript `content` from `input_format` to `output_format`.
Accepted input formats: sjson, srt.
Accepted output format: srt, txt, sjson.
Raises:
TranscriptsGenerationException: On parsing the invalid srt content during conversion from srt to sjson.
"""
assert input_format in ('srt', 'sjson')
assert output_format in ('txt', 'srt', 'sjson')
if input_format == output_format:
return content
if input_format == 'srt':
# Standardize content into bytes for later decoding.
if isinstance(content, text_type):
content = content.encode('utf-8')
if output_format == 'txt':
text = SubRipFile.from_string(content.decode('utf-8')).text
return HTMLParser().unescape(text)
elif output_format == 'sjson':
try:
srt_subs = SubRipFile.from_string(
# Skip byte order mark(BOM) character
content.decode('utf-8-sig'),
error_handling=SubRipFile.ERROR_RAISE
)
except Error as ex: # Base exception from pysrt
raise TranscriptsGenerationException(text_type(ex))
return json.dumps(generate_sjson_from_srt(srt_subs))
if input_format == 'sjson':
if output_format == 'txt':
text = json.loads(content)['text']
text_without_none = [line if line else '' for line in text]
return HTMLParser().unescape("\n".join(text_without_none))
elif output_format == 'srt':
return generate_srt_from_sjson(json.loads(content), speed=1.0)
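    # Example: Transcript.convert(srt_text, 'srt', 'txt') strips the SubRip
    # timing markup and returns plain text; 'srt' -> 'sjson' produces the
    # {"start": [...], "end": [...], "text": [...]} structure used elsewhere.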
@staticmethod
def asset(location, subs_id, lang='en', filename=None):
"""
Get asset from contentstore, asset location is built from subs_id and lang.
`location` is module location.
"""
        # HACK Warning! this is temporary and will be removed once edx-val takes over
        # the transcript module; contentstore will only function as a fallback until
        # all the data is migrated to edx-val. This saves a contentstore hit for the
        # hardcoded dummy non-existent transcript name.
if NON_EXISTENT_TRANSCRIPT in [subs_id, filename]:
raise NotFoundError
asset_filename = subs_filename(subs_id, lang) if not filename else filename
return Transcript.get_asset(location, asset_filename)
@staticmethod
def get_asset(location, filename):
"""
Return asset by location and filename.
"""
return contentstore().find(Transcript.asset_location(location, filename))
@staticmethod
def asset_location(location, filename):
"""
Return asset location. `location` is module location.
"""
# If user transcript filename is empty, raise `TranscriptException` to avoid `InvalidKeyError`.
if not filename:
raise TranscriptException("Transcript not uploaded yet")
return StaticContent.compute_location(location.course_key, filename)
@staticmethod
def delete_asset(location, filename):
"""
Delete asset by location and filename.
"""
try:
contentstore().delete(Transcript.asset_location(location, filename))
log.info("Transcript asset %s was removed from store.", filename)
except NotFoundError:
pass
return StaticContent.compute_location(location.course_key, filename)
class VideoTranscriptsMixin(object):
"""Mixin class for transcript functionality.
This is necessary for VideoBlock.
"""
def available_translations(self, transcripts, verify_assets=None, is_bumper=False):
"""
Return a list of language codes for which we have transcripts.
Arguments:
verify_assets (boolean): If True, checks to ensure that the transcripts
really exist in the contentstore. If False, we just look at the
VideoBlock fields and do not query the contentstore. One reason
we might do this is to avoid slamming contentstore() with queries
when trying to make a listing of videos and their languages.
Defaults to `not FALLBACK_TO_ENGLISH_TRANSCRIPTS`.
transcripts (dict): A dict with all transcripts and a sub.
include_val_transcripts(boolean): If True, adds the edx-val transcript languages as well.
"""
translations = []
if verify_assets is None:
verify_assets = not settings.FEATURES.get('FALLBACK_TO_ENGLISH_TRANSCRIPTS')
sub, other_langs = transcripts["sub"], transcripts["transcripts"]
if verify_assets:
all_langs = dict(**other_langs)
if sub:
all_langs.update({'en': sub})
for language, filename in six.iteritems(all_langs):
try:
# for bumper videos, transcripts are stored in content store only
if is_bumper:
get_transcript_for_video(self.location, filename, filename, language)
else:
get_transcript(self, language)
except NotFoundError:
continue
translations.append(language)
else:
# If we're not verifying the assets, we just trust our field values
translations = list(other_langs)
if not translations or sub:
translations += ['en']
# to clean redundant language codes.
return list(set(translations))
def get_transcript(self, transcripts, transcript_format='srt', lang=None):
"""
Returns transcript, filename and MIME type.
transcripts (dict): A dict with all transcripts and a sub.
Raises:
- NotFoundError if cannot find transcript file in storage.
- ValueError if transcript file is empty or incorrect JSON.
- KeyError if transcript file has incorrect format.
If language is 'en', self.sub should be correct subtitles name.
If language is 'en', but if self.sub is not defined, this means that we
should search for video name in order to get proper transcript (old style courses).
If language is not 'en', give back transcript in proper language and format.
"""
if not lang:
lang = self.get_default_transcript_language(transcripts)
sub, other_lang = transcripts["sub"], transcripts["transcripts"]
if lang == 'en':
if sub: # HTML5 case and (Youtube case for new style videos)
transcript_name = sub
elif self.youtube_id_1_0: # old courses
transcript_name = self.youtube_id_1_0
else:
log.debug("No subtitles for 'en' language")
raise ValueError
data = Transcript.asset(self.location, transcript_name, lang).data.decode('utf-8')
filename = u'{}.{}'.format(transcript_name, transcript_format)
content = Transcript.convert(data, 'sjson', transcript_format)
else:
data = Transcript.asset(self.location, None, None, other_lang[lang]).data.decode('utf-8')
filename = u'{}.{}'.format(os.path.splitext(other_lang[lang])[0], transcript_format)
content = Transcript.convert(data, 'srt', transcript_format)
if not content:
log.debug('no subtitles produced in get_transcript')
raise ValueError
return content, filename, Transcript.mime_types[transcript_format]
def get_default_transcript_language(self, transcripts):
"""
Returns the default transcript language for this video module.
Args:
transcripts (dict): A dict with all transcripts and a sub.
"""
sub, other_lang = transcripts["sub"], transcripts["transcripts"]
if self.transcript_language in other_lang:
transcript_language = self.transcript_language
elif sub:
transcript_language = u'en'
elif len(other_lang) > 0:
transcript_language = sorted(other_lang)[0]
else:
transcript_language = u'en'
return transcript_language
def get_transcripts_info(self, is_bumper=False):
"""
Returns a transcript dictionary for the video.
Arguments:
is_bumper(bool): If True, the request is for the bumper transcripts
include_val_transcripts(bool): If True, include edx-val transcripts as well
"""
if is_bumper:
transcripts = copy.deepcopy(get_bumper_settings(self).get('transcripts', {}))
sub = transcripts.pop("en", "")
else:
transcripts = self.transcripts if self.transcripts else {}
sub = self.sub
# Only attach transcripts that are not empty.
transcripts = {
language_code: transcript_file
for language_code, transcript_file in transcripts.items() if transcript_file != ''
}
# bumper transcripts are stored in content store so we don't need to include val transcripts
if not is_bumper:
transcript_languages = get_available_transcript_languages(edx_video_id=self.edx_video_id)
            # HACK Warning! this is temporary and will be removed once edx-val takes
            # over the transcript module; contentstore will only function as a fallback
            # until all the data is migrated to edx-val.
for language_code in transcript_languages:
if language_code == 'en' and not sub:
sub = NON_EXISTENT_TRANSCRIPT
elif not transcripts.get(language_code):
transcripts[language_code] = NON_EXISTENT_TRANSCRIPT
return {
"sub": sub,
"transcripts": transcripts,
}
@exception_decorator
def get_transcript_from_val(edx_video_id, lang=None, output_format=Transcript.SRT):
"""
Get video transcript from edx-val.
Arguments:
edx_video_id (unicode): video identifier
lang (unicode): transcript language
output_format (unicode): transcript output format
Returns:
tuple containing content, filename, mimetype
"""
transcript = get_video_transcript_content(edx_video_id, lang)
if not transcript:
raise NotFoundError(u'Transcript not found for {}, lang: {}'.format(edx_video_id, lang))
transcript_conversion_props = dict(transcript, output_format=output_format)
transcript = convert_video_transcript(**transcript_conversion_props)
filename = transcript['filename']
content = transcript['content']
mimetype = Transcript.mime_types[output_format]
return content, filename, mimetype
def get_transcript_for_video(video_location, subs_id, file_name, language):
"""
Get video transcript from content store.
NOTE: Transcripts can be searched from content store by two ways:
1. by an id(a.k.a subs_id) which will be used to construct transcript filename
2. by providing transcript filename
Arguments:
video_location (Locator): Video location
subs_id (unicode): id for a transcript in content store
file_name (unicode): file_name for a transcript in content store
language (unicode): transcript language
Returns:
tuple containing transcript input_format, basename, content
"""
try:
if subs_id is None:
raise NotFoundError
content = Transcript.asset(video_location, subs_id, language).data.decode('utf-8')
base_name = subs_id
input_format = Transcript.SJSON
except NotFoundError:
content = Transcript.asset(video_location, None, language, file_name).data.decode('utf-8')
base_name = os.path.splitext(file_name)[0]
input_format = Transcript.SRT
return input_format, base_name, content
@exception_decorator
def get_transcript_from_contentstore(video, language, output_format, transcripts_info, youtube_id=None):
"""
Get video transcript from content store.
Arguments:
video (Video Descriptor): Video descriptor
language (unicode): transcript language
output_format (unicode): transcript output format
transcripts_info (dict): transcript info for a video
youtube_id (unicode): youtube video id
Returns:
tuple containing content, filename, mimetype
"""
input_format, base_name, transcript_content = None, None, None
if output_format not in (Transcript.SRT, Transcript.SJSON, Transcript.TXT):
raise NotFoundError('Invalid transcript format `{output_format}`'.format(output_format=output_format))
sub, other_languages = transcripts_info['sub'], transcripts_info['transcripts']
transcripts = dict(other_languages)
# this is sent in case of a translation dispatch and we need to use it as our subs_id.
possible_sub_ids = [youtube_id, sub, video.youtube_id_1_0] + get_html5_ids(video.html5_sources)
for sub_id in possible_sub_ids:
try:
transcripts[u'en'] = sub_id
input_format, base_name, transcript_content = get_transcript_for_video(
video.location,
subs_id=sub_id,
file_name=transcripts[language],
language=language
)
break
except (KeyError, NotFoundError):
continue
if transcript_content is None:
raise NotFoundError('No transcript for `{lang}` language'.format(
lang=language
))
# add language prefix to transcript file only if language is not None
language_prefix = '{}_'.format(language) if language else ''
transcript_name = u'{}{}.{}'.format(language_prefix, base_name, output_format)
transcript_content = Transcript.convert(transcript_content, input_format=input_format, output_format=output_format)
if not transcript_content.strip():
raise NotFoundError('No transcript content')
if youtube_id:
youtube_ids = youtube_speed_dict(video)
transcript_content = json.dumps(
generate_subs(youtube_ids.get(youtube_id, 1), 1, json.loads(transcript_content))
)
return transcript_content, transcript_name, Transcript.mime_types[output_format]
def get_transcript_from_blockstore(video_block, language, output_format, transcripts_info):
"""
Get video transcript from Blockstore.
Blockstore expects video transcripts to be placed into the 'static/'
subfolder of the XBlock's folder in a Blockstore bundle. For example, if the
video XBlock's definition is in the standard location of
video/video1/definition.xml
Then the .srt files should be placed at e.g.
video/video1/static/video1-en.srt
This is the same place where other public static files are placed for other
XBlocks, such as image files used by HTML blocks.
Video XBlocks in Blockstore must set the 'transcripts' XBlock field to a
JSON dictionary listing the filename of the transcript for each language:
<video
youtube_id_1_0="3_yD_cEKoCk"
transcripts='{"en": "3_yD_cEKoCk-en.srt"}'
display_name="Welcome Video with Transcript"
download_track="true"
/>
This method is tested in openedx/core/djangoapps/content_libraries/tests/test_static_assets.py
Arguments:
video_block (Video XBlock): The video XBlock
language (str): transcript language
output_format (str): transcript output format
transcripts_info (dict): transcript info for a video, from video_block.get_transcripts_info()
Returns:
tuple containing content, filename, mimetype
"""
if output_format not in (Transcript.SRT, Transcript.SJSON, Transcript.TXT):
raise NotFoundError('Invalid transcript format `{output_format}`'.format(output_format=output_format))
transcripts = transcripts_info['transcripts']
if language not in transcripts:
raise NotFoundError("Video {} does not have a transcript file defined for the '{}' language in its OLX.".format(
video_block.scope_ids.usage_id,
language,
))
filename = transcripts[language]
if not filename.endswith('.srt'):
# We want to standardize on .srt
raise NotFoundError("Video XBlocks in Blockstore only support .srt transcript files.")
# Try to load the transcript file out of Blockstore
# In lieu of an XBlock API for this (like block.runtime.resources_fs), we use the blockstore API directly.
bundle_uuid = video_block.scope_ids.def_id.bundle_uuid
path = video_block.scope_ids.def_id.olx_path.rpartition('/')[0] + '/static/' + filename
bundle_version = video_block.scope_ids.def_id.bundle_version # Either bundle_version or draft_name will be set.
draft_name = video_block.scope_ids.def_id.draft_name
try:
content_binary = blockstore_cache.get_bundle_file_data_with_cache(bundle_uuid, path, bundle_version, draft_name)
except blockstore_api.BundleFileNotFound:
raise NotFoundError("Transcript file '{}' missing for video XBlock {}".format(
path,
video_block.scope_ids.usage_id,
))
# Now convert the transcript data to the requested format:
filename_no_extension = os.path.splitext(filename)[0]
output_filename = '{}.{}'.format(filename_no_extension, output_format)
output_transcript = Transcript.convert(
content_binary.decode('utf-8'),
input_format=Transcript.SRT,
output_format=output_format,
)
if not output_transcript.strip():
raise NotFoundError('No transcript content')
return output_transcript, output_filename, Transcript.mime_types[output_format]
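# A minimal usage sketch for the Blockstore path above (hypothetical values;
# `video_block` stands in for a real Video XBlock whose def_id is a
# BundleDefinitionLocator):
#
#     content, filename, mimetype = get_transcript_from_blockstore(
#         video_block,
#         language='en',
#         output_format=Transcript.SRT,
#         transcripts_info=video_block.get_transcripts_info(),
#     )
#     # content is the raw transcript text; for SRT output the mimetype is
#     # whatever Transcript.mime_types maps 'srt' to.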
def get_transcript(video, lang=None, output_format=Transcript.SRT, youtube_id=None):
"""
Get video transcript from edx-val or content store.
Arguments:
video (Video Descriptor): Video Descriptor
lang (unicode): transcript language
output_format (unicode): transcript output format
youtube_id (unicode): youtube video id
Returns:
tuple containing content, filename, mimetype
"""
transcripts_info = video.get_transcripts_info()
if not lang:
lang = video.get_default_transcript_language(transcripts_info)
if isinstance(video.scope_ids.def_id, BundleDefinitionLocator):
# This block is in Blockstore.
# For Blockstore, VAL is considered deprecated and we can load the transcript file
# directly using the Blockstore API:
return get_transcript_from_blockstore(video, lang, output_format, transcripts_info)
try:
edx_video_id = clean_video_id(video.edx_video_id)
if not edx_video_id:
raise NotFoundError
return get_transcript_from_val(edx_video_id, lang, output_format)
except NotFoundError:
return get_transcript_from_contentstore(
video,
lang,
youtube_id=youtube_id,
output_format=output_format,
transcripts_info=transcripts_info
)
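# Illustrative call of the dispatcher above (hypothetical `video` block; the
# function falls back from edx-val to the contentstore when no edx_video_id
# is set, and short-circuits to Blockstore for Blockstore-backed blocks):
#
#     content, filename, mimetype = get_transcript(
#         video, lang='en', output_format=Transcript.SRT)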
| appsembler/edx-platform | common/lib/xmodule/xmodule/video_module/transcripts_utils.py | Python | agpl-3.0 | 40,929 |
# Generated by Django 1.11.12 on 2018-05-25 12:19
from django.db import migrations
import taggit.managers
class Migration(migrations.Migration):
dependencies = [("blog", "0012_auto_20180425_1523")]
operations = [
migrations.AlterField(
model_name="post",
name="genre",
field=taggit.managers.TaggableManager(
blank=True,
help_text="A comma-separated list of tags.",
through="blog.GenresProxy",
to="taggit.Tag",
verbose_name="Genre",
),
)
]
| manti-by/M2-Blog-Engine | manti_by/apps/blog/migrations/0013_auto_20180525_1219.py | Python | bsd-3-clause | 600 |
# Test for one implementation of the interface
from lexicon.providers.nsone import Provider
from integration_tests import IntegrationTests
from unittest import TestCase
import pytest
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from IntegrationTests
class Ns1ProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'nsone'
domain = 'lexicon-example.com'
def _filter_headers(self):
return ['X-NSONE-Key', 'Authorization']
@pytest.mark.skip(reason="can not set ttl when creating/updating records")
def test_Provider_when_calling_list_records_after_setting_ttl(self):
return
# TODO: this should be enabled
@pytest.mark.skip(reason="regenerating auth keys required")
def test_Provider_when_calling_update_record_should_modify_record_name_specified(self):
return
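# A sketch of how another provider would hook into the same shared suite
# (hypothetical provider name; real test modules in this repo follow exactly
# this pattern):
#
#     from lexicon.providers.someprovider import Provider
#
#     class SomeProviderTests(TestCase, IntegrationTests):
#         Provider = Provider
#         provider_name = 'someprovider'
#         domain = 'example.com'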
| tnwhitwell/lexicon | tests/providers/test_nsone.py | Python | mit | 972 |
"""
SoftLayer.tests.CLI.modules.vs_tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:license: MIT, see LICENSE for more details.
"""
import mock
from SoftLayer import testing
import json
class VirtTests(testing.TestCase):
def test_list_vs(self):
result = self.run_command(['vs', 'list', '--tag=tag'])
self.assertEqual(result.exit_code, 0)
self.assertEqual(json.loads(result.output),
[{'datacenter': 'TEST00',
'primary_ip': '172.16.240.2',
'hostname': 'vs-test1',
'action': None,
'id': 100,
'backend_ip': '10.45.19.37'},
{'datacenter': 'TEST00',
'primary_ip': '172.16.240.7',
'hostname': 'vs-test2',
'action': None,
'id': 104,
'backend_ip': '10.45.19.35'}])
def test_detail_vs(self):
result = self.run_command(['vs', 'detail', '100',
'--passwords', '--price'])
self.assertEqual(result.exit_code, 0)
self.assertEqual(json.loads(result.output),
{'active_transaction': None,
'cores': 2,
'created': '2013-08-01 15:23:45',
'datacenter': 'TEST00',
'hostname': 'vs-test1',
'domain': 'test.sftlyr.ws',
'fqdn': 'vs-test1.test.sftlyr.ws',
'id': 100,
'guid': '1a2b3c-1701',
'memory': 1024,
'modified': {},
'os': '12.04-64 Minimal for VSI',
'os_version': '12.04-64 Minimal for VSI',
'notes': 'notes',
'price rate': 1.54,
'tags': ['production'],
'private_cpu': {},
'private_ip': '10.45.19.37',
'private_only': {},
'ptr': 'test.softlayer.com.',
'public_ip': '172.16.240.2',
'state': 'RUNNING',
'status': 'ACTIVE',
'users': [{'password': 'pass', 'username': 'user'}],
'vlans': [{'type': 'PUBLIC',
'number': 23,
'id': 1}],
'owner': 'chechu'})
def test_detail_vs_empty_tag(self):
mock = self.set_mock('SoftLayer_Virtual_Guest', 'getObject')
mock.return_value = {
'id': 100,
'maxCpu': 2,
'maxMemory': 1024,
'tagReferences': [
{'tag': {'name': 'example-tag'}},
{},
],
}
result = self.run_command(['vs', 'detail', '100'])
self.assertEqual(result.exit_code, 0)
self.assertEqual(
json.loads(result.output)['tags'],
['example-tag'],
)
def test_create_options(self):
result = self.run_command(['vs', 'create-options'])
self.assertEqual(result.exit_code, 0)
self.assertEqual(json.loads(result.output),
{'cpus (private)': [],
'cpus (standard)': ['1', '2', '3', '4'],
'datacenter': ['ams01', 'dal05'],
'local disk(0)': ['25', '100'],
'memory': ['1024', '2048', '3072', '4096'],
'nic': ['10', '100', '1000'],
'os (CENTOS)': 'CENTOS_6_64',
'os (DEBIAN)': 'DEBIAN_7_64',
'os (UBUNTU)': 'UBUNTU_12_64'})
@mock.patch('SoftLayer.CLI.formatting.confirm')
def test_create(self, confirm_mock):
confirm_mock.return_value = True
result = self.run_command(['vs', 'create',
'--cpu=2',
'--domain=example.com',
'--hostname=host',
'--os=UBUNTU_LATEST',
'--memory=1',
'--network=100',
'--billing=hourly',
'--datacenter=dal05',
'--tag=dev',
'--tag=green'])
self.assertEqual(result.exit_code, 0)
self.assertEqual(json.loads(result.output),
{'guid': '1a2b3c-1701',
'id': 100,
'created': '2013-08-01 15:23:45'})
args = ({'datacenter': {'name': 'dal05'},
'domain': 'example.com',
'hourlyBillingFlag': True,
'localDiskFlag': True,
'maxMemory': 1024,
'hostname': 'host',
'startCpus': 2,
'operatingSystemReferenceCode': 'UBUNTU_LATEST',
'networkComponents': [{'maxSpeed': '100'}]},)
self.assert_called_with('SoftLayer_Virtual_Guest', 'createObject',
args=args)
| briancline/softlayer-python | SoftLayer/tests/CLI/modules/vs_tests.py | Python | mit | 5,479 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
import socket
import string
import base64
import platform
try:
import http.client as httplib
from urllib import request as url_request
from urllib import parse
except ImportError: # above is available in py3+, below is py2.7
    import httplib
import urllib2 as url_request
import urlparse as parse
from selenium.webdriver.common import utils as common_utils
from selenium import __version__
from .command import Command
from .errorhandler import ErrorCode
from . import utils
LOGGER = logging.getLogger(__name__)
class Request(url_request.Request):
"""
Extends the url_request.Request to support all HTTP request types.
"""
def __init__(self, url, data=None, method=None):
"""
Initialise a new HTTP request.
:Args:
- url - String for the URL to send the request to.
        - data - Data to send with the request.
        - method - String for the HTTP method to use; defaults to POST when
          data is given, otherwise GET.
        """
if method is None:
            method = 'POST' if data is not None else 'GET'
        elif method not in ('POST', 'PUT'):
data = None
self._method = method
url_request.Request.__init__(self, url, data=data)
def get_method(self):
"""
Returns the HTTP method used by this request.
"""
return self._method
class Response(object):
"""
Represents an HTTP response.
"""
def __init__(self, fp, code, headers, url):
"""
Initialise a new Response.
:Args:
- fp - The response body file object.
- code - The HTTP status code returned by the server.
- headers - A dictionary of headers returned by the server.
- url - URL of the retrieved resource represented by this Response.
"""
self.fp = fp
self.read = fp.read
self.code = code
self.headers = headers
self.url = url
def close(self):
"""
Close the response body file object.
"""
self.read = None
self.fp = None
def info(self):
"""
Returns the response headers.
"""
return self.headers
def geturl(self):
"""
Returns the URL for the resource returned in this response.
"""
return self.url
class HttpErrorHandler(url_request.HTTPDefaultErrorHandler):
"""
A custom HTTP error handler.
Used to return Response objects instead of raising an HTTPError exception.
"""
def http_error_default(self, req, fp, code, msg, headers):
"""
Default HTTP error handler.
:Args:
- req - The original Request object.
- fp - The response body file object.
- code - The HTTP status code returned by the server.
- msg - The HTTP status message returned by the server.
- headers - The response headers.
:Returns:
A new Response object.
"""
return Response(fp, code, headers, req.get_full_url())
class RemoteConnection(object):
"""A connection with the Remote WebDriver server.
Communicates with the server using the WebDriver wire protocol:
https://github.com/SeleniumHQ/selenium/wiki/JsonWireProtocol"""
_timeout = socket._GLOBAL_DEFAULT_TIMEOUT
@classmethod
def get_timeout(cls):
"""
:Returns:
Timeout value in seconds for all http requests made to the Remote Connection
"""
return None if cls._timeout == socket._GLOBAL_DEFAULT_TIMEOUT else cls._timeout
@classmethod
def set_timeout(cls, timeout):
"""
Override the default timeout
:Args:
- timeout - timeout value for http requests in seconds
"""
cls._timeout = timeout
@classmethod
def reset_timeout(cls):
"""
Reset the http request timeout to socket._GLOBAL_DEFAULT_TIMEOUT
"""
cls._timeout = socket._GLOBAL_DEFAULT_TIMEOUT
@classmethod
def get_remote_connection_headers(cls, parsed_url, keep_alive=False):
"""
Get headers for remote request.
:Args:
- parsed_url - The parsed url
- keep_alive (Boolean) - Is this a keep-alive connection (default: False)
"""
system = platform.system().lower()
if system == "darwin":
system = "mac"
headers = {
'Accept': 'application/json',
'Content-Type': 'application/json;charset=UTF-8',
'User-Agent': 'selenium/{} (python {})'.format(__version__, system)
}
if parsed_url.username:
base64string = base64.b64encode('{0.username}:{0.password}'.format(parsed_url).encode())
headers.update({
'Authorization': 'Basic {}'.format(base64string.decode())
})
if keep_alive:
headers.update({
'Connection': 'keep-alive'
})
return headers
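    # For illustration, a call like
    #     RemoteConnection.get_remote_connection_headers(
    #         parse.urlparse('http://user:secret@127.0.0.1:4444/wd/hub'))
    # would yield something of this shape (user agent varies by platform):
    #     {'Accept': 'application/json',
    #      'Content-Type': 'application/json;charset=UTF-8',
    #      'User-Agent': 'selenium/<version> (python mac)',
    #      'Authorization': 'Basic dXNlcjpzZWNyZXQ='}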
def __init__(self, remote_server_addr, keep_alive=False, resolve_ip=True):
# Attempt to resolve the hostname and get an IP address.
self.keep_alive = keep_alive
parsed_url = parse.urlparse(remote_server_addr)
addr = parsed_url.hostname
if parsed_url.hostname and resolve_ip:
port = parsed_url.port or None
if parsed_url.scheme == "https":
ip = parsed_url.hostname
elif port and not common_utils.is_connectable(port, parsed_url.hostname):
ip = None
LOGGER.info('Could not connect to port {} on host '
'{}'.format(port, parsed_url.hostname))
else:
ip = common_utils.find_connectable_ip(parsed_url.hostname,
port=port)
if ip:
netloc = ip
addr = netloc
if parsed_url.port:
netloc = common_utils.join_host_port(netloc,
parsed_url.port)
if parsed_url.username:
auth = parsed_url.username
if parsed_url.password:
auth += ':%s' % parsed_url.password
netloc = '%s@%s' % (auth, netloc)
remote_server_addr = parse.urlunparse(
(parsed_url.scheme, netloc, parsed_url.path,
parsed_url.params, parsed_url.query, parsed_url.fragment))
else:
LOGGER.info('Could not get IP address for host: %s' %
parsed_url.hostname)
self._url = remote_server_addr
if keep_alive:
self._conn = httplib.HTTPConnection(
str(addr), str(parsed_url.port), timeout=self._timeout)
self._commands = {
Command.STATUS: ('GET', '/status'),
Command.NEW_SESSION: ('POST', '/session'),
Command.GET_ALL_SESSIONS: ('GET', '/sessions'),
Command.QUIT: ('DELETE', '/session/$sessionId'),
Command.GET_CURRENT_WINDOW_HANDLE:
('GET', '/session/$sessionId/window_handle'),
Command.W3C_GET_CURRENT_WINDOW_HANDLE:
('GET', '/session/$sessionId/window'),
Command.GET_WINDOW_HANDLES:
('GET', '/session/$sessionId/window_handles'),
Command.W3C_GET_WINDOW_HANDLES:
('GET', '/session/$sessionId/window/handles'),
Command.GET: ('POST', '/session/$sessionId/url'),
Command.GO_FORWARD: ('POST', '/session/$sessionId/forward'),
Command.GO_BACK: ('POST', '/session/$sessionId/back'),
Command.REFRESH: ('POST', '/session/$sessionId/refresh'),
Command.EXECUTE_SCRIPT: ('POST', '/session/$sessionId/execute'),
Command.W3C_EXECUTE_SCRIPT:
('POST', '/session/$sessionId/execute/sync'),
Command.W3C_EXECUTE_SCRIPT_ASYNC:
('POST', '/session/$sessionId/execute/async'),
Command.GET_CURRENT_URL: ('GET', '/session/$sessionId/url'),
Command.GET_TITLE: ('GET', '/session/$sessionId/title'),
Command.GET_PAGE_SOURCE: ('GET', '/session/$sessionId/source'),
Command.SCREENSHOT: ('GET', '/session/$sessionId/screenshot'),
Command.ELEMENT_SCREENSHOT: ('GET', '/session/$sessionId/element/$id/screenshot'),
Command.FIND_ELEMENT: ('POST', '/session/$sessionId/element'),
Command.FIND_ELEMENTS: ('POST', '/session/$sessionId/elements'),
Command.W3C_GET_ACTIVE_ELEMENT: ('GET', '/session/$sessionId/element/active'),
Command.GET_ACTIVE_ELEMENT:
('POST', '/session/$sessionId/element/active'),
Command.FIND_CHILD_ELEMENT:
('POST', '/session/$sessionId/element/$id/element'),
Command.FIND_CHILD_ELEMENTS:
('POST', '/session/$sessionId/element/$id/elements'),
Command.CLICK_ELEMENT: ('POST', '/session/$sessionId/element/$id/click'),
Command.CLEAR_ELEMENT: ('POST', '/session/$sessionId/element/$id/clear'),
Command.SUBMIT_ELEMENT: ('POST', '/session/$sessionId/element/$id/submit'),
Command.GET_ELEMENT_TEXT: ('GET', '/session/$sessionId/element/$id/text'),
Command.SEND_KEYS_TO_ELEMENT:
('POST', '/session/$sessionId/element/$id/value'),
Command.SEND_KEYS_TO_ACTIVE_ELEMENT:
('POST', '/session/$sessionId/keys'),
Command.UPLOAD_FILE: ('POST', "/session/$sessionId/file"),
Command.GET_ELEMENT_VALUE:
('GET', '/session/$sessionId/element/$id/value'),
Command.GET_ELEMENT_TAG_NAME:
('GET', '/session/$sessionId/element/$id/name'),
Command.IS_ELEMENT_SELECTED:
('GET', '/session/$sessionId/element/$id/selected'),
Command.SET_ELEMENT_SELECTED:
('POST', '/session/$sessionId/element/$id/selected'),
Command.IS_ELEMENT_ENABLED:
('GET', '/session/$sessionId/element/$id/enabled'),
Command.IS_ELEMENT_DISPLAYED:
('GET', '/session/$sessionId/element/$id/displayed'),
Command.GET_ELEMENT_LOCATION:
('GET', '/session/$sessionId/element/$id/location'),
Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW:
('GET', '/session/$sessionId/element/$id/location_in_view'),
Command.GET_ELEMENT_SIZE:
('GET', '/session/$sessionId/element/$id/size'),
Command.GET_ELEMENT_RECT:
('GET', '/session/$sessionId/element/$id/rect'),
Command.GET_ELEMENT_ATTRIBUTE:
('GET', '/session/$sessionId/element/$id/attribute/$name'),
Command.GET_ELEMENT_PROPERTY:
('GET', '/session/$sessionId/element/$id/property/$name'),
Command.ELEMENT_EQUALS:
('GET', '/session/$sessionId/element/$id/equals/$other'),
Command.GET_ALL_COOKIES: ('GET', '/session/$sessionId/cookie'),
Command.ADD_COOKIE: ('POST', '/session/$sessionId/cookie'),
Command.DELETE_ALL_COOKIES:
('DELETE', '/session/$sessionId/cookie'),
Command.DELETE_COOKIE:
('DELETE', '/session/$sessionId/cookie/$name'),
Command.SWITCH_TO_FRAME: ('POST', '/session/$sessionId/frame'),
Command.SWITCH_TO_PARENT_FRAME: ('POST', '/session/$sessionId/frame/parent'),
Command.SWITCH_TO_WINDOW: ('POST', '/session/$sessionId/window'),
Command.CLOSE: ('DELETE', '/session/$sessionId/window'),
Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY:
('GET', '/session/$sessionId/element/$id/css/$propertyName'),
Command.IMPLICIT_WAIT:
('POST', '/session/$sessionId/timeouts/implicit_wait'),
Command.EXECUTE_ASYNC_SCRIPT: ('POST', '/session/$sessionId/execute_async'),
Command.SET_SCRIPT_TIMEOUT:
('POST', '/session/$sessionId/timeouts/async_script'),
Command.SET_TIMEOUTS:
('POST', '/session/$sessionId/timeouts'),
Command.DISMISS_ALERT:
('POST', '/session/$sessionId/dismiss_alert'),
Command.W3C_DISMISS_ALERT:
('POST', '/session/$sessionId/alert/dismiss'),
Command.ACCEPT_ALERT:
('POST', '/session/$sessionId/accept_alert'),
Command.W3C_ACCEPT_ALERT:
('POST', '/session/$sessionId/alert/accept'),
Command.SET_ALERT_VALUE:
('POST', '/session/$sessionId/alert_text'),
Command.W3C_SET_ALERT_VALUE:
('POST', '/session/$sessionId/alert/text'),
Command.GET_ALERT_TEXT:
('GET', '/session/$sessionId/alert_text'),
Command.W3C_GET_ALERT_TEXT:
('GET', '/session/$sessionId/alert/text'),
Command.SET_ALERT_CREDENTIALS:
('POST', '/session/$sessionId/alert/credentials'),
Command.CLICK:
('POST', '/session/$sessionId/click'),
Command.W3C_ACTIONS:
('POST', '/session/$sessionId/actions'),
Command.W3C_CLEAR_ACTIONS:
('DELETE', '/session/$sessionId/actions'),
Command.DOUBLE_CLICK:
('POST', '/session/$sessionId/doubleclick'),
Command.MOUSE_DOWN:
('POST', '/session/$sessionId/buttondown'),
Command.MOUSE_UP:
('POST', '/session/$sessionId/buttonup'),
Command.MOVE_TO:
('POST', '/session/$sessionId/moveto'),
Command.GET_WINDOW_SIZE:
('GET', '/session/$sessionId/window/$windowHandle/size'),
Command.SET_WINDOW_SIZE:
('POST', '/session/$sessionId/window/$windowHandle/size'),
Command.GET_WINDOW_POSITION:
('GET', '/session/$sessionId/window/$windowHandle/position'),
Command.SET_WINDOW_POSITION:
('POST', '/session/$sessionId/window/$windowHandle/position'),
Command.SET_WINDOW_RECT:
('POST', '/session/$sessionId/window/rect'),
Command.GET_WINDOW_RECT:
('GET', '/session/$sessionId/window/rect'),
Command.MAXIMIZE_WINDOW:
('POST', '/session/$sessionId/window/$windowHandle/maximize'),
Command.W3C_MAXIMIZE_WINDOW:
('POST', '/session/$sessionId/window/maximize'),
Command.SET_SCREEN_ORIENTATION:
('POST', '/session/$sessionId/orientation'),
Command.GET_SCREEN_ORIENTATION:
('GET', '/session/$sessionId/orientation'),
Command.SINGLE_TAP:
('POST', '/session/$sessionId/touch/click'),
Command.TOUCH_DOWN:
('POST', '/session/$sessionId/touch/down'),
Command.TOUCH_UP:
('POST', '/session/$sessionId/touch/up'),
Command.TOUCH_MOVE:
('POST', '/session/$sessionId/touch/move'),
Command.TOUCH_SCROLL:
('POST', '/session/$sessionId/touch/scroll'),
Command.DOUBLE_TAP:
('POST', '/session/$sessionId/touch/doubleclick'),
Command.LONG_PRESS:
('POST', '/session/$sessionId/touch/longclick'),
Command.FLICK:
('POST', '/session/$sessionId/touch/flick'),
Command.EXECUTE_SQL:
('POST', '/session/$sessionId/execute_sql'),
Command.GET_LOCATION:
('GET', '/session/$sessionId/location'),
Command.SET_LOCATION:
('POST', '/session/$sessionId/location'),
Command.GET_APP_CACHE:
('GET', '/session/$sessionId/application_cache'),
Command.GET_APP_CACHE_STATUS:
('GET', '/session/$sessionId/application_cache/status'),
Command.CLEAR_APP_CACHE:
('DELETE', '/session/$sessionId/application_cache/clear'),
Command.GET_NETWORK_CONNECTION:
('GET', '/session/$sessionId/network_connection'),
Command.SET_NETWORK_CONNECTION:
('POST', '/session/$sessionId/network_connection'),
Command.GET_LOCAL_STORAGE_ITEM:
('GET', '/session/$sessionId/local_storage/key/$key'),
Command.REMOVE_LOCAL_STORAGE_ITEM:
('DELETE', '/session/$sessionId/local_storage/key/$key'),
Command.GET_LOCAL_STORAGE_KEYS:
('GET', '/session/$sessionId/local_storage'),
Command.SET_LOCAL_STORAGE_ITEM:
('POST', '/session/$sessionId/local_storage'),
Command.CLEAR_LOCAL_STORAGE:
('DELETE', '/session/$sessionId/local_storage'),
Command.GET_LOCAL_STORAGE_SIZE:
('GET', '/session/$sessionId/local_storage/size'),
Command.GET_SESSION_STORAGE_ITEM:
('GET', '/session/$sessionId/session_storage/key/$key'),
Command.REMOVE_SESSION_STORAGE_ITEM:
('DELETE', '/session/$sessionId/session_storage/key/$key'),
Command.GET_SESSION_STORAGE_KEYS:
('GET', '/session/$sessionId/session_storage'),
Command.SET_SESSION_STORAGE_ITEM:
('POST', '/session/$sessionId/session_storage'),
Command.CLEAR_SESSION_STORAGE:
('DELETE', '/session/$sessionId/session_storage'),
Command.GET_SESSION_STORAGE_SIZE:
('GET', '/session/$sessionId/session_storage/size'),
Command.GET_LOG:
('POST', '/session/$sessionId/log'),
Command.GET_AVAILABLE_LOG_TYPES:
('GET', '/session/$sessionId/log/types'),
Command.CURRENT_CONTEXT_HANDLE:
('GET', '/session/$sessionId/context'),
Command.CONTEXT_HANDLES:
('GET', '/session/$sessionId/contexts'),
Command.SWITCH_TO_CONTEXT:
('POST', '/session/$sessionId/context'),
Command.FULLSCREEN_WINDOW:
('POST', '/session/$sessionId/window/fullscreen'),
Command.MINIMIZE_WINDOW:
('POST', '/session/$sessionId/window/minimize')
}
def execute(self, command, params):
"""
Send a command to the remote server.
        Any path substitutions required for the URL mapped to the command should be
included in the command parameters.
:Args:
- command - A string specifying the command to execute.
- params - A dictionary of named parameters to send with the command as
its JSON payload.
"""
        command_info = self._commands.get(command)
        assert command_info is not None, 'Unrecognised command %s' % command
path = string.Template(command_info[1]).substitute(params)
if hasattr(self, 'w3c') and self.w3c and isinstance(params, dict) and 'sessionId' in params:
del params['sessionId']
data = utils.dump_json(params)
url = '%s%s' % (self._url, path)
return self._request(command_info[0], url, body=data)
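    # For example, executing Command.GET with params
    # {'sessionId': 'abc123', 'url': 'http://example.com'} resolves the
    # template '/session/$sessionId/url' to '/session/abc123/url' and POSTs
    # the params as a JSON body. A sketch of the substitution step (extra
    # keys in the mapping are simply ignored by string.Template):
    #
    #     string.Template('/session/$sessionId/url').substitute(
    #         {'sessionId': 'abc123', 'url': 'http://example.com'})
    #     # -> '/session/abc123/url'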
def _request(self, method, url, body=None):
"""
Send an HTTP request to the remote server.
:Args:
- method - A string for the HTTP method to send the request with.
- url - A string for the URL to send the request to.
- body - A string for request body. Ignored unless method is POST or PUT.
:Returns:
A dictionary with the server's parsed JSON response.
"""
LOGGER.debug('%s %s %s' % (method, url, body))
parsed_url = parse.urlparse(url)
headers = self.get_remote_connection_headers(parsed_url, self.keep_alive)
if self.keep_alive:
if body and method != 'POST' and method != 'PUT':
body = None
try:
self._conn.request(method, parsed_url.path, body, headers)
resp = self._conn.getresponse()
except (httplib.HTTPException, socket.error):
self._conn.close()
raise
statuscode = resp.status
else:
password_manager = None
if parsed_url.username:
netloc = parsed_url.hostname
if parsed_url.port:
netloc += ":%s" % parsed_url.port
cleaned_url = parse.urlunparse((
parsed_url.scheme,
netloc,
parsed_url.path,
parsed_url.params,
parsed_url.query,
parsed_url.fragment))
password_manager = url_request.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None,
"%s://%s" % (parsed_url.scheme, netloc),
parsed_url.username,
parsed_url.password)
request = Request(cleaned_url, data=body.encode('utf-8'), method=method)
else:
request = Request(url, data=body.encode('utf-8'), method=method)
for key, val in headers.items():
request.add_header(key, val)
if password_manager:
opener = url_request.build_opener(url_request.HTTPRedirectHandler(),
HttpErrorHandler(),
url_request.HTTPBasicAuthHandler(password_manager))
else:
opener = url_request.build_opener(url_request.HTTPRedirectHandler(),
HttpErrorHandler())
resp = opener.open(request, timeout=self._timeout)
statuscode = resp.code
if not hasattr(resp, 'getheader'):
if hasattr(resp.headers, 'getheader'):
resp.getheader = lambda x: resp.headers.getheader(x)
elif hasattr(resp.headers, 'get'):
resp.getheader = lambda x: resp.headers.get(x)
data = resp.read()
LOGGER.debug(data)
try:
if 300 <= statuscode < 304:
return self._request('GET', resp.getheader('location'))
body = data.decode('utf-8').replace('\x00', '').strip()
if 399 < statuscode <= 500:
return {'status': statuscode, 'value': body}
content_type = []
if resp.getheader('Content-Type') is not None:
content_type = resp.getheader('Content-Type').split(';')
if not any([x.startswith('image/png') for x in content_type]):
try:
data = utils.load_json(body.strip())
except ValueError:
if 199 < statuscode < 300:
status = ErrorCode.SUCCESS
else:
status = ErrorCode.UNKNOWN_ERROR
return {'status': status, 'value': body.strip()}
assert type(data) is dict, (
'Invalid server response body: %s' % body)
# Some of the drivers incorrectly return a response
# with no 'value' field when they should return null.
if 'value' not in data:
data['value'] = None
return data
else:
data = {'status': 0, 'value': body.strip()}
return data
finally:
LOGGER.debug("Finished Request")
resp.close()
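# A minimal, hypothetical round trip through this class (server address and
# response values are made up; real callers go through WebDriver rather than
# using RemoteConnection directly):
#
#     conn = RemoteConnection('http://127.0.0.1:4444/wd/hub', keep_alive=True)
#     response = conn.execute(Command.STATUS, {})
#     # response is a dict such as {'status': 0, 'value': {...}}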
| ryfeus/lambda-packs | Selenium_Chromium/source/selenium/webdriver/remote/remote_connection.py | Python | mit | 24,770 |
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import ddt
from keystoneclient.auth import token_endpoint
from keystoneclient import exceptions as keystone_exceptions
import mock
from oslo_config import cfg
from rally.common import objects
from rally import consts
from rally import exceptions
from rally import osclients
from tests.unit import fakes
from tests.unit import test
@osclients.configure("dummy")
class DummyClient(osclients.OSClient):
def create_client(self, *args, **kwargs):
pass
class OSClientTestCase(test.TestCase):
def test_choose_service_type(self):
default_service_type = "default_service_type"
@osclients.configure("test_choose_service_type",
default_service_type=default_service_type)
class FakeClient(osclients.OSClient):
create_client = mock.MagicMock()
fake_client = FakeClient(mock.MagicMock(), {}, {})
self.assertEqual(default_service_type,
fake_client.choose_service_type())
self.assertEqual("foo",
fake_client.choose_service_type("foo"))
class CachedTestCase(test.TestCase):
def test_cached(self):
clients = osclients.Clients(mock.MagicMock())
client_name = "CachedTestCase.test_cached"
fake_client = osclients.configure(client_name)(
osclients.OSClient(clients.credential, clients.api_info,
clients.cache))
fake_client.create_client = mock.MagicMock()
self.assertEqual({}, clients.cache)
fake_client()
self.assertEqual(
{client_name: fake_client.create_client.return_value},
clients.cache)
fake_client.create_client.assert_called_once_with()
fake_client()
fake_client.create_client.assert_called_once_with()
fake_client("2")
self.assertEqual(
{client_name: fake_client.create_client.return_value,
"%s('2',)" % client_name: fake_client.create_client.return_value},
clients.cache)
clients.clear()
self.assertEqual({}, clients.cache)
class TestCreateKeystoneClient(test.TestCase):
def make_auth_args(self):
auth_kwargs = {
"auth_url": "http://auth_url", "username": "user",
"password": "password", "tenant_name": "tenant",
"domain_name": "domain", "project_name": "project_name",
"project_domain_name": "project_domain_name",
"user_domain_name": "user_domain_name",
}
kwargs = {"https_insecure": False, "https_cacert": None}
kwargs.update(auth_kwargs)
return auth_kwargs, kwargs
def set_up_keystone_mocks(self):
self.ksc_module = mock.MagicMock()
self.ksc_client = mock.MagicMock()
self.ksc_identity = mock.MagicMock()
self.ksc_password = mock.MagicMock()
self.ksc_session = mock.MagicMock()
self.ksc_auth = mock.MagicMock()
self.patcher = mock.patch.dict("sys.modules",
{"keystoneclient": self.ksc_module,
"keystoneclient.auth": self.ksc_auth})
self.patcher.start()
self.addCleanup(self.patcher.stop)
self.ksc_module.client = self.ksc_client
self.ksc_auth.identity = self.ksc_identity
self.ksc_auth.identity.Password = self.ksc_password
self.ksc_module.session = self.ksc_session
def test_create_keystone_client(self):
# NOTE(bigjools): This is a very poor testing strategy as it
# tightly couples the test implementation to the tested
# function's implementation. Ideally, we'd use a fake keystone
# but all that's happening here is that it's checking the right
# parameters were passed to the various parts that create a
# client. Hopefully one day we'll get a real fake from the
# keystone guys.
self.set_up_keystone_mocks()
auth_kwargs, all_kwargs = self.make_auth_args()
keystone = osclients.Keystone(
mock.MagicMock(), mock.sentinel, mock.sentinel)
client = keystone._create_keystone_client(all_kwargs)
self.ksc_password.assert_called_once_with(**auth_kwargs)
self.ksc_session.Session.assert_called_once_with(
auth=self.ksc_identity.Password(), timeout=mock.ANY,
verify=mock.ANY)
self.ksc_client.Client.assert_called_once_with(**all_kwargs)
self.assertIs(client, self.ksc_client.Client())
def test_client_is_pre_authed(self):
# The client needs to be pre-authed so that service_catalog
# works. This is because when using sessions, lazy auth is done
# in keystoneclient.
self.set_up_keystone_mocks()
_, all_kwargs = self.make_auth_args()
keystone = osclients.Keystone(
mock.MagicMock(), mock.sentinel, mock.sentinel)
client = keystone._create_keystone_client(all_kwargs)
auth_ref = getattr(client, "auth_ref", None)
self.assertIsNot(auth_ref, None)
def test_create_keystone_client_with_v2_url_omits_domain(self):
# NOTE(bigjools): Test that domain-related info is not present
# when forcing a v2 URL, because it breaks keystoneclient's
# service discovery.
self.set_up_keystone_mocks()
auth_kwargs, all_kwargs = self.make_auth_args()
all_kwargs["auth_url"] = "http://auth_url/v2.0"
auth_kwargs["auth_url"] = all_kwargs["auth_url"]
keystone = osclients.Keystone(
mock.MagicMock(), mock.sentinel, mock.sentinel)
client = keystone._create_keystone_client(all_kwargs)
auth_kwargs.pop("user_domain_name")
auth_kwargs.pop("project_domain_name")
auth_kwargs.pop("domain_name")
self.ksc_password.assert_called_once_with(**auth_kwargs)
self.ksc_session.Session.assert_called_once_with(
auth=self.ksc_identity.Password(), timeout=mock.ANY,
verify=mock.ANY)
self.ksc_client.Client.assert_called_once_with(**all_kwargs)
self.assertIs(client, self.ksc_client.Client())
@ddt.ddt
class OSClientsTestCase(test.TestCase):
def setUp(self):
super(OSClientsTestCase, self).setUp()
self.credential = objects.Credential("http://auth_url/v2.0", "use",
"pass", "tenant")
self.clients = osclients.Clients(self.credential, {})
self.fake_keystone = fakes.FakeKeystoneClient()
self.fake_keystone.auth_token = mock.MagicMock()
self.service_catalog = self.fake_keystone.service_catalog
self.service_catalog.url_for = mock.MagicMock()
keystone_patcher = mock.patch(
"rally.osclients.Keystone._create_keystone_client")
self.mock_create_keystone_client = keystone_patcher.start()
self.addCleanup(keystone_patcher.stop)
self.mock_create_keystone_client.return_value = self.fake_keystone
def tearDown(self):
super(OSClientsTestCase, self).tearDown()
def test_create_from_env(self):
with mock.patch.dict("os.environ",
{"OS_AUTH_URL": "foo_auth_url",
"OS_USERNAME": "foo_username",
"OS_PASSWORD": "foo_password",
"OS_TENANT_NAME": "foo_tenant_name",
"OS_REGION_NAME": "foo_region_name"}):
clients = osclients.Clients.create_from_env()
self.assertEqual("foo_auth_url", clients.credential.auth_url)
self.assertEqual("foo_username", clients.credential.username)
self.assertEqual("foo_password", clients.credential.password)
self.assertEqual("foo_tenant_name", clients.credential.tenant_name)
self.assertEqual("foo_region_name", clients.credential.region_name)
@mock.patch.object(DummyClient, "_get_endpoint")
@mock.patch("keystoneclient.session.Session")
def test_get_session(self, mock_session, mock_dummy_client__get_endpoint):
# Use DummyClient since if not the abc meta kicks in
osc = DummyClient(self.credential, {}, {})
with mock.patch.object(token_endpoint, "Token") as token:
osc._get_session()
token.assert_called_once_with(
mock_dummy_client__get_endpoint.return_value,
self.fake_keystone.auth_token
)
mock_session.assert_called_once_with(
auth=token.return_value, verify=not self.credential.insecure,
timeout=cfg.CONF.openstack_client_http_timeout)
@mock.patch.object(DummyClient, "_get_endpoint")
@mock.patch("keystoneclient.session.Session")
def test_get_session_with_endpoint(
self, mock_session, mock_dummy_client__get_endpoint):
# Use DummyClient since if not the abc meta kicks in
osc = DummyClient(self.credential, {}, {})
fake_endpoint = mock.Mock()
with mock.patch.object(token_endpoint, "Token") as token:
osc._get_session(endpoint=fake_endpoint)
self.assertFalse(mock_dummy_client__get_endpoint.called)
token.assert_called_once_with(
fake_endpoint,
self.fake_keystone.auth_token
)
mock_session.assert_called_once_with(
auth=token.return_value, verify=not self.credential.insecure,
timeout=cfg.CONF.openstack_client_http_timeout)
@mock.patch("keystoneclient.session.Session")
def test_get_session_with_auth(self, mock_session):
# Use DummyClient since if not the abc meta kicks in
osc = DummyClient(self.credential, {}, {})
fake_auth = mock.Mock()
osc._get_session(auth=fake_auth)
mock_session.assert_called_once_with(
auth=fake_auth, verify=not self.credential.insecure,
timeout=cfg.CONF.openstack_client_http_timeout)
def test_keystone(self):
self.assertNotIn("keystone", self.clients.cache)
client = self.clients.keystone()
self.assertEqual(client, self.fake_keystone)
credential = {"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": False, "cacert": None}
kwargs = self.credential.to_dict()
kwargs.update(credential.items())
self.mock_create_keystone_client.assert_called_once_with(kwargs)
self.assertEqual(self.fake_keystone, self.clients.cache["keystone"])
@mock.patch("rally.osclients.Keystone.create_client")
def test_verified_keystone_user_not_admin(self,
mock_keystone_create_client):
# naming rule for mocks sucks
mock_keystone = mock_keystone_create_client
mock_keystone.return_value = fakes.FakeKeystoneClient()
mock_keystone.return_value.auth_ref.role_names = ["notadmin"]
self.assertRaises(exceptions.InvalidAdminException,
self.clients.verified_keystone)
@mock.patch("rally.osclients.Keystone.create_client")
def test_verified_keystone_unauthorized(self, mock_keystone_create_client):
mock_keystone_create_client.return_value = fakes.FakeKeystoneClient()
mock_keystone_create_client.side_effect = (
keystone_exceptions.Unauthorized)
self.assertRaises(exceptions.InvalidEndpointsException,
self.clients.verified_keystone)
@mock.patch("rally.osclients.Keystone.create_client")
def test_verified_keystone_unreachable(self, mock_keystone_create_client):
mock_keystone_create_client.return_value = fakes.FakeKeystoneClient()
mock_keystone_create_client.side_effect = (
keystone_exceptions.AuthorizationFailure
)
self.assertRaises(exceptions.HostUnreachableException,
self.clients.verified_keystone)
def test_nova(self):
fake_nova = fakes.FakeNovaClient()
mock_nova = mock.MagicMock()
mock_nova.client.Client.return_value = fake_nova
self.assertNotIn("nova", self.clients.cache)
with mock.patch.dict("sys.modules", {"novaclient": mock_nova}):
client = self.clients.nova()
self.assertEqual(fake_nova, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="compute",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
mock_nova.client.Client.assert_called_once_with(
"2",
auth_token=self.fake_keystone.auth_token,
http_log_debug=False,
timeout=cfg.CONF.openstack_client_http_timeout,
insecure=False, cacert=None,
username=self.credential.username,
api_key=self.credential.password,
project_id=self.credential.tenant_name,
auth_url=self.credential.auth_url)
client.set_management_url.assert_called_once_with(
self.service_catalog.url_for.return_value)
self.assertEqual(fake_nova, self.clients.cache["nova"])
def test_neutron(self):
fake_neutron = fakes.FakeNeutronClient()
mock_neutron = mock.MagicMock()
mock_neutron.client.Client.return_value = fake_neutron
self.assertNotIn("neutron", self.clients.cache)
with mock.patch.dict("sys.modules", {"neutronclient.neutron":
mock_neutron}):
client = self.clients.neutron()
self.assertEqual(fake_neutron, client)
kw = {
"token": self.fake_keystone.auth_token,
"endpoint_url": self.service_catalog.url_for.return_value,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": self.credential.insecure,
"ca_cert": self.credential.cacert,
"username": self.credential.username,
"password": self.credential.password,
"tenant_name": self.credential.tenant_name,
"auth_url": self.credential.auth_url
}
self.service_catalog.url_for.assert_called_once_with(
service_type="network",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
mock_neutron.client.Client.assert_called_once_with("2.0", **kw)
self.assertEqual(fake_neutron, self.clients.cache["neutron"])
def test_glance(self):
fake_glance = fakes.FakeGlanceClient()
mock_glance = mock.MagicMock()
mock_glance.Client = mock.MagicMock(return_value=fake_glance)
with mock.patch.dict("sys.modules", {"glanceclient": mock_glance}):
self.assertNotIn("glance", self.clients.cache)
client = self.clients.glance()
self.assertEqual(fake_glance, client)
kw = {"endpoint": self.service_catalog.url_for.return_value,
"token": self.fake_keystone.auth_token,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": False, "cacert": None}
self.service_catalog.url_for.assert_called_once_with(
service_type="image",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
mock_glance.Client.assert_called_once_with("1", **kw)
self.assertEqual(fake_glance, self.clients.cache["glance"])
def test_cinder(self):
fake_cinder = mock.MagicMock(client=fakes.FakeCinderClient())
mock_cinder = mock.MagicMock()
mock_cinder.client.Client.return_value = fake_cinder
self.assertNotIn("cinder", self.clients.cache)
with mock.patch.dict("sys.modules", {"cinderclient": mock_cinder}):
client = self.clients.cinder()
self.assertEqual(fake_cinder, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="volumev2",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
mock_cinder.client.Client.assert_called_once_with(
"2",
http_log_debug=False,
timeout=cfg.CONF.openstack_client_http_timeout,
insecure=False, cacert=None,
username=self.credential.username,
api_key=self.credential.password,
project_id=self.credential.tenant_name,
auth_url=self.credential.auth_url)
self.assertEqual(fake_cinder.client.management_url,
self.service_catalog.url_for.return_value)
self.assertEqual(fake_cinder.client.auth_token,
self.fake_keystone.auth_token)
self.assertEqual(fake_cinder, self.clients.cache["cinder"])
def test_manila(self):
mock_manila = mock.MagicMock()
self.assertNotIn("manila", self.clients.cache)
with mock.patch.dict("sys.modules", {"manilaclient": mock_manila}):
client = self.clients.manila()
self.assertEqual(mock_manila.client.Client.return_value, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="share",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
mock_manila.client.Client.assert_called_once_with(
"1",
http_log_debug=False,
timeout=cfg.CONF.openstack_client_http_timeout,
insecure=False, cacert=None,
username=self.credential.username,
api_key=self.credential.password,
region_name=self.credential.region_name,
project_name=self.credential.tenant_name,
auth_url=self.credential.auth_url)
self.assertEqual(
mock_manila.client.Client.return_value.client.management_url,
self.service_catalog.url_for.return_value)
self.assertEqual(
mock_manila.client.Client.return_value.client.auth_token,
self.fake_keystone.auth_token)
self.assertEqual(
mock_manila.client.Client.return_value,
self.clients.cache["manila"])
def test_ceilometer(self):
fake_ceilometer = fakes.FakeCeilometerClient()
mock_ceilometer = mock.MagicMock()
mock_ceilometer.client.get_client = mock.MagicMock(
return_value=fake_ceilometer)
self.assertNotIn("ceilometer", self.clients.cache)
with mock.patch.dict("sys.modules",
{"ceilometerclient": mock_ceilometer}):
client = self.clients.ceilometer()
self.assertEqual(fake_ceilometer, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="metering",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
kw = {"os_endpoint": self.service_catalog.url_for.return_value,
"token": self.fake_keystone.auth_token,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": False, "cacert": None,
"username": self.credential.username,
"password": self.credential.password,
"tenant_name": self.credential.tenant_name,
"auth_url": self.credential.auth_url
}
mock_ceilometer.client.get_client.assert_called_once_with("2",
**kw)
self.assertEqual(fake_ceilometer,
self.clients.cache["ceilometer"])
def test_monasca(self):
fake_monasca = fakes.FakeMonascaClient()
mock_monasca = mock.MagicMock()
mock_monasca.client.Client.return_value = fake_monasca
self.assertNotIn("monasca", self.clients.cache)
with mock.patch.dict("sys.modules",
{"monascaclient": mock_monasca}):
client = self.clients.monasca()
self.assertEqual(fake_monasca, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="monitoring",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
os_endpoint = self.service_catalog.url_for.return_value
kw = {"token": self.fake_keystone.auth_token,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": False, "cacert": None,
"username": self.credential.username,
"password": self.credential.password,
"tenant_name": self.credential.tenant_name,
"auth_url": self.credential.auth_url
}
mock_monasca.client.Client.assert_called_once_with("2_0",
os_endpoint,
**kw)
self.assertEqual(mock_monasca.client.Client.return_value,
self.clients.cache["monasca"])
def test_ironic(self):
fake_ironic = fakes.FakeIronicClient()
mock_ironic = mock.MagicMock()
mock_ironic.client.get_client = mock.MagicMock(
return_value=fake_ironic)
self.assertNotIn("ironic", self.clients.cache)
with mock.patch.dict("sys.modules", {"ironicclient": mock_ironic}):
client = self.clients.ironic()
self.assertEqual(fake_ironic, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="baremetal",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
kw = {
"os_auth_token": self.fake_keystone.auth_token,
"ironic_url": self.service_catalog.url_for.return_value,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": self.credential.insecure,
"cacert": self.credential.cacert
}
mock_ironic.client.get_client.assert_called_once_with("1", **kw)
self.assertEqual(fake_ironic, self.clients.cache["ironic"])
def test_sahara(self):
fake_sahara = fakes.FakeSaharaClient()
mock_sahara = mock.MagicMock()
mock_sahara.client.Client = mock.MagicMock(return_value=fake_sahara)
self.assertNotIn("sahara", self.clients.cache)
with mock.patch.dict("sys.modules", {"saharaclient": mock_sahara}):
client = self.clients.sahara()
self.assertEqual(fake_sahara, client)
kw = {
"service_type": "data-processing",
"insecure": False,
"username": self.credential.username,
"api_key": self.credential.password,
"project_name": self.credential.tenant_name,
"auth_url": self.credential.auth_url
}
mock_sahara.client.Client.assert_called_once_with(1.1, **kw)
self.assertEqual(fake_sahara, self.clients.cache["sahara"])
def test_zaqar(self):
fake_zaqar = fakes.FakeZaqarClient()
mock_zaqar = mock.MagicMock()
mock_zaqar.client.Client = mock.MagicMock(return_value=fake_zaqar)
self.assertNotIn("zaqar", self.clients.cache)
with mock.patch.dict("sys.modules", {"zaqarclient.queues":
mock_zaqar}):
client = self.clients.zaqar()
self.assertEqual(fake_zaqar, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="messaging",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
fake_zaqar_url = self.service_catalog.url_for.return_value
conf = {"auth_opts": {"backend": "keystone", "options": {
"os_username": self.credential.username,
"os_password": self.credential.password,
"os_project_name": self.credential.tenant_name,
"os_project_id": self.fake_keystone.auth_tenant_id,
"os_auth_url": self.credential.auth_url,
"insecure": self.credential.insecure,
}}}
mock_zaqar.client.Client.assert_called_once_with(
url=fake_zaqar_url, version=1.1, conf=conf)
self.assertEqual(fake_zaqar, self.clients.cache["zaqar"])
def test_trove(self):
fake_trove = fakes.FakeTroveClient()
mock_trove = mock.MagicMock()
mock_trove.client.Client = mock.MagicMock(return_value=fake_trove)
self.assertNotIn("trove", self.clients.cache)
with mock.patch.dict("sys.modules", {"troveclient": mock_trove}):
client = self.clients.trove()
self.assertEqual(fake_trove, client)
kw = {
"username": self.credential.username,
"api_key": self.credential.password,
"project_id": self.credential.tenant_name,
"auth_url": self.credential.auth_url,
"region_name": self.credential.region_name,
"timeout": cfg.CONF.openstack_client_http_timeout,
"insecure": self.credential.insecure,
"cacert": self.credential.cacert
}
mock_trove.client.Client.assert_called_once_with("1.0", **kw)
self.assertEqual(fake_trove, self.clients.cache["trove"])
def test_mistral(self):
fake_mistral = fakes.FakeMistralClient()
mock_mistral = mock.Mock()
mock_mistral.client.client.return_value = fake_mistral
self.assertNotIn("mistral", self.clients.cache)
with mock.patch.dict(
"sys.modules", {"mistralclient": mock_mistral,
"mistralclient.api": mock_mistral}):
client = self.clients.mistral()
self.assertEqual(fake_mistral, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="workflowv2",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name
)
fake_mistral_url = self.service_catalog.url_for.return_value
mock_mistral.client.client.assert_called_once_with(
mistral_url=fake_mistral_url,
service_type="workflowv2",
auth_token=self.fake_keystone.auth_token
)
self.assertEqual(fake_mistral, self.clients.cache["mistral"])
def test_swift(self):
fake_swift = fakes.FakeSwiftClient()
mock_swift = mock.MagicMock()
mock_swift.client.Connection = mock.MagicMock(return_value=fake_swift)
self.assertNotIn("swift", self.clients.cache)
with mock.patch.dict("sys.modules", {"swiftclient": mock_swift}):
client = self.clients.swift()
self.assertEqual(client, fake_swift)
self.service_catalog.url_for.assert_called_once_with(
service_type="object-store",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
kw = {"retries": 1,
"preauthurl": self.service_catalog.url_for.return_value,
"preauthtoken": self.fake_keystone.auth_token,
"insecure": False,
"cacert": None,
"user": self.credential.username,
"key": self.credential.password,
"tenant_name": self.credential.tenant_name,
"authurl": self.credential.auth_url
}
mock_swift.client.Connection.assert_called_once_with(**kw)
self.assertEqual(self.clients.cache["swift"], fake_swift)
def test_ec2(self):
mock_boto = mock.Mock()
self.service_catalog.url_for.return_value = "http://fake.to:1/fake"
self.fake_keystone.ec2 = mock.Mock()
self.fake_keystone.ec2.create.return_value = mock.Mock(
access="fake_access", secret="fake_secret")
fake_ec2 = fakes.FakeEC2Client()
mock_boto.connect_ec2_endpoint.return_value = fake_ec2
self.assertNotIn("ec2", self.clients.cache)
with mock.patch.dict("sys.modules", {"boto": mock_boto}):
client = self.clients.ec2()
self.assertEqual(fake_ec2, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="ec2",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name)
kw = {
"url": "http://fake.to:1/fake",
"aws_access_key_id": "fake_access",
"aws_secret_access_key": "fake_secret",
"is_secure": self.credential.insecure,
}
mock_boto.connect_ec2_endpoint.assert_called_once_with(**kw)
self.assertEqual(fake_ec2, self.clients.cache["ec2"])
@mock.patch("rally.osclients.Keystone.create_client")
def test_services(self, mock_keystone_create_client):
available_services = {consts.ServiceType.IDENTITY: {},
consts.ServiceType.COMPUTE: {},
"some_service": {}}
mock_keystone_create_client.return_value = mock.Mock(
service_catalog=mock.Mock(
get_endpoints=lambda: available_services))
clients = osclients.Clients(self.credential)
self.assertEqual(
{consts.ServiceType.IDENTITY: consts.Service.KEYSTONE,
consts.ServiceType.COMPUTE: consts.Service.NOVA,
"some_service": "__unknown__"},
clients.services())
def test_murano(self):
fake_murano = fakes.FakeMuranoClient()
mock_murano = mock.Mock()
mock_murano.client.Client.return_value = fake_murano
self.assertNotIn("murano", self.clients.cache)
with mock.patch.dict("sys.modules", {"muranoclient": mock_murano}):
client = self.clients.murano()
self.assertEqual(fake_murano, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="application-catalog",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name
)
kw = {"endpoint": self.service_catalog.url_for.return_value,
"token": self.fake_keystone.auth_token}
mock_murano.client.Client.assert_called_once_with("1", **kw)
self.assertEqual(fake_murano, self.clients.cache["murano"])
@mock.patch("rally.osclients.Designate._get_session")
@ddt.data(
{},
{"version": "2"},
{"version": "1"},
{"version": None}
)
@ddt.unpack
def test_designate(self, mock_designate__get_session, version=None):
fake_designate = fakes.FakeDesignateClient()
mock_designate = mock.Mock()
mock_designate.client.Client.return_value = fake_designate
mock_designate__get_session.return_value = self.fake_keystone.session
self.assertNotIn("designate", self.clients.cache)
with mock.patch.dict("sys.modules",
{"designateclient": mock_designate}):
if version is not None:
client = self.clients.designate(version=version)
else:
client = self.clients.designate()
self.assertEqual(fake_designate, client)
self.service_catalog.url_for.assert_called_once_with(
service_type="dns",
endpoint_type=consts.EndpointType.PUBLIC,
region_name=self.credential.region_name
)
default = version or "1"
# Check that we append /v<version>
url = self.service_catalog.url_for.return_value
url.__iadd__.assert_called_once_with("/v%s" % default)
mock_designate__get_session.assert_called_once_with(
endpoint=url.__iadd__.return_value)
mock_designate.client.Client.assert_called_once_with(
default, session=self.fake_keystone.session)
key = "designate"
if version is not None:
key += "%s" % {"version": version}
self.assertEqual(fake_designate, self.clients.cache[key])
@mock.patch("rally.osclients.Cue._get_session")
def test_cue(self, mock_cue__get_session):
fake_cue = fakes.FakeCueClient()
mock_cue = mock.MagicMock()
mock_cue.client.Client = mock.MagicMock(return_value=fake_cue)
mock_cue__get_session.return_value = self.fake_keystone.session
self.assertNotIn("cue", self.clients.cache)
with mock.patch.dict("sys.modules", {"cueclient": mock_cue,
"cueclient.v1": mock_cue}):
client = self.clients.cue()
self.assertEqual(fake_cue, client)
mock_cue.client.Client.assert_called_once_with(
interface=consts.EndpointType.PUBLIC,
session=self.fake_keystone.session)
self.assertEqual(fake_cue, self.clients.cache["cue"])
| amit0701/rally | tests/unit/test_osclients.py | Python | apache-2.0 | 34,441 |
import signal, sys, ssl
from SimpleWebSocketServer import WebSocket, SimpleWebSocketServer, SimpleSSLWebSocketServer
from optparse import OptionParser
from thread import start_new_thread
import time
import websocket
import json
import codecs
import os
clients = []
class MessageBroker(WebSocket):
def __init__(self, server, sock, address):
self.subscriptions = []
self.grandAccess = False
self.wronLogonAttempts = 0
self._clientType = None
self._hardware = None
self._groupConfigFile = os.path.join(os.path.dirname(__file__), 'homeconfig.json')
return super(MessageBroker, self).__init__(server, sock, address)
def handleMessage(self):
print "Handle Message: "
print self.data
self.parseMessage(self.data)
def handleConnected(self):
print self.address, 'connected'
clients.append(self)
def handleClose(self):
clients.remove(self)
print self.address, 'closed'
# Can be used later to inform the UI if a hardware node is down
# or if a chat user is leaving.
for client in clients:
client.sendMessage(self.address[0] + u' - disconnected')
def parseMessage(self, data):
try:
item = json.loads(data)
if item:
self.parseJsonMessage(item)
        except Exception:
self.parseMessageAsString(data)
def parseMessageAsString(self, data):
# no action
pass
def readPagesConfig(self):
result = ""
try:
            # file() cannot decode UTF-8 itself, so read raw bytes and decode
            # with utf-8-sig below (this also strips a BOM if present)
input_file = file(self._groupConfigFile, "r")
result = json.loads(input_file.read().decode("utf-8-sig"))
except Exception, e:
print e
return result
def savePagesConfigFile(self, data):
"""Saves the config file."""
try:
if data:
prettyOutput = json.dumps(data, indent=4, separators=(',', ': '))
                f = open(self._groupConfigFile, 'w')
                f.write(prettyOutput)
                f.flush()
                f.close()
except Exception, e:
print e
pass
def envelopeMessage(self, messagetype, data):
result = {
"messagetype" : messagetype,
"data": data
}
return unicode(json.dumps(result, ensure_ascii=True))
def getPages(self, data):
if (self.grandAccess):
pages = self.readPagesConfig()
msg = self.envelopeMessage("PageList", pages)
self.sendMessage(msg)
else:
self.sendRequireLogon()
pass
def parseJsonMessage(self, message):
if message:
messagetype = message['messagetype']
if messagetype:
if messagetype == u'subscribe': #client subscribes a message type
self.subscribe(message)
elif messagetype == u'unsubscribe': #client unsubscribes from message type
self.unsubscribe(message)
elif messagetype == u'getPages': # client requests page info
self.getPages(message)
elif messagetype == u'pullupdates': # Client requests for hardware status update
self.sendRefreshBroadcast()
elif messagetype == u'logon': # Client tries to authenticate
self.logon(message)
elif messagetype == u'authHardware': # Hardware client tries to authenticate
self.logonHardware(message)
elif messagetype == u'nodeinfo': # Hardware node sends node infos
self.nodeInfo(message)
elif messagetype == u'getMappingInfo': # Browser client requests for mapping infos
self.sendMappingInfo()
elif messagetype == u'savePages':
self.savePages(message)
else:
# Sent to all except me
self.sentToAll(message)
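    # All traffic uses the same JSON envelope dispatched above; e.g. a
    # browser client would subscribe to a topic with (illustrative topic
    # name):
    #
    #     {"messagetype": "subscribe", "data": "TemperatureChanged"}
    #
    # and authenticate with the placeholder credentials accepted by
    # checkUserAccess() below:
    #
    #     {"messagetype": "logon",
    #      "data": {"username": "gerold", "password": "test"}}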
def savePages(self, message):
if self.grandAccess:
print "save pages!!!"
self.savePagesConfigFile(message["data"])
pass
def sendMappingInfo(self):
"""Send current hardware mapping data to client"""
mapping = []
for client in clients:
if client._clientType == "hardware":
mapping.append(client._hardware)
message = self.envelopeMessage("MappingInfo", mapping)
self.sendMessage(message)
pass
def nodeInfo(self, message):
"""Reads the node info from message."""
if message["data"]:
self._hardware = message["data"]
pass
def sendRefreshBroadcast(self):
if self.grandAccess:
for client in clients:
if client != self:
client.sendMessage(self.refreshMessageString())
pass
def refreshMessageString(self):
"""Create a refresh message json string."""
msg = {}
return self.envelopeMessage("Refresh", msg)
def sentToAll(self, message):
if self.grandAccess:
for client in clients:
if client != self:
if client.hasSubscribed(message["messagetype"]):
#client.sendMessageObjectAsJson(message)
client.sendMessage(self.data)
def hasSubscribed(self, messagetype):
# Implement check
hasJoker = "*" in self.subscriptions
result = (messagetype in self.subscriptions) | hasJoker
return result
def subscribe(self, message):
sub = message["data"]
if not sub in self.subscriptions:
self.subscriptions.append(sub)
print self.address, 'subscribed for: ', sub
def unsubscribe(self, message):
sub = message["data"]
if sub in self.subscriptions:
self.subscriptions.remove(sub)
print self.address, 'unsubscribed for: ', sub
def logon(self, message):
credentials = message["data"]
if (self.checkUserAccess(credentials) and self.wronLogonAttempts < 4):
self.grandAccess = True
self._clientType = "browser"
self.sendGrandAccess(True)
else:
self.grandAccess = False
self.wronLogonAttempts += 1
self.sendGrandAccess(False)
def logonHardware(self, message):
credentials = message["data"]
# Todo: implement security check
#
        # for now we don't check and just grant access - this is a security hole!!!
self.grandAccess = True
self._clientType = "hardware"
self.sendGrandAccess(True)
pass
#def sendMessageObjectAsJson(self, message):
# try:
# client.sendMessage(json.dumps(message))
# except Exception, e: print e
def checkUserAccess(self, credentials):
"""Check user credentials"""
        # just a placeholder check for now
        if (credentials["username"] == 'admin' or
                (credentials["username"] == "gerold" and
                 credentials["password"] == "test")):
            return True
        else:
            return False
def sendGrandAccess(self, success):
"""Send the logon result"""
msg = {"success" : success}
js = self.envelopeMessage("LogonResult", msg)
self.sendMessage(js)
def sendRequireLogon(self):
"""Force the user or node to logon"""
msg = {}
js = self.envelopeMessage("LogonRequired", msg)
self.sendMessage(js)
server = SimpleWebSocketServer('', 8000, MessageBroker)
def startWebSocketServer():
server.serveforever()
def startMessageBroker():
start_new_thread(startWebSocketServer, ())
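# Example usage (sketch; assumes the SimpleWebSocketServer package and a
# WebSocket client that speaks the JSON envelope used above):
# startMessageBroker() # broker now listens on ws://localhost:8000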
| huvermann/MyPiHomeAutomation | HomeAutomation/MessageBroker.py | Python | mit | 7,901 |
import logging
import numpy as np
import zmq
import osgeo.osr
import shapely.geometry
from . import send_array, recv_array
logging.basicConfig()
logger = logging.getLogger(__name__)
class Views(object):
# TODO: rewrite using config file per engine
@staticmethod
def grid(context):
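        """Fetch the model grid (xk, yk) over ZMQ and return it as a GeoJSON
        MultiPoint reprojected from the model's EPSG code to WGS84."""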
meta = context["value"]
# Get connection info
node = meta['node']
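        # NOTE: the node name from the metadata is immediately overridden
        # below, so the tracker always connects to localhost as written.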
node = 'localhost'
req_port = meta["ports"]["REQ"]
ctx = context["ctx"]
req = ctx.socket(zmq.REQ)
req.connect("tcp://%s:%s" % (node, req_port))
# Get grid variables
        send_array(req, metadata={"get_var": "xk"})
        xk, _ = recv_array(req)
        send_array(req, metadata={"get_var": "yk"})
        yk, _ = recv_array(req)
# Spatial transform
points = np.c_[xk, yk]
logger.info("points shape: %s, values: %s", points.shape, points)
src_srs = osgeo.osr.SpatialReference()
src_srs.ImportFromEPSG(meta["epsg"])
dst_srs = osgeo.osr.SpatialReference()
dst_srs.ImportFromEPSG(4326)
transform = osgeo.osr.CoordinateTransformation(src_srs, dst_srs)
wkt_points = transform.TransformPoints(points)
geom = shapely.geometry.MultiPoint(wkt_points)
geojson = shapely.geometry.mapping(geom)
return geojson
views = Views()
| openearth/mmi-python | mmi/tracker_views.py | Python | gpl-3.0 | 1,352 |
"""Everything that has to do with authentication."""
from flask import abort, g, request, session
from flask.ext.login import UserMixin, current_user
from flask.ext.restful import abort as abort_rest
from functools import wraps
from itsdangerous import (
TimedJSONWebSignatureSerializer as Serializer, SignatureExpired,
BadSignature, URLSafeSerializer)
from passlib.hash import bcrypt
from app import APP, BASIC_AUTH, ct_connect
class CTUser(UserMixin):
"""Handles churchtools users and passwords for authentication."""
def __init__(self, uid=None, password=None, active=True):
self.id = uid
self.active = active
def get_user(self):
"""Try to get churchtools user and return self object if
everything is alright.
"""
try:
with ct_connect.session_scope() as ct_session:
user = ct_connect.get_person(ct_session, self.id)
if user:
self.persons = self.get_persons(user)
return self
else:
return None
        except Exception:
return None
@staticmethod
def get_persons(user):
"""Create a dict with all person data that matches the logged in
email adress.
"""
person_list = []
for person in user:
person_list.append({
'email': person.email,
'password': person.password,
'id': person.id,
'vorname': person.vorname,
'name': person.name,
'active': False
})
return person_list
@staticmethod
def verify_auth_token(token):
"""Verify authentication token and return None if signature is
too old or bad. If not return user.
"""
s = Serializer(APP.config['SECRET_KEY'])
try:
data = s.loads(token)
except SignatureExpired:
# if token is valid but expired
return None
except BadSignature:
# invalid token
return None
user = CTUser(uid=data['id'])
return user.get_user()
def generate_auth_token(user, expiration=600):
"""Returns a authentication token."""
s = Serializer(APP.config['SECRET_KEY'], expires_in=expiration)
return s.dumps({'id': user['email']})
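# Example (sketch, hypothetical address): generate_auth_token(
# {'email': '[email protected]'}) returns a signed token embedding the email;
# it expires after `expiration` seconds (600 by default).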
def get_valid_users(user, password):
"""Creates a list of valid users from user object and given password."""
return [person for person in user.persons if person['password']
if bcrypt.verify(password, person['password'])]
@BASIC_AUTH.verify_password
def verify_password(email_or_token, password):
"""Basic auth used for rest api."""
# check if its a token and if its right
user = CTUser.verify_auth_token(email_or_token)
valid_user = None
if not user:
user_obj = CTUser(uid=email_or_token, password=password)
user = user_obj.get_user()
        # if it can't even create a valid_user list, return False
        try:
            valid_user = get_valid_users(user, password)
        except Exception:
            return False
if not valid_user or \
not user.is_active():
return False
    # Return the first valid user in the list. Usually there should be only
    # one, but churchtools strangely allows two persons to share the same
    # email address and password, so there can be two or more valid users.
    # A REST API cannot switch between several valid users, so in that case
    # it is always the first one in the list.
    # If the user was authenticated through a token, there is no valid_user
    # list yet.
if not valid_user:
valid_user = [person for person in user.persons]
g.user = valid_user[0]
return True
def active_user():
"""Return the active user out of user session."""
return [user for user in session['user'] if user['active']][0]
def generate_feed_auth(user):
"""Return a authentication token for feed authentication."""
s = URLSafeSerializer(APP.config['SECRET_KEY'],
salt=APP.config['FEED_SALT'])
return s.dumps({'id': user['email']})
def is_basic_authorized():
"""Returns False if is not basic authorized."""
auth = request.authorization
if not auth:
return False
return verify_password(auth['username'], auth['password'])
def own_group(func):
"""A decorator that aborts if its not the own group."""
@wraps(func)
def decorated_function(*args, **kwargs):
with ct_connect.session_scope() as ct_session:
group = ct_connect.get_group(ct_session, kwargs['id'])
if group is not None:
heads = ct_connect.get_group_heads(ct_session, kwargs['id'])
if '/api/' in request.path:
is_head = any(head.email == g.user['email']
for head in heads)
if not is_head:
abort_rest(401)
else:
is_head = any(head.email == current_user.get_id()
for head in heads)
if not is_head:
abort(401)
else:
if '/api/' in request.path:
abort_rest(404)
else:
abort(404)
return func(*args, **kwargs)
return decorated_function
def prayer_owner(func):
"""A decorator that aborts the view if its not the prayer owner."""
@wraps(func)
def decorated_function(*args, **kwargs):
        # imported here to avoid a circular import problem
from models import get_prayer
# getting prayer
prayer = get_prayer(kwargs['id'])
# just do this if a prayer with that id exists
if prayer is not None:
if '/api/' in request.path:
if prayer.user_id != g.user['id']:
abort_rest(401)
else:
if prayer.user_id != [user['id'] for user in session['user']
if user['active']][0]:
abort(401)
        # if there isn't a prayer, abort with a 404
else:
if '/api/' in request.path:
abort_rest(404)
else:
abort(404)
return func(*args, **kwargs)
return decorated_function
def own_profile(func):
"""A decorator that aborts if its not the logged in users profile."""
@wraps(func)
def decorated_function(*args, **kwargs):
with ct_connect.session_scope() as ct_session:
person = ct_connect.get_person_from_id(ct_session, kwargs['id'])
if person is not None:
if '/api/' in request.path:
if kwargs['id'] != g.user['id']:
abort_rest(401)
else:
if kwargs['id'] != [user['id']
for user in session['user']
if user['active']][0]:
abort(401)
else:
if '/api/' in request.path:
abort_rest(404)
else:
abort(404)
return func(*args, **kwargs)
return decorated_function
def valid_groups_and_users(users=None, groups=None):
"""Decorator to limit access a view to a list of users ids or members
of a list of groups.
"""
def decorator(func):
@wraps(func)
def decorated_function(*args, **kwargs):
# list of valid user ids
valid_users = []
# add user ids to the valid_user list
if users is not None:
valid_users.extend(users)
if groups is not None:
for group in groups:
with ct_connect.session_scope() as ct_session:
# create a list of ids of group members
group_users = []
for user in ct_connect.get_group_members(ct_session,
group):
group_users.append(user.id)
# add valid group members ids to valid_users list
valid_users.extend(group_users)
# do the checking
if '/api/' in request.path:
if g.user['id'] in valid_users:
return func(*args, **kwargs)
else:
abort_rest(401)
else:
if active_user()['id'] in valid_users:
return func(*args, **kwargs)
else:
abort(401)
return decorated_function
return decorator
def feed_authorized(func):
"""Decorator to limit access to the feeds."""
@wraps(func)
def decorated_function(*args, **kwargs):
s = URLSafeSerializer(APP.config['SECRET_KEY'],
salt=APP.config['FEED_SALT'])
try:
s.loads(request.args.get('token'))
        except Exception:
abort(401)
return func(*args, **kwargs)
return decorated_function
| ecclesianuernberg/genesis | app/auth.py | Python | mit | 9,309 |
# -*- coding: utf-8 -*-
# Ensure backwards compatibility with Python 2
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals)
from builtins import *
def {{ cookiecutter.cli_entry_point_function_name }}():
""" Command-line entry point for {{ cookiecutter.repo_name }} """
print('{{ cookiecutter.repo_name }} placeholder CLI entry point')
def {{ cookiecutter.gui_entry_point_function_name }}():
""" GUI entry point for {{ cookiecutter.repo_name }} """
print('{{ cookiecutter.repo_name }} placeholder GUI entry point')
if __name__ == '__main__':
{{ cookiecutter.cli_entry_point_function_name }}()
| DC23/cookiecutter-dcpypackage | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}.py | Python | mit | 662 |
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Exposes the TRT conversion for Windows platform."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import platform
from tensorflow.python.util.tf_export import tf_export
if platform.system() != "Windows":
raise RuntimeError(
"This module is expected to be loaded only on Windows platform.")
class TrtPrecisionMode(object):
FP32 = "FP32"
FP16 = "FP16"
INT8 = "INT8"
# Use a large enough number as the default max_workspace_size for TRT engines,
# so it can produce reasonable performance results with the default.
DEFAULT_TRT_MAX_WORKSPACE_SIZE_BYTES = 1 << 30
@tf_export("experimental.tensorrt.ConversionParams", v1=[])
class TrtConversionParams(object):
"""A class to encapsulate parameters that are used for TF-TRT conversion."""
def __init__(self,
rewriter_config_template=None,
max_workspace_size_bytes=DEFAULT_TRT_MAX_WORKSPACE_SIZE_BYTES,
precision_mode=TrtPrecisionMode.FP32,
minimum_segment_size=3,
is_dynamic_op=True,
maximum_cached_engines=1,
use_calibration=True,
max_batch_size=1):
"""Initialize TrtConversionParams.
Args:
rewriter_config_template: a template RewriterConfig proto used to create a
TRT-enabled RewriterConfig. If None, it will use a default one.
max_workspace_size_bytes: the maximum GPU temporary memory which the TRT
engine can use at execution time. This corresponds to the
'workspaceSize' parameter of nvinfer1::IBuilder::setMaxWorkspaceSize().
precision_mode: one of TrtPrecisionMode.supported_precision_modes().
minimum_segment_size: the minimum number of nodes required for a subgraph
to be replaced by TRTEngineOp.
      is_dynamic_op: whether to generate dynamic TRT ops which will build the
        TRT network and engine at run time. TensorRT versions < 6.0 do not
        support dynamic dimensions other than the batch dimension, so this
        option must be enabled when the TensorFlow graph has a non-batch
        dimension of dynamic size. This option should be set to True in TF 2.0.
maximum_cached_engines: max number of cached TRT engines for dynamic TRT
ops. Created TRT engines for a dynamic dimension are cached. This is the
maximum number of engines that can be cached. If the number of cached
engines is already at max but none of them supports the input shapes,
the TRTEngineOp will fall back to run the original TF subgraph that
corresponds to the TRTEngineOp.
use_calibration: this argument is ignored if precision_mode is not INT8.
If set to True, a calibration graph will be created to calibrate the
missing ranges. The calibration graph must be converted to an inference
graph by running calibration with calibrate(). If set to False,
quantization nodes will be expected for every tensor in the graph
        (excluding those which will be fused). If a range is missing, an error
will occur. Please note that accuracy may be negatively affected if
there is a mismatch between which tensors TRT quantizes and which
tensors were trained with fake quantization.
max_batch_size: max size for the input batch. This parameter is only
effective when is_dynamic_op=False which is not supported in TF 2.0.
Raises:
NotImplementedError: TRT is not supported on Windows.
"""
raise NotImplementedError(
"TensorRT integration is not available on Windows.")
@tf_export("experimental.tensorrt.Converter", v1=[])
class TrtConverterWindows(object):
"""An offline converter for TF-TRT transformation for TF 2.0 SavedModels.
  Currently this is not available on the Windows platform.
"""
def __init__(self,
input_saved_model_dir=None,
input_saved_model_tags=None,
input_saved_model_signature_key=None,
conversion_params=None):
"""Initialize the converter.
Args:
input_saved_model_dir: the directory to load the SavedModel which contains
        the input graph to transform. Used only when input_graph_def is None.
input_saved_model_tags: list of tags to load the SavedModel.
input_saved_model_signature_key: the key of the signature to optimize the
graph for.
conversion_params: a TrtConversionParams instance.
Raises:
NotImplementedError: TRT is not supported on Windows.
"""
raise NotImplementedError(
"TensorRT integration is not available on Windows.")
| jhseu/tensorflow | tensorflow/python/compiler/tensorrt/trt_convert_windows.py | Python | apache-2.0 | 5,368 |
"""
$Id: __init__.py,v 1.2 2005/02/26 17:56:10 sidnei Exp $
"""
from zope.interface import Interface
class IAddProject(Interface):
"""Marker interface that indicates OpenProjects can be added.
"""
class IAddSubProject(IAddProject):
"""I add sub-projects"""
class IAmAPeopleFolder(Interface):
"""Marker interface that indicates that this folder contains people"""
class IAmANewsFolder(Interface):
"""Marker interface that indicates that this is the OpenPlans news folder"""
| socialplanning/opencore | opencore/interfaces/adding.py | Python | gpl-3.0 | 497 |
"""Config flow to configure the LCN integration."""
import logging
import pypck
from homeassistant import config_entries
from homeassistant.const import (
CONF_HOST,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN
_LOGGER = logging.getLogger(__name__)
def get_config_entry(hass, data):
"""Check config entries for already configured entries based on the ip address/port."""
return next(
(
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.data[CONF_IP_ADDRESS] == data[CONF_IP_ADDRESS]
and entry.data[CONF_PORT] == data[CONF_PORT]
),
None,
)
async def validate_connection(host_name, data):
"""Validate if a connection to LCN can be established."""
host = data[CONF_IP_ADDRESS]
port = data[CONF_PORT]
username = data[CONF_USERNAME]
password = data[CONF_PASSWORD]
sk_num_tries = data[CONF_SK_NUM_TRIES]
dim_mode = data[CONF_DIM_MODE]
settings = {
"SK_NUM_TRIES": sk_num_tries,
"DIM_MODE": pypck.lcn_defs.OutputPortDimMode[dim_mode],
}
_LOGGER.debug("Validating connection parameters to PCHK host '%s'", host_name)
connection = pypck.connection.PchkConnectionManager(
host, port, username, password, settings=settings
)
await connection.async_connect(timeout=5)
_LOGGER.debug("LCN connection validated")
await connection.async_close()
return data
class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a LCN config flow."""
VERSION = 1
async def async_step_import(self, data):
"""Import existing configuration from LCN."""
host_name = data[CONF_HOST]
# validate the imported connection parameters
try:
await validate_connection(host_name, data)
except pypck.connection.PchkAuthenticationError:
_LOGGER.warning('Authentication on PCHK "%s" failed', host_name)
return self.async_abort(reason="authentication_error")
except pypck.connection.PchkLicenseError:
_LOGGER.warning(
'Maximum number of connections on PCHK "%s" was '
"reached. An additional license key is required",
host_name,
)
return self.async_abort(reason="license_error")
except TimeoutError:
_LOGGER.warning('Connection to PCHK "%s" failed', host_name)
return self.async_abort(reason="connection_timeout")
# check if we already have a host with the same address configured
entry = get_config_entry(self.hass, data)
if entry:
entry.source = config_entries.SOURCE_IMPORT
self.hass.config_entries.async_update_entry(entry, data=data)
return self.async_abort(reason="existing_configuration_updated")
return self.async_create_entry(title=f"{host_name}", data=data)
| kennedyshead/home-assistant | homeassistant/components/lcn/config_flow.py | Python | apache-2.0 | 3,022 |
#!/usr/bin/python3
import argparse
OUTPUT_FILENAME = 'SerializationMacros.hpp'
### Boilerplate Code Templates ###
pragma_once = '#pragma once\n'
include_header = '#include {path}\n'
serialize_begin = '#define DEFAULT_SERIALIZE{count}({args_list}) std::size_t to_bytes(char* ret) const {{ \\\n'
to_bytes_first_line = ' int bytes_written = mutils::to_bytes(a,ret); \\\n'
to_bytes_middle_line = ' bytes_written += mutils::to_bytes({field},ret + bytes_written); \\\n'
to_bytes_return_line = ' return bytes_written + mutils::to_bytes({field},ret + bytes_written); \\\n'
to_bytes_one_field_return = ' return mutils::to_bytes(a, ret); \\\n'
closing_brace = ' } \\\n' # Not a format string template, so curly brace doesn't need to be doubled
bytes_size_begin = ' std::size_t bytes_size() const { \\\n'
bytes_size_line_begin = ' return'
bytes_size_line_part = ' mutils::bytes_size({field}) '
bytes_size_line_end = '; \\\n'
post_object_begin = ' void post_object(const std::function<void (char const * const, std::size_t)>& func ) const { \\\n'
post_object_line = ' mutils::post_object(func,{field}); \\\n'
post_object_end = ' } \n\n' # Ends both post_object and the macro definition
deserialize_begin = '#define DEFAULT_DESERIALIZE{count}(Name,{args_list}) \\\n'
from_bytes_begin = (' template<typename... ctxs> static std::unique_ptr<Name> from_bytes(mutils::DeserializationManager<ctxs...>* dsm, char const * buf){ \\\n'
' auto a_obj = mutils::from_bytes<std::decay_t<decltype(a)> >(dsm, buf); \\\n')
declare_bytes_read = ' std::size_t bytes_read = mutils::bytes_size(*a_obj); \\\n'
from_bytes_mid_field = (' auto {field}_obj = mutils::from_bytes<std::decay_t<decltype({field})> >(dsm, buf + bytes_read); \\\n'
' bytes_read += mutils::bytes_size(*{field}_obj); \\\n')
from_bytes_last_field = (' auto {field}_obj = mutils::from_bytes<std::decay_t<decltype({field})> >(dsm, buf + bytes_read); \\\n'
' return std::make_unique<Name>({obj_ptrs_list}, '
'*(mutils::from_bytes<std::decay_t<decltype({last_field})> >(dsm, buf + bytes_read + mutils::bytes_size(*{field}_obj)))); \\\n')
from_bytes_one_field_return = ' return std::make_unique<Name>(*a_obj); \\\n'
from_bytes_two_fields_return = ' return std::make_unique<Name>(*a_obj, *(mutils::from_bytes<std::decay_t<decltype(b)> >(dsm, buf + mutils::bytes_size(*a_obj)))); \\\n'
from_bytes_end = ' } \n\n' # Ends both from_bytes and the macro definition
### Comment block that goes at the top of the file ###
header_comments = """
/**
* This is an automatically-generated file that implements default serialization
* support with a series of macros. Do not edit this file by hand; you should
* generate it with generate_macros.py. The public interface is at the bottom of
* the file.
*/
"""
### Complete snippet of code that goes at the end of the file ###
file_footer = r"""
#define DEFAULT_SERIALIZE_IMPL2(count, ...) DEFAULT_SERIALIZE ## count (__VA_ARGS__)
#define DEFAULT_SERIALIZE_IMPL(count, ...) DEFAULT_SERIALIZE_IMPL2(count, __VA_ARGS__)
#define DEFAULT_SERIALIZE(...) DEFAULT_SERIALIZE_IMPL(VA_NARGS(__VA_ARGS__), __VA_ARGS__)
#define DEFAULT_DESERIALIZE_IMPL2(count, ...) DEFAULT_DESERIALIZE ## count (__VA_ARGS__)
#define DEFAULT_DESERIALIZE_IMPL(count, ...) DEFAULT_DESERIALIZE_IMPL2(count, __VA_ARGS__)
#define DEFAULT_DESERIALIZE(...) DEFAULT_DESERIALIZE_IMPL(VA_NARGS(__VA_ARGS__), __VA_ARGS__)
/**
* THIS (below) is the only user-facing macro in this file.
* It's for automatically generating basic serialization support.
* plop this macro inside the body of a class which extends
* ByteRepresentable, providing the name of the class (that you plopped this into)
* as the first argument and the name of the class's fields as the remaining arguments.
* Right now we only support up to seven fields; adding more support is easy, just ask if
* you need.
*
* MAJOR CAVEAT: This macro assumes that there is a constructor
* which takes all the class members (in the order listed).
* it's fine if this is a private constructor, but it needs to exist.
*
*/
#define DEFAULT_SERIALIZATION_SUPPORT(CLASS_NAME,CLASS_MEMBERS...) \
DEFAULT_SERIALIZE(CLASS_MEMBERS) DEFAULT_DESERIALIZE(CLASS_NAME,CLASS_MEMBERS) \
template<typename... ctxs> void ensure_registered(mutils::DeserializationManager<ctxs...>&){}
"""
argparser = argparse.ArgumentParser(description='Generate ' + OUTPUT_FILENAME + \
' with support for the specified number of fields.')
argparser.add_argument('num_fields', metavar='N', type=int, help='The maximum number '
'of serialized fields that the serialization macros should support (the '
'larger the number, the more macros will be generated)')
args = argparser.parse_args()
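# Example invocation (sketch): `python generate_macros.py 7` writes
# DEFAULT_SERIALIZE1..7 and DEFAULT_DESERIALIZE2..8 to SerializationMacros.hpp.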
with open(OUTPUT_FILENAME, 'w') as output:
output.write(pragma_once)
output.write(include_header.format(path='"../mutils/macro_utils.hpp"'))
output.write(header_comments)
# First, generate the serializers
for curr_num_fields in range(1,args.num_fields+1):
field_vars = [chr(i) for i in range(ord('a'), ord('a')+curr_num_fields)]
output.write(serialize_begin.format(count=curr_num_fields,
args_list=','.join(field_vars)))
# Write the "to_bytes" block
if curr_num_fields > 1:
output.write(to_bytes_first_line)
for middle_line_count in range(1,curr_num_fields - 1):
output.write(to_bytes_middle_line.format(field=field_vars[middle_line_count]))
output.write(to_bytes_return_line.format(field=field_vars[-1]))
else:
# Special case for DEFAULT_SERIALIZE1
output.write(to_bytes_one_field_return)
output.write(closing_brace)
# Write the "bytes_size" block
output.write(bytes_size_begin)
output.write(bytes_size_line_begin)
output.write('+'.join([bytes_size_line_part.format(field=field_vars[fieldnum])
for fieldnum in range(curr_num_fields)]))
output.write(bytes_size_line_end)
output.write(closing_brace)
# Write the "post_object" block
output.write(post_object_begin)
for fieldnum in range(curr_num_fields):
output.write(post_object_line.format(field=field_vars[fieldnum]))
output.write(post_object_end)
# Second, generate the deserializers
for curr_num_fields in range(1, args.num_fields + 1):
field_vars = [chr(i) for i in range(ord('a'), ord('a')+curr_num_fields)]
output.write(deserialize_begin.format(count=curr_num_fields+1,
args_list=','.join(field_vars)))
output.write(from_bytes_begin)
if curr_num_fields == 1:
output.write(from_bytes_one_field_return)
elif curr_num_fields == 2:
output.write(from_bytes_two_fields_return)
else:
output.write(declare_bytes_read)
for fieldnum in range(1, curr_num_fields - 2):
output.write(from_bytes_mid_field.format(field=field_vars[fieldnum]))
output.write(from_bytes_last_field.format(field=field_vars[curr_num_fields-2],
last_field=field_vars[-1],
obj_ptrs_list=','.join(['*' + var + '_obj' for var in field_vars[:-1]])))
output.write(from_bytes_end)
output.write(file_footer)
| mpmilano/mutils-serialization | generate_macros.py | Python | lgpl-3.0 | 7,501 |
import os
import sys
import codecs
from django.core.management import call_command
from django.core.management.base import CommandError
from onadata.apps.main.tests.test_base import TestBase
from onadata.apps.logger.models.xform import XForm
from onadata.libs.utils.logger_tools import report_exception
class TestPublishXLS(TestBase):
def test_publish_xls(self):
xls_file_path = os.path.join(
self.this_directory, "fixtures",
"transportation", "transportation.xls")
count = XForm.objects.count()
call_command('publish_xls', xls_file_path, self.user.username)
self.assertEqual(XForm.objects.count(), count + 1)
def test_publish_xls_replacement(self):
count = XForm.objects.count()
xls_file_path = os.path.join(
self.this_directory, "fixtures",
"transportation", "transportation.xls")
call_command('publish_xls', xls_file_path, self.user.username)
self.assertEqual(XForm.objects.count(), count + 1)
count = XForm.objects.count()
xls_file_path = os.path.join(
self.this_directory, "fixtures",
"transportation", "transportation_updated.xls")
# call command without replace param
with self.assertRaises(CommandError):
call_command('publish_xls', xls_file_path, self.user.username)
# now we call the command with the replace param
call_command(
'publish_xls', xls_file_path, self.user.username, replace=True)
# count should remain the same
self.assertEqual(XForm.objects.count(), count)
# check if the extra field has been added
self.xform = XForm.objects.order_by('id').reverse()[0]
data_dictionary = self.xform.data_dictionary()
is_updated_form = len([e.name for e in data_dictionary.survey_elements
if e.name == u'preferred_means']) > 0
self.assertTrue(is_updated_form)
def test_line_break_in_variables(self):
xls_file_path = os.path.join(
self.this_directory, "fixtures", 'exp_line_break.xlsx')
xml_file_path = os.path.join(
self.this_directory, "fixtures", 'exp_line_break.xml')
test_xml_file_path = os.path.join(
self.this_directory, "fixtures", 'test_exp_line_break.xml')
self._publish_xls_file(xls_file_path)
xforms = XForm.objects.filter(id_string='exp_line_break')
self.assertTrue(xforms.count() > 0)
xform = xforms[0]
xform.xml = xform.xml.replace(
xform.uuid, '663123a849e54bffa8f9832ef016bfac')
xform.save()
f = codecs.open(test_xml_file_path, 'w', encoding="utf-8")
f.write(xform.xml)
f.close()
with codecs.open(
xml_file_path, 'rb', encoding="utf-8") as expected_file:
with codecs.open(
test_xml_file_path, 'rb', encoding="utf-8") as actual_file:
self.assertMultiLineEqual(
expected_file.read(), actual_file.read())
os.remove(test_xml_file_path)
def test_report_exception_with_exc_info(self):
e = Exception("A test exception")
try:
raise e
except Exception as e:
exc_info = sys.exc_info()
try:
report_exception(subject="Test report exception", info=e,
exc_info=exc_info)
except Exception as e:
raise AssertionError("%s" % e)
def test_report_exception_without_exc_info(self):
e = Exception("A test exception")
try:
report_exception(subject="Test report exception", info=e)
except Exception as e:
raise AssertionError("%s" % e)
| eHealthAfrica/onadata | onadata/apps/logger/tests/test_publish_xls.py | Python | bsd-2-clause | 3,788 |
#!/usr/bin/python
# pieces.py
# fishlamp-install
#
# Created by Mike Fullerton on 8/3/13.
#
# begin boilerplate
import sys
import os
scriptName = os.path.basename(sys.argv[0])
scriptPath = os.path.dirname(sys.argv[0])
sharedPath = os.path.join(scriptPath, "../../shared/")
sys.path.append(os.path.abspath(sharedPath))
import Scripts
import Pieces
import GitHelper
#end boilerplate
class Script(Scripts.Script):
    def helpString(self):
        return "finds all installed Utils pieces"
    def addPiece(self, name):
        Pieces.addPiece(name)
    def listAll(self):
        pieces = Pieces.allPieces()
        if pieces and len(pieces):
            print ""
            for piece in pieces:
                piece.printSelf()
            print ""
        else:
            print "# No pieces installed"
    def run(self):
        if self.hasParameter("add"):
            self.addPiece(self.parameterAtIndex(2, "expected piece name"))
        elif self.hasParameter("init"):
            Pieces.createFishLampFolderIfNeeded()
        else:
            self.listAll()
Script().run()
| mikefullerton/Piecemeal-Scripts | Scripts/pieces/commands/pieces.py | Python | mit | 1,105 |
#
# See top-level LICENSE.rst file for Copyright information
#
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from collections import OrderedDict
from ..defs import (task_name_sep, task_state_to_int, task_int_to_state)
from ...util import option_list
from ...io import findfile
from .base import (BaseTask, task_classes)
from desiutil.log import get_logger
import sys, re, os, copy
# NOTE: only one class in this file should have a name that starts with "Task".
class TaskStarFit(BaseTask):
"""Class containing the properties of one extraction task.
"""
def __init__(self):
super(TaskStarFit, self).__init__()
# then put int the specifics of this class
# _cols must have a state
self._type = "starfit"
self._cols = [
"night",
"spec",
"expid",
"state"
]
self._coltypes = [
"integer",
"integer",
"integer",
"integer"
]
# _name_fields must also be in _cols
self._name_fields = ["night","spec","expid"]
self._name_formats = ["08d","d","08d"]
def _paths(self, name):
"""See BaseTask.paths.
"""
props = self.name_split(name)
return [ findfile("stdstars", night=props["night"], expid=props["expid"],
groupname=None, nside=None, camera=None, band=None,
spectrograph=props["spec"]) ]
def _deps(self, name, db, inputs):
"""See BaseTask.deps.
"""
from .base import task_classes
props = self.name_split(name)
# we need all the cameras for the fit of standard stars
deptasks = dict()
for band in ["b","r","z"] :
props_and_band = props.copy()
props_and_band["band"] = band
deptasks[band+"-frame"]=task_classes["extract"].name_join(props_and_band)
deptasks[band+"-fiberflat"]=task_classes["fiberflatnight"].name_join(props_and_band)
deptasks[band+"-sky"]=task_classes["sky"].name_join(props_and_band)
return deptasks
def _run_max_procs(self):
# This is a serial task.
return 1
def _run_time(self, name, procs, db):
# Run time on one proc on machine with scale factor == 1.0
return 35.0
def _run_max_mem_proc(self, name, db):
# Per-process memory requirements
return 5.0
def _run_defaults(self):
"""See BaseTask.run_defaults.
"""
import glob
log = get_logger()
opts = {}
starmodels = None
if "DESI_BASIS_TEMPLATES" in os.environ:
filenames = sorted(glob.glob(os.environ["DESI_BASIS_TEMPLATES"]+"/stdstar_templates_*.fits"))
if len(filenames) > 0 :
starmodels = filenames[-1]
else:
filenames = sorted(glob.glob(os.environ["DESI_BASIS_TEMPLATES"]+"/star_templates_*.fits"))
log.warning('Unable to find stdstar templates in {}; using star templates instead'.format(
os.getenv('DESI_BASIS_TEMPLATES')))
if len(filenames) > 0 :
starmodels = filenames[-1]
else:
msg = 'Unable to find stdstar or star templates in {}'.format(
os.getenv('DESI_BASIS_TEMPLATES'))
log.error(msg)
raise RuntimeError(msg)
else:
log.error("DESI_BASIS_TEMPLATES not set!")
raise RuntimeError("could not find the stellar templates")
opts["starmodels"] = starmodels
opts["delta-color"] = 0.2
opts["color"] = "G-R"
return opts
def _option_list(self, name, opts):
"""Build the full list of options.
This includes appending the filenames and incorporating runtime
options.
"""
from .base import task_classes, task_type
log = get_logger()
deps = self.deps(name)
options = {}
### options["ncpu"] = 1
options["outfile"] = self.paths(name)[0]
options["frames"] = []
options["skymodels"] = []
options["fiberflats"] = []
# frames skymodels fiberflats
props = self.name_split(name)
for band in ["b", "r", "z"] :
props_and_band = props.copy()
props_and_band["band"] = band
task = task_classes["extract"].name_join(props_and_band)
frame_filename = task_classes["extract"].paths(task)[0]
task = task_classes["fiberflatnight"].name_join(props_and_band)
fiberflat_filename = task_classes["fiberflatnight"].paths(task)[0]
task = task_classes["sky"].name_join(props_and_band)
sky_filename = task_classes["sky"].paths(task)[0]
# check all files exist
if os.path.isfile(frame_filename) \
and os.path.isfile(fiberflat_filename) \
and os.path.isfile(sky_filename) :
options["frames"].append(frame_filename)
options["skymodels"].append(sky_filename)
options["fiberflats"].append(fiberflat_filename)
else :
log.warning("missing band {} for {}".format(band,name))
options.update(opts)
return option_list(options)
def _run_cli(self, name, opts, procs, db):
"""See BaseTask.run_cli.
"""
entry = "desi_fit_stdstars"
optlist = self._option_list(name, opts)
com = "{} {}".format(entry, " ".join(optlist))
return com
def _run(self, name, opts, comm, db):
"""See BaseTask.run.
"""
from ...scripts import stdstars
optlist = self._option_list(name, opts)
args = stdstars.parse(optlist)
stdstars.main(args)
return
def postprocessing(self, db, name, cur):
"""For successful runs, postprocessing on DB"""
# run getready on all fierflatnight with same night,band,spec
props = self.name_split(name)
log = get_logger()
tt="fluxcalib"
cmd = "select name from {} where night={} and expid={} and spec={} and state=0".format(tt,props["night"],props["expid"],props["spec"])
cur.execute(cmd)
tasks = [ x for (x,) in cur.fetchall() ]
log.debug("checking {}".format(tasks))
for task in tasks :
task_classes[tt].getready( db=db,name=task,cur=cur)
| desihub/desispec | py/desispec/pipeline/tasks/starfit.py | Python | bsd-3-clause | 6,596 |
# Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import sys
import logging
import unittest
import itertools
import random
from dashboard.common import clustering_change_detector as ccd
class ChangeDetectorTest(unittest.TestCase):
def setUp(self):
self.logger = logging.getLogger()
self.logger.level = logging.DEBUG
self.stream_handler = logging.StreamHandler(sys.stdout)
self.logger.addHandler(self.stream_handler)
self.addCleanup(self.logger.removeHandler, self.stream_handler)
self.rand = random.Random(x=1)
def testClusterPartitioning(self):
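    # Cluster(sequence, i) partitions the sequence at index i: the left part
    # is sequence[:i] and the right part is sequence[i:].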
a, b = ccd.Cluster([1, 2, 3], 1)
self.assertEqual(a, [1])
self.assertEqual(b, [2, 3])
def testMidpoint_Long(self):
self.assertEqual(1, ccd.Midpoint([0, 0, 0]))
def testMidpoint_Short(self):
self.assertEqual(0, ccd.Midpoint([0, 0]))
def testMidpoint_LongEven(self):
self.assertEqual(1, ccd.Midpoint([0, 0, 0, 0]))
def testClusterAndCompare(self):
# We want to see that we can detect a contrived change point.
sequence = ([1] * 10) + ([2] * 10)
comparison, a, b = ccd.ClusterAndCompare(sequence, 9)
self.assertEqual(comparison.result, 'different')
self.assertEqual(len(a), 9)
self.assertEqual(len(b), 11)
def testClusterAndFindSplit_Simple(self):
# This tests that we can find a change point in a contrived scenario.
sequence = ([1] * 10) + ([10] * 10)
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
self.assertIn((10, (10, 10)), splits)
def testClusterAndFindSplit_Steps(self):
# We actually can find the first step very well.
sequence = ([1] * 10) + ([2] * 10) + ([1] * 10)
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
self.assertIn((10, (10, 10)), splits)
def testClusterAndFindSplit_Spikes(self):
    # E-divisive can identify spikes very well, but they won't pass the
    # permutation test because a spike is not significant enough to be
    # identified as a distribution change.
sequence = ([1] * 15) + [500, 1000, 500] + ([1] * 15)
self.assertRaises(
ccd.InsufficientData,
lambda: ccd.ClusterAndFindSplit(sequence, self.rand),
)
def testClusterAndFindSplit_Slope(self):
    # Unlike an isolated spike, a sustained level change is significant
    # enough to pass the permutation test, so a split is detected here.
sequence = ([1] * 15) + [800] + ([1000] * 20)
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
self.assertIn((15, (15, 16)), splits)
def testClusterAndFindSplit_SpikeAndLevelChange(self):
# We actually can identify the spike, the drop, and the level change.
sequence = ([1] * 50) + [1000] * 10 + [1] * 50 + ([500] * 50)
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
logging.debug('Splits = %s', splits)
self.assertEqual(
[(50, (48, 50)), (60, (60, 60)), (110, (108, 113))], splits)
def testClusterAndFindSplit_Windowing(self):
# We contrive a case where we'd like to find change points by doing a
# sliding window over steps, and finding each step point.
master_sequence = ([1] * 100) + ([10] * 100) + ([1] * 100)
def SlidingWindow(sequence, window_size, step):
for i in itertools.count(0, step):
if i + window_size > len(sequence):
return
yield sequence[i:i + window_size]
collected_indices = set()
for index_offset, sequence in enumerate(
SlidingWindow(master_sequence, 50, 10)):
try:
split_index = (index_offset * 10) + max(
idx for idx, _ in ccd.ClusterAndFindSplit(sequence, self.rand))
collected_indices.add(split_index)
except ccd.InsufficientData:
continue
self.assertEqual(collected_indices, {100, 200})
def testClusterAndFindSplit_MinSegmentSizeZero(self):
sequence = ([1] * 10 + [2] * 10)
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
logging.debug('Splits = %s', splits)
self.assertEqual([(10, (10, 10))], splits)
def testClusterAndFindSplit_N_Pattern(self):
# In this test case we're ensuring that permutation testing is finding the
# local mimima for a sub-segment. We're introducing randomness here but
# seeding well-known inflection points to make it clear that we're able to
# see those inflection points.
sequence = (
# First we have a sequence of numbers in [100..200] with mode = 150.
[random.triangular(100, 200, 150) for _ in range(49)]
# Then we see our first inflection point.
+ [300]
# Then we have a sequence of numbers in [300..350] with mode = 325.
+ [random.triangular(300, 350, 325) for _ in range(49)]
# Then we see our next inflection point.
+ [400]
# Then finally we have a sequence of numbers in [400..500] with mode =
# 500.
+ [random.triangular(400, 500, 450) for _ in range(100)])
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
logging.debug('Splits = %s', splits)
# Instead of asserting that we have specific indices, we're testing that the
# splits found are within certain ranges.
self.assertTrue(any(50 <= c < 100 for c, _ in splits))
def testClusterAndFindSplit_InifiniteLooper(self):
# We construct a case where we find a clear partition point in offset 240,
# but permutation testing of the segment [0:240] will find more plausible
# points. The important part is that we don't run into an infinite loop.
sequence = [100] * 120 + [200] * 10 + [100] * 110 + [500] * 2
splits = ccd.ClusterAndFindSplit(sequence, self.rand)
logging.debug('Splits = %s', splits)
self.assertIn((240, (240, 241)), splits)
self.assertEqual(sequence[240], 500)
self.assertIn((120, (114, 120)), splits)
self.assertEqual(sequence[120], 200)
| catapult-project/catapult | dashboard/dashboard/common/clustering_change_detector_test.py | Python | bsd-3-clause | 6,038 |
import django_rq
def clear_failed():
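    """Empty django-rq's failed-job queue."""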
queue = django_rq.get_failed_queue()
return queue.empty()
def clear_all():
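    """Empty django-rq's default queue."""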
queue = django_rq.get_queue()
return queue.empty()
| tndatacommons/tndata_backend | tndata_backend/utils/queue.py | Python | mit | 183 |
# tests
from __future__ import absolute_import
from django.test import TestCase
# Import the plugin you wish to test
from .bot_plugin import TwitterPlugin
# Subclass your test class from LogosTestCase
from bot.testing.utils import LogosTestCase
class TestTwitter(LogosTestCase):
# set plugin_class to the actual class
# of the plugin you wish to test
plugin_class = TwitterPlugin
def setUp(self):
self.fred = self.create_user('fred', "[email protected]", "password1")
def testFollows(self):
self.assign_room_permission('fred', self.room, 'twitter_op')
self.set_nick("fred")
self.login("password1")
output = self.plugin.send_command("add follow {} @bible_101".format(self.room))
self.assertIn('Twitter follow added successfully', output)
output = self.plugin.send_command("list follows {}".format(self.room))
self.assertIn('@bible_101', output)
output = self.plugin.send_command("remove follow {} @bible_101".format(self.room))
self.assertIn('Twitter follow removed successfully', output)
output = self.plugin.send_command("list follows {}".format(self.room))
self.assertNotIn('@bible_101', output)
def tearDown(self):
self.fred.delete()
| kiwiheretic/logos-v2 | twitterapp/tests.py | Python | apache-2.0 | 1,274 |
#
# Copyright (C) 2013-2015 RoboIME
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
from numpy import array
from numpy.linalg import norm
from .goto import Goto
from ...utils.geom import Point
class FollowAndCover(Goto):
"""
When you need to follow a point but cover another
while maintaining a constant distance from the
followed, this is the way to go.
"""
def __init__(self, robot, follow, cover, distance=1.0, **kwargs):
"""
The argument names are pretty self explanatory,
If not, here's a drawing:
X <------- cover
\
\
(O) <--- robot
\ <--- distance
X <-- follow
Notice that the points follow, robot, and cover are
aligned. And that follow and robot are `distance` apart.
"""
super(FollowAndCover, self).__init__(robot, **kwargs)
self.follow = follow
self.cover = cover
self.distance = distance
def _step(self):
# vector from follow to cover:
f2c = array(self.cover) - array(self.follow)
# normalized:
vec = f2c / norm(f2c)
# target is follow displaced of distance over vec
self.target = Point(array(self.follow) + vec * self.distance)
# let Goto do its thing
super(FollowAndCover, self)._step()
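# Worked example (sketch): with follow=(0, 0), cover=(0, 2) and distance=1.0,
# f2c = (0, 2) and vec = (0, 1), so the computed target is Point((0, 1)) --
# on the follow->cover line, exactly 1.0 away from follow.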
| roboime/pyroboime | roboime/core/skills/followandcover.py | Python | agpl-3.0 | 1,829 |
from importlib import import_module
import numpy as np
import pandas as pd
from pandas.util import testing as tm
for imp in ['pandas.util', 'pandas.tools.hashing']:
try:
hashing = import_module(imp)
break
except (ImportError, TypeError, ValueError):
pass
class Factorize:
params = [[True, False], ['int', 'uint', 'float', 'string']]
param_names = ['sort', 'dtype']
def setup(self, sort, dtype):
N = 10**5
data = {'int': pd.Int64Index(np.arange(N).repeat(5)),
'uint': pd.UInt64Index(np.arange(N).repeat(5)),
'float': pd.Float64Index(np.random.randn(N).repeat(5)),
'string': tm.makeStringIndex(N).repeat(5)}
self.idx = data[dtype]
def time_factorize(self, sort, dtype):
self.idx.factorize(sort=sort)
class FactorizeUnique:
params = [[True, False], ['int', 'uint', 'float', 'string']]
param_names = ['sort', 'dtype']
def setup(self, sort, dtype):
N = 10**5
data = {'int': pd.Int64Index(np.arange(N)),
'uint': pd.UInt64Index(np.arange(N)),
'float': pd.Float64Index(np.arange(N)),
'string': tm.makeStringIndex(N)}
self.idx = data[dtype]
assert self.idx.is_unique
def time_factorize(self, sort, dtype):
self.idx.factorize(sort=sort)
class Duplicated:
params = [['first', 'last', False], ['int', 'uint', 'float', 'string']]
param_names = ['keep', 'dtype']
def setup(self, keep, dtype):
N = 10**5
data = {'int': pd.Int64Index(np.arange(N).repeat(5)),
'uint': pd.UInt64Index(np.arange(N).repeat(5)),
'float': pd.Float64Index(np.random.randn(N).repeat(5)),
'string': tm.makeStringIndex(N).repeat(5)}
self.idx = data[dtype]
# cache is_unique
self.idx.is_unique
def time_duplicated(self, keep, dtype):
self.idx.duplicated(keep=keep)
class DuplicatedUniqueIndex:
params = ['int', 'uint', 'float', 'string']
param_names = ['dtype']
def setup(self, dtype):
N = 10**5
data = {'int': pd.Int64Index(np.arange(N)),
'uint': pd.UInt64Index(np.arange(N)),
'float': pd.Float64Index(np.random.randn(N)),
'string': tm.makeStringIndex(N)}
self.idx = data[dtype]
# cache is_unique
self.idx.is_unique
def time_duplicated_unique(self, dtype):
self.idx.duplicated()
class Hashing:
def setup_cache(self):
N = 10**5
df = pd.DataFrame(
{'strings': pd.Series(tm.makeStringIndex(10000).take(
np.random.randint(0, 10000, size=N))),
'floats': np.random.randn(N),
'ints': np.arange(N),
'dates': pd.date_range('20110101', freq='s', periods=N),
'timedeltas': pd.timedelta_range('1 day', freq='s', periods=N)})
df['categories'] = df['strings'].astype('category')
df.iloc[10:20] = np.nan
return df
def time_frame(self, df):
hashing.hash_pandas_object(df)
def time_series_int(self, df):
hashing.hash_pandas_object(df['ints'])
def time_series_string(self, df):
hashing.hash_pandas_object(df['strings'])
def time_series_float(self, df):
hashing.hash_pandas_object(df['floats'])
def time_series_categorical(self, df):
hashing.hash_pandas_object(df['categories'])
def time_series_timedeltas(self, df):
hashing.hash_pandas_object(df['timedeltas'])
def time_series_dates(self, df):
hashing.hash_pandas_object(df['dates'])
class Quantile:
params = [[0, 0.5, 1],
['linear', 'nearest', 'lower', 'higher', 'midpoint'],
['float', 'int', 'uint']]
param_names = ['quantile', 'interpolation', 'dtype']
def setup(self, quantile, interpolation, dtype):
N = 10**5
data = {'int': np.arange(N),
'uint': np.arange(N).astype(np.uint64),
'float': np.random.randn(N)}
self.idx = pd.Series(data[dtype].repeat(5))
def time_quantile(self, quantile, interpolation, dtype):
self.idx.quantile(quantile, interpolation=interpolation)
from .pandas_vb_common import setup # noqa: F401 isort:skip
| cbertinato/pandas | asv_bench/benchmarks/algorithms.py | Python | bsd-3-clause | 4,333 |
#!/usr/bin/env python
"""Tests for the SimpleAPIAuthManager."""
from absl import app
from grr_response_core.lib.rdfvalues import structs as rdf_structs
from grr_response_core.lib.util import compatibility
from grr_response_proto import tests_pb2
from grr_response_server.authorization import groups
from grr_response_server.gui import api_auth_manager
from grr_response_server.gui import api_call_router
from grr_response_server.gui import api_test_lib
from grr.test_lib import test_lib
class DummyAuthManagerTestApiRouter(api_call_router.ApiCallRouter):
pass
class DummyAuthManagerTestApiRouter2(api_call_router.ApiCallRouter):
pass
class DummyAuthManagerTestApiRouter3(api_call_router.ApiCallRouter):
pass
class DefaultDummyAuthManagerTestApiRouter(api_call_router.ApiCallRouter):
pass
class DummyAuthManagerTestConfigurableApiRouterParams(rdf_structs.RDFProtoStruct
):
protobuf = tests_pb2.DummyAuthManagerTestConfigurableApiRouterParams
class DummyAuthManagerTestConfigurableApiRouter(api_call_router.ApiCallRouter):
params_type = DummyAuthManagerTestConfigurableApiRouterParams
def __init__(self, params=None):
super().__init__(params=params)
self.params = params
class DummyGroupAccessManager(groups.GroupAccessManager):
def __init__(self):
self.authorized_groups = {}
self.positive_matches = {"u1": ["g1", "g3"]}
def AuthorizeGroup(self, group, subject):
self.authorized_groups.setdefault(subject, []).append(group)
def MemberOfAuthorizedGroup(self, username, subject):
try:
group_names = self.positive_matches[username]
except KeyError:
return False
for group_name in group_names:
if group_name in self.authorized_groups[subject]:
return True
return False
class APIAuthorizationManagerTest(test_lib.GRRBaseTest):
def setUp(self):
super().setUp()
    # API ACLs are off by default; we need to set this to something so the tests
# exercise the functionality. Each test will supply its own ACL data. We
# also have to set up a default API router that will be used when none of
# the rules matches.
name = compatibility.GetName(DummyGroupAccessManager)
config_overrider = test_lib.ConfigOverrider(
{"ACL.group_access_manager_class": name})
config_overrider.Start()
self.addCleanup(config_overrider.Stop)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
def testMatchesIfOneOfUsersIsMatching(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
users:
- "u1"
- "u2"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u2")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
def testReturnsDefaultOnNoMatchByUser(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
users:
- "u1"
- "u2"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u4")
self.assertEqual(router.__class__, DefaultDummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testMatchesFirstRouterIfMultipleRoutersMatchByUser(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
users:
- "u1"
- "u3"
---
router: "DummyAuthManagerTestApiRouter2"
users:
- "u1"
- "u2"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter3",
DummyAuthManagerTestApiRouter3)
def testReturnsFirstRouterWhenMatchingByUser(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
users:
- "u1"
- "u3"
---
router: "DummyAuthManagerTestApiRouter2"
users:
- "u1"
- "u2"
---
router: "DummyAuthManagerTestApiRouter3"
users:
- "u2"
- "u4"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u2")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter2)
router = auth_mgr.GetRouterForUser("u4")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter3)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testMatchingByGroupWorks(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter2"
groups:
- "g1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter2)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testMatchingByUserHasPriorityOverMatchingByGroup(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
users:
- "u1"
---
router: "DummyAuthManagerTestApiRouter2"
groups:
- "g1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testReturnsFirstRouterWhenMultipleMatchByGroup(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
groups:
- "g3"
---
router: "DummyAuthManagerTestApiRouter2"
groups:
- "g1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testReturnsFirstMatchingRouterWhenItMatchesByGroupAndOtherByUser(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
groups:
- "g3"
---
router: "DummyAuthManagerTestApiRouter2"
users:
- "u1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter2",
DummyAuthManagerTestApiRouter2)
def testReturnsDefaultRouterWhenNothingMatchesByGroup(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
groups:
- "g5"
---
router: "DummyAuthManagerTestApiRouter2"
groups:
- "g6"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DefaultDummyAuthManagerTestApiRouter)
def testDefaultRouterIsReturnedIfNoAclsAreDefined(self):
auth_mgr = api_auth_manager.APIAuthorizationManager(
[], DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.__class__, DefaultDummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestApiRouter",
DummyAuthManagerTestApiRouter)
def testRaisesWhenNonConfigurableRouterInitializedWithParams(self):
exception = api_auth_manager.ApiCallRouterDoesNotExpectParameters
with self.assertRaises(exception):
api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestApiRouter"
router_params:
foo: "Oh no!"
bar: 42
users:
- "u1"
""", DefaultDummyAuthManagerTestApiRouter)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestConfigurableApiRouter",
DummyAuthManagerTestConfigurableApiRouter)
def testConfigurableRouterIsInitializedWithoutParameters(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestConfigurableApiRouter"
users:
- "u1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.params.foo, "")
self.assertEqual(router.params.bar, 0)
@api_test_lib.WithApiCallRouter("DummyAuthManagerTestConfigurableApiRouter",
DummyAuthManagerTestConfigurableApiRouter)
def testConfigurableRouterIsInitializedWithParameters(self):
auth_mgr = api_auth_manager.APIAuthorizationManager.FromYaml(
"""
router: "DummyAuthManagerTestConfigurableApiRouter"
router_params:
foo: "Oh no!"
bar: 42
users:
- "u1"
""", DefaultDummyAuthManagerTestApiRouter)
router = auth_mgr.GetRouterForUser("u1")
self.assertEqual(router.params.foo, "Oh no!")
self.assertEqual(router.params.bar, 42)
def main(argv):
# Run the full test suite
test_lib.main(argv)
if __name__ == "__main__":
app.run(main)
| google/grr | grr/server/grr_response_server/gui/api_auth_manager_test.py | Python | apache-2.0 | 10,454 |
# (c) The James Hutton Institute 2013
# Author: Leighton Pritchard
#
# Contact:
# [email protected]
#
# Leighton Pritchard,
# Information and Computing Sciences,
# James Hutton Institute,
# Errol Road,
# Invergowrie,
# Dundee,
# DD6 9LH,
# Scotland,
# UK
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
""" This module provides classes to represent a KGML Pathway Map
The KGML definition is as of release KGML v0.7.1
(http://www.kegg.jp/kegg/xml/docs/)
Classes:
Pathway Specifies graph information for the pathway map
Relation Specifies a relationship between two proteins or KOs, or
protein and compound. There is an implied direction to
the relationship in some cases.
Reaction A specific chemical reaction between a substrate and a
product.
Entry A node in the pathway graph
Graphics Entry subelement describing its visual representation
"""
import time
from itertools import chain
from xml.dom import minidom
import xml.etree.ElementTree as ET
from reportlab.lib import colors
# Pathway
class Pathway(object):
""" Specifies graph information for the pathway map, as described in
release KGML v0.7.1 (http://www.kegg.jp/kegg/xml/docs/)
Attributes:
name KEGGID of the pathway map
org ko/ec/[org prefix]
number map number (integer)
title the map title
image URL of the image map for the pathway
link URL of information about the pathway
entries Dictionary of entries in the pathway, keyed by node ID
reactions Set of reactions in the pathway
The name attribute has a restricted format, so we make it a property and
enforce the formatting.
The Pathway object is the only allowed route for adding/removing
Entry, Reaction, or Relation elements.
Entries are held in a dictionary and keyed by the node ID for the
pathway graph - this allows for ready access via the Reaction/Relation
etc. elements. Entries must be added before reference by any other
element.
Reactions are held in a dictionary, keyed by node ID for the path.
The elements referred to in the reaction must be added before the
reaction itself.
"""
def __init__(self):
self._name = ''
self.org = ''
self._number = None
self.title = ''
self.image = ''
self.link = ''
self.entries = {}
self._reactions = {}
self._relations = set()
def get_KGML(self):
""" Return the pathway in prettified KGML format
"""
header = '\n'.join(['<?xml version="1.0"?>',
'<!DOCTYPE pathway SYSTEM "http://www.genome.jp/kegg/xml/KGML_v0.7.1_.dtd">',
                            '<!-- Created by KGML_pathway.py %s -->' % time.asctime()])
rough_xml = header + ET.tostring(self.element, 'utf-8')
reparsed = minidom.parseString(rough_xml)
return reparsed.toprettyxml(indent=" ")
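    # Note (added): get_KGML() prefixes the ElementTree serialisation with
    # the XML declaration and the KGML v0.7.1 DOCTYPE, then pretty-prints
    # the result via minidom before returning it as a string.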
def add_entry(self, entry):
""" Add an Entry element to the pathway
"""
# We insist that the node ID is an integer
assert isinstance(entry.id, (int, long)), \
"Node ID must be an integer, got %s (%s)" % (type(entry.id),
entry.id)
entry._pathway = self # Let the entry know about the pathway
self.entries[entry.id] = entry
def remove_entry(self, entry):
""" Remove an Entry element from the pathway
"""
assert isinstance(entry.id, (int, long)), \
"Node ID must be an integer, got %s (%s)" % (type(entry.id),
entry.id)
# We need to remove the entry from any other elements that may
# contain it, which means removing those elements
# TODO
del self.entries[entry.id]
def add_reaction(self, reaction):
""" Add a Reaction element to the pathway
"""
# We insist that the node ID is an integer and corresponds to an entry
assert isinstance(reaction.id, (int, long)), \
"Node ID must be an integer, got %s (%s)" % (type(reaction.id),
reaction.id)
assert reaction.id in self.entries, \
"Reaction ID %d has no corresponding entry" % reaction.id
reaction._pathway = self # Let the reaction know about the pathway
self._reactions[reaction.id] = reaction
def remove_reaction(self, reaction):
""" Remove an Reaction element from the pathway
"""
assert isinstance(reaction.id, (int, long)), \
"Node ID must be an integer, got %s (%s)" % (type(reaction.id),
reaction.id)
# We need to remove the reaction from any other elements that may
# contain it, which means removing those elements
# TODO
del self._reactions[reaction.id]
def add_relation(self, relation):
""" Add a Relation element to the pathway
"""
relation._pathway = self # Let the reaction know about the pathway
self._relations.add(relation)
def remove_relation(self, relation):
""" Remove an Relation element from the pathway
"""
self._relations.remove(relation)
def __str__(self):
""" Returns a readable summary description string
"""
outstr = ['Pathway: %s' % self.title,
'KEGG ID: %s' % self.name,
'Image file: %s' % self.image,
'Organism: %s' % self.org,
'Entries: %d' % len(self.entries),
'Entry types:']
for t in ['ortholog', 'enzyme', 'reaction',
'gene', 'group', 'compound', 'map']:
etype = [e for e in self.entries.values() if e.type == t]
if len(etype):
outstr.append('\t%s: %d' % (t, len(etype)))
return '\n'.join(outstr) + '\n'
# Assert correct formatting of the pathway name, and other attributes
def getname(self):
return self._name
def setname(self, value):
assert value.startswith('path:'), \
"Pathway name should begin with 'path:', got %s" % value
self._name = value
def delname(self):
del self._name
def getnumber(self):
return self._number
def setnumber(self, value):
self._number = int(value)
def delnumber(self):
del self._number
name = property(getname, setname, delname, "The KEGGID for the pathway map")
number = property(getnumber, setnumber, delnumber, "The KEGG map number")
@property
def compounds(self):
""" Get a list of entries of type compound
"""
return [e for e in self.entries.values() if e.type == 'compound']
@property
def maps(self):
""" Get a list of entries of type map
"""
return [e for e in self.entries.values() if e.type == 'map']
@property
def orthologs(self):
""" Get a list of entries of type ortholog
"""
return [e for e in self.entries.values() if e.type == 'ortholog']
@property
def genes(self):
""" Get a list of entries of type gene
"""
return [e for e in self.entries.values() if e.type == 'gene']
@property
def reactions(self):
""" Get a list of reactions in the pathway
"""
return self._reactions.values()
@property
def reaction_entries(self):
""" Get a list of entries corresponding to each reaction in the pathway
"""
return [self.entries[i] for i in self._reactions]
@property
def relations(self):
""" Get a list of relations in the pathway
"""
return list(self._relations)
@property
def element(self):
""" Return the Pathway as a valid KGML element
"""
# The root is this Pathway element
pathway = ET.Element('pathway')
pathway.attrib = {'name': self._name,
'org': self.org,
'number': str(self._number),
'title': self.title,
'image': self.image,
'link': self.link,
}
# We add the Entries in node ID order
for eid, entry in sorted(self.entries.items()):
pathway.append(entry.element)
# Next we add Relations
for relation in self._relations:
pathway.append(relation.element)
for eid, reaction in sorted(self._reactions.items()):
pathway.append(reaction.element)
return pathway
@property
def bounds(self):
""" Return the [(xmin, ymin), (xmax, ymax)] co-ordinates for all
Graphics elements in the Pathway
"""
xlist, ylist = [], []
for b in [g.bounds for g in self.entries.values()]:
xlist.extend([b[0][0], b[1][0]])
ylist.extend([b[0][1], b[1][1]])
return [(min(xlist), min(ylist)),
(max(xlist), max(ylist))]
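# Illustration (added): Reactions and Relations refer to Entry node IDs, so
# Entries must be registered with the Pathway first, e.g.
#   p = Pathway(); p.name = 'path:ko00010'
#   e = Entry(); e.id = 1; p.add_entry(e)
#   r = Reaction(); r.id = 1; p.add_reaction(r)   # valid only after add_entry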
# Entry
class Entry(object):
""" Each Entry element is a node in the pathway graph, as described in
release KGML v0.7.1 (http://www.kegg.jp/kegg/xml/docs/)
Attributes:
id The ID of the entry in the pathway map (integer)
names List of KEGG IDs for the entry
type The type of the entry
link URL of information about the entry
reaction List of KEGG IDs of the corresponding reactions (integer)
graphics List of Graphics objects describing the Entry's visual
representation
        components List of component node IDs for this Entry ('group')
alt List of alternate names for the Entry
NOTE: The alt attribute represents a subelement of the substrate and
product elements in the KGML file
"""
def __init__(self):
self._id = None
self._names = []
self.type = ''
self.image = ''
self.link = ''
self.graphics = []
self.components = set()
self.alt = []
self._pathway = None
self._reactions = []
def __str__(self):
""" Return readable descriptive string
"""
outstr = ['Entry node ID: %d' % self.id,
'Names: %s' % self.name,
'Type: %s' % self.type,
'Components: %s' % self.components,
'Reactions: %s' % self.reaction,
'Graphics elements: %d %s' % (len(self.graphics),
self.graphics)]
return '\n'.join(outstr) + '\n'
def add_component(self, element):
""" If the Entry is already part of a pathway, make sure
the component already exists
"""
if self._pathway is not None:
assert element.id in self._pathway.entries, \
"Component %s is not an entry in the pathway" % value
self.components.add(element)
def remove_component(self, value):
""" Remove the entry with the passed ID from the group
"""
self.components.remove(value)
def add_graphics(self, entry):
""" Add the Graphics entry
"""
self.graphics.append(entry)
def remove_graphics(self, entry):
""" Remove the Graphics entry with the passed ID from the group
"""
self.graphics.remove(entry)
# Names may be given as a space-separated list of KEGG identifiers
def getname(self):
return ' '.join(self._names)
def setname(self, value):
self._names = value.split()
def delname(self):
self._names = []
# Reactions may be given as a space-separated list of KEGG identifiers
def getreaction(self):
return ' '.join(self._reactions)
def setreaction(self, value):
self._reactions = value.split()
def delreaction(self):
self._reactions = []
# We make sure that the node ID is an integer
def getid(self):
return self._id
def setid(self, value):
self._id = int(value)
def delid(self):
del self._id
id = property(getid, setid, delid,
"The pathway graph node ID for the Entry")
name = property(getname, setname, delname,
"List of KEGG identifiers for the Entry")
reaction = property(getreaction, setreaction, delreaction,
"List of reaction KEGG IDs for this Entry")
@property
def element(self):
""" Return the Entry as a valid KGML element
"""
# The root is this Entry element
entry = ET.Element('entry')
entry.attrib = {'id': str(self._id),
'name': self.name,
'link': self.link,
'type': self.type
}
if len(self._reactions):
entry.attrib['reaction'] = self.reaction
if len(self.graphics):
for g in self.graphics:
entry.append(g.element)
if len(self.components):
for c in self.components:
entry.append(c.element)
return entry
@property
def bounds(self):
""" Return the [(xmin, ymin), (xmax, ymax)] co-ordinates for the Entry
Graphics elements.
"""
xlist, ylist = [], []
for b in [g.bounds for g in self.graphics]:
xlist.extend([b[0][0], b[1][0]])
ylist.extend([b[0][1], b[1][1]])
return [(min(xlist), min(ylist)),
(max(xlist), max(ylist))]
@property
def is_reactant(self):
""" Returns True if the Entry participates in any reaction of its
parent Pathway
"""
for rxn in self._pathway.reactions:
if self._id in rxn.reactant_ids:
return True
return False
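# Illustration (added): Entry.name round-trips a space-separated KEGG ID
# list -- setting e.name = 'ko:K00001 ko:K00002' stores
# ['ko:K00001', 'ko:K00002'] internally, and reading e.name joins it back.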
# Component
class Component(object):
""" A subelement of the Entry element, used when the Entry is a complex
node, as described in release KGML v0.7.1
(http://www.kegg.jp/kegg/xml/docs/)
The Component acts as a collection (with type 'group', and typically
its own Graphics subelement), having only an ID.
"""
def __init__(self, parent):
self._id = None
self._parent = parent
# We make sure that the node ID is an integer
def getid(self):
return self._id
def setid(self, value):
self._id = int(value)
def delid(self):
del self._id
id = property(getid, setid, delid,
"The pathway graph node ID for the Entry")
@property
def element(self):
""" Return the Component as a valid KGML element
"""
# The root is this Component element
component = ET.Element('component')
component.attrib = {'id': str(self._id)}
return component
# Graphics
class Graphics(object):
""" A subelement of Entry, specifying its visual representation, as
described in release KGML v0.7.1 (http://www.kegg.jp/kegg/xml/docs/)
Attributes:
name Label for the graphics object
x X-axis position of the object (int)
y Y-axis position of the object (int)
coords polyline co-ordinates, list of (int, int) tuples
type object shape
width object width (int)
height object height (int)
fgcolor object foreground colour (hex RGB)
bgcolor object background colour (hex RGB)
Some attributes are present only for specific graphics types. For
example, line types do not (typically) have a width.
We permit non-DTD attributes and attribute settings, such as
dash List of ints, describing an on/off pattern for dashes
"""
def __init__(self, parent):
self.name = ''
self._x = None
self._y = None
self._coords = None
self.type = ''
self._width = None
self._height = None
self.fgcolor = ''
self.bgcolor = ''
self._parent = parent
# We make sure that the XY coordinates, width and height are numbers
def getx(self):
return self._x
def setx(self, value):
self._x = float(value)
def delx(self):
del self._x
def gety(self):
return self._y
def sety(self, value):
self._y = float(value)
def dely(self):
del self._y
def getwidth(self):
return self._width
def setwidth(self, value):
self._width = float(value)
def delwidth(self):
del self._width
def getheight(self):
return self._height
def setheight(self, value):
self._height = float(value)
def delheight(self):
del self._height
# We make sure that the polyline co-ordinates are integers, too
def getcoords(self):
return self._coords
def setcoords(self, value):
clist = [int(e) for e in value.split(',')]
self._coords = [tuple(clist[i:i+2]) for i in range(0, len(clist), 2)]
def delcoords(self):
del self._coords
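    # Illustration (added): coords accepts the KGML comma-separated string
    # form, e.g. setting coords = '46,17,46,21,37,21' stores the pairs
    # [(46, 17), (46, 21), (37, 21)].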
# Set default colours
def getfgcolor(self):
return self._fgcolor
def setfgcolor(self, value):
if value == 'none':
self._fgcolor = '#000000' # this default defined in KGML spec
else:
self._fgcolor = value
def delfgcolor(self):
del self._fgcolor
def getbgcolor(self):
return self._bgcolor
def setbgcolor(self, value):
if value == 'none':
self._bgcolor = '#000000' # this default defined in KGML spec
else:
self._bgcolor = value
def delbgcolor(self):
del self._bgcolor
x = property(getx, setx, delx, "The X coordinate for the graphics element")
y = property(gety, sety, dely, "The Y coordinate for the graphics element")
width = property(getwidth, setwidth, delwidth,
"The width of the graphics element")
height = property(getheight, setheight, delheight,
"The height of the graphics element")
coords = property(getcoords, setcoords, delcoords,
"Polyline coordinates for the graphics element")
fgcolor = property(getfgcolor, setfgcolor, delfgcolor)
bgcolor = property(getbgcolor, setbgcolor, delbgcolor)
@property
def element(self):
""" Return the Graphics as a valid KGML element
"""
# The root is this Component element
graphics = ET.Element('graphics')
if isinstance(self.fgcolor, str):
fghex = self.fgcolor
else:
fghex = '#' + self.fgcolor.hexval()[2:]
if isinstance(self.bgcolor, str):
bghex = self.bgcolor
else:
bghex = '#' + self.bgcolor.hexval()[2:]
graphics.attrib = {'name': self.name,
'type': self.type,
'fgcolor': fghex,
'bgcolor': bghex}
for (n, attr) in [('x', '_x'), ('y', '_y'),
('width', '_width'), ('height', '_height')]:
if getattr(self, attr) is not None:
graphics.attrib[n] = str(getattr(self, attr))
if self.type == 'line': # Need to write polycoords
graphics.attrib['coords'] = \
','.join([str(e) for e in chain.from_iterable(self.coords)])
return graphics
@property
def bounds(self):
""" Return the bounds of the Graphics object as an [(xmin, ymin),
(xmax, ymax)] tuple. Co-ordinates give the centre of the
circle, rectangle, roundrectangle elements, so we have to
adjust for the relevant width/height.
"""
if self.type == 'line':
xlist = [x for x, y in self.coords]
ylist = [y for x, y in self.coords]
return [(min(xlist), min(ylist)),
(max(xlist), max(ylist))]
else:
return [(self.x - self.width * 0.5, self.y - self.height * 0.5),
(self.x + self.width * 0.5, self.y + self.height * 0.5)]
@property
def centre(self):
""" Return the centre of the Graphics object as an (x, y) tuple
"""
return (0.5 * (self.bounds[0][0] + self.bounds[1][0]),
0.5 * (self.bounds[0][1] + self.bounds[1][1]))
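# Illustration (added): box-like shapes derive bounds from their centre and
# width/height, while 'line' shapes use the raw polyline, e.g.
#   g = Graphics(None); g.type = 'line'
#   g.coords = '0,0,10,5'   # parsed to [(0, 0), (10, 5)]
#   g.bounds                # -> [(0, 0), (10, 5)]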
# Reaction
class Reaction(object):
""" This describes a specific chemical reaction between one or more
substrates and one or more products.
Attributes:
id Pathway graph node ID of the entry
names List of KEGG identifier(s) from the REACTION database
type String: reversible or irreversible
substrate Entry object of the substrate
product Entry object of the product
"""
def __init__(self):
self._id = None
self._names = []
self.type = ''
self._substrates = set()
self._products = set()
self._pathway = None
def __str__(self):
""" Return an informative human-readable string
"""
outstr = ['Reaction node ID: %s' % self.id,
'Reaction KEGG IDs: %s' % self.name,
'Type: %s' % self.type,
'Substrates: %s' % \
','.join([s.name for s in self.substrates]),
'Products: %s' % \
','.join([s.name for s in self.products]),
]
return '\n'.join(outstr) + '\n'
def add_substrate(self, substrate_id):
""" Add a substrate, identified by its node ID, to the reaction
"""
if self._pathway is not None:
assert int(substrate_id) in self._pathway.entries, \
"Couldn't add substrate, no node ID %d in Pathway" % \
int(substrate_id)
        self._substrates.add(int(substrate_id))
def add_product(self, product_id):
""" Add a product, identified by its node ID, to the reaction
"""
if self._pathway is not None:
assert int(product_id) in self._pathway.entries, \
"Couldn't add product, no node ID %d in Pathway" % product_id
self._products.add(int(product_id))
# The node ID is also the node ID of the Entry that corresponds to the
# reaction; we get the corresponding Entry when there is an associated
# Pathway
def getid(self):
return self._id
def setid(self, value):
self._id = int(value)
def delid(self):
del self._id
id = property(getid, setid, delid, "Node ID for the reaction")
# Names may show up as a space-separated list of several KEGG identifiers
def getnames(self):
return ' '.join(self._names)
def setnames(self, value):
self._names.extend(value.split())
    def delnames(self):
        del self._names
name = property(getnames, setnames, delnames,
"List of KEGG identifiers for the reaction")
# products and substrates are read-only properties, returning lists
# of Entry objects
@property
def substrates(self):
""" Return list of substrate Entry elements
"""
return [self._pathway.entries[sid] for sid in self._substrates]
@property
def products(self):
""" Return list of product Entry elements
"""
return [self._pathway.entries[pid] for pid in self._products]
@property
def entry(self):
""" Return the Entry corresponding to this reaction
"""
return self._pathway.entries[self._id]
@property
def reactant_ids(self):
""" Return a list of substrate and product reactant IDs
"""
return self._products.union(self._substrates)
@property
def element(self):
""" Return KGML element describing the Reaction
"""
# The root is this Relation element
reaction = ET.Element('reaction')
reaction.attrib = {'id': str(self.id),
'name': self.name,
'type': self.type}
for s in self._substrates:
substrate = ET.Element('substrate')
substrate.attrib['id'] = str(s)
substrate.attrib['name'] = self._pathway.entries[s].name
reaction.append(substrate)
for p in self._products:
product = ET.Element('product')
product.attrib['id'] = str(p)
product.attrib['name'] = self._pathway.entries[p].name
reaction.append(product)
return reaction
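# Illustration (added): substrates and products are referenced by node ID,
# so the corresponding Entry objects must already be in the Pathway:
#   rxn = Reaction(); rxn.id = 5; rxn.name = 'rn:R00001'
#   rxn.type = 'irreversible'
#   pathway.add_reaction(rxn)                 # asserts entry 5 exists
#   rxn.add_substrate(3); rxn.add_product(4)  # likewise entries 3 and 4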
# Relation
class Relation(object):
""" This describes a relationship between two products, KOs, or protein
and compound, as described in release KGML v0.7.1
(http://www.kegg.jp/kegg/xml/docs/)
Attributes:
entry1 The first Entry object node ID defining the relation (int)
entry2 The second Entry object node ID defining the relation (int)
type The relation type
subtypes List of subtypes for the relation, as a list of
(name, value) tuples
"""
def __init__(self):
self._entry1 = None
self._entry2 = None
self.type = ''
self.subtypes = []
self._pathway = None
def __str__(self):
""" A useful human-readable string
"""
outstr = ['Relation (subtypes: %d):' % len(self.subtypes),
'Entry1:', str(self.entry1),
'Entry2:', str(self.entry2)]
for s in self.subtypes:
outstr.extend(['Subtype: %s' % s[0], str(s[1])])
return '\n'.join(outstr)
# Properties entry1 and entry2
def getentry1(self):
if self._pathway is not None:
return self._pathway.entries[self._entry1]
return self._entry1
def setentry1(self, value):
self._entry1 = int(value)
def delentry1(self):
del self._entry1
def getentry2(self):
if self._pathway is not None:
return self._pathway.entries[self._entry2]
return self._entry2
def setentry2(self, value):
self._entry2 = int(value)
def delentry2(self):
del self._entry2
entry1 = property(getentry1, setentry1, delentry1, "Entry1 of the relation")
entry2 = property(getentry2, setentry2, delentry2, "Entry2 of the relation")
@property
def element(self):
""" Return KGML element describing the Relation
"""
# The root is this Relation element
relation = ET.Element('relation')
relation.attrib = {'entry1': str(self._entry1),
'entry2': str(self._entry2),
'type': self.type}
for (name, value) in self.subtypes:
subtype = ET.Element('subtype')
            subtype.attrib = {'name': name, 'value': str(value)}
relation.append(subtype)
return relation
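# --- Illustrative self-test (added; not part of the original module). A
# minimal sketch that builds a one-entry pathway and prints its KGML
# element; all KEGG identifiers used here are arbitrary examples.
if __name__ == '__main__':
    demo = Pathway()
    demo.name = 'path:ko00010'
    demo.org = 'ko'
    demo.number = 10
    demo.title = 'Demo pathway'
    entry = Entry()
    entry.id = 1
    entry.name = 'cpd:C00031'
    entry.type = 'compound'
    graphics = Graphics(entry)
    graphics.name = 'C00031'
    graphics.type = 'circle'
    graphics.x, graphics.y = 100, 100
    graphics.width, graphics.height = 8, 8
    graphics.fgcolor = '#000000'
    graphics.bgcolor = '#FFFFFF'
    entry.add_graphics(graphics)
    demo.add_entry(entry)
    print(ET.tostring(demo.element))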
| bluegenes/MakeMyTranscriptome | scripts/util/KGML_pathway.py | Python | bsd-3-clause | 28,215 |
# pylint: disable-msg=E1101,W0612
import operator
from datetime import datetime
import nose
from numpy import nan
import numpy as np
import pandas as pd
dec = np.testing.dec
from pandas.util.testing import (assert_almost_equal, assert_series_equal,
assert_frame_equal, assert_panel_equal, assertRaisesRegexp)
from numpy.testing import assert_equal
from pandas import Series, DataFrame, bdate_range, Panel
from pandas.core.datetools import BDay
from pandas.core.index import Index
from pandas.tseries.index import DatetimeIndex
import pandas.core.datetools as datetools
from pandas.core.common import isnull
import pandas.util.testing as tm
from pandas.compat import range, lrange, StringIO
from pandas import compat
import pandas.sparse.frame as spf
from pandas._sparse import BlockIndex, IntIndex
from pandas.sparse.api import (SparseSeries, SparseTimeSeries,
SparseDataFrame, SparsePanel,
SparseArray)
import pandas.tests.test_frame as test_frame
import pandas.tests.test_panel as test_panel
import pandas.tests.test_series as test_series
from .test_array import assert_sp_array_equal
import warnings
warnings.filterwarnings(action='ignore', category=FutureWarning)
def _test_data1():
# nan-based
arr = np.arange(20, dtype=float)
index = np.arange(20)
arr[:2] = nan
arr[5:10] = nan
arr[-3:] = nan
return arr, index
def _test_data2():
# nan-based
arr = np.arange(15, dtype=float)
index = np.arange(15)
arr[7:12] = nan
arr[-1:] = nan
return arr, index
def _test_data1_zero():
# zero-based
arr, index = _test_data1()
arr[np.isnan(arr)] = 0
return arr, index
def _test_data2_zero():
# zero-based
arr, index = _test_data2()
arr[np.isnan(arr)] = 0
return arr, index
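# Note (added): the *_zero helpers reuse the nan-based fixtures with the
# missing values replaced by 0, so each test can exercise both the
# fill_value=nan and fill_value=0 code paths.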
def assert_sp_series_equal(a, b, exact_indices=True):
assert(a.index.equals(b.index))
assert_sp_array_equal(a, b)
def assert_sp_frame_equal(left, right, exact_indices=True):
"""
exact: Series SparseIndex objects must be exactly the same, otherwise just
compare dense representations
"""
for col, series in compat.iteritems(left):
assert(col in right)
# trade-off?
if exact_indices:
assert_sp_series_equal(series, right[col])
else:
assert_series_equal(series.to_dense(), right[col].to_dense())
assert_almost_equal(left.default_fill_value,
right.default_fill_value)
# do I care?
# assert(left.default_kind == right.default_kind)
for col in right:
assert(col in left)
def assert_sp_panel_equal(left, right, exact_indices=True):
for item, frame in compat.iteritems(left):
assert(item in right)
# trade-off?
assert_sp_frame_equal(frame, right[item], exact_indices=exact_indices)
assert_almost_equal(left.default_fill_value,
right.default_fill_value)
assert(left.default_kind == right.default_kind)
for item in right:
assert(item in left)
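# Illustration (added): a minimal self-check of the comparison helpers,
# assuming the sparse API of this pandas vintage:
#   s = SparseSeries([1.0, nan, 3.0])
#   assert_sp_series_equal(s, s.copy())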
class TestSparseSeries(tm.TestCase,
test_series.CheckNameIntegration):
_multiprocess_can_split_ = True
def setUp(self):
arr, index = _test_data1()
date_index = bdate_range('1/1/2011', periods=len(index))
self.bseries = SparseSeries(arr, index=index, kind='block')
self.bseries.name = 'bseries'
self.ts = self.bseries
self.btseries = SparseSeries(arr, index=date_index, kind='block')
self.iseries = SparseSeries(arr, index=index, kind='integer')
arr, index = _test_data2()
self.bseries2 = SparseSeries(arr, index=index, kind='block')
self.iseries2 = SparseSeries(arr, index=index, kind='integer')
arr, index = _test_data1_zero()
self.zbseries = SparseSeries(arr, index=index, kind='block',
fill_value=0)
self.ziseries = SparseSeries(arr, index=index, kind='integer',
fill_value=0)
arr, index = _test_data2_zero()
self.zbseries2 = SparseSeries(arr, index=index, kind='block',
fill_value=0)
self.ziseries2 = SparseSeries(arr, index=index, kind='integer',
fill_value=0)
def test_iteration_and_str(self):
[x for x in self.bseries]
str(self.bseries)
def test_construct_DataFrame_with_sp_series(self):
# it works!
df = DataFrame({'col': self.bseries})
# printing & access
df.iloc[:1]
df['col']
df.dtypes
str(df)
assert_sp_series_equal(df['col'], self.bseries)
result = df.iloc[:,0]
assert_sp_series_equal(result, self.bseries)
# blocking
expected = Series({'col': 'float64:sparse'})
result = df.ftypes
assert_series_equal(expected, result)
def test_series_density(self):
# GH2803
ts = Series(np.random.randn(10))
ts[2:-2] = nan
sts = ts.to_sparse()
density = sts.density # don't die
self.assertEqual(density, 4 / 10.0)
def test_sparse_to_dense(self):
arr, index = _test_data1()
series = self.bseries.to_dense()
assert_equal(series, arr)
series = self.bseries.to_dense(sparse_only=True)
assert_equal(series, arr[np.isfinite(arr)])
series = self.iseries.to_dense()
assert_equal(series, arr)
arr, index = _test_data1_zero()
series = self.zbseries.to_dense()
assert_equal(series, arr)
series = self.ziseries.to_dense()
assert_equal(series, arr)
def test_dense_to_sparse(self):
series = self.bseries.to_dense()
bseries = series.to_sparse(kind='block')
iseries = series.to_sparse(kind='integer')
assert_sp_series_equal(bseries, self.bseries)
assert_sp_series_equal(iseries, self.iseries)
# non-NaN fill value
series = self.zbseries.to_dense()
zbseries = series.to_sparse(kind='block', fill_value=0)
ziseries = series.to_sparse(kind='integer', fill_value=0)
assert_sp_series_equal(zbseries, self.zbseries)
assert_sp_series_equal(ziseries, self.ziseries)
def test_to_dense_preserve_name(self):
assert(self.bseries.name is not None)
result = self.bseries.to_dense()
self.assertEqual(result.name, self.bseries.name)
def test_constructor(self):
# test setup guys
self.assertTrue(np.isnan(self.bseries.fill_value))
tm.assert_isinstance(self.bseries.sp_index, BlockIndex)
self.assertTrue(np.isnan(self.iseries.fill_value))
tm.assert_isinstance(self.iseries.sp_index, IntIndex)
self.assertEqual(self.zbseries.fill_value, 0)
assert_equal(self.zbseries.values.values,
self.bseries.to_dense().fillna(0).values)
# pass SparseSeries
s2 = SparseSeries(self.bseries)
s3 = SparseSeries(self.iseries)
s4 = SparseSeries(self.zbseries)
assert_sp_series_equal(s2, self.bseries)
assert_sp_series_equal(s3, self.iseries)
assert_sp_series_equal(s4, self.zbseries)
# Sparse time series works
date_index = bdate_range('1/1/2000', periods=len(self.bseries))
s5 = SparseSeries(self.bseries, index=date_index)
tm.assert_isinstance(s5, SparseTimeSeries)
# pass Series
bseries2 = SparseSeries(self.bseries.to_dense())
assert_equal(self.bseries.sp_values, bseries2.sp_values)
# pass dict?
# don't copy the data by default
values = np.ones(self.bseries.npoints)
sp = SparseSeries(values, sparse_index=self.bseries.sp_index)
sp.sp_values[:5] = 97
self.assertEqual(values[0], 97)
# but can make it copy!
sp = SparseSeries(values, sparse_index=self.bseries.sp_index,
copy=True)
sp.sp_values[:5] = 100
self.assertEqual(values[0], 97)
def test_constructor_scalar(self):
data = 5
sp = SparseSeries(data, np.arange(100))
sp = sp.reindex(np.arange(200))
self.assertTrue((sp.ix[:99] == data).all())
self.assertTrue(isnull(sp.ix[100:]).all())
data = np.nan
sp = SparseSeries(data, np.arange(100))
def test_constructor_ndarray(self):
pass
def test_constructor_nonnan(self):
arr = [0, 0, 0, nan, nan]
sp_series = SparseSeries(arr, fill_value=0)
assert_equal(sp_series.values.values, arr)
def test_copy_astype(self):
cop = self.bseries.astype(np.float64)
self.assertIsNot(cop, self.bseries)
self.assertIs(cop.sp_index, self.bseries.sp_index)
self.assertEqual(cop.dtype, np.float64)
cop2 = self.iseries.copy()
assert_sp_series_equal(cop, self.bseries)
assert_sp_series_equal(cop2, self.iseries)
# test that data is copied
cop[:5] = 97
self.assertEqual(cop.sp_values[0], 97)
self.assertNotEqual(self.bseries.sp_values[0], 97)
# correct fill value
zbcop = self.zbseries.copy()
zicop = self.ziseries.copy()
assert_sp_series_equal(zbcop, self.zbseries)
assert_sp_series_equal(zicop, self.ziseries)
# no deep copy
view = self.bseries.copy(deep=False)
view.sp_values[:5] = 5
self.assertTrue((self.bseries.sp_values[:5] == 5).all())
def test_astype(self):
self.assertRaises(Exception, self.bseries.astype, np.int64)
def test_kind(self):
self.assertEqual(self.bseries.kind, 'block')
self.assertEqual(self.iseries.kind, 'integer')
def test_pickle(self):
def _test_roundtrip(series):
unpickled = self.round_trip_pickle(series)
assert_sp_series_equal(series, unpickled)
assert_series_equal(series.to_dense(), unpickled.to_dense())
self._check_all(_test_roundtrip)
def _check_all(self, check_func):
check_func(self.bseries)
check_func(self.iseries)
check_func(self.zbseries)
check_func(self.ziseries)
def test_getitem(self):
def _check_getitem(sp, dense):
for idx, val in compat.iteritems(dense):
assert_almost_equal(val, sp[idx])
for i in range(len(dense)):
assert_almost_equal(sp[i], dense[i])
# j = np.float64(i)
# assert_almost_equal(sp[j], dense[j])
# API change 1/6/2012
# negative getitem works
# for i in xrange(len(dense)):
# assert_almost_equal(sp[-i], dense[-i])
_check_getitem(self.bseries, self.bseries.to_dense())
_check_getitem(self.btseries, self.btseries.to_dense())
_check_getitem(self.zbseries, self.zbseries.to_dense())
_check_getitem(self.iseries, self.iseries.to_dense())
_check_getitem(self.ziseries, self.ziseries.to_dense())
# exception handling
self.assertRaises(Exception, self.bseries.__getitem__,
len(self.bseries) + 1)
# index not contained
self.assertRaises(Exception, self.btseries.__getitem__,
self.btseries.index[-1] + BDay())
def test_get_get_value(self):
assert_almost_equal(self.bseries.get(10), self.bseries[10])
self.assertIsNone(self.bseries.get(len(self.bseries) + 1))
dt = self.btseries.index[10]
result = self.btseries.get(dt)
expected = self.btseries.to_dense()[dt]
assert_almost_equal(result, expected)
assert_almost_equal(self.bseries.get_value(10), self.bseries[10])
def test_set_value(self):
idx = self.btseries.index[7]
self.btseries.set_value(idx, 0)
self.assertEqual(self.btseries[idx], 0)
self.iseries.set_value('foobar', 0)
self.assertEqual(self.iseries.index[-1], 'foobar')
self.assertEqual(self.iseries['foobar'], 0)
def test_getitem_slice(self):
idx = self.bseries.index
res = self.bseries[::2]
tm.assert_isinstance(res, SparseSeries)
expected = self.bseries.reindex(idx[::2])
assert_sp_series_equal(res, expected)
res = self.bseries[:5]
tm.assert_isinstance(res, SparseSeries)
assert_sp_series_equal(res, self.bseries.reindex(idx[:5]))
res = self.bseries[5:]
assert_sp_series_equal(res, self.bseries.reindex(idx[5:]))
# negative indices
res = self.bseries[:-3]
assert_sp_series_equal(res, self.bseries.reindex(idx[:-3]))
def test_take(self):
def _compare_with_dense(sp):
dense = sp.to_dense()
def _compare(idx):
dense_result = dense.take(idx).values
sparse_result = sp.take(idx)
self.assertIsInstance(sparse_result, SparseSeries)
assert_almost_equal(dense_result, sparse_result.values.values)
_compare([1., 2., 3., 4., 5., 0.])
_compare([7, 2, 9, 0, 4])
_compare([3, 6, 3, 4, 7])
self._check_all(_compare_with_dense)
self.assertRaises(Exception, self.bseries.take,
[0, len(self.bseries) + 1])
# Corner case
        sp = SparseSeries(np.ones(10) * nan)
assert_almost_equal(sp.take([0, 1, 2, 3, 4]), np.repeat(nan, 5))
def test_setitem(self):
self.bseries[5] = 7.
self.assertEqual(self.bseries[5], 7.)
def test_setslice(self):
self.bseries[5:10] = 7.
assert_series_equal(self.bseries[5:10].to_dense(), Series(
7., index=range(5, 10), name=self.bseries.name))
def test_operators(self):
def _check_op(a, b, op):
sp_result = op(a, b)
adense = a.to_dense() if isinstance(a, SparseSeries) else a
bdense = b.to_dense() if isinstance(b, SparseSeries) else b
dense_result = op(adense, bdense)
assert_almost_equal(sp_result.to_dense(), dense_result)
def check(a, b):
_check_op(a, b, operator.add)
_check_op(a, b, operator.sub)
_check_op(a, b, operator.truediv)
_check_op(a, b, operator.floordiv)
_check_op(a, b, operator.mul)
_check_op(a, b, lambda x, y: operator.add(y, x))
_check_op(a, b, lambda x, y: operator.sub(y, x))
_check_op(a, b, lambda x, y: operator.truediv(y, x))
_check_op(a, b, lambda x, y: operator.floordiv(y, x))
_check_op(a, b, lambda x, y: operator.mul(y, x))
# NaN ** 0 = 1 in C?
# _check_op(a, b, operator.pow)
# _check_op(a, b, lambda x, y: operator.pow(y, x))
check(self.bseries, self.bseries)
check(self.iseries, self.iseries)
check(self.bseries, self.iseries)
check(self.bseries, self.bseries2)
check(self.bseries, self.iseries2)
check(self.iseries, self.iseries2)
# scalar value
check(self.bseries, 5)
# zero-based
check(self.zbseries, self.zbseries * 2)
check(self.zbseries, self.zbseries2)
check(self.ziseries, self.ziseries2)
# with dense
result = self.bseries + self.bseries.to_dense()
assert_sp_series_equal(result, self.bseries + self.bseries)
    # @dec.knownfailureif(True, 'Known NumPy failure as of 1.5.1')
def test_operators_corner2(self):
        raise nose.SkipTest('known failure on numpy 1.5.1')
# NumPy circumvents __r*__ operations
val = np.float64(3.0)
result = val - self.zbseries
assert_sp_series_equal(result, 3 - self.zbseries)
def test_binary_operators(self):
# skipping for now #####
raise nose.SkipTest("skipping sparse binary operators test")
def _check_inplace_op(iop, op):
tmp = self.bseries.copy()
expected = op(tmp, self.bseries)
iop(tmp, self.bseries)
assert_sp_series_equal(tmp, expected)
inplace_ops = ['add', 'sub', 'mul', 'truediv', 'floordiv', 'pow']
for op in inplace_ops:
_check_inplace_op(
getattr(operator, "i%s" % op), getattr(operator, op))
def test_reindex(self):
def _compare_with_series(sps, new_index):
spsre = sps.reindex(new_index)
series = sps.to_dense()
seriesre = series.reindex(new_index)
seriesre = seriesre.to_sparse(fill_value=sps.fill_value)
assert_sp_series_equal(spsre, seriesre)
assert_series_equal(spsre.to_dense(), seriesre.to_dense())
_compare_with_series(self.bseries, self.bseries.index[::2])
_compare_with_series(self.bseries, list(self.bseries.index[::2]))
_compare_with_series(self.bseries, self.bseries.index[:10])
_compare_with_series(self.bseries, self.bseries.index[5:])
_compare_with_series(self.zbseries, self.zbseries.index[::2])
_compare_with_series(self.zbseries, self.zbseries.index[:10])
_compare_with_series(self.zbseries, self.zbseries.index[5:])
# special cases
same_index = self.bseries.reindex(self.bseries.index)
assert_sp_series_equal(self.bseries, same_index)
self.assertIsNot(same_index, self.bseries)
# corner cases
sp = SparseSeries([], index=[])
sp_zero = SparseSeries([], index=[], fill_value=0)
_compare_with_series(sp, np.arange(10))
# with copy=False
reindexed = self.bseries.reindex(self.bseries.index, copy=True)
reindexed.sp_values[:] = 1.
self.assertTrue((self.bseries.sp_values != 1.).all())
reindexed = self.bseries.reindex(self.bseries.index, copy=False)
reindexed.sp_values[:] = 1.
np.testing.assert_array_equal(self.bseries.sp_values, 1.)
def test_sparse_reindex(self):
length = 10
def _check(values, index1, index2, fill_value):
first_series = SparseSeries(values, sparse_index=index1,
fill_value=fill_value)
reindexed = first_series.sparse_reindex(index2)
self.assertIs(reindexed.sp_index, index2)
int_indices1 = index1.to_int_index().indices
int_indices2 = index2.to_int_index().indices
expected = Series(values, index=int_indices1)
expected = expected.reindex(int_indices2).fillna(fill_value)
assert_almost_equal(expected.values, reindexed.sp_values)
def _check_with_fill_value(values, first, second, fill_value=nan):
i_index1 = IntIndex(length, first)
i_index2 = IntIndex(length, second)
b_index1 = i_index1.to_block_index()
b_index2 = i_index2.to_block_index()
_check(values, i_index1, i_index2, fill_value)
_check(values, b_index1, b_index2, fill_value)
def _check_all(values, first, second):
_check_with_fill_value(values, first, second, fill_value=nan)
_check_with_fill_value(values, first, second, fill_value=0)
index1 = [2, 4, 5, 6, 8, 9]
values1 = np.arange(6.)
_check_all(values1, index1, [2, 4, 5])
_check_all(values1, index1, [2, 3, 4, 5, 6, 7, 8, 9])
_check_all(values1, index1, [0, 1])
_check_all(values1, index1, [0, 1, 7, 8, 9])
_check_all(values1, index1, [])
first_series = SparseSeries(values1, sparse_index=IntIndex(length,
index1),
fill_value=nan)
with tm.assertRaisesRegexp(TypeError,
'new index must be a SparseIndex'):
reindexed = first_series.sparse_reindex(0)
def test_repr(self):
bsrepr = repr(self.bseries)
isrepr = repr(self.iseries)
def test_iter(self):
pass
def test_truncate(self):
pass
def test_fillna(self):
pass
def test_groupby(self):
pass
def test_reductions(self):
def _compare_with_dense(obj, op):
sparse_result = getattr(obj, op)()
series = obj.to_dense()
dense_result = getattr(series, op)()
self.assertEqual(sparse_result, dense_result)
to_compare = ['count', 'sum', 'mean', 'std', 'var', 'skew']
def _compare_all(obj):
for op in to_compare:
_compare_with_dense(obj, op)
_compare_all(self.bseries)
self.bseries.sp_values[5:10] = np.NaN
_compare_all(self.bseries)
_compare_all(self.zbseries)
self.zbseries.sp_values[5:10] = np.NaN
_compare_all(self.zbseries)
series = self.zbseries.copy()
series.fill_value = 2
_compare_all(series)
nonna = Series(np.random.randn(20)).to_sparse()
_compare_all(nonna)
nonna2 = Series(np.random.randn(20)).to_sparse(fill_value=0)
_compare_all(nonna2)
def test_dropna(self):
sp = SparseSeries([0, 0, 0, nan, nan, 5, 6],
fill_value=0)
sp_valid = sp.valid()
expected = sp.to_dense().valid()
expected = expected[expected != 0]
assert_almost_equal(sp_valid.values, expected.values)
self.assertTrue(sp_valid.index.equals(expected.index))
self.assertEqual(len(sp_valid.sp_values), 2)
result = self.bseries.dropna()
expected = self.bseries.to_dense().dropna()
self.assertNotIsInstance(result, SparseSeries)
tm.assert_series_equal(result, expected)
def test_homogenize(self):
def _check_matches(indices, expected):
data = {}
for i, idx in enumerate(indices):
data[i] = SparseSeries(idx.to_int_index().indices,
sparse_index=idx)
homogenized = spf.homogenize(data)
for k, v in compat.iteritems(homogenized):
assert(v.sp_index.equals(expected))
indices1 = [BlockIndex(10, [2], [7]),
BlockIndex(10, [1, 6], [3, 4]),
BlockIndex(10, [0], [10])]
expected1 = BlockIndex(10, [2, 6], [2, 3])
_check_matches(indices1, expected1)
indices2 = [BlockIndex(10, [2], [7]),
BlockIndex(10, [2], [7])]
expected2 = indices2[0]
_check_matches(indices2, expected2)
# must have NaN fill value
data = {'a': SparseSeries(np.arange(7), sparse_index=expected2,
fill_value=0)}
assertRaisesRegexp(TypeError, "NaN fill value", spf.homogenize, data)
def test_fill_value_corner(self):
cop = self.zbseries.copy()
cop.fill_value = 0
result = self.bseries / cop
self.assertTrue(np.isnan(result.fill_value))
cop2 = self.zbseries.copy()
cop2.fill_value = 1
result = cop2 / cop
self.assertTrue(np.isnan(result.fill_value))
def test_shift(self):
series = SparseSeries([nan, 1., 2., 3., nan, nan],
index=np.arange(6))
shifted = series.shift(0)
self.assertIsNot(shifted, series)
assert_sp_series_equal(shifted, series)
f = lambda s: s.shift(1)
_dense_series_compare(series, f)
f = lambda s: s.shift(-2)
_dense_series_compare(series, f)
series = SparseSeries([nan, 1., 2., 3., nan, nan],
index=bdate_range('1/1/2000', periods=6))
f = lambda s: s.shift(2, freq='B')
_dense_series_compare(series, f)
f = lambda s: s.shift(2, freq=datetools.bday)
_dense_series_compare(series, f)
def test_cumsum(self):
result = self.bseries.cumsum()
expected = self.bseries.to_dense().cumsum()
tm.assert_isinstance(result, SparseSeries)
self.assertEqual(result.name, self.bseries.name)
assert_series_equal(result.to_dense(), expected)
result = self.zbseries.cumsum()
expected = self.zbseries.to_dense().cumsum()
tm.assert_isinstance(result, Series)
assert_series_equal(result, expected)
def test_combine_first(self):
s = self.bseries
result = s[::2].combine_first(s)
result2 = s[::2].combine_first(s.to_dense())
expected = s[::2].to_dense().combine_first(s.to_dense())
expected = expected.to_sparse(fill_value=s.fill_value)
assert_sp_series_equal(result, result2)
assert_sp_series_equal(result, expected)
class TestSparseTimeSeries(tm.TestCase):
pass
class TestSparseDataFrame(tm.TestCase, test_frame.SafeForSparse):
klass = SparseDataFrame
_multiprocess_can_split_ = True
def setUp(self):
self.data = {'A': [nan, nan, nan, 0, 1, 2, 3, 4, 5, 6],
'B': [0, 1, 2, nan, nan, nan, 3, 4, 5, 6],
'C': np.arange(10),
'D': [0, 1, 2, 3, 4, 5, nan, nan, nan, nan]}
self.dates = bdate_range('1/1/2011', periods=10)
self.frame = SparseDataFrame(self.data, index=self.dates)
self.iframe = SparseDataFrame(self.data, index=self.dates,
default_kind='integer')
values = self.frame.values.copy()
values[np.isnan(values)] = 0
self.zframe = SparseDataFrame(values, columns=['A', 'B', 'C', 'D'],
default_fill_value=0,
index=self.dates)
values = self.frame.values.copy()
values[np.isnan(values)] = 2
self.fill_frame = SparseDataFrame(values, columns=['A', 'B', 'C', 'D'],
default_fill_value=2,
index=self.dates)
self.empty = SparseDataFrame()
def test_as_matrix(self):
empty = self.empty.as_matrix()
self.assertEqual(empty.shape, (0, 0))
no_cols = SparseDataFrame(index=np.arange(10))
mat = no_cols.as_matrix()
self.assertEqual(mat.shape, (10, 0))
no_index = SparseDataFrame(columns=np.arange(10))
mat = no_index.as_matrix()
self.assertEqual(mat.shape, (0, 10))
def test_copy(self):
cp = self.frame.copy()
tm.assert_isinstance(cp, SparseDataFrame)
assert_sp_frame_equal(cp, self.frame)
# as of v0.15.0
        # this is now identical (but not the same object)
self.assertTrue(cp.index.identical(self.frame.index))
def test_constructor(self):
for col, series in compat.iteritems(self.frame):
tm.assert_isinstance(series, SparseSeries)
tm.assert_isinstance(self.iframe['A'].sp_index, IntIndex)
# constructed zframe from matrix above
self.assertEqual(self.zframe['A'].fill_value, 0)
assert_almost_equal([0, 0, 0, 0, 1, 2, 3, 4, 5, 6],
self.zframe['A'].values)
# construct no data
sdf = SparseDataFrame(columns=np.arange(10), index=np.arange(10))
for col, series in compat.iteritems(sdf):
tm.assert_isinstance(series, SparseSeries)
# construct from nested dict
data = {}
for c, s in compat.iteritems(self.frame):
data[c] = s.to_dict()
sdf = SparseDataFrame(data)
assert_sp_frame_equal(sdf, self.frame)
# TODO: test data is copied from inputs
# init dict with different index
idx = self.frame.index[:5]
cons = SparseDataFrame(self.frame, index=idx,
columns=self.frame.columns,
default_fill_value=self.frame.default_fill_value,
default_kind=self.frame.default_kind,
copy=True)
reindexed = self.frame.reindex(idx)
assert_sp_frame_equal(cons, reindexed, exact_indices=False)
# assert level parameter breaks reindex
self.assertRaises(TypeError, self.frame.reindex, idx, level=0)
repr(self.frame)
def test_constructor_ndarray(self):
# no index or columns
sp = SparseDataFrame(self.frame.values)
# 1d
sp = SparseDataFrame(self.data['A'], index=self.dates,
columns=['A'])
assert_sp_frame_equal(sp, self.frame.reindex(columns=['A']))
# raise on level argument
self.assertRaises(TypeError, self.frame.reindex, columns=['A'],
level=1)
# wrong length index / columns
assertRaisesRegexp(
ValueError, "^Index length", SparseDataFrame, self.frame.values,
index=self.frame.index[:-1])
assertRaisesRegexp(
ValueError, "^Column length", SparseDataFrame, self.frame.values,
columns=self.frame.columns[:-1])
def test_constructor_empty(self):
sp = SparseDataFrame()
self.assertEqual(len(sp.index), 0)
self.assertEqual(len(sp.columns), 0)
def test_constructor_dataframe(self):
dense = self.frame.to_dense()
sp = SparseDataFrame(dense)
assert_sp_frame_equal(sp, self.frame)
def test_constructor_convert_index_once(self):
arr = np.array([1.5, 2.5, 3.5])
sdf = SparseDataFrame(columns=lrange(4), index=arr)
self.assertTrue(sdf[0].index is sdf[1].index)
def test_constructor_from_series(self):
# GH 2873
x = Series(np.random.randn(10000), name='a')
x = x.to_sparse(fill_value=0)
tm.assert_isinstance(x,SparseSeries)
df = SparseDataFrame(x)
tm.assert_isinstance(df,SparseDataFrame)
x = Series(np.random.randn(10000), name='a')
y = Series(np.random.randn(10000), name='b')
x2 = x.astype(float)
x2.ix[:9998] = np.NaN
x_sparse = x2.to_sparse(fill_value=np.NaN)
# Currently fails too with weird ufunc error
# df1 = SparseDataFrame([x_sparse, y])
y.ix[:9998] = 0
y_sparse = y.to_sparse(fill_value=0)
# without sparse value raises error
# df2 = SparseDataFrame([x2_sparse, y])
def test_dtypes(self):
df = DataFrame(np.random.randn(10000, 4))
df.ix[:9998] = np.nan
sdf = df.to_sparse()
result = sdf.get_dtype_counts()
expected = Series({'float64': 4})
assert_series_equal(result, expected)
def test_str(self):
df = DataFrame(np.random.randn(10000, 4))
df.ix[:9998] = np.nan
sdf = df.to_sparse()
str(sdf)
def test_array_interface(self):
res = np.sqrt(self.frame)
dres = np.sqrt(self.frame.to_dense())
assert_frame_equal(res.to_dense(), dres)
def test_pickle(self):
def _test_roundtrip(frame):
result = self.round_trip_pickle(frame)
assert_sp_frame_equal(frame, result)
_test_roundtrip(SparseDataFrame())
self._check_all(_test_roundtrip)
def test_dense_to_sparse(self):
df = DataFrame({'A': [nan, nan, nan, 1, 2],
'B': [1, 2, nan, nan, nan]})
sdf = df.to_sparse()
tm.assert_isinstance(sdf, SparseDataFrame)
self.assertTrue(np.isnan(sdf.default_fill_value))
tm.assert_isinstance(sdf['A'].sp_index, BlockIndex)
tm.assert_frame_equal(sdf.to_dense(), df)
sdf = df.to_sparse(kind='integer')
tm.assert_isinstance(sdf['A'].sp_index, IntIndex)
df = DataFrame({'A': [0, 0, 0, 1, 2],
'B': [1, 2, 0, 0, 0]}, dtype=float)
sdf = df.to_sparse(fill_value=0)
self.assertEqual(sdf.default_fill_value, 0)
tm.assert_frame_equal(sdf.to_dense(), df)
def test_density(self):
df = SparseSeries([nan, nan, nan, 0, 1, 2, 3, 4, 5, 6])
self.assertEqual(df.density, 0.7)
def test_sparse_to_dense(self):
pass
def test_sparse_series_ops(self):
import sys
buf = StringIO()
tmp = sys.stderr
sys.stderr = buf
try:
self._check_frame_ops(self.frame)
finally:
sys.stderr = tmp
def test_sparse_series_ops_i(self):
import sys
buf = StringIO()
tmp = sys.stderr
sys.stderr = buf
try:
self._check_frame_ops(self.iframe)
finally:
sys.stderr = tmp
def test_sparse_series_ops_z(self):
import sys
buf = StringIO()
tmp = sys.stderr
sys.stderr = buf
try:
self._check_frame_ops(self.zframe)
finally:
sys.stderr = tmp
def test_sparse_series_ops_fill(self):
import sys
buf = StringIO()
tmp = sys.stderr
sys.stderr = buf
try:
self._check_frame_ops(self.fill_frame)
finally:
sys.stderr = tmp
def _check_frame_ops(self, frame):
fill = frame.default_fill_value
def _compare_to_dense(a, b, da, db, op):
sparse_result = op(a, b)
dense_result = op(da, db)
dense_result = dense_result.to_sparse(fill_value=fill)
assert_sp_frame_equal(sparse_result, dense_result,
exact_indices=False)
if isinstance(a, DataFrame) and isinstance(db, DataFrame):
mixed_result = op(a, db)
tm.assert_isinstance(mixed_result, SparseDataFrame)
assert_sp_frame_equal(mixed_result, sparse_result,
exact_indices=False)
opnames = ['add', 'sub', 'mul', 'truediv', 'floordiv']
ops = [getattr(operator, name) for name in opnames]
fidx = frame.index
# time series operations
series = [frame['A'], frame['B'],
frame['C'], frame['D'],
frame['A'].reindex(fidx[:7]),
frame['A'].reindex(fidx[::2]),
SparseSeries([], index=[])]
for op in ops:
_compare_to_dense(frame, frame[::2], frame.to_dense(),
frame[::2].to_dense(), op)
for i, s in enumerate(series):
_compare_to_dense(frame, s, frame.to_dense(),
s.to_dense(), op)
_compare_to_dense(s, frame, s.to_dense(),
frame.to_dense(), op)
# cross-sectional operations
series = [frame.xs(fidx[0]),
frame.xs(fidx[3]),
frame.xs(fidx[5]),
frame.xs(fidx[7]),
frame.xs(fidx[5])[:2]]
for op in ops:
for s in series:
_compare_to_dense(frame, s, frame.to_dense(),
s, op)
_compare_to_dense(s, frame, s,
frame.to_dense(), op)
# it works!
result = self.frame + self.frame.ix[:, ['A', 'B']]
def test_op_corners(self):
empty = self.empty + self.empty
self.assertTrue(empty.empty)
foo = self.frame + self.empty
tm.assert_isinstance(foo.index, DatetimeIndex)
assert_frame_equal(foo, self.frame * np.nan)
foo = self.empty + self.frame
assert_frame_equal(foo, self.frame * np.nan)
def test_scalar_ops(self):
pass
def test_getitem(self):
# 1585 select multiple columns
sdf = SparseDataFrame(index=[0, 1, 2], columns=['a', 'b', 'c'])
result = sdf[['a', 'b']]
exp = sdf.reindex(columns=['a', 'b'])
assert_sp_frame_equal(result, exp)
self.assertRaises(Exception, sdf.__getitem__, ['a', 'd'])
def test_icol(self):
# 2227
result = self.frame.icol(0)
self.assertTrue(isinstance(result, SparseSeries))
assert_sp_series_equal(result, self.frame['A'])
# preserve sparse index type. #2251
data = {'A': [0, 1]}
iframe = SparseDataFrame(data, default_kind='integer')
self.assertEqual(type(iframe['A'].sp_index),
type(iframe.icol(0).sp_index))
def test_set_value(self):
        # ok, as the index gets converted to object
frame = self.frame.copy()
res = frame.set_value('foobar', 'B', 1.5)
self.assertEqual(res.index.dtype, 'object')
res = self.frame
res.index = res.index.astype(object)
res = self.frame.set_value('foobar', 'B', 1.5)
self.assertIsNot(res, self.frame)
self.assertEqual(res.index[-1], 'foobar')
self.assertEqual(res.get_value('foobar', 'B'), 1.5)
res2 = res.set_value('foobar', 'qux', 1.5)
self.assertIsNot(res2, res)
self.assert_numpy_array_equal(res2.columns,
list(self.frame.columns) + ['qux'])
self.assertEqual(res2.get_value('foobar', 'qux'), 1.5)
def test_fancy_index_misc(self):
# axis = 0
sliced = self.frame.ix[-2:, :]
expected = self.frame.reindex(index=self.frame.index[-2:])
assert_sp_frame_equal(sliced, expected)
# axis = 1
sliced = self.frame.ix[:, -2:]
expected = self.frame.reindex(columns=self.frame.columns[-2:])
assert_sp_frame_equal(sliced, expected)
def test_getitem_overload(self):
# slicing
sl = self.frame[:20]
assert_sp_frame_equal(sl, self.frame.reindex(self.frame.index[:20]))
# boolean indexing
d = self.frame.index[5]
indexer = self.frame.index > d
subindex = self.frame.index[indexer]
subframe = self.frame[indexer]
self.assert_numpy_array_equal(subindex, subframe.index)
self.assertRaises(Exception, self.frame.__getitem__, indexer[:-1])
def test_setitem(self):
def _check_frame(frame):
N = len(frame)
# insert SparseSeries
frame['E'] = frame['A']
tm.assert_isinstance(frame['E'], SparseSeries)
assert_sp_series_equal(frame['E'], frame['A'])
# insert SparseSeries differently-indexed
to_insert = frame['A'][::2]
frame['E'] = to_insert
expected = to_insert.to_dense().reindex(
frame.index).fillna(to_insert.fill_value)
assert_series_equal(frame['E'].to_dense(), expected)
# insert Series
frame['F'] = frame['A'].to_dense()
tm.assert_isinstance(frame['F'], SparseSeries)
assert_sp_series_equal(frame['F'], frame['A'])
# insert Series differently-indexed
to_insert = frame['A'].to_dense()[::2]
frame['G'] = to_insert
expected = to_insert.reindex(
frame.index).fillna(frame.default_fill_value)
assert_series_equal(frame['G'].to_dense(), expected)
# insert ndarray
frame['H'] = np.random.randn(N)
tm.assert_isinstance(frame['H'], SparseSeries)
to_sparsify = np.random.randn(N)
to_sparsify[N // 2:] = frame.default_fill_value
frame['I'] = to_sparsify
self.assertEqual(len(frame['I'].sp_values), N // 2)
# insert ndarray wrong size
self.assertRaises(Exception, frame.__setitem__, 'foo',
np.random.randn(N - 1))
# scalar value
frame['J'] = 5
self.assertEqual(len(frame['J'].sp_values), N)
self.assertTrue((frame['J'].sp_values == 5).all())
frame['K'] = frame.default_fill_value
self.assertEqual(len(frame['K'].sp_values), 0)
self._check_all(_check_frame)
def test_setitem_corner(self):
self.frame['a'] = self.frame['B']
assert_sp_series_equal(self.frame['a'], self.frame['B'])
def test_setitem_array(self):
arr = self.frame['B']
self.frame['E'] = arr
assert_sp_series_equal(self.frame['E'], self.frame['B'])
self.frame['F'] = arr[:-1]
index = self.frame.index[:-1]
assert_sp_series_equal(
self.frame['E'].reindex(index), self.frame['F'].reindex(index))
def test_delitem(self):
A = self.frame['A']
C = self.frame['C']
del self.frame['B']
self.assertNotIn('B', self.frame)
assert_sp_series_equal(self.frame['A'], A)
assert_sp_series_equal(self.frame['C'], C)
del self.frame['D']
self.assertNotIn('D', self.frame)
del self.frame['A']
self.assertNotIn('A', self.frame)
def test_set_columns(self):
self.frame.columns = self.frame.columns
self.assertRaises(Exception, setattr, self.frame, 'columns',
self.frame.columns[:-1])
def test_set_index(self):
self.frame.index = self.frame.index
self.assertRaises(Exception, setattr, self.frame, 'index',
self.frame.index[:-1])
def test_append(self):
a = self.frame[:5]
b = self.frame[5:]
appended = a.append(b)
assert_sp_frame_equal(appended, self.frame, exact_indices=False)
a = self.frame.ix[:5, :3]
b = self.frame.ix[5:]
appended = a.append(b)
assert_sp_frame_equal(
appended.ix[:, :3], self.frame.ix[:, :3], exact_indices=False)
def test_apply(self):
applied = self.frame.apply(np.sqrt)
tm.assert_isinstance(applied, SparseDataFrame)
assert_almost_equal(applied.values, np.sqrt(self.frame.values))
applied = self.fill_frame.apply(np.sqrt)
self.assertEqual(applied['A'].fill_value, np.sqrt(2))
# agg / broadcast
broadcasted = self.frame.apply(np.sum, broadcast=True)
tm.assert_isinstance(broadcasted, SparseDataFrame)
assert_frame_equal(broadcasted.to_dense(),
self.frame.to_dense().apply(np.sum, broadcast=True))
self.assertIs(self.empty.apply(np.sqrt), self.empty)
from pandas.core import nanops
applied = self.frame.apply(np.sum)
assert_series_equal(applied,
self.frame.to_dense().apply(nanops.nansum))
def test_apply_nonuq(self):
df_orig = DataFrame(
[[1, 2, 3], [4, 5, 6], [7, 8, 9]], index=['a', 'a', 'c'])
df = df_orig.to_sparse()
rs = df.apply(lambda s: s[0], axis=1)
xp = Series([1., 4., 7.], ['a', 'a', 'c'])
assert_series_equal(rs, xp)
# df.T breaks
df = df_orig.T.to_sparse()
rs = df.apply(lambda s: s[0], axis=0)
# no non-unique columns supported in sparse yet
# assert_series_equal(rs, xp)
def test_applymap(self):
# just test that it works
result = self.frame.applymap(lambda x: x * 2)
tm.assert_isinstance(result, SparseDataFrame)
def test_astype(self):
self.assertRaises(Exception, self.frame.astype, np.int64)
def test_fillna(self):
df = self.zframe.reindex(lrange(5))
result = df.fillna(0)
expected = df.to_dense().fillna(0).to_sparse(fill_value=0)
assert_sp_frame_equal(result, expected, exact_indices=False)
result = df.copy()
result.fillna(0, inplace=True)
expected = df.to_dense().fillna(0).to_sparse(fill_value=0)
assert_sp_frame_equal(result, expected, exact_indices=False)
result = df.copy()
result = df['A']
result.fillna(0, inplace=True)
assert_series_equal(result, df['A'].fillna(0))
def test_rename(self):
# just check this works
renamed = self.frame.rename(index=str)
renamed = self.frame.rename(columns=lambda x: '%s%d' % (x, len(x)))
def test_corr(self):
res = self.frame.corr()
assert_frame_equal(res, self.frame.to_dense().corr())
def test_describe(self):
self.frame['foo'] = np.nan
self.frame.get_dtype_counts()
str(self.frame)
desc = self.frame.describe()
def test_join(self):
left = self.frame.ix[:, ['A', 'B']]
right = self.frame.ix[:, ['C', 'D']]
joined = left.join(right)
assert_sp_frame_equal(joined, self.frame, exact_indices=False)
right = self.frame.ix[:, ['B', 'D']]
self.assertRaises(Exception, left.join, right)
with tm.assertRaisesRegexp(ValueError, 'Other Series must have a name'):
self.frame.join(Series(np.random.randn(len(self.frame)),
index=self.frame.index))
def test_reindex(self):
def _check_frame(frame):
index = frame.index
sidx = index[::2]
sidx2 = index[:5]
sparse_result = frame.reindex(sidx)
dense_result = frame.to_dense().reindex(sidx)
assert_frame_equal(sparse_result.to_dense(), dense_result)
assert_frame_equal(frame.reindex(list(sidx)).to_dense(),
dense_result)
sparse_result2 = sparse_result.reindex(index)
dense_result2 = dense_result.reindex(
index).fillna(frame.default_fill_value)
assert_frame_equal(sparse_result2.to_dense(), dense_result2)
# propagate CORRECT fill value
assert_almost_equal(sparse_result.default_fill_value,
frame.default_fill_value)
assert_almost_equal(sparse_result['A'].fill_value,
frame['A'].fill_value)
# length zero
length_zero = frame.reindex([])
self.assertEqual(len(length_zero), 0)
self.assertEqual(len(length_zero.columns), len(frame.columns))
self.assertEqual(len(length_zero['A']), 0)
# frame being reindexed has length zero
length_n = length_zero.reindex(index)
self.assertEqual(len(length_n), len(frame))
self.assertEqual(len(length_n.columns), len(frame.columns))
self.assertEqual(len(length_n['A']), len(frame))
# reindex columns
reindexed = frame.reindex(columns=['A', 'B', 'Z'])
self.assertEqual(len(reindexed.columns), 3)
assert_almost_equal(reindexed['Z'].fill_value,
frame.default_fill_value)
self.assertTrue(np.isnan(reindexed['Z'].sp_values).all())
_check_frame(self.frame)
_check_frame(self.iframe)
_check_frame(self.zframe)
_check_frame(self.fill_frame)
# with copy=False
reindexed = self.frame.reindex(self.frame.index, copy=False)
reindexed['F'] = reindexed['A']
self.assertIn('F', self.frame)
reindexed = self.frame.reindex(self.frame.index)
reindexed['G'] = reindexed['A']
self.assertNotIn('G', self.frame)
def test_reindex_fill_value(self):
rng = bdate_range('20110110', periods=20)
result = self.zframe.reindex(rng, fill_value=0)
expected = self.zframe.reindex(rng).fillna(0)
assert_sp_frame_equal(result, expected)
def test_take(self):
result = self.frame.take([1, 0, 2], axis=1)
expected = self.frame.reindex(columns=['B', 'A', 'C'])
assert_sp_frame_equal(result, expected)
def test_density(self):
df = SparseDataFrame({'A': [nan, nan, nan, 0, 1, 2, 3, 4, 5, 6],
'B': [0, 1, 2, nan, nan, nan, 3, 4, 5, 6],
'C': np.arange(10),
'D': [0, 1, 2, 3, 4, 5, nan, nan, nan, nan]})
self.assertEqual(df.density, 0.75)
def test_to_dense(self):
def _check(frame):
dense_dm = frame.to_dense()
assert_frame_equal(frame, dense_dm)
self._check_all(_check)
def test_stack_sparse_frame(self):
def _check(frame):
dense_frame = frame.to_dense()
wp = Panel.from_dict({'foo': frame})
from_dense_lp = wp.to_frame()
from_sparse_lp = spf.stack_sparse_frame(frame)
self.assert_numpy_array_equal(from_dense_lp.values,
from_sparse_lp.values)
_check(self.frame)
_check(self.iframe)
# for now
self.assertRaises(Exception, _check, self.zframe)
self.assertRaises(Exception, _check, self.fill_frame)
def test_transpose(self):
def _check(frame):
transposed = frame.T
untransposed = transposed.T
assert_sp_frame_equal(frame, untransposed)
self._check_all(_check)
def test_shift(self):
def _check(frame):
shifted = frame.shift(0)
assert_sp_frame_equal(shifted, frame)
f = lambda s: s.shift(1)
_dense_frame_compare(frame, f)
f = lambda s: s.shift(-2)
_dense_frame_compare(frame, f)
f = lambda s: s.shift(2, freq='B')
_dense_frame_compare(frame, f)
f = lambda s: s.shift(2, freq=datetools.bday)
_dense_frame_compare(frame, f)
self._check_all(_check)
def test_count(self):
result = self.frame.count()
dense_result = self.frame.to_dense().count()
assert_series_equal(result, dense_result)
result = self.frame.count(1)
dense_result = self.frame.to_dense().count(1)
        # on win32, don't check the dtype
assert_series_equal(result, dense_result, check_dtype=False)
def test_cumsum(self):
result = self.frame.cumsum()
expected = self.frame.to_dense().cumsum()
tm.assert_isinstance(result, SparseDataFrame)
assert_frame_equal(result.to_dense(), expected)
def _check_all(self, check_func):
check_func(self.frame)
check_func(self.iframe)
check_func(self.zframe)
check_func(self.fill_frame)
def test_combine_first(self):
df = self.frame
result = df[::2].combine_first(df)
result2 = df[::2].combine_first(df.to_dense())
expected = df[::2].to_dense().combine_first(df.to_dense())
expected = expected.to_sparse(fill_value=df.default_fill_value)
assert_sp_frame_equal(result, result2)
assert_sp_frame_equal(result, expected)
def test_combine_add(self):
df = self.frame.to_dense()
df2 = df.copy()
df2['C'][:3] = np.nan
df['A'][:3] = 5.7
result = df.to_sparse().add(df2.to_sparse(), fill_value=0)
expected = df.add(df2, fill_value=0).to_sparse()
assert_sp_frame_equal(result, expected)
def test_isin(self):
sparse_df = DataFrame({'flag': [1., 0., 1.]}).to_sparse(fill_value=0.)
xp = sparse_df[sparse_df.flag == 1.]
rs = sparse_df[sparse_df.flag.isin([1.])]
assert_frame_equal(xp, rs)
def test_sparse_pow_issue(self):
# 2220
df = SparseDataFrame({'A': [1.1, 3.3], 'B': [2.5, -3.9]})
# note : no error without nan
df = SparseDataFrame({'A': [nan, 0, 1]})
# note that 2 ** df works fine, also df ** 1
result = 1 ** df
r1 = result.take([0], 1)['A']
r2 = result['A']
self.assertEqual(len(r2.sp_values), len(r1.sp_values))
def test_as_blocks(self):
df = SparseDataFrame({'A': [1.1, 3.3], 'B': [nan, -3.9]},
dtype='float64')
df_blocks = df.blocks
self.assertEqual(list(df_blocks.keys()), ['float64'])
assert_frame_equal(df_blocks['float64'], df)
def _dense_series_compare(s, f):
result = f(s)
assert(isinstance(result, SparseSeries))
dense_result = f(s.to_dense())
assert_series_equal(result.to_dense(), dense_result)
def _dense_frame_compare(frame, f):
result = f(frame)
    assert(isinstance(result, SparseDataFrame))  # check the transformed result, not the input
dense_result = f(frame.to_dense()).fillna(frame.default_fill_value)
assert_frame_equal(result.to_dense(), dense_result)
def panel_data1():
index = bdate_range('1/1/2011', periods=8)
return DataFrame({
'A': [nan, nan, nan, 0, 1, 2, 3, 4],
'B': [0, 1, 2, 3, 4, nan, nan, nan],
'C': [0, 1, 2, nan, nan, nan, 3, 4],
'D': [nan, 0, 1, nan, 2, 3, 4, nan]
}, index=index)
def panel_data2():
index = bdate_range('1/1/2011', periods=9)
return DataFrame({
'A': [nan, nan, nan, 0, 1, 2, 3, 4, 5],
'B': [0, 1, 2, 3, 4, 5, nan, nan, nan],
'C': [0, 1, 2, nan, nan, nan, 3, 4, 5],
'D': [nan, 0, 1, nan, 2, 3, 4, 5, nan]
}, index=index)
def panel_data3():
index = bdate_range('1/1/2011', periods=10).shift(-2)
return DataFrame({
'A': [nan, nan, nan, 0, 1, 2, 3, 4, 5, 6],
'B': [0, 1, 2, 3, 4, 5, 6, nan, nan, nan],
'C': [0, 1, 2, nan, nan, nan, 3, 4, 5, 6],
'D': [nan, 0, 1, nan, 2, 3, 4, 5, 6, nan]
}, index=index)
class TestSparsePanel(tm.TestCase,
test_panel.SafeForLongAndSparse,
test_panel.SafeForSparse):
_multiprocess_can_split_ = True
@classmethod
def assert_panel_equal(cls, x, y):
assert_sp_panel_equal(x, y)
def setUp(self):
self.data_dict = {
'ItemA': panel_data1(),
'ItemB': panel_data2(),
'ItemC': panel_data3(),
'ItemD': panel_data1(),
}
self.panel = SparsePanel(self.data_dict)
@staticmethod
def _test_op(panel, op):
# arithmetic tests
result = op(panel, 1)
assert_sp_frame_equal(result['ItemA'], op(panel['ItemA'], 1))
def test_constructor(self):
self.assertRaises(ValueError, SparsePanel, self.data_dict,
items=['Item0', 'ItemA', 'ItemB'])
with tm.assertRaisesRegexp(TypeError,
"input must be a dict, a 'list' was passed"):
SparsePanel(['a', 'b', 'c'])
def test_from_dict(self):
fd = SparsePanel.from_dict(self.data_dict)
assert_sp_panel_equal(fd, self.panel)
def test_pickle(self):
def _test_roundtrip(panel):
result = self.round_trip_pickle(panel)
tm.assert_isinstance(result.items, Index)
tm.assert_isinstance(result.major_axis, Index)
tm.assert_isinstance(result.minor_axis, Index)
assert_sp_panel_equal(panel, result)
_test_roundtrip(self.panel)
def test_dense_to_sparse(self):
wp = Panel.from_dict(self.data_dict)
dwp = wp.to_sparse()
tm.assert_isinstance(dwp['ItemA']['A'], SparseSeries)
def test_to_dense(self):
dwp = self.panel.to_dense()
dwp2 = Panel.from_dict(self.data_dict)
assert_panel_equal(dwp, dwp2)
def test_to_frame(self):
def _compare_with_dense(panel):
slp = panel.to_frame()
dlp = panel.to_dense().to_frame()
self.assert_numpy_array_equal(slp.values, dlp.values)
self.assertTrue(slp.index.equals(dlp.index))
_compare_with_dense(self.panel)
_compare_with_dense(self.panel.reindex(items=['ItemA']))
zero_panel = SparsePanel(self.data_dict, default_fill_value=0)
self.assertRaises(Exception, zero_panel.to_frame)
self.assertRaises(Exception, self.panel.to_frame,
filter_observations=False)
def test_long_to_wide_sparse(self):
pass
def test_values(self):
pass
def test_setitem(self):
self.panel['ItemE'] = self.panel['ItemC']
self.panel['ItemF'] = self.panel['ItemC'].to_dense()
assert_sp_frame_equal(self.panel['ItemE'], self.panel['ItemC'])
assert_sp_frame_equal(self.panel['ItemF'], self.panel['ItemC'])
assert_almost_equal(self.panel.items, ['ItemA', 'ItemB', 'ItemC',
'ItemD', 'ItemE', 'ItemF'])
self.assertRaises(Exception, self.panel.__setitem__, 'item6', 1)
def test_set_value(self):
def _check_loc(item, major, minor, val=1.5):
res = self.panel.set_value(item, major, minor, val)
self.assertIsNot(res, self.panel)
self.assertEqual(res.get_value(item, major, minor), val)
_check_loc('ItemA', self.panel.major_axis[4], self.panel.minor_axis[3])
_check_loc('ItemF', self.panel.major_axis[4], self.panel.minor_axis[3])
_check_loc('ItemF', 'foo', self.panel.minor_axis[3])
_check_loc('ItemE', 'foo', 'bar')
def test_delitem_pop(self):
del self.panel['ItemB']
assert_almost_equal(self.panel.items, ['ItemA', 'ItemC', 'ItemD'])
crackle = self.panel['ItemC']
pop = self.panel.pop('ItemC')
self.assertIs(pop, crackle)
assert_almost_equal(self.panel.items, ['ItemA', 'ItemD'])
self.assertRaises(KeyError, self.panel.__delitem__, 'ItemC')
def test_copy(self):
cop = self.panel.copy()
assert_sp_panel_equal(cop, self.panel)
def test_reindex(self):
def _compare_with_dense(swp, items, major, minor):
swp_re = swp.reindex(items=items, major=major,
minor=minor)
dwp_re = swp.to_dense().reindex(items=items, major=major,
minor=minor)
assert_panel_equal(swp_re.to_dense(), dwp_re)
_compare_with_dense(self.panel, self.panel.items[:2],
self.panel.major_axis[::2],
self.panel.minor_axis[::2])
_compare_with_dense(self.panel, None,
self.panel.major_axis[::2],
self.panel.minor_axis[::2])
self.assertRaises(ValueError, self.panel.reindex)
# TODO: do something about this later...
self.assertRaises(Exception, self.panel.reindex,
items=['item0', 'ItemA', 'ItemB'])
# test copying
cp = self.panel.reindex(self.panel.major_axis, copy=True)
cp['ItemA']['E'] = cp['ItemA']['A']
self.assertNotIn('E', self.panel['ItemA'])
def test_operators(self):
def _check_ops(panel):
def _dense_comp(op):
dense = panel.to_dense()
sparse_result = op(panel)
dense_result = op(dense)
assert_panel_equal(sparse_result.to_dense(), dense_result)
def _mixed_comp(op):
result = op(panel, panel.to_dense())
expected = op(panel.to_dense(), panel.to_dense())
assert_panel_equal(result, expected)
op1 = lambda x: x + 2
_dense_comp(op1)
op2 = lambda x: x.add(x.reindex(major=x.major_axis[::2]))
_dense_comp(op2)
op3 = lambda x: x.subtract(x.mean(0), axis=0)
_dense_comp(op3)
op4 = lambda x: x.subtract(x.mean(1), axis=1)
_dense_comp(op4)
op5 = lambda x: x.subtract(x.mean(2), axis=2)
_dense_comp(op5)
_mixed_comp(Panel.multiply)
_mixed_comp(Panel.subtract)
# TODO: this case not yet supported!
# op6 = lambda x: x.add(x.to_frame())
# _dense_comp(op6)
_check_ops(self.panel)
def test_major_xs(self):
def _dense_comp(sparse):
dense = sparse.to_dense()
for idx in sparse.major_axis:
dslice = dense.major_xs(idx)
sslice = sparse.major_xs(idx)
assert_frame_equal(dslice, sslice)
_dense_comp(self.panel)
def test_minor_xs(self):
def _dense_comp(sparse):
dense = sparse.to_dense()
for idx in sparse.minor_axis:
dslice = dense.minor_xs(idx)
sslice = sparse.minor_xs(idx).to_dense()
assert_frame_equal(dslice, sslice)
_dense_comp(self.panel)
if __name__ == '__main__':
import nose
nose.runmodule(argv=[__file__, '-vvs', '-x', '--pdb', '--pdb-failure'],
exit=False)
# nose.runmodule(argv=[__file__,'-vvs','-x','--pdb', '--pdb-failure',
# '--with-profile'],
# exit=False)
| dssg/wikienergy | disaggregator/build/pandas/pandas/sparse/tests/test_sparse.py | Python | mit | 60,675 |
# Copyright (C) 2011 Jason Anderson
#
#
# This file is part of PseudoTV.
#
# PseudoTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PseudoTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PseudoTV. If not, see <http://www.gnu.org/licenses/>.
import xbmc
import os, struct
from resources.lib.Globals import ascii
from resources.lib.FileAccess import FileAccess
class FLVTagHeader:
def __init__(self):
self.tagtype = 0
self.datasize = 0
self.timestamp = 0
self.timestampext = 0
def readHeader(self, thefile):
try:
data = struct.unpack('B', thefile.read(1))[0]
self.tagtype = (data & 0x1F)
self.datasize = struct.unpack('>H', thefile.read(2))[0]
data = struct.unpack('>B', thefile.read(1))[0]
self.datasize = (self.datasize << 8) | data
self.timestamp = struct.unpack('>H', thefile.read(2))[0]
data = struct.unpack('>B', thefile.read(1))[0]
self.timestamp = (self.timestamp << 8) | data
self.timestampext = struct.unpack('>B', thefile.read(1))[0]
except:
self.tagtype = 0
self.datasize = 0
self.timestamp = 0
self.timestampext = 0
class FLVParser:
def log(self, msg, level = xbmc.LOGDEBUG):
xbmc.log('FLVParser: ' + ascii(msg), level)
def determineLength(self, filename):
self.log("determineLength " + filename)
try:
self.File = FileAccess.open(filename, "rb", None)
except:
self.log("Unable to open the file")
            return 0  # keep the failure return type consistent with the other paths below
if self.verifyFLV() == False:
self.log("Not a valid FLV")
self.File.close()
return 0
tagheader = self.findLastVideoTag()
if tagheader is None:
self.log("Unable to find a video tag")
self.File.close()
return 0
dur = self.getDurFromTag(tagheader)
self.File.close()
self.log("Duration: " + str(dur))
return dur
def verifyFLV(self):
data = self.File.read(3)
if data != 'FLV':
return False
return True
def findLastVideoTag(self):
try:
self.File.seek(0, 2)
curloc = self.File.tell()
except:
self.log("Exception seeking in findLastVideoTag")
return None
        # Go through a limited amount of the file before quitting
maximum = curloc - (2 * 1024 * 1024)
if maximum < 0:
maximum = 8
while curloc > maximum:
try:
self.File.seek(-4, 1)
data = int(struct.unpack('>I', self.File.read(4))[0])
if data < 1:
self.log('Invalid packet data')
return None
if curloc - data <= 0:
self.log('No video packet found')
return None
self.File.seek(-4 - data, 1)
curloc = curloc - data
tag = FLVTagHeader()
tag.readHeader(self.File)
if tag.datasize <= 0:
self.log('Invalid packet header')
return None
if curloc - 8 <= 0:
self.log('No video packet found')
return None
self.File.seek(-8, 1)
self.log("detected tag type " + str(tag.tagtype))
curloc = self.File.tell()
if tag.tagtype == 9:
return tag
except:
self.log('Exception in findLastVideoTag')
return None
return None
def getDurFromTag(self, tag):
tottime = tag.timestamp | (tag.timestampext << 24)
tottime = int(tottime / 1000)
return tottime
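# Editor's sketch: a pure-Python illustration of the duration math in
# getDurFromTag above. The FLV tag header carries a 24-bit millisecond
# timestamp plus an 8-bit extension byte; the values here are hypothetical
# and nothing below touches the xbmc/FileAccess dependencies.
def _demo_flv_duration():
    timestamp = 0x00FFFFFF                       # lower 24 bits read by readHeader
    timestampext = 0x01                          # extension byte forms bits 24-31
    tottime = timestamp | (timestampext << 24)   # 0x01FFFFFF milliseconds
    return int(tottime / 1000)                   # ~33554 seconds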
| yolanother/script.pseudotv.live | resources/lib/parsers/FLVParser.py | Python | gpl-3.0 | 4,336 |
"""distutils.command.bdist_dumb
Implements the Distutils 'bdist_dumb' command (create a "dumb" built
distribution -- i.e., just an archive to be unpacked under $prefix or
$exec_prefix)."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: bdist_dumb.py,v 1.25 2004/11/10 22:23:14 loewis Exp $"
import os
from distutils.core import Command
from distutils.util import get_platform
from distutils.dir_util import create_tree, remove_tree, ensure_relative
from distutils.errors import *
from distutils import log
class bdist_dumb (Command):
description = "create a \"dumb\" built distribution"
user_options = [('bdist-dir=', 'd',
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('format=', 'f',
"archive format to create (tar, ztar, gztar, zip)"),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('relative', None,
"build the archive using relative paths"
"(default: false)"),
]
boolean_options = ['keep-temp', 'skip-build', 'relative']
default_format = { 'posix': 'gztar',
'nt': 'zip',
'os2': 'zip' }
def initialize_options (self):
self.bdist_dir = None
self.plat_name = None
self.format = None
self.keep_temp = 0
self.dist_dir = None
self.skip_build = 0
self.relative = 0
# initialize_options()
def finalize_options (self):
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'dumb')
if self.format is None:
try:
self.format = self.default_format[os.name]
except KeyError:
raise DistutilsPlatformError, \
("don't know how to create dumb built distributions " +
"on platform %s") % os.name
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'))
# finalize_options()
def run (self):
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.root = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
log.info("installing to %s" % self.bdist_dir)
self.run_command('install')
# And make an archive relative to the root of the
# pseudo-installation tree.
archive_basename = "%s.%s" % (self.distribution.get_fullname(),
self.plat_name)
# OS/2 objects to any ":" characters in a filename (such as when
# a timestamp is used in a version) so change them to hyphens.
if os.name == "os2":
archive_basename = archive_basename.replace(":", "-")
pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
if not self.relative:
archive_root = self.bdist_dir
else:
if (self.distribution.has_ext_modules() and
(install.install_base != install.install_platbase)):
raise DistutilsPlatformError, \
("can't make a dumb built distribution where "
"base and platbase are different (%s, %s)"
% (repr(install.install_base),
repr(install.install_platbase)))
else:
archive_root = os.path.join(self.bdist_dir,
ensure_relative(install.install_base))
# Make the archive
self.make_archive(pseudoinstall_root,
self.format, root_dir=archive_root)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
# run()
# class bdist_dumb
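# Editor's note: a minimal usage sketch for this command, assuming a project
# with a working setup.py (the package itself is hypothetical):
#
#   python setup.py bdist_dumb --format=gztar --relative
#
# On posix systems the format defaults to 'gztar' via default_format[os.name].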
| trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9-SunOS-i386/lib/python/lib/python2.4/distutils/command/bdist_dumb.py | Python | gpl-2.0 | 4,572 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Nicolas Badoux <[email protected]>
#
# The licence is in the file __manifest__.py
#
##############################################################################
def migrate(cr, version):
if not version:
return
# Remove old view
cr.execute("""
DELETE FROM ir_ui_view WHERE arch_db LIKE '%ambassador_quote%'
AND model='res.partner'
""")
| ecino/compassion-switzerland | muskathlon/migrations/10.0.2.0.0/pre-migration.py | Python | agpl-3.0 | 623 |
#From https://gist.github.com/EndingCredits/b5f35e84df10d46cfa716178d9c862a3
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hyperchamber as hc
import numpy as np
import inspect
from operator import itemgetter
import tensorflow as tf
from hypergan.train_hooks.base_train_hook import BaseTrainHook
class RollingMemoryTrainHook(BaseTrainHook):
"Keeps a rolling memory of the best scoring discriminator samples."
def __init__(self, gan=None, config=None, trainer=None, name="RollingMemoryTrainHook"):
super().__init__(config=config, gan=gan, trainer=trainer, name=name)
config = hc.Config(config)
s = self.gan.ops.shape(self.gan.generator.sample)
        self.shape = s  # [self.gan.batch_size() * (self.config.memory_size or 1), s[1], s[2], s[3]]
with tf.variable_scope((self.config.name or self.name), reuse=self.gan.reuse) as scope:
self.mx=tf.get_variable(self.gan.ops.generate_name()+"_dontsave", s, dtype=tf.float32,
initializer=tf.compat.v1.constant_initializer(-100), aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA, trainable=False)
self.mg=tf.get_variable(self.gan.ops.generate_name()+"_dontsave", s, dtype=tf.float32,
initializer=tf.compat.v1.constant_initializer(100), aggregation=tf.VariableAggregation.ONLY_FIRST_REPLICA, trainable=False)
self.m_discriminator = gan.create_component(gan.config.discriminator, name="discriminator", input=tf.concat([self.mx, self.mg],axis=0), features=[gan.features], reuse=True)
self.m_loss = gan.create_component(gan.config.loss, discriminator=self.m_discriminator)
swx = self.m_loss.d_real
swg = self.m_loss.d_fake
if self.config.reverse_mx:
swx = -swx
if self.config.reverse_mg:
swg = -swg
swx = tf.reshape(swx, [-1])
swg = tf.reshape(swg, [-1])
_, swx = tf.nn.top_k(swx, k=(self.config.top_k or 1), sorted=True, name=None)
_, swg = tf.nn.top_k(swg, k=(self.config.top_k or 1), sorted=True, name=None)
swx = tf.one_hot(swx, self.gan.batch_size(), dtype=tf.float32)
swg = tf.one_hot(swg, self.gan.batch_size(), dtype=tf.float32)
swx = tf.reduce_sum(swx, reduction_indices=0)
swg = tf.reduce_sum(swg, reduction_indices=0)
swx = tf.reshape(swx, [self.gan.batch_size(), 1, 1, 1])
swg = tf.reshape(swg, [self.gan.batch_size(), 1, 1, 1])
self.swx = swx
self.swg = swg
self.assign_mx = tf.assign(self.mx, self.gan.inputs.x * swx + (1.0 - swx) * self.mx)
self.assign_mg = tf.assign(self.mg, self.gan.generator.sample * swg + (1.0 - swg) * self.mg)
self.assign_ops = tf.group(*[self.assign_mx, self.assign_mg])
self.train_hook_index = len(trainer.train_hooks)
self.loss = [tf.zeros(1), tf.zeros(1)]
for _type in self.config.types or ['mx/mg']:
if _type == 'mg/g':
self.mg_discriminator = gan.create_component(gan.config.discriminator, name="discriminator", input=tf.concat([self.mg, self.gan.generator.sample],axis=0), features=[gan.features], reuse=True)
self.mg_loss = gan.create_component(gan.config.loss, discriminator=self.mg_discriminator)
self.gan.losses += [self.mg_loss]
self.loss[0] += (self.config.lam or 1.0) * self.mg_loss.sample[0]
self.loss[1] += (self.config.lam or 1.0) * self.mg_loss.sample[1]
self.gan.add_metric('roll_loss_mg/g', self.loss[0])
elif _type == 'mx/mg':
self.loss[0] += (self.config.lam or 1.0) * self.m_loss.sample[0]
self.loss[1] += (self.config.lam or 1.0) * self.m_loss.sample[1]
self.gan.add_metric('roll_loss_mx/mg', self.loss[0])
elif _type == 'mx/g':
self.mg_discriminator = gan.create_component(gan.config.discriminator, name="discriminator", input=tf.concat([self.mx, self.gan.generator.sample],axis=0), features=[gan.features], reuse=True)
self.mg_loss = gan.create_component(gan.config.loss, discriminator=self.mg_discriminator)
self.loss[0] += (self.config.lam or 1.0) * self.mg_loss.sample[0]
self.loss[1] += (self.config.lam or 1.0) * self.mg_loss.sample[1]
self.gan.add_metric('roll_loss_mx/g', self.loss[0])
elif _type == 'x/mg':
self.mg_discriminator = gan.create_component(gan.config.discriminator, name="discriminator", input=tf.concat([self.gan.inputs.x, self.mg],axis=0), features=[gan.features], reuse=True)
self.mg_loss = gan.create_component(gan.config.loss, discriminator=self.mg_discriminator)
self.loss[0] += (self.config.lam or 1.0) * self.mg_loss.sample[0]
self.loss[1] += (self.config.lam or 1.0) * self.mg_loss.sample[1]
self.gan.add_metric('roll_loss_x/mg', self.loss[0])
elif _type == 'x/mx':
                # 'x/mx' compares real inputs against the real-sample memory.
                self.mg_discriminator = gan.create_component(gan.config.discriminator, name="discriminator", input=tf.concat([self.gan.inputs.x, self.mx],axis=0), features=[gan.features], reuse=True)
self.mg_loss = gan.create_component(gan.config.loss, discriminator=self.mg_discriminator)
self.loss[0] += (self.config.lam or 1.0) * self.mg_loss.sample[0]
self.loss[1] += (self.config.lam or 1.0) * self.mg_loss.sample[1]
self.gan.add_metric('roll_loss_x/mx', self.loss[0])
def distributed_step(self, input_iterator_next):
def assign_mx(mx, inp, swx):
inp2 = self.gan.replica.inputs.x
op=self.gan.replica.trainer.train_hooks[self.train_hook_index].assign_mx
with tf.control_dependencies([op]):
return tf.no_op()
def assign_mg(mg, gen, swg):
op = self.gan.replica.trainer.train_hooks[self.train_hook_index].assign_mg
with tf.control_dependencies([op]):
return tf.no_op()
mxop = self.gan.distribution_strategy.extended.call_for_each_replica(assign_mx, args=(self.mx, input_iterator_next, self.swx,))
mgop = self.gan.distribution_strategy.extended.call_for_each_replica(assign_mg, args=(self.mg, self.gan.generator.sample, self.swg,))
return [mxop, mgop]
def distributed_debug(self):
mxop = self.gan.distribution_strategy.extended.read_var(self.mx)
mgop = self.gan.distribution_strategy.extended.read_var(self.mg)
return [mxop, mgop]
def distributed_initial_step(self, input_iterator_next):
def assign_mx(mx, inp):
return mx.assign(inp)
def assign_mg(mg, gen):
return mg.assign(gen)
mxop = self.gan.distribution_strategy.extended.call_for_each_replica(assign_mx, args=(self.mx,input_iterator_next,))
mgop = self.gan.distribution_strategy.extended.call_for_each_replica(assign_mg, args=(self.mg,self.gan.generator.sample,))
return [mxop, mgop]
def before_step(self, step, feed_dict):
if step == 0:
self.gan.session.run(tf.assign(self.mx, self.gan.inputs.x))
self.gan.session.run(tf.assign(self.mg, self.gan.generator.sample))
def after_step(self, step, feed_dict):
self.gan.session.run(self.assign_ops)
def variables(self):
return [self.mx, self.mg]
def losses(self):
return self.loss
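# Editor's sketch: the top_k -> one_hot -> reduce_sum chain above builds a
# per-example 0/1 mask so that only the best-scoring rows get swapped into
# the rolling memory. A NumPy stand-in using the numpy import at the top of
# this file (all values are illustrative):
def _demo_topk_mask():
    scores = np.array([0.1, 0.9, 0.4, 0.7])
    memory = np.zeros(4)
    batch = np.ones(4)
    k = 2
    mask = np.zeros_like(scores)
    mask[np.argsort(scores)[-k:]] = 1.0   # rows 1 and 3 hold the top-2 scores
    # Mirrors assign_mx/assign_mg: keep memory where mask==0, replace where 1.
    return batch * mask + (1.0 - mask) * memory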
| 255BITS/HyperGAN | hypergan/train_hooks/experimental/rolling_memory_train_hook.py | Python | mit | 7,133 |
# PyVision License
#
# Copyright (c) 2006-2009 David S. Bolme
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither name of copyright holders nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
'''
__author__ = "$Author$"
__version__ = "$Revision$"
# PIL Imports
import PIL.ImageDraw
import PIL.Image
import PIL.ImageColor # used by annotateRect's fill handling
from PIL.Image import BICUBIC, ANTIALIAS
import PIL.ImageFont as ImageFont
# Imaging imports
import numpy
import numpy as np
import cv
import cv2
import pyvision
import pyvision as pv
import cStringIO
import exif
import os
# iPython support for ipython notebook
try:
import pylab
import IPython
except:
pass # do nothing
TYPE_MATRIX_2D = "TYPE_MATRIX2D"
'''Image was created using a 2D "gray-scale" numpy array'''
TYPE_MATRIX_RGB = "TYPE_MATRIX_RGB"
'''Image was created using a 3D "color" numpy array'''
TYPE_PIL = "TYPE_PIL"
'''Image was created using a PIL image instance'''
TYPE_OPENCV = "TYPE_OPENCV"
'''Image was created using a OpenCV image instance'''
TYPE_OPENCV2 = "TYPE_OPENCV2"
'''Image was created using a OpenCV image instance'''
TYPE_OPENCV2BW = "TYPE_OPENCV2BW"
'''Image was created using a OpenCV image instance'''
LUMA = [0.299, 0.587, 0.114, 1.0]
'''Values used when converting color to gray-scale.'''
class Image:
'''
The primary purpose of the image class is to provide a structure that can
    transform an image back and forth between different python libraries such as
    U{PIL<http://www.pythonware.com/products/pil>},
    U{OpenCV <http://sourceforge.net/projects/opencvlibrary>}, and
    U{Scipy<http://www.scipy.org>} Images. This class also
allows some simple operations on the image such as annotation.
B{Note:} When working with images in matrix format, they are transposed such
that x = col and y = row. You can therefore still work with coords
    such that im[x,y] = mat[x,y].
Images have the following attributes:
- width = width of the image
- height = height of the image
- size = (width,height)
- channels = number of channels: 1(gray), 3(RGB)
- depth = bitdepth: 8(uchar), 32(float), 64(double)
'''
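    # Editor's quickstart sketch (the file path is hypothetical and not part
    # of the original source):
    #   im = pv.Image('face.jpg')                  # load from disk
    #   mat = im.asMatrix2D()                      # gray numpy array, mat[x,y]
    #   im.annotateLabel(pv.Point(10,10), "demo")  # draw on the annotation layer
    #   im.show(delay=30)                          # display via OpenCV highgui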
#------------------------------------------------------------------------
def __init__(self,data,bw_annotate=False):
'''
Create an image from a file or a PIL Image, OpenCV Image, or numpy array.
@param data: this can be a numpy array, PIL image, or opencv image.
@param bw_annotate: generate a black and white image to make color annotations show up better
@return: an Image object instance
'''
self.filename = None
self.pil = None
self.matrix2d = None
self.matrix3d = None
self.opencv = None
self.opencv2 = None
self.opencv2bw = None
self.annotated = None
self.bw_annotate = bw_annotate
# Convert floating point ipl images to numpy arrays
if isinstance(data,cv.iplimage) and data.nChannels == 3 and data.depth == 32:
w,h = cv.GetSize(data)
data = np.frombuffer(data.tostring(),dtype=np.float32)
data.shape = (h,w,3)
data = data.transpose((2,1,0))
data = data[::-1,:,:]
        # Convert single channel floating point ipl images to numpy arrays
        if isinstance(data,cv.iplimage) and data.nChannels == 1 and data.depth == 32:
            w,h = cv.GetSize(data)
            data = np.frombuffer(data.tostring(),dtype=np.float32)
            data.shape = (h,w)
            # A single channel image only needs to be transposed to (w,h);
            # there are no color channels to reorder.
            data = data.transpose()
# Numpy format
if isinstance(data,numpy.ndarray) and len(data.shape) == 2 and data.dtype != np.uint8:
self.type=TYPE_MATRIX_2D
self.matrix2d = data
self.width,self.height = self.matrix2d.shape
self.channels = 1
if self.matrix2d.dtype == numpy.float32:
self.depth=32
elif self.matrix2d.dtype == numpy.float64:
self.depth=64
else:
raise TypeError("Unsuppoted format for ndarray images: %s"%self.matrix2d.dtype)
# OpenCV2 gray scale format
elif isinstance(data,numpy.ndarray) and len(data.shape) == 2 and data.dtype == np.uint8:
self.type=TYPE_OPENCV2BW
self.opencv2bw = data
self.height,self.width = self.opencv2bw.shape
self.channels = 1
self.depth=8
# Numpy color format
elif isinstance(data,numpy.ndarray) and len(data.shape) == 3 and data.shape[0]==3 and data.dtype != np.uint8:
self.type=TYPE_MATRIX_RGB
self.matrix3d = data
self.channels=3
self.width = self.matrix3d.shape[1]
self.height = self.matrix3d.shape[2]
# set the types
if self.matrix3d.dtype == numpy.float32:
self.depth=32
elif self.matrix3d.dtype == numpy.float64:
self.depth=64
else:
raise TypeError("Unsuppoted format for ndarray images: %s"%self.matrix2d.dtype)
# OpenCV2 color format
elif isinstance(data,numpy.ndarray) and len(data.shape) == 3 and data.shape[2]==3 and data.dtype == np.uint8:
self.type=TYPE_OPENCV2
self.opencv2 = data
self.channels=3
self.width = self.opencv2.shape[1]
self.height = self.opencv2.shape[0]
self.depth=8
# Load as a pil image
elif isinstance(data,PIL.Image.Image) or type(data) == str:
if type(data) == str:
# Assume this is a filename
# TODO: Removing the filename causes errors in other unittest.
# Those errors should be corrected.
self.filename = data
data = PIL.Image.open(data)
self.type=TYPE_PIL
self.pil = data
self.width,self.height = self.pil.size
if self.pil.mode == 'L':
self.channels = 1
elif self.pil.mode == 'RGB':
self.channels = 3
#elif self.pil.mode == 'RGBA':
#
# self.pil = self.pil.convert('RGB')
# self.channels = 3
else:
                self.pil = self.pil.convert('RGB') # keep the converted image
self.channels = 3
# raise TypeError("Unsuppoted format for PIL images: %s"%self.pil.mode)
self.depth = 8
# opencv format
elif isinstance(data,cv.iplimage):
self.type=TYPE_OPENCV
self.opencv=data
self.width = data.width
self.height = data.height
assert data.nChannels in (1,3)
self.channels = data.nChannels
assert data.depth == 8
self.depth = data.depth
# unknown type
else:
raise TypeError("Could not create from type: %s %s"%(data,type(data)))
self.size = (self.width,self.height)
self.data = data
def asBW(self):
'''
@return: a gray-scale version of this pyvision image
'''
if self.matrix2d is None:
self._generateMatrix2D()
return Image(self.matrix2d)
def asMatrix2D(self):
'''
@return: the gray-scale image data as a two dimensional numpy array
'''
if self.matrix2d is None:
self._generateMatrix2D()
return self.matrix2d
def asMatrix3D(self):
'''
@return: color image data as a 3D array with shape (3(rgb),w,h)
'''
if self.matrix3d is None:
self._generateMatrix3D()
return self.matrix3d
def asPIL(self):
'''
@return: image data as a pil image
'''
if self.pil is None:
self._generatePIL()
return self.pil
def asOpenCV(self):
'''
@return: the image data in an OpenCV format
'''
if self.opencv is None:
self._generateOpenCV()
return self.opencv
def asOpenCV2(self):
'''
@return: the image data in an OpenCV format that is a numpy array of shape (h,w,3) of uint8
'''
if self.opencv2 is None:
self._generateOpenCV2()
return self.opencv2
def asOpenCV2BW(self):
'''
@return: the image data in an OpenCV format that is a numpy array of shape (h,w,1) of uint8
'''
if self.opencv2bw is None:
self._generateOpenCV2BW()
return self.opencv2bw
def asOpenCVBW(self):
'''
@return: the image data in an OpenCV one channel format
'''
cvim = self.asOpenCV()
if cvim.nChannels == 1:
return cvim
elif cvim.nChannels == 3:
cvimbw = cv.CreateImage(cv.GetSize(cvim), cv.IPL_DEPTH_8U, 1);
cv.CvtColor(cvim, cvimbw, cv.CV_BGR2GRAY);
return cvimbw
else:
raise ValueError("Unsupported opencv image format: nChannels=%d"%cvim.nChannels)
def asThermal(self,clip_negative=False):
'''
@returns: a thermal colored representation of this image.
'''
w,h = self.size
mat = self.asMatrix2D()
if clip_negative:
mat = mat*(mat > 0.0)
# Rescale 0.0 to 1.0
mat = mat - mat.min()
mat = mat / mat.max()
therm = np.zeros((3,w,h),dtype=np.float)
# Black to blue
mask = mat <= 0.1
therm[2,:,:] += mask*(0.5 + 0.5*mat/0.1)
# blue to yellow
mask = (mat > 0.10) & (mat <= 0.4)
tmp = (mat - 0.10) / 0.30
therm[2,:,:] += mask*(1.0-tmp)
therm[1,:,:] += mask*tmp
therm[0,:,:] += mask*tmp
# yellow to orange
mask = (mat > 0.4) & (mat <= 0.7)
tmp = (mat - 0.4) / 0.3
therm[2,:,:] += mask*0
therm[1,:,:] += mask*(1-0.5*tmp)
therm[0,:,:] += mask*1
# the orange to red
mask = (mat > 0.7)
tmp = (mat - 0.7) / 0.3
therm[2,:,:] += mask*0
therm[1,:,:] += mask*(0.5-0.5*tmp)
therm[0,:,:] += mask*1
return pv.Image(therm)
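    # Editor's note: after normalizing the matrix to [0,1], the piecewise map
    # above runs black->blue on [0,0.1], blue->yellow on (0.1,0.4],
    # yellow->orange on (0.4,0.7], and orange->red above 0.7. For example:
    #   pv.Image('scan.jpg').asThermal()   # hypothetical input file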
def asAnnotated(self, as_type="PIL"):
'''
@param as_type: Specify either "PIL" or "PV". If
"PIL" (default) then the return type is a PIL image.
If "PV", then the return type is a pyvision image,
where the annotations have been 'flattened' onto
the original source image.
@return: the PIL image used for annotation.
'''
if self.annotated is None:
if self.bw_annotate:
# Make a black and white image that can be annotated with color.
self.annotated = self.asPIL().convert("L").copy().convert("RGB")
else:
# Annotate over color if available.
self.annotated = self.asPIL().copy().convert("RGB")
if as_type.upper() == "PV":
return pv.Image(self.annotated)
else:
return self.annotated
def asHSV(self):
'''
@return: an OpenCV HSV encoded image
'''
cvim = self.asOpenCV()
dst = cv.CreateImage(cv.GetSize(cvim), cv.IPL_DEPTH_8U, 3)
cv.CvtColor(cvim, dst, cv.CV_BGR2HSV)
return dst
def asLAB(self):
'''
@return: an OpenCV LAB encoded image
'''
cvim = self.asOpenCV()
dst = cv.CreateImage(cv.GetSize(cvim), cv.IPL_DEPTH_8U, 3)
cv.CvtColor(cvim, dst, cv.CV_BGR2Lab)
return dst
def getExif(self,output='simple'):
'''
This function returns the exif headers for an image. This only works
for images that have been read from disk.
@param output: select 'simple' or 'full'. 'full' output contains additional metadata.
@returns: a dictionary of EXIF data.
'''
if self.type == TYPE_PIL and self.filename is not None:
result = {}
info = self.pil._getexif()
if info is None:
return None
# iterate through exif tags
for key,value in info.iteritems():
tag = "ukn_%s"%key
# translate tags to text
if exif.EXIF_TAGS.has_key(key):
tag = exif.EXIF_TAGS[key][0]
datatype = exif.EXIF_TAGS[key][1]
category = exif.EXIF_TAGS[key][2]
description = exif.EXIF_TAGS[key][3]
# convert to floats
if isinstance(value,tuple) and len(value) == 2 and value[1] > 0:
value = float(value[0])/float(value[1])
if output == 'simple':
result[tag] = value
else:
result[tag] = (value,key,datatype,category,description)
return result
else:
return None
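    # Editor's usage sketch (hypothetical JPEG; EXIF data is only available
    # for images that were loaded from a file):
    #   exif_data = pv.Image('photo.jpg').getExif()
    #   if exif_data is not None:
    #       print exif_data.get('DateTime')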
def annotateRect(self,rect,color='red', fill_color=None, alpha=1.0):
'''
Draws a rectangle on the annotation image
@param rect: a rectangle of type Rect
@param color: defined as ('#rrggbb' or 'name')
@param fill_color: defined as per color, but indicates the color
used to fill the rectangle. Specify None for no fill.
@param alpha: Ignored if no fill. Otherwise, this value controls
how opaque the fill is. Specify 1.0 (default) for a fully opaque
fill, or 0.0 for fully transparent. A value of 0.3, for example,
would show a partially transparent filled rectangle over
the background image.
'''
im = self.asAnnotated()
box = rect.box()
offset = (box[0],box[1])
#this supports filling a rectangle that is semi-transparent
if fill_color:
(r,g,b) = PIL.ImageColor.getrgb(fill_color)
rect_img = PIL.Image.new('RGBA', (int(rect.w),int(rect.h)), (r,g,b,int(alpha*255)))
im.paste(rect_img,offset,mask=rect_img) #use 'paste' method to support transparency
#just draws the rect outline in the outline color
draw = PIL.ImageDraw.Draw(im)
draw.rectangle(box,outline=color,fill=None)
del draw
def annotateImage(self,im,rect,color='red', fill_color=None):
'''
Draws an image
@param im: the image to render
@param rect: a rectangle of type Rect
@param color: defined as ('#rrggbb' or 'name')
@param fill_color: defined as per color, but indicates the color
used to fill the rectangle. Specify None for no fill.
'''
# Reduce the size of the image
thumb = im.thumbnail((rect.w,rect.h))
x = int(rect.x + rect.w/2 - thumb.size[0]/2)
y = int(rect.y + rect.h/2 - thumb.size[1]/2)
# Get the annotated image buffer
pil = self.asAnnotated()
# Draw a rect
draw = PIL.ImageDraw.Draw(pil)
box = [rect.x,rect.y,rect.x+rect.w,rect.y+rect.h]
draw.rectangle(box,outline=None,fill=fill_color)
del draw
# Paste the image
pil.paste(im.asPIL(),(x,y))
# Draw a rect over the top
draw = PIL.ImageDraw.Draw(pil)
box = [rect.x,rect.y,rect.x+rect.w,rect.y+rect.h]
draw.rectangle(box,outline=color,fill=None)
del draw
def annotateThickRect(self,rect,color='red',width=5):
'''
Draws a rectangle on the annotation image
@param rect: a rectangle of type Rect
@param color: defined as ('#rrggbb' or 'name')
'''
# get the image buffer
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
x,y,w,h = [rect.x,rect.y,rect.w,rect.h]
# Draw individual lines
line = [x,y,x+w,y]
draw.line(line,fill=color,width=width)
line = [x,y,x,y+h]
draw.line(line,fill=color,width=width)
line = [x,y+h,x+w,y+h]
draw.line(line,fill=color,width=width)
line = [x+w,y,x+w,y+h]
draw.line(line,fill=color,width=width)
del draw
def annotateEllipse(self,rect,color='red'):
'''
Draws an ellipse on the annotation image
        @param rect: the bounding box of the ellipse of type Rect
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
box = [rect.x,rect.y,rect.x+rect.w,rect.y+rect.h]
draw.ellipse(box,outline=color)
del draw
def annotateLine(self,point1,point2,color='red',width=1):
'''
Draws a line from point1 to point2 on the annotation image
@param point1: the starting point as type Point
@param point2: the ending point as type Point
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
line = [point1.X(),point1.Y(),point2.X(),point2.Y()]
draw.line(line,fill=color,width=width)
del draw
def annotateLines(self,points,color='red',width=1):
'''
        Draws connected line segments through a list of points on the annotation image
        @param points: an ordered list of points of type Point
@param color: defined as ('#rrggbb' or 'name')
'''
n = len(points)-1
for i in range(n):
self.annotateLine(points[i],points[i+1],color=color,width=width)
def annotateMask(self,mask,color='red'):
'''
Shades the contents of a mask.
@param mask: a numpy array showing the mask.
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
pil = pv.Image(1.0*mask).asPIL()
pil = pil.convert('1')
draw.bitmap((0,0), pil, fill=color)
del draw
def annotatePolygon(self,points,color='red',width=1,fill=None):
'''
        Draws a closed polygon through the given points on the annotation image
@param points: a list of pv points to be plotted
@param color: defined as ('#rrggbb' or 'name')
@param width: the line width
'''
# Fill the center
if fill is not None:
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
poly = [(point.X(),point.Y()) for point in points]
draw.polygon(poly,outline=None,fill=fill)
del draw
# Draw lines
if color is not None:
n = len(points)
for i in range(n):
j = (i+1)%n
self.annotateLine(points[i],points[j],color=color,width=width)
def annotatePoint(self,point,color='red'):
'''
Marks a point in the annotation image using a small circle
@param point: the point to mark as type Point
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
draw.ellipse(box,outline=color)
del draw
def annotatePoints(self,points,color='red'):
'''
        Marks each point in a list using a small circle
        @param points: a list of points to mark, each of type Point
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
for point in points:
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
draw.ellipse(box,outline=color)
del draw
def annotateCircle(self,point, radius=3, color='red',fill=None):
'''
Marks a circle in the annotation image
@param point: the center of the circle as type Point
@param radius: the radius of the circle
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
box = [point.X()-radius,point.Y()-radius,point.X()+radius,point.Y()+radius]
draw.ellipse(box,outline=color,fill=fill)
del draw
def annotateArc(self,point, radius=3, startangle=0, endangle=360, color='red'):
'''
Draws a circular arc on the image.
@param point: the center of the circle as type Point
@param radius: the radius of the circle
@param startangle: the starting angle of the arc segment to be drawn, in degrees
@param endangle: the ending angle in degrees. Arc will be drawn clockwise from
starting angle to ending angle.
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
box = [int(point.X()-radius),int(point.Y()-radius),
int(point.X()+radius),int(point.Y()+radius)]
draw.arc(box, int(startangle), int(endangle), fill=color)
del draw
def annotateLabel(self,point,label,color='red',mark=False, font=None, background=None):
'''
Marks a point in the image with text
@param point: the point to mark as type Point
@param label: the text to use as a string
@param color: defined as ('#rrggbb' or 'name')
@param mark: of True or ['right', 'left', 'below', or 'above','centered'] then also mark the point with a small circle
@param font: An optional PIL.ImageFont font object to use. Alternatively, specify an integer and the label
will use Arial font of that size. If None, then the default is used.
@param background: An optional color that will be used to draw a rectangular background underneath the text.
'''
# Get the image buffer
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
# Load the font
if font is None:
font = ImageFont.load_default()
elif isinstance(font,int):
font = ImageFont.truetype(pv.FONT_ARIAL, font)
# Compute the size
tw,th = draw.textsize(label, font=font)
# Select the position relative to the point
if mark in [True, 'right']:
textpt = pv.Point(point.X()+5,point.Y()-th/2)
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
elif mark in ['left']:
textpt = pv.Point(point.X()-tw-5,point.Y()-th/2)
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
        elif mark in ['below']:
textpt = pv.Point(point.X()-tw/2,point.Y()+5)
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
elif mark in ['above']:
textpt = pv.Point(point.X()-tw/2,point.Y()-th-5)
box = [point.X()-3,point.Y()-3,point.X()+3,point.Y()+3]
elif mark in ['centered']:
textpt = pv.Point(point.X()-tw/2,point.Y()-th/2)
else:
textpt = point
# Fill in the background
if background is not None:
point2 = pv.Point( textpt.x + tw, textpt.y+th)
draw.rectangle([textpt.asTuple(), point2.asTuple()], fill=background)
# Render the text
draw.text([textpt.x,textpt.y],label,fill=color, font=font)
if mark not in [False,None,'centered']:
draw.ellipse(box,outline=color)
del draw
def annotateDot(self,point,color='red'):
'''
Like L{annotatePoint} but only draws a point on the given pixel.
This is useful to avoid clutter if many points are being annotated.
@param point: the point to mark as type Point
@param color: defined as ('#rrggbb' or 'name')
'''
im = self.asAnnotated()
draw = PIL.ImageDraw.Draw(im)
draw.point([point.X(),point.Y()],fill=color)
del draw
def valueNormalize(self):
'''TODO: Deprecated remove this sometime.'''
print "WARNING: Image.valueNormalize has been deprecated."
return self.normalize()
def getType(self):
'''Return the type of the image.'''
return self.type
def normalize(self):
''' Equalize and normalize the image. '''
import PIL.ImageOps
# Create a copy
pil = self.asPIL().copy()
# Equalize
pil = PIL.ImageOps.equalize(pil.convert('L'))
self.pil = pil
self.matrix2d = None
# Normalize
mat = self.asMatrix2D()
mean = mat.mean()
std = mat.std()
mat -= mean
mat /= std
self.matrix2d=mat
def equalize(self, bw=True):
''' Equalize the image '''
import PIL.ImageOps
pil = self.asPIL().copy()
if bw:
pil = PIL.ImageOps.equalize(pil.convert('L'))
else:
pil = PIL.ImageOps.equalize(pil)
return pv.Image(pil)
def _generateMatrix2D(self):
'''
Create a matrix version of the image.
'''
data_buffer = self.toBufferGray(32)
self.matrix2d = numpy.frombuffer(data_buffer,numpy.float32).reshape(self.height,self.width).transpose()
def _generateMatrix3D(self):
'''
Create a matrix version of the image.
'''
data_buffer = self.toBufferRGB(32)
self.matrix3d = numpy.frombuffer(data_buffer,numpy.float32).reshape(self.height,self.width,3).transpose()
def _generatePIL(self):
'''
Create a PIL version of the image
'''
if self.channels == 1:
try:
# PILLOW
self.pil = PIL.Image.frombytes("L",self.size,self.toBufferGray(8))
except:
# PIL
self.pil = PIL.Image.fromstring("L",self.size,self.toBufferGray(8))
elif self.channels == 3:
try:
self.pil = PIL.Image.frombytes("RGB",self.size,self.toBufferRGB(8))
except:
self.pil = PIL.Image.fromstring("RGB",self.size,self.toBufferRGB(8))
else:
raise NotImplementedError("Cannot convert image from type: %s"%self.type)
def _generateOpenCV(self):
'''
Create a color opencv representation of the image.
TODO: The OpenCV databuffer seems to be automatically swapped from RGB to BGR. This is counter intuitive.
'''
w,h = self.size
# generate a grayscale opencv image
if self.channels == 1:
gray = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,1)
cv.SetData(gray,self.toBufferGray(8))
self.opencv = gray
# Generate a color opencv image
elif self.channels == 3:
rgb = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,3)
bgr = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,3)
cv.SetData(rgb, self.toBufferRGB(8))
# convert from RGB to BGR
cv.CvtColor(rgb,bgr,cv.CV_RGB2BGR)
self.opencv=bgr
else:
raise NotImplementedError("Cannot convert image from type: %s"%self.type)
def _generateOpenCV2(self):
'''
Create a matrix version of the image compatible with OpenCV 2 (cv2) in BGR format.
'''
data_buffer = self.toBufferRGB(8)
self.opencv2 = cv2.cvtColor(numpy.frombuffer(data_buffer,numpy.uint8).reshape(self.height,self.width,3),cv2.COLOR_RGB2BGR)
def _generateOpenCV2BW(self):
'''
Create a matrix version of the image compatible with OpenCV 2 (cv2) in BGR format.
'''
data_buffer = self.toBufferGray(8)
self.opencv2bw = numpy.frombuffer(data_buffer,numpy.uint8).reshape(self.height,self.width)
def toBufferGray(self,depth):
'''
@param depth: Use 8, 32, or 64, to specify the bit depth of the pixels.
@return: the image data as a binary python string.
'''
image_buffer = None
if self.type == TYPE_PIL:
# Convert to gray and then get buffer
pil = self.pil
if pil.mode != 'L':
pil = pil.convert('L')
try:
# PILLOW
image_buffer = pil.tobytes()
except:
# PIL
image_buffer = pil.tostring()
elif self.type == TYPE_MATRIX_2D:
# Just get the buffer
image_buffer = self.matrix2d.transpose().tostring()
elif self.type == TYPE_OPENCV2BW:
# Just get the buffer
image_buffer = self.opencv2bw.tostring()
elif self.type == TYPE_OPENCV2:
# Convert to gray then get buffer
tmp = cv2.cvtColor(self.opencv2, cv2.cv.CV_BGR2GRAY)
image_buffer = tmp.tostring()
elif self.type == TYPE_MATRIX_RGB:
# Convert to gray
mat = self.matrix3d
mat = LUMA[0]*mat[0] + LUMA[1]*mat[1] + LUMA[2]*mat[2]
image_buffer = mat.transpose().tostring()
elif self.type == TYPE_OPENCV:
if self.channels == 1:
# Just get buffer
image_buffer = self.opencv.tostring()
elif self.channels == 3:
# Convert to gray
w,h = self.width,self.height
gray = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,1)
cv.CvtColor( self.opencv, gray, cv.CV_BGR2GRAY );
image_buffer = gray.tostring()
else:
raise TypeError("Operation not supported for image type.")
else:
raise TypeError("Operation not supported for image type.")
# Buffer should now be created
assert image_buffer
# Make sure the depth is correct
if depth == self.depth:
return image_buffer
else:
types = {8:numpy.uint8,32:numpy.float32,64:numpy.float64}
# convert the image_buffer to data
data = numpy.frombuffer(image_buffer,types[self.depth])
if depth==8:
# Make sure the data is in a valid range
max_value = data.max()
min_value = data.min()
data_range = max_value - min_value
if max_value <= 255 and min_value >= 0 and data_range >= 150:
# assume the values are already in a good range for the
# 8 bit image
pass
else:
# Rescale the values from 0 to 255
if max_value == min_value:
max_value = min_value+1
data = (255.0/(max_value-min_value))*(data-min_value)
data = data.astype(types[depth])
return data.tostring()
def toBufferRGB(self,depth):
'''
returns the image data as a binary python string.
'''
image_buffer = None
if self.type == TYPE_PIL:
# Convert to rgb then get buffer
pil = self.pil
if pil.mode != 'RGB':
pil = pil.convert('RGB')
try:
# PILLOW
image_buffer = pil.tobytes()
except:
# PIL
image_buffer = pil.tostring()
elif self.type == TYPE_MATRIX_2D:
# Convert to color
            mat = self.matrix2d.transpose()
            # Replicate the gray plane into an interleaved (h,w,3) RGB array
            # so the buffer layout matches the other branches.
            tmp = np.zeros((self.height,self.width,3),numpy.float32)
            tmp[:,:,0] = mat
            tmp[:,:,1] = mat
            tmp[:,:,2] = mat
            image_buffer = tmp.tostring()
elif self.type == TYPE_OPENCV2BW:
# Convert to color
tmp = cv2.cvtColor(self.opencv2bw, cv2.cv.CV_GRAY2RGB)
image_buffer = tmp.tostring()
elif self.type == TYPE_OPENCV2:
# Convert BGR to RGB
tmp = cv2.cvtColor(self.opencv2, cv2.cv.CV_BGR2RGB)
image_buffer = tmp.tostring()
elif self.type == TYPE_MATRIX_RGB:
# Just get buffer
mat = self.matrix3d.transpose()
image_buffer = mat.tostring()
elif self.type == TYPE_OPENCV:
# Convert color BGR to RGB
w,h = self.width,self.height
if self.channels == 3:
rgb = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,3)
cv.CvtColor( self.opencv, rgb, cv.CV_BGR2RGB );
image_buffer = rgb.tostring()
elif self.channels == 1:
rgb = cv.CreateImage((w,h),cv.IPL_DEPTH_8U,3)
cv.CvtColor( self.opencv, rgb, cv.CV_GRAY2RGB );
image_buffer = rgb.tostring()
else:
# Handle type errors
raise TypeError("Operation not supported for image type.")
else:
# Handle unsupported
raise TypeError("Operation not supported for image type.")
assert image_buffer
# Correct depth issues
if depth == self.depth:
return image_buffer
else:
types = {8:numpy.uint8,32:numpy.float32,64:numpy.float64}
# convert the image_buffer to data
data = numpy.frombuffer(image_buffer,types[self.depth])
if depth==8:
# Make sure the data is in a valid range
max_value = data.max()
min_value = data.min()
data_range = max_value - min_value
if max_value <= 255 and min_value >= 0 and data_range >= 50:
# assume the values are already in a good range for the
# 8 bit image
pass
else:
# Rescale the values from 0 to 255
if max_value == min_value:
max_value = min_value+1
data = (255.0/(max_value-min_value))*(data-min_value)
data = data.astype(types[depth])
return data.tostring()
    def toBufferRGBA(self,depth):
        '''
        returns the image data as a binary python string.
        TODO: Not yet implemented
        '''
        raise NotImplementedError("toBufferRGBA is not yet implemented.")
def thumbnail(self, newSize):
''' Returns a resized version of the image that fits in new_size but preserves the aspect ratio.
@param newSize: tuple (new_width, new_height)
@returns: a new pyvision image that is the resized version of this image.
'''
w,h = self.size
s1 = float(newSize[0])/w
s2 = float(newSize[1])/h
s = min(s1,s2)
return self.scale(s)
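    # Editor's worked example: a 400x200 image thumbnailed to (100,100)
    # scales by min(100/400., 100/200.) = 0.25, yielding a 100x50 result
    # with the aspect ratio preserved.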
def resize(self, newSize, **kwargs):
''' Returns a resized version of the image. This is a convenience function.
For more control, look at the Affine class for arbitrary transformations.
@param newSize: tuple (new_width, new_height)
@returns: a new pyvision image that is the resized version of this image.
'''
tmp = self.asPIL()
if newSize[0] < self.size[0] or newSize[1] < self.size[1]:
            # because at least one dimension is shrinking, use the ANTIALIAS filter
tmp = tmp.resize(newSize, ANTIALIAS)
else:
#use bicubic interpolation
tmp = tmp.resize(newSize, BICUBIC)
return pyvision.Image(tmp,**kwargs)
def scale(self, scale):
''' Returns a scaled version of the image. This is a convenience function.
For more control, look at the Affine class for arbitrary transformations.
@param scale: a float indicating the scale factor
@returns: a new pyvision image that is the scaled version of this image.
'''
assert scale > 0.0
w,h = self.size
new_size = (int(round(scale*w)),int(round(scale*h)))
return self.resize(new_size)
def copy(self):
'''
Returns a new pv.Image which is a copy of (only) the current image.
Other internal data stored by the current pv.Image will NOT be copied.
This method uses cv.CloneImage so that the underlying image data will be
disconnected from the original data. (Deep copy)
'''
imgdat = self.asOpenCV()
imgdat2 = cv.CloneImage(imgdat)
return pv.Image(imgdat2)
def crop(self, rect, size=None, interpolation=None, return_affine=False):
'''
Crops an image to the given rectangle. Rectangle parameters are rounded to nearest
integer values. High quality resampling. The default behavior is to use cv.GetSubRect
to crop the image. This returns a slice the OpenCV image so modifying the resulting
image data will also modify the data in this image. If a size is provide a new OpenCV
image is created for that size and cv.Resize is used to copy the image data. If the
bounds of the rectangle are outside the image, an affine transform (pv.AffineFromRect)
is used to produce the croped image to properly handle regions outside the image.
In this case the downsampling quality may not be as good. #
@param rect: a Rectangle defining the region to be cropped.
@param size: a new size for the returned image. If None the result is not resized.
@param interpolation: None = Autoselect or one of CV_INTER_AREA, CV_INTER_NN, CV_INTER_LINEAR, CV_INTER_BICUBIC
@param return_affine: If True, also return an affine transform that can be used to transform points.
@returns: a cropped version of the image or if return affine a tuple of (image,affine)
@rtype: pv.Image
'''
# Notes: pv.Rect(0,0,w,h) should return the entire image. Since pixel values
# are indexed by zero this means that upper limits are not inclusive: x from [0,w)
# and y from [0,h)
x,y,w,h = rect.asTuple()
x = int(np.round(x))
y = int(np.round(y))
w = int(np.round(w))
h = int(np.round(h))
# Check the bounds for cropping
if x < 0 or y < 0 or x+w > self.size[0] or y+h > self.size[1]:
if size is None:
size = (w,h)
affine = pv.AffineFromRect(pv.Rect(x,y,w,h),size)
im = affine(self)
if return_affine:
return im,affine
else:
return im
# Get the image as opencv
cvim = self.asOpenCV()
# Set up ROI
subim = cv.GetSubRect(cvim,(x,y,w,h))
affine = pv.AffineTranslate(-x,-y,(w,h))
if size is None:
size = (w,h)
# Copy to new image
new_image = cv.CreateImage(size,cvim.depth,cvim.nChannels)
        if interpolation is None:
            if size[0] < w or size[1] < h:
                # Downsampling so use area interpolation
                interpolation = cv.CV_INTER_AREA
            else:
                # Upsampling so use bicubic interpolation
                interpolation = cv.CV_INTER_CUBIC
# Resize to the correct size
cv.Resize(subim,new_image,interpolation)
affine = pv.AffineNonUniformScale(float(size[0])/w,float(size[1])/h,size)*affine
# Return the result as a pv.Image
if return_affine:
return pv.Image(new_image),affine
else:
return pv.Image(new_image)
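    # Editor's usage sketch for crop() (the file path is hypothetical):
    #   im = pv.Image('face.jpg')
    #   tile = im.crop(pv.Rect(10, 10, 128, 128), size=(64, 64))
    #   # out-of-bounds rects take the affine path; return_affine also
    #   # yields a transform for mapping points into the crop:
    #   tile, affine = im.crop(pv.Rect(-5, -5, 64, 64), return_affine=True)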
def save(self,filename,annotations=False):
'''
Save the image to a file. This is performed by converting to PIL and
then saving to a file based on on the extension.
'''
if filename[-4:] == ".raw":
# TODO: save as a matrix
raise NotImplementedError("Cannot save as a matrix")
#elif filename[-4:] == ".mat":
# TODO: save as a matlab file
# raise NotImplementedError("Cannot save in matlab format")
else:
if annotations:
self.asAnnotated().save(filename)
else:
self.asPIL().save(filename)
def show(self, window=None, pos=None, delay=0, size=None):
'''
Displays the annotated version of the image using OpenCV highgui
        @param window: the name of the highgui window to use; if a window with
            this name already exists it is reused, otherwise a new one is created.
@param pos: if a new window is being created, the (x,y) coordinate for the new window
@param delay: A delay in milliseconds to wait for keyboard input (passed to cv.WaitKey).
0 delays indefinitely, 30 is good for presenting a series of images like a video.
For performance reasons, namely when using the same window to display successive
frames of video, we don't want to tear-down and re-create the window each time.
Thus the window frame will persist beyond the scope of the call to img.show(). The window
will disappear after the program exits, or it can be destroyed with a call to cv.DestroyWindow.
@param size: Optional output size for image, None=native size.
@returns: the return value of the cv.WaitKey call.
'''
        if window is None and pv.runningInNotebook() and 'pylab' in globals():
# If running in notebook, then try to display the image inline.
if size is None:
size = self.size
# Constrain the size of the output
max_dim = max(size[0],size[1])
if max_dim > 800:
scale = 800.0/max_dim
size = (int(scale*size[0]),int(scale*size[1]))
w,h = size
            # TODO: Can't quite figure out how figsize works and how to set it to native pixels
#pylab.figure()
IPython.core.pylabtools.figsize(1.25*w/72.0,1.25*h/72.0) #@UndefinedVariable
pylab.figure()
pylab.imshow(self.asAnnotated(),origin='upper',aspect='auto')
else:
# Otherwise, use an opencv window
if window is None:
window = "PyVisionImage"
# Create the window
cv.NamedWindow(window)
# Set the location
if pos is not None:
cv.MoveWindow(window, pos[0], pos[1])
# Resize the image.
if size is not None:
                x = pyvision.Image(self.asAnnotated().resize(size))
else:
x = pyvision.Image(self.asAnnotated())
# Display the result
cv.ShowImage(window, x.asOpenCV() )
key = cv.WaitKey(delay=delay)
del x
return key
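    # Illustrative usage of show (window name and key code are arbitrary):
    # reuse one window to page through frames, waiting ~30ms per frame as
    # suggested above for video-like playback, and stop on ESC:
    #
    #     for frame in frames:
    #         if frame.show(window="Demo", delay=30) == 27:
    #             break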
def __repr__(self):
return "pv.Image(w=%d,h=%d,c=%d,type=%s)"%(self.width,self.height,self.channels,self.type)
def OpenCVToNumpy(cvmat):
'''
Convert an OpenCV matrix to a numpy matrix.
Based on code from: http://opencv.willowgarage.com/wiki/PythonInterface
'''
depth2dtype = {
cv.CV_8U: 'uint8',
cv.CV_8S: 'int8',
cv.CV_16U: 'uint16',
cv.CV_16S: 'int16',
cv.CV_32S: 'int32',
cv.CV_32F: 'float32',
cv.CV_64F: 'float64',
}
# Check the size and channels
assert cvmat.channels == 1
r = cvmat.rows
c = cvmat.cols
# Convert to numpy
a = np.fromstring(
cvmat.tostring(),
dtype=depth2dtype[cvmat.type],
count=r*c)
a.shape = (r,c)
return a
def NumpyToOpenCV(a):
'''
Convert a numpy matrix to an OpenCV matrix.
Based on code from: http://opencv.willowgarage.com/wiki/PythonInterface
'''
dtype2depth = {
'uint8': cv.CV_8U,
'int8': cv.CV_8S,
'uint16': cv.CV_16U,
'int16': cv.CV_16S,
'int32': cv.CV_32S,
'float32': cv.CV_32F,
'float64': cv.CV_64F,
}
# Check the size
assert len(a.shape) == 2
r,c = a.shape
# Convert to opencv
cv_im = cv.CreateMat(r,c,dtype2depth[str(a.dtype)])
cv.SetData(cv_im, a.tostring())
return cv_im
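# Illustrative round trip through the two converters above (array shape and
# dtype chosen arbitrarily): a single-channel 2D numpy array should survive
# NumpyToOpenCV followed by OpenCVToNumpy with shape and dtype intact.
#
#     a = np.zeros((4, 3), dtype='float32')
#     b = OpenCVToNumpy(NumpyToOpenCV(a))
#     assert a.shape == b.shape and a.dtype == b.dtype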
| mikeseven/pyvision | src/pyvision/types/img.py | Python | bsd-3-clause | 46,803 |
from django.contrib.auth.models import User
class EmailAuthBackend(object):
"""Email Authentication Backend
Allows users to sign in using email/password pair.
"""
supports_anonymous_user = False
supports_object_permissions = False
def authenticate(self, username=None, password=None):
try:
user = User.objects.get(email=username)
if user.check_password(password):
return user
except User.DoesNotExist:
return None
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
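# Registration sketch (the dotted path below is an assumption that depends on
# where this module lives in the project): list the backend ahead of, or
# instead of, Django's default ModelBackend in settings.py.
#
#     AUTHENTICATION_BACKENDS = (
#         'core.backends.EmailAuthBackend',
#         'django.contrib.auth.backends.ModelBackend',
#     )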
| jacobjbollinger/sorbet | sorbet/core/backends.py | Python | bsd-2-clause | 680 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from senlin.common import exception as exc
from senlin.profiles.os.heat import stack
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
class TestHeatStackProfile(base.SenlinTestCase):
def setUp(self):
super(TestHeatStackProfile, self).setUp()
self.context = utils.dummy_context()
self.spec = {
'type': 'os.heat.stack',
'version': '1.0',
'properties': {
'template': {"Template": "data"},
'template_url': '/test_uri',
'context': {},
'parameters': {'foo': 'bar'},
'files': {},
'timeout': 60,
'disable_rollback': True,
'environment': {}
}
}
def test_stack_init(self):
profile = stack.StackProfile('t', self.spec)
self.assertIsNone(profile.stack_id)
def test_do_validate(self):
oc = mock.Mock()
profile = stack.StackProfile('t', self.spec)
profile._orchestrationclient = oc
node_obj = mock.Mock(user='fake_user', project='fake_project')
res = profile.do_validate(node_obj)
props = self.spec['properties']
call_args = {
'stack_name': mock.ANY,
'template': props['template'],
'template_url': props['template_url'],
'parameters': props['parameters'],
'files': props['files'],
'environment': props['environment'],
'preview': True,
}
self.assertTrue(res)
oc.stack_create.assert_called_once_with(**call_args)
def test_do_validate_fails(self):
oc = mock.Mock()
profile = stack.StackProfile('t', self.spec)
profile._orchestrationclient = oc
err = exc.InternalError(code=400, message='Boom')
oc.stack_create = mock.Mock(side_effect=err)
node_obj = mock.Mock()
node_obj.name = 'stack_node'
ex = self.assertRaises(exc.InvalidSpec,
profile.do_validate, node_obj)
props = self.spec['properties']
call_args = {
'stack_name': mock.ANY,
'template': props['template'],
'template_url': props['template_url'],
'parameters': props['parameters'],
'files': props['files'],
'environment': props['environment'],
'preview': True,
}
oc.stack_create.assert_called_once_with(**call_args)
self.assertEqual('Failed in validating template: Boom',
str(ex))
def test_do_create(self):
oc = mock.Mock()
profile = stack.StackProfile('t', self.spec)
profile._orchestrationclient = oc
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
node.name = 'test_node'
fake_stack = mock.Mock(id='FAKE_ID')
oc.stack_create = mock.Mock(return_value=fake_stack)
# do it
res = profile.do_create(node)
# assertions
kwargs = {
'stack_name': mock.ANY,
'template': self.spec['properties']['template'],
'template_url': self.spec['properties']['template_url'],
'timeout_mins': self.spec['properties']['timeout'],
'disable_rollback': self.spec['properties']['disable_rollback'],
'parameters': self.spec['properties']['parameters'],
'files': self.spec['properties']['files'],
'environment': self.spec['properties']['environment'],
'tags': ",".join(['cluster_node_id=NODE_ID',
'cluster_id=CLUSTER_ID',
'cluster_node_index=123'])
}
self.assertEqual('FAKE_ID', res)
oc.stack_create.assert_called_once_with(**kwargs)
oc.wait_for_stack.assert_called_once_with('FAKE_ID', 'CREATE_COMPLETE',
timeout=3600)
def test_do_create_with_template_url(self):
spec = {
'type': 'os.heat.stack',
'version': '1.0',
'properties': {
'template': {},
'template_url': '/test_uri',
'context': {},
'parameters': {'foo': 'bar'},
'files': {},
'timeout': 60,
'disable_rollback': True,
'environment': {}
}
}
oc = mock.Mock()
profile = stack.StackProfile('t', spec)
profile._orchestrationclient = oc
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
node.name = 'test_node'
fake_stack = mock.Mock(id='FAKE_ID')
oc.stack_create = mock.Mock(return_value=fake_stack)
# do it
res = profile.do_create(node)
# assertions
kwargs = {
'stack_name': mock.ANY,
'template': spec['properties']['template'],
'template_url': spec['properties']['template_url'],
'timeout_mins': spec['properties']['timeout'],
'disable_rollback': spec['properties']['disable_rollback'],
'parameters': spec['properties']['parameters'],
'files': spec['properties']['files'],
'environment': spec['properties']['environment'],
'tags': ",".join(['cluster_node_id=NODE_ID',
'cluster_id=CLUSTER_ID',
'cluster_node_index=123'])
}
self.assertEqual('FAKE_ID', res)
oc.stack_create.assert_called_once_with(**kwargs)
oc.wait_for_stack.assert_called_once_with('FAKE_ID', 'CREATE_COMPLETE',
timeout=3600)
def test_do_create_default_timeout(self):
spec = copy.deepcopy(self.spec)
del spec['properties']['timeout']
profile = stack.StackProfile('t', spec)
oc = mock.Mock()
profile._orchestrationclient = oc
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
node.name = 'test_node'
fake_stack = mock.Mock(id='FAKE_ID')
oc.stack_create = mock.Mock(return_value=fake_stack)
oc.wait_for_stack = mock.Mock()
# do it
res = profile.do_create(node)
# assertions
self.assertEqual('FAKE_ID', res)
kwargs = {
'stack_name': mock.ANY,
'template': self.spec['properties']['template'],
'template_url': self.spec['properties']['template_url'],
'timeout_mins': None,
'disable_rollback': self.spec['properties']['disable_rollback'],
'parameters': self.spec['properties']['parameters'],
'files': self.spec['properties']['files'],
'environment': self.spec['properties']['environment'],
'tags': ",".join(['cluster_node_id=NODE_ID',
'cluster_id=CLUSTER_ID',
'cluster_node_index=123'])
}
oc.stack_create.assert_called_once_with(**kwargs)
oc.wait_for_stack.assert_called_once_with('FAKE_ID', 'CREATE_COMPLETE',
timeout=None)
def test_do_create_failed_create(self):
oc = mock.Mock()
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
node.name = 'test_node'
err = exc.InternalError(code=400, message='Too Bad')
oc.stack_create = mock.Mock(side_effect=err)
profile._orchestrationclient = oc
# do it
ex = self.assertRaises(exc.EResourceCreation,
profile.do_create,
node)
# assertions
self.assertEqual('Failed in creating stack: Too Bad.',
str(ex))
call_args = {
'stack_name': mock.ANY,
'template': self.spec['properties']['template'],
'template_url': self.spec['properties']['template_url'],
'timeout_mins': self.spec['properties']['timeout'],
'disable_rollback': self.spec['properties']['disable_rollback'],
'parameters': self.spec['properties']['parameters'],
'files': self.spec['properties']['files'],
'environment': self.spec['properties']['environment'],
'tags': ",".join(['cluster_node_id=NODE_ID',
'cluster_id=CLUSTER_ID',
'cluster_node_index=123'])
}
oc.stack_create.assert_called_once_with(**call_args)
self.assertEqual(0, oc.wait_for_stack.call_count)
def test_do_create_failed_wait(self):
spec = copy.deepcopy(self.spec)
del spec['properties']['timeout']
profile = stack.StackProfile('t', spec)
oc = mock.Mock()
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
node.name = 'test_node'
fake_stack = mock.Mock(id='FAKE_ID')
oc.stack_create = mock.Mock(return_value=fake_stack)
err = exc.InternalError(code=400, message='Timeout')
oc.wait_for_stack = mock.Mock(side_effect=err)
profile._orchestrationclient = oc
# do it
ex = self.assertRaises(exc.EResourceCreation,
profile.do_create,
node)
# assertions
self.assertEqual('Failed in creating stack: Timeout.',
str(ex))
kwargs = {
'stack_name': mock.ANY,
'template': self.spec['properties']['template'],
'template_url': self.spec['properties']['template_url'],
'timeout_mins': None,
'disable_rollback': self.spec['properties']['disable_rollback'],
'parameters': self.spec['properties']['parameters'],
'files': self.spec['properties']['files'],
'environment': self.spec['properties']['environment'],
'tags': ",".join(['cluster_node_id=NODE_ID',
'cluster_id=CLUSTER_ID',
'cluster_node_index=123'])
}
oc.stack_create.assert_called_once_with(**kwargs)
oc.wait_for_stack.assert_called_once_with('FAKE_ID', 'CREATE_COMPLETE',
timeout=None)
def test_do_delete(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
test_stack = mock.Mock(physical_id='FAKE_ID')
# do it
res = profile.do_delete(test_stack)
# assertions
self.assertTrue(res)
oc.stack_delete.assert_called_once_with('FAKE_ID', True)
oc.wait_for_stack_delete.assert_called_once_with('FAKE_ID')
def test_do_delete_no_physical_id(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
test_stack = mock.Mock(physical_id=None)
profile._orchestrationclient = oc
# do it
res = profile.do_delete(test_stack, ignore_missing=False)
# assertions
self.assertTrue(res)
self.assertFalse(oc.stack_delete.called)
self.assertFalse(oc.wait_for_stack_delete.called)
def test_do_delete_ignore_missing(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
test_stack = mock.Mock(physical_id='FAKE_ID')
profile._orchestrationclient = oc
# do it
res = profile.do_delete(test_stack, ignore_missing=False)
# assertions
self.assertTrue(res)
oc.stack_delete.assert_called_once_with('FAKE_ID', False)
oc.wait_for_stack_delete.assert_called_once_with('FAKE_ID')
def test_do_delete_failed_deletion(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
err = exc.InternalError(code=400, message='Boom')
oc.stack_delete = mock.Mock(side_effect=err)
test_stack = mock.Mock(physical_id='FAKE_ID')
# do it
ex = self.assertRaises(exc.EResourceDeletion,
profile.do_delete,
test_stack)
# assertions
self.assertEqual("Failed in deleting stack 'FAKE_ID': Boom.",
str(ex))
oc.stack_delete.assert_called_once_with('FAKE_ID', True)
self.assertEqual(0, oc.wait_for_stack_delete.call_count)
def test_do_delete_failed_timeout(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
test_stack = mock.Mock(physical_id='FAKE_ID')
profile._orchestrationclient = oc
err = exc.InternalError(code=400, message='Boom')
oc.wait_for_stack_delete = mock.Mock(side_effect=err)
# do it
ex = self.assertRaises(exc.EResourceDeletion,
profile.do_delete, test_stack)
# assertions
self.assertEqual("Failed in deleting stack 'FAKE_ID': Boom.",
str(ex))
oc.stack_delete.assert_called_once_with('FAKE_ID', True)
oc.wait_for_stack_delete.assert_called_once_with('FAKE_ID')
def test_do_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
test_stack = mock.Mock(physical_id='FAKE_ID')
new_spec = {
'type': 'os.heat.stack',
'version': '1.0',
'properties': {
'template': {"Template": "data update"},
'context': {},
'parameters': {'new': 'params'},
'files': {'file1': 'new_content'},
'timeout': 123,
'disable_rollback': False,
'environment': {'foo': 'bar'}
}
}
new_profile = stack.StackProfile('u', new_spec)
# do it
res = profile.do_update(test_stack, new_profile)
# assertions
self.assertTrue(res)
kwargs = {
'template': {'Template': 'data update'},
'parameters': {'new': 'params'},
'timeout_mins': 123,
'disable_rollback': False,
'files': {'file1': 'new_content'},
'environment': {'foo': 'bar'},
}
oc.stack_update.assert_called_once_with('FAKE_ID', **kwargs)
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_no_physical_stack(self):
profile = stack.StackProfile('t', self.spec)
test_stack = mock.Mock(physical_id=None)
new_profile = mock.Mock()
res = profile.do_update(test_stack, new_profile)
self.assertFalse(res)
def test_do_update_only_template(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['template'] = {"Template": "data update"}
new_profile = stack.StackProfile('u', new_spec)
res = profile.do_update(stack_obj, new_profile)
self.assertTrue(res)
oc.stack_update.assert_called_once_with(
'FAKE_ID', template={"Template": "data update"})
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_only_params(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['parameters'] = {"new": "params"}
new_profile = stack.StackProfile('u', new_spec)
res = profile.do_update(stack_obj, new_profile)
self.assertTrue(res)
oc.stack_update.assert_called_once_with(
'FAKE_ID', parameters={"new": "params"})
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_with_timeout_value(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['timeout'] = 120
new_profile = stack.StackProfile('u', new_spec)
# do it
res = profile.do_update(stack_obj, new_profile)
# assertions
self.assertTrue(res)
oc.stack_update.assert_called_once_with('FAKE_ID', timeout_mins=120)
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_disable_rollback(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['disable_rollback'] = False
new_profile = stack.StackProfile('u', new_spec)
# do it
res = profile.do_update(stack_obj, new_profile)
# assertions
self.assertTrue(res)
oc.stack_update.assert_called_once_with('FAKE_ID',
disable_rollback=False)
oc.wait_for_stack.assert_called_once_with('FAKE_ID', 'UPDATE_COMPLETE',
timeout=3600)
def test_do_update_files(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['files'] = {"new": "file1"}
new_profile = stack.StackProfile('u', new_spec)
# do it
res = profile.do_update(stack_obj, new_profile)
# assertions
self.assertTrue(res)
oc.stack_update.assert_called_once_with(
'FAKE_ID', files={"new": "file1"})
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_environment(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['environment'] = {"new": "env1"}
new_profile = stack.StackProfile('u', new_spec)
# do it
res = profile.do_update(stack_obj, new_profile)
# assertions
self.assertTrue(res)
oc.stack_update.assert_called_once_with(
'FAKE_ID', environment={"new": "env1"})
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
def test_do_update_no_change(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_profile = stack.StackProfile('u', new_spec)
res = profile.do_update(stack_obj, new_profile)
self.assertTrue(res)
self.assertEqual(0, oc.stack_update.call_count)
def test_do_update_failed_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
oc.stack_update = mock.Mock(
side_effect=exc.InternalError(code=400, message='Failed'))
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['environment'] = {"new": "env1"}
new_profile = stack.StackProfile('u', new_spec)
ex = self.assertRaises(exc.EResourceUpdate,
profile.do_update,
stack_obj, new_profile)
oc.stack_update.assert_called_once_with(
'FAKE_ID', environment={"new": "env1"})
self.assertEqual(0, oc.wait_for_stack.call_count)
self.assertEqual("Failed in updating stack 'FAKE_ID': "
"Failed.", str(ex))
def test_do_update_timeout(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
oc.wait_for_stack = mock.Mock(
side_effect=exc.InternalError(code=400, message='Timeout'))
stack_obj = mock.Mock(physical_id='FAKE_ID')
new_spec = copy.deepcopy(self.spec)
new_spec['properties']['environment'] = {"new": "env1"}
new_profile = stack.StackProfile('u', new_spec)
ex = self.assertRaises(exc.EResourceUpdate,
profile.do_update,
stack_obj, new_profile)
oc.stack_update.assert_called_once_with(
'FAKE_ID', environment={"new": "env1"})
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'UPDATE_COMPLETE', timeout=3600)
self.assertEqual("Failed in updating stack 'FAKE_ID': "
"Timeout.", str(ex))
def test_do_check(self):
node_obj = mock.Mock(physical_id='FAKE_ID')
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
# do it
res = profile.do_check(node_obj)
# assertions
self.assertTrue(res)
oc.stack_check.assert_called_once_with('FAKE_ID')
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'CHECK_COMPLETE', timeout=3600)
def test_do_check_no_physical_id(self):
node_obj = mock.Mock(physical_id=None)
profile = stack.StackProfile('t', self.spec)
res = profile.do_check(node_obj)
self.assertFalse(res)
def test_do_check_failed_checking(self):
node_obj = mock.Mock(physical_id='FAKE_ID')
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
oc.stack_check = mock.Mock(
side_effect=exc.InternalError(code=400, message='BOOM'))
self.assertRaises(exc.EResourceOperation, profile.do_check, node_obj)
oc.stack_check.assert_called_once_with('FAKE_ID')
self.assertEqual(0, oc.wait_for_stack.call_count)
def test_do_check_failed_in_waiting(self):
node_obj = mock.Mock(physical_id='FAKE_ID')
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
oc.wait_for_stack = mock.Mock(
side_effect=exc.InternalError(code=400, message='BOOM'))
self.assertRaises(exc.EResourceOperation, profile.do_check, node_obj)
oc.stack_check.assert_called_once_with('FAKE_ID')
oc.wait_for_stack.assert_called_once_with(
'FAKE_ID', 'CHECK_COMPLETE', timeout=3600)
def test_do_get_details(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
details = mock.Mock()
details.to_dict.return_value = {'foo': 'bar'}
oc.stack_get = mock.Mock(return_value=details)
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_get_details(node_obj)
self.assertEqual({'foo': 'bar'}, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
def test_do_get_details_no_physical_id(self):
profile = stack.StackProfile('t', self.spec)
node_obj = mock.Mock(physical_id=None)
res = profile.do_get_details(node_obj)
self.assertEqual({}, res)
def test_do_get_details_failed_retrieval(self):
profile = stack.StackProfile('t', self.spec)
node_obj = mock.Mock(physical_id='STACK_ID')
oc = mock.Mock()
oc.stack_get.side_effect = exc.InternalError(message='BOOM')
profile._orchestrationclient = oc
res = profile.do_get_details(node_obj)
self.assertEqual({'Error': {'code': 500, 'message': 'BOOM'}}, res)
oc.stack_get.assert_called_once_with('STACK_ID')
def test_do_adopt(self):
profile = stack.StackProfile('t', self.spec)
x_stack = mock.Mock(
parameters={'p1': 'v1', 'OS::stack_id': 'FAKE_ID'},
timeout_mins=123,
is_rollback_disabled=False
)
oc = mock.Mock()
oc.stack_get = mock.Mock(return_value=x_stack)
# mock template
templ = mock.Mock()
templ.to_dict.return_value = {'foo': 'bar'}
oc.stack_get_template = mock.Mock(return_value=templ)
# mock environment
env = mock.Mock()
env.to_dict.return_value = {'ke': 've'}
oc.stack_get_environment = mock.Mock(return_value=env)
oc.stack_get_files = mock.Mock(return_value={'fn': 'content'})
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_adopt(node_obj)
expected = {
'environment': {'ke': 've'},
'files': {'fn': 'content'},
'template': {'foo': 'bar'},
'parameters': {'p1': 'v1'},
'timeout': 123,
'disable_rollback': False
}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
oc.stack_get_template.assert_called_once_with('FAKE_ID')
oc.stack_get_environment.assert_called_once_with('FAKE_ID')
oc.stack_get_files.assert_called_once_with('FAKE_ID')
def test_do_adopt_failed_get(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
oc.stack_get.side_effect = exc.InternalError(message='BOOM')
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_adopt(node_obj)
expected = {'Error': {'code': 500, 'message': 'BOOM'}}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
def test_do_adopt_failed_get_template(self):
profile = stack.StackProfile('t', self.spec)
x_stack = mock.Mock()
oc = mock.Mock()
oc.stack_get = mock.Mock(return_value=x_stack)
oc.stack_get_template.side_effect = exc.InternalError(message='BOOM')
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_adopt(node_obj)
expected = {'Error': {'code': 500, 'message': 'BOOM'}}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
oc.stack_get_template.assert_called_once_with('FAKE_ID')
def test_do_adopt_failed_get_environment(self):
profile = stack.StackProfile('t', self.spec)
x_stack = mock.Mock()
oc = mock.Mock()
oc.stack_get = mock.Mock(return_value=x_stack)
oc.stack_get_template = mock.Mock(return_value={'foo': 'bar'})
err = exc.InternalError(message='BOOM')
oc.stack_get_environment.side_effect = err
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_adopt(node_obj)
expected = {'Error': {'code': 500, 'message': 'BOOM'}}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
oc.stack_get_template.assert_called_once_with('FAKE_ID')
oc.stack_get_environment.assert_called_once_with('FAKE_ID')
def test_do_adopt_failed_get_files(self):
profile = stack.StackProfile('t', self.spec)
x_stack = mock.Mock()
oc = mock.Mock()
oc.stack_get = mock.Mock(return_value=x_stack)
oc.stack_get_template = mock.Mock(return_value={'foo': 'bar'})
oc.stack_get_environment = mock.Mock(return_value={'ke': 've'})
oc.stack_get_files.side_effect = exc.InternalError(message='BOOM')
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
res = profile.do_adopt(node_obj)
expected = {'Error': {'code': 500, 'message': 'BOOM'}}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
oc.stack_get_template.assert_called_once_with('FAKE_ID')
oc.stack_get_environment.assert_called_once_with('FAKE_ID')
oc.stack_get_files.assert_called_once_with('FAKE_ID')
def test_do_adopt_with_overrides(self):
profile = stack.StackProfile('t', self.spec)
x_stack = mock.Mock(
parameters={'p1': 'v1', 'OS::stack_id': 'FAKE_ID'},
timeout_mins=123,
is_rollback_disabled=False
)
oc = mock.Mock()
oc.stack_get = mock.Mock(return_value=x_stack)
# mock environment
env = mock.Mock()
env.to_dict.return_value = {'ke': 've'}
oc.stack_get_environment = mock.Mock(return_value=env)
# mock template
templ = mock.Mock()
templ.to_dict.return_value = {'foo': 'bar'}
oc.stack_get_template = mock.Mock(return_value=templ)
oc.stack_get_files = mock.Mock(return_value={'fn': 'content'})
profile._orchestrationclient = oc
node_obj = mock.Mock(physical_id='FAKE_ID')
overrides = {'environment': {'ENV': 'SETTING'}}
res = profile.do_adopt(node_obj, overrides=overrides)
expected = {
'environment': {'ENV': 'SETTING'},
'files': {'fn': 'content'},
'template': {'foo': 'bar'},
'parameters': {'p1': 'v1'},
'timeout': 123,
'disable_rollback': False
}
self.assertEqual(expected, res)
oc.stack_get.assert_called_once_with('FAKE_ID')
oc.stack_get_template.assert_called_once_with('FAKE_ID')
def test_refresh_tags_empty_no_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock()
res = profile._refresh_tags([], node, False)
self.assertEqual(("", False), res)
def test_refresh_tags_with_contents_no_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock()
res = profile._refresh_tags(['foo'], node, False)
self.assertEqual(('foo', False), res)
def test_refresh_tags_deleted_no_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock()
res = profile._refresh_tags(['cluster_id=FOO', 'bar'], node, False)
self.assertEqual(('bar', True), res)
def test_refresh_tags_empty_and_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
res = profile._refresh_tags([], node, True)
expected = ",".join(['cluster_id=CLUSTER_ID',
'cluster_node_id=NODE_ID',
'cluster_node_index=123'])
self.assertEqual((expected, True), res)
def test_refresh_tags_with_contents_and_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
res = profile._refresh_tags(['foo'], node, True)
expected = ",".join(['foo',
'cluster_id=CLUSTER_ID',
'cluster_node_id=NODE_ID',
'cluster_node_index=123'])
self.assertEqual((expected, True), res)
def test_refresh_tags_deleted_and_add(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(id='NODE_ID', cluster_id='CLUSTER_ID', index=123)
res = profile._refresh_tags(['cluster_id=FOO', 'bar'], node, True)
expected = ",".join(['bar',
'cluster_id=CLUSTER_ID',
'cluster_node_id=NODE_ID',
'cluster_node_index=123'])
self.assertEqual((expected, True), res)
def test_do_join(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('bar', True))
res = profile.do_join(node, 'CLUSTER_ID')
self.assertTrue(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, True)
oc.stack_update.assert_called_once_with('STACK_ID', **{'tags': 'bar'})
def test_do_join_no_physical_id(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(physical_id=None)
res = profile.do_join(node, 'CLUSTER_ID')
self.assertFalse(res)
def test_do_join_failed_get_stack(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
err = exc.InternalError(code=400, message='Boom')
oc.stack_get.side_effect = err
node = mock.Mock(physical_id='STACK_ID')
res = profile.do_join(node, 'CLUSTER_ID')
self.assertFalse(res)
oc.stack_get.assert_called_once_with('STACK_ID')
def test_do_join_no_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('foo', False))
res = profile.do_join(node, 'CLUSTER_ID')
self.assertTrue(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, True)
self.assertEqual(0, oc.stack_update.call_count)
def test_do_join_failed_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
err = exc.InternalError(code=400, message='Boom')
oc.stack_update.side_effect = err
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('bar', True))
res = profile.do_join(node, 'CLUSTER_ID')
self.assertFalse(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, True)
oc.stack_update.assert_called_once_with('STACK_ID', **{'tags': 'bar'})
def test_do_leave(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('bar', True))
res = profile.do_leave(node)
self.assertTrue(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, False)
oc.stack_update.assert_called_once_with('STACK_ID', **{'tags': 'bar'})
def test_do_leave_no_physical_id(self):
profile = stack.StackProfile('t', self.spec)
node = mock.Mock(physical_id=None)
res = profile.do_leave(node)
self.assertFalse(res)
def test_do_leave_failed_get_stack(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
err = exc.InternalError(code=400, message='Boom')
oc.stack_get.side_effect = err
node = mock.Mock(physical_id='STACK_ID')
res = profile.do_leave(node)
self.assertFalse(res)
oc.stack_get.assert_called_once_with('STACK_ID')
def test_do_leave_no_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('foo', False))
res = profile.do_leave(node)
self.assertTrue(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, False)
self.assertEqual(0, oc.stack_update.call_count)
def test_do_leave_failed_update(self):
profile = stack.StackProfile('t', self.spec)
oc = mock.Mock()
profile._orchestrationclient = oc
x_stack = mock.Mock(tags='foo')
oc.stack_get.return_value = x_stack
err = exc.InternalError(code=400, message='Boom')
oc.stack_update.side_effect = err
node = mock.Mock(physical_id='STACK_ID')
mock_tags = self.patchobject(profile, '_refresh_tags',
return_value=('bar', True))
res = profile.do_leave(node)
self.assertFalse(res)
oc.stack_get.assert_called_once_with('STACK_ID')
mock_tags.assert_called_once_with('foo', node, False)
oc.stack_update.assert_called_once_with('STACK_ID', **{'tags': 'bar'})
| openstack/senlin | senlin/tests/unit/profiles/test_heat_stack.py | Python | apache-2.0 | 38,522 |
from django import forms
from astrobin_apps_json_api.models import CkEditorFile
class CkEditorUploadForm(forms.ModelForm):
class Meta:
model = CkEditorFile
fields = ('upload',)
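# Minimal view-side sketch (request handling is hypothetical): bind the form
# to the POST data and the uploaded file, then save to create a CkEditorFile.
#
#     form = CkEditorUploadForm(request.POST, request.FILES)
#     if form.is_valid():
#         ckeditor_file = form.save()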
| astrobin/astrobin | astrobin_apps_json_api/common/forms/CkEditorUploadForm.py | Python | agpl-3.0 | 200 |
"""Define tests for device-related endpoints."""
from datetime import timedelta
from homeassistant.components.flo.const import DOMAIN as FLO_DOMAIN
from homeassistant.components.flo.device import FloDeviceDataUpdateCoordinator
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.setup import async_setup_component
from homeassistant.util import dt
from .common import TEST_PASSWORD, TEST_USER_ID
from tests.common import async_fire_time_changed
async def test_device(hass, config_entry, aioclient_mock_fixture, aioclient_mock):
"""Test Flo by Moen device."""
config_entry.add_to_hass(hass)
assert await async_setup_component(
hass, FLO_DOMAIN, {CONF_USERNAME: TEST_USER_ID, CONF_PASSWORD: TEST_PASSWORD}
)
await hass.async_block_till_done()
assert len(hass.data[FLO_DOMAIN][config_entry.entry_id]["devices"]) == 1
device: FloDeviceDataUpdateCoordinator = hass.data[FLO_DOMAIN][
config_entry.entry_id
]["devices"][0]
assert device.api_client is not None
assert device.available
assert device.consumption_today == 3.674
assert device.current_flow_rate == 0
assert device.current_psi == 54.20000076293945
assert device.current_system_mode == "home"
assert device.target_system_mode == "home"
assert device.firmware_version == "6.1.1"
assert device.device_type == "flo_device_v2"
assert device.id == "98765"
assert device.last_heard_from_time == "2020-07-24T12:45:00Z"
assert device.location_id == "mmnnoopp"
assert device.hass is not None
assert device.temperature == 70
assert device.mac_address == "111111111111"
assert device.model == "flo_device_075_v2"
assert device.manufacturer == "Flo by Moen"
assert device.device_name == "Flo by Moen flo_device_075_v2"
assert device.rssi == -47
assert device.pending_info_alerts_count == 0
assert device.pending_critical_alerts_count == 0
assert device.pending_warning_alerts_count == 2
assert device.has_alerts is True
assert device.last_known_valve_state == "open"
assert device.target_valve_state == "open"
call_count = aioclient_mock.call_count
async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=90))
await hass.async_block_till_done()
assert aioclient_mock.call_count == call_count + 2
| sdague/home-assistant | tests/components/flo/test_device.py | Python | apache-2.0 | 2,344 |
# -*- coding: utf-8 -*-
# EDIS - a simple cross-platform IDE for C
#
# This file is part of Edis
# Copyright 2014-2015 - Gabriel Acosta <acostadariogabriel at gmail>
# License: GPLv3 (see http://www.gnu.org/licenses/gpl.html)
import os
from PyQt4.QtCore import (
QObject,
QFile,
QIODevice,
QTextStream,
QFileSystemWatcher,
SIGNAL
)
from src.core import (
exceptions,
logger
)
log = logger.get_logger(__name__)
DEBUG = log.debug
class EdisFile(QObject):
""" Representación de un objeto archivo """
def __init__(self, filename=''):
QObject.__init__(self)
self._is_new = True
if not filename:
self._filename = "Untitled"
else:
self._filename = filename
self._is_new = False
self._last_modification = None
self._system_watcher = None
@property
def filename(self):
return self._filename
@property
def is_new(self):
return self._is_new
def read(self):
""" Itenta leer el contenido del archivo, si ocurre un error se lanza
una excepción.
"""
try:
with open(self.filename, mode='r') as f:
content = f.read()
return content
except IOError as reason:
raise exceptions.EdisIOError(reason)
def write(self, content, new_filename=''):
""" Escribe los datos en el archivo """
DEBUG("Saving file...")
        # By default, the .c extension is appended if the file has none
ext = os.path.splitext(new_filename)
if not ext[-1]:
new_filename += '.c'
if self.is_new:
self._filename = new_filename
self._is_new = False
_file = QFile(self.filename)
if not _file.open(QIODevice.WriteOnly | QIODevice.Truncate):
raise exceptions.EdisIOError
out_file = QTextStream(_file)
out_file << content
if self._system_watcher is not None:
if self._filename is not None:
archivos = self._system_watcher.files()
self._system_watcher.removePath(archivos[0])
else:
self.run_system_watcher()
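    # Illustrative usage (the path is hypothetical): a new EdisFile has no
    # real name until the first write, which appends the .c extension when
    # one is missing and also starts the modification watcher.
    #
    #     f = EdisFile()
    #     f.write("int main() { return 0; }", new_filename="/tmp/demo")
    #     assert f.filename.endswith(".c") and not f.is_new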
def run_system_watcher(self):
""" Inicializa el control de monitoreo para modificaciones """
if self._system_watcher is None:
self._system_watcher = QFileSystemWatcher()
self.connect(self._system_watcher,
SIGNAL("fileChanged(const QString&)"),
self._on_file_changed)
self._last_modification = os.lstat(self.filename).st_mtime
self._system_watcher.addPath(self.filename)
DEBUG("Watching {0}".format(self.filename))
def stop_system_watcher(self):
if self._system_watcher is not None:
self._system_watcher.removePath(self.filename)
DEBUG("Stoping watching {0}".format(self.filename))
def _on_file_changed(self, filename):
mtime = os.lstat(filename).st_mtime
if mtime != self._last_modification:
            # Update the last known modification time
self._last_modification = mtime
            self.emit(SIGNAL("fileChanged(PyQt_PyObject)"), self)
| centaurialpha/edis | src/core/object_file.py | Python | gpl-3.0 | 3,249 |
# (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import pwd
import sys
import ConfigParser
def get_config(p, section, key, env_var, default):
if env_var is not None:
value = os.environ.get(env_var, None)
if value is not None:
return value
if p is not None:
try:
return p.get(section, key)
except:
return default
return default
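# Resolution order illustrated with hypothetical values: the environment
# variable wins over the config file entry, which wins over the default, so
# with ANSIBLE_FORKS=20 exported this call returns the string '20':
#
#     get_config(p, 'defaults', 'forks', 'ANSIBLE_FORKS', 5)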
def load_config_file():
p = ConfigParser.ConfigParser()
path1 = os.path.expanduser(os.environ.get('ANSIBLE_CONFIG', "~/.ansible.cfg"))
path2 = os.getcwd() + "/ansible.cfg"
path3 = "/etc/ansible/ansible.cfg"
if os.path.exists(path1):
p.read(path1)
elif os.path.exists(path2):
p.read(path2)
elif os.path.exists(path3):
p.read(path3)
else:
return None
return p
def shell_expand_path(path):
''' shell_expand_path is needed as os.path.expanduser does not work
when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE '''
if path:
path = os.path.expanduser(path)
return path
p = load_config_file()
active_user = pwd.getpwuid(os.geteuid())[0]
# Needed so the RPM can call setup.py and have modules land in the
# correct location. See #1277 for discussion
if getattr(sys, "real_prefix", None):
DIST_MODULE_PATH = os.path.join(sys.prefix, 'share/ansible/')
else:
DIST_MODULE_PATH = '/usr/share/ansible/'
# sections in config file
DEFAULTS='defaults'
# configurable things
DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', '/etc/ansible/hosts'))
DEFAULT_MODULE_PATH = shell_expand_path(get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', DIST_MODULE_PATH))
DEFAULT_REMOTE_TMP = shell_expand_path(get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp'))
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, '*')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', 'C')
DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10)
DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15)
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False)
DEFAULT_REMOTE_PORT = int(get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', 22))
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'paramiko')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', False)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', '0')
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', 'sudo')
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H')
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
DEFAULT_ACTION_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '/usr/share/ansible_plugins/action_plugins'))
DEFAULT_CALLBACK_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS', '/usr/share/ansible_plugins/callback_plugins'))
DEFAULT_CONNECTION_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS', '/usr/share/ansible_plugins/connection_plugins'))
DEFAULT_LOOKUP_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '/usr/share/ansible_plugins/lookup_plugins'))
DEFAULT_VARS_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '/usr/share/ansible_plugins/vars_plugins'))
DEFAULT_FILTER_PLUGIN_PATH = shell_expand_path(get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '/usr/share/ansible_plugins/filter_plugins'))
# non-configurable things
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
DEFAULT_SUBSET = None
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ZEROMQ_PORT = int(get_config(p, 'fireball', 'zeromq_port', 'ANSIBLE_ZEROMQ_PORT', 5099))
| kuno/ansible | lib/ansible/constants.py | Python | gpl-3.0 | 6,435 |
#!/usr/bin/env python
# Python modules
import sys
import time
import md5
import os
# 3rd party modules
import shm
# Modules for this project
import DemoConstants
def WriteToMemory(MemoryHandle, s):
MemoryHandle.attach()
say("writing %s " % s)
MemoryHandle.write(s + (MemoryHandle.size - len(s)) * ' ')
MemoryHandle.detach()
def ReadFromMemory(MemoryHandle):
MemoryHandle.attach()
s = MemoryHandle.read(MemoryHandle.size).strip()
say("read %s" % s)
MemoryHandle.detach()
return s
def say(s):
print "conclusion@%1.6f: %s" % (time.time(), s)
if len(sys.argv) != 2:
print "Please supply Mrs. Premise's integer key on the command line."
sys.exit(-1)
key = int(sys.argv[1])
SemaphoreHandle = shm.semaphore(shm.getsemid(key))
MemoryHandle = shm.memory(shm.getshmid(key))
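# Protocol sketch: Mrs. Premise and Mrs. Conclusion take turns under the
# semaphore (P() acquires, V() releases). Each side spins until the shared
# segment no longer holds its own last write, then answers with the md5 hex
# digest of whatever it just read, so each message must be the digest of the
# previous one.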
WhatIWrote = ""
s = ""
for i in xrange(0, DemoConstants.ITERATIONS):
say("i = %d" % i)
if DemoConstants.USE_SEMAPHORE:
# Wait for Mrs. Premise to free up the semaphore.
say("waiting for semaphore")
SemaphoreHandle.P()
s = ReadFromMemory(MemoryHandle)
while s == WhatIWrote:
if DemoConstants.USE_SEMAPHORE:
# Relinquish the semaphore...
say("relinquishing the semaphore")
SemaphoreHandle.V()
# ...and wait for it to become available again.
say("waiting for the semaphore")
SemaphoreHandle.P()
s = ReadFromMemory(MemoryHandle)
if WhatIWrote:
        if s != md5.new(WhatIWrote).hexdigest():
            raise AssertionError("Shared memory corruption after %d iterations." % i)
WhatIWrote = md5.new(s).hexdigest()
WriteToMemory(MemoryHandle, WhatIWrote)
if DemoConstants.USE_SEMAPHORE:
say("relinquishing the semaphore")
SemaphoreHandle.V()
| mharradon/SHMArrays | shm-1.2.2/demo/ConclusionUsingShm.py | Python | mit | 1,866 |
#!/usr/bin/env python3
import base64
import os
import re
import sys
import tarfile
import anchore_engine.analyzers.utils
import anchore_engine.utils
analyzer_name = "retrieve_files"
try:
config = anchore_engine.analyzers.utils.init_analyzer_cmdline(
sys.argv, analyzer_name
)
except Exception as err:
print(str(err))
sys.exit(1)
imgname = config["imgid"]
imageId = config["imgid_full"]
outputdir = config["dirs"]["outputdir"]
unpackdir = config["dirs"]["unpackdir"]
rootfsdir = "/".join([unpackdir, "rootfs"])
max_file_size_bytes = -1
files_to_store = list()
if "analyzer_config" in config and config["analyzer_config"]:
if (
"file_list" in config["analyzer_config"]
and type(config["analyzer_config"]["file_list"]) == list
):
files_to_store = config["analyzer_config"]["file_list"]
if (
"max_file_size_kb" in config["analyzer_config"]
and type(config["analyzer_config"]["max_file_size_kb"]) == int
):
max_file_size_bytes = config["analyzer_config"]["max_file_size_kb"] * 1024
if len(files_to_store) <= 0:
print(
"No file_list configuration found in analyzer_config.yaml for analyzer '"
+ analyzer_name
+ ", skipping"
)
sys.exit(0)
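# Illustrative analyzer_config.yaml stanza this module consumes (the exact
# file names and size cap are examples only):
#
#     retrieve_files:
#       file_list:
#       - '/etc/passwd'
#       - '/etc/services'
#       max_file_size_kb: 100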
outputdata = {}
with tarfile.open(
os.path.join(unpackdir, "squashed.tar"), mode="r", format=tarfile.PAX_FORMAT
) as tfl:
for name in files_to_store:
thefile = re.sub("^/+", "", name)
try:
member = tfl.getmember(thefile)
        except KeyError:
member = None
if member and member.isreg():
if max_file_size_bytes < 0 or member.size <= max_file_size_bytes:
b64buf = ""
try:
with tfl.extractfile(member) as FH:
buf = FH.read()
b64buf = anchore_engine.utils.ensure_str(base64.b64encode(buf))
outputdata[name] = b64buf
except Exception as err:
print(
"WARN: exception while reading/encoding file {} - exception: {}".format(
name, err
)
)
else:
print(
"WARN: skipping file {} in file list due to size {} > max file size bytes of {}".format(
thefile, member.size, max_file_size_bytes
)
)
if outputdata:
ofile = os.path.join(outputdir, "file_content.all")
anchore_engine.analyzers.utils.write_kvfile_fromdict(ofile, outputdata)
sys.exit(0)
| anchore/anchore-engine | anchore_engine/analyzers/modules/13_retrieve_files.py | Python | apache-2.0 | 2,629 |
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Michael Droettboom All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be interpreted
# as representing official policies, either expressed or implied, of
# the FreeBSD Project.
from __future__ import print_function, unicode_literals, absolute_import
TT_OS2__init__ = """
Information about the TrueType font, used on OS/2 and Microsoft
Windows.
Note that we now support old Mac fonts which do not include an OS/2
table. In this case, the `version` field is always set to 0xFFFF.
"""
TT_OS2_version = """
The version of the `TT_OS2` table.
If this table was synthesized from a font that has no OS/2 table, the
version is set to 0xFFFF.
"""
TT_OS2_x_avg_char_width = """
Specifies the arithmetic average of the advance width of all of the 26
lowercase letters of the Latin alphabet and the space character. If
any of the 26 lowercase letters are not present, this parameter should
equal zero.
This parameter is a descriptive attribute of the font that specifies
the spacing of characters used for comparing one font to another for
selection or substitution. For proportionally spaced fonts, this value
is useful in estimating the length for lines of text.
"""
TT_OS2_weight_class = """
A `TT_WEIGHT_CLASS` value. Indicates the visual weight (degree of
blackness or thickness of strokes) of the characters in the font.
"""
TT_WEIGHT_CLASS = """
|freetypy| Weight values for the `TT_OS2.weight_class` property.
- `ULTRA_LIGHT`
- `EXTRA_LIGHT`
- `LIGHT`
- `SEMI_LIGHT`
- `MEDIUM` (aliased to `NORMAL`)
- `SEMI_BOLD`
- `BOLD`
- `EXTRA_BOLD`
- `ULTRA_BOLD`
"""
TT_OS2_width_class = """
A `TT_WIDTH_CLASS` value. Indicates a relative change from the normal
aspect ratio (width to height ratio) as specified by a font designer
for the glyphs in a font.
Although every character in a font may have a different numeric aspect
ratio, each character in a font of normal width has a relative aspect
ratio of one. When a new type style is created of a different width
class (either by a font designer or by some automated means) the
relative aspect ratio of the characters in the new font is some
percentage greater or less than those same characters in the normal
font; it is this difference that this parameter specifies.
"""
TT_WIDTH_CLASS = """
|freetypy| Width values for the `TT_OS2.width_class` property.
- `ULTRA_CONDENSED`: 50% of normal
- `EXTRA_CONDENSED`: 62.5% of normal
- `CONDENSED`: 75% of normal
- `SEMI_CONDENSED`: 87.5% of normal
- `MEDIUM`: 100% of normal (aliased to `NORMAL`)
- `SEMI_EXPANDED`: 112.5% of normal
- `EXPANDED`: 125% of normal
- `EXTRA_EXPANDED`: 150% of normal
- `ULTRA_EXPANDED`: 200% of normal
"""
TT_OS2_type = """
`FSTYPE` bitflags indicating the licensing restrictions on a font.
"""
TT_OS2_y_subscript_x_size = """
Maps to the em square size of the font being used for a subscript. If
a font has two recommended sizes for subscripts, e.g., numerics and
other, the numeric sizes should be stressed. The horizontal font size
specifies a font designer's recommended horizontal font size for
subscript characters associated with this font. If a font does not
include all of the required subscript characters for an application,
and the application can substitute characters by scaling the character
of a font or by substituting characters from another font, this
parameter specifies the recommended em square for those subscript
characters. For example, if the em square for a font is 2048 and
`y_subscript_x_size` is set to 205, then the horizontal size for a
simulated subscript character would be 1/10th the size of the normal
character.
"""
TT_OS2_y_subscript_y_size = """
Maps to the emHeight of the font being used for a subscript. If a
font has two recommended sizes for subscripts, e.g. numerics and
other, the numeric sizes should be stressed. The vertical font size
specifies a font designer's recommendation for the vertical font size of
subscript characters associated with this font. If a font does not
include all of the required subscript characters for an application,
and the application can substitute characters by scaling the
characters in a font or by substituting characters from another font,
this parameter specifies the recommended vertical EmInc for those
subscript characters. For example, if the em square for a font is
2048 and `y_subScript_y_size` is set to 205, then the vertical size
for a simulated subscript character would be 1/10th the size of the
normal character.
"""
TT_OS2_y_subscript_x_offset = """
Specifies a font designer's recommended horizontal offset for
subscript characters. It is from the character origin of the font to
the character origin of the subscript's character. If a font does not
include all of the required subscript characters for an application,
and the application can substitute characters, this parameter
specifies the recommended horizontal position from the character
escapement point of the last character before the first subscript
character. For upright characters, this value is usually zero;
however, if the characters of a font have an incline (italic
characters) the reference point for subscript characters is usually
adjusted to compensate for the angle of incline.
"""
TT_OS2_y_subscript_y_offset = """
Specifies a font designer's recommended vertical offset from the
character baseline to the character baseline for subscript characters
associated with this font. Values are expressed as a positive offset
below the character baseline. If a font does not include all of the
required subscript for an application, this parameter specifies the
recommended vertical distance below the character baseline for those
subscript characters.
"""
TT_OS2_y_superscript_x_size = """
Maps to the em square size of the font being used for a subscript. If
a font has two recommended sizes for subscripts, e.g., numerics and
other, the numeric sizes should be stressed. The horizontal font size
specifies a font designer's recommended horizontal font size for
superscript characters associated with this font. If a font does not
include all of the required superscript characters for an application,
and the application can substitute characters by scaling the characters
of a font or by substituting characters from another font, this
parameter specifies the recommended em square for those superscript
characters. For example, if the em square for a font is 2048 and
`y_superscript_x_size` is set to 205, then the horizontal size for a
simulated superscript character would be 1/10th the size of the normal
character.
"""
TT_OS2_y_superscript_y_size = """
Maps to the emHeight of the font being used for a superscript. If a
font has two recommended sizes for superscripts, e.g., numerics and other,
the numeric sizes should be stressed. The vertical font size specifies
a font designer's recommended vertical font size for superscript
characters associated with this font. If a font does not include all
of the required superscript characters for an application, and the
application can substitute characters by scaling the characters of a
font or by substituting characters from another font, this parameter
specifies the recommended EmHeight for those superscript characters.
For example, if the em square for a font is 2048 and `y_superscript_y_size`
is set to 205, then the vertical size for a simulated superscript
character would be 1/10th the size of the normal character.
"""
TT_OS2_y_superscript_x_offset = """
Specifies a font designer's recommended horizontal offset for the
superscript characters associated with this font. It is from the
character origin to the superscript character's origin. If a font does
not include all of the required superscript characters for an
application, this parameter specifies the recommended horizontal
position from the escapement point of the character before the first
superscript character. For upright characters, this value is usually
zero; however, if the characters of a font have an incline (italic
characters) the reference point for superscript characters is usually
adjusted to compensate for the angle of incline.
"""
TT_OS2_y_superscript_y_offset = """
Specifies a font designer's recommended vertical offset for
superscript characters. It is from the character baseline to the
superscript character's baseline associated with this font. Values for
this parameter are expressed as a positive offset above the character
baseline. If a font does not include all of the required superscript
characters for an application, this parameter specifies the
recommended vertical distance above the character baseline for those
superscript characters.
"""
TT_OS2_y_strikeout_size = """
The size of the strikeout line. This field should normally be the
width of the em-dash for the current font. If the size is one, the
strikeout line will be the line represented by the strikeout position
field. If the value is two, the strikeout line will be the line
represented by the strikeout position and the line immediately above
the strikeout position.
"""
TT_OS2_y_strikeout_position = """
The position of the top of the strikeout line relative to the
baseline. Positive values represent distances
above the baseline, while negative values represent distances below
the baseline. A value of zero falls directly on the baseline, while a
value of one falls one pixel above the baseline. The value of
strikeout position should not interfere with the recognition of
standard characters, and therefore should not line up with crossbars
in the font.
"""
TT_OS2_family_class = """
Classifies a font design as to its appearance. It does not identify
the specific font family, typeface variation, designer, supplier,
size, or metric table differences. This is the high byte of the
`sFamilyClass` field.
|freetypy| The use of this is limited, so the values are not provided
as enumerations. See `the TrueType specification
<https://developer.apple.com/fonts/TTRefMan/RM06/Chap6OS2.html>`_ for
more information.
"""
TT_OS2_family_subclass = """
Classifies a font design as to its appearance. It does not identify
the specific font family, typeface variation, designer, supplier,
size, or metric table differences. This is the low byte of the
`sFamilyClass` field.
|freetypy| The use of this is limited, so the values are not provided
as enumerations. See `the TrueType specification
<https://developer.apple.com/fonts/TTRefMan/RM06/Chap6OS2.html>`_ for
more information.
"""
TT_OS2_panose = """
The PANOSE classification number.
|freetypy| The use of this is limited, so the values are not provided
as enumerations. See `the TrueType specification
<https://developer.apple.com/fonts/TTRefMan/RM06/Chap6OS2.html>`_ for
more information. A more Pythonic interface may be provided in the
future if needed.
It is a 10-byte string where each byte is as follows:
- 0: Family type
- 1: Serif style
- 2: Weight
- 3: Proportion
- 4: Contrast
- 5: Stroke Variation
- 6: Arm Style
- 7: Letterform
- 8: Midline
- 9: x height
"""
TT_OS2_vend_id = """
Identifies the font vendor.
It is not the royalty owner of the original artwork but the company
responsible for the marketing and distribution of the typeface that is
being classified. It is reasonable to assume that there will be 6
vendors of ITC Zapf Dingbats for use on desktop platforms in the near
future (if not already). It is also likely that the vendors will have
other inherent benefits in their fonts (more kern pairs, unregularized
data, hand hinted, etc.). This identifier will allow for the correct
vendor's type to be used over another, possibly inferior, font
file. These IDs are assigned by Microsoft.
"""
TT_OS2_selection = """
A `TT_FS_SELECTION` bitflag.
"""
TT_FS_SELECTION = """
Bitflag concerning the nature of the font patterns.
- `ITALIC`
- `UNDERSCORE`
- `NEGATIVE`
- `OUTLINED`
- `STRIKEOUT`
- `BOLD`
"""
TT_OS2_first_char_index = """
The minimum Unicode index in this font.
"""
TT_OS2_last_char_index = """
The maximum Unicode index in this font.
"""
TT_OS2_typo_ascender = """
The typographic ascender for this font. Remember that this is not the
same as `TT_HoriHeader.ascender`, which Apple defines in a far
different manner.
The suggested usage for `typo_ascender` is that it be used in
conjunction with `TT_Header.units_per_em` to compute a typographically
correct default line spacing. The goal is to free applications from
Macintosh or Windows-specific metrics which are constrained by
backward compatibility requirements. These new metrics, when combined
with the character design widths, will allow applications to lay out
documents in a typographically correct and portable fashion.
For CJK (Chinese, Japanese, and Korean) fonts that are intended to be
used for vertical writing (in addition to horizontal writing), the
required value for `typo_ascender` is that which describes the top of
the ideographic em-box. For example, if the ideographic em-box
of the font extends from coordinates 0,-120 to 1000,880 (that is, a
1000x1000 box set 120 design units below the Latin baseline), then the
value of `typo_ascender` must be set to 880. Failing to adhere to these
requirements will result in incorrect vertical layout.
"""
TT_OS2_typo_descender = """
The typographic descender for this font. Remember that this is not the
same as the `TT_HoriHeader.descender`, which Apple defines in a far
different manner.
The suggested usage for `typo_descender` is that it be used in
conjunction with `TT_Header.units_per_em` to compute a typographically
correct default line spacing. The goal is to free applications from
Macintosh or Windows-specific metrics which are constrained by
backward compatibility requirements. These new metrics, when combined
with the character design widths, will allow applications to lay out
documents in a typographically correct and portable fashion.
For CJK (Chinese, Japanese, and Korean) fonts that are intended to be
used for vertical writing (in addition to horizontal writing), the
required value for `typo_descender` is that which describes the bottom
of the ideographic em-box. For example, if the ideographic
em-box of the font extends from coordinates 0,-120 to 1000,880 (that
is, a 1000x1000 box set 120 design units below the Latin baseline),
then the value of `typo_descender` must be set to -120. Failing to
adhere to these requirements will result in incorrect vertical layout.
"""
TT_OS2_typo_line_gap = """
The typographic line gap for this font. Remember that this is not the
same as `TT_HoriHeader.line_gap`, which Apple defines in a far
different manner.
The suggested usage for `typo_line_gap` is that it be used in
conjunction with `TT_Header.units_per_em` to compute a typographically
correct default line spacing. Typical values average 7-10% of units
per em. The goal is to free applications from Macintosh or
Windows-specific metrics which are constrained by backward
compatibility requirements. These new metrics, when combined with the
character design widths, will allow applications to lay out documents
in a typographically correct and portable fashion.
"""
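# A minimal sketch of the recommended line-spacing computation using the
# three typographic metrics; the design values are hypothetical:
#
# >>> units_per_em = 2048
# >>> typo_ascender, typo_descender, typo_line_gap = 1638, -410, 184
# >>> ppem = 12                      # target pixels per em
# >>> line_height = typo_ascender - typo_descender + typo_line_gap
# >>> round(line_height * ppem / units_per_em, 2)   # pixels per text line
# 13.08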
TT_OS2_win_ascent = """
The ascender metric for Windows. This, too, is distinct from Apple's
`TT_HoriHeader.ascender` value and from the `typo_ascender`
values. `win_ascent` is computed as the ``y_max`` for all characters
in the Windows ANSI character set. `win_ascent` is used to compute the
Windows font height and default line spacing. For platform 3 encoding
0 fonts, it is the same as `TT_Header.y_max`. Windows will clip the
bitmap of any portion of a glyph that appears above this value. Some
applications use this value to determine default line spacing. This is
strongly discouraged. The typographic ascender, descender and line gap
fields in conjunction with `TT_Header.units_per_em` should be used for
this purpose. Developers should set this field keeping the above
factors in mind.
If any clipping is unacceptable, then the value should be set to
`TT_Header.y_max`.
However, if a developer desires to provide appropriate default line
spacing using this field, for those applications that continue to use
this field for doing so (against OpenType recommendations), then the
value should be set appropriately. In such a case, it may result in
some glyph bitmaps being clipped.
"""
TT_OS2_win_descent = """
The descender metric for Windows. This, too, is distinct from Apple's
`TT_HoriHeader.descender` value and from the
`typo_descender`. `win_descent` is computed as the ``-y_min`` for all
characters in the Windows ANSI character set. `win_descent` is used to
compute the Windows font height and default line spacing. For platform
3 encoding 0 fonts, it is the same as ``-TT_Header.y_min``. Windows
will clip the bitmap of any portion of a glyph that appears below this
value. Some applications use this value to determine default line
spacing. This is strongly discouraged. The typographic ascender,
descender and line gap fields in conjunction with unitsPerEm should be
used for this purpose. Developers should set this field keeping the
above factors in mind.
If any clipping is unacceptable, then the value should be set to
`TT_Header.y_min`.
However, if a developer desires to provide appropriate default line
spacing using this field, for those applications that continue to use
this field for doing so (against OpenType recommendations), then the
value should be set appropriately. In such a case, it may result in
some glyph bitmaps being clipped.
"""
TT_OS2_x_height = """
This metric specifies the distance between the baseline and the
approximate height of non-ascending lowercase letters measured in font
units. This value would normally be specified by a type designer but
in situations where that is not possible, for example when a legacy
font is being converted, the value may be set equal to the top of the
unscaled and unhinted glyph bounding box of the glyph encoded at
``U+0078`` (``LATIN SMALL LETTER X``). If no glyph is encoded in this
position the field should be set to 0.
This metric, if specified, can be used in font substitution: the
`x_height` value of one font can be scaled to approximate the apparent
size of another.
"""
TT_OS2_cap_height = """
This metric specifies the distance between the baseline and the
approximate height of uppercase letters measured in font units. This
value would normally be specified by a type designer but in situations
where that is not possible, for example when a legacy font is being
converted, the value may be set equal to the top of the unscaled and
unhinted glyph bounding box of the glyph encoded at ``U+0048`` (``LATIN
CAPITAL LETTER H``). If no glyph is encoded in this position the field
should be set to 0.
This metric, if specified, can be used in systems that specify type
size by capital height measured in millimeters. It can also be used as
an alignment metric; the top of a drop capital, for instance, can be
aligned to the `cap_height` metric of the first line of text.
"""
TT_OS2_default_char = """
Whenever a request is made for a character that is not in the font,
Windows provides this default character. If the value of this field is
zero, glyph ID 0 is to be used for the default character otherwise
this is the Unicode encoding of the glyph that Windows uses as the
default character. This field cannot represent supplementary character
values (codepoints greater than ``0xFFFF``), and so applications are
strongly discouraged from using this field.
"""
TT_OS2_break_char = """
This is the Unicode encoding of the glyph that Windows uses as the
break character. The break character is used to separate words and
justify text. Most fonts specify 'space' as the break character. This
field cannot represent supplementary character values (codepoints
greater than ``0xFFFF``), and so applications are strongly discouraged
from using this field.
"""
TT_OS2_max_context = """
The maximum length of a target glyph context for any feature in this
font. For example, a font which has only a pair kerning feature should
set this field to 2. If the font also has a ligature feature in which
the glyph sequence "f f i" is substituted by the ligature "ffi", then
this field should be set to 3. This field could be useful to
sophisticated line-breaking engines in determining how far they should
look ahead to test whether something could change that affects the
line breaking. For chaining contextual lookups, the length of the
string (covered glyph) + (input sequence) + (lookahead sequence)
should be considered.
"""
| mdboom/freetypy | docstrings/tt_os2.py | Python | bsd-2-clause | 21,909 |
# pro-forma settings file.
#~ from djangosite import NoLocalSite as Site
from djangosite import Site
#~ SITE = Site(__file__,globals(),languages="en de fr nl et",no_local=True)
SITE = Site(globals(),no_local=True)
SECRET_KEY = "20227" # see :djangoticket:`20227`
| lsaffre/djangosite | docs/settings.py | Python | bsd-2-clause | 263 |
#!/usr/bin/env python
#
#
#
# Copyright (c) 2009-2015 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
from django.conf.urls import url, patterns
from django.contrib import admin
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
admin.autodiscover()
from omerostats.registry import views
# url patterns
urlpatterns = patterns(
'',
url(r'^login/$', views.login, name='registry_login'),
url(r'^logout/$', views.logout_view, name='qa_logout'),
url(r'^geoxml/$', views.get_markers_as_xml, name='registry_geoxml'),
url(r'^local_statistic/$', views.local_statistic,
name='registry_local_statistic'),
url(r'^monthly_statistics/$', views.monthly_statistics,
name='registry_monthly_statistics'),
)
urlpatterns += staticfiles_urlpatterns()
| openmicroscopy/stats | omerostats/registry/urls.py | Python | agpl-3.0 | 1,687 |
# This directory is a Python package.
from .cpp import *
from .java import *
from .root_paths import *
from .deploy import *
| fifoforlifo/pynja | test2/build/repo/__init__.py | Python | apache-2.0 | 126 |
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module contains the class describing the coordination geometries that can exist in a given structure. These
"model" coordination geometries are described in the following articles :
- Pure Appl. Chem., Vol. 79, No. 10, pp. 1779--1799, 2007.
- Acta Cryst. A, Vol. 46, No. 1, pp. 1--11, 1990.
The module also contains descriptors of part of these geometries (plane of separation, ...) that are used in the
identification algorithms.
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__credits__ = "Geoffroy Hautier"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "[email protected]"
__date__ = "Feb 20, 2016"
import numpy as np
from scipy.special import factorial
import itertools
import abc
from monty.json import MSONable, MontyDecoder
import json
import os
module_dir = os.path.dirname(os.path.abspath(__file__))
UNKNOWN_ENVIRONMENT_SYMBOL = 'UNKNOWN'
UNCLEAR_ENVIRONMENT_SYMBOL = 'UNCLEAR'
EXPLICIT_PERMUTATIONS = 'EXPLICIT_PERMUTATIONS'
SEPARATION_PLANE = 'SEPARATION_PLANE'
class AbstractChemenvAlgorithm(MSONable, metaclass=abc.ABCMeta):
"""
Base class used to define a Chemenv algorithm used to identify the correct permutation for the computation
of the Continuous Symmetry Measure.
"""
def __init__(self, algorithm_type):
"""
Base constructor for ChemenvAlgorithm.
Args:
algorithm_type (str): Type of algorithm.
"""
self._algorithm_type = algorithm_type
@abc.abstractmethod
def as_dict(self):
"""
A JSON serializable dict representation of the algorithm
"""
pass
@property
def algorithm_type(self):
"""
Return the type of algorithm.
Returns: Type of the algorithm
"""
return self._algorithm_type
@abc.abstractmethod
def __str__(self):
return
class ExplicitPermutationsAlgorithm(AbstractChemenvAlgorithm):
"""
Class representing the algorithm doing the explicit permutations for the calculation of
the Continuous Symmetry Measure.
"""
def __init__(self, permutations):
"""
Initializes a separation plane for a given perfect coordination geometry.
Args:
permutations: Permutations used for this algorithm.
"""
super().__init__(
algorithm_type=EXPLICIT_PERMUTATIONS)
self._permutations = permutations
def __str__(self):
return self.algorithm_type
@property
def permutations(self):
"""
Return the permutations to be performed for this algorithm.
Returns: Permutations to be performed.
"""
return self._permutations
@property
def as_dict(self):
"""
Return the JSON serializable dict representation of this ExplicitPermutationsAlgorithm algorithm.
Returns: a JSON serializable dict representation of this ExplicitPermutationsAlgorithm algorithm.
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"permutations": self._permutations}
@classmethod
def from_dict(cls, dd):
"""
Reconstructs the ExplicitPermutationsAlgorithm algorithm from its JSON serializable dict representation.
Args:
dd: a JSON serializable dict representation of an ExplicitPermutationsAlgorithm algorithm.
Returns: an ExplicitPermutationsAlgorithm algorithm.
"""
return cls(dd['permutations'])
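# A minimal usage sketch (the permutations are made up, not those of a
# real geometry): the algorithm simply carries an explicit permutation
# list and round-trips through its dict representation.
#
# >>> algo = ExplicitPermutationsAlgorithm(permutations=[[0, 1, 2], [0, 2, 1]])
# >>> algo.permutations
# [[0, 1, 2], [0, 2, 1]]
# >>> ExplicitPermutationsAlgorithm.from_dict(algo.as_dict).permutations
# [[0, 1, 2], [0, 2, 1]]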
class SeparationPlane(AbstractChemenvAlgorithm):
"""
Class representing the algorithm using separation planes for the calculation of
the Continuous Symmetry Measure.
"""
def __init__(self, plane_points, mirror_plane=False, ordered_plane=False,
point_groups=None,
ordered_point_groups=None, # include_inverted_plane=False,
# do_inverse_pt_gp_permutations=False, plane_type='MIRROR',
explicit_permutations=None, minimum_number_of_points=None,
explicit_optimized_permutations=None,
multiplicity=None,
other_plane_points=None): # , plane_safe_permutations=False):
"""
Initializes a separation plane for a given perfect coordination geometry
Args:
plane_points: Indices of the points that are in the plane in the perfect structure (and should be
found in the defective one as well).
mirror_plane: True if the separation plane is a mirror plane, in which case there is a correspondence
of the points in each point_group (can reduce the number of permutations).
ordered_plane: True if the order of the points in the plane can be taken into account to reduce the
number of permutations.
point_groups: Indices of the points in the two groups of points separated by the plane.
ordered_point_groups: Whether the order of the points in each group of points can be taken into account to
reduce the number of permutations.
explicit_permutations: Explicit permutations to be performed in this separation plane algorithm.
minimum_number_of_points: Minimum number of points needed to initialize a separation plane
for this algorithm.
explicit_optimized_permutations: Optimized set of explicit permutations to be performed in this
separation plane algorithm.
multiplicity: Number of such planes in the model geometry.
other_plane_points: Indices of the points that are in the plane in the perfect structure for the other
planes. The multiplicity should be equal to the length of this list + 1 ("main" separation plane +
the other ones).
"""
super().__init__(algorithm_type=SEPARATION_PLANE)
self.mirror_plane = mirror_plane
self.plane_points = plane_points
self.point_groups = point_groups
if len(point_groups[0]) > len(point_groups[1]):
raise RuntimeError(
"The number of points in the first group should be\n"
"less than or equal to the number of points in the second group")
self._hash = 10000 * len(plane_points) + 100 * len(
point_groups[0]) + len(point_groups[1])
self.ordered_plane = ordered_plane
self.ordered_point_groups = [False,
False] if ordered_point_groups is None else ordered_point_groups
# self._ordered_indices = list(point_groups[0])
# self._ordered_indices.extend(plane_points)
# self._ordered_indices.extend(point_groups[1])
# self._inv_ordered_indices = np.argsort(self._ordered_indices)
self.explicit_permutations = explicit_permutations
self.explicit_optimized_permutations = explicit_optimized_permutations
self._safe_permutations = None
if self.explicit_optimized_permutations is not None:
self._permutations = self.explicit_optimized_permutations
elif self.explicit_permutations is not None:
self._permutations = self.explicit_permutations
self.multiplicity = multiplicity
self.other_plane_points = other_plane_points
self.minimum_number_of_points = minimum_number_of_points
self.maximum_number_of_points = len(self.plane_points)
self._ref_separation_perm = list(self.point_groups[0])
self._ref_separation_perm.extend(list(self.plane_points))
self._ref_separation_perm.extend(list(self.point_groups[1]))
self._argsorted_ref_separation_perm = list(
np.argsort(self._ref_separation_perm))
self.separation = (len(point_groups[0]), len(plane_points), len(point_groups[1]))
# @property
# def ordered_indices(self):
# """
# Ordered indices of the separation plane.
#
# Examples:
# For a separation plane of type 2|4|3, with plane_points indices [0, 3, 5, 8] and
# point_groups indices [1, 4] and [2, 7, 6], the list of ordered indices is :
# [0, 3, 5, 8, 1, 4, 2, 7, 6].
#
# Returns: list of ordered indices of this separation plane.
# """
# return self._ordered_indices
#
# @property
# def inv_ordered_indices(self):
# return self._inv_ordered_indices
@property
def permutations(self):
"""
Permutations used for this separation plane algorithm.
Returns: List of permutations to be performed.
"""
return self._permutations
@property
def ref_separation_perm(self):
"""
Ordered indices of the separation plane.
Examples:
For a separation plane of type 2|4|3, with plane_points indices [0, 3, 5, 8] and
point_groups indices [1, 4] and [2, 7, 6], the list of ordered indices is :
[0, 3, 5, 8, 1, 4, 2, 7, 6].
Returns: list of ordered indices of this separation plane.
"""
return self._ref_separation_perm
@property
def argsorted_ref_separation_perm(self):
"""
"Arg sorted" ordered indices of the separation plane.
This is used in the identification of the final permutation to be used.
Returns: list of the "arg sorted" ordered indices of the separation plane.
"""
return self._argsorted_ref_separation_perm
def safe_separation_permutations(self, ordered_plane=False,
ordered_point_groups=None,
add_opposite=False):
"""
Simple and safe permutations for this separation plane.
This is not meant to be used in production. Default configuration for ChemEnv does not use this method.
Args:
ordered_plane: Whether the order of the points in the plane can be used to reduce the
number of permutations.
ordered_point_groups: Whether the order of the points in each point group can be used to reduce the
number of permutations.
add_opposite: Whether to add the permutations from the second group before the first group as well.
Returns: List of safe permutations.
"""
s0 = list(range(len(self.point_groups[0])))
plane = list(range(len(self.point_groups[0]),
len(self.point_groups[0]) + len(self.plane_points)))
s1 = list(range(len(self.point_groups[0]) + len(self.plane_points),
len(self.point_groups[0]) + len(self.plane_points) + len(
self.point_groups[1])))
ordered_point_groups = [False,
False] if ordered_point_groups is None else ordered_point_groups
def rotate(s, n):
return s[-n:] + s[:-n]
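        # e.g. rotate([0, 1, 2, 3], 1) == [3, 0, 1, 2]: cyclic shifts (and,
        # below, their reversals) enumerate the orderings of a ring of points
        # without generating all factorial permutations.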
if ordered_plane and self.ordered_plane:
plane_perms = [rotate(plane, ii) for ii in range(len(plane))]
inv_plane = plane[::-1]
plane_perms.extend(
[rotate(inv_plane, ii) for ii in range(len(inv_plane))])
else:
plane_perms = list(itertools.permutations(plane))
if ordered_point_groups[0] and self.ordered_point_groups[0]:
s0_perms = [rotate(s0, ii) for ii in range(len(s0))]
inv_s0 = s0[::-1]
s0_perms.extend([rotate(inv_s0, ii) for ii in range(len(inv_s0))])
else:
s0_perms = list(itertools.permutations(s0))
if ordered_point_groups[1] and self.ordered_point_groups[1]:
s1_perms = [rotate(s1, ii) for ii in range(len(s1))]
inv_s1 = s1[::-1]
s1_perms.extend([rotate(inv_s1, ii) for ii in range(len(inv_s1))])
else:
s1_perms = list(itertools.permutations(s1))
if self._safe_permutations is None:
self._safe_permutations = []
for perm_side1 in s0_perms:
for perm_sep_plane in plane_perms:
for perm_side2 in s1_perms:
perm = list(perm_side1)
perm.extend(list(perm_sep_plane))
perm.extend(list(perm_side2))
self._safe_permutations.append(perm)
if add_opposite:
perm = list(perm_side2)
perm.extend(list(perm_sep_plane))
perm.extend(list(perm_side1))
self._safe_permutations.append(perm)
return self._safe_permutations
@property
def as_dict(self):
"""
Return the JSON serializable dict representation of this SeparationPlane algorithm.
Returns: a JSON serializable dict representation of this SeparationPlane algorithm.
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"plane_points": self.plane_points,
"mirror_plane": self.mirror_plane,
"ordered_plane": self.ordered_plane,
"point_groups": self.point_groups,
"ordered_point_groups": self.ordered_point_groups,
"explicit_permutations": [eperm.tolist() for eperm in self.explicit_permutations]
if self.explicit_permutations is not None else None,
"explicit_optimized_permutations": [eoperm.tolist()
for eoperm in self.explicit_optimized_permutations]
if self.explicit_optimized_permutations is not None else None,
"multiplicity": self.multiplicity,
"other_plane_points": self.other_plane_points,
"minimum_number_of_points": self.minimum_number_of_points}
@classmethod
def from_dict(cls, dd):
"""
Reconstructs the SeparationPlane algorithm from its JSON serializable dict representation.
Args:
dd: a JSON serializable dict representation of an SeparationPlane algorithm.
Returns: a SeparationPlane algorithm.
"""
eop = [np.array(eoperm) for eoperm in dd[
'explicit_optimized_permutations']] if ('explicit_optimized_permutations' in dd and
dd['explicit_optimized_permutations'] is not None) else None
return cls(plane_points=dd['plane_points'],
mirror_plane=dd['mirror_plane'],
ordered_plane=dd['ordered_plane'],
point_groups=dd['point_groups'],
ordered_point_groups=dd['ordered_point_groups'],
explicit_permutations=[np.array(eperm) for eperm in dd['explicit_permutations']],
explicit_optimized_permutations=eop,
multiplicity=dd[
'multiplicity'] if 'multiplicity' in dd else None,
other_plane_points=dd[
'other_plane_points'] if 'other_plane_points' in dd else None,
minimum_number_of_points=dd['minimum_number_of_points'])
def __str__(self):
out = 'Separation plane algorithm with the following reference separation :\n'
out += '[{}] | [{}] | [{}]'.format(
            '-'.join(str(pp) for pp in self.point_groups[0]),
            '-'.join(str(pp) for pp in self.plane_points),
            '-'.join(str(pp) for pp in self.point_groups[1]),
)
return out
class CoordinationGeometry:
"""
Class used to store the ideal representation of a chemical environment or "coordination geometry".
"""
# Default value of continuous symmetry measure beyond which no further
# search is performed for the separation plane algorithms
CSM_SKIP_SEPARATION_PLANE_ALGO = 10.0
class NeighborsSetsHints:
"""
Class used to describe neighbors sets hints.
        This makes it possible to derive a lower coordination from a capped-like model polyhedron.
"""
ALLOWED_HINTS_TYPES = ['single_cap', 'double_cap', 'triple_cap']
def __init__(self, hints_type, options):
"""
Constructor for this NeighborsSetsHints.
Args:
hints_type: type of hint (single, double or triple cap)
options: options for the "hinting", e.g. the maximum csm value beyond which no additional
neighbors set could be found from a "cap hint".
"""
if hints_type not in self.ALLOWED_HINTS_TYPES:
                raise ValueError('Type "{}" for NeighborsSetsHints is not allowed'.format(hints_type))
self.hints_type = hints_type
self.options = options
def hints(self, hints_info):
"""
Return hints for an additional neighbors set, i.e. the voronoi indices that constitute this new
neighbors set.
Args:
hints_info: Info needed to build new "hinted" neighbors set.
Returns: Voronoi indices of the new "hinted" neighbors set.
"""
if hints_info['csm'] > self.options['csm_max']:
return []
return object.__getattribute__(self, '{}_hints'.format(self.hints_type))(hints_info)
def single_cap_hints(self, hints_info):
"""
Return hints for an additional neighbors set, i.e. the voronoi indices that constitute this new
neighbors set, in case of a "Single cap" hint.
Args:
hints_info: Info needed to build new "hinted" neighbors set.
Returns: Voronoi indices of the new "hinted" neighbors set.
"""
cap_index_perfect = self.options['cap_index']
nb_set = hints_info['nb_set']
permutation = hints_info['permutation']
nb_set_voronoi_indices_perfect_aligned = nb_set.get_neighb_voronoi_indices(permutation=permutation)
cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[cap_index_perfect]
new_site_voronoi_indices = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices.remove(cap_voronoi_index)
return [new_site_voronoi_indices]
def double_cap_hints(self, hints_info):
"""
Return hints for an additional neighbors set, i.e. the voronoi indices that constitute this new
neighbors set, in case of a "Double cap" hint.
Args:
hints_info: Info needed to build new "hinted" neighbors set.
Returns: Voronoi indices of the new "hinted" neighbors set.
"""
first_cap_index_perfect = self.options['first_cap_index']
second_cap_index_perfect = self.options['second_cap_index']
nb_set = hints_info['nb_set']
permutation = hints_info['permutation']
nb_set_voronoi_indices_perfect_aligned = nb_set.get_neighb_voronoi_indices(permutation=permutation)
first_cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[first_cap_index_perfect]
second_cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[second_cap_index_perfect]
new_site_voronoi_indices1 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices2 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices3 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices1.remove(first_cap_voronoi_index)
new_site_voronoi_indices2.remove(second_cap_voronoi_index)
new_site_voronoi_indices3.remove(first_cap_voronoi_index)
new_site_voronoi_indices3.remove(second_cap_voronoi_index)
return [new_site_voronoi_indices1, new_site_voronoi_indices2, new_site_voronoi_indices3]
def triple_cap_hints(self, hints_info):
"""
Return hints for an additional neighbors set, i.e. the voronoi indices that constitute this new
neighbors set, in case of a "Triple cap" hint.
Args:
hints_info: Info needed to build new "hinted" neighbors set.
Returns: Voronoi indices of the new "hinted" neighbors set.
"""
first_cap_index_perfect = self.options['first_cap_index']
second_cap_index_perfect = self.options['second_cap_index']
third_cap_index_perfect = self.options['third_cap_index']
nb_set = hints_info['nb_set']
permutation = hints_info['permutation']
nb_set_voronoi_indices_perfect_aligned = nb_set.get_neighb_voronoi_indices(permutation=permutation)
first_cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[first_cap_index_perfect]
second_cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[second_cap_index_perfect]
third_cap_voronoi_index = nb_set_voronoi_indices_perfect_aligned[third_cap_index_perfect]
new_site_voronoi_indices1 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices2 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices3 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices4 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices5 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices6 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices7 = list(nb_set.site_voronoi_indices)
new_site_voronoi_indices1.remove(first_cap_voronoi_index)
new_site_voronoi_indices2.remove(second_cap_voronoi_index)
new_site_voronoi_indices3.remove(third_cap_voronoi_index)
new_site_voronoi_indices4.remove(second_cap_voronoi_index)
new_site_voronoi_indices4.remove(third_cap_voronoi_index)
new_site_voronoi_indices5.remove(first_cap_voronoi_index)
new_site_voronoi_indices5.remove(third_cap_voronoi_index)
new_site_voronoi_indices6.remove(first_cap_voronoi_index)
new_site_voronoi_indices6.remove(second_cap_voronoi_index)
new_site_voronoi_indices7.remove(first_cap_voronoi_index)
new_site_voronoi_indices7.remove(second_cap_voronoi_index)
new_site_voronoi_indices7.remove(third_cap_voronoi_index)
return [new_site_voronoi_indices1, new_site_voronoi_indices2, new_site_voronoi_indices3,
new_site_voronoi_indices4, new_site_voronoi_indices5, new_site_voronoi_indices6,
new_site_voronoi_indices7]
def as_dict(self):
"""
A JSON serializable dict representation of this NeighborsSetsHints
"""
return {'hints_type': self.hints_type,
'options': self.options}
@classmethod
def from_dict(cls, dd):
"""
Reconstructs the NeighborsSetsHints from its JSON serializable dict representation.
Args:
dd: a JSON serializable dict representation of a NeighborsSetsHints.
Returns: a NeighborsSetsHints.
"""
return cls(hints_type=dd['hints_type'],
options=dd['options'])
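        # A minimal usage sketch (the option values are assumptions, not
        # tuned defaults): a "single cap" hint that suggests removing the
        # capping neighbor whenever the fit is good enough.
        #
        # >>> hints = CoordinationGeometry.NeighborsSetsHints(
        # ...     hints_type='single_cap',
        # ...     options={'cap_index': 6, 'csm_max': 8.0})
        # >>> hints.as_dict()['hints_type']
        # 'single_cap'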
def __init__(self, mp_symbol, name, alternative_names=None,
IUPAC_symbol=None, IUCr_symbol=None, coordination=None,
central_site=np.zeros(3), points=None, solid_angles=None,
permutations_safe_override=False,
deactivate=False, faces=None,
edges=None,
algorithms=None,
equivalent_indices=None,
neighbors_sets_hints=None):
"""
Initializes one "coordination geometry" according to [Pure Appl. Chem., Vol. 79, No. 10, pp. 1779--1799, 2007]
and [Acta Cryst. A, Vol. 46, No. 1, pp. 1--11, 1990].
Args:
mp_symbol: Symbol used internally for the coordination geometry.
name: Name of the coordination geometry.
alternative_names: Alternative names for this coordination geometry.
IUPAC_symbol: The IUPAC symbol of this coordination geometry.
IUCr_symbol: The IUCr symbol of this coordination geometry.
coordination: The coordination number of this coordination geometry (number of neighboring atoms).
central_site: The coordinates of the central site of this coordination geometry.
points: The list of the coordinates of all the points of this coordination geometry.
solid_angles: The list of solid angles for each neighbor in this coordination geometry.
permutations_safe_override: Computes all the permutations if set to True (overrides the plane separation
algorithms or any other algorithm, for testing purposes)
deactivate: Whether to deactivate this coordination geometry
faces: List of the faces with their vertices given in a clockwise or anticlockwise order, for drawing
purposes.
edges: List of edges, for drawing purposes.
algorithms: Algorithms used to identify this coordination geometry.
equivalent_indices: The equivalent sets of indices in this coordination geometry (can be used to skip
equivalent permutations that have already been performed).
            neighbors_sets_hints: Neighbors sets hints for this coordination geometry.
"""
self._mp_symbol = mp_symbol
self.name = name
self.alternative_names = alternative_names if alternative_names is not None else []
self.IUPACsymbol = IUPAC_symbol
self.IUCrsymbol = IUCr_symbol
self.coordination = coordination
self.central_site = np.array(central_site)
self.points = points
self._solid_angles = solid_angles
self.permutations_safe_override = permutations_safe_override
# self.plane_safe_permutations = plane_safe_permutations
# self.setup_permutations(permutations)
self.deactivate = deactivate
self._faces = faces
self._edges = edges
self._algorithms = algorithms
if points is not None:
self.centroid = np.mean(np.array(points), axis=0)
else:
self.centroid = None
self.equivalent_indices = equivalent_indices
self.neighbors_sets_hints = neighbors_sets_hints
self._pauling_stability_ratio = None
def as_dict(self):
"""
A JSON serializable dict representation of this CoordinationGeometry.
"""
return {'mp_symbol': self._mp_symbol,
'name': self.name,
'alternative_names': self.alternative_names,
'IUPAC_symbol': self.IUPACsymbol,
'IUCr_symbol': self.IUCrsymbol,
'coordination': self.coordination,
'central_site': [float(xx) for xx in self.central_site],
'points': [[float(xx) for xx in pp] for pp in
self.points] if self.points is not None else None,
'solid_angles': [float(ang) for ang in
self._solid_angles] if self._solid_angles is not None else None,
'deactivate': self.deactivate,
'_faces': self._faces,
'_edges': self._edges,
'_algorithms': [algo.as_dict for algo in
self._algorithms] if self._algorithms is not None else None,
'equivalent_indices': self.equivalent_indices,
'neighbors_sets_hints': [nbsh.as_dict() for nbsh in self.neighbors_sets_hints]
if self.neighbors_sets_hints is not None else None}
@classmethod
def from_dict(cls, dd):
"""
Reconstructs the CoordinationGeometry from its JSON serializable dict representation.
Args:
dd: a JSON serializable dict representation of a CoordinationGeometry.
Returns: a CoordinationGeometry.
"""
dec = MontyDecoder()
return cls(mp_symbol=dd['mp_symbol'],
name=dd['name'],
alternative_names=dd['alternative_names'],
IUPAC_symbol=dd['IUPAC_symbol'],
IUCr_symbol=dd['IUCr_symbol'],
coordination=dd['coordination'],
central_site=dd['central_site'],
points=dd['points'],
solid_angles=(dd['solid_angles'] if 'solid_angles' in dd
else [4.0 * np.pi / dd['coordination']] * dd[
'coordination']),
deactivate=dd['deactivate'],
faces=dd['_faces'],
edges=dd['_edges'],
algorithms=[dec.process_decoded(algo_d)
for algo_d in dd['_algorithms']] if dd['_algorithms'] is not None else None,
equivalent_indices=dd[
'equivalent_indices'] if 'equivalent_indices' in dd else None,
neighbors_sets_hints=[cls.NeighborsSetsHints.from_dict(nbshd)
for nbshd in dd['neighbors_sets_hints']]
if ('neighbors_sets_hints' in dd and dd['neighbors_sets_hints'] is not None) else None)
def __str__(self):
symbol = ''
if self.IUPAC_symbol is not None:
symbol += ' (IUPAC: {s}'.format(s=self.IUPAC_symbol)
if self.IUCr_symbol is not None:
symbol += ' || IUCr: {s})'.format(s=self.IUCr_symbol)
else:
symbol += ')'
elif self.IUCr_symbol is not None:
symbol += ' (IUCr: {s})'.format(s=self.IUCr_symbol)
outs = ['Coordination geometry type : {n}{s}\n'.format(n=self.name,
s=symbol),
' - coordination number : {c}'.format(c=self.coordination)]
if self.points is None:
outs.append('... not yet implemented')
else:
outs.append(' - list of points :')
for pp in self.points:
outs.append(' - {p}'.format(p=pp))
outs.append(
'------------------------------------------------------------')
outs.append('')
return '\n'.join(outs)
def __repr__(self):
symbol = ''
if self.IUPAC_symbol is not None:
symbol += ' (IUPAC: {s}'.format(s=self.IUPAC_symbol)
if self.IUCr_symbol is not None:
symbol += ' || IUCr: {s})'.format(s=self.IUCr_symbol)
else:
symbol += ')'
elif self.IUCr_symbol is not None:
symbol += ' (IUCr: {s})'.format(s=self.IUCr_symbol)
outs = ['Coordination geometry type : {n}{s}\n'.format(n=self.name,
s=symbol),
' - coordination number : {c}'.format(c=self.coordination)]
outs.append(
'------------------------------------------------------------')
outs.append('')
return '\n'.join(outs)
def __len__(self):
return self.coordination
def set_permutations_safe_override(self, permutations_safe_override):
"""
Setup ChemEnv so that a safe set of permutations are used.
Args:
permutations_safe_override: Whether to use safe permutations.
"""
self.permutations_safe_override = permutations_safe_override
# self.setup_permutations()
# @property
# def csm_skip_algo(self):
# return self.CSM_SKIP_SEPARATION_PLANE_ALGO
@property
def distfactor_max(self):
"""
The maximum distfactor for the perfect CoordinationGeometry.
Returns: Maximum distfactor for the perfect CoordinationGeometry (usually 1.0 for symmetric polyhedrons).
"""
dists = [np.linalg.norm(pp - self.central_site) for pp in self.points]
return np.max(dists) / np.min(dists)
@property
def coordination_number(self):
"""
Returns the coordination number of this coordination geometry.
"""
return self.coordination
@property
def pauling_stability_ratio(self):
"""
Returns the theoretical Pauling stability ratio (rC/rA) for this environment.
"""
if self._pauling_stability_ratio is None:
if self.ce_symbol in ['S:1', 'L:2']:
self._pauling_stability_ratio = 0.0
else:
mindist_anions = 1000000.0
mindist_cation_anion = 1000000.0
for ipt1 in range(len(self.points)):
pt1 = np.array(self.points[ipt1])
mindist_cation_anion = min(mindist_cation_anion,
np.linalg.norm(pt1 - self.central_site))
for ipt2 in range(ipt1 + 1, len(self.points)):
pt2 = np.array(self.points[ipt2])
mindist_anions = min(mindist_anions,
np.linalg.norm(pt1 - pt2))
anion_radius = mindist_anions / 2.0
cation_radius = mindist_cation_anion - anion_radius
self._pauling_stability_ratio = cation_radius / anion_radius
return self._pauling_stability_ratio
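    # For reference: a perfect octahedron with unit center-to-vertex
    # distance gives the textbook value here, since the closest
    # anion-anion contact is sqrt(2), so rA = sqrt(2)/2, rC = 1 - rA and
    # rC/rA = sqrt(2) - 1 ~= 0.414.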
@property
def mp_symbol(self):
"""
Returns the MP symbol of this coordination geometry.
"""
return self._mp_symbol
@property
def ce_symbol(self):
"""
Returns the symbol of this coordination geometry.
"""
return self._mp_symbol
def get_coordination_number(self):
"""
Returns the coordination number of this coordination geometry.
"""
return self.coordination
def is_implemented(self):
"""
Returns True if this coordination geometry is implemented.
"""
return bool(self.points)
def get_name(self):
"""
Returns the name of this coordination geometry.
"""
return self.name
@property
def IUPAC_symbol(self):
"""
Returns the IUPAC symbol of this coordination geometry.
"""
return self.IUPACsymbol
@property
def IUPAC_symbol_str(self):
"""
Returns a string representation of the IUPAC symbol of this coordination geometry.
"""
return str(self.IUPACsymbol)
@property
def IUCr_symbol(self):
"""
Returns the IUCr symbol of this coordination geometry.
"""
return self.IUCrsymbol
@property
def IUCr_symbol_str(self):
"""
Returns a string representation of the IUCr symbol of this coordination geometry.
"""
return str(self.IUCrsymbol)
@property
def number_of_permutations(self):
"""
Returns the number of permutations of this coordination geometry.
"""
if self.permutations_safe_override:
return factorial(self.coordination)
elif self.permutations is None:
return factorial(self.coordination)
return len(self.permutations)
def ref_permutation(self, permutation):
"""
Returns the reference permutation for a set of equivalent permutations.
Can be useful to skip permutations that have already been performed.
Args:
permutation: Current permutation
Returns: Reference permutation of the perfect CoordinationGeometry.
"""
perms = []
for eqv_indices in self.equivalent_indices:
perms.append(tuple([permutation[ii] for ii in eqv_indices]))
perms.sort()
return perms[0]
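    # e.g. with equivalent_indices = [[0, 1, 2], [1, 2, 0], [2, 0, 1]]
    # (a hypothetical three-fold symmetry), the permutations (0, 1, 2),
    # (1, 2, 0) and (2, 0, 1) all share the reference permutation
    # (0, 1, 2), so only one of them needs to be evaluated.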
@property
def algorithms(self):
"""
Returns the list of algorithms that are used to identify this coordination geometry.
"""
return self._algorithms
def get_central_site(self):
"""
Returns the central site of this coordination geometry.
"""
return self.central_site
def faces(self, sites, permutation=None):
"""
Returns the list of faces of this coordination geometry. Each face is given as a
list of its vertices coordinates.
"""
if permutation is None:
coords = [site.coords for site in sites]
else:
coords = [sites[ii].coords for ii in permutation]
return [[coords[ii] for ii in f] for f in self._faces]
def edges(self, sites, permutation=None, input='sites'):
"""
Returns the list of edges of this coordination geometry. Each edge is given as a
list of its end vertices coordinates.
"""
if input == 'sites':
coords = [site.coords for site in sites]
elif input == 'coords':
coords = sites
# if permutation is None:
# coords = [site.coords for site in sites]
# else:
# coords = [sites[ii].coords for ii in permutation]
if permutation is not None:
coords = [coords[ii] for ii in permutation]
return [[coords[ii] for ii in e] for e in self._edges]
def solid_angles(self, permutation=None):
"""
        Returns the list of "perfect" solid angles, one for each neighbor
        in this coordination geometry.
"""
if permutation is None:
return self._solid_angles
else:
return [self._solid_angles[ii] for ii in permutation]
def get_pmeshes(self, sites, permutation=None):
"""
Returns the pmesh strings used for jmol to show this geometry.
"""
pmeshes = []
# _vertices = [site.coords for site in sites]
if permutation is None:
_vertices = [site.coords for site in sites]
else:
_vertices = [sites[ii].coords for ii in permutation]
_face_centers = []
number_of_faces = 0
for face in self._faces:
if len(face) in [3, 4]:
number_of_faces += 1
else:
number_of_faces += len(face)
_face_centers.append(np.array([np.mean([_vertices[face_vertex][ii]
for face_vertex in face])
for ii in range(3)]))
out = '{}\n'.format(len(_vertices) + len(_face_centers))
for vv in _vertices:
out += '{:15.8f} {:15.8f} {:15.8f}\n'.format(vv[0], vv[1], vv[2])
for fc in _face_centers:
out += '{:15.8f} {:15.8f} {:15.8f}\n'.format(fc[0], fc[1], fc[2])
out += '{:d}\n'.format(number_of_faces)
for iface, face in enumerate(self._faces):
if len(face) == 3:
out += '4\n'
elif len(face) == 4:
out += '5\n'
else:
for ii in range(len(face)):
out += '4\n'
out += '{:d}\n'.format(len(_vertices) + iface)
out += '{:d}\n'.format(face[ii])
out += '{:d}\n'.format(face[np.mod(ii + 1, len(face))])
out += '{:d}\n'.format(len(_vertices) + iface)
if len(face) in [3, 4]:
for face_vertex in face:
out += '{:d}\n'.format(face_vertex)
out += '{:d}\n'.format(face[0])
pmeshes.append({"pmesh_string": out})
return pmeshes
class AllCoordinationGeometries(dict):
"""
Class used to store all the reference "coordination geometries" (list with instances of the CoordinationGeometry
classes)
"""
def __init__(self, permutations_safe_override=False, only_symbols=None):
"""
Initializes the list of Coordination Geometries.
Args:
permutations_safe_override: Whether to use safe permutations.
only_symbols: Whether to restrict the list of environments to be identified.
"""
dict.__init__(self)
self.cg_list = list()
if only_symbols is None:
f = open(
'{}/coordination_geometries_files/allcg.txt'.format(module_dir),
'r')
data = f.readlines()
f.close()
for line in data:
cg_file = '{}/{}'.format(module_dir, line.strip())
f = open(cg_file, 'r')
dd = json.load(f)
f.close()
self.cg_list.append(CoordinationGeometry.from_dict(dd))
else:
for symbol in only_symbols:
fsymbol = symbol.replace(':', '#')
cg_file = '{}/coordination_geometries_files/{}.json'.format(
module_dir, fsymbol)
f = open(cg_file, 'r')
dd = json.load(f)
f.close()
self.cg_list.append(CoordinationGeometry.from_dict(dd))
self.cg_list.append(CoordinationGeometry(UNKNOWN_ENVIRONMENT_SYMBOL,
"Unknown environment",
deactivate=True))
self.cg_list.append(CoordinationGeometry(UNCLEAR_ENVIRONMENT_SYMBOL,
"Unclear environment",
deactivate=True))
if permutations_safe_override:
for cg in self.cg_list:
cg.set_permutations_safe_override(True)
self.minpoints = {}
self.maxpoints = {}
self.separations_cg = {}
for cn in range(6, 21):
for cg in self.get_implemented_geometries(coordination=cn):
if only_symbols is not None and cg.ce_symbol not in only_symbols:
continue
if cn not in self.separations_cg:
self.minpoints[cn] = 1000
self.maxpoints[cn] = 0
self.separations_cg[cn] = {}
for algo in cg.algorithms:
sep = (len(algo.point_groups[0]),
len(algo.plane_points),
len(algo.point_groups[1]))
if sep not in self.separations_cg[cn]:
self.separations_cg[cn][sep] = []
self.separations_cg[cn][sep].append(cg.mp_symbol)
self.minpoints[cn] = min(self.minpoints[cn], algo.minimum_number_of_points)
self.maxpoints[cn] = max(self.maxpoints[cn], algo.maximum_number_of_points)
self.maxpoints_inplane = {cn: max([sep[1] for sep in seps.keys()]) for cn, seps in self.separations_cg.items()}
def __getitem__(self, key):
return self.get_geometry_from_mp_symbol(key)
def __contains__(self, item):
try:
self[item]
return True
except LookupError:
return False
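    # A minimal usage sketch ('O:6' is the ChemEnv symbol for the
    # octahedron; adjust if the packaged geometry files differ):
    #
    # >>> allcg = AllCoordinationGeometries()
    # >>> allcg['O:6'].coordination_number
    # 6
    # >>> 'O:6' in allcg
    # True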
def __repr__(self):
"""
Returns a string with the list of coordination geometries.
"""
outs = ['', '#=================================#',
'# List of coordination geometries #',
'#=================================#', '']
for cg in self.cg_list:
outs.append(repr(cg))
return '\n'.join(outs)
def __str__(self):
"""
Returns a string with the list of coordination geometries that are implemented.
"""
outs = ['', '#=======================================================#',
'# List of coordination geometries currently implemented #',
'#=======================================================#', '']
for cg in self.cg_list:
if cg.is_implemented():
outs.append(str(cg))
return '\n'.join(outs)
def get_geometries(self, coordination=None, returned='cg'):
"""
Returns a list of coordination geometries with the given coordination number.
Args:
coordination: The coordination number of which the list of coordination geometries are returned.
returned: Type of objects in the list.
"""
geom = list()
if coordination is None:
for gg in self.cg_list:
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
else:
for gg in self.cg_list:
if gg.get_coordination_number() == coordination:
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
return geom
def get_symbol_name_mapping(self, coordination=None):
"""
Return a dictionary mapping the symbol of a CoordinationGeometry to its name.
Args:
coordination: Whether to restrict the dictionary to a given coordination.
Returns: Dictionary mapping the symbol of a CoordinationGeometry to its name.
"""
geom = {}
if coordination is None:
for gg in self.cg_list:
geom[gg.mp_symbol] = gg.name
else:
for gg in self.cg_list:
if gg.get_coordination_number() == coordination:
geom[gg.mp_symbol] = gg.name
return geom
def get_symbol_cn_mapping(self, coordination=None):
"""
Return a dictionary mapping the symbol of a CoordinationGeometry to its coordination.
Args:
coordination: Whether to restrict the dictionary to a given coordination.
Returns: Dictionary mapping the symbol of a CoordinationGeometry to its coordination.
"""
geom = {}
if coordination is None:
for gg in self.cg_list:
geom[gg.mp_symbol] = gg.coordination_number
else:
for gg in self.cg_list:
if gg.get_coordination_number() == coordination:
geom[gg.mp_symbol] = gg.coordination_number
return geom
def get_implemented_geometries(self, coordination=None, returned='cg',
include_deactivated=False):
"""
Returns a list of the implemented coordination geometries with the given coordination number.
Args:
coordination: The coordination number of which the list of implemented coordination geometries
are returned.
returned: Type of objects in the list.
include_deactivated: Whether to include CoordinationGeometry that are deactivated.
"""
geom = list()
if coordination is None:
for gg in self.cg_list:
if gg.points is not None and (
(not gg.deactivate) or include_deactivated):
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
else:
for gg in self.cg_list:
if gg.get_coordination_number() == coordination and gg.points is not None and \
((not gg.deactivate) or include_deactivated):
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
return geom
def get_not_implemented_geometries(self, coordination=None,
returned='mp_symbol'):
"""
        Returns a list of the coordination geometries that are not implemented, for the given coordination number.
        Args:
            coordination: The coordination number for which the list of not-implemented coordination geometries
                is returned.
returned: Type of objects in the list.
"""
geom = list()
if coordination is None:
for gg in self.cg_list:
if gg.points is None:
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
else:
for gg in self.cg_list:
if gg.get_coordination_number() == coordination and gg.points is None:
if returned == 'cg':
geom.append(gg)
elif returned == 'mp_symbol':
geom.append(gg.mp_symbol)
return geom
def get_geometry_from_name(self, name):
"""
Returns the coordination geometry of the given name.
Args:
name: The name of the coordination geometry.
"""
for gg in self.cg_list:
if gg.name == name or name in gg.alternative_names:
return gg
raise LookupError(
'No coordination geometry found with name "{name}"'.format(
name=name))
def get_geometry_from_IUPAC_symbol(self, IUPAC_symbol):
"""
Returns the coordination geometry of the given IUPAC symbol.
Args:
IUPAC_symbol: The IUPAC symbol of the coordination geometry.
"""
for gg in self.cg_list:
if gg.IUPAC_symbol == IUPAC_symbol:
return gg
raise LookupError(
'No coordination geometry found with IUPAC symbol "{symbol}"'.format(
symbol=IUPAC_symbol))
def get_geometry_from_IUCr_symbol(self, IUCr_symbol):
"""
Returns the coordination geometry of the given IUCr symbol.
Args:
IUCr_symbol: The IUCr symbol of the coordination geometry.
"""
for gg in self.cg_list:
if gg.IUCr_symbol == IUCr_symbol:
return gg
raise LookupError(
'No coordination geometry found with IUCr symbol "{symbol}"'.format(
symbol=IUCr_symbol))
def get_geometry_from_mp_symbol(self, mp_symbol):
"""
Returns the coordination geometry of the given mp_symbol.
Args:
mp_symbol: The mp_symbol of the coordination geometry.
"""
for gg in self.cg_list:
if gg.mp_symbol == mp_symbol:
return gg
raise LookupError(
'No coordination geometry found with mp_symbol "{symbol}"'.format(
symbol=mp_symbol))
def is_a_valid_coordination_geometry(self, mp_symbol=None,
IUPAC_symbol=None, IUCr_symbol=None,
name=None, cn=None):
"""
Checks whether a given coordination geometry is valid (exists) and whether the parameters are coherent with
each other.
Args:
mp_symbol: The mp_symbol of the coordination geometry.
IUPAC_symbol: The IUPAC_symbol of the coordination geometry.
IUCr_symbol: The IUCr_symbol of the coordination geometry.
name: The name of the coordination geometry.
cn: The coordination of the coordination geometry.
"""
if name is not None:
raise NotImplementedError(
'is_a_valid_coordination_geometry not implemented for the name')
if mp_symbol is None and IUPAC_symbol is None and IUCr_symbol is None:
raise SyntaxError(
'missing argument for is_a_valid_coordination_geometry : at least one of mp_symbol, '
'IUPAC_symbol and IUCr_symbol must be passed to the function')
if mp_symbol is not None:
try:
cg = self.get_geometry_from_mp_symbol(mp_symbol)
if IUPAC_symbol is not None:
if IUPAC_symbol != cg.IUPAC_symbol:
return False
if IUCr_symbol is not None:
if IUCr_symbol != cg.IUCr_symbol:
return False
if cn is not None:
if int(cn) != int(cg.coordination_number):
return False
return True
except LookupError:
return False
elif IUPAC_symbol is not None:
try:
cg = self.get_geometry_from_IUPAC_symbol(IUPAC_symbol)
if IUCr_symbol is not None:
if IUCr_symbol != cg.IUCr_symbol:
return False
if cn is not None:
if cn != cg.coordination_number:
return False
return True
except LookupError:
return False
elif IUCr_symbol is not None:
try:
cg = self.get_geometry_from_IUCr_symbol(IUCr_symbol)
if cn is not None:
if cn != cg.coordination_number:
return False
return True
            except LookupError:
                return False
        raise RuntimeError('Should not be here!')
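    # Coherence-check sketch (illustrative, same `cgs` instance as above):
    #   cgs.is_a_valid_coordination_geometry(mp_symbol='O:6', cn=6)  # -> True
    #   cgs.is_a_valid_coordination_geometry(mp_symbol='O:6', cn=4)  # -> False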
def pretty_print(self, type='implemented_geometries', maxcn=8, additional_info=None):
"""
Return a string with a list of the Coordination Geometries.
Args:
type: Type of string to be returned (all_geometries, all_geometries_latex_images, all_geometries_latex,
implemented_geometries).
            maxcn: Maximum coordination number.
            additional_info: Additional information to be printed for each
                coordination geometry (e.g. 'nb_hints').
Returns: String describing the list of coordination geometries.
"""
if type == 'all_geometries_latex_images':
mystring = ''
for cn in range(1, maxcn + 1):
mystring += '\\section*{{Coordination {cn}}}\n\n'.format(cn=cn)
for cg in self.get_implemented_geometries(coordination=cn,
returned='cg'):
mystring += '\\subsubsection*{{{mp} : {name}}}\n\n'.format(
mp=cg.mp_symbol, name=cg.get_name())
mystring += 'IUPAC : {iupac}\n\nIUCr : {iucr}\n\n'.format(
iupac=cg.IUPAC_symbol, iucr=cg.IUCr_symbol)
mystring += '\\begin{center}\n'
mystring += '\\includegraphics[scale=0.15]{{images/{let}_{cif}.png}}\n'.format(
let=cg.mp_symbol.split(':')[0],
cif=cg.mp_symbol.split(':')[1])
mystring += '\\end{center}\n\n'
for cg in self.get_not_implemented_geometries(cn,
returned='cg'):
mystring += '\\subsubsection*{{{mp} : {name}}}\n\n'.format(
mp=cg.mp_symbol, name=cg.get_name())
mystring += 'IUPAC : {iupac}\n\nIUCr : {iucr}\n\n'.format(
iupac=cg.IUPAC_symbol, iucr=cg.IUCr_symbol)
elif type == 'all_geometries_latex':
mystring = ''
for cn in range(1, maxcn + 1):
mystring += '\\subsection*{{Coordination {cn}}}\n\n'.format(
cn=cn)
mystring += '\\begin{itemize}\n'
for cg in self.get_implemented_geometries(coordination=cn,
returned='cg'):
mystring += '\\item {mp} $\\rightarrow$ {name} '.format(
mp=cg.mp_symbol.replace('_',
'\\_'),
name=cg.get_name())
mystring += '(IUPAC : {iupac} - IUCr : {iucr})\n'.format(
iupac=cg.IUPAC_symbol_str,
iucr=cg.IUCr_symbol_str.replace('[', '$[$').replace(']',
'$]$'))
for cg in self.get_not_implemented_geometries(cn,
returned='cg'):
mystring += '\\item {mp} $\\rightarrow$ {name} '.format(
mp=cg.mp_symbol.replace('_',
'\\_'),
name=cg.get_name())
mystring += '(IUPAC : {iupac} - IUCr : {iucr})\n'.format(
iupac=cg.IUPAC_symbol_str,
iucr=cg.IUCr_symbol_str.replace('[', '$[$').replace(']',
'$]$'))
mystring += '\\end{itemize}\n\n'
else:
mystring = '+-------------------------+\n| Coordination geometries |\n+-------------------------+\n\n'
for cn in range(1, maxcn + 1):
mystring += '==>> CN = {cn} <<==\n'.format(cn=cn)
if type == 'implemented_geometries':
for cg in self.get_implemented_geometries(coordination=cn):
if additional_info is not None:
if 'nb_hints' in additional_info:
if cg.neighbors_sets_hints is not None:
addinfo = ' *'
else:
addinfo = ''
else:
addinfo = ''
else:
addinfo = ''
mystring += ' - {mp} : {name}{addinfo}\n'.format(mp=cg.mp_symbol,
name=cg.get_name(),
addinfo=addinfo)
elif type == 'all_geometries':
for cg in self.get_geometries(coordination=cn):
mystring += ' - {mp} : {name}\n'.format(mp=cg.mp_symbol,
name=cg.get_name())
mystring += '\n'
return mystring
| mbkumar/pymatgen | pymatgen/analysis/chemenv/coordination_environments/coordination_geometries.py | Python | mit | 58,667 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import json
import unittest
import mock
import webapp2
import webtest
from google.appengine.ext import ndb
from dashboard import graph_json
from dashboard import list_tests
from dashboard.common import testing_common
from dashboard.common import utils
from dashboard.models import anomaly
from dashboard.models import graph_data
class GraphJsonTest(testing_common.TestCase):
def setUp(self):
super(GraphJsonTest, self).setUp()
app = webapp2.WSGIApplication(
[('/graph_json', graph_json.GraphJsonHandler)])
self.testapp = webtest.TestApp(app)
self.PatchDatastoreHooksRequest()
def _AddTestColumns(self, start_rev=15000, end_rev=16500, step=3):
"""Adds a bunch of test data to the mock datastore.
    In particular, add Rows with revisions in the given range (but skipping
    some numbers, so the revisions are non-contiguous) under the dromaeo/dom
    and dromaeo/jslib tests for winXP, win7, mac.
Args:
start_rev: Starting revision number.
end_rev: Ending revision number.
step: Difference between adjacent revisions.
"""
master = graph_data.Master(id='ChromiumGPU')
master.put()
bots = []
rows = []
for name in ['winXP', 'win7', 'mac']:
bot = graph_data.Bot(id=name, parent=master.key)
bot.put()
bots.append(bot)
test = graph_data.TestMetadata(id='ChromiumGPU/%s/dromaeo' % name)
test.UpdateSheriff()
test.put()
for sub_name in ['dom', 'jslib']:
sub_test = graph_data.TestMetadata(
id='%s/%s' % (test.key.id(), sub_name),
improvement_direction=anomaly.UP,
has_rows=True)
sub_test.UpdateSheriff()
sub_test.put()
test_container_key = utils.GetTestContainerKey(sub_test)
for i in range(start_rev, end_rev, step):
# Add Rows for one bot with revision numbers that aren't lined up
# with the other bots.
rev = i + 1 if name == 'mac' else i
row = graph_data.Row(
parent=test_container_key, id=rev, value=float(i * 2),
r_webkit=int(i * 0.25), a_str='some_string',
buildnumber=i - start_rev,
a_tracing_uri='http://trace/%d' % i)
rows.append(row)
ndb.put_multi(rows)
def _AddLongTestColumns(self, start_rev=15000, end_rev=16500, step=3):
"""Adds test data with long nested sub test to the mock datastore.
Args:
start_rev: Starting revision number.
end_rev: Ending revision number.
step: Difference between adjacent revisions.
"""
master = graph_data.Master(id='master')
master.put()
bot = graph_data.Bot(id='bot', parent=master.key)
bot.put()
test = graph_data.TestMetadata(id='master/bot/suite')
test.UpdateSheriff()
test.put()
rows = []
path = 'master/bot/suite'
for sub_name in ['sub1', 'sub2', 'sub3', 'sub4', 'sub5']:
path = '%s/%s' % (path, sub_name)
test = graph_data.TestMetadata(id=path,
improvement_direction=anomaly.UP,
has_rows=True)
test.UpdateSheriff()
test.put()
test_container_key = utils.GetTestContainerKey(test.key)
for i in range(start_rev, end_rev, step):
row = graph_data.Row(
parent=test_container_key, id=i, value=float(i * 2),
r_webkit=int(i * 0.25), a_str='some_string',
buildnumber=i - start_rev,
a_tracing_uri='http://trace/%d' % i)
rows.append(row)
ndb.put_multi(rows)
def _GetSeriesIndex(self, flot, test_path):
series = flot['annotations']['series']
for index in series:
if series[index]['path'] == test_path:
return index
return None
def CheckFlotJson(
self, json_str, num_rows, num_cols, start_rev, end_rev, step=3):
"""Checks whether a JSON string output by GetGraphJson is correct.
It's assumed that data should match data that might be added by the
_AddTestColumns method above.
    In general, the Flot JSON should at least contain a dict with the key
    'data', which maps series indices to dicts (which represent data series);
    each of these has a key called 'data', which is mapped to a list of
    2-element lists (which represent points). For example:
    {"data": {"0": {"data": [[1, 10], [2, 20]]}, "1": {"data": [[3, 30], [4, 40]]}}}
Args:
json_str: The JSON string to check.
num_rows: The expected number of points in each trace.
num_cols: The expected number of trace lines.
start_rev: Starting revision number.
end_rev: End revision number.
step: Expected difference between adjacent revision numbers.
"""
try:
flot = json.loads(json_str)
except ValueError:
self.fail('GetGraphJson returned invalid JSON')
data = flot.get('data')
if not data:
self.fail('No flot data generated by GetGraphJson')
self.assertEqual(num_cols, len(data))
for key in data:
col = data[key]
if not col.get('data'):
self.fail('No flot columns generated by GetGraphJson')
self.assertEqual(num_rows, len(col['data']))
for index, rev in enumerate(range(start_rev, end_rev, step)):
self.assertEqual(rev, col['data'][index][0])
self.assertEqual(rev * 2, col['data'][index][1])
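  # As a worked example, CheckFlotJson(s, num_rows=2, num_cols=1,
  # start_rev=10, end_rev=16, step=3) accepts (illustrative):
  #   {"data": {"0": {"data": [[10, 20], [13, 26]]}}}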
def testPost_ValidRequest(self):
self._AddTestColumns(start_rev=15700, end_rev=16000, step=1)
graphs = {
'test_path_dict': {
'ChromiumGPU/winXP/dromaeo/dom': [],
'ChromiumGPU/winXP/dromaeo/jslib': [],
}
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
self.CheckFlotJson(flot_json_str, 150, 2, 15850, 16000, step=1)
self.assertEqual('*', response.headers.get('Access-Control-Allow-Origin'))
def testPost_NanFiltered(self):
self._AddTestColumns(start_rev=15700, end_rev=16000, step=1)
test_key = utils.OldStyleTestKey('ChromiumGPU/win7/dromaeo/jslib')
row_key = utils.GetRowKey(test_key, 15900)
row = row_key.get()
row.value = float('nan')
row.put()
graphs = {
'test_path_dict': {
'ChromiumGPU/win7/dromaeo/jslib': [],
}
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
rows = json.loads(flot_json_str)['data']['0']['data']
self.assertEqual(149, len(rows))
def testPost_InvalidRequest_ReportsError(self):
self.testapp.post('/graph_json', {}, status=500)
self.testapp.post('/graph_json', {'graphs': ''}, status=500)
self.testapp.post('/graph_json', {'graphs': '{}'}, status=500)
def testPost_LongTestPathWithSelected(self):
self._AddLongTestColumns(start_rev=15700, end_rev=16000, step=1)
graphs = {
'test_path_dict': {
'master/bot/suite/sub1/sub2/sub3/sub4/sub5': ['sub5']
},
'is_selected': True
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
self.CheckFlotJson(flot_json_str, 150, 1, 15850, 16000, step=1)
def testPost_LongTestPathWithUnSelected(self):
self._AddLongTestColumns(start_rev=15700, end_rev=16000, step=1)
graphs = {
'test_path_dict': {
'master/bot/suite/sub1/sub2/sub3/sub4': ['sub4']
}
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
self.CheckFlotJson(flot_json_str, 150, 1, 15850, 16000, step=1)
def testPost_LongTestPathWithUnSelectedAndNoSubTest_NoGraphData(self):
self._AddLongTestColumns(start_rev=15700, end_rev=16000, step=1)
graphs = {
'test_path_dict': {
'master/bot/suite/sub1/sub2/sub3/sub4/sub5': ['sub5']
},
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
flot = json.loads(flot_json_str)
self.assertEqual(0, len(flot['data']))
def testRequest_NoSubTest_ShowsSummaryTests(self):
"""Tests the post method of the request handler."""
self._AddTestColumns(start_rev=15700, end_rev=16000, step=1)
graphs = {
'test_path_dict': {
'ChromiumGPU/winXP/dromaeo': [],
}
}
# If the request is valid, a valid response will be returned.
response = self.testapp.post(
'/graph_json', {'graphs': json.dumps(graphs)})
flot_json_str = response.body
self.CheckFlotJson(flot_json_str, 150, 2, 15850, 16000, step=1)
def testGetGraphJsonNoArgs(self):
self._AddTestColumns(start_rev=16047)
flot_json_str = graph_json.GetGraphJson(
{'ChromiumGPU/win7/dromaeo/dom': []})
self.CheckFlotJson(flot_json_str, 150, 1, 16050, 16500)
def testGetGraphJsonRevisionStart(self):
self._AddTestColumns(end_rev=15500)
flot_json_str = graph_json.GetGraphJson(
{'ChromiumGPU/win7/dromaeo/dom': []}, rev=15000)
self.CheckFlotJson(flot_json_str, 76, 1, 15000, 15228)
def testGetGraphJsonRevisionMiddle(self):
self._AddTestColumns(end_rev=15600)
flot_json_str = graph_json.GetGraphJson(
{'ChromiumGPU/win7/dromaeo/dom': []}, rev=15300)
self.CheckFlotJson(flot_json_str, 151, 1, 15075, 15525)
def testGetGraphJsonNumPoints(self):
self._AddTestColumns(end_rev=15500)
flot_json_str = graph_json.GetGraphJson(
{'ChromiumGPU/win7/dromaeo/dom': []}, rev=15300, num_points=8)
self.CheckFlotJson(flot_json_str, 9, 1, 15288, 15315)
def testGetGraphJsonStartEndRev(self):
self._AddTestColumns(start_rev=15991)
flot_json_str = graph_json.GetGraphJson(
{'ChromiumGPU/win7/dromaeo/dom': []}, start_rev=16000,
end_rev=16030)
self.CheckFlotJson(flot_json_str, 11, 1, 16000, 16030)
def testGetGraphJsonMultipleBots(self):
self._AddTestColumns(start_rev=16047)
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/winXP/dromaeo/dom': [],
})
self.CheckFlotJson(flot_json_str, 150, 2, 16050, 16500)
def testGetGraphJsonMultipleTests(self):
self._AddTestColumns(start_rev=16047)
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/win7/dromaeo/jslib': [],
})
self.CheckFlotJson(flot_json_str, 150, 2, 16050, 16500)
def testGetGraphJsonError(self):
self._AddTestColumns(start_rev=15000, end_rev=15015)
rows = graph_data.Row.query(
graph_data.Row.parent_test == ndb.Key(
'TestMetadata', 'ChromiumGPU/win7/dromaeo/dom'))
for row in rows:
row.error = 1 + ((row.revision - 15000) * 0.25)
ndb.put_multi(rows)
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
})
flot = json.loads(flot_json_str)
self.assertEqual(1, len(list(flot['error_bars'].keys())))
rev = 0
for col_dom, col_top, col_bottom in zip(
flot['data']['0']['data'],
flot['error_bars']['0'][1]['data'],
flot['error_bars']['0'][0]['data']):
error = 1 + (rev * 0.25)
self.assertEqual(rev + 15000, col_top[0])
self.assertEqual(col_dom[1] + error, col_top[1])
self.assertEqual(rev + 15000, col_bottom[0])
self.assertEqual(col_dom[1] - error, col_bottom[1])
rev += 3
def testGetGraphJsonSkewedRevisions(self):
self._AddTestColumns(end_rev=15500)
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
rev=15000, num_points=8)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
if not data:
self.fail('No flot data generated by GetGraphJson')
self.assertEqual(2, len(data))
self.assertEqual(
[[15000, 30000.0], [15003, 30006.0], [15006, 30012.0]],
data[win7_index].get('data'))
self.assertEqual(
[[15001, 30000.0], [15004, 30006.0]],
data[mac_index].get('data'))
def testGetGraphJson_ClampsRevisions(self):
self._AddTestColumns(end_rev=15500)
# No revision specified, clamps to the last 9 rows.
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
num_points=8)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
# Two columns
self.assertEqual(2, len(data))
    # Clamped to 15487-15499 (step 3); the win7 series has no row at 15487.
self.assertEqual(
[
[15489, 30978.0],
[15492, 30984.0],
[15495, 30990.0],
[15498, 30996.0]
],
data[win7_index].get('data'))
self.assertEqual(
[
[15487, 30972.0],
[15490, 30978.0],
[15493, 30984.0],
[15496, 30990.0],
[15499, 30996.0]
],
data[mac_index].get('data'))
# Revision 100 (way before data starts) specified, clamp to the first 8 rows
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
rev=100, num_points=8)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
# Two columns
self.assertEqual(2, len(data))
# 15000-15012.
self.assertEqual(
[
[15000, 30000.0],
[15003, 30006.0],
[15006, 30012.0],
[15009, 30018.0]
],
data[win7_index].get('data'))
self.assertEqual(
[
[15001, 30000.0],
[15004, 30006.0],
[15007, 30012.0],
[15010, 30018.0]
],
data[mac_index].get('data'))
    # Revision 15030 (valid) specified, clamp 4 rows before/after
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
rev=15030, num_points=8)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
# Two columns
self.assertEqual(2, len(data))
    # 15024-15036.
self.assertEqual(
[
[15024, 30048.0],
[15027, 30054.0],
[15030, 30060.0],
[15033, 30066.0],
[15036, 30072.0]
],
data[win7_index].get('data'))
self.assertEqual(
[
[15025, 30048.0],
[15028, 30054.0],
[15031, 30060.0],
[15034, 30066.0]
],
data[mac_index].get('data'))
    # Revision 15498 specified: clamp rows before it; the range after is cut
    # off at the end of the data.
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
rev=15498, num_points=7)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
# Two columns
self.assertEqual(2, len(data))
# 15493-15499.
self.assertEqual(
[[15495, 30990.0], [15498, 30996.0]],
data[win7_index].get('data'))
self.assertEqual(
[[15493, 30984.0], [15496, 30990.0], [15499, 30996.0]],
data[mac_index].get('data'))
    # Revision 15001 specified: the range before is cut off; clamp rows after.
json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/dom': [],
},
rev=15001, num_points=8)
flot = json.loads(json_str)
data = flot.get('data')
win7_index = self._GetSeriesIndex(flot, 'ChromiumGPU/win7/dromaeo/dom')
mac_index = self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom')
# Two columns
self.assertEqual(2, len(data))
    # 15000-15012.
self.assertEqual(
[[15000, 30000.0], [15003, 30006.0], [15006, 30012.0]],
data[win7_index].get('data'))
self.assertEqual(
[[15001, 30000.0], [15004, 30006.0], [15007, 30012.0]],
data[mac_index].get('data'))
def testGetGraphJson_GraphJsonAnnotations(self):
self._AddTestColumns(end_rev=15500)
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
},
rev=15000, num_points=8)
flot = json.loads(flot_json_str)
annotations = flot['annotations']
self.assertEqual(5, len(flot['data']['0']['data']))
for i, _ in enumerate(flot['data']['0']['data']):
rev = flot['data']['0']['data'][i][0]
self.assertEqual(int(int(rev) * 0.25),
annotations['0'][str(i)]['r_webkit'])
def testGetGraphJson_WithAnomalies_ReturnsCorrectAnomalyAnnotations(self):
self._AddTestColumns()
anomaly1 = anomaly.Anomaly(
start_revision=14999, end_revision=15000,
test=utils.TestKey('ChromiumGPU/win7/dromaeo/dom'),
median_before_anomaly=100,
median_after_anomaly=200)
anomaly1.SetIsImprovement()
key1 = anomaly1.put()
anomaly2 = anomaly.Anomaly(
start_revision=15004, end_revision=15006,
test=utils.TestKey('ChromiumGPU/win7/dromaeo/dom'),
median_before_anomaly=200,
median_after_anomaly=100,
bug_id=12345)
anomaly2.SetIsImprovement()
key2 = anomaly2.put()
old_style_test_key = ndb.Key(
'Master', 'ChromiumGPU',
'Bot', 'win7',
'Test', 'dromaeo',
'Test', 'dom')
anomaly3 = anomaly.Anomaly(
start_revision=15008, end_revision=15009,
test=old_style_test_key,
median_before_anomaly=100,
median_after_anomaly=200
)
key3 = anomaly3.put()
test = utils.TestKey('ChromiumGPU/win7/dromaeo/dom').get()
test.description = 'About this test'
test.units = 'ms'
test.buildername = 'Windows 7 (1)'
test.UpdateSheriff()
test.put()
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
},
rev=15000, num_points=8)
flot = json.loads(flot_json_str)
annotations = flot['annotations']
self.assertEqual(5, len(annotations['0']))
# Verify key fields of the annotation dictionary for the first anomaly.
anomaly_one_annotation = annotations['0']['0']['g_anomaly']
self.assertEqual(14999, anomaly_one_annotation['start_revision'])
self.assertEqual(15000, anomaly_one_annotation['end_revision'])
self.assertEqual('100.0%', anomaly_one_annotation['percent_changed'])
self.assertIsNone(anomaly_one_annotation['bug_id'])
self.assertEqual(key1.urlsafe(), anomaly_one_annotation['key'])
self.assertTrue(anomaly_one_annotation['improvement'])
# Verify key fields of the annotation dictionary for the second anomaly.
anomaly_two_annotation = annotations['0']['2']['g_anomaly']
self.assertEqual(15004, anomaly_two_annotation['start_revision'])
self.assertEqual(15006, anomaly_two_annotation['end_revision'])
self.assertEqual('50.0%', anomaly_two_annotation['percent_changed'])
self.assertEqual(12345, anomaly_two_annotation['bug_id'])
self.assertEqual(key2.urlsafe(), anomaly_two_annotation['key'])
self.assertFalse(anomaly_two_annotation['improvement'])
# Verify the key for the third anomaly.
anomaly_three_annotation = annotations['0']['3']['g_anomaly']
self.assertEqual(key3.urlsafe(), anomaly_three_annotation['key'])
# Verify the tracing link annotations
self.assertEqual('http://trace/15000',
annotations['0']['0']['a_tracing_uri'])
self.assertEqual('http://trace/15012',
annotations['0']['4']['a_tracing_uri'])
# Verify the series annotations.
self.assertEqual({
'0': {
'name': 'dom',
'path': 'ChromiumGPU/win7/dromaeo/dom',
'units': 'ms',
'better': 'Higher',
'description': 'About this test',
'can_bisect': True,
}
}, annotations['series'])
def testGetGraphJson_SomeDataDeprecated_OmitsDeprecatedData(self):
self._AddTestColumns(start_rev=15000, end_rev=15050)
dom = utils.TestKey('ChromiumGPU/win7/dromaeo/dom').get()
dom.deprecated = True
dom.put()
jslib = utils.TestKey('ChromiumGPU/win7/dromaeo/jslib').get()
jslib.deprecated = True
jslib.put()
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/dom': [],
'ChromiumGPU/win7/dromaeo/jslib': [],
'ChromiumGPU/mac/dromaeo/dom': [],
'ChromiumGPU/mac/dromaeo/jslib': [],
},
rev=15000, num_points=8)
flot = json.loads(flot_json_str)
# The win7 tests are deprecated and the mac tests are not. So only the mac
# tests should be returned.
self.assertEqual(2, len(flot['data']))
self.assertEqual(2, len(flot['annotations']['series']))
self.assertIsNotNone(
self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/dom'))
self.assertIsNotNone(
self._GetSeriesIndex(flot, 'ChromiumGPU/mac/dromaeo/jslib'))
def testGetGraphJson_WithSelectedTrace(self):
self._AddTestColumns(start_rev=15000, end_rev=15050)
rows = graph_data.Row.query(
graph_data.Row.parent_test == utils.OldStyleTestKey(
'ChromiumGPU/win7/dromaeo/jslib')).fetch()
for row in rows:
row.error = 1 + ((row.revision - 15000) * 0.25)
ndb.put_multi(rows)
flot_json_str = graph_json.GetGraphJson(
{
'ChromiumGPU/win7/dromaeo/jslib': ['jslib'],
},
rev=15000, num_points=8, is_selected=True)
flot = json.loads(flot_json_str)
self.assertEqual(1, len(flot['data']))
self.assertEqual(5, len(flot['data']['0']['data']))
self.assertEqual(1, len(flot['annotations']['series']))
self.assertEqual(5, len(list(flot['annotations'].get('0').keys())))
self.assertEqual(5, len(flot['error_bars']['0'][0]['data']))
self.assertEqual(5, len(flot['error_bars']['0'][1]['data']))
def testGetGraphJson_UnSelectedTrace(self):
self._AddTestColumns(start_rev=15000, end_rev=15050)
test_key = ndb.Key('TestMetadata', 'ChromiumGPU/win7/dromaeo/jslib')
rows = graph_data.Row.query(graph_data.Row.parent_test == test_key).fetch()
for row in rows:
row.error = 1 + ((row.revision - 15000) * 0.25)
ndb.put_multi(rows)
# Insert sub tests to jslib.
rows = []
start_rev = 15000
end_rev = 15050
for name in ['sub_test_a', 'sub_test_b']:
sub_test = graph_data.TestMetadata(id='%s/%s' % (test_key.id(), name),
improvement_direction=anomaly.UP,
has_rows=True)
sub_test.UpdateSheriff()
sub_test.put()
sub_test_container_key = utils.GetTestContainerKey(sub_test)
for i in range(start_rev, end_rev, 3):
# Add Rows for one bot with revision numbers that aren't lined up
# with the other bots.
row = graph_data.Row(
parent=sub_test_container_key, id=i, value=float(i * 2),
r_webkit=int(i * 0.25), a_str='some_string',
buildnumber=i - start_rev,
a_tracing_uri='http://trace/%d' % i)
rows.append(row)
ndb.put_multi(rows)
paths = list_tests.GetTestsForTestPathDict(
{
'ChromiumGPU/win7/dromaeo/jslib': ['jslib'],
}, False)['tests']
flot_json_str = graph_json.GetGraphJson(
paths, rev=15000, num_points=8, is_selected=False)
flot = json.loads(flot_json_str)
sub_test_a_index = self._GetSeriesIndex(
flot, 'ChromiumGPU/win7/dromaeo/jslib/sub_test_a')
sub_test_b_index = self._GetSeriesIndex(
flot, 'ChromiumGPU/win7/dromaeo/jslib/sub_test_b')
self.assertEqual(2, len(flot['data']))
self.assertEqual(5, len(flot['data'][sub_test_a_index]['data']))
self.assertEqual(2, len(flot['annotations']['series']))
self.assertEqual(
5, len(list(flot['annotations'].get(sub_test_a_index).keys())))
self.assertEqual(
5, len(list(flot['annotations'].get(sub_test_b_index).keys())))
def testGetGraphJson_ManyUnselected_ReturnsNothing(self):
testing_common.AddTests(
['M'], ['b'], {'suite': {str(i): {} for i in range(100)}})
test_paths = ['M/b/suite/%s' % i for i in range(100)]
for p in test_paths:
testing_common.AddRows(p, [1])
path_list = list_tests.GetTestsForTestPathDict(
{p: [] for p in test_paths}, False)['tests']
response = graph_json.GetGraphJson(path_list, is_selected=False)
self.assertEqual(
{'data': {}, 'annotations': {}, 'error_bars': {}},
json.loads(response))
class GraphJsonParseRequestArgumentsTest(testing_common.TestCase):
def _HandlerWithMockRequestParams(self, **params):
"""Returns a GraphJsonHandler object with canned request parameters."""
request_params = {
'test_path_dict': {
'Master/b1/scrolling/frame_times/about.com': [],
'Master/b2/scrolling/frame_times/about.com': [],
'Master/linux/dromaeo.domcoremodify/dom': [],
}
}
request_params.update(params)
handler = graph_json.GraphJsonHandler()
handler.request = mock.MagicMock()
handler.request.get = mock.MagicMock(
return_value=json.dumps(request_params))
return handler
def testParseRequestArguments(self):
# The numerical arguments get converted to integers, and the
# unspecified arguments get set to None.
handler = self._HandlerWithMockRequestParams(rev='12345', num_points='123')
expected = {
'test_paths': [
'Master/b1/scrolling/frame_times/about.com',
'Master/b2/scrolling/frame_times/about.com',
'Master/linux/dromaeo.domcoremodify/dom'
],
'rev': 12345,
'num_points': 123,
'start_rev': None,
'end_rev': None,
'is_selected': None,
}
actual = handler._ParseRequestArguments()
actual['test_paths'].sort()
self.assertEqual(expected, actual)
def testParseRequestArguments_TestPathListSpecified(self):
handler = self._HandlerWithMockRequestParams(
test_path_dict=None, test_path_list=[
'Master/b1/scrolling/frame_times/about.com',
'Master/b2/scrolling/frame_times/about.com',
'Master/linux/dromaeo.domcoremodify/dom'])
expected = {
'test_paths': [
'Master/b1/scrolling/frame_times/about.com',
'Master/b2/scrolling/frame_times/about.com',
'Master/linux/dromaeo.domcoremodify/dom'
],
'rev': None,
'num_points': 150,
'start_rev': None,
'end_rev': None,
'is_selected': None,
}
actual = handler._ParseRequestArguments()
self.assertEqual(expected, actual)
def testParseRequestArguments_OnlyTestPathDictSpecified(self):
# No revision or number of points is specified, so they're set to None.
handler = self._HandlerWithMockRequestParams()
expected = {
'test_paths': [
'Master/b1/scrolling/frame_times/about.com',
'Master/b2/scrolling/frame_times/about.com',
'Master/linux/dromaeo.domcoremodify/dom',
],
'rev': None,
'num_points': graph_json._DEFAULT_NUM_POINTS,
'start_rev': None,
'end_rev': None,
'is_selected': None,
}
actual = handler._ParseRequestArguments()
actual['test_paths'].sort()
self.assertEqual(expected, actual)
def testParseRequestArguments_NegativeRevision(self):
# Negative revision is invalid; it's the same as no revision.
handler = self._HandlerWithMockRequestParams(rev='-1')
expected = {
'test_paths': [
'Master/b1/scrolling/frame_times/about.com',
'Master/b2/scrolling/frame_times/about.com',
'Master/linux/dromaeo.domcoremodify/dom',
],
'rev': None,
'num_points': graph_json._DEFAULT_NUM_POINTS,
'start_rev': None,
'end_rev': None,
'is_selected': None,
}
actual = handler._ParseRequestArguments()
actual['test_paths'].sort()
self.assertEqual(expected, actual)
class GraphJsonHelperFunctionTest(testing_common.TestCase):
def testPointInfoDict_StdioUriMarkdown(self):
testing_common.AddTests(['Master'], ['b'], {'my_suite': {}})
test = utils.TestKey('Master/b/my_suite').get()
test.buildername = 'MyBuilder'
test_container_key = utils.GetTestContainerKey(test)
row = graph_data.Row(id=345, buildnumber=456, parent=test_container_key)
# Test buildbot format
row.a_stdio_uri = ('[Buildbot stdio]('
'http://build.chromium.org/p/my.master.id/'
'builders/MyBuilder%20%281%29/builds/456/steps/'
'my_suite/logs/stdio)')
point_info = graph_json._PointInfoDict(row, {})
self.assertEqual(
'[Buildbot stdio](https://luci-logdog.appspot.com/v/?s='
'chrome%2Fbb%2Fmy.master.id%2FMyBuilder__1_%2F456%2F%2B%2F'
'recipes%2Fsteps%2Fmy_suite%2F0%2Fstdout)', point_info['a_stdio_uri'])
self.assertEqual(
'[Buildbot status page](http://build.chromium.org/p/my.master.id/'
'builders/MyBuilder%20%281%29/builds/456)',
point_info['a_buildbot_status_page'])
# Test non-buildbot format
    row.a_stdio_uri = '[Buildbot stdio](http://unknown/type)'
point_info = graph_json._PointInfoDict(row, {})
self.assertEqual(row.a_stdio_uri, point_info['a_stdio_uri'])
self.assertIsNone(point_info.get('a_buildbot_status_page'))
def testPointInfoDict_BuildUri_NoBuildbotUri(self):
testing_common.AddTests(['Master'], ['b'], {'my_suite': {}})
test = utils.TestKey('Master/b/my_suite').get()
test.buildername = 'MyBuilder'
test_container_key = utils.GetTestContainerKey(test)
row = graph_data.Row(id=345, buildnumber=456, parent=test_container_key)
# Test buildbot format
row.a_stdio_uri = ('[Buildbot stdio]('
'http://build.chromium.org/p/my.master.id/'
'builders/MyBuilder%20%281%29/builds/456/steps/'
'my_suite/logs/stdio)')
row.a_build_uri = ('[Build]('
'http://foo/bar)')
point_info = graph_json._PointInfoDict(row, {})
self.assertEqual(
'[Buildbot stdio](https://luci-logdog.appspot.com/v/?s='
'chrome%2Fbb%2Fmy.master.id%2FMyBuilder__1_%2F456%2F%2B%2F'
'recipes%2Fsteps%2Fmy_suite%2F0%2Fstdout)', point_info['a_stdio_uri'])
self.assertIsNone(point_info.get('a_buildbot_status_page'))
self.assertEqual(row.a_build_uri, point_info['a_build_uri'])
def testPointInfoDict_RowHasNoTracingUri_ResultHasNoTracingUri(self):
testing_common.AddTests(['Master'], ['b'], {'my_suite': {}})
rows = testing_common.AddRows('Master/b/my_suite', [345])
# This row has no a_tracing_uri property, so there should be no
# trace annotation returned by _PointInfoDict.
point_info = graph_json._PointInfoDict(rows[0], {})
self.assertFalse(hasattr(rows[0], 'a_tracing_uri'))
self.assertNotIn('a_tracing_uri', point_info)
if __name__ == '__main__':
unittest.main()
| endlessm/chromium-browser | third_party/catapult/dashboard/dashboard/graph_json_test.py | Python | bsd-3-clause | 32,592 |
import unittest
import unittest.mock as mock
import blivet
from pyanaconda.modules.storage.disk_initialization import DiskInitializationConfig
from pyanaconda.modules.storage.devicetree import create_storage
from pyanaconda.core.constants import CLEAR_PARTITIONS_ALL, CLEAR_PARTITIONS_LINUX, CLEAR_PARTITIONS_NONE
from parted import PARTITION_NORMAL
from blivet.flags import flags
DEVICE_CLASSES = [
blivet.devices.DiskDevice,
blivet.devices.PartitionDevice
]
@unittest.skipUnless(not any(x.unavailable_type_dependencies() for x in DEVICE_CLASSES), "some unsupported device classes required for this test")
class ClearPartTestCase(unittest.TestCase):
def setUp(self):
flags.testing = True
self._storage = create_storage()
self._config = DiskInitializationConfig()
def _can_remove(self, device):
return self._config.can_remove(self._storage, device)
def test_should_clear(self):
""" Test the can_remove method. """
DiskDevice = blivet.devices.DiskDevice
PartitionDevice = blivet.devices.PartitionDevice
# sda is a disk with an existing disklabel containing two partitions
sda = DiskDevice("sda", size=100000, exists=True)
sda.format = blivet.formats.get_format("disklabel", device=sda.path,
exists=True)
sda.format._parted_disk = mock.Mock()
sda.format._parted_device = mock.Mock()
sda.format._parted_disk.configure_mock(partitions=[])
self._storage.devicetree._add_device(sda)
# sda1 is a partition containing an existing ext4 filesystem
sda1 = PartitionDevice("sda1", size=500, exists=True,
parents=[sda])
sda1._parted_partition = mock.Mock(**{'type': PARTITION_NORMAL,
'getLength.return_value': int(sda1.size),
'getFlag.return_value': 0,
'number': 1})
sda1.format = blivet.formats.get_format("ext4", mountpoint="/boot",
device=sda1.path,
exists=True)
self._storage.devicetree._add_device(sda1)
# sda2 is a partition containing an existing vfat filesystem
sda2 = PartitionDevice("sda2", size=10000, exists=True,
parents=[sda])
sda2._parted_partition = mock.Mock(**{'type': PARTITION_NORMAL,
'getLength.return_value': int(sda2.size),
'getFlag.return_value': 0,
'number': 2})
sda2.format = blivet.formats.get_format("vfat", mountpoint="/foo",
device=sda2.path,
exists=True)
self._storage.devicetree._add_device(sda2)
# sdb is an unpartitioned disk containing an xfs filesystem
sdb = DiskDevice("sdb", size=100000, exists=True)
sdb.format = blivet.formats.get_format("xfs", device=sdb.path,
exists=True)
self._storage.devicetree._add_device(sdb)
# sdc is an unformatted/uninitialized/empty disk
sdc = DiskDevice("sdc", size=100000, exists=True)
self._storage.devicetree._add_device(sdc)
# sdd is a disk containing an existing disklabel with no partitions
sdd = DiskDevice("sdd", size=100000, exists=True)
sdd.format = blivet.formats.get_format("disklabel", device=sdd.path,
exists=True)
self._storage.devicetree._add_device(sdd)
#
# clearpart type none
#
self._config.initialization_mode = CLEAR_PARTITIONS_NONE
assert not self._can_remove(sda1), \
"type none should not clear any partitions"
assert not self._can_remove(sda2), \
"type none should not clear any partitions"
self._config.initialize_labels = False
assert not self._can_remove(sda), \
"type none should not clear non-empty disks"
assert not self._can_remove(sdb), \
"type none should not clear formatting from unpartitioned disks"
assert not self._can_remove(sdc), \
"type none should not clear empty disk without initlabel"
assert not self._can_remove(sdd), \
"type none should not clear empty partition table without initlabel"
self._config.initialize_labels = True
assert not self._can_remove(sda), \
"type none should not clear non-empty disks even with initlabel"
assert not self._can_remove(sdb), \
"type non should not clear formatting from unpartitioned disks even with initlabel"
assert self._can_remove(sdc), \
"type none should clear empty disks when initlabel is set"
assert self._can_remove(sdd), \
"type none should clear empty partition table when initlabel is set"
#
# clearpart type linux
#
self._config.initialization_mode = CLEAR_PARTITIONS_LINUX
assert self._can_remove(sda1), \
"type linux should clear partitions containing ext4 filesystems"
assert not self._can_remove(sda2), \
"type linux should not clear partitions containing vfat filesystems"
self._config.initialize_labels = False
assert not self._can_remove(sda), \
"type linux should not clear non-empty disklabels"
assert self._can_remove(sdb), \
"type linux should clear linux-native whole-disk " \
"formatting regardless of initlabel setting"
assert not self._can_remove(sdc), \
"type linux should not clear unformatted disks unless initlabel is set"
assert not self._can_remove(sdd), \
"type linux should not clear disks with empty " \
"partition tables unless initlabel is set"
self._config.initialize_labels = True
assert not self._can_remove(sda), \
"type linux should not clear non-empty disklabels"
assert self._can_remove(sdb), \
"type linux should clear linux-native whole-disk " \
"formatting regardless of initlabel setting"
assert self._can_remove(sdc), \
"type linux should clear unformatted disks when initlabel is set"
assert self._can_remove(sdd), \
"type linux should clear disks with empty " \
"partition tables when initlabel is set"
sda1.protected = True
assert not self._can_remove(sda1), \
"protected devices should never be cleared"
assert not self._can_remove(sda), \
"disks containing protected devices should never be cleared"
sda1.protected = False
#
# clearpart type all
#
self._config.initialization_mode = CLEAR_PARTITIONS_ALL
assert self._can_remove(sda1), \
"type all should clear all partitions"
assert self._can_remove(sda2), \
"type all should clear all partitions"
self._config.initialize_labels = False
assert self._can_remove(sda), \
"type all should initialize all disks"
assert self._can_remove(sdb), \
"type all should initialize all disks"
assert self._can_remove(sdc), \
"type all should initialize all disks"
assert self._can_remove(sdd), \
"type all should initialize all disks"
self._config.initialize_labels = True
assert self._can_remove(sda), \
"type all should initialize all disks"
assert self._can_remove(sdb), \
"type all should initialize all disks"
assert self._can_remove(sdc), \
"type all should initialize all disks"
assert self._can_remove(sdd), \
"type all should initialize all disks"
sda1.protected = True
assert not self._can_remove(sda1), \
"protected devices should never be cleared"
assert not self._can_remove(sda), \
"disks containing protected devices should never be cleared"
sda1.protected = False
#
# clearpart type list
#
# TODO
def tearDown(self):
flags.testing = False
def test_initialize_disk(self):
"""
magic partitions
non-empty partition table
"""
pass
def test_recursive_remove(self):
"""
protected device at various points in stack
"""
pass
| jkonecny12/anaconda | tests/unit_tests/pyanaconda_tests/modules/storage/test_clearpart.py | Python | gpl-2.0 | 8,807 |
# xorn.geda.netlist - gEDA Netlist Extraction and Generation
# Copyright (C) 1998-2010 Ales Hvezda
# Copyright (C) 1998-2010 gEDA Contributors (see ChangeLog for details)
# Copyright (C) 2013-2017 Roland Lutz
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import collections
Template = collections.namedtuple('Template', ['keyword', 'attribs'])
Attrib = collections.namedtuple('Attrib', ['name', 'prefix', 'default'])
component_templates = {
'RESISTOR': Template('RES', [
Attrib('value', 'R=', 'use value attrib for resistance'),
]),
'INDUCTOR': Template('IND', [
Attrib('value', 'L=', 'use value attrib for inductance'),
Attrib('Q', 'Q=', None)
]),
'CAPACITOR': Template('CAP', [
Attrib('value', 'C=', 'use value attrib for capacitance')
]),
'TLIN': Template('TLIN', [
Attrib('Z', 'Z=', 50),
Attrib('length', 'E=', 'length attrib for length'),
Attrib('F', 'F=', 'F attrib for frequency')
]),
'CLIN': Template('CLIN', [
Attrib('ZE', 'ZE=', None),
Attrib('ZO', 'ZO=', None),
Attrib('E', 'E=', None),
Attrib('F', 'F=', None)
]),
'SPARAMBLOCK': Template('BLOCK', [
Attrib('filename', '', 'filename attrib for sparams')
])
}
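# Illustrative rendering by run() below: a RESISTOR package with refdes R1,
# value=50 and pins on nets 1 and 0 emits roughly "\tRES\t1 0 R=50\t\t% R1".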
def run(f, netlist):
netnumbers = {}
number = 1
for net in reversed(netlist.nets):
if net.name == 'GND':
netnumbers[net.name] = 0
else:
netnumbers[net.name] = number
number += 1
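    # e.g. nets ordered [..., 'N2', 'N1', 'GND'] yield
    # {'GND': 0, 'N1': 1, 'N2': 2} (illustrative; numbering follows the
    # reversed net order, with GND pinned to node 0).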
f.write('% ViPEC RF Netlister\n')
f.write('% Written by Matthew Ettus\n')
f.write('% Based on code by Bas Gieltjes\n')
f.write('CKT\n')
# component writing
for package in reversed(netlist.packages):
device = package.get_attribute('device', 'unknown')
if device not in ['VIPEC', 'SMITH', 'GRID']:
# get template
try:
template = component_templates[device]
except KeyError:
package.warn("template \"%s\" not found" % device)
template = Template('error', [])
f.write('\t%s\t' % template.keyword)
# write net name of node
for i in xrange(len(package.pins)):
try:
pin = package.get_pin_by_pinseq(i + 1)
except KeyError:
netname = 'ERROR_INVALID_PIN'
else:
netname = pin.net.name
f.write(str(netnumbers.get(netname, '#<unspecified>')))
f.write(' ')
# write attribs
for attrib in template.attribs:
value = package.get_attribute(attrib.name, attrib.default)
if value is None:
continue
f.write('%s%s\t' % (attrib.prefix, value))
f.write('\t%% %s\n' % package.refdes)
# misc components
f.write('\tDEF2P\t%s %s\n' % (netnumbers.get('PORT1', '#<unspecified>'),
netnumbers.get('PORT2', '#<unspecified>')))
f.write('\tTERM\t50 50\n')
f.write('\n')
# analysis block
for package in reversed(netlist.packages):
if package.get_attribute('device', None) == 'VIPEC':
value = package.get_attribute('value', None)
if value is not None:
f.write('R=%s\n' % value)
f.write('\n')
| bert/geda-gaf | xorn/src/backend/gnet_vipec.py | Python | gpl-2.0 | 4,057 |
from distutils.core import setup
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
# Add a test command to setup.py
# Code borrowed from: http://da44en.wordpress.com/2002/11/22/using-distutils/
from distutils.core import Command
from unittest import TextTestRunner, TestLoader
from glob import glob
from os.path import splitext, basename, join as pjoin, walk
class TestCommand(Command):
user_options = [ ]
def initialize_options(self):
self._dir = os.getcwd()
def finalize_options(self):
pass
def run(self):
'''
        Finds all the test modules in tests/ and runs them.
'''
testfiles = [ ]
for t in glob(pjoin(self._dir, 'tests', '*.py')):
if not t.endswith('__init__.py'):
testfiles.append('.'.join(
['tests', splitext(basename(t))[0]])
)
tests = TestLoader().loadTestsFromNames(testfiles)
t = TextTestRunner(verbosity = 2)
t.run(tests)
class CleanCommand(Command):
user_options = [ ]
def initialize_options(self):
self._clean_me = [ ]
for root, dirs, files in os.walk('.'):
for f in files:
if f.endswith('.pyc'):
self._clean_me.append(pjoin(root, f))
def finalize_options(self):
pass
def run(self):
for clean_me in self._clean_me:
try:
os.unlink(clean_me)
            except OSError:
pass
def return_version():
return __import__('dirtt').get_version()
class osx_install_data(install_data):
# On MacOS, the platform-specific lib dir is /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific fix
# for this in distutils.command.install_data#306. It fixes install_lib but not
# install_data, which is why we roll our own install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is set to the
# fixed directory, so we set the installdir to install_lib. The
# install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
cmdclasses['test'] = TestCommand
cmdclasses['clean'] = CleanCommand
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
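# e.g. fullsplit('dirtt/scripts') -> ['dirtt', 'scripts'] (illustrative).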
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
dirtt_dir = 'dirtt'
for dirpath, dirnames, filenames in os.walk(dirtt_dir):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'): del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Small hack for working with bdist_wininst.
# See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
if len(sys.argv) > 1 and sys.argv[1] == 'bdist_wininst':
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
setup(
name='python-dirtt',
packages=packages,
cmdclass = cmdclasses,
scripts=['dirtt/scripts/mkproject.py','dirtt/scripts/mktemplate.py','dirtt/scripts/mktree.py'],
data_files = data_files,
# data_files=[
# ('/var/dirtt/dirtt.dtd', ['dirtt/data/dirtt.dtd']),
# ('/var/dirtt/templates/project.xml', ['dirtt/data/templates/project.xml']),
# ('/var/dirtt/templates/project_sequence.xml', ['dirtt/data/templates/project_sequence.xml']),
# ('/var/dirtt/templates/project_shot.xml', ['dirtt/data/templates/project_shot.xml']),
# ('/var/dirtt/templates/project_work.xml', ['dirtt/data/templates/project_work.xml']),
# ('/var/dirtt/templates/project_master.xml', ['dirtt/data/templates/project_master.xml']),
# ('/var/dirtt/templates/project_production.xml', ['dirtt/data/templates/project_production.xml'])
# ('/var/dirtt/templates/workspace.mel', ['dirtt/data/templates/workspace.mel'])
# ],
version=return_version(),
description="Directory Tree Templater",
long_description="""
python-dirtt - Directory Tree Templater
(c) 2015 Robert Moggach
Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
Dirtt is a standalone tool and library used to generate
directory and file structures from xml templates that describe
repeatedly used filesystem layouts such as project structures
or elements therein.
It provides a subclassed implementation of xml.sax.handler ContentHandler
with internal methods that read,parse,render,and execute builds of
user defined XML directory tree templates.
""",
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: System :: Systems Administration',
'Topic :: Text Processing :: Markup :: XML'
],
keywords='filesystem template utilities',
url='http://robmoggach.github.io/python-dirtt/',
download_url = 'https://github.com/robmoggach/python-dirtt/tarball/v0.2.0',
author='Robert Moggach',
author_email='[email protected]',
maintainer='Robert Moggach',
maintainer_email='[email protected]',
license='MIT'
)
| robmoggach/python-dirtt | setup.py | Python | mit | 6,693 |
# Simpler (but far more limited) API for ID3 editing
# Copyright 2006 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of version 2 of the GNU General Public License as
# published by the Free Software Foundation.
#
# $Id: id3.py 3086 2006-04-04 02:13:21Z piman $
"""Easier access to ID3 tags.
EasyID3 is a wrapper around mutagen.id3.ID3 to make ID3 tags appear
more like Vorbis or APEv2 tags.
"""
from fnmatch import fnmatchcase
import mutagen.id3
from mutagen import Metadata
from mutagen._util import DictMixin, dict_match
from mutagen.id3 import ID3, error, delete, ID3FileType
__all__ = ["EasyID3", "Open", "delete"]
class EasyID3KeyError(KeyError, ValueError, error):
"""Raised when trying to get/set an invalid key.
Subclasses both KeyError and ValueError for API compatibility,
catching KeyError is preferred.
"""
class EasyID3(DictMixin, Metadata):
"""A file with an ID3 tag.
Like Vorbis comments, EasyID3 keys are case-insensitive ASCII
strings. Only a subset of ID3 frames are supported by default. Use
EasyID3.RegisterKey and its wrappers to support more.
You can also set the GetFallback, SetFallback, and DeleteFallback
to generic key getter/setter/deleter functions, which are called
if no specific handler is registered for a key. Additionally,
ListFallback can be used to supply an arbitrary list of extra
keys. These can be set on EasyID3 or on individual instances after
creation.
To use an EasyID3 class with mutagen.mp3.MP3:
from mutagen.mp3 import EasyMP3 as MP3
MP3(filename)
Because many of the attributes are constructed on the fly, things
like the following will not work:
ezid3["performer"].append("Joe")
Instead, you must do:
values = ezid3["performer"]
values.append("Joe")
ezid3["performer"] = values
"""
Set = {}
Get = {}
Delete = {}
List = {}
# For compatibility.
valid_keys = Get
GetFallback = None
SetFallback = None
DeleteFallback = None
ListFallback = None
def RegisterKey(cls, key, getter=None, setter=None, deleter=None, lister=None):
"""Register a new key mapping.
A key mapping is four functions, a getter, setter, deleter,
and lister. The key may be either a string or a glob pattern.
        The getter, deleter, and lister receive an ID3 instance and
the requested key name. The setter also receives the desired
value, which will be a list of strings.
The getter, setter, and deleter are used to implement __getitem__,
__setitem__, and __delitem__.
The lister is used to implement keys(). It should return a
list of keys that are actually in the ID3 instance, provided
by its associated getter.
"""
key = key.lower()
if getter is not None:
cls.Get[key] = getter
if setter is not None:
cls.Set[key] = setter
if deleter is not None:
cls.Delete[key] = deleter
if lister is not None:
cls.List[key] = lister
RegisterKey = classmethod(RegisterKey)
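    # Illustrative sketch of a hand-rolled registration (hypothetical key
    # name; RegisterTextKey below does the same for simple text frames):
    #
    #   def title_get(id3, key):
    #       return list(id3["TIT2"])
    #   def title_set(id3, key, value):
    #       id3.add(mutagen.id3.TIT2(encoding=3, text=value))
    #   EasyID3.RegisterKey("mytitle", title_get, title_set)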
def RegisterTextKey(cls, key, frameid):
"""Register a text key.
If the key you need to register is a simple one-to-one mapping
of ID3 frame name to EasyID3 key, then you can use this
function:
EasyID3.RegisterTextKey("title", "TIT2")
"""
def getter(id3, key):
return list(id3[frameid])
def setter(id3, key, value):
try:
frame = id3[frameid]
except KeyError:
id3.add(mutagen.id3.Frames[frameid](encoding=3, text=value))
else:
frame.encoding = 3
frame.text = value
def deleter(id3, key):
del id3[frameid]
cls.RegisterKey(key, getter, setter, deleter)
RegisterTextKey = classmethod(RegisterTextKey)
def RegisterTXXXKey(cls, key, desc):
"""Register a user-defined text frame key.
Some ID3 tags are stored in TXXX frames, which allow a
freeform 'description' which acts as a subkey,
e.g. TXXX:BARCODE.
EasyID3.RegisterTXXXKey('barcode', 'BARCODE').
"""
frameid = "TXXX:" + desc
def getter(id3, key):
return list(id3[frameid])
def setter(id3, key, value):
try:
frame = id3[frameid]
except KeyError:
enc = 0
# Store 8859-1 if we can, per MusicBrainz spec.
for v in value:
if max(v) > u"\x7f":
enc = 3
id3.add(mutagen.id3.TXXX(encoding=enc, text=value, desc=desc))
else:
frame.text = value
def deleter(id3, key):
del id3[frameid]
cls.RegisterKey(key, getter, setter, deleter)
RegisterTXXXKey = classmethod(RegisterTXXXKey)
def __init__(self, filename=None):
self.__id3 = ID3()
self.load = self.__id3.load
self.save = self.__id3.save
self.delete = self.__id3.delete
if filename is not None:
self.load(filename)
filename = property(
lambda s: s.__id3.filename, lambda s, fn: setattr(s.__id3, "filename", fn)
)
    _size = property(lambda s: s.__id3.size, lambda s, fn: setattr(s.__id3, "_size", fn))
def __getitem__(self, key):
key = key.lower()
func = dict_match(self.Get, key, self.GetFallback)
if func is not None:
return func(self.__id3, key)
else:
raise EasyID3KeyError("%r is not a valid key" % key)
def __setitem__(self, key, value):
key = key.lower()
if isinstance(value, basestring):
value = [value]
func = dict_match(self.Set, key, self.SetFallback)
if func is not None:
return func(self.__id3, key, value)
else:
raise EasyID3KeyError("%r is not a valid key" % key)
def __delitem__(self, key):
key = key.lower()
func = dict_match(self.Delete, key, self.DeleteFallback)
if func is not None:
return func(self.__id3, key)
else:
raise EasyID3KeyError("%r is not a valid key" % key)
def keys(self):
keys = []
for key in self.Get.keys():
if key in self.List:
keys.extend(self.List[key](self.__id3, key))
elif key in self:
keys.append(key)
if self.ListFallback is not None:
keys.extend(self.ListFallback(self.__id3, ""))
return keys
def pprint(self):
"""Print tag key=value pairs."""
strings = []
for key in sorted(self.keys()):
values = self[key]
for value in values:
strings.append("%s=%s" % (key, value))
return "\n".join(strings)
Open = EasyID3
def genre_get(id3, key):
return id3["TCON"].genres
def genre_set(id3, key, value):
try:
frame = id3["TCON"]
except KeyError:
id3.add(mutagen.id3.TCON(encoding=3, text=value))
else:
frame.encoding = 3
frame.genres = value
def genre_delete(id3, key):
del id3["TCON"]
def date_get(id3, key):
return [stamp.text for stamp in id3["TDRC"].text]
def date_set(id3, key, value):
id3.add(mutagen.id3.TDRC(encoding=3, text=value))
def date_delete(id3, key):
del id3["TDRC"]
def performer_get(id3, key):
people = []
wanted_role = key.split(":", 1)[1]
try:
mcl = id3["TMCL"]
except KeyError:
raise KeyError(key)
for role, person in mcl.people:
if role == wanted_role:
people.append(person)
if people:
return people
else:
raise KeyError(key)
def performer_set(id3, key, value):
wanted_role = key.split(":", 1)[1]
try:
mcl = id3["TMCL"]
except KeyError:
mcl = mutagen.id3.TMCL(encoding=3, people=[])
id3.add(mcl)
mcl.encoding = 3
people = [p for p in mcl.people if p[0] != wanted_role]
for v in value:
people.append((wanted_role, v))
mcl.people = people
def performer_delete(id3, key):
wanted_role = key.split(":", 1)[1]
try:
mcl = id3["TMCL"]
except KeyError:
raise KeyError(key)
people = [p for p in mcl.people if p[0] != wanted_role]
if people == mcl.people:
raise KeyError(key)
elif people:
mcl.people = people
else:
del id3["TMCL"]
def performer_list(id3, key):
try:
mcl = id3["TMCL"]
except KeyError:
return []
else:
return list(set("performer:" + p[0] for p in mcl.people))
def musicbrainz_trackid_get(id3, key):
return [id3["UFID:http://musicbrainz.org"].data.decode("ascii")]
def musicbrainz_trackid_set(id3, key, value):
if len(value) != 1:
raise ValueError("only one track ID may be set per song")
value = value[0].encode("ascii")
try:
frame = id3["UFID:http://musicbrainz.org"]
except KeyError:
frame = mutagen.id3.UFID(owner="http://musicbrainz.org", data=value)
id3.add(frame)
else:
frame.data = value
def musicbrainz_trackid_delete(id3, key):
del id3["UFID:http://musicbrainz.org"]
def website_get(id3, key):
urls = [frame.url for frame in id3.getall("WOAR")]
if urls:
return urls
else:
raise EasyID3KeyError(key)
def website_set(id3, key, value):
id3.delall("WOAR")
for v in value:
id3.add(mutagen.id3.WOAR(url=v))
def website_delete(id3, key):
id3.delall("WOAR")
def gain_get(id3, key):
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
raise EasyID3KeyError(key)
else:
return [u"%+f dB" % frame.gain]
def gain_set(id3, key, value):
if len(value) != 1:
raise ValueError("there must be exactly one gain value, not %r.", value)
gain = float(value[0].split()[0])
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
frame = mutagen.id3.RVA2(desc=key[11:-5], gain=0, peak=0, channel=1)
id3.add(frame)
frame.gain = gain
def gain_delete(id3, key):
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
pass
else:
if frame.peak:
frame.gain = 0.0
else:
del id3["RVA2:" + key[11:-5]]
def peak_get(id3, key):
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
raise EasyID3KeyError(key)
else:
return [u"%f" % frame.peak]
def peak_set(id3, key, value):
if len(value) != 1:
raise ValueError("there must be exactly one peak value, not %r.", value)
peak = float(value[0])
if peak >= 2 or peak < 0:
raise ValueError("peak must be => 0 and < 2.")
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
frame = mutagen.id3.RVA2(desc=key[11:-5], gain=0, peak=0, channel=1)
id3.add(frame)
frame.peak = peak
def peak_delete(id3, key):
try:
frame = id3["RVA2:" + key[11:-5]]
except KeyError:
pass
else:
if frame.gain:
frame.peak = 0.0
else:
del id3["RVA2:" + key[11:-5]]
def peakgain_list(id3, key):
keys = []
for frame in id3.getall("RVA2"):
keys.append("replaygain_%s_gain" % frame.desc)
keys.append("replaygain_%s_peak" % frame.desc)
return keys
for frameid, key in {
"TALB": "album",
"TBPM": "bpm",
"TCMP": "compilation", # iTunes extension
"TCOM": "composer",
"TCOP": "copyright",
"TENC": "encodedby",
"TEXT": "lyricist",
"TLEN": "length",
"TMED": "media",
"TMOO": "mood",
"TIT2": "title",
"TIT3": "version",
"TPE1": "artist",
"TPE2": "performer",
"TPE3": "conductor",
"TPE4": "arranger",
"TPOS": "discnumber",
"TPUB": "organization",
"TRCK": "tracknumber",
"TOLY": "author",
"TSO2": "albumartistsort", # iTunes extension
"TSOA": "albumsort",
"TSOC": "composersort", # iTunes extension
"TSOP": "artistsort",
"TSOT": "titlesort",
"TSRC": "isrc",
"TSST": "discsubtitle",
}.iteritems():
EasyID3.RegisterTextKey(key, frameid)
EasyID3.RegisterKey("genre", genre_get, genre_set, genre_delete)
EasyID3.RegisterKey("date", date_get, date_set, date_delete)
EasyID3.RegisterKey(
"performer:*", performer_get, performer_set, performer_delete, performer_list
)
EasyID3.RegisterKey(
"musicbrainz_trackid",
musicbrainz_trackid_get,
musicbrainz_trackid_set,
musicbrainz_trackid_delete,
)
EasyID3.RegisterKey("website", website_get, website_set, website_delete)
EasyID3.RegisterKey("website", website_get, website_set, website_delete)
EasyID3.RegisterKey("replaygain_*_gain", gain_get, gain_set, gain_delete, peakgain_list)
EasyID3.RegisterKey("replaygain_*_peak", peak_get, peak_set, peak_delete)
# At various times, information for this came from
# http://musicbrainz.org/docs/specs/metadata_tags.html
# http://bugs.musicbrainz.org/ticket/1383
# http://musicbrainz.org/doc/MusicBrainzTag
for desc, key in {
u"MusicBrainz Artist Id": "musicbrainz_artistid",
u"MusicBrainz Album Id": "musicbrainz_albumid",
u"MusicBrainz Album Artist Id": "musicbrainz_albumartistid",
u"MusicBrainz TRM Id": "musicbrainz_trmid",
u"MusicIP PUID": "musicip_puid",
u"MusicMagic Fingerprint": "musicip_fingerprint",
u"MusicBrainz Album Status": "musicbrainz_albumstatus",
u"MusicBrainz Album Type": "musicbrainz_albumtype",
u"MusicBrainz Album Release Country": "releasecountry",
u"MusicBrainz Disc Id": "musicbrainz_discid",
u"ASIN": "asin",
u"ALBUMARTISTSORT": "albumartistsort",
u"BARCODE": "barcode",
}.iteritems():
EasyID3.RegisterTXXXKey(key, desc)
class EasyID3FileType(ID3FileType):
"""Like ID3FileType, but uses EasyID3 for tags."""
ID3 = EasyID3
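# Editor's note: a minimal usage sketch of the extended keys registered
# above. It is a hedged example, not part of mutagen: "song.mp3" is a
# hypothetical file name and must already carry an ID3 tag.
if __name__ == "__main__":
    audio = EasyID3("song.mp3")
    audio["performer:vocals"] = [u"Alice"]           # stored in a TMCL frame
    audio["website"] = [u"http://example.com/band"]  # stored in WOAR frames
    audio["replaygain_track_gain"] = [u"-3.5 dB"]    # stored in an RVA2 frame
    audio.save()
    print audio["performer:vocals"]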
| hzlf/openbroadcast.org | website/tools/mutagen-v1.20.1/easyid3.py | Python | gpl-3.0 | 14,109 |
"""
E271
Multiple spaces after keyword
"""
| landscape-test/all-messages | messages/pep8/E271.py | Python | unlicense | 44 |
# Rekall Memory Forensics
# Copyright (C) 2007-2011 Volatile Systems
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Additional Authors:
# Michael Cohen <[email protected]>
# Mike Auty <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# pylint: disable=protected-access
from rekall import testlib
from rekall_lib import utils
from rekall.plugins.common import memmap
from rekall.plugins.windows import common
class WinPsList(common.WinProcessFilter):
"""List processes for windows."""
__name = "pslist"
eprocess = None
table_header = [
dict(type="_EPROCESS", name="_EPROCESS"),
dict(name="ppid", width=6, align="r"),
dict(name="thread_count", width=6, align="r"),
dict(name="handle_count", width=8, align="r"),
dict(name="session_id", width=6, align="r"),
dict(name="wow64", width=6),
dict(name="process_create_time", width=24),
dict(name="process_exit_time", width=24)
]
def column_types(self):
result = self._row(self.session.profile._EPROCESS())
result["handle_count"] = result["ppid"]
result["session_id"] = result["ppid"]
return result
def _row(self, task):
return dict(_EPROCESS=task,
ppid=task.InheritedFromUniqueProcessId,
thread_count=task.ActiveThreads,
handle_count=task.ObjectTable.m("HandleCount"),
session_id=task.SessionId,
wow64=task.IsWow64,
process_create_time=task.CreateTime,
process_exit_time=task.ExitTime)
def collect(self):
for task in self.filter_processes():
yield self._row(task)
class WinDllList(common.WinProcessFilter):
"""Prints a list of dll modules mapped into each process."""
__name = "dlllist"
table_header = [
dict(name="divider", type="Divider"),
dict(name="_EPROCESS", hidden=True),
dict(name="base", style="address"),
dict(name="size", style="address"),
dict(name="reason", width=30),
dict(name="dll_path"),
]
def collect(self):
for task in self.filter_processes():
pid = task.UniqueProcessId
divider = "{0} pid: {1:6}\n".format(task.ImageFileName, pid)
if task.Peb:
divider += u"Command line : {0}\n".format(
task.Peb.ProcessParameters.CommandLine)
divider += u"{0}\n\n".format(task.Peb.CSDVersion)
yield dict(divider=divider)
for m in task.get_load_modules():
yield dict(base=m.DllBase,
size=m.SizeOfImage,
reason=m.LoadReason,
dll_path=m.FullDllName,
_EPROCESS=task)
else:
yield dict(divider="Unable to read PEB for task.\n")
class WinMemMap(memmap.MemmapMixIn, common.WinProcessFilter):
"""Calculates the memory regions mapped by a process."""
__name = "memmap"
def _get_highest_user_address(self):
return self.profile.get_constant_object(
"MmHighestUserAddress", "Pointer").v()
class Threads(common.WinProcessFilter):
"""Enumerate threads."""
name = "threads"
table_header = [
dict(name="_ETHREAD", style="address"),
dict(name="pid", align="r", width=6),
dict(name="tid", align="r", width=6),
dict(name="start", style="address"),
dict(name="start_symbol", width=30),
dict(name="Process", width=16),
dict(name="win32_start", style="address"),
dict(name="win32_start_symb")
]
def collect(self):
cc = self.session.plugins.cc()
with cc:
for task in self.filter_processes():
# Resolve names in the process context.
cc.SwitchProcessContext(process=task)
for thread in task.ThreadListHead.list_of_type(
"_ETHREAD", "ThreadListEntry"):
yield dict(_ETHREAD=thread,
pid=thread.Cid.UniqueProcess,
tid=thread.Cid.UniqueThread,
start=thread.StartAddress,
start_symbol=utils.FormattedAddress(
self.session.address_resolver,
thread.StartAddress),
Process=task.ImageFileName,
win32_start=thread.Win32StartAddress,
win32_start_symb=utils.FormattedAddress(
self.session.address_resolver,
thread.Win32StartAddress,
))
class WinMemDump(memmap.MemDumpMixin, common.WinProcessFilter):
"""Dump windows processes."""
class TestWinMemDump(testlib.HashChecker):
"""Test the pslist module."""
PARAMETERS = dict(
commandline="memdump %(pids)s --dump_dir %(tempdir)s",
pid=2624)
class TestMemmap(testlib.SimpleTestCase):
"""Test the pslist module."""
PARAMETERS = dict(
commandline="memmap %(pids)s",
pid=2624)
class TestMemmapCoalesce(testlib.SimpleTestCase):
"""Make sure that memmaps are coalesced properly."""
PARAMETERS = dict(commandline="memmap %(pids)s --coalesce",
pid=2624)
| dsweet04/rekall | rekall-core/rekall/plugins/windows/taskmods.py | Python | gpl-2.0 | 6,207 |
# Copyright 2017, 2018, 2021 Stefan Richthofer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Created on 01.12.2016
"""
This file causes NameErrors if forward-declarations
of Types are not supported properly.
(unless typechecker.check_override_at_runtime == False)
todo: involve something like [str, int, 'TestClass2']
"""
from pytypes import override
class TestClass():
def test_meth0(self, a: int) -> str:
pass
def test_meth1(self, a: 'TestArg2') -> str:
pass
def test_meth2(self, a: int) -> 'TestResult1':
pass
class TestClass2(TestClass):
@override
def test_meth0(self, a: int) -> str:
pass
@override
def test_meth1(self, a: 'TestArg1') -> str:
pass
@override
def test_meth2(self, a: int) -> 'TestResult2':
pass
class TestClass3(TestClass):
@override
def test_meth1(self, a: 'TestArg1') -> str:
pass
@override
def test_meth2(self, a: int) -> 'TestResult2':
pass
class TestArg1():
pass
class TestResult1():
pass
class TestClass3(TestClass):
@override
def test_meth1(self, a: TestArg1) -> str:
pass
@override
def test_meth2(self, a: int) -> 'TestResult2':
pass
class TestArg2(TestArg1):
pass
class TestResult2(TestResult1):
pass
class override_varargs_class_base(object):
# var-arg tests:
def method_vararg1(self, a: int, b: int, *args: int) -> int:
return a+b
def method_vararg2(self, a: int, b: int) -> int:
return a+b
def method_vararg3(self, a: int, b: int, c: float) -> int:
return a+b
# var-kw tests:
def method_varkw1(self, a: int, b: int, **kw: int) -> int:
return a+b
def method_varkw2(self, a: int, b: int, *arg: str, **kw: int) -> int:
return a+b
# default tests:
def method_defaults1(self, a: int, b: int) -> int:
return a+b
def method_defaults2(self, a: int, b: int, *vargs: int) -> int:
return a+b
# kw-only tests (Python 3 only):
def method_kwonly1(self, a: int, b: int, *vargs: float, q: int) -> int:
return a+b+q
def method_kwonly2(self, a: int, b: int, *vargs: float, q: int) -> int:
return a+b+q
def method_kwonly3(self, a: int, b: int, *vargs: float, q: int, v: float) -> int:
return a+b+q
def method_kwonly4(self, a: float, b: int, *vargs: float, q: int) -> int:
return b+q
def method_kwonly5(self, a: float, b: float, *vargs: int, q: int, v: int, **kw: int) -> int:
return q+v+len(kw)
def method_kwonly6(self, a: float, b: int, *vargs: float, q: int, v: int) -> int:
return a+b+q+v
def method_kwonly7(self, a: int, b: float, *vargs: float, q: int, v: int, **kw: int) -> int:
return a+b+q+v
# kw-only tests (Python 2 type hints):
def method_kwonly1_py2(self, a, b, *vargs, q):
# type: (int, int, *float, int) -> int
return a+b+q
def method_kwonly2_py2(self, a, b, *vargs, q):
# type: (int, int, *float, int) -> int
return a+b+q
def method_kwonly3_py2(self, a, b, *vargs, q, v):
# type: (int, int, *float, int, float) -> int
return a+b+q
def method_kwonly4_py2(self, a, b, *vargs, q):
# type: (float, int, *float, int) -> int
return b+q
def method_kwonly5_py2(self, a, b, *vargs, q, v, **kw):
# type: (float, float, *int, int, int, **int) -> int
return q+v+len(kw)
def method_kwonly6_py2(self, a, b, *vargs, q, v):
# type: (float, int, *float, int, int) -> int
return a+b+q+v
def method_kwonly7_py2(self, a, b, *vargs, q, v, **kw):
# type: (int, float, *float, int, int, **int) -> int
return a+b+q+v
class override_varargs_class(override_varargs_class_base):
@override
def method_vararg1(self, a: int, b: float, *args: int) -> int:
return len(args)
@override
def method_vararg2(self, a: int, b: float, *vargs: str) -> int:
return a+len(str(b))+len(vargs)
@override
def method_vararg3(self, a: int, *vgs: float) -> int:
return a+len(vgs)
# var-kw tests:
@override
def method_varkw1(self, a: int, b: int, **kw: float) -> int:
return a+b
@override
def method_varkw2(self, a: int, b: int, *arg: str, **kw: float) -> int:
return a+b
# default tests:
@override
def method_defaults1(self, a: int, b: int, c=4.6) -> int:
return a+b
@override
def method_defaults2(self, a: int, b: int, c: float = 4, *args: int) -> int:
return a+b
# kw-only tests (Python 3 only):
@override
def method_kwonly1(self, a: int, b: int, *vargs: float, q: float, **vkw: str) -> int:
# child can add var-kw
return a+b
@override
def method_kwonly2(self, a: int, b: int, *vargs: float, q: int, v=17) -> int:
# child can add default kw-only
return a+b+q
@override
def method_kwonly3(self, a: int, b: int, *vargs: float, v: float, q: int) -> int:
# child can reorder kw-only
return a+b+q
@override
def method_kwonly4(self, a: float, b: int, q: float, *vargs: float) -> int:
# child can move kw-only to ordinary arg
return len(str(a+b+q))
@override
def method_kwonly5(self, a: float, b: float, *vargs: int, q: float, v: int, **kw: float) -> int:
# child must also have var-kw
return len(str(a+b+v))
@override
def method_kwonly6(self, a: float, b: int, *vargs: float, q: int, **kwargs: float) -> int:
# child can drop kw-only in favor of var-kw
        return a+b+q+kwargs.get("v", 0)  # v is expected to arrive via **kwargs
@override
def method_kwonly7(self, a: int, b: float, *vargs: float, q: float, **kw: int) -> int:
# child can drop kw-only in favor of var-kw
return a+b
# kw-only tests (Python 2 type hints):
@override
def method_kwonly1_py2(self, a, b, *vargs, q, **vkw):
# type: (int, int, *float, float, **str) -> int
# child can add var-kw
return a+b
@override
def method_kwonly2_py2(self, a, b, *vargs, q, v=17):
# type: (int, int, *float, int) -> int
# child can add default kw-only
return a+b+q
@override
def method_kwonly3_py2(self, a, b, *vargs, v, q):
# type: (int, int, *float, float, int) -> int
# child can reorder kw-only
return a+b+q
@override
def method_kwonly4_py2(self, a, b, q, *vargs):
# type: (float, int, float, *float) -> int
# child can move kw-only to ordinary arg
return len(str(a+b+q))
@override
def method_kwonly5_py2(self, a, b, *vargs, q, v, **kw):
# type: (float, float, *int, float, int, **float) -> int
# child must also have var-kw
return len(str(a+b+v))
@override
def method_kwonly6_py2(self, a, b, *vargs, q, **kwargs):
# type: (float, int, *float, int, **float) -> int
# child can drop kw-only in favor of var-kw
        return a+b+q+kwargs.get("v", 0)  # v is expected to arrive via **kwargs
@override
def method_kwonly7_py2(self, a, b, *vargs, q, **kw):
# type: (int, float, *float, float, **int) -> int
# child can drop kw-only in favor of var-kw
return a+b
| Stewori/pytypes | tests/testhelpers/override_testhelper_py3.py | Python | apache-2.0 | 7,697 |
# Calculator project for collaboration
a = float(raw_input("Enter the first number :"))
b = float(raw_input("\nEnter the second Number :"))
def addition(x, y):
    return x + y
def sum(x, y):  # note: shadows the built-in sum(); uses its own parameters
    return x + y
def division(x, y):
    if y != 0:
        return x / y
    else:
        return "Division by Zero not defined"
def mul(x, y):
    return x * y
def subtraction(x, y):
    return x - y
def mod(x, y):
    return x % y
print "Calculator"
| beniza/learningPython | myCalc.py | Python | mit | 404 |
from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
compiler_module = 'django.contrib.gis.db.models.sql.compiler'
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, qn):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = qn.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
| ulope/django | django/contrib/gis/db/backends/mysql/operations.py | Python | bsd-3-clause | 1,888 |
from django.conf.urls import include, url
from rest_framework import routers
from bills import api
router = routers.DefaultRouter()
router.register(r'bills', api.BillViewSet, base_name='bills')
urlpatterns = [
url(r'^', include(router.urls))
]
| gnmerritt/dailyrippl | rippl/bills/urls.py | Python | mit | 251 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Ignore_errors dataset transformations."""
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.ops import gen_experimental_dataset_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export("data.experimental.ignore_errors")
def ignore_errors(log_warning=False):
"""Creates a `Dataset` from another `Dataset` and silently ignores any errors.
Use this transformation to produce a dataset that contains the same elements
as the input, but silently drops any elements that caused an error. For
example:
```python
dataset = tf.data.Dataset.from_tensor_slices([1., 2., 0., 4.])
# Computing `tf.debugging.check_numerics(1. / 0.)` will raise an
# InvalidArgumentError.
dataset = dataset.map(lambda x: tf.debugging.check_numerics(1. / x, "error"))
# Using `ignore_errors()` will drop the element that causes an error.
dataset = dataset.apply(tf.data.experimental.ignore_errors())  # ==> {1., 0.5, 0.25}
```
Args:
log_warning: (Optional.) A 'tf.bool' scalar indicating whether ignored
errors should be logged to stderr. Defaults to 'False'.
Returns:
A `Dataset` transformation function, which can be passed to
`tf.data.Dataset.apply`.
"""
def _apply_fn(dataset):
return _IgnoreErrorsDataset(dataset, log_warning)
return _apply_fn
class _IgnoreErrorsDataset(dataset_ops.UnaryUnchangedStructureDataset):
"""A `Dataset` that silently ignores errors when computing its input."""
def __init__(self, input_dataset, log_warning):
"""See `Dataset.ignore_errors()` for details."""
self._input_dataset = input_dataset
variant_tensor = (
gen_experimental_dataset_ops.ignore_errors_dataset(
self._input_dataset._variant_tensor, # pylint: disable=protected-access
log_warning=log_warning,
**self._flat_structure))
super(_IgnoreErrorsDataset, self).__init__(input_dataset, variant_tensor)
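# Editor's note: a small smoke test of the transformation above. A hedged
# sketch, assuming a TensorFlow 2.x install with eager execution (the
# default); `ignore_errors` is the function defined in this module.
if __name__ == "__main__":
    import tensorflow as tf
    ds = tf.data.Dataset.from_tensor_slices([1., 2., 0., 4.])
    ds = ds.map(lambda x: tf.debugging.check_numerics(1. / x, "error"))
    ds = ds.apply(ignore_errors())  # drops the element where 1./0. -> inf
    print(list(ds.as_numpy_iterator()))  # expected: [1.0, 0.5, 0.25]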
| tensorflow/tensorflow | tensorflow/python/data/experimental/ops/error_ops.py | Python | apache-2.0 | 2,627 |
'''Test cases for connecting signals between threads'''
import unittest
from PySide.QtCore import QThread, QObject, SIGNAL, QCoreApplication
thread_run = False
class Source(QObject):
def __init__(self, *args):
QObject.__init__(self, *args)
def emit_sig(self):
self.emit(SIGNAL('source()'))
class Target(QObject):
def __init__(self, *args):
QObject.__init__(self, *args)
self.called = False
def myslot(self):
self.called = True
class ThreadJustConnects(QThread):
def __init__(self, source, *args):
QThread.__init__(self, *args)
self.source = source
self.target = Target()
def run(self):
global thread_run
thread_run = True
QObject.connect(self.source, SIGNAL('source()'), self.target.myslot)
while not self.target.called:
pass
class BasicConnection(unittest.TestCase):
def testEmitOutsideThread(self):
global thread_run
app = QCoreApplication([])
source = Source()
thread = ThreadJustConnects(source)
QObject.connect(thread, SIGNAL('finished()'), lambda: app.exit(0))
thread.start()
while not thread_run:
pass
source.emit_sig()
app.exec_()
thread.wait()
self.assert_(thread.target.called)
if __name__ == '__main__':
unittest.main()
| M4rtinK/pyside-android | tests/QtCore/qthread_signal_test.py | Python | lgpl-2.1 | 1,393 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .client import AssetServiceClient
__all__ = ("AssetServiceClient",)
| googleads/google-ads-python | google/ads/googleads/v9/services/services/asset_service/__init__.py | Python | apache-2.0 | 674 |
"""This module contains the class used to represent disassembly code."""
from mythril.ethereum import util
from mythril.disassembler import asm
from mythril.support.signatures import SignatureDB
from typing import Dict, List, Tuple
class Disassembly(object):
"""Disassembly class.
Stores bytecode, and its disassembly.
Additionally it will gather the following information on the existing functions in the disassembled code:
- function hashes
- function name to entry point mapping
- function entry point to function name mapping
"""
def __init__(self, code: str, enable_online_lookup: bool = False) -> None:
"""
:param code:
:param enable_online_lookup:
"""
self.bytecode = code
self.instruction_list = asm.disassemble(util.safe_decode(code))
self.func_hashes = [] # type: List[str]
self.function_name_to_address = {} # type: Dict[str, int]
self.address_to_function_name = {} # type: Dict[int, str]
self.enable_online_lookup = enable_online_lookup
self.assign_bytecode(bytecode=code)
def assign_bytecode(self, bytecode):
self.bytecode = bytecode
# open from default locations
# control if you want to have online signature hash lookups
signatures = SignatureDB(enable_online_lookup=self.enable_online_lookup)
self.instruction_list = asm.disassemble(util.safe_decode(bytecode))
# Need to take from PUSH1 to PUSH4 because solc seems to remove excess 0s at the beginning for optimizing
jump_table_indices = asm.find_op_code_sequence(
[("PUSH1", "PUSH2", "PUSH3", "PUSH4"), ("EQ",)], self.instruction_list
)
for index in jump_table_indices:
function_hash, jump_target, function_name = get_function_info(
index, self.instruction_list, signatures
)
self.func_hashes.append(function_hash)
if jump_target is not None and function_name is not None:
self.function_name_to_address[function_name] = jump_target
self.address_to_function_name[jump_target] = function_name
def get_easm(self):
"""
:return:
"""
return asm.instruction_list_to_easm(self.instruction_list)
def get_function_info(
index: int, instruction_list: list, signature_database: SignatureDB
) -> Tuple[str, int, str]:
"""Finds the function information for a call table entry Solidity uses the
first 4 bytes of the calldata to indicate which function the message call
should execute The generated code that directs execution to the correct
function looks like this:
- PUSH function_hash
- EQ
- PUSH entry_point
- JUMPI
This function takes an index that points to the first instruction, and from that finds out the function hash,
function entry and the function name.
:param index: Start of the entry pattern
:param instruction_list: Instruction list for the contract that is being analyzed
:param signature_database: Database used to map function hashes to their respective function names
:return: function hash, function entry point, function name
"""
# Append with missing 0s at the beginning
function_hash = "0x" + instruction_list[index]["argument"][2:].rjust(8, "0")
function_names = signature_database.get(function_hash)
if len(function_names) > 0:
function_name = function_names[0]
else:
function_name = "_function_" + function_hash
try:
offset = instruction_list[index + 2]["argument"]
entry_point = int(offset, 16)
except (KeyError, IndexError):
return function_hash, None, None
return function_hash, entry_point, function_name
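# Editor's note: an illustrative sketch of the dispatch pattern documented
# above, using the well-known ERC-20 transfer(address,uint256) selector
# 0xa9059cbb. The instruction dicts only imitate the shape produced by
# asm.disassemble, and the stub class stands in for a real SignatureDB.
if __name__ == "__main__":
    class _StubSignatureDB(object):
        def get(self, function_hash):
            return ["transfer(address,uint256)"]
    dispatch = [
        {"opcode": "PUSH4", "argument": "0xa9059cbb"},  # function hash
        {"opcode": "EQ"},
        {"opcode": "PUSH2", "argument": "0x00aa"},      # entry point
        {"opcode": "JUMPI"},
    ]
    print(get_function_info(0, dispatch, _StubSignatureDB()))
    # -> ('0xa9059cbb', 170, 'transfer(address,uint256)')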
| b-mueller/mythril | mythril/disassembler/disassembly.py | Python | mit | 3,790 |
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals, absolute_import
import logging
from io import BytesIO
from collections import OrderedDict
from requests import HTTPError
import itertools
from warnings import warn
from concurrent.futures import Future, as_completed
from PIL import Image
import numpy as np
import requests
from requests_futures.sessions import FuturesSession
from catpy.spatial import StackOrientation, CoordinateTransformer
from catpy.stacks import StackMirror, ProjectStack, TileIndex
from catpy.util import StrEnum
from catpy.compat import tqdm
logger = logging.getLogger()
DEFAULT_CACHE_ITEMS = 10
DEFAULT_CACHE_BYTES = None
THREADS = 10
SUPPORTED_CONTENT_TYPES = {"image/png", "image/jpeg"}
class Orientation3D(StrEnum):
NUMPY = "zyx"
C = "zyx"
ZYX = "zyx"
VIGRA = "xyz"
FORTRAN = "xyz"
XYZ = "xyz"
DEFAULT_3D_ORIENTATION = Orientation3D.NUMPY
class BrokenSliceHandling(StrEnum):
FILL = "fill"
# ABOVE = 'above'
# BELOW = 'below'
# CLOSEST = 'closest'
# INTERPOLATE = 'interpolate'
DEFAULT_BROKEN_SLICE_HANDLING = BrokenSliceHandling.FILL
class ROIMode(StrEnum):
STACK = "stack"
SCALED = "scaled"
PROJECT = "project"
DEFAULT_ROI_MODE = ROIMode.STACK
def response_to_array(response, pil_kwargs=None):
response.raise_for_status()
content_type = response.headers["Content-Type"]
if content_type in SUPPORTED_CONTENT_TYPES:
buffer = BytesIO(
response.content
) # opening directly from raw response doesn't work for JPEGs
raw_img = Image.open(buffer)
pil_kwargs = dict(pil_kwargs) if pil_kwargs else dict()
pil_kwargs["mode"] = pil_kwargs.get("mode", "L")
grey_img = raw_img.convert(**pil_kwargs)
return np.array(grey_img)
else:
raise NotImplementedError(
"Image fetching is only implemented for greyscale PNG and JPEG, not {}".format(
content_type.upper().split("/")[1]
)
)
def response_to_array_callback(session, response):
response.array = response_to_array(response)
def as_future(item):
if isinstance(item, Future):
return item
f = Future()
f.set_result(item)
return f
def fill_tiled_cuboid(min_tile_idx, max_tile_idx):
if not min_tile_idx.is_comparable(max_tile_idx):
raise ValueError("Tile indices are not comparable (different zoom or size)")
iters = [
range(getattr(min_tile_idx, name), getattr(max_tile_idx, name) + 1)
for name in ("depth", "row", "col")
]
return {
TileIndex(
depth,
row,
col,
min_tile_idx.zoom_level,
min_tile_idx.height,
min_tile_idx.width,
)
for depth, row, col in itertools.product(*iters)
}
def dict_subtract(d1, d2):
if set(d1) != set(d2):
raise ValueError("Dicts have different keys")
out = dict()
for key in d1.keys():
out[key] = d1[key] - d2[key]
return out
def is_valid_format_url(format_url):
"""
Ensure that the given URL has the required format keys for use as a format URL.
Parameters
----------
format_url : str
Returns
-------
bool
"""
components = [
"image_base",
"{depth}",
"{zoom_level}",
"{row}",
"{col}",
"file_extension",
]
return all("{" + component + "}" in format_url for component in components)
class TileCache(object):
def __init__(self, max_items=DEFAULT_CACHE_ITEMS, max_bytes=DEFAULT_CACHE_BYTES):
super(TileCache, self).__init__()
self.max_bytes = max_bytes
self.max_items = max_items
self._dict = OrderedDict()
@property
def current_bytes(self):
"""
Current total size, in bytes, of the cache's values
Returns
-------
int
"""
if self.max_bytes is None:
return -1
return sum(value.nbytes for value in self._dict.values())
def __setitem__(self, key, value):
"""
Append value to cache under the given key. If this causes the cache to break the size constraints, remove the
oldest items until it is valid again.
Parameters
----------
key : TileIndex
value : np.ndarray
"""
if key in self._dict:
del self._dict[key]
self._dict[key] = value
self._constrain_size()
def __getitem__(self, key):
value = self._dict.pop(key)
self._dict[key] = value
return value
def clear(self):
self._dict.clear()
def __contains__(self, item):
return item in self._dict
def __len__(self):
return len(self._dict)
def __iter__(self):
return iter(self._dict)
def _constrain_size(self):
if self.max_items is not None:
while len(self) > self.max_items:
self._dict.popitem(False)
if self.max_bytes is not None:
total_bytes = self.current_bytes
while total_bytes > self.max_bytes:
key, value = self._dict.popitem(False)
total_bytes -= value.nbytes
class ImageFetcher(object):
def __init__(
self,
stack,
output_orientation=DEFAULT_3D_ORIENTATION,
preferred_mirror=None,
timeout=1,
cache_items=DEFAULT_CACHE_ITEMS,
cache_bytes=DEFAULT_CACHE_BYTES,
broken_slice_handling=DEFAULT_BROKEN_SLICE_HANDLING,
cval=0,
auth=None,
):
"""
Parameters
----------
stack : catpy.stacks.Stack
output_orientation : str or Orientation3D
default Orientation3D.ZYX
preferred_mirror : int or str or StackMirror, optional
default None
timeout : float, optional
default 1
cache_items : int, optional
default 10
cache_bytes : int, optional
default None
broken_slice_handling : str or BrokenSliceHandling
default BrokenSliceHandling.FILL
cval : int, optional
default 0
auth : (str, str), optional
Tuple of (username, password) for basic HTTP authentication, to be used if the selected mirror has no
defined ``auth``. Default None
"""
self.stack = stack
self.depth_dimension = "z"
self.source_orientation = self.depth_dimension + "yx"
self.broken_slice_handling = BrokenSliceHandling(broken_slice_handling)
if self.broken_slice_handling == BrokenSliceHandling.FILL:
self.cval = cval
else:
self.cval = None
self.target_orientation = str(output_orientation)
self._dimension_mappings = self._map_dimensions()
self.timeout = timeout
self.coord_trans = CoordinateTransformer(
*[
getattr(self.stack, name, None)
for name in ("resolution", "translation", "orientation")
]
)
self._tile_cache = TileCache(cache_items, cache_bytes)
self._session = requests.Session()
self._auth = auth
self._mirror = None
self.mirror = preferred_mirror
@property
def auth(self):
return self._auth
@auth.setter
def auth(self, name_pass):
self._auth = name_pass
if self._mirror and not self._mirror.auth:
self._session.auth = name_pass
@property
def mirror(self):
if not self._mirror:
warn(
"No mirror set: falling back to {}, which may not be accessible."
"You might want to run set_fastest_mirror.".format(
self.stack.mirrors[0].title
)
)
m = self.stack.mirrors[0]
self._session.auth = m.auth or self.auth
return m
return self._mirror
@mirror.setter
def mirror(self, preferred_mirror):
"""
Set mirror by its string name, its position attribute, or the object itself
Parameters
----------
preferred_mirror : str or int or catpy.stacks.StackMirror
"""
if preferred_mirror is None:
self._mirror = None
elif isinstance(preferred_mirror, StackMirror):
if preferred_mirror not in self.stack.mirrors:
raise ValueError("Selected mirror is not in stack's mirrors")
self._mirror = preferred_mirror
else:
try:
pos = int(preferred_mirror)
matching_mirrors = [m for m in self.stack.mirrors if m.position == pos]
if not matching_mirrors:
warn(
"Preferred mirror position {} does not exist, choose from {}".format(
pos, ", ".join(str(m.position) for m in self.stack.mirrors)
)
)
return
elif len(matching_mirrors) > 1:
warn(
"More than one mirror found for position {}, picking {}".format(
pos, matching_mirrors[0].title
)
)
self._mirror = matching_mirrors[0]
except (ValueError, TypeError):
if isinstance(preferred_mirror, str):
matching_mirrors = [
m for m in self.stack.mirrors if m.title == preferred_mirror
]
if not matching_mirrors:
warn(
"Preferred mirror called {} does not exist, choose from {}".format(
preferred_mirror,
", ".join(m.title for m in self.stack.mirrors),
)
)
return
elif len(matching_mirrors) > 1:
warn(
"More than one mirror found for title {}, picking first".format(
preferred_mirror
)
)
self._mirror = matching_mirrors[0]
if self._mirror is not None and self._mirror.auth:
self._session.auth = self._mirror.auth
else:
self._session.auth = self.auth
def clear_cache(self):
self._tile_cache.clear()
def _map_dimensions(self):
"""
Find the indices of the target dimensions in the source dimension order
Returns
-------
tuple of int
Examples
--------
>>> self.source_orientation = 'xyz'
>>> self.target_orientation = 'yzx'
>>> self._map_dimensions()
(1, 2, 0)
"""
mapping = {dim: idx for idx, dim in enumerate(self.source_orientation)}
return tuple(mapping[dim] for dim in self.target_orientation)
def _reorient_volume_src_to_tgt(self, volume):
arr = np.asarray(volume)
if len(arr.shape) == 2:
arr = np.expand_dims(arr, 0)
if len(arr.shape) != 3:
raise ValueError("Unknown dimension of volume: should be 2D or 3D")
return np.moveaxis(arr, (0, 1, 2), self._dimension_mappings)
def _make_empty_tile(self, width, height=None):
height = height or width
tile = np.empty((height, width), dtype=np.uint8)
tile.fill(self.cval)
return tile
def _get_tile(self, tile_index):
"""
Get the tile from the cache, handle broken slices, or fetch.
Parameters
----------
tile_index : TileIndex
Returns
-------
Future
"""
try:
return self._tile_cache[tile_index]
except KeyError:
pass
if tile_index.depth in self.stack.broken_slices:
if (
self.broken_slice_handling == BrokenSliceHandling.FILL
and self.cval is not None
):
return self._make_empty_tile(tile_index.width, tile_index.height)
else:
raise NotImplementedError(
"'fill' with a non-None cval is the only implemented broken slice handling mode"
)
return self._fetch(tile_index)
def _roi_to_tiles(self, roi_src, zoom_level):
"""
Parameters
----------
roi_src : array-like
2 x 3 array where the rows are the half-closed interval of which pixels to select in the given dimension
and at the given zoom level, and the columns are the 3 dimensions in the source orientation
zoom_level : int
Zoom level at which roi is scaled and which images will be fetched
Returns
-------
set of TileIndex
Set of tile indices to fetch
dict of {str to dict of {str to int}}
{'min': {}, 'max': {}} with values {'x': int, 'y': int, 'z': int}
Pixel offsets of the minimum maximum pixels from the shallow-top-left corner of the tile which they are on
"""
closed_roi = np.array(roi_src)
closed_roi[1, :] -= 1
min_pixel = dict(zip(self.source_orientation, closed_roi[0, :]))
max_pixel = dict(zip(self.source_orientation, closed_roi[1, :]))
min_tile, min_offset = self.mirror.get_tile_index(min_pixel, zoom_level)
max_tile, max_offset = self.mirror.get_tile_index(max_pixel, zoom_level)
tile_indices = fill_tiled_cuboid(min_tile, max_tile)
src_inner_slicing = {"min": min_offset, "max": max_offset}
return tile_indices, src_inner_slicing
def _insert_tile_into_arr(
self, tile_index, src_tile, min_tile, max_tile, src_inner_slicing, out
):
min_col = tile_index.col == min_tile.col
max_col = tile_index.col == max_tile.col
min_row = tile_index.row == min_tile.row
max_row = tile_index.row == max_tile.row
tile_slicing_dict = {
"z": slice(None),
"y": slice(
src_inner_slicing["min"]["y"] if min_row else None,
src_inner_slicing["max"]["y"] + 1 if max_row else None,
),
"x": slice(
src_inner_slicing["min"]["x"] if min_col else None,
src_inner_slicing["max"]["x"] + 1 if max_col else None,
),
}
tile_slicing = tuple(
tile_slicing_dict[dim] for dim in self.source_orientation if dim in "xy"
)
tgt_tile = self._reorient_volume_src_to_tgt(src_tile[tile_slicing])
untrimmed_topleft = dict_subtract(tile_index.coords, min_tile.coords)
# location of the top left of the tile in out
topleft_dict = {
"z": untrimmed_topleft["z"], # we don't trim in Z
"y": 0
if min_row
else untrimmed_topleft["y"] - src_inner_slicing["min"]["y"],
"x": 0
if min_col
else untrimmed_topleft["x"] - src_inner_slicing["min"]["x"],
}
topleft = tuple(topleft_dict[dim] for dim in self.target_orientation)
arr_slice = tuple(slice(tl, tl + s) for tl, s in zip(topleft, tgt_tile.shape))
out[arr_slice] = tgt_tile
def _iter_tiles(self, tile_indices):
for tile_idx in tile_indices:
yield self._get_tile(tile_idx), tile_idx
def _assemble_tiles(self, tile_indices, src_inner_slicing, out):
"""
Parameters
----------
tile_indices : list of TileIndex
tiles to be got, reoriented, and compiled.
src_inner_slicing : dict of str to {dict of str to int}
{'min': {}, 'max': {}} with values {'x': int, 'y': int, 'z': int}
out : array-like
target-spaced, into which the tiles will be written
Returns
-------
np.ndarray
"""
min_tile = min(tile_indices, key=lambda idx: (idx.depth, idx.row, idx.col))
max_tile = max(tile_indices, key=lambda idx: (idx.depth, idx.row, idx.col))
tqdm_kwargs = {
"total": len(tile_indices),
"ncols": 80,
"unit": "tiles",
"desc": "Downloading tiles",
}
for src_tile, tile_index in tqdm(self._iter_tiles(tile_indices), **tqdm_kwargs):
self._tile_cache[tile_index] = src_tile
self._insert_tile_into_arr(
tile_index, src_tile, min_tile, max_tile, src_inner_slicing, out
)
return out
def _fetch(self, tile_index):
"""
Parameters
----------
tile_index : TileIndex
Returns
-------
Future of np.ndarray in source orientation
"""
url = self.mirror.generate_url(tile_index)
try:
return response_to_array(self._session.get(url, timeout=self.timeout))
except HTTPError as e:
if e.response.status_code == 404:
logger.warning(
"Tile not found at %s (error 404), returning blank tile", url
)
return self._make_empty_tile(tile_index.width, tile_index.height)
else:
raise
def _reorient_roi_tgt_to_src(self, roi_tgt):
return roi_tgt[:, self._dimension_mappings]
def roi_to_scaled(self, roi, roi_mode, zoom_level):
"""
Convert ROI into scaled stack space, keeping in the target dimension order.
Parameters
----------
roi : np.ndarray
ROI as 2x3 array containing half-closed bounds in the target dimension order
roi_mode : ROIMode or str
Whether the ROI is in "project", "stack", or "scaled" stack coordinates
zoom_level : float
The desired zoom level of the returned data
Returns
-------
np.ndarray
ROI as 2x3 array containing half-closed bounds in scaled stack space in the target dimension order
"""
roi_mode = ROIMode(roi_mode)
roi_tgt = np.asarray(roi)
if zoom_level != int(zoom_level):
raise NotImplementedError("Non-integer zoom level is not supported")
if roi_mode == ROIMode.PROJECT:
if not isinstance(self.stack, ProjectStack):
raise ValueError(
"ImageFetcher's stack is not related to a project, cannot use ROIMode.PROJECT"
)
if self.stack.orientation != StackOrientation.XY:
warn(
"Stack orientation differs from project: returned array's orientation will reflect"
"stack orientation, not project orientation"
)
roi_tgt = self.coord_trans.project_to_stack_array(
roi_tgt, dims=self.target_orientation
)
roi_mode = ROIMode.STACK
if roi_mode == ROIMode.STACK:
roi_tgt = self.coord_trans.stack_to_scaled_array(
roi_tgt, zoom_level, dims=self.target_orientation
)
roi_mode = ROIMode.SCALED
if roi_mode == ROIMode.SCALED:
roi_tgt = np.array(
[np.floor(roi_tgt[0, :]), np.ceil(roi_tgt[1, :])], dtype=int
)
else:
raise ValueError(
"Mismatch between roi_mode and roi"
) # shouldn't be possible
return roi_tgt
def get(self, roi, roi_mode=ROIMode.STACK, zoom_level=0, out=None):
"""
Fetch image data in the ROI in the dimension order of the target orientation.
ROI modes:
ROIMode.PROJECT ('project'):
- `roi` is given in project space
- `zoom_level` specifies the zoom level of returned data
- Returned array may overflow desired ROI by < 1 scaled pixel per side
- Data will be reoriented from stack space/orientation into the `target_orientation` without
going via project space: as such, for stacks with orientation other than 'xy', the output
data will not be in the same orientation as the project-spaced query.
ROIMode.STACK ('stack'):
- Default option
- `roi` is given in unscaled stack space (i.e. pixels at zoom level 0)
- `zoom_level` specifies the desired zoom level of returned data
- Returned array may overflow desired ROI by < 1 scaled pixel per side
- Equivalent to ROIMode.SCALED if `zoom_level` == 0
ROIMode.SCALED ('scaled'):
- `roi` is given in scaled stack space at the given zoom level.
- `zoom_level` specifies the zoom level of ROI and returned data
- Returned array treats `roi` as a half-closed interval: i.e. it should have shape np.diff(roi, axis=0)
Parameters
----------
roi : array-like
2 x 3 array where the columns are the 3 dimensions in the target orientation, and the rows are the upper
and lower bounds of the ROI.
roi_mode : ROIMode or str
Default ROIMode.STACK
zoom_level : int
out : array-like or None
Anything with array-like __setitem__ handling (e.g. np.ndarray, np.memmap, h5py.File, z5py.File), to which
the results will be written. Must have the same shape in as the ROI does in scaled pixels. If None
(default), will create a new np.ndarray.
Returns
-------
array-like
"""
roi_tgt = self.roi_to_scaled(roi, roi_mode, zoom_level)
roi_src = self._reorient_roi_tgt_to_src(roi_tgt)
tile_indices, inner_slicing_src = self._roi_to_tiles(roi_src, zoom_level)
if out is None:
out = np.zeros(np.diff(roi_tgt, axis=0).squeeze(), dtype=np.uint8)
return self._assemble_tiles(tile_indices, inner_slicing_src, out)
def get_project_space(self, roi, zoom_level=0, out=None):
"""
Equivalent to `get` method with roi_mode=ROIMode.PROJECT
"""
return self.get(roi, ROIMode.PROJECT, zoom_level, out)
def get_stack_space(self, roi, zoom_level=0, out=None):
"""
Equivalent to `get` method with roi_mode=ROIMode.STACK
"""
return self.get(roi, ROIMode.STACK, zoom_level, out)
def get_scaled_space(self, roi, zoom_level=0, out=None):
"""
Equivalent to `get` method with roi_mode=ROIMode.SCALED
"""
return self.get(roi, ROIMode.SCALED, zoom_level, out)
def set_fastest_mirror(self, reps=1, normalise_by_tile_size=True):
"""
Set the ImageFetcher to use the fastest accessible mirror.
Parameters
----------
reps : int
How many times to fetch the canary tile, for robustness
normalise_by_tile_size : bool
Whether to normalise the fetch time by the tile size used by this mirror (to get per-pixel response time)
"""
self.mirror = self.stack.get_fastest_mirror(
self.timeout, reps, normalise_by_tile_size
)
@classmethod
def from_stack_info(cls, stack_info, *args, **kwargs):
"""
Parameters
----------
stack_info : dict
args, kwargs
See __init__ for arguments beyond stack
Returns
-------
ImageFetcher
"""
return cls(ProjectStack.from_stack_info(stack_info), *args, **kwargs)
@classmethod
def from_catmaid(cls, catmaid, stack_id, *args, **kwargs):
"""
Parameters
----------
catmaid : catpy.AbstractCatmaidClient
stack_id : int
args, kwargs
See __init__ for arguments beyond stack
Returns
-------
ImageFetcher
"""
stack_info = catmaid.get((catmaid.project_id, "stack", stack_id, "info"))
return cls.from_stack_info(stack_info, *args, **kwargs)
class DummyResponse(object):
def __init__(self, array):
self.array = array
def as_future_response(array):
return as_future(DummyResponse(array))
class ThreadedImageFetcher(ImageFetcher):
def __init__(
self,
stack,
output_orientation=DEFAULT_3D_ORIENTATION,
preferred_mirror=None,
timeout=1,
cache_items=DEFAULT_CACHE_ITEMS,
cache_bytes=DEFAULT_CACHE_BYTES,
broken_slice_handling=DEFAULT_BROKEN_SLICE_HANDLING,
cval=0,
auth=None,
threads=THREADS,
):
"""
Note: for small numbers of tiles on fast internet connection, ImageFetcher may be faster
Parameters
----------
stack : catpy.stacks.Stack
output_orientation : str or Orientation3D
default Orientation3D.ZYX
preferred_mirror : int or str or StackMirror or None
default None
timeout : float
default 1
cache_items : int or None
default 10
cache_bytes : int or None
default None
broken_slice_handling : str or BrokenSliceHandling
default BrokenSliceHandling.FILL
cval : int
default 0
threads : int
default 10
"""
super(ThreadedImageFetcher, self).__init__(
stack,
output_orientation,
preferred_mirror,
timeout,
cache_items,
cache_bytes,
broken_slice_handling,
cval,
auth,
)
self._session = FuturesSession(session=self._session, max_workers=threads)
def _get_tile(self, tile_index):
"""
Get the tile from the cache, handle broken slices, or fetch.
Parameters
----------
tile_index : TileIndex
Returns
-------
Future
"""
try:
return as_future_response(self._tile_cache[tile_index])
except KeyError:
pass
if tile_index.depth in self.stack.broken_slices:
if (
self.broken_slice_handling == BrokenSliceHandling.FILL
and self.cval is not None
):
                # reuse the base helper so dtype and (height, width) order stay consistent
                tile = self._make_empty_tile(tile_index.width, tile_index.height)
                return as_future_response(tile)
else:
raise NotImplementedError(
"'fill' with a non-None cval is the only implemented broken slice handling mode"
)
return self._fetch(tile_index)
def _iter_tiles(self, tile_indices):
logger.info("Queuing requests, may take a few seconds...")
fetched_tiles = {
self._get_tile(tile_index): tile_index for tile_index in tile_indices
}
for tile_future in as_completed(fetched_tiles):
yield tile_future.result().array, fetched_tiles[tile_future]
def _fetch(self, tile_index):
"""
Parameters
----------
tile_index : TileIndex
Returns
-------
Future of np.ndarray in source orientation
"""
url = self.mirror.generate_url(tile_index)
return self._session.get(
url, timeout=self.timeout, background_callback=response_to_array_callback
)
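# Editor's note: a self-contained sketch of TileCache's LRU eviction, added
# for illustration; the string keys stand in for TileIndex objects and are
# not part of catpy's API.
if __name__ == "__main__":
    cache = TileCache(max_items=2)
    cache["a"] = np.zeros((2, 2), dtype=np.uint8)
    cache["b"] = np.ones((2, 2), dtype=np.uint8)
    _ = cache["a"]  # touching "a" makes it the most recently used entry
    cache["c"] = np.full((2, 2), 7, dtype=np.uint8)  # evicts "b", the oldest
    print(sorted(cache))  # -> ['a', 'c']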
| catmaid/catpy | catpy/image.py | Python | mit | 27,623 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for AnalyzeSyntax
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-language
# [START language_v1_generated_LanguageService_AnalyzeSyntax_sync]
from google.cloud import language_v1
def sample_analyze_syntax():
# Create a client
client = language_v1.LanguageServiceClient()
# Initialize request argument(s)
document = language_v1.Document()
document.content = "content_value"
request = language_v1.AnalyzeSyntaxRequest(
document=document,
)
# Make the request
response = client.analyze_syntax(request=request)
# Handle the response
print(response)
# [END language_v1_generated_LanguageService_AnalyzeSyntax_sync]
| googleapis/python-language | samples/generated_samples/language_v1_generated_language_service_analyze_syntax_sync.py | Python | apache-2.0 | 1,530 |
"""
WSGI config for reminders project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "reminders.settings")
application = get_wsgi_application()
| Silvian/Reminders-App | reminders/wsgi.py | Python | gpl-3.0 | 395 |
# Micro-benchmark of the right-shift operator: each iteration resets x to 1,
# then shifts it right twice (1 >> 1 == 0, and 0 >> 6 == 0).
for i in range(1, 100000000):
    x = 1
    x = x >> 1
    x = x >> 6
| damijanc/algorithms | algorithms/bitwise/rshift/bitwise/rshift.py | Python | apache-2.0 | 66 |
from io import BytesIO
from translate.convert import po2prop, test_convert
from translate.storage import po
class TestPO2Prop:
def po2prop(self, posource):
"""helper that converts po source to .properties source without requiring files"""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
convertor = po2prop.po2prop()
outputprop = convertor.convertstore(inputpo)
return outputprop
def merge2prop(
self,
propsource,
posource,
personality="java",
remove_untranslated=False,
encoding="utf-8",
):
"""helper that merges po translations to .properties source without requiring files"""
inputfile = BytesIO(posource.encode())
inputpo = po.pofile(inputfile)
templatefile = BytesIO(
propsource.encode() if isinstance(propsource, str) else propsource
)
# templateprop = properties.propfile(templatefile)
convertor = po2prop.reprop(
templatefile,
inputpo,
personality=personality,
remove_untranslated=remove_untranslated,
)
outputprop = convertor.convertstore()
print(outputprop)
return outputprop.decode(encoding)
def test_merging_simple(self):
"""check the simplest case of merging a translation"""
posource = """#: prop\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop=value\n"""
propexpected = """prop=waarde\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_untranslated(self):
"""check the simplest case of merging an untranslated unit"""
posource = """#: prop\nmsgid "value"\nmsgstr ""\n"""
proptemplate = """prop=value\n"""
propexpected = proptemplate
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_hard_newlines_preserved(self):
"""check that we preserver hard coded newlines at the start and end of sentence"""
posource = """#: prop\nmsgid "\\nvalue\\n\\n"\nmsgstr "\\nwaarde\\n\\n"\n"""
proptemplate = """prop=\\nvalue\\n\\n\n"""
propexpected = """prop=\\nwaarde\\n\\n\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_space_preservation(self):
"""check that we preserve any spacing in properties files when merging"""
posource = """#: prop\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop = value\n"""
propexpected = """prop = waarde\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_no_value(self):
"""check that we can handle keys without value"""
posource = """#: KEY\nmsgctxt "KEY"\nmsgid ""\nmsgstr ""\n"""
proptemplate = """KEY = \n"""
propexpected = """KEY = \n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_no_separator(self):
"""check that we can handle keys without separator"""
posource = """#: KEY\nmsgctxt "KEY"\nmsgid ""\nmsgstr ""\n"""
proptemplate = """KEY\n"""
propexpected = """KEY\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_blank_entries(self):
"""check that we can correctly merge entries that are blank in the template"""
posource = r'''#: accesskey-accept
msgid ""
"_: accesskey-accept\n"
""
msgstr ""'''
proptemplate = "accesskey-accept=\n"
propexpected = "accesskey-accept=\n"
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_merging_fuzzy(self):
"""check merging a fuzzy translation"""
posource = """#: prop\n#, fuzzy\nmsgid "value"\nmsgstr "waarde"\n"""
proptemplate = """prop=value\n"""
propexpected = """prop=value\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_mozilla_accesskeys(self):
"""check merging Mozilla accesskeys"""
posource = """#: prop.label prop.accesskey
msgid "&Value"
msgstr "&Waarde"
#: key.label key.accesskey
msgid "&Key"
msgstr "&Sleutel"
"""
proptemplate = """prop.label=Value
prop.accesskey=V
key.label=Key
key.accesskey=K
"""
propexpected = """prop.label=Waarde
prop.accesskey=W
key.label=Sleutel
key.accesskey=S
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_mozilla_accesskeys_missing_accesskey(self):
"""check merging Mozilla accesskeys"""
posource = """#: prop.label prop.accesskey
# No accesskey because we forgot or language doesn't do accesskeys
msgid "&Value"
msgstr "Waarde"
"""
proptemplate = """prop.label=Value
prop.accesskey=V
"""
propexpected = """prop.label=Waarde
prop.accesskey=V
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_mozilla_margin_whitespace(self):
"""Check handling of Mozilla leading and trailing spaces"""
posource = """#: sepAnd
msgid " and "
msgstr " و "
#: sepComma
msgid ", "
msgstr "، "
"""
proptemplate = r"""sepAnd = \u0020and\u0020
sepComma = ,\u20
"""
propexpected = """sepAnd = \\u0020و\\u0020
sepComma = ،\\u0020
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_mozilla_all_whitespace(self):
"""
Check for all white-space Mozilla hack, remove when the corresponding code
is removed.
"""
posource = """#: accesskey-accept
msgctxt "accesskey-accept"
msgid ""
msgstr " "
#: accesskey-help
msgid "H"
msgstr "م"
"""
proptemplate = """accesskey-accept=
accesskey-help=H
"""
propexpected = """accesskey-accept=
accesskey-help=م
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
print(propfile)
assert propfile == propexpected
def test_merging_propertyless_template(self):
"""check that when merging with a template with no property values that we copy the template"""
posource = ""
proptemplate = "# A comment\n"
propexpected = proptemplate
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_delimiters(self):
"""test that we handle different delimiters."""
posource = """#: prop\nmsgid "value"\nmsgstr "translated"\n"""
proptemplate = """prop %s value\n"""
propexpected = """prop %s translated\n"""
for delim in ["=", ":", ""]:
print("testing '%s' as delimiter" % delim)
propfile = self.merge2prop(proptemplate % delim, posource)
print(propfile)
assert propfile == propexpected % delim
def test_empty_value(self):
"""test that we handle an value in the template"""
posource = """#: key
msgctxt "key"
msgid ""
msgstr "translated"
"""
proptemplate = """key\n"""
propexpected = """key = translated\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected
def test_personalities(self):
"""test that we output correctly for Java and Mozilla style property files. Mozilla uses Unicode, while Java uses escaped Unicode"""
posource = """#: prop\nmsgid "value"\nmsgstr "ṽḁḽṻḝ"\n"""
proptemplate = """prop = value\n"""
propexpectedjava = """prop = \\u1E7D\\u1E01\\u1E3D\\u1E7B\\u1E1D\n"""
propfile = self.merge2prop(proptemplate, posource)
assert propfile == propexpectedjava
propexpectedmozilla = """prop = ṽḁḽṻḝ\n"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
assert propfile == propexpectedmozilla
proptemplate = """prop = value\n""".encode("utf-16")
propexpectedskype = """prop = ṽḁḽṻḝ\n"""
propfile = self.merge2prop(
proptemplate, posource, personality="skype", encoding="utf-16"
)
assert propfile == propexpectedskype
proptemplate = """"prop" = "value";\n""".encode("utf-16")
propexpectedstrings = """"prop" = "ṽḁḽṻḝ";\n"""
propfile = self.merge2prop(
proptemplate, posource, personality="strings", encoding="utf-16"
)
assert propfile == propexpectedstrings
def test_merging_untranslated_simple(self):
"""check merging untranslated entries in two 1) use English 2) drop key, value pair"""
posource = """#: prop\nmsgid "value"\nmsgstr ""\n"""
proptemplate = """prop = value\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == proptemplate # We use the existing values
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
assert propfile == "" # We drop the key
def test_merging_untranslated_multiline(self):
"""check merging untranslated entries with multiline values"""
posource = """#: prop\nmsgid "value1 value2"\nmsgstr ""\n"""
proptemplate = """prop = value1 \\
value2
"""
propexpected = """prop = value1 value2\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected # We use the existing values
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
assert propfile == "" # We drop the key
def test_merging_untranslated_multiline2(self):
"""check merging untranslated entries with multiline values"""
posource = """
#: legal_text_and_links3
msgid "By using {{clientShortname}} you agree to the {{terms_of_use}} and {{privacy_notice}}."
msgstr ""
"""
proptemplate = r"""legal_text_and_links3=By using {{clientShortname}} you agree to the {{terms_of_use}} \\
and {{privacy_notice}}.
"""
propexpected = """legal_text_and_links3=By using {{clientShortname}} you agree to the {{terms_of_use}} and {{privacy_notice}}.\n"""
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected # We use the existing values
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
assert propfile == "" # We drop the key
def test_merging_untranslated_comments(self):
"""check merging untranslated entries with comments"""
posource = """#: prop\nmsgid "value"\nmsgstr ""\n"""
proptemplate = """# A comment\nprop = value\n"""
propexpected = "# A comment\nprop = value\n"
propfile = self.merge2prop(proptemplate, posource)
print(propfile)
assert propfile == propexpected # We use the existing values
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
# FIXME ideally we should drop the comment as well as the unit
assert propfile == "# A comment\n" # We drop the key
def test_merging_untranslated_unchanged(self):
"""check removing untranslated entries but keeping unchanged ones"""
posource = """#: prop
msgid "value"
msgstr ""
#: prop2
msgid "value2"
msgstr "value2"
"""
proptemplate = """prop=value
prop2=value2
"""
propexpected = """prop2=value2\n"""
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
assert propfile == propexpected
def test_merging_blank(self):
"""We always merge in a blank translation for a blank source"""
posource = """#: prop
msgctxt "prop"
msgid ""
msgstr "value"
#: prop2
msgctxt "prop2"
msgid ""
msgstr ""
"""
proptemplate = """prop=
prop2=
"""
propexpected = """prop=value
prop2=
"""
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=False)
print(propfile)
assert propfile == propexpected
propfile = self.merge2prop(proptemplate, posource, remove_untranslated=True)
print(propfile)
assert propfile == propexpected
def test_gaia_plurals(self):
"""Test back conversion of gaia plural units."""
proptemplate = """
message-multiedit-header={[ plural(n) ]}
message-multiedit-header[zero]=Edit
message-multiedit-header[one]={{ n }} selected
message-multiedit-header[two]={{ n }} selected
message-multiedit-header[few]={{ n }} selected
message-multiedit-header[many]={{ n }} selected
message-multiedit-header[other]={{ n }} selected
"""
posource = r"""#: message-multiedit-header[zero]
msgctxt "message-multiedit-header[zero]"
msgid "Edit"
msgstr "Redigeer"
#: message-multiedit-header
msgctxt "message-multiedit-header"
msgid "Edit"
msgid_plural "{{ n }} selected"
msgstr[0] "xxxRedigeerxxx"
msgstr[1] "{{ n }} gekies"
msgstr[2] "{{ n }} gekies"
msgstr[3] "{{ n }} gekies"
msgstr[4] "{{ n }} gekies"
msgstr[5] "{{ n }} gekies"
"""
propexpected = """
message-multiedit-header={[ plural(n) ]}
message-multiedit-header[zero]=Redigeer
message-multiedit-header[one]={{ n }} gekies
message-multiedit-header[two]={{ n }} gekies
message-multiedit-header[few]={{ n }} gekies
message-multiedit-header[many]={{ n }} gekies
message-multiedit-header[other]={{ n }} gekies
"""
propfile = self.merge2prop(proptemplate, posource, personality="gaia")
assert propfile == propexpected
def test_duplicates(self):
"""Test back conversion of properties with duplicate units."""
# Test entries with same key and value.
proptemplate = """
key=value
key=value
"""
posource = r"""#: key
msgid "value"
msgstr "Waarde"
"""
propexpected = """
key=Waarde
key=Waarde
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
assert propfile == propexpected
# Test entries with same key and different value, and single
# corresponding entry in PO.
proptemplate = """
key=value
key=another value
"""
posource = r"""#: key
msgid "value"
msgstr "Waarde"
"""
propexpected = """
key=Waarde
key=Waarde
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
assert propfile == propexpected
# Test entries with same key and different value, and two different
# corresponding entries in PO.
proptemplate = """
key=value
key=another value
"""
posource = r"""#: key
msgid "value"
msgstr "Valor"
#: key
msgid "another value"
msgstr "Outro valor"
"""
propexpected = """
key=Valor
key=Valor
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
assert propfile == propexpected
# Test entries with same key and different value.
proptemplate = """
key1=value
key2=value
"""
posource = r"""#: key1
msgctxt "key1"
msgid "value"
msgstr "Waarde"
#: key2
msgctxt "key2"
msgid "value"
msgstr "Waarde"
"""
propexpected = """
key1=Waarde
key2=Waarde
"""
propfile = self.merge2prop(proptemplate, posource, personality="mozilla")
assert propfile == propexpected
def test_gwt_plurals(self):
"""Test back conversion of gwt plural units."""
proptemplate = """
message-multiedit-header={0,number} selected
message-multiedit-header[none]=Edit
message-multiedit-header[one]={0,number} selected
message-multiedit-header[two]={0,number} selected
message-multiedit-header[few]={0,number} selected
message-multiedit-header[many]={0,number} selected
"""
posource = r"""#: message-multiedit-header
msgctxt "message-multiedit-header"
msgid "Edit"
msgid_plural "{0,number} selected"
msgstr[0] "Redigeer"
msgstr[1] "{0,number} gekies"
msgstr[2] "{0,number} gekies"
msgstr[3] "{0,number} gekies"
msgstr[4] "{0,number} gekies"
msgstr[5] "{0,number} gekies"
"""
propexpected = """
message-multiedit-header={0,number} gekies
message-multiedit-header[none]=Redigeer
message-multiedit-header[one]={0,number} gekies
message-multiedit-header[two]={0,number} gekies
message-multiedit-header[few]={0,number} gekies
message-multiedit-header[many]={0,number} gekies
"""
propfile = self.merge2prop(proptemplate, posource, personality="gwt")
assert propfile == propexpected
class TestPO2PropCommand(test_convert.TestConvertCommand, TestPO2Prop):
"""Tests running actual po2prop commands on files"""
convertmodule = po2prop
defaultoptions = {"progress": "none"}
def test_help(self, capsys):
"""tests getting help"""
options = super().test_help(capsys)
options = self.help_check(options, "-t TEMPLATE, --template=TEMPLATE")
options = self.help_check(options, "--fuzzy")
options = self.help_check(options, "--threshold=PERCENT")
options = self.help_check(options, "--personality=TYPE")
options = self.help_check(options, "--encoding=ENCODING")
options = self.help_check(options, "--removeuntranslated")
options = self.help_check(options, "--nofuzzy", last=True)
| miurahr/translate | translate/convert/test_po2prop.py | Python | gpl-2.0 | 17,884 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 4 12:22:42 2020
@author: kpmurphy
"""
# https://gist.github.com/FedeMiorelli/640bbc66b2038a14802729e609abfe89
# This script registers the "turbo" colormap to matplotlib, and the reversed version as "turbo_r"
# Reference: https://ai.googleblog.com/2019/08/turbo-improved-rainbow-colormap-for.html
import superimport
import numpy as np
import matplotlib.pyplot as plt
turbo_colormap_data = np.array(
[[0.18995,0.07176,0.23217],
[0.19483,0.08339,0.26149],
[0.19956,0.09498,0.29024],
[0.20415,0.10652,0.31844],
[0.20860,0.11802,0.34607],
[0.21291,0.12947,0.37314],
[0.21708,0.14087,0.39964],
[0.22111,0.15223,0.42558],
[0.22500,0.16354,0.45096],
[0.22875,0.17481,0.47578],
[0.23236,0.18603,0.50004],
[0.23582,0.19720,0.52373],
[0.23915,0.20833,0.54686],
[0.24234,0.21941,0.56942],
[0.24539,0.23044,0.59142],
[0.24830,0.24143,0.61286],
[0.25107,0.25237,0.63374],
[0.25369,0.26327,0.65406],
[0.25618,0.27412,0.67381],
[0.25853,0.28492,0.69300],
[0.26074,0.29568,0.71162],
[0.26280,0.30639,0.72968],
[0.26473,0.31706,0.74718],
[0.26652,0.32768,0.76412],
[0.26816,0.33825,0.78050],
[0.26967,0.34878,0.79631],
[0.27103,0.35926,0.81156],
[0.27226,0.36970,0.82624],
[0.27334,0.38008,0.84037],
[0.27429,0.39043,0.85393],
[0.27509,0.40072,0.86692],
[0.27576,0.41097,0.87936],
[0.27628,0.42118,0.89123],
[0.27667,0.43134,0.90254],
[0.27691,0.44145,0.91328],
[0.27701,0.45152,0.92347],
[0.27698,0.46153,0.93309],
[0.27680,0.47151,0.94214],
[0.27648,0.48144,0.95064],
[0.27603,0.49132,0.95857],
[0.27543,0.50115,0.96594],
[0.27469,0.51094,0.97275],
[0.27381,0.52069,0.97899],
[0.27273,0.53040,0.98461],
[0.27106,0.54015,0.98930],
[0.26878,0.54995,0.99303],
[0.26592,0.55979,0.99583],
[0.26252,0.56967,0.99773],
[0.25862,0.57958,0.99876],
[0.25425,0.58950,0.99896],
[0.24946,0.59943,0.99835],
[0.24427,0.60937,0.99697],
[0.23874,0.61931,0.99485],
[0.23288,0.62923,0.99202],
[0.22676,0.63913,0.98851],
[0.22039,0.64901,0.98436],
[0.21382,0.65886,0.97959],
[0.20708,0.66866,0.97423],
[0.20021,0.67842,0.96833],
[0.19326,0.68812,0.96190],
[0.18625,0.69775,0.95498],
[0.17923,0.70732,0.94761],
[0.17223,0.71680,0.93981],
[0.16529,0.72620,0.93161],
[0.15844,0.73551,0.92305],
[0.15173,0.74472,0.91416],
[0.14519,0.75381,0.90496],
[0.13886,0.76279,0.89550],
[0.13278,0.77165,0.88580],
[0.12698,0.78037,0.87590],
[0.12151,0.78896,0.86581],
[0.11639,0.79740,0.85559],
[0.11167,0.80569,0.84525],
[0.10738,0.81381,0.83484],
[0.10357,0.82177,0.82437],
[0.10026,0.82955,0.81389],
[0.09750,0.83714,0.80342],
[0.09532,0.84455,0.79299],
[0.09377,0.85175,0.78264],
[0.09287,0.85875,0.77240],
[0.09267,0.86554,0.76230],
[0.09320,0.87211,0.75237],
[0.09451,0.87844,0.74265],
[0.09662,0.88454,0.73316],
[0.09958,0.89040,0.72393],
[0.10342,0.89600,0.71500],
[0.10815,0.90142,0.70599],
[0.11374,0.90673,0.69651],
[0.12014,0.91193,0.68660],
[0.12733,0.91701,0.67627],
[0.13526,0.92197,0.66556],
[0.14391,0.92680,0.65448],
[0.15323,0.93151,0.64308],
[0.16319,0.93609,0.63137],
[0.17377,0.94053,0.61938],
[0.18491,0.94484,0.60713],
[0.19659,0.94901,0.59466],
[0.20877,0.95304,0.58199],
[0.22142,0.95692,0.56914],
[0.23449,0.96065,0.55614],
[0.24797,0.96423,0.54303],
[0.26180,0.96765,0.52981],
[0.27597,0.97092,0.51653],
[0.29042,0.97403,0.50321],
[0.30513,0.97697,0.48987],
[0.32006,0.97974,0.47654],
[0.33517,0.98234,0.46325],
[0.35043,0.98477,0.45002],
[0.36581,0.98702,0.43688],
[0.38127,0.98909,0.42386],
[0.39678,0.99098,0.41098],
[0.41229,0.99268,0.39826],
[0.42778,0.99419,0.38575],
[0.44321,0.99551,0.37345],
[0.45854,0.99663,0.36140],
[0.47375,0.99755,0.34963],
[0.48879,0.99828,0.33816],
[0.50362,0.99879,0.32701],
[0.51822,0.99910,0.31622],
[0.53255,0.99919,0.30581],
[0.54658,0.99907,0.29581],
[0.56026,0.99873,0.28623],
[0.57357,0.99817,0.27712],
[0.58646,0.99739,0.26849],
[0.59891,0.99638,0.26038],
[0.61088,0.99514,0.25280],
[0.62233,0.99366,0.24579],
[0.63323,0.99195,0.23937],
[0.64362,0.98999,0.23356],
[0.65394,0.98775,0.22835],
[0.66428,0.98524,0.22370],
[0.67462,0.98246,0.21960],
[0.68494,0.97941,0.21602],
[0.69525,0.97610,0.21294],
[0.70553,0.97255,0.21032],
[0.71577,0.96875,0.20815],
[0.72596,0.96470,0.20640],
[0.73610,0.96043,0.20504],
[0.74617,0.95593,0.20406],
[0.75617,0.95121,0.20343],
[0.76608,0.94627,0.20311],
[0.77591,0.94113,0.20310],
[0.78563,0.93579,0.20336],
[0.79524,0.93025,0.20386],
[0.80473,0.92452,0.20459],
[0.81410,0.91861,0.20552],
[0.82333,0.91253,0.20663],
[0.83241,0.90627,0.20788],
[0.84133,0.89986,0.20926],
[0.85010,0.89328,0.21074],
[0.85868,0.88655,0.21230],
[0.86709,0.87968,0.21391],
[0.87530,0.87267,0.21555],
[0.88331,0.86553,0.21719],
[0.89112,0.85826,0.21880],
[0.89870,0.85087,0.22038],
[0.90605,0.84337,0.22188],
[0.91317,0.83576,0.22328],
[0.92004,0.82806,0.22456],
[0.92666,0.82025,0.22570],
[0.93301,0.81236,0.22667],
[0.93909,0.80439,0.22744],
[0.94489,0.79634,0.22800],
[0.95039,0.78823,0.22831],
[0.95560,0.78005,0.22836],
[0.96049,0.77181,0.22811],
[0.96507,0.76352,0.22754],
[0.96931,0.75519,0.22663],
[0.97323,0.74682,0.22536],
[0.97679,0.73842,0.22369],
[0.98000,0.73000,0.22161],
[0.98289,0.72140,0.21918],
[0.98549,0.71250,0.21650],
[0.98781,0.70330,0.21358],
[0.98986,0.69382,0.21043],
[0.99163,0.68408,0.20706],
[0.99314,0.67408,0.20348],
[0.99438,0.66386,0.19971],
[0.99535,0.65341,0.19577],
[0.99607,0.64277,0.19165],
[0.99654,0.63193,0.18738],
[0.99675,0.62093,0.18297],
[0.99672,0.60977,0.17842],
[0.99644,0.59846,0.17376],
[0.99593,0.58703,0.16899],
[0.99517,0.57549,0.16412],
[0.99419,0.56386,0.15918],
[0.99297,0.55214,0.15417],
[0.99153,0.54036,0.14910],
[0.98987,0.52854,0.14398],
[0.98799,0.51667,0.13883],
[0.98590,0.50479,0.13367],
[0.98360,0.49291,0.12849],
[0.98108,0.48104,0.12332],
[0.97837,0.46920,0.11817],
[0.97545,0.45740,0.11305],
[0.97234,0.44565,0.10797],
[0.96904,0.43399,0.10294],
[0.96555,0.42241,0.09798],
[0.96187,0.41093,0.09310],
[0.95801,0.39958,0.08831],
[0.95398,0.38836,0.08362],
[0.94977,0.37729,0.07905],
[0.94538,0.36638,0.07461],
[0.94084,0.35566,0.07031],
[0.93612,0.34513,0.06616],
[0.93125,0.33482,0.06218],
[0.92623,0.32473,0.05837],
[0.92105,0.31489,0.05475],
[0.91572,0.30530,0.05134],
[0.91024,0.29599,0.04814],
[0.90463,0.28696,0.04516],
[0.89888,0.27824,0.04243],
[0.89298,0.26981,0.03993],
[0.88691,0.26152,0.03753],
[0.88066,0.25334,0.03521],
[0.87422,0.24526,0.03297],
[0.86760,0.23730,0.03082],
[0.86079,0.22945,0.02875],
[0.85380,0.22170,0.02677],
[0.84662,0.21407,0.02487],
[0.83926,0.20654,0.02305],
[0.83172,0.19912,0.02131],
[0.82399,0.19182,0.01966],
[0.81608,0.18462,0.01809],
[0.80799,0.17753,0.01660],
[0.79971,0.17055,0.01520],
[0.79125,0.16368,0.01387],
[0.78260,0.15693,0.01264],
[0.77377,0.15028,0.01148],
[0.76476,0.14374,0.01041],
[0.75556,0.13731,0.00942],
[0.74617,0.13098,0.00851],
[0.73661,0.12477,0.00769],
[0.72686,0.11867,0.00695],
[0.71692,0.11268,0.00629],
[0.70680,0.10680,0.00571],
[0.69650,0.10102,0.00522],
[0.68602,0.09536,0.00481],
[0.67535,0.08980,0.00449],
[0.66449,0.08436,0.00424],
[0.65345,0.07902,0.00408],
[0.64223,0.07380,0.00401],
[0.63082,0.06868,0.00401],
[0.61923,0.06367,0.00410],
[0.60746,0.05878,0.00427],
[0.59550,0.05399,0.00453],
[0.58336,0.04931,0.00486],
[0.57103,0.04474,0.00529],
[0.55852,0.04028,0.00579],
[0.54583,0.03593,0.00638],
[0.53295,0.03169,0.00705],
[0.51989,0.02756,0.00780],
[0.50664,0.02354,0.00863],
[0.49321,0.01963,0.00955],
[0.47960,0.01583,0.01055]])
def RGBToPyCmap(rgbdata):
    """Convert an (N, 3) array of RGB values into the segment-data dict
    expected by matplotlib's LinearSegmentedColormap / plt.register_cmap."""
nsteps = rgbdata.shape[0]
stepaxis = np.linspace(0, 1, nsteps)
rdata=[]; gdata=[]; bdata=[]
for istep in range(nsteps):
r = rgbdata[istep,0]
g = rgbdata[istep,1]
b = rgbdata[istep,2]
rdata.append((stepaxis[istep], r, r))
gdata.append((stepaxis[istep], g, g))
bdata.append((stepaxis[istep], b, b))
mpl_data = {'red': rdata,
'green': gdata,
'blue': bdata}
return mpl_data
mpl_data = RGBToPyCmap(turbo_colormap_data)
plt.register_cmap(name='turbo', data=mpl_data, lut=turbo_colormap_data.shape[0])
mpl_data_r = RGBToPyCmap(turbo_colormap_data[::-1,:])
plt.register_cmap(name='turbo_r', data=mpl_data_r, lut=turbo_colormap_data.shape[0])
def demo():
XX, YY = np.meshgrid(np.linspace(0,1,100), np.linspace(0,1,100))
ZZ = np.sqrt(XX**2 + YY**2)
plt.figure()
plt.imshow(ZZ, cmap='turbo')
plt.colorbar()
plt.figure()
plt.imshow(ZZ, cmap='turbo_r')
plt.colorbar()
plt.show()
demo() | probml/pyprobml | scripts/colormap_turbo.py | Python | mit | 14,343 |
"""
WSGI config for panda_prospecting project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "panda_prospecting.settings")
application = get_wsgi_application()
| helanan/Panda_Prospecting | panda_prospecting/panda_prospecting/wsgi.py | Python | mit | 412 |
#!../bin/python
"""
HipparchiaSQLoader: archive and restore a database of Greek and Latin texts
Copyright: E Gunderson 2016-18
License: GNU GENERAL PUBLIC LICENSE 3
(see LICENSE in the top level directory of the distribution)
"""
import gzip
import io
import os
import pickle
from multiprocessing import Manager, Process, freeze_support
import psycopg2
from dbhelpers import *
from dbhelpers import MPCounter
config = configparser.ConfigParser()
config.read('config.ini')
datadir = config['io']['datadir'] + 'sqldumps/'
schemadir = config['io']['schemadir']
vectors = config['options']['archivevectors']
def retrievedb(location):
"""
decompress the data and get the db ready for reloading
	:param location: path to the gzipped pickle file
:return:
"""
f = gzip.open(location, 'rb')
dbcontents = pickle.load(f)
f.close()
return dbcontents
def resetdb(tablename, templatetablename, templatefilename, cursor):
"""
empty out a db and get it ready for reloaded data
build the set of queries from the output of 'pgdump -c -s -t'
	:param tablename: name of the table to recreate
	:param templatetablename: name of the table used in the schema template
	:param templatefilename: path to the schema file
:param cursor:
:return:
"""
querylines = loadschemafromfile(tablename, templatetablename, templatefilename)
querylines = [q for q in querylines if q and re.search(r'^--', q) is None]
querylines = [re.sub(r'(ALTER|DROP) (TABLE|INDEX) ', r'\1 \2 IF EXISTS ', q) for q in querylines]
corequery = [q for q in querylines
if re.search(r',$', q)
or re.search(r'CREATE TABLE', q)
or re.search(r'[^;]$', q)
or q == ');']
othersql = list()
for q in querylines:
if re.search(r';$', q) and q not in corequery:
othersql.append(q)
else:
othersql.append('padding')
corequery = ' '.join(corequery)
tablecreated = False
for q in othersql:
if q != 'padding':
cursor.execute(q)
elif tablecreated is False:
cursor.execute(corequery)
tablecreated = True
else:
pass
return
def reloadwhoeldb(dbcontents, dbconnection):
"""
the pickle package itself should tell you all you need to know to call reloadoneline() repeatedly
note that the dbname is stored in the file and need not be derived from the filename itself
example:
struct [('index', "integer DEFAULT nextval('public.gr0001'::regclass) NOT NULL"), ('wkuniversalid', 'character varying(10)'), ('level_05_value', 'character varying(64)'), ('level_04_value', 'character varying(64)'), ('level_03_value', 'character varying(64)'), ('level_02_value', 'character varying(64)'), ('level_01_value', 'character varying(64)'), ('level_00_value', 'character varying(64)'), ('marked_up_line', 'text'), ('accented_line', 'text'), ('stripped_line', 'text'), ('hyphenated_words', 'character varying(128)'), ('annotations', 'character varying(256)')]
data[:4] [(1, 'lt9505w001', '-1', '-1', '-1', '-1', '-1', 't', ' <span class="hmu_title">INCERTI NOMINIS RELIQVIAE</span>', 'incerti nominis reliqviae', 'incerti nominis reliquiae', '', ''), (2, 'lt9505w001', '-1', '-1', '-1', '-1', '-1', '1', '<hmu_metadata_notes value="Serv. Dan. &3A.& 11.160" />uictrices', 'uictrices', 'uictrices', '', ''), (3, 'lt9505w001', '-1', '-1', '-1', '-1', '-1', '2', '<hmu_metadata_notes value="Quint. &3Inst.& 5.11.24" />Quis⟨nam⟩ íste torquens fáciem planipedís senis? <hmu_standalone_endofpage />', 'quisnam íste torquens fáciem planipedís senis ', 'quisnam iste torquens faciem planipedis senis ', '', ''), (4, 'lt9505w002', '-1', '-1', '-1', '-1', '-1', 't', ' <span class="hmu_title">INCERTI NOMINIS RELIQVIAE</span>', 'incerti nominis reliqviae', 'incerti nominis reliquiae', '', '')]
:param dbcontents:
:return:
"""
dbcursor = dbconnection.cursor()
# there are tabs in the greek dictionary: you can't use '\t' as the separator
# similarly you can't use a high-value junk separator because you are not allowed to use that many bits...
# 7 = BEL (bell); 30 = RS (record separator)
separator = chr(30)
table = dbcontents['dbname']
structure = dbcontents['structure']
data = dbcontents['data']
# the problems
# [z] BSD will only reload *some* of the data...
# [a] *_lemmata
# psycopg2.DataError: malformed array literal: "['ζῳοτύπον', 'ζωιοτύποϲ']"
# DETAIL: "[" must introduce explicitly-specified array dimensions.
# [b] *_morphology
# [1] psycopg2.DataError: missing data for column "xrefs"
# [2] psycopg2.DataError: value too long for type character varying(64)
# [c] authors and works
# psycopg2.DataError: invalid input syntax for integer: "None"
# CONTEXT: COPY works, line 1, column converted_date: "None"
# [d] unclear why ATM, but latin_dictionary will turn up empty...
tests = ['lemmata', 'morphology', 'authors', 'works', 'latin_dictionary']
avoidcopyfrom = [t for t in tests if t in table]
if config['io']['slowload'] == 'yes':
avoidcopyfrom = True
if not avoidcopyfrom:
columns = [s[0] for s in structure]
stream = generatecopystream(data, separator=separator)
dbcursor.copy_from(stream, table, sep=separator, columns=columns)
else:
dbconnection.setdefaultisolation()
count = 1
for line in data:
count += 1
# 32k is the limit?
if count % 5000 == 0:
dbconnection.commit()
if count % 200000 == 0:
print('\t\tlongdb: {t} [ @ line {c}]'.format(t=table, c=count))
reloadoneline(line, table, structure, dbcursor)
dbconnection.commit()
return
def generatecopystream(queryvaluetuples, separator='\t'):
"""
postgres inserts much faster via "COPY FROM"
prepare data to match the psychopg2.copy_from() interface
copy_from(file, table, sep='\t', null='\\N', size=8192, columns=None)
Read data from the file-like object file appending them to the table named table.
see the example at http://initd.org/psycopg/docs/cursor.html:
f = StringIO("42\tfoo\n74\tbar\n")
cur.copy_from(f, 'test', columns=('num', 'data'))
:param queryvaluetuples:
:return:
"""
copystream = io.StringIO()
for t in queryvaluetuples:
copystream.write(separator.join([str(x) for x in t]) + '\n')
copystream.seek(0)
return copystream
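# A minimal sketch of the flow described above (table/column names hypothetical):
#   stream = generatecopystream([(1, 'foo'), (2, 'bar')], separator='\t')
#   cursor.copy_from(stream, 'test', sep='\t', columns=('num', 'data'))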
def reloadoneline(insertdata, dbname, dbstructurelist, cursor):
"""
	restore a single row to a db
	remember that the db itself should have pickled its structure
	and the values that came out then should still be tuples now
	:param insertdata: tuple of values for one row
	:param dbname:
	:param dbstructurelist:
	:param cursor:
:return:
"""
insertstring = ' ('
insertvals = '('
for label in dbstructurelist:
insertstring += label[0] + ', '
insertvals += '%s, '
insertstring = insertstring[:-2] + ')'
insertvals = insertvals[:-2] + ')'
q = 'INSERT INTO {t} {i} VALUES {v}'.format(t=dbname, i=insertstring, v=insertvals)
d = insertdata
try:
cursor.execute(q, d)
except psycopg2.DatabaseError as e:
print('insert into ', dbname, 'failed at while attempting', d)
print('Error %s' % e)
return
def buildfilesearchlist(datadir, memory):
"""
look down inside datadir and its subdirs for anything named *.pickle.gz
obviously quite dangerous if you don't keep a close watch on the contents of those directories
:param datadir:
:return: a list of DirEntries
"""
suffix = '.pickle.gz'
for entry in os.scandir(datadir):
if entry.is_dir():
buildfilesearchlist(entry.path, memory)
elif suffix in entry.name:
memory.append(entry)
entries = list()
pickles = re.compile(r'\.pickle\.gz$')
for m in memory:
if re.search(pickles, m.path):
entries.append(m)
paths = list()
for e in entries:
paths.append(e.path)
return paths
def recursivereload(datadir):
"""
aim me at a directory and I will unpack all of the pickles and put them in the db
:return:
"""
dbc = setconnection(config)
cur = dbc.cursor()
support = {'authors', 'works', 'greek_dictionary', 'latin_dictionary', 'greek_lemmata', 'latin_lemmata',
'greek_morphology', 'latin_morphology', 'builderversion', 'dictionary_headword_wordcounts',
'storedvectors', 'storedvectorimages'}
if vectors != 'yes':
support = support - {'storedvectors', 'storedvectorimages'}
structures = dict()
for item in support:
structures[item] = loadcolumnsfromfile(schemadir+item+'_schema.sql')
strwordcount = loadcolumnsfromfile(schemadir+'wordcounts_0_schema.sql')
letters = '0abcdefghijklmnopqrstuvwxyzαβψδεφγηιξκλμνοπρϲτυωχθζ'
for l in letters:
structures['wordcounts_'+l] = strwordcount
print('scanning the filesystem')
dbpaths = buildfilesearchlist(datadir, [])
totaldbs = len(dbpaths)
print('dropping any old tables and creating new ones')
authorfinder = re.compile(r'(gr|lt|in|dp|ch)\w\w\w\w$')
count = 0
for db in dbpaths:
count += 1
dbcontents = retrievedb(db)
nm = dbcontents['dbname']
if nm in structures and 'wordcounts_' not in nm:
resetdb(nm, nm, schemadir+nm+'_schema.sql', cur)
elif nm in structures and 'wordcounts_' in nm:
resetdb(nm, 'wordcounts_0', schemadir+'wordcounts_0_schema.sql', cur)
elif re.search(authorfinder, nm):
resetdb(nm, 'gr0001', schemadir+'gr0001_schema.sql', cur)
if count % 500 == 0:
print('\t{c} tables reset'.format(c=count))
dbc.commit()
dbc.commit()
print('beginning to reload the tables: {d} found'.format(d=totaldbs))
print('[NB: the lengths vary considerably; not every 10% chunk will load as swiftly/slowly as did its peers...]')
manager = Manager()
count = MPCounter()
dbs = manager.list(dbpaths)
workers = int(config['io']['workers'])
if icanpickleconnections():
connections = {i: setconnection() for i in range(workers)}
else:
# will grab a connection later
connections = {i: None for i in range(workers)}
jobs = [Process(target=mpreloader, args=(dbs, count, totaldbs, connections[i])) for i in range(workers)]
for j in jobs:
j.start()
for j in jobs:
j.join()
if connections[0]:
for c in connections:
connections[c].connectioncleanup()
return
def mpreloader(dbs, count, totaldbs, dbconnection):
"""
mp reader reloader
:return:
"""
if not dbconnection:
dbconnection = setconnection()
progresschunks = int(totaldbs / 10)
while len(dbs) > 0:
try:
db = dbs.pop()
dbcontents = retrievedb(db)
except IndexError:
dbcontents = dict()
dbcontents['dbname'] = ''
count.increment()
if count.value % progresschunks == 0:
percent = round((count.value / totaldbs) * 100, 1)
print('\t {p}% of the tables have been restored ({a}/{b})'.format(p=percent, a=count.value, b=totaldbs))
if dbcontents['dbname'] != '':
reloadwhoeldb(dbcontents, dbconnection)
# unnestreloader(dbcontents)
return
if __name__ == '__main__':
freeze_support()
print('\n *** WARNING ***\n')
print('You are about to completely erase any currently installed data\n')
print('If "{d}" does not contain a full set of datafiles, HipparchiaServer will be sad and refuse to work properly, if at all.\n'.format(d=datadir))
areyousure = input('Type "YES" if you are sure you want to do this: ')
# everything
if areyousure == 'YES':
# print('nuking')
recursivereload(datadir)
else:
print()
print('"{x}" is not "YES"'.format(x=areyousure))
print('Aborting. Current installation unmodified')
# support only
# recursivereload(datadir+'/supportdbs/')
# gk only
# recursivereload(datadir+'/workdbs/greekauthors/')
# lt only
# recursivereload(datadir+'/workdbs/latinauthors/') | e-gun/HipparchiaSQLoader | reloadhipparchiaDBs.py | Python | gpl-3.0 | 11,369 |
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by Exopy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Test the declarative function present in the manifest.
"""
import enaml
from exopy.measurement.monitors.text_monitor.monitor import TextMonitor
with enaml.imports():
from exopy.testing.util import show_and_close_widget
from exopy.testing.windows import DockItemTestingWindow
from exopy.measurement.monitors.text_monitor.monitor_views\
import TextMonitorEdit, TextMonitorItem
def test_text_monitor_declaration_functions(text_monitor_workbench,
                                            exopy_qtbot):
"""Test that we can create a monitor and its views.
"""
m_p = text_monitor_workbench.get_plugin('exopy.measurement')
decl = m_p.get_declarations('monitor',
['exopy.text_monitor'])['exopy.text_monitor']
mon = decl.new(text_monitor_workbench, False)
assert isinstance(mon, TextMonitor)
edit_view = decl.make_view(text_monitor_workbench, mon)
assert isinstance(edit_view, TextMonitorEdit)
item = decl.create_item(text_monitor_workbench, None)
assert isinstance(item, TextMonitorItem)
show_and_close_widget(exopy_qtbot, DockItemTestingWindow(widget=item))
| Ecpy/ecpy | tests/measurement/monitors/text_monitor/test_manifest.py | Python | bsd-3-clause | 1,543 |
import cherrypy, locations, os, ythelper, json
from processpipe import ExternalProcess, ProcessException, OUT_FILE
YTDL_PATH = os.path.join(locations.YTUBE_PATH, "youtube_dl")
YTDL_PATH = os.path.join(YTDL_PATH, "__main__.py")
class YoutubeDlProcess(ExternalProcess):
def __init__(self, url):
ExternalProcess.__init__(self)
self.url = url
def name(self):
return 'youtube-dl'
def _get_cmd(self, args):
self.args = args
cmd = [YTDL_PATH, "--no-part", "--no-continue", "--no-playlist",
"--max-downloads", "1", "--no-progress", "--output", OUT_FILE]
if ythelper.skip_download(self.url):
cmd.append("--simulate")
cmd.append("--dump-single-json")
fmat = ythelper.get_format(self.url)
if fmat is not None:
cmd.append("--format")
cmd.append(fmat)
cmd.append(self.url)
return cmd
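    # For a typical URL the list built above amounts to a command line of
    # roughly this shape (illustrative; the simulate/format flags depend on
    # ythelper's answers for the URL):
    #   youtube_dl/__main__.py --no-part --no-continue --no-playlist \
    #       --max-downloads 1 --no-progress --output <OUT_FILE> <url>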
def _ready(self):
self.args['pid'] = self.proc.pid
while True:
line = self._readline()
if line.startswith("[download] Destination:"):
self.args['outfile'] = OUT_FILE
return self.args
elif line.startswith("{"):
obj = json.loads(line)
self.args['outfile'] = obj['url']
return self.args
elif line.startswith("ERROR:"):
raise ProcessException(self._get_ytdl_err(line[7:]))
def _get_ytdl_err(self, msg):
if msg.strip() == "":
return
idx = msg.find('YouTube said:')
if idx > -1:
msg = msg[idx+14:]
idx = msg.find('Unsupported URL:')
if idx > -1:
msg = 'Unsupported URL'
idx = msg.find('is not a valid URL.')
if idx > -1:
msg = msg[:idx+18]
idx = msg.find('This video is no longer available')
if idx > -1:
msg = 'No longer available'
# Assume 403 is because wrong country
idx = msg.find('HTTP Error 403: FORBIDDEN')
if idx > -1:
msg = 'This video is not available in your country'
idx = msg.find('ERROR:')
if idx > -1:
idx = msg.find(' ', idx)
msg = msg[idx+1:]
return msg
| blissland/devflixx | lib/player/ytdlproc.py | Python | gpl-2.0 | 2,028 |
from app import app
# Start the server
app.run(debug=True)
| arpitbbhayani/flask-template | run.py | Python | mit | 60 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Quote.published_on'
db.add_column('quotes_quote', 'published_on',
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default=datetime.datetime(2012, 8, 18, 0, 0), blank=True),
keep_default=False)
# Changing field 'Quote.language'
db.alter_column('quotes_quote', 'language', self.gf('django.db.models.fields.CharField')(max_length=5))
def backwards(self, orm):
# Deleting field 'Quote.published_on'
db.delete_column('quotes_quote', 'published_on')
# Changing field 'Quote.language'
db.alter_column('quotes_quote', 'language', self.gf('django.db.models.fields.CharField')(max_length=2))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'quotes.author': {
'Meta': {'object_name': 'Author'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
'quotes.quote': {
'Meta': {'object_name': 'Quote'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'quotes'", 'null': 'True', 'to': "orm['quotes.Author']"}),
'body': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'metadata': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'published_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'source': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'quotes'", 'null': 'True', 'to': "orm['auth.User']"})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
}
}
complete_apps = ['quotes'] | rollstudio/DjangoDash | quotes/quotes/migrations/0004_auto__add_field_quote_published_on__chg_field_quote_language.py | Python | mit | 6,283 |
from numpy import sin, cos, pi, array
import numpy as np
import matplotlib.pyplot as plt
import scipy.integrate as integrate
import matplotlib.animation as animation
from matplotlib.widgets import Slider, Button, RadioButtons
global G, L1, L2, M1, M2
G = 9.8 # gravitational acceleration (unit: m/(s^2))
L1 = 1.0 # first link length (unit: m)
L2 = 1.0 # second link length (unit: m)
M1 = 1.0 # first ball mass (unit: kg)
M2 = 1.0 # second ball mass (unit: kg)
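# The ODE state vector below is [theta1, omega1, theta2, omega2] (angles
# measured from the vertical); derivs() returns its time derivative for the
# standard double-pendulum equations of motion.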
def derivs(state, t, G,L1,L2,M1,M2):
dydx = np.zeros_like(state)
dydx[0] = state[1]
del_ = state[2]-state[0]
den1 = (M1+M2)*L1 - M2*L1*cos(del_)*cos(del_)
dydx[1] = (M2*L1*state[1]*state[1]*sin(del_)*cos(del_)
+ M2*G*sin(state[2])*cos(del_) + M2*L2*state[3]*state[3]*sin(del_)
- (M1+M2)*G*sin(state[0]))/den1
dydx[2] = state[3]
den2 = (L2/L1)*den1
dydx[3] = (-M2*L2*state[3]*state[3]*sin(del_)*cos(del_)
+ (M1+M2)*G*sin(state[0])*cos(del_)
- (M1+M2)*L1*state[1]*state[1]*sin(del_)
- (M1+M2)*G*sin(state[2]))/den2
return dydx
# create a time array from 0..10s sampled at 0.025 second steps
dt = 0.025
t = np.arange(0.0, 10, dt)
# th1 and th2 are the initial angles (degrees)
# w1 and w2 are the initial angular velocities (degrees per second)
th1 = 120.0
w1 = 0.0
th2 = -10.0
w2 = 0.0
rad = pi/180
def dophysics(G,L1,L2,M1,M2):
# initial state
global state
state = np.array([th1, w1, th2, w2])*pi/180.
# integrate your ODE using scipy.integrate.
global y,x1,y1,x2,y2
y = integrate.odeint(derivs, state, t, args=(G,L1,L2,M1,M2))
x1 = L1*sin(y[:,0])
y1 = -L1*cos(y[:,0])
x2 = L2*sin(y[:,2]) + x1
y2 = -L2*cos(y[:,2]) + y1
## Do initial calculations
dophysics(G,L1,L2,M1,M2)
########################
## Plot setup
########################
fig = plt.figure()
## Draw the main plotting window
ax = fig.add_subplot(111, autoscale_on=False, xlim=(-2, 2), ylim=(-2, 2))
## resize the plot to leave room for buttons
plt.subplots_adjust(left=0.25, bottom=0.25)
ax.grid()
line, = ax.plot([], [], 'o-', lw=2)
time_template = 'time = %.1fs'
time_text = ax.text(0.05, 0.9, '', transform=ax.transAxes)
########################
## Sliders
########################
## Color for the sliders, RGB
bcolor=(0.8,0.8,0.8)
def update(val):
# Grab values from the sliders
G = s_grav.val
L1 = s_L1.val
L2 = s_L2.val
M1 = s_M1.val
M2 = s_M2.val
#Recompute simulation
dophysics(G,L1,L2,M1,M2)
##Readjust axes
mL=(L1+L2)*1.1
ax.axis([-mL,mL,-mL,mL])
#l.set_ydata(amp*np.sin(2*np.pi*freq*t))
## Also update the plot grid if the user has opened them
global made_button
if made_button:
global b_lines, b_axs
b_lines[0][0].set_data(t,x1)
b_lines[1][0].set_data(t,y1)
b_lines[0][1].set_data(t,x2)
b_lines[1][1].set_data(t,y2)
b_axs[0,0].axis([min(t),max(t),-max(abs(x1)),max(abs(x1))])
b_axs[1,0].axis([min(t),max(t),-max(abs(y1)),max(abs(y1))])
b_axs[0,1].axis([min(t),max(t),-max(abs(x2)),max(abs(x2))])
b_axs[1,1].axis([min(t),max(t),-max(abs(y2)),max(abs(y2))])
plt.show()
plt.draw()
axp_s=0.02;si=0;sw=0.02
##Gravity
si+=1
ax_grav = plt.axes([0.15, axp_s+si*(0.01+sw), 0.65, sw], axisbg=bcolor)
s_grav = Slider(ax_grav, 'Gravity', 1, 100, valinit=G,valfmt='%0.2f m/s^2')
s_grav.on_changed(update)
##Length1
si+=1
ax_L1 = plt.axes([0.15, axp_s+si*(0.01+sw), 0.65, sw], axisbg=bcolor)
s_L1 = Slider(ax_L1, 'Length1', 0.01, 10, valinit=L1,valfmt='%0.2f m')
s_L1.on_changed(update)
##Length2
si+=1
ax_L2 = plt.axes([0.15, axp_s+si*(0.01+sw), 0.65, sw], axisbg=bcolor)
s_L2 = Slider(ax_L2, 'Length2', 0.01, 10, valinit=L2,valfmt='%0.2f m')
s_L2.on_changed(update)
##Mass1
si+=1
ax_M1 = plt.axes([0.15, axp_s+si*(0.01+sw), 0.65, sw], axisbg=bcolor)
s_M1 = Slider(ax_M1, 'Mass1', 0.01, 10, valinit=M1,valfmt='%0.1f kg')
s_M1.on_changed(update)
##Mass2
si+=1
ax_M2 = plt.axes([0.15, axp_s+si*(0.01+sw), 0.65, sw], axisbg=bcolor)
s_M2 = Slider(ax_M2, 'Mass2', 0.01, 10, valinit=M2,valfmt='%0.2f kg')
s_M2.on_changed(update)
########################
## Buttons
########################
##### Button to open seperate plot for dynamics
xplotax = plt.axes([0.03, 0.4, 0.12, 0.08])
b_xplot = Button(xplotax, 'Plot\ndynamics', color=bcolor, hovercolor='0.975')
made_button=0
## The function executed by the button
def xplot(event):
global made_button,b_lines,b_axs,b_fig
## Logic - did the user open the plots?
if made_button: #close existing plot
plt.close(b_fig)
made_button=0
else:
made_button=1
## Create a holder for the plot lines
b_lines=[['',''],['','']]
## Setup plot grid
##auto-update
plt.ion()
b_fig,b_axs=plt.subplots(2,2,sharex='all')
### Mass 1
b_axs[0,0].set_title('Mass 1')
## x plot
b_lines[0][0],=b_axs[0,0].plot(t,x1)
b_axs[0,0].set_ylabel('X position (m)')
b_axs[0,0].axis([min(t),max(t),-max(abs(x1)),max(abs(x1))])
## y plot
b_lines[1][0],=b_axs[1,0].plot(t,y1)
b_axs[1,0].set_xlabel('Time (s)')
b_axs[1,0].set_ylabel('Y position (m)')
b_axs[1,0].axis([min(t),max(t),-max(abs(y1)),max(abs(y1))])
        ### Mass 2
b_axs[0,1].set_title('Mass 2')
## x plot
b_lines[0][1],=b_axs[0,1].plot(t,x2)
b_axs[0,1].set_ylabel('X position (m)')
b_axs[0,1].axis([min(t),max(t),-max(abs(x2)),max(abs(x2))])
        ## y plot
b_lines[1][1],=b_axs[1,1].plot(t,y2)
b_axs[1,1].set_xlabel('Time (s)')
b_axs[1,1].set_ylabel('Y position (m)')
b_axs[1,1].axis([min(t),max(t),-max(abs(y2)),max(abs(y2))])
b_fig.subplots_adjust(hspace=0,wspace=0.3)
plt.show()
b_xplot.on_clicked(xplot)
########################
## Animation
########################
def init():
line.set_data([], [])
time_text.set_text('')
return line, time_text
def animate(i):
thisx = [0, x1[i], x2[i]]
thisy = [0, y1[i], y2[i]]
line.set_data(thisx, thisy)
time_text.set_text(time_template%(i*dt))
return line, time_text
ani = animation.FuncAnimation(fig, animate, np.arange(1, len(y)),
interval=25, blit=True, init_func=init)
#ani.save('double_pendulum.mp4', fps=15, clear_temp=True)
plt.show() | GitYiheng/reinforcement_learning_test | test00_previous_files/pendulum_test/pendulum_test_090920170002.py | Python | mit | 6,240 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
DWF Python Example 2
Modified by: MURAMATSU Atsushi <[email protected]>
Revised: 2016-04-21
Original Author: Digilent, Inc.
Original Revision: 10/17/2013
Requires:
Python 2.7, 3.3 or later
"""
import dwf
import time
#print DWF version
print("DWF Version: " + dwf.FDwfGetVersion())
#open device
print("Opening first device...")
dwf_ao = dwf.DwfAnalogOut()
print("Generating sine wave...")
# enable two channels
dwf_ao.nodeEnableSet(0, dwf_ao.NODE.CARRIER, True)
dwf_ao.nodeEnableSet(1, dwf_ao.NODE.CARRIER, True)
# for second channel set master the first channel
dwf_ao.masterSet(1, 0)
# slave channel is controlled by the master channel
# it is enough to set trigger, wait, run and repeat parameters for the master channel
# configure enabled channels
dwf_ao.nodeFunctionSet(-1, dwf_ao.NODE.CARRIER, dwf_ao.FUNC.SINE)
dwf_ao.nodeFrequencySet(-1, dwf_ao.NODE.CARRIER, 1000.0)
dwf_ao.nodeAmplitudeSet(-1, dwf_ao.NODE.CARRIER, 1.0)
#set phase for second channel
dwf_ao.nodePhaseSet(1, dwf_ao.NODE.CARRIER, 180.0)
print("Generating sine wave for 10 seconds...")
# start signal generation,
# the second, slave channel will start too
dwf_ao.configure(0, True)
time.sleep(10)
print("done.")
dwf_ao.close()
| amuramatsu/dwf | examples/AnalogOut_Sync.py | Python | mit | 1,317 |
import tensorflow as tf
try:
from tensorflow.python.ops.rnn_cell_impl import RNNCell
except ImportError:
from tensorflow.python.ops.rnn_cell_impl import _RNNCell as RNNCell
# Modified from "https://github.com/teganmaharaj/zoneout/blob/master/zoneout_seq2seq.py"
# Wrapper for the TF RNN cell
class ZoneoutWrapper(RNNCell):
"""Operator adding zoneout to all states (states+cells) of the given cell."""
def __init__(self, cell, state_zoneout_prob, training=True, seed=None, name="zoneout_wrapper"):
if not isinstance(cell, tf.nn.rnn_cell.RNNCell):
raise TypeError("The parameter cell is not an RNNCell.")
if (isinstance(state_zoneout_prob, float) and
not (state_zoneout_prob >= 0.0 and state_zoneout_prob <= 1.0)):
raise ValueError("Parameter zoneout_prob must be between 0 and 1: %d"
% state_zoneout_prob)
self._cell = cell
if isinstance(self._cell.state_size, tuple):
self._zoneout_prob = tuple([state_zoneout_prob]*len(self._cell.state_size))
else:
self._zoneout_prob = state_zoneout_prob
self._seed = seed
self.is_training = training
self._name = name
@property
def state_size(self):
return self._cell.state_size
@property
def output_size(self):
return self._cell.output_size
def __call__(self, inputs, state, scope=None):
with tf.name_scope(self.name):
if isinstance(self.state_size, tuple) != isinstance(self._zoneout_prob, tuple):
raise TypeError("Subdivided states need subdivided zoneouts.")
if isinstance(self.state_size, tuple) and len(tuple(self.state_size)) != len(tuple(self._zoneout_prob)):
raise ValueError("State and zoneout need equally many parts.")
output, new_state = self._cell(inputs, state)
if isinstance(self.state_size, tuple):
if self.is_training:
new_state = tuple((1 - state_part_zoneout_prob) * tf.nn.dropout(
new_state_part - state_part, (1 - state_part_zoneout_prob), seed=self._seed) + state_part
for new_state_part, state_part, state_part_zoneout_prob in
zip(new_state, state, self._zoneout_prob))
else:
new_state = tuple(state_part_zoneout_prob * state_part + (1 - state_part_zoneout_prob) * new_state_part
for new_state_part, state_part, state_part_zoneout_prob in
zip(new_state, state, self._zoneout_prob))
else:
if self.is_training:
new_state = (1 - self._zoneout_prob) * tf.nn.dropout(
new_state - state, (1 - self._zoneout_prob), seed=self._seed) + state
else:
new_state = self._zoneout_prob * state + (1 - self._zoneout_prob) * new_state
return output, new_state
class AttentionWrapper(RNNCell):
def __init__(self, cell, attention, name="attention_wrapper"):
self._cell = cell
self._attention = attention
self._name = name
def zero_state(self, batch_size, dtype):
with tf.name_scope("AttentionWrapperZeroState"):
return tuple([self._cell.zero_state(batch_size, dtype),
self._attention.zero_state(batch_size, dtype)])
def __call__(self, inputs, state_tm1, scope=None):
with tf.name_scope(self.name):
rnn_state_tm1, att_state_tm1 = state_tm1
inputs_context_tm1 = tf.concat([inputs, att_state_tm1["context"]], axis=-1)
rnn_out_t, rnn_state_t = self._cell(inputs_context_tm1, rnn_state_tm1)
context_t, att_state_t = self._attention(rnn_out_t, att_state_tm1)
output_t = tf.concat([rnn_out_t, context_t], axis=-1)
return output_t, tuple([rnn_state_t, att_state_t])
"""
Perhaps the multiple U matrices should not be concatenated together and initialized with a single orthogonal initializer.
"""
class GRUCell(RNNCell):
"""Gated Recurrent Unit"""
def __init__(self, num_units, name="gru"):
self._num_units = num_units
self._gate_activation = tf.sigmoid
self._name = name
@property
def state_size(self):
return self._num_units
@property
def output_size(self):
return self._num_units
def __call__(self, x, h_prev, scope=None):
with tf.variable_scope(self.name):
# Check if the input size exist.
input_size = x.shape.with_rank(2)[1].value
if input_size is None:
raise ValueError("Expecting input_size to be set.")
### get weights.
W_shape = (input_size, self.output_size)
U_shape = (self.output_size, self.output_size)
b_shape = (self.output_size,)
Wrz = tf.get_variable(name="Wrz", shape=(input_size, 2 * self.output_size))
Wh = tf.get_variable(name='Wh', shape=W_shape)
Ur = tf.get_variable(name="Ur", shape=U_shape, initializer=tf.orthogonal_initializer())
Uz = tf.get_variable(name="Uz", shape=U_shape, initializer=tf.orthogonal_initializer())
Uh = tf.get_variable(name='Uh', shape=U_shape,
initializer=tf.orthogonal_initializer())
brz = tf.get_variable(name="brz", shape=(2 * self.output_size),
initializer=tf.constant_initializer(0.))
bh = tf.get_variable(name='bh', shape=b_shape,
initializer=tf.constant_initializer(0.))
### calculate r and z
rz_x = tf.matmul(x, Wrz) + brz
r_x, z_x = tf.split(rz_x, num_or_size_splits=2, axis=1)
r = self._gate_activation(r_x + tf.matmul(h_prev, Ur))
z = self._gate_activation(z_x + tf.matmul(h_prev, Uz))
### calculate candidate
h_slash = tf.tanh(tf.matmul(x, Wh) + tf.matmul(r * h_prev, Uh) + bh)
### final cal
new_h = (1-z) * h_prev + z * h_slash
return new_h, new_h
"""
Perhaps the multiple U matrices should not be concatenated together and initialized with a single orthogonal initializer.
"""
class LSTMCell(RNNCell):
"""Long Short-Term Memory (LSTM) unit recurrent network cell."""
def __init__(self, num_units, forget_bias=1.0, name="lstm", scope=None):
self._num_units = num_units
self._gate_activation = tf.sigmoid
self._forget_bias = forget_bias
self._name = name
_scope = name if scope is None else scope+"/"+name
with tf.variable_scope(_scope, reuse=tf.AUTO_REUSE):
u_shape = (self.output_size, self.output_size)
mat_u_i = tf.get_variable(name="recurrent_kernel_i", shape=u_shape, initializer=tf.orthogonal_initializer())
mat_u_o = tf.get_variable(name="recurrent_kernel_o", shape=u_shape, initializer=tf.orthogonal_initializer())
mat_u_j = tf.get_variable(name="recurrent_kernel_j", shape=u_shape, initializer=tf.orthogonal_initializer())
mat_u_f = tf.get_variable(name="recurrent_kernel_f", shape=u_shape, initializer=tf.orthogonal_initializer())
self.mat_u = tf.concat([mat_u_i, mat_u_o, mat_u_j, mat_u_f], axis=-1)
@property
def state_size(self):
return tuple([self.output_size, self.output_size])
@property
def output_size(self):
return self._num_units
def __call__(self, x, state_prev, scope=None):
with tf.variable_scope(self.name):
h_prev, c_prev = state_prev
# Check if the input size exist.
input_size = x.shape.with_rank(2)[1].value
if input_size is None:
raise ValueError("Expecting input_size to be set.")
# get weights for concatenated tensor.
mat_w = tf.get_variable(name='input_kernel', shape=(input_size, self.output_size*4))
b = tf.get_variable(name='bias', shape=(self.output_size*4),
initializer=tf.constant_initializer(0.))
# calculate gates and input's info.
i_o_j_f_x = tf.matmul(x, mat_w) + b
i_o_j_f_h = tf.matmul(h_prev, self.mat_u)
i_o_j_f = i_o_j_f_x + i_o_j_f_h
i, o, j, f = tf.split(i_o_j_f, num_or_size_splits=4, axis=-1)
# activate them!
i, o = tf.tanh(i), self._gate_activation(o)
j, f = self._gate_activation(j), self._gate_activation(f + self._forget_bias)
# calculate candidate.
new_c = f * c_prev + j * i
# final cal.
new_h = o * tf.tanh(new_c)
return new_h, tuple([new_h, new_c])
class PreDNNWrapper(RNNCell):
def __init__(self, cell, dnn_fn, name="pre_projection"):
self._cell = cell
self._dnn_fn = dnn_fn
self._name = name
def zero_state(self, batch_size, dtype):
return self._cell.zero_state(batch_size, dtype)
def __call__(self, inputs, state_tm1, scope=None):
with tf.name_scope(self.name):
dnn_out = self._dnn_fn(inputs)
rnn_out_t, rnn_state_t = self._cell(dnn_out, state_tm1)
return rnn_out_t, rnn_state_t
class PostDNNWrapper(RNNCell):
def __init__(self, cell, dnn_fn, name="post_projection"):
self._cell = cell
self._dnn_fn = dnn_fn
self._name = name
def zero_state(self, batch_size, dtype):
return self._cell.zero_state(batch_size, dtype)
def __call__(self, inputs, state_tm1, scope=None):
with tf.name_scope(self.name):
rnn_out_t, rnn_state_t = self._cell(inputs, state_tm1)
dnn_out = self._dnn_fn(rnn_out_t)
return dnn_out, rnn_state_t
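if __name__ == "__main__":
    # A minimal usage sketch, not part of the library: wrap the LSTMCell above
    # with zoneout and unroll it with tf.nn.dynamic_rnn (TF1 graph mode is
    # assumed, matching the rest of this module; sizes are arbitrary).
    demo_cell = ZoneoutWrapper(LSTMCell(128), state_zoneout_prob=0.1, training=True)
    demo_inputs = tf.placeholder(tf.float32, [None, 20, 64])  # (batch, time, features)
    demo_outputs, _ = tf.nn.dynamic_rnn(demo_cell, demo_inputs, dtype=tf.float32)
    print(demo_outputs)  # Tensor of shape (batch, time, 128)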
| MU94W/TFCommon | RNNCell.py | Python | mit | 9,893 |
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from neutron import scheduler
ml2_opts = [
cfg.ListOpt('type_drivers',
default=['local', 'flat', 'vlan'],
help=_("List of network type driver entrypoints to be loaded "
"from the neutron.ml2.type_drivers namespace.")),
cfg.ListOpt('tenant_network_types',
default=['local'],
help=_("Ordered list of network_types to allocate as tenant "
"networks.")),
cfg.ListOpt('mechanism_drivers',
default=[],
help=_("List of networking mechanism driver entrypoints to "
"be loaded from the neutron.ml2.mechanism_drivers "
"namespace.")),
]
cfg.CONF.register_opts(ml2_opts, "ml2")
cfg.CONF.register_opts(scheduler.AGENTS_SCHEDULER_OPTS)
| ykaneko/neutron | neutron/plugins/ml2/config.py | Python | apache-2.0 | 1,496 |
"""Docstring manipulation functions.
These functions are generally work-arounds to fix deficiencies in Sphinx's
autodoc capabilities.
"""
import inspect
from six import iteritems
def add_int_enums_to_docstring(enum):
"""Decorator for IntEnum which re-writes the documentation string so that
Sphinx enumerates all the enumeration values.
This is a work-around for Sphinx autodoc's inability to properly document
IntEnums.
This decorator adds enumeration names and values to the 'Attributes'
section of the docstring of the decorated IntEnum class.
Example::
>>> from enum import IntEnum
>>> @add_int_enums_to_docstring
... class MyIntEnum(IntEnum):
... '''An example IntEnum.'''
... a = 0
... b = 1
>>> print(MyIntEnum.__doc__)
An example IntEnum.
<BLANKLINE>
Attributes
----------
a = 0
b = 1
<BLANKLINE>
"""
# The enum34 library (used for compatibility with Python < v3.4) rather
# oddly set its docstring to None rather than some senible but empty
# default...
if enum.__doc__ is None: # pragma: nocover
enum.__doc__ = ""
enum.__doc__ += ("\n\n"
"Attributes\n"
"----------\n")
for val in list(enum):
enum.__doc__ += "{} = {}\n".format(val.name, int(val))
return enum
def add_signature_to_docstring(f, include_self=False, kw_only_args={}):
"""Decorator which adds the function signature of 'f' to the decorated
function's docstring.
Under Python 2, wrapping a function (even using functools.wraps) hides its
signature to Sphinx's introspection tools so it is necessary to include the
function signature in the docstring to enable Sphinx to render it
correctly.
Additionally, when building decorators which change a function's signature,
    it is non-trivial to modify the wrapper's function signature and so
automatically generated documentation will not display the correct
signature. This decorator can aid in the specific case where a wrapper adds
keyword-only arguments to the set of arguments accepted by the underlying
function.
For example::
>>> def my_func(a, b=0, *args, **kwargs):
... '''An example function.'''
... pass
>>> import functools
>>> @add_signature_to_docstring(my_func, kw_only_args={"c": 1})
... @functools.wraps(my_func)
... def my_func_wrapper(*args, **kwargs):
... c = kwargs.pop("c")
... # ...do something with c...
... return my_func(*args, **kwargs)
>>> print(my_func_wrapper.__doc__)
my_func(a, b=0, *args, c=1, **kwargs)
An example function.
.. warning::
This function only works with functions which do not have any
named keyword-only arguments. For example this function cannot be
handled::
def f(*args, kw_only_arg=123)
This is due to a limitation in the underlying introspection library
provided in Python 2.
Parameters
----------
f : function
The function whose signature will be used. Need not be the same as the
decorated function.
include_self : bool
Should an initial 'self' arguments be included in the signature? (These
are assumed to be arguments called 'self' without a default value).
kw_only_args : dict
Optionally, add a set of keyword-only arguments to the function
signature. This is useful if the wrapper function adds new keyword-only
arguments.
"""
def decorate(f_wrapper):
args, varargs, keywords, defaults = inspect.getargspec(f)
# Simplifies later logic
if defaults is None:
defaults = []
# Make sure the keyword only arguments don't use the names of any other
# arguments
assert set(args).isdisjoint(set(kw_only_args))
assert varargs is None or varargs not in kw_only_args
assert keywords is None or keywords not in kw_only_args
# If required, remove the initial 'self' argument (e.g. for methods)
if not include_self:
if (len(args) >= 1 and
args[0] == "self" and
len(args) > len(defaults)):
args.pop(0)
# Assemble a string representation of the signature. This must be done
# by hand (rather than using formatargspec) to allow the assembly of
# signatures with keyword-only values.
signature = "{}(".format(f_wrapper.__name__)
for arg in args[:-len(defaults)] if defaults else args:
signature += "{}, ".format(arg)
for arg, default in zip(args[-len(defaults):], defaults):
signature += "{}={}, ".format(arg, repr(default))
if kw_only_args or varargs is not None:
# Must include a varargs name if keyword only arguments are
# supplied.
if varargs is None and kw_only_args:
assert "_" not in args
assert "_" not in kw_only_args
assert "_" != keywords
signature += "*_, "
else:
signature += "*{}, ".format(varargs)
for keyword, default in iteritems(kw_only_args):
signature += "{}={}, ".format(keyword, default)
if keywords is not None:
signature += "**{}, ".format(keywords)
signature = "{})".format(signature.rstrip(", "))
# Only add the signature if one is not already present.
if f_wrapper.__doc__ is None:
f_wrapper.__doc__ = signature
elif not f_wrapper.__doc__.lstrip().startswith(
"{}(".format(f_wrapper.__name__)):
f_wrapper.__doc__ = "{}\n{}".format(signature, f_wrapper.__doc__)
# Return the original function (after modifying its __doc__)
return f_wrapper
return decorate
| project-rig/rig | rig/utils/docstrings.py | Python | gpl-2.0 | 6,023 |
from reportlab.platypus import SimpleDocTemplate, Paragraph, Image, ParagraphAndImage
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.enums import TA_JUSTIFY, TA_CENTER
from reportlab.lib.units import cm
from StringIO import StringIO
from datetime import tzinfo, timedelta, datetime
from copy import deepcopy
import time
# the following code is used to make datetime.now() be TZ aware
# taken directly from the python docs: http://docs.python.org/2/library/datetime.html#tzinfo-objects
STDOFFSET = timedelta(seconds=-time.timezone)
if time.daylight:
DSTOFFSET = timedelta(seconds=-time.altzone)
else:
DSTOFFSET = STDOFFSET
DSTDIFF = DSTOFFSET - STDOFFSET
class LocalTimezone(tzinfo):
def utcoffset(self, dt):
if self._isdst(dt):
return DSTOFFSET
else:
return STDOFFSET
def dst(self, dt):
if self._isdst(dt):
return DSTDIFF
else:
return timedelta(0)
def tzname(self, dt):
return time.tzname[self._isdst(dt)]
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, 0)
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
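# Example: datetime.now(LocalTimezone()) yields a zone-aware timestamp such
# as 2017-05-04 12:00:00+02:00 (illustrative); the page footer below stamps
# the PDF this way.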
def compose_pdf(profile):
"""Generates the pdf file based on the profile returned by the linkedin API."""
output = StringIO()
# some constants
FONT_NAME = 'Helvetica'
FONT_SIZE = 12
COLOR = '#aaabbb'
LEADING = 16
HEADLINE_FONT_SIZE = 18
PARAGRAPH_SPACE = 30
BULLET_INDENT = 10
# FIXME (Iurii Kudriavtsev): image should be scaled appropriately
IMG_WIDTH = 4 * cm
IMG_HEIGHT = 4 * cm
doc = SimpleDocTemplate(
output,
topMargin=2 * cm,
rightMargin=cm,
bottomMargin=2 * cm,
leftMargin=cm,
)
# define styles that will be used
styles = getSampleStyleSheet()
styles["Normal"].fontName = FONT_NAME
styles["Normal"].fontSize = FONT_SIZE
styles["Normal"].leading = LEADING
styles["Normal"].alignment = TA_JUSTIFY
styles["Normal"].spaceBefore = 20
styles["Normal"].spaceAfter = 10
styles.add(ParagraphStyle(
name='Headline',
alignment=TA_CENTER,
fontName=FONT_NAME,
fontSize=HEADLINE_FONT_SIZE,
leading=LEADING,
spaceBefore=PARAGRAPH_SPACE,
spaceAfter=PARAGRAPH_SPACE
    ))
Indent_style = deepcopy(styles["Normal"])
Indent_style.name = 'Indent'
Indent_style.fontSize = 10
Indent_style.leftIndent = 30
Indent_style.spaceBefore = 0
Indent_style.spaceAfter = 0
Indent_style.bulletIndent = 20
styles.add(Indent_style)
Bullet_style = deepcopy(styles["Normal"])
Bullet_style.name = 'CustomBullet'
Bullet_style.bulletFontName = 'Symbol'
Bullet_style.bulletFontSize = FONT_SIZE
Bullet_style.firstLineIndent = 0
Bullet_style.leftIndent = FONT_SIZE + BULLET_INDENT
Bullet_style.bulletIndent = BULLET_INDENT
Bullet_style.bulletColor = COLOR
styles.add(Bullet_style)
Skill_style = deepcopy(styles["Normal"])
Skill_style.name = 'Skill'
Skill_style.fontSize = 15
Skill_style.fontName = 'Courier'
Skill_style.textColor = COLOR
styles.add(Skill_style)
story = []
p = '<font size=%d>%s %s</font><br/><br/>' % (HEADLINE_FONT_SIZE, profile['firstName'], profile['lastName'])
p += '%s<br/><br/><br/>' % profile['headline']
if profile['phoneNumbers']['_total']:
p += '<strong>Phone:</strong> <font size=10>%s</font><br/><br/>' % profile[
'phoneNumbers']['values'][0]['phoneNumber']
p += '<strong>Email:</strong> <font size=10>%s</font><br/><br/>' % profile['emailAddress']
p += '<strong>Location:</strong> <font size=10>%s</font><br/><br/>' % profile['location']['name']
p = Paragraph(p, styles["Normal"])
    if profile['pictureUrls']['_total']:
        profile_picture_url = profile['pictureUrls']['values'][0]
        img = Image(profile_picture_url, IMG_WIDTH, IMG_HEIGHT)
        story.append(ParagraphAndImage(p, img))
    else:
        # Without this branch the header paragraph is dropped for profiles
        # that have no picture.
        story.append(p)
story.append(Paragraph('Objective', styles["Headline"]))
story.append(Paragraph(profile['summary'], styles["Normal"]))
if profile['positions']['_total']:
story.append(Paragraph('Work Experience', styles["Headline"]))
for position in profile['positions']['values']:
position_headline = '%s - %s <font color="%s">|</font> <font size=8>%d/%d - %s</font>' % (
position['title'],
position['company']['name'],
COLOR,
position['startDate']['month'],
position['startDate']['year'],
'present' if position['isCurrent'] else str(
position['endDate']['month']) + '/' + str(position['endDate']['year']),
)
position_summary = position['summary'].replace('\n', '<br/>')
story.append(Paragraph(position_headline, styles["CustomBullet"], bulletText='\xe2\x80\xa2'))
story.append(Paragraph(position_summary, styles["Indent"]))
if profile['educations']['_total']:
story.append(Paragraph('Education', styles["Headline"]))
for education in profile['educations']['values']:
education_headline = '%s <font color="%s">|</font> <font size=8>%d - %d</font>' % (
education['schoolName'],
COLOR,
education['startDate']['year'],
education['endDate']['year'],
)
education_headline += '<br/>%s, %s' % (education['degree'], education['fieldOfStudy'])
story.append(Paragraph(education_headline, styles["CustomBullet"], bulletText='\xe2\x80\xa2'))
for note in education['notes'].split('\n'):
story.append(Paragraph(note, styles["Indent"], bulletText='-'))
if profile['projects']['_total']:
story.append(Paragraph('Projects', styles["Headline"]))
for project in profile['projects']['values']:
story.append(Paragraph(project['name'], styles["CustomBullet"], bulletText='\xe2\x80\xa2'))
story.append(Paragraph(project['description'].replace('\n', '<br/>'), styles["Indent"]))
if profile['skills']['_total']:
story.append(Paragraph('Skills', styles["Headline"]))
p = ' '.join([skill['skill']['name'].upper() for skill in profile['skills']['values']])
story.append(Paragraph(p, styles["Skill"]))
if profile['recommendationsReceived']['_total']:
story.append(Paragraph('Recommendations', styles["Headline"]))
for recommendation in profile['recommendationsReceived']['values']:
recommendation_headline = '%s %s <font color="%s">|</font> <font size=8>%s</font>' % (
recommendation['recommender']['firstName'],
recommendation['recommender']['lastName'],
COLOR,
recommendation['recommendationType']['code'],
)
recommendation_text = recommendation['recommendationText'].replace('\n', '<br/>')
story.append(Paragraph(recommendation_headline, styles["CustomBullet"], bulletText='\xe2\x80\xa2'))
story.append(Paragraph(recommendation_text, styles["Indent"]))
def onPage(canvas, dcmt):
canvas.saveState()
canvas.setFont(FONT_NAME, 7)
canvas.setStrokeColor(COLOR)
canvas.setFillColor(COLOR)
canvas.drawString(cm, dcmt.pagesize[1] - .75 * cm,
'%s %s - %s' % (profile['firstName'], profile['lastName'], profile['headline']))
Local = LocalTimezone()
canvas.drawRightString(dcmt.pagesize[0] - cm, dcmt.pagesize[1] - .75 * cm,
datetime.now(Local).strftime('Generated on %d.%m.%Y %H:%M:%S %Z'))
canvas.line(cm, dcmt.pagesize[1] - cm, dcmt.pagesize[0] - cm, dcmt.pagesize[1] - cm)
canvas.line(cm, cm, (dcmt.pagesize[0] - cm) / 2.0, cm)
canvas.drawCentredString(dcmt.pagesize[0] / 2.0, .95 * cm, str(dcmt.page))
canvas.line((dcmt.pagesize[0] + cm) / 2.0, cm, dcmt.pagesize[0] - cm, cm)
canvas.restoreState()
doc.build(story, onFirstPage=onPage, onLaterPages=onPage)
pdf = output.getvalue()
output.close()
return pdf
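# Minimal usage sketch. A real profile dict comes from the LinkedIn API; the
# values below are hypothetical and cover only the keys that compose_pdf
# reads, with all optional sections left empty.
if __name__ == '__main__':
    sample_profile = {
        'firstName': 'Jane',
        'lastName': 'Doe',
        'headline': 'Software Engineer',
        'emailAddress': 'jane.doe@example.com',
        'location': {'name': 'Berlin, Germany'},
        'summary': 'Builds and documents software.',
        'phoneNumbers': {'_total': 0},
        'pictureUrls': {'_total': 0},
        'positions': {'_total': 0},
        'educations': {'_total': 0},
        'projects': {'_total': 0},
        'skills': {'_total': 0},
        'recommendationsReceived': {'_total': 0},
    }
    with open('resume.pdf', 'wb') as fh:
        fh.write(compose_pdf(sample_profile))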
| ikudriavtsev/personal | utils.py | Python | mit | 8,429 |
import unicodecsv as csv
from MongoManager import MongoManager
import argparse
import ConfigParser
import os
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Script to extract CAMEO and JRC entities to CSV files.')
parser.add_argument('--config', default='../config/config.cnf',
help='Location of Config File (default: ../config/config.cnf)')
args = parser.parse_args()
config = ConfigParser.ConfigParser()
    config_file = args.config
config.read(config_file)
db_config = {
'host': config.get('MongoDBConnection', 'db.host'),
'port': config.get('MongoDBConnection', 'db.port'),
'username': config.get('MongoDBConnection', 'db.username'),
'password': config.get('MongoDBConnection', 'db.password')
}
schema = config.get('MongoDBConnection', 'db.schema')
batch_size = config.get('MongoDBConnection', 'db.batch_limit')
cameo_table = config.get('Cameo', 'db.Cameo')
jrc_table = config.get('JRCNames', 'db.JRCNames')
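    # Illustrative config.cnf layout (section and option names inferred from
    # the reads above; all values are placeholders):
    #
    #   [MongoDBConnection]
    #   db.host = localhost
    #   db.port = 27017
    #   db.username = user
    #   db.password = secret
    #   db.schema = entities
    #   db.batch_limit = 1000
    #
    #   [Cameo]
    #   db.Cameo = cameo
    #   Cameo.CSV = cameo.csv
    #
    #   [JRCNames]
    #   db.JRCNames = jrc
    #   Jrc.CSV = jrc.csv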
manager = MongoManager(schema, cameo_table, batch_size, db_config)
cameo = manager.get_collection()
cameo_data = cameo.find()
counter = 0
result_file_cameo = config.get('Cameo', 'Cameo.CSV')
    if os.path.exists(result_file_cameo):  # os.remove raises OSError if the file is missing
        os.remove(result_file_cameo)
with open(result_file_cameo, 'ab') as csvfile:
writer = csv.writer(csvfile, delimiter='|')
d = ['id','record_type','cameo_title','compare_strings']
writer.writerow(d)
for cameo_record in cameo_data:
cameo_compare_list = cameo_record['compare_strings']
for s in cameo_compare_list:
dataList = []
dataList.append(cameo_record['_id'])
dataList.append(cameo_record['record_type'])
dataList.append(cameo_record['cameo_title'])
dataList.append(s)
writer.writerow(dataList)
counter += 1
print "# of CAMEO entities extracted: ", counter
manager2 = MongoManager(schema, jrc_table, batch_size, db_config)
jrc = manager2.get_collection()
jrc_data = jrc.find({})
counter = 0
result_file_jrc = config.get('JRCNames', 'Jrc.CSV')
    if os.path.exists(result_file_jrc):
        os.remove(result_file_jrc)
with open(result_file_jrc, 'ab') as csvfile:
writer = csv.writer(csvfile, delimiter='|')
d = ['id','jrc_id','type','compare_strings']
writer.writerow(d)
for jrc_record in jrc_data:
jrc_compare_list = jrc_record['compare_strings']
for s in jrc_compare_list:
dataList = []
dataList.append(jrc_record['_id'])
dataList.append(jrc_record['id'])
dataList.append(jrc_record['type'])
dataList.append(s)
writer.writerow(dataList)
counter += 1
print "# of JRC entities extracted: ", counter | SubhasisDutta/CAMEO-JRC-Database | ir-scripts/extractCSVcameojrc.py | Python | apache-2.0 | 2,895 |
import os
from slackclient import SlackClient
BOT_NAME = 'taigabot'
slack_client = SlackClient(os.environ.get('SLACK_BOT_TOKEN'))
if __name__ == "__main__":
api_call = slack_client.api_call("users.list")
if api_call.get('ok'):
# retrieve all users so we can find our bot
users = api_call.get('members')
        for user in users:
            if 'name' in user and user.get('name') == BOT_NAME:
                print("Bot ID for '" + user['name'] + "' is " + user.get('id'))
                break
        else:
            # for/else: this branch runs only if the loop never hit `break`.
            print("could not find bot user with the name " + BOT_NAME)
    else:
        print("users.list API call failed: " + str(api_call.get('error')))
| sopitz/taigabot | bot/print_bot_id.py | Python | agpl-3.0 | 579 |
#!/usr/bin/env python
#coding: utf-8
import sys
import time
import numpy as np
import random
import rospy
import tf
from geometry_msgs.msg import PointStamped
from std_msgs.msg import String, Empty, Header
from learningSignals.agents import Drqn
############################################# publish action to main_activity (and to body)
pub_action = rospy.Publisher('action', String, queue_size = 1)
# for training:
pub_scores = rospy.Publisher('scores', String, queue_size = 1)
pub_hiddens = rospy.Publisher('hiddens', String, queue_size = 1)
############################################# global values
agent = Drqn(input_size=3, nb_action=2, gamma=0.9)
stop = False
signal = [0,0,0]
reward = 0
key = 0
received_signal = False
received_reward = True
############################################# functions
def onExit(msg):
global stop
stop = True
def onReceiveSignal(msg):
global signal
global received_signal
signal_msg = str(msg.data)
new_signal = [float(i) for i in signal_msg.split('_')]
if new_signal!=signal:
received_signal = True
rospy.loginfo('signal: '+str(new_signal))
signal = new_signal
def onReceiveReward(msg):
global reward
global key
global received_reward
reward_msg = str(msg.data)
new_reward, new_key = float(reward_msg.split('_')[0]), float(reward_msg.split('_')[1])
#rospy.loginfo('new_key: '+str(new_key))
#rospy.loginfo('key: '+str(key))
if new_key!=key:
reward = new_reward
key = new_key
received_reward = True
rospy.loginfo('reward: '+str(reward))
############################################# main loop
if __name__=="__main__":
rospy.init_node("brain")
    # Register each subscriber once; calling rospy.Subscriber inside the
    # loop would open a new subscription on every iteration.
    rospy.Subscriber('exit_topic', String, onExit)
    rospy.Subscriber('signal', String, onReceiveSignal)
    rospy.Subscriber('reward', String, onReceiveReward)
    while not stop:
        if received_signal:  # received_reward and received_signal
#rospy.loginfo('update !')
action = agent.update(reward, signal)
#rospy.loginfo('action: '+str(action))
msg = String()
msg.data = str(action)+'_'+str(np.random.rand())
pub_action.publish(msg)
received_signal = False
received_reward = False
#for training:
msg_scores = String(); msg_hiddens = String()
msg_scores.data = str(agent.scores*100)
msg_hiddens.data = str(agent.last_hidden*100)
pub_scores.publish(msg_scores)
pub_hiddens.publish(msg_hiddens)
rospy.sleep(0.3)
rospy.spin()
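# Message formats expected by the callbacks above (values illustrative):
#   'signal' topic: "0.1_0.5_0.2" -- three '_'-separated floats
#   'reward' topic: "1.0_42"      -- "<reward>_<key>"; a changed key marks a new reward
# For a quick manual test from a shell:
#   rostopic pub /signal std_msgs/String "data: '0.1_0.5_0.2'"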
| alexis-jacq/signals | nodes/brain.py | Python | isc | 2,636 |