repo_name
stringlengths
5
100
ref
stringlengths
12
67
path
stringlengths
4
244
copies
stringlengths
1
8
content
stringlengths
0
1.05M
henrix/rpi-linux
refs/heads/rpi-4.1.y
scripts/gdb/linux/tasks.py
367
# # gdb helper commands and functions for Linux kernel debugging # # task & thread tools # # Copyright (c) Siemens AG, 2011-2013 # # Authors: # Jan Kiszka <[email protected]> # # This work is licensed under the terms of the GNU GPL version 2. # import gdb from linux import utils task_type = utils.CachedType("struct task_struct") def task_lists(): global task_type task_ptr_type = task_type.get_type().pointer() init_task = gdb.parse_and_eval("init_task").address t = g = init_task while True: while True: yield t t = utils.container_of(t['thread_group']['next'], task_ptr_type, "thread_group") if t == g: break t = g = utils.container_of(g['tasks']['next'], task_ptr_type, "tasks") if t == init_task: return def get_task_by_pid(pid): for task in task_lists(): if int(task['pid']) == pid: return task return None class LxTaskByPidFunc(gdb.Function): """Find Linux task by PID and return the task_struct variable. $lx_task_by_pid(PID): Given PID, iterate over all tasks of the target and return that task_struct variable which PID matches.""" def __init__(self): super(LxTaskByPidFunc, self).__init__("lx_task_by_pid") def invoke(self, pid): task = get_task_by_pid(pid) if task: return task.dereference() else: raise gdb.GdbError("No task of PID " + str(pid)) LxTaskByPidFunc() thread_info_type = utils.CachedType("struct thread_info") ia64_task_size = None def get_thread_info(task): global thread_info_type thread_info_ptr_type = thread_info_type.get_type().pointer() if utils.is_target_arch("ia64"): global ia64_task_size if ia64_task_size is None: ia64_task_size = gdb.parse_and_eval("sizeof(struct task_struct)") thread_info_addr = task.address + ia64_task_size thread_info = thread_info_addr.cast(thread_info_ptr_type) else: thread_info = task['stack'].cast(thread_info_ptr_type) return thread_info.dereference() class LxThreadInfoFunc (gdb.Function): """Calculate Linux thread_info from task variable. $lx_thread_info(TASK): Given TASK, return the corresponding thread_info variable.""" def __init__(self): super(LxThreadInfoFunc, self).__init__("lx_thread_info") def invoke(self, task): return get_thread_info(task) LxThreadInfoFunc()
Isabek/python-koans
refs/heads/master
python3/koans/about_none.py
79
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Based on AboutNil in the Ruby Koans # from runner.koan import * class AboutNone(Koan): def test_none_is_an_object(self): "Unlike NULL in a lot of languages" self.assertEqual(__, isinstance(None, object)) def test_none_is_universal(self): "There is only one None" self.assertEqual(____, None is None) def test_what_exception_do_you_get_when_calling_nonexistent_methods(self): """ What is the Exception that is thrown when you call a method that does not exist? Hint: launch python command console and try the code in the block below. Don't worry about what 'try' and 'except' do, we'll talk about this later """ try: None.some_method_none_does_not_know_about() except Exception as ex: ex2 = ex # What exception has been caught? # # Need a recap on how to evaluate __class__ attributes? # # http://bit.ly/__class__ self.assertEqual(__, ex2.__class__) # What message was attached to the exception? # (HINT: replace __ with part of the error message.) self.assertRegexpMatches(ex2.args[0], __) def test_none_is_distinct(self): """ None is distinct from other things which are False. """ self.assertEqual(__, None is not 0) self.assertEqual(__, None is not False)
abramhindle/UnnaturalCodeFork
refs/heads/master
python/testdata/launchpad/lib/lp/bugs/browser/tests/test_bugsubscriptionfilter.py
1
# Copyright 2010-2012 Canonical Ltd. This software is licensed under the # GNU Affero General Public License version 3 (see the file LICENSE). """Tests for bug subscription filter browser code.""" __metaclass__ = type from functools import partial from urlparse import urlparse from lazr.restfulclient.errors import BadRequest from lxml import html from storm.exceptions import LostObjectError from testtools.matchers import StartsWith import transaction from lp.app.enums import InformationType from lp.bugs.browser.structuralsubscription import ( StructuralSubscriptionNavigation, ) from lp.bugs.enums import BugNotificationLevel from lp.bugs.interfaces.bugtask import ( BugTaskImportance, BugTaskStatus, ) from lp.services.webapp.publisher import canonical_url from lp.services.webapp.servers import LaunchpadTestRequest from lp.testing import ( anonymous_logged_in, login_person, normalize_whitespace, person_logged_in, TestCaseWithFactory, ws_object, ) from lp.testing.layers import ( AppServerLayer, DatabaseFunctionalLayer, LaunchpadFunctionalLayer, ) from lp.testing.views import create_initialized_view class TestBugSubscriptionFilterBase: def setUp(self): super(TestBugSubscriptionFilterBase, self).setUp() self.owner = self.factory.makePerson(name=u"foo") self.structure = self.factory.makeProduct( owner=self.owner, name=u"bar") with person_logged_in(self.owner): self.subscription = self.structure.addBugSubscription( self.owner, self.owner) self.initial_filter = self.subscription.bug_filters.one() self.subscription_filter = self.subscription.newBugFilter() class TestBugSubscriptionFilterNavigation( TestBugSubscriptionFilterBase, TestCaseWithFactory): layer = LaunchpadFunctionalLayer def test_canonical_url(self): url = urlparse(canonical_url(self.subscription_filter)) self.assertThat(url.hostname, StartsWith("bugs.")) self.assertEqual( "/bar/+subscription/foo/+filter/%d" % ( self.subscription_filter.id), url.path) def test_navigation(self): request = LaunchpadTestRequest() request.setTraversalStack([unicode(self.subscription_filter.id)]) navigation = StructuralSubscriptionNavigation( self.subscription, request) view = navigation.publishTraverse(request, '+filter') self.assertIsNot(None, view) class TestBugSubscriptionFilterAPI( TestBugSubscriptionFilterBase, TestCaseWithFactory): layer = AppServerLayer def test_visible_attributes(self): # Bug subscription filters are not private objects. All attributes are # visible to everyone. transaction.commit() # Create a service for a new person. service = self.factory.makeLaunchpadService() get_ws_object = partial(ws_object, service) ws_subscription = get_ws_object(self.subscription) ws_subscription_filter = get_ws_object(self.subscription_filter) self.assertEqual( ws_subscription.self_link, ws_subscription_filter.structural_subscription_link) self.assertEqual( self.subscription_filter.find_all_tags, ws_subscription_filter.find_all_tags) self.assertEqual( self.subscription_filter.description, ws_subscription_filter.description) self.assertEqual( list(self.subscription_filter.statuses), ws_subscription_filter.statuses) self.assertEqual( list(self.subscription_filter.importances), ws_subscription_filter.importances) self.assertEqual( list(self.subscription_filter.tags), ws_subscription_filter.tags) def test_structural_subscription_cannot_be_modified(self): # Bug filters cannot be moved from one structural subscription to # another. In other words, the structural_subscription field is # read-only. user = self.factory.makePerson(name=u"baz") with person_logged_in(self.owner): user_subscription = self.structure.addBugSubscription(user, user) transaction.commit() # Create a service for the structure owner. service = self.factory.makeLaunchpadService(self.owner) get_ws_object = partial(ws_object, service) ws_user_subscription = get_ws_object(user_subscription) ws_subscription_filter = get_ws_object(self.subscription_filter) ws_subscription_filter.structural_subscription = ws_user_subscription error = self.assertRaises(BadRequest, ws_subscription_filter.lp_save) self.assertEqual(400, error.response.status) self.assertEqual( self.subscription, self.subscription_filter.structural_subscription) class TestBugSubscriptionFilterAPIModifications( TestBugSubscriptionFilterBase, TestCaseWithFactory): layer = AppServerLayer def setUp(self): super(TestBugSubscriptionFilterAPIModifications, self).setUp() transaction.commit() self.service = self.factory.makeLaunchpadService(self.owner) self.ws_subscription_filter = ws_object( self.service, self.subscription_filter) def test_modify_tags_fields(self): # Two tags-related fields - find_all_tags and tags - can be # modified. The other two tags-related fields - include_any_tags and # exclude_any_tags - are not exported because the tags field provides # a more intuitive way to update them (from the perspective of an API # consumer). self.assertFalse(self.subscription_filter.find_all_tags) self.assertFalse(self.subscription_filter.include_any_tags) self.assertFalse(self.subscription_filter.exclude_any_tags) self.assertEqual(set(), self.subscription_filter.tags) # Modify, save, and start a new transaction. self.ws_subscription_filter.find_all_tags = True self.ws_subscription_filter.tags = ["foo", "-bar", "*", "-*"] self.ws_subscription_filter.lp_save() transaction.begin() # Updated state. self.assertTrue(self.subscription_filter.find_all_tags) self.assertTrue(self.subscription_filter.include_any_tags) self.assertTrue(self.subscription_filter.exclude_any_tags) self.assertEqual( set(["*", "-*", "foo", "-bar"]), self.subscription_filter.tags) def test_modify_description(self): # The description can be modified. self.assertEqual( None, self.subscription_filter.description) # Modify, save, and start a new transaction. self.ws_subscription_filter.description = u"It's late." self.ws_subscription_filter.lp_save() transaction.begin() # Updated state. self.assertEqual( u"It's late.", self.subscription_filter.description) def test_modify_statuses(self): # The statuses field can be modified. self.assertEqual(set(), self.subscription_filter.statuses) # Modify, save, and start a new transaction. self.ws_subscription_filter.statuses = ["New", "Triaged"] self.ws_subscription_filter.lp_save() transaction.begin() # Updated state. self.assertEqual( set([BugTaskStatus.NEW, BugTaskStatus.TRIAGED]), self.subscription_filter.statuses) def test_modify_importances(self): # The importances field can be modified. self.assertEqual(set(), self.subscription_filter.importances) # Modify, save, and start a new transaction. self.ws_subscription_filter.importances = ["Low", "High"] self.ws_subscription_filter.lp_save() transaction.begin() # Updated state. self.assertEqual( set([BugTaskImportance.LOW, BugTaskImportance.HIGH]), self.subscription_filter.importances) def test_delete(self): # Subscription filters can be deleted. self.ws_subscription_filter.lp_delete() transaction.begin() self.assertRaises( LostObjectError, getattr, self.subscription_filter, "find_all_tags") class TestBugSubscriptionFilterView( TestBugSubscriptionFilterBase, TestCaseWithFactory): layer = DatabaseFunctionalLayer def setUp(self): super(TestBugSubscriptionFilterView, self).setUp() self.view = create_initialized_view( self.subscription_filter, "+definition") def test_description(self): # If the description is not set then the empty string is returned. self.assertEqual(u"", self.view.description) # If the description is just whitespace then the empty string is # returned. with person_logged_in(self.owner): self.subscription_filter.description = u" " self.assertEqual(u"", self.view.description) # If the description is set it is returned. with person_logged_in(self.owner): self.subscription_filter.description = u"Foo" self.assertEqual(u"Foo", self.view.description) # Leading and trailing whitespace is trimmed. with person_logged_in(self.owner): self.subscription_filter.description = u" Foo\t " self.assertEqual(u"Foo", self.view.description) def test_conditions_with_nothing_set(self): # If nothing is set the conditions list is empty. self.assertEqual([], self.view.conditions) def test_not_filters_everything_normally(self): self.failIf(self.view.filters_everything) def test_conditions_for_COMMENTS_events(self): # If we are subscribed to comments, that is all-inclusive: no # conditions are returned. self.assertEqual(BugNotificationLevel.COMMENTS, self.subscription_filter.bug_notification_level) self.assertEqual([], self.view.conditions) def test_conditions_for_METADATA_events(self): with person_logged_in(self.owner): self.subscription_filter.bug_notification_level = ( BugNotificationLevel.METADATA) self.assertEqual( [u'any change is made to the bug, other than a new comment being ' 'added'], self.view.conditions) def test_conditions_for_LIFECYCLE_events(self): with person_logged_in(self.owner): self.subscription_filter.bug_notification_level = ( BugNotificationLevel.LIFECYCLE) self.assertEqual( [u'the bug is fixed or re-opened'], self.view.conditions) def test_conditions_for_statuses(self): # If no statuses have been specified nothing is returned. self.assertEqual([], self.view.conditions) # If set, a description of the statuses is returned. with person_logged_in(self.owner): self.subscription_filter.statuses = [ BugTaskStatus.NEW, BugTaskStatus.CONFIRMED, BugTaskStatus.TRIAGED, ] self.assertEqual( [u"the status is New, Confirmed, or Triaged"], self.view.conditions) def test_conditions_for_importances(self): # If no importances have been specified nothing is returned. self.assertEqual([], self.view.conditions) # If set, a description of the importances is returned. with person_logged_in(self.owner): self.subscription_filter.importances = [ BugTaskImportance.LOW, BugTaskImportance.MEDIUM, BugTaskImportance.HIGH, ] self.assertEqual( [u"the importance is High, Medium, or Low"], self.view.conditions) def test_conditions_for_tags(self): # If no tags have been specified nothing is returned. self.assertEqual([], self.view.conditions) # If set, a description of the tags is returned. with person_logged_in(self.owner): self.subscription_filter.tags = [u"foo", u"bar", u"*"] self.assertEqual( [u"the bug is tagged with *, bar, or foo"], self.view.conditions) # If find_all_tags is set, the conjunction changes. with person_logged_in(self.owner): self.subscription_filter.find_all_tags = True self.assertEqual( [u"the bug is tagged with *, bar, and foo"], self.view.conditions) def test_conditions_for_information_types(self): # If no information types have been specified nothing is returned. self.assertEqual([], self.view.conditions) # If set, a description of the information type is returned. with person_logged_in(self.owner): self.subscription_filter.information_types = [ InformationType.PRIVATESECURITY, InformationType.USERDATA] self.assertEqual( [u"the information type is Private Security or Private"], self.view.conditions) def assertRender(self, dt_content=None, dd_content=None): root = html.fromstring(self.view.render()) if dt_content is not None: self.assertEqual( dt_content, normalize_whitespace( root.find("dt").text_content())) if dd_content is not None: self.assertEqual( dd_content, normalize_whitespace( root.find("dd").text_content())) def test_render_with_no_description_and_no_conditions(self): # If no description and no conditions are set, the rendered # description is very simple, and there's a short message describing # the absense of conditions. self.assertRender( u"This filter allows all mail through.", u"There are no filter conditions!") def test_render_with_no_description_and_conditions(self): # If conditions are set but no description, the rendered description # is very simple, and the conditions are described. with person_logged_in(self.owner): self.subscription_filter.bug_notification_level = ( BugNotificationLevel.METADATA) self.subscription_filter.statuses = [ BugTaskStatus.NEW, BugTaskStatus.CONFIRMED, BugTaskStatus.TRIAGED, ] self.subscription_filter.importances = [ BugTaskImportance.LOW, BugTaskImportance.MEDIUM, BugTaskImportance.HIGH, ] self.subscription_filter.tags = [u"foo", u"bar"] self.assertRender( u"This filter allows mail through when:", u" and ".join(self.view.conditions)) def test_render_with_description_and_no_conditions(self): # If a description is set it appears in the content of the dt tag, # surrounded by "curly" quotes. with person_logged_in(self.owner): self.subscription_filter.description = u"The Wait" self.assertRender( u"\u201cThe Wait\u201d allows all mail through.", u"There are no filter conditions!") def test_render_with_no_events_allowed(self): self.view.filters_everything = True self.assertRender( u"This filter allows no mail through.", u"") def test_render_with_description_and_conditions(self): # If a description is set it appears in the content of the dt tag, # surrounded by "curly" quotes. with person_logged_in(self.owner): self.subscription_filter.description = u"The Wait" self.subscription_filter.tags = [u"foo"] self.assertRender( u"\u201cThe Wait\u201d allows mail through when:", u" and ".join(self.view.conditions)) def findEditLinks(self, view): root = html.fromstring(view.render()) return [ node for node in root.findall("dd//a") if node.get("href").endswith("/+edit")] def test_edit_link_for_subscriber(self): # A link to edit the filter is rendered for the subscriber. with person_logged_in(self.subscription.subscriber): subscriber_view = create_initialized_view( self.subscription_filter, "+definition") self.assertNotEqual([], self.findEditLinks(subscriber_view)) def test_edit_link_for_non_subscriber(self): # A link to edit the filter is *not* rendered for anyone but the # subscriber. with person_logged_in(self.factory.makePerson()): non_subscriber_view = create_initialized_view( self.subscription_filter, "+definition") self.assertEqual([], self.findEditLinks(non_subscriber_view)) def test_edit_link_for_anonymous(self): # A link to edit the filter is *not* rendered for anyone but the # subscriber. with anonymous_logged_in(): self.assertEqual([], self.findEditLinks(self.view)) class TestBugSubscriptionFilterEditView( TestBugSubscriptionFilterBase, TestCaseWithFactory): layer = DatabaseFunctionalLayer def test_view_properties(self): # The cancel url and next url will both point to the user's structural # subscription overview page. login_person(self.owner) view = create_initialized_view( self.subscription_filter, name="+edit") self.assertEqual([], view.errors) path = "/~%s/+structural-subscriptions" % self.owner.name self.assertEqual(path, urlparse(view.cancel_url).path) self.assertEqual(path, urlparse(view.next_url).path) def test_edit(self): # The filter can be updated by using the update action. form = { "field.description": "New description", "field.statuses": ["NEW", "INCOMPLETE"], "field.importances": ["LOW", "MEDIUM"], "field.information_types": ["USERDATA"], "field.tags": u"foo bar", "field.find_all_tags": "on", "field.actions.update": "Update", } with person_logged_in(self.owner): view = create_initialized_view( self.subscription_filter, name="+edit", form=form) self.assertEqual([], view.errors) # The subscription filter has been updated. self.assertEqual( u"New description", self.subscription_filter.description) self.assertEqual( frozenset([BugTaskStatus.NEW, BugTaskStatus.INCOMPLETE]), self.subscription_filter.statuses) self.assertEqual( frozenset([BugTaskImportance.LOW, BugTaskImportance.MEDIUM]), self.subscription_filter.importances) self.assertEqual( frozenset([InformationType.USERDATA]), self.subscription_filter.information_types) self.assertEqual( frozenset([u"foo", u"bar"]), self.subscription_filter.tags) self.assertTrue(self.subscription_filter.find_all_tags) def test_delete(self): # The filter can be deleted by using the delete action. form = { "field.actions.delete": "Delete", } with person_logged_in(self.owner): view = create_initialized_view( self.subscription_filter, name="+edit", form=form) self.assertEqual([], view.errors) # The subscription filter has been deleted. self.assertEqual( [self.initial_filter], list(self.subscription.bug_filters)) class TestBugSubscriptionFilterAdvancedFeatures(TestCaseWithFactory): """A base class for testing advanced structural subscription features.""" layer = LaunchpadFunctionalLayer def setUp(self): super(TestBugSubscriptionFilterAdvancedFeatures, self).setUp() self.setUpTarget() def setUpTarget(self): self.target = self.factory.makeProduct() def test_filter_uses_bug_notification_level(self): # A user can specify a bug_notification_level on the +filter form. displayed_levels = [ level for level in BugNotificationLevel.items] for level in displayed_levels: person = self.factory.makePerson() with person_logged_in(person): subscription = self.target.addBugSubscription( person, person) initial_filter = subscription.bug_filters.one() form = { "field.description": "New description", "field.statuses": ["NEW", "INCOMPLETE"], "field.importances": ["LOW", "MEDIUM"], "field.tags": u"foo bar", "field.find_all_tags": "on", 'field.bug_notification_level': level.title, "field.actions.create": "Create", } create_initialized_view( subscription, name="+new-filter", form=form) filters = subscription.bug_filters new_filter = [filter for filter in filters if filter != initial_filter][0] self.assertEqual(filters.count(), 2) self.assertEqual( level, new_filter.bug_notification_level, "Bug notification level of filter should be %s, " "is actually %s." % ( level.name, new_filter.bug_notification_level.name)) class TestBugSubscriptionFilterCreateView(TestCaseWithFactory): layer = DatabaseFunctionalLayer def setUp(self): super(TestBugSubscriptionFilterCreateView, self).setUp() self.owner = self.factory.makePerson(name=u"foo") self.structure = self.factory.makeProduct( owner=self.owner, name=u"bar") with person_logged_in(self.owner): self.subscription = self.structure.addBugSubscription( self.owner, self.owner) def test_view_properties(self): # The cancel url and next url will both point to the user's structural # subscription overview page. login_person(self.owner) view = create_initialized_view( self.subscription, name="+new-filter") self.assertEqual([], view.errors) path = "/~%s/+structural-subscriptions" % self.owner.name self.assertEqual(path, urlparse(view.cancel_url).path) self.assertEqual(path, urlparse(view.next_url).path) def test_create(self): # New filters can be created with +new-filter. initial_filter = self.subscription.bug_filters.one() self.assertEqual( [initial_filter], list(self.subscription.bug_filters)) form = { "field.description": "New description", "field.statuses": ["NEW", "INCOMPLETE"], "field.importances": ["LOW", "MEDIUM"], "field.information_types": ["PRIVATESECURITY"], "field.tags": u"foo bar", "field.find_all_tags": "on", "field.actions.create": "Create", } with person_logged_in(self.owner): view = create_initialized_view( self.subscription, name="+new-filter", form=form) self.assertEqual([], view.errors) # The subscription filter has been created. subscription_filter = [ filter for filter in self.subscription.bug_filters if filter != initial_filter][0] self.assertEqual( u"New description", subscription_filter.description) self.assertEqual( frozenset([BugTaskStatus.NEW, BugTaskStatus.INCOMPLETE]), subscription_filter.statuses) self.assertEqual( frozenset([BugTaskImportance.LOW, BugTaskImportance.MEDIUM]), subscription_filter.importances) self.assertEqual( frozenset([InformationType.PRIVATESECURITY]), subscription_filter.information_types) self.assertEqual( frozenset([u"foo", u"bar"]), subscription_filter.tags) self.assertTrue(subscription_filter.find_all_tags)
ojengwa/oh-mainline
refs/heads/master
vendor/packages/PyJWT/jwt/exceptions.py
15
class InvalidTokenError(Exception): pass class DecodeError(InvalidTokenError): pass class ExpiredSignatureError(InvalidTokenError): pass class InvalidAudienceError(InvalidTokenError): pass class InvalidIssuerError(InvalidTokenError): pass class InvalidIssuedAtError(InvalidTokenError): pass class ImmatureSignatureError(InvalidTokenError): pass class InvalidKeyError(Exception): pass class InvalidAlgorithmError(InvalidTokenError): pass # Compatibility aliases (deprecated) ExpiredSignature = ExpiredSignatureError InvalidAudience = InvalidAudienceError InvalidIssuer = InvalidIssuerError
thehyve/variant
refs/heads/master
eggs/django-1.3.1-py2.7.egg/django/contrib/localflavor/sk/forms.py
344
""" Slovak-specific form helpers """ from django.forms.fields import Select, RegexField from django.utils.translation import ugettext_lazy as _ class SKRegionSelect(Select): """ A select widget widget with list of Slovak regions as choices. """ def __init__(self, attrs=None): from sk_regions import REGION_CHOICES super(SKRegionSelect, self).__init__(attrs, choices=REGION_CHOICES) class SKDistrictSelect(Select): """ A select widget with list of Slovak districts as choices. """ def __init__(self, attrs=None): from sk_districts import DISTRICT_CHOICES super(SKDistrictSelect, self).__init__(attrs, choices=DISTRICT_CHOICES) class SKPostalCodeField(RegexField): """ A form field that validates its input as Slovak postal code. Valid form is XXXXX or XXX XX, where X represents integer. """ default_error_messages = { 'invalid': _(u'Enter a postal code in the format XXXXX or XXX XX.'), } def __init__(self, *args, **kwargs): super(SKPostalCodeField, self).__init__(r'^\d{5}$|^\d{3} \d{2}$', max_length=None, min_length=None, *args, **kwargs) def clean(self, value): """ Validates the input and returns a string that contains only numbers. Returns an empty string for empty values. """ v = super(SKPostalCodeField, self).clean(value) return v.replace(' ', '')
lirenjie95/DataMining
refs/heads/master
ImageRecognition/face_recognize.py
2
#!/usr/bin/env python # -*- coding: utf-8 -*- '''Author = Renjie Li''' import os import cv2 from math import sqrt from time import time from PIL import Image,ImageDraw,ImageFilter,ImageFont threshold = (1024+512) #detectFaces()返回图像中所有人脸的矩形坐标(矩形左上、右下顶点) #使用haar特征的两个级联分类器需要灰度图作为输入 def detectFaces(image_name): refer = Image.open(image_name) img = refer.filter(ImageFilter.SHARPEN) #先进行一次锐化 img.save('transfer.png') refer = cv2.imread('transfer.png') if refer.ndim == 3: img = cv2.cvtColor(refer, cv2.COLOR_BGR2GRAY) gray = cv2.equalizeHist(img)#进行均匀化 else: gray = refer #如果refer维度为3,说明是RGB图,先转化为灰度图gray face_cascade = cv2.CascadeClassifier("haarcascade_frontalface_alt.xml") faces = face_cascade.detectMultiScale(gray,1.005,1)#特征的最小、最大检测窗口 result = [] for (x,y,width,height) in faces: if width*height>threshold: continue #如果图像过大,那么很可能不是人脸,直接去除 result.append((x,y,x+width,y+height)) face_cascade = cv2.CascadeClassifier("haarcascade_profileface.xml") faces = face_cascade.detectMultiScale(gray,1.005,1) for (x,y,width,height) in faces: #两个方法认同的都加入结果序列 if width*height>threshold: continue flag = True for (x0,y0,xwidth,yheight) in result: width0 = xwidth-x0 height0 = yheight-y0 cal = abs(x-x0)+abs(y-y0)+abs(width-width0)+abs(height-height0) #如果两张脸相近达到一定的下限,那么说明出现了重复 if cal<sqrt(threshold): flag = False break if flag: result.append((x,y,x+width,y+height)) return result #在原图像上画矩形,框出所有人脸,并进行标号 #调用Image模块的draw方法,Image.open获取图像句柄,ImageDraw.Draw获取该图像的draw实例 #调用rectangle方法画矩形(矩形的坐标即detectFaces返回的坐标),outline是矩形线条颜色(B,G,R) #调用rectangle方法进行数据标号 def FindFaces(image_name): faces = detectFaces(image_name) faces.sort(key=lambda x:(x[1],x[0])) #按照y坐标为第一关键字,x坐标为第二关键字排序 if faces: img = Image.open(image_name) draw_instance = ImageDraw.Draw(img) font = ImageFont.truetype("arial.ttf",12,encoding="utf-8")#设置字体 #将人脸保存在save_dir目录下。 #Image模块:Image.open获取图像句柄,crop剪切图像(剪切的区域就是detectFaces返回的坐标) save_dir = image_name.split('.')[0]+"_faces" os.mkdir(save_dir) count = 0 for (x0,y0,x1,y1) in faces: count = count+1 draw_instance.rectangle((x0,y0,x1,y1),outline=(255,0,0)) #红色边框 draw_instance.text((x0,y0),str(count),fill=(0,255,0),font=font) #绿色数字标识 file_name = os.path.join(save_dir,str(count)+".jpg") Image.open(image_name).crop((x0,y0,x1,y1)).save(file_name) print count,'faces found.' img.save('Drawfaces_'+image_name) else: print 'Not found.' #主函数,先读取要处理的图像的名字,然后调用FindFaces(),并用time库记录程序运行时间 if __name__ == '__main__': image_name = raw_input('Please input the name of image:') clock = time() if len(image_name)==0: image_name = 'GreatWall.png'#无输入则选用默认文件名 try: FindFaces(image_name) except: print 'Error.' print 'The script runs',int(time()-clock),'second(s).'
Venturi/cms
refs/heads/master
env/lib/python2.7/site-packages/djangocms_flash/models.py
2
import os import re from django.db import models from django.utils.translation import ugettext_lazy as _ from cms.models import CMSPlugin try: from cms.models import get_plugin_media_path except ImportError: def get_plugin_media_path(instance, filename): """ See cms.models.pluginmodel.get_plugin_media_path on django CMS 3.0.4+ for information """ return instance.get_media_path(filename) from cms.utils.compat.dj import python_2_unicode_compatible @python_2_unicode_compatible class Flash(CMSPlugin): file = models.FileField( _('file'), upload_to=get_plugin_media_path, help_text=_('use swf file')) width = models.CharField(_('width'), max_length=6) height = models.CharField(_('height'), max_length=6) def __str__(self): return u"%s" % os.path.basename(self.file.path) def get_height(self): return fix_unit(self.height) def get_width(self): return fix_unit(self.width) def fix_unit(value): if not re.match(r'.*[0-9]$', value): # no unit, add px return value + "px" return value
beres/waliki
refs/heads/master
waliki/slides/tests.py
24123
from django.test import TestCase # Create your tests here.
jdhp-sap/data-pipeline-standalone-scripts
refs/heads/master
datapipe/benchmark/__init__.py
2
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org) # This script is provided under the terms and conditions of the MIT license: # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. __all__ = ['assess']
JFriel/honours_project
refs/heads/master
venv/lib/python2.7/site-packages/numpy/doc/structured_arrays.py
33
""" ================= Structured Arrays ================= Introduction ============ Numpy provides powerful capabilities to create arrays of structured datatype. These arrays permit one to manipulate the data by named fields. A simple example will show what is meant.: :: >>> x = np.array([(1,2.,'Hello'), (2,3.,"World")], ... dtype=[('foo', 'i4'),('bar', 'f4'), ('baz', 'S10')]) >>> x array([(1, 2.0, 'Hello'), (2, 3.0, 'World')], dtype=[('foo', '>i4'), ('bar', '>f4'), ('baz', '|S10')]) Here we have created a one-dimensional array of length 2. Each element of this array is a structure that contains three items, a 32-bit integer, a 32-bit float, and a string of length 10 or less. If we index this array at the second position we get the second structure: :: >>> x[1] (2,3.,"World") Conveniently, one can access any field of the array by indexing using the string that names that field. :: >>> y = x['bar'] >>> y array([ 2., 3.], dtype=float32) >>> y[:] = 2*y >>> y array([ 4., 6.], dtype=float32) >>> x array([(1, 4.0, 'Hello'), (2, 6.0, 'World')], dtype=[('foo', '>i4'), ('bar', '>f4'), ('baz', '|S10')]) In these examples, y is a simple float array consisting of the 2nd field in the structured type. But, rather than being a copy of the data in the structured array, it is a view, i.e., it shares exactly the same memory locations. Thus, when we updated this array by doubling its values, the structured array shows the corresponding values as doubled as well. Likewise, if one changes the structured array, the field view also changes: :: >>> x[1] = (-1,-1.,"Master") >>> x array([(1, 4.0, 'Hello'), (-1, -1.0, 'Master')], dtype=[('foo', '>i4'), ('bar', '>f4'), ('baz', '|S10')]) >>> y array([ 4., -1.], dtype=float32) Defining Structured Arrays ========================== One defines a structured array through the dtype object. There are **several** alternative ways to define the fields of a record. Some of these variants provide backward compatibility with Numeric, numarray, or another module, and should not be used except for such purposes. These will be so noted. One specifies record structure in one of four alternative ways, using an argument (as supplied to a dtype function keyword or a dtype object constructor itself). This argument must be one of the following: 1) string, 2) tuple, 3) list, or 4) dictionary. Each of these is briefly described below. 1) String argument. In this case, the constructor expects a comma-separated list of type specifiers, optionally with extra shape information. The fields are given the default names 'f0', 'f1', 'f2' and so on. The type specifiers can take 4 different forms: :: a) b1, i1, i2, i4, i8, u1, u2, u4, u8, f2, f4, f8, c8, c16, a<n> (representing bytes, ints, unsigned ints, floats, complex and fixed length strings of specified byte lengths) b) int8,...,uint8,...,float16, float32, float64, complex64, complex128 (this time with bit sizes) c) older Numeric/numarray type specifications (e.g. Float32). Don't use these in new code! d) Single character type specifiers (e.g H for unsigned short ints). Avoid using these unless you must. Details can be found in the Numpy book These different styles can be mixed within the same string (but why would you want to do that?). Furthermore, each type specifier can be prefixed with a repetition number, or a shape. In these cases an array element is created, i.e., an array within a record. That array is still referred to as a single field. An example: :: >>> x = np.zeros(3, dtype='3int8, float32, (2,3)float64') >>> x array([([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]), ([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]), ([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])], dtype=[('f0', '|i1', 3), ('f1', '>f4'), ('f2', '>f8', (2, 3))]) By using strings to define the record structure, it precludes being able to name the fields in the original definition. The names can be changed as shown later, however. 2) Tuple argument: The only relevant tuple case that applies to record structures is when a structure is mapped to an existing data type. This is done by pairing in a tuple, the existing data type with a matching dtype definition (using any of the variants being described here). As an example (using a definition using a list, so see 3) for further details): :: >>> x = np.zeros(3, dtype=('i4',[('r','u1'), ('g','u1'), ('b','u1'), ('a','u1')])) >>> x array([0, 0, 0]) >>> x['r'] array([0, 0, 0], dtype=uint8) In this case, an array is produced that looks and acts like a simple int32 array, but also has definitions for fields that use only one byte of the int32 (a bit like Fortran equivalencing). 3) List argument: In this case the record structure is defined with a list of tuples. Each tuple has 2 or 3 elements specifying: 1) The name of the field ('' is permitted), 2) the type of the field, and 3) the shape (optional). For example:: >>> x = np.zeros(3, dtype=[('x','f4'),('y',np.float32),('value','f4',(2,2))]) >>> x array([(0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]]), (0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]]), (0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]])], dtype=[('x', '>f4'), ('y', '>f4'), ('value', '>f4', (2, 2))]) 4) Dictionary argument: two different forms are permitted. The first consists of a dictionary with two required keys ('names' and 'formats'), each having an equal sized list of values. The format list contains any type/shape specifier allowed in other contexts. The names must be strings. There are two optional keys: 'offsets' and 'titles'. Each must be a correspondingly matching list to the required two where offsets contain integer offsets for each field, and titles are objects containing metadata for each field (these do not have to be strings), where the value of None is permitted. As an example: :: >>> x = np.zeros(3, dtype={'names':['col1', 'col2'], 'formats':['i4','f4']}) >>> x array([(0, 0.0), (0, 0.0), (0, 0.0)], dtype=[('col1', '>i4'), ('col2', '>f4')]) The other dictionary form permitted is a dictionary of name keys with tuple values specifying type, offset, and an optional title. :: >>> x = np.zeros(3, dtype={'col1':('i1',0,'title 1'), 'col2':('f4',1,'title 2')}) >>> x array([(0, 0.0), (0, 0.0), (0, 0.0)], dtype=[(('title 1', 'col1'), '|i1'), (('title 2', 'col2'), '>f4')]) Accessing and modifying field names =================================== The field names are an attribute of the dtype object defining the structure. For the last example: :: >>> x.dtype.names ('col1', 'col2') >>> x.dtype.names = ('x', 'y') >>> x array([(0, 0.0), (0, 0.0), (0, 0.0)], dtype=[(('title 1', 'x'), '|i1'), (('title 2', 'y'), '>f4')]) >>> x.dtype.names = ('x', 'y', 'z') # wrong number of names <type 'exceptions.ValueError'>: must replace all names at once with a sequence of length 2 Accessing field titles ==================================== The field titles provide a standard place to put associated info for fields. They do not have to be strings. :: >>> x.dtype.fields['x'][2] 'title 1' Accessing multiple fields at once ==================================== You can access multiple fields at once using a list of field names: :: >>> x = np.array([(1.5,2.5,(1.0,2.0)),(3.,4.,(4.,5.)),(1.,3.,(2.,6.))], dtype=[('x','f4'),('y',np.float32),('value','f4',(2,2))]) Notice that `x` is created with a list of tuples. :: >>> x[['x','y']] array([(1.5, 2.5), (3.0, 4.0), (1.0, 3.0)], dtype=[('x', '<f4'), ('y', '<f4')]) >>> x[['x','value']] array([(1.5, [[1.0, 2.0], [1.0, 2.0]]), (3.0, [[4.0, 5.0], [4.0, 5.0]]), (1.0, [[2.0, 6.0], [2.0, 6.0]])], dtype=[('x', '<f4'), ('value', '<f4', (2, 2))]) The fields are returned in the order they are asked for.:: >>> x[['y','x']] array([(2.5, 1.5), (4.0, 3.0), (3.0, 1.0)], dtype=[('y', '<f4'), ('x', '<f4')]) Filling structured arrays ========================= Structured arrays can be filled by field or row by row. :: >>> arr = np.zeros((5,), dtype=[('var1','f8'),('var2','f8')]) >>> arr['var1'] = np.arange(5) If you fill it in row by row, it takes a take a tuple (but not a list or array!):: >>> arr[0] = (10,20) >>> arr array([(10.0, 20.0), (1.0, 0.0), (2.0, 0.0), (3.0, 0.0), (4.0, 0.0)], dtype=[('var1', '<f8'), ('var2', '<f8')]) Record Arrays ============= For convenience, numpy provides "record arrays" which allow one to access fields of structured arrays by attribute rather than by index. Record arrays are structured arrays wrapped using a subclass of ndarray, :class:`numpy.recarray`, which allows field access by attribute on the array object, and record arrays also use a special datatype, :class:`numpy.record`, which allows field access by attribute on the individual elements of the array. The simplest way to create a record array is with :func:`numpy.rec.array`: :: >>> recordarr = np.rec.array([(1,2.,'Hello'),(2,3.,"World")], ... dtype=[('foo', 'i4'),('bar', 'f4'), ('baz', 'S10')]) >>> recordarr.bar array([ 2., 3.], dtype=float32) >>> recordarr[1:2] rec.array([(2, 3.0, 'World')], dtype=[('foo', '<i4'), ('bar', '<f4'), ('baz', 'S10')]) >>> recordarr[1:2].foo array([2], dtype=int32) >>> recordarr.foo[1:2] array([2], dtype=int32) >>> recordarr[1].baz 'World' numpy.rec.array can convert a wide variety of arguments into record arrays, including normal structured arrays: :: >>> arr = array([(1,2.,'Hello'),(2,3.,"World")], ... dtype=[('foo', 'i4'), ('bar', 'f4'), ('baz', 'S10')]) >>> recordarr = np.rec.array(arr) The numpy.rec module provides a number of other convenience functions for creating record arrays, see :ref:`record array creation routines <routines.array-creation.rec>`. A record array representation of a structured array can be obtained using the appropriate :ref:`view`: :: >>> arr = np.array([(1,2.,'Hello'),(2,3.,"World")], ... dtype=[('foo', 'i4'),('bar', 'f4'), ('baz', 'a10')]) >>> recordarr = arr.view(dtype=dtype((np.record, arr.dtype)), ... type=np.recarray) For convenience, viewing an ndarray as type `np.recarray` will automatically convert to `np.record` datatype, so the dtype can be left out of the view: :: >>> recordarr = arr.view(np.recarray) >>> recordarr.dtype dtype((numpy.record, [('foo', '<i4'), ('bar', '<f4'), ('baz', 'S10')])) To get back to a plain ndarray both the dtype and type must be reset. The following view does so, taking into account the unusual case that the recordarr was not a structured type: :: >>> arr2 = recordarr.view(recordarr.dtype.fields or recordarr.dtype, np.ndarray) Record array fields accessed by index or by attribute are returned as a record array if the field has a structured type but as a plain ndarray otherwise. :: >>> recordarr = np.rec.array([('Hello', (1,2)),("World", (3,4))], ... dtype=[('foo', 'S6'),('bar', [('A', int), ('B', int)])]) >>> type(recordarr.foo) <type 'numpy.ndarray'> >>> type(recordarr.bar) <class 'numpy.core.records.recarray'> Note that if a field has the same name as an ndarray attribute, the ndarray attribute takes precedence. Such fields will be inaccessible by attribute but may still be accessed by index. """ from __future__ import division, absolute_import, print_function
elsigh/browserscope
refs/heads/master
gaeunit.py
9
#!/usr/bin/env python ''' GAEUnit: Google App Engine Unit Test Framework Usage: 1. Put gaeunit.py into your application directory. Modify 'app.yaml' by adding the following mapping below the 'handlers:' section: - url: /test.* script: gaeunit.py 2. Write your own test cases by extending unittest.TestCase. 3. Launch the development web server. To run all tests, point your browser to: http://localhost:8080/test (Modify the port if necessary.) For plain text output add '?format=plain' to the above URL. See README.TXT for information on how to run specific tests. 4. The results are displayed as the tests are run. Visit http://code.google.com/p/gaeunit for more information and updates. ------------------------------------------------------------------------------ Copyright (c) 2008-2009, George Lei and Steven R. Farley. All rights reserved. Distributed under the following BSD license: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ------------------------------------------------------------------------------ ''' __author__ = "George Lei and Steven R. Farley" __email__ = "[email protected]" __version__ = "#Revision: 1.2.8 $"[11:-2] __copyright__= "Copyright (c) 2008-2009, George Lei and Steven R. Farley" __license__ = "BSD" __url__ = "http://code.google.com/p/gaeunit" import base64 import cgi import logging import os import re import sys import time import unittest import urlparse import appengine_config from django.template import add_to_builtins add_to_builtins('base.custom_filters') import django.utils.simplejson from django.test import client from xml.sax.saxutils import unescape from google.appengine.ext import webapp from google.appengine.api import apiproxy_stub_map from google.appengine.api import datastore_file_stub from google.appengine.api import urlfetch from google.appengine.api.memcache import memcache_stub from google.appengine.api.taskqueue import taskqueue_stub from google.appengine.ext.webapp.util import run_wsgi_app _LOCAL_TEST_DIR = 'test' # location of files _WEB_TEST_DIR = '/test' # how you want to refer to tests on your web server _LOCAL_DJANGO_TEST_DIR = '../../gaeunit/test' # or: # _WEB_TEST_DIR = '/u/test' # then in app.yaml: # - url: /u/test.* # script: gaeunit.py ################################################## ## Django support def django_test_runner(request): unknown_args = [arg for (arg, v) in request.REQUEST.items() if arg not in ("format", "package", "name")] if len(unknown_args) > 0: errors = [] for arg in unknown_args: errors.append(_log_error("The request parameter '%s' is not valid." % arg)) from django.http import HttpResponseNotFound return HttpResponseNotFound(" ".join(errors)) format = request.REQUEST.get("format", "html") package_name = request.REQUEST.get("package") test_name = request.REQUEST.get("name") if format == "html": return _render_html(package_name, test_name) elif format == "plain": return _render_plain(package_name, test_name) else: error = _log_error("The format '%s' is not valid." % cgi.escape(format)) from django.http import HttpResponseServerError return HttpResponseServerError(error) def _render_html(package_name, test_name): suite, error = _create_suite(package_name, test_name, _LOCAL_DJANGO_TEST_DIR) if not error: content = _MAIN_PAGE_CONTENT % (_test_suite_to_json(suite), _WEB_TEST_DIR, __version__) from django.http import HttpResponse return HttpResponse(content) else: from django.http import HttpResponseServerError return HttpResponseServerError(error) def _render_plain(package_name, test_name): suite, error = _create_suite(package_name, test_name, _LOCAL_DJANGO_TEST_DIR) if not error: from django.http import HttpResponse response = HttpResponse() response["Content-Type"] = "text/plain" runner = unittest.TextTestRunner(response) response.write("====================\n" \ "GAEUnit Test Results\n" \ "====================\n\n") _run_test_suite(runner, suite) return response else: from django.http import HttpResponseServerError return HttpResponseServerError(error) def django_json_test_runner(request): from django.http import HttpResponse response = HttpResponse() response["Content-Type"] = "text/javascript" test_name = request.REQUEST.get("name") _load_default_test_modules(_LOCAL_DJANGO_TEST_DIR) suite = unittest.defaultTestLoader.loadTestsFromName(test_name) runner = JsonTestRunner() _run_test_suite(runner, suite) runner.result.render_to(response) return response ######################################################## class GAETestCase(unittest.TestCase): """TestCase parent class that provides the following assert functions * assertHtmlEqual - compare two HTML string ignoring the out-of-element blanks and other differences acknowledged in standard. """ def assertHtmlEqual(self, html1, html2): if html1 is None or html2 is None: raise self.failureException, "argument is None" html1 = self._formalize(html1) html2 = self._formalize(html2) if not html1 == html2: error_msg = self._findHtmlDifference(html1, html2) error_msg = "HTML contents are not equal" + error_msg raise self.failureException, error_msg def _formalize(self, html): html = html.replace("\r\n", " ").replace("\n", " ") html = re.sub(r"[ \t]+", " ", html) html = re.sub(r"[ ]*>[ ]*", ">", html) html = re.sub(r"[ ]*<[ ]*", "<", html) return unescape(html) def _findHtmlDifference(self, html1, html2): display_window_width = 41 html1_len = len(html1) html2_len = len(html2) for i in range(html1_len): if i >= html2_len or html1[i] != html2[i]: break if html1_len < html2_len: html1 += " " * (html2_len - html1_len) length = html2_len else: html2 += " " * (html1_len - html2_len) length = html1_len if length <= display_window_width: return "\n%s\n%s\n%s^" % (html1, html2, "_" * i) start = i - display_window_width / 2 end = i + 1 + display_window_width / 2 if start < 0: adjust = -start start += adjust end += adjust pointer_pos = i leading_dots = "" ending_dots = "..." elif end > length: adjust = end - length start -= adjust end -= adjust pointer_pos = i - start + 3 leading_dots = "..." ending_dots = "" else: pointer_pos = i - start + 3 leading_dots = "..." ending_dots = "..." return '\n%s%s%s\n%s\n%s^' % (leading_dots, html1[start:end], ending_dots, leading_dots+html2[start:end]+ending_dots, "_" * (i - start + len(leading_dots))) assertHtmlEquals = assertHtmlEqual ############################################################################## # Main request handler ############################################################################## class MainTestPageHandler(webapp.RequestHandler): def get(self): unknown_args = [arg for arg in self.request.arguments() if arg not in ("format", "package", "name")] if len(unknown_args) > 0: errors = [] for arg in unknown_args: errors.append(_log_error("The request parameter '%s' is not valid." % arg)) self.error(404) self.response.out.write(" ".join(errors)) return format = self.request.get("format", "html") package_name = self.request.get("package") test_name = self.request.get("name") if format == "html": self._render_html(package_name, test_name) elif format == "plain": self._render_plain(package_name, test_name) else: error = _log_error("The format '%s' is not valid." % cgi.escape(format)) self.error(404) self.response.out.write(error) def _render_html(self, package_name, test_name): suite, error = _create_suite(package_name, test_name, _LOCAL_TEST_DIR) if not error: self.response.out.write(_MAIN_PAGE_CONTENT % (_test_suite_to_json(suite), _WEB_TEST_DIR, __version__)) else: self.error(404) self.response.out.write(error) def _render_plain(self, package_name, test_name): self.response.headers["Content-Type"] = "text/plain" runner = unittest.TextTestRunner(self.response.out) suite, error = _create_suite(package_name, test_name, _LOCAL_TEST_DIR) if not error: self.response.out.write("====================\n" \ "GAEUnit Test Results\n" \ "====================\n\n") _run_test_suite(runner, suite) else: self.error(404) self.response.out.write(error) ############################################################################## # JSON test classes ############################################################################## class JsonTestResult(unittest.TestResult): def __init__(self): unittest.TestResult.__init__(self) self.testNumber = 0 def render_to(self, stream): result = { 'runs': self.testsRun, 'total': self.testNumber, 'errors': self._error_tuples_to_dicts(self.errors), 'failures': self._error_tuples_to_dicts(self.failures), # TODO(slamm): Add times. # TODO(slamm): Add logs. } stream.write(django.utils.simplejson.dumps(result)) def _error_tuples_to_dicts(self, errors): """Convert errors and failures into simple datastructures.""" return [{ 'short_description': test.shortDescription() or '', 'traceback': cgi.escape(traceback_string), } for test, traceback_string in errors] class JsonTestRunner: def run(self, test): self.result = JsonTestResult() self.result.testNumber = test.countTestCases() startTime = time.time() test(self.result) stopTime = time.time() timeTaken = stopTime - startTime return self.result class JsonTestRunHandler(webapp.RequestHandler): def get(self): self.response.headers["Content-Type"] = "text/javascript" test_name = self.request.get("name") _load_default_test_modules(_LOCAL_TEST_DIR) suite = unittest.defaultTestLoader.loadTestsFromName(test_name) runner = JsonTestRunner() _run_test_suite(runner, suite) runner.result.render_to(self.response.out) # This is not used by the HTML page, but it may be useful for other client test runners. class JsonTestListHandler(webapp.RequestHandler): def get(self): self.response.headers["Content-Type"] = "text/javascript" suite, error = _create_suite(self.request) #TODO if not error: self.response.out.write(_test_suite_to_json(suite)) else: self.error(404) self.response.out.write(error) ############################################################################## # Module helper functions ############################################################################## def _create_suite(package_name, test_name, test_dir): loader = unittest.defaultTestLoader suite = unittest.TestSuite() error = None try: if not package_name and not test_name: modules = _load_default_test_modules(test_dir) for module in modules: suite.addTest(loader.loadTestsFromModule(module)) elif test_name: _load_default_test_modules(test_dir) suite.addTest(loader.loadTestsFromName(test_name)) elif package_name: package = reload(__import__(package_name)) module_names = package.__all__ for module_name in module_names: suite.addTest(loader.loadTestsFromName('%s.%s' % (package_name, module_name))) if suite.countTestCases() == 0: raise Exception("'%s' is not found or does not contain any tests." % \ (test_name or package_name or 'local directory: \"%s\"' % _LOCAL_TEST_DIR)) except Exception: import traceback error = traceback.format_exc() print error _log_error(error) return (suite, error) def _load_default_test_modules(test_dir): if not test_dir in sys.path: sys.path.append(test_dir) module_names = [mf[0:-3] for mf in os.listdir(test_dir) if mf.endswith(".py")] emacs_temp_files = [mf for mf in module_names if mf.startswith(".#")] if emacs_temp_files: raise Exception("Found emacs temporary files (indicates unsaved files): %s" % ", ".join(["%s.py" % x for x in emacs_temp_files])) return [reload(__import__(name)) for name in module_names] def _get_tests_from_suite(suite): for test in suite: if isinstance(test, unittest.TestSuite): for t in _get_tests_from_suite(test): yield t else: yield test def _test_suite_to_json(suite): """Convert a test suite to a json string. Returns: A json string of the following python structure: {module_name: {class_name: [method_name, ...], ...}, ...} """ test_dict = {} for test in _get_tests_from_suite(suite): test_dict.setdefault( type(test).__module__, {}).setdefault( type(test).__name__, []).append( test._testMethodName) return django.utils.simplejson.dumps(test_dict) class TestingTaskQueueService(taskqueue_stub.TaskQueueServiceStub): def _Dynamic_Add(self, request, response): logging.info("gaeunit faking a taskqueue Add") taskqueue_stub.TaskQueueServiceStub._Dynamic_Add( self, request, response) self._ExecuteTasksImmediately() def _Dynamic_BulkAdd(self, request, response): logging.info("gaeunit faking a taskqueue BulkAdd") taskqueue_stub.TaskQueueServiceStub._Dynamic_BulkAdd( self, request, response) self._ExecuteTasksImmediately() def _ExecuteTasksImmediately(self): logging.info("gaeunit executing tasks immediately") if hasattr(self, '_is_executing') and self._is_executing: return self._is_executing = True try: while 1: task = None for queue in self.GetQueues(): queue_name = queue['name'] tasks = self.GetTasks(queue_name) if tasks: task = tasks[0] break if task is None: break headers = dict((k.lower(), v) for k, v in task['headers']) content_type = headers['content-type'] c = client.Client() if task['method'] == 'GET': try: # Only Django 1.1 or greater can handle a query string. url_parts = list(urlparse.urlsplit(task['url'])) query = url_parts[3] query_data = {} if query: query_data = dict( kv.split('=') for kv in url_parts[3].split('&')) url_parts[3] = '' url = urlparse.urlunsplit(url_parts) logging.info("Execute task immediately: GET %s, query=%s", url, query_data) c.get(url, query_data, content_type=content_type) except: import traceback error = traceback.format_exc() _log_error(error) raise elif task['method'] == 'POST': body = base64.b64decode(task['body']) logging.info("Execute task immediately: POST %s, body=%s", task['url'], body) c.post(task['url'], data=body, content_type=content_type) else: raise NotImplementedError self.DeleteTask(queue_name, task['name']) finally: self._is_executing = False def _run_test_suite(runner, suite): """Run the test suite. Preserve the current development apiproxy, create a new apiproxy and replace the datastore with a temporary one that will be used for this test suite, run the test suite, and restore the development apiproxy. This isolates the test datastore from the development datastore. """ original_apiproxy = apiproxy_stub_map.apiproxy try: apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap() apiproxy_stub_map.apiproxy.RegisterStub( 'datastore', datastore_file_stub.DatastoreFileStub( 'GAEUnitDataStore', datastore_file=None, trusted=True)) apiproxy_stub_map.apiproxy.RegisterStub( 'memcache', memcache_stub.MemcacheServiceStub()) apiproxy_stub_map.apiproxy.RegisterStub( 'taskqueue', TestingTaskQueueService(root_path='.')) # Allow the other services to be used as-is for tests. for name in ('user', 'urlfetch', 'mail', 'images'): apiproxy_stub_map.apiproxy.RegisterStub( name, original_apiproxy.GetStub(name)) # TODO(slamm): add coverage tool here. runner.run(suite) finally: apiproxy_stub_map.apiproxy = original_apiproxy def _log_error(s): logging.warn(s) return s ################################################ # Browser HTML, CSS, and Javascript ################################################ # This string uses Python string formatting, so be sure to escape percents as %%. _MAIN_PAGE_CONTENT = """ <html> <head> <style> body {font-family:arial,sans-serif} #title {font-family:"Times New Roman","Times Roman",TimesNR,times,serif; font-size:28px; font-weight:bold} #version {font-size:87%%} #weblink {font-style:italic; padding-top:7px; padding-bottom:7px} #results {padding-top:20px; margin:0pt; font-weight:bold;} #testindicator {width:750px; height:16px; border-style:solid; border-width:2px 1px 1px 2px; background-color:#f8f8f8;} #footerarea {font-size:83%%; padding-top:25px} #errorarea {padding-top:25px} .error {border-color: #c3d9ff; border-style: solid; border-width: 2px 1px 2px 1px; padding:1px; margin:0pt; text-align:left} .errtitle {background-color:#c3d9ff; font-weight:bold} pre { white-space: pre-wrap; /* css-3 */ white-space: -moz-pre-wrap; /* Mozilla, since 1999 */ white-space: -pre-wrap; /* Opera 4-6 */ white-space: -o-pre-wrap; /* Opera 7 */ word-wrap: break-word; /* Internet Explorer 5.5+ */ } </style> <script language="javascript" type="text/javascript"> var testsToRun = %s; var totalRuns = 0; var totalErrors = 0; var totalFailures = 0; function newXmlHttp() { try { return new XMLHttpRequest(); } catch(e) {} try { return new ActiveXObject("Msxml2.XMLHTTP"); } catch (e) {} try { return new ActiveXObject("Microsoft.XMLHTTP"); } catch (e) {} alert("XMLHttpRequest not supported"); return null; } function requestTestRun(moduleName, className, methodName) { var methodSuffix = ""; if (methodName) { methodSuffix = "." + methodName; } var xmlHttp = newXmlHttp(); xmlHttp.open("GET", "%s/run?name=" + moduleName + "." + className + methodSuffix, true); xmlHttp.onreadystatechange = function() { if (xmlHttp.readyState != 4) { return; } if (xmlHttp.status == 200) { var result = eval("(" + xmlHttp.responseText + ")"); totalRuns += parseInt(result.runs); totalErrors += result.errors.length; totalFailures += result.failures.length; document.getElementById("testran").innerHTML = totalRuns; document.getElementById("testerror").innerHTML = totalErrors; document.getElementById("testfailure").innerHTML = totalFailures; if (totalErrors == 0 && totalFailures == 0) { testSucceed(); } else { testFailed(); } var error_groups = [['ERROR', result.errors], ['FAILURE', result.failures]]; var details = ""; for (var i=0; i < error_groups.length; i++) { var label = error_groups[i][0]; var items = error_groups[i][1]; var module_url = '?' + 'name=' + moduleName; var class_url = module_url + '.' + className; var method_url = class_url + '.' + methodName; for (var j=0; j < items.length; j++) { details += '<p><div class="error"><div class="errtitle">' + label + ' <a href="' + module_url + '">' + moduleName + '</a>' + ' . <a href="' + class_url + '">' + className + '</a>' + ' . <a href="' + method_url + '">' + methodName + '</a>' + items[j].short_description + '</div><div class="errdetail"><pre>' + items[j].traceback + '</pre></div></div></p>'; } } var errorArea = document.getElementById("errorarea"); errorArea.innerHTML += details; } else { document.getElementById("errorarea").innerHTML = xmlHttp.responseText; testFailed(); } }; xmlHttp.send(null); } function testFailed() { document.getElementById("testindicator").style.backgroundColor="red"; } function testSucceed() { document.getElementById("testindicator").style.backgroundColor="green"; } function runTests() { // Run each test asynchronously (concurrently). var totalTests = 0; for (var moduleName in testsToRun) { var classes = testsToRun[moduleName]; for (var className in classes) { // TODO: Optimize for the case where tests are run by class so we don't // have to always execute each method separately. This should be // possible when we have a UI that allows the user to select tests // by module, class, and method. //requestTestRun(moduleName, className); methods = classes[className]; for (var i = 0; i < methods.length; i++) { totalTests += 1; var methodName = methods[i]; requestTestRun(moduleName, className, methodName); } } } document.getElementById("testtotal").innerHTML = totalTests; } </script> <title>GAEUnit: Google App Engine Unit Test Framework</title> </head> <body onload="runTests()"> <div id="headerarea"> <div id="title">GAEUnit: Google App Engine Unit Test Framework</div> <div id="version">Version %s</div> </div> <div id="resultarea"> <table id="results"><tbody> <tr><td colspan="3"><div id="testindicator"> </div></td</tr> <tr> <td>Runs: <span id="testran">0</span>/<span id="testtotal">0</span></td> <td>Errors: <span id="testerror">0</span></td> <td>Failures: <span id="testfailure">0</span></td> </tr> </tbody></table> </div> <div id="errorarea"></div> <div id="footerarea"> <div id="weblink"> <p> Please visit the <a href="http://code.google.com/p/gaeunit">project home page</a> for the latest version or to report problems. </p> <p> Copyright 2008-2009 <a href="mailto:[email protected]">George Lei</a> and <a href="mailto:[email protected]">Steven R. Farley</a> </p> </div> </div> </body> </html> """ ############################################################################## # Script setup and execution ############################################################################## application = webapp.WSGIApplication([('%s' % _WEB_TEST_DIR, MainTestPageHandler), ('%s/run' % _WEB_TEST_DIR, JsonTestRunHandler), ('%s/list' % _WEB_TEST_DIR, JsonTestListHandler)], debug=True) def main(): run_wsgi_app(application) if __name__ == '__main__': main()
modoboa/modoboa
refs/heads/master
modoboa/admin/api/v2/serializers.py
1
"""Admin API v2 serializers.""" import os from django.conf import settings from django.core.exceptions import ValidationError from django.utils.translation import ugettext as _ from django.contrib.auth import password_validation from rest_framework import serializers from modoboa.admin.api.v1 import serializers as v1_serializers from modoboa.core import models as core_models, signals as core_signals from modoboa.lib import exceptions as lib_exceptions from modoboa.lib import fields as lib_fields from modoboa.lib import validators, web_utils from modoboa.lib.sysutils import exec_cmd from modoboa.parameters import tools as param_tools from ... import constants, lib, models class CreateDomainAdminSerializer(serializers.Serializer): """Sub serializer for domain administrator creation.""" username = serializers.CharField() password = serializers.CharField(required=False) with_mailbox = serializers.BooleanField(default=False) with_aliases = serializers.BooleanField(default=False) class DomainSerializer(v1_serializers.DomainSerializer): """Domain serializer for v2 API.""" domain_admin = CreateDomainAdminSerializer(required=False) class Meta(v1_serializers.DomainSerializer.Meta): fields = v1_serializers.DomainSerializer.Meta.fields + ( "domain_admin", ) def create(self, validated_data): """Create administrator and other stuff if needed.""" domain_admin = validated_data.pop("domain_admin", None) domain = super().create(validated_data) if not domain_admin: return domain # 1. Create a domain administrator username = "%s@%s" % (domain_admin["username"], domain.name) try: da = core_models.User.objects.get(username=username) except core_models.User.DoesNotExist: pass else: raise lib_exceptions.Conflict( _("User '%s' already exists") % username) user = self.context["request"].user core_signals.can_create_object.send( self.__class__, context=user, klass=models.Mailbox) da = core_models.User(username=username, email=username, is_active=True) password = domain_admin.get("password") if password is None: password = param_tools.get_global_parameter( "default_password", app="core") da.set_password(password) da.save() da.role = "DomainAdmins" da.post_create(user) # 2. Create mailbox if needed if domain_admin["with_mailbox"]: dom_admin_username = domain_admin["username"] mb = models.Mailbox( address=dom_admin_username, domain=domain, user=da, use_domain_quota=True ) mb.set_quota( override_rules=user.has_perm("admin.change_domain")) mb.save(creator=user) # 3. Create aliases if needed condition = ( domain.type == "domain" and domain_admin["with_aliases"] and dom_admin_username != "postmaster" ) if condition: core_signals.can_create_object.send( self.__class__, context=user, klass=models.Alias) address = u"postmaster@{}".format(domain.name) alias = models.Alias.objects.create( address=address, domain=domain, enabled=True) alias.set_recipients([mb.full_address]) alias.post_create(user) domain.add_admin(da) return domain class DeleteDomainSerializer(serializers.Serializer): """Serializer used with delete operation.""" keep_folder = serializers.BooleanField(default=False) class AdminGlobalParametersSerializer(serializers.Serializer): """A serializer for global parameters.""" # Domain settings enable_mx_checks = serializers.BooleanField(default=True) valid_mxs = serializers.CharField(allow_blank=True) domains_must_have_authorized_mx = serializers.BooleanField(default=False) enable_spf_checks = serializers.BooleanField(default=True) enable_dkim_checks = serializers.BooleanField(default=True) enable_dmarc_checks = serializers.BooleanField(default=True) enable_autoconfig_checks = serializers.BooleanField(default=True) custom_dns_server = serializers.IPAddressField(allow_blank=True) enable_dnsbl_checks = serializers.BooleanField(default=True) dkim_keys_storage_dir = serializers.CharField(allow_blank=True) dkim_default_key_length = serializers.ChoiceField( default=2048, choices=constants.DKIM_KEY_LENGTHS) default_domain_quota = serializers.IntegerField(default=0) default_domain_message_limit = serializers.IntegerField( required=False, allow_null=True) # Mailboxes settings handle_mailboxes = serializers.BooleanField(default=False) default_mailbox_quota = serializers.IntegerField(default=0) default_mailbox_message_limit = serializers.IntegerField( required=False, allow_null=True) auto_account_removal = serializers.BooleanField(default=False) auto_create_domain_and_mailbox = serializers.BooleanField(default=True) create_alias_on_mbox_rename = serializers.BooleanField(default=False) def validate_default_domain_quota(self, value): """Ensure quota is a positive integer.""" if value < 0: raise serializers.ValidationError( _("Must be a positive integer") ) return value def validate_default_mailbox_quota(self, value): """Ensure quota is a positive integer.""" if value < 0: raise serializers.ValidationError( _("Must be a positive integer") ) return value def validate_dkim_keys_storage_dir(self, value): """Check that directory exists.""" if value: if not os.path.isdir(value): raise serializers.ValidationError( _("Directory not found.") ) code, output = exec_cmd("which openssl") if code: raise serializers.ValidationError( _("openssl not found, please make sure it is installed.") ) return value def validate(self, data): """Check MX options.""" condition = ( data.get("enable_mx_checks") and data.get("domains_must_have_authorized_mx") and not data.get("valid_mxs")) if condition: raise serializers.ValidationError({ "valid_mxs": _("Define at least one authorized network / address") }) return data class DomainAdminSerializer(serializers.ModelSerializer): """Serializer used for administrator related routes.""" class Meta: model = core_models.User fields = ("id", "username", "first_name", "last_name") class SimpleDomainAdminSerializer(serializers.Serializer): """Serializer used for add/remove operations.""" account = serializers.PrimaryKeyRelatedField( queryset=core_models.User.objects.all() ) class TagSerializer(serializers.Serializer): """Serializer used to represent a tag.""" name = serializers.CharField() label = serializers.CharField() type = serializers.CharField() class IdentitySerializer(serializers.Serializer): """Serializer used for identities.""" pk = serializers.IntegerField() type = serializers.CharField() identity = serializers.CharField() name_or_rcpt = serializers.CharField() tags = TagSerializer(many=True) class MailboxSerializer(serializers.ModelSerializer): """Base mailbox serializer.""" quota = serializers.CharField(required=False) class Meta: model = models.Mailbox fields = ( "pk", "use_domain_quota", "quota", "message_limit" ) def validate_quota(self, value): """Convert quota to MB.""" return web_utils.size2integer(value, output_unit="MB") def validate(self, data): """Check if quota is required.""" method = self.context["request"].method if not data.get("use_domain_quota", False): if "quota" not in data and method != "PATCH": raise serializers.ValidationError({ "quota": _("This field is required") }) return data class WritableAccountSerializer(v1_serializers.WritableAccountSerializer): """Add support for aliases and sender addresses.""" aliases = serializers.ListField( child=lib_fields.DRFEmailFieldUTF8(), required=False ) mailbox = MailboxSerializer(required=False) class Meta(v1_serializers.WritableAccountSerializer.Meta): fields = tuple( field for field in v1_serializers.WritableAccountSerializer.Meta.fields if field != "random_password" ) + ("aliases", ) def validate_aliases(self, value): """Check if required domains are locals and user can access them.""" aliases = [] for alias in value: localpart, domain = models.validate_alias_address( alias, self.context["request"].user) aliases.append({"localpart": localpart, "domain": domain}) return aliases def validate(self, data): """Check constraints.""" master_user = data.get("master_user", False) role = data.get("role") if master_user and role != "SuperAdmins": raise serializers.ValidationError({ "master_user": _("Not allowed for this role.") }) if role == "SimpleUsers": username = data.get("username") if username: try: validators.UTF8EmailValidator()(username) except ValidationError as err: raise ValidationError({"username": err.message}) mailbox = data.get("mailbox") if mailbox is None: if not self.instance: data["mailbox"] = { "use_domain_quota": True } if data.get("password") or not self.partial: password = data.get("password") if password: try: password_validation.validate_password( data["password"], self.instance) except ValidationError as exc: raise serializers.ValidationError({ "password": exc.messages[0]}) elif not self.instance: raise serializers.ValidationError({ "password": _("This field is required.") }) aliases = data.get("aliases") if aliases and "mailbox" not in data: raise serializers.ValidationError({ "aliases": _("A mailbox is required to create aliases.") }) domain_names = data.get("domains") if not domain_names: return data domains = [] for name in domain_names: domain = models.Domain.objects.filter(name=name).first() if domain: domains.append(domain) continue raise serializers.ValidationError({ "domains": _("Local domain {} does not exist").format(name) }) data["domains"] = domains return data def create(self, validated_data): """Create account, mailbox and aliases.""" creator = self.context["request"].user mailbox_data = validated_data.pop("mailbox", None) role = validated_data.pop("role") domains = validated_data.pop("domains", []) aliases = validated_data.pop("aliases", None) user = core_models.User(**validated_data) password = validated_data.pop("password") user.set_password(password) if "language" not in validated_data: user.language = settings.LANGUAGE_CODE user.save(creator=creator) if mailbox_data: mailbox_data["full_address"] = user.username self._create_mailbox(creator, user, mailbox_data) user.role = role self.set_permissions(user, domains) if aliases: for alias in aliases: models.Alias.objects.create( creator=creator, domain=alias["domain"], address="{}@{}".format(alias["localpart"], alias["domain"]), recipients=[user.username] ) return user class DeleteAccountSerializer(serializers.Serializer): """Serializer used with delete operation.""" keepdir = serializers.BooleanField(default=False) class AliasSerializer(v1_serializers.AliasSerializer): """Alias serializer for v2 API.""" class Meta(v1_serializers.AliasSerializer.Meta): # We remove 'internal' field fields = tuple( field for field in v1_serializers.AliasSerializer.Meta.fields if field != "internal" ) + ("expire_at", "description")
CVML/pybrain
refs/heads/master
pybrain/supervised/evolino/individual.py
26
__author__ = 'Michael Isik' from pybrain.supervised.evolino.gindividual import Individual from copy import copy, deepcopy class EvolinoIndividual(Individual): """ Individual of the Evolino framework, that consists of a list of sub-individuals. The genomes of the sub-individuals are used as the cromosomes for the main individual's genome. The genome of an individual encodes the RNN's connection weights. """ def __init__(self, sub_individuals): """ :key sub_individuals: sequence (e.g. list) of sub-individuals """ self._sub_individuals = list(sub_individuals) def getGenome(self): """ Returns the genome created by concatenating the chromosomes supplied by the sub-individuals. """ genome = [] for sub_individual in self._sub_individuals: genome.append(deepcopy(sub_individual.getGenome())) return genome def getSubIndividuals(self): """ Returns a shallow copy of the list of sub-individuals """ return copy(self._sub_individuals) class EvolinoSubIndividual(Individual): """ The sub-individual class of evolino """ _next_id = 0 def __init__(self, genome): """ :key genome: Any kind of nested iteratable container containing floats as leafs """ self.setGenome(genome) self.id = EvolinoSubIndividual._next_id EvolinoSubIndividual._next_id += 1 def getGenome(self): """ Returns the genome. """ return self._genome def setGenome(self, genome): """ Sets the genome. """ self._genome = genome def copy(self): """ Returns a complete copy of the individual. """ return copy(self) def __copy__(self): """ Returns a complete copy of the individual. """ return EvolinoSubIndividual(deepcopy(self._genome))
jcftang/ansible
refs/heads/devel
test/units/mock/path.py
48
#!/usr/bin/env python def mock_unfrackpath_noop(path): ''' Do not expand the path ''' return path
HackLinux/androguard
refs/heads/master
androguard/core/api_specific_resources/aosp_permissions/aosp_permissions_api22.py
27
#!/usr/bin/python # -*- coding: utf-8 -*- ################################################# ### Extracted from platform version: 5.1.1 ################################################# AOSP_PERMISSIONS = { 'android.permission.REMOTE_AUDIO_PLAYBACK' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.DUMP' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to retrieve internal state of the system. Malicious apps may retrieve a wide variety of private and secure information that they should never normally need.', 'protectionLevel' : 'signature|system|development', 'label' : 'retrieve system internal state'}, 'android.permission.BODY_SENSORS' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to access data from sensors that monitor your physical condition, such as your heart rate.', 'protectionLevel' : '', 'label' : 'body sensors (like heart rate monitors)'}, 'android.permission.READ_SOCIAL_STREAM' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to access and sync social updates from you and your friends. Be careful when sharing information -- this allows the app to read communications between you and your friends on social networks, regardless of confidentiality. Note: this permission may not be enforced on all social networks.', 'protectionLevel' : 'dangerous', 'label' : 'read your social stream'}, 'android.permission.MODIFY_AUDIO_ROUTING' : {'permissionGroup' : '', 'description' : 'Allows the app to directly control audio routing and override audio policy decisions.', 'protectionLevel' : 'signature|system', 'label' : 'Audio Routing'}, 'android.permission.READ_NETWORK_USAGE_HISTORY' : {'permissionGroup' : '', 'description' : 'Allows the app to read historical network usage for specific networks and apps.', 'protectionLevel' : 'signature|system', 'label' : 'read historical network usage'}, 'android.permission.BIND_DIRECTORY_SEARCH' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.INTERNET' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to create network sockets and use custom network protocols. The browser and other applications provide means to send data to the internet, so this permission is not required to send data to the internet.', 'protectionLevel' : 'dangerous', 'label' : 'full network access'}, 'android.permission.HARDWARE_TEST' : {'permissionGroup' : 'android.permission-group.HARDWARE_CONTROLS', 'description' : 'Allows the app to control various peripherals for the purpose of hardware testing.', 'protectionLevel' : 'signature', 'label' : 'test hardware'}, 'android.permission.START_TASKS_FROM_RECENTS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to use an ActivityManager.RecentTaskInfo object to launch a defunct task that was returned from ActivityManager.getRecentTaskList().', 'protectionLevel' : 'signature|system', 'label' : 'start a task from recents'}, 'android.permission.ACCESS_DOWNLOAD_MANAGER_ADVANCED' : {'permissionGroup' : '', 'description' : 'Allows the app to access the download manager\'s advanced functions. Malicious apps can use this to disrupt downloads and access private information.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Advanced download manager functions.'}, 'android.permission.REMOVE_DRM_CERTIFICATES' : {'permissionGroup' : '', 'description' : 'Allows an application to remove DRM certficates. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'remove DRM certificates'}, 'com.android.launcher.permission.INSTALL_SHORTCUT' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows an application to add Homescreen shortcuts without user intervention.', 'protectionLevel' : 'dangerous', 'label' : 'install shortcuts'}, 'android.permission.BIND_TV_INPUT' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a TV input. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'bind to a TV input'}, 'android.permission.BIND_VPN_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a Vpn service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a VPN service'}, 'com.android.voicemail.permission.READ_VOICEMAIL' : {'permissionGroup' : 'android.permission-group.VOICEMAIL', 'description' : 'Allows the app to read your voicemails.', 'protectionLevel' : 'system|signature', 'label' : 'read voicemail'}, 'android.permission.REGISTER_CONNECTION_MANAGER' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to manage telecom connections.', 'protectionLevel' : 'system|signature', 'label' : 'manage telecom connections'}, 'android.permission.READ_SEARCH_INDEXABLES' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.BIND_INPUT_METHOD' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of an input method. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to an input method'}, 'android.permission.ACCESS_CACHE_FILESYSTEM' : {'permissionGroup' : '', 'description' : 'Allows the app to read and write the cache filesystem.', 'protectionLevel' : 'signature|system', 'label' : 'access the cache filesystem'}, 'android.permission.DOWNLOAD_CACHE_NON_PURGEABLE' : {'permissionGroup' : '', 'description' : 'Allows the app to download files to the download cache, which can\'t be automatically deleted when the download manager needs more space.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Reserve space in the download cache'}, 'android.permission.CONFIGURE_WIFI_DISPLAY' : {'permissionGroup' : '', 'description' : 'Allows the app to configure and connect to Wifi displays.', 'protectionLevel' : 'signature', 'label' : 'configure Wifi displays'}, 'com.android.gallery3d.permission.GALLERY_PROVIDER' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'com.android.permission.WHITELIST_BLUETOOTH_DEVICE' : {'permissionGroup' : '', 'description' : 'Allows the app to temporarily whitelist a Bluetooth device, allowing that device to send files to this device without user confirmation.', 'protectionLevel' : 'signature', 'label' : 'Whitelist bluetooth device access.'}, 'android.permission.READ_CELL_BROADCASTS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to read cell broadcast messages received by your device. Cell broadcast alerts are delivered in some locations to warn you of emergency situations. Malicious apps may interfere with the performance or operation of your device when an emergency cell broadcast is received.', 'protectionLevel' : 'dangerous', 'label' : 'read cell broadcast messages'}, 'android.permission.BIND_DEVICE_ADMIN' : {'permissionGroup' : '', 'description' : 'Allows the holder to send intents to a device administrator. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'interact with a device admin'}, 'android.permission.FRAME_STATS' : {'permissionGroup' : '', 'description' : 'Allows an application to collect frame statistics. Malicious apps may observe the frame statistics of windows from other apps.', 'protectionLevel' : 'signature', 'label' : 'retrieve frame statistics'}, 'com.android.providers.tv.permission.ACCESS_ALL_EPG_DATA' : {'permissionGroup' : '', 'description' : 'Allows the app to read and write all TV channel/program data stored on your device.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'access all TV channel/program information'}, 'android.permission.WRITE_SECURE_SETTINGS' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to modify the system\'s secure settings data. Not for use by normal apps.', 'protectionLevel' : 'signature|system|development', 'label' : 'modify secure system settings'}, 'android.permission.MANAGE_DOCUMENTS' : {'permissionGroup' : 'android.permission-group.STORAGE', 'description' : 'Allows the app to manage document storage.', 'protectionLevel' : 'signature', 'label' : 'manage document storage'}, 'android.permission.SYSTEM_ALERT_WINDOW' : {'permissionGroup' : 'android.permission-group.DISPLAY', 'description' : 'Allows the app to draw on top of other applications or parts of the user interface. They may interfere with your use of the interface in any application, or change what you think you are seeing in other applications.', 'protectionLevel' : 'dangerous', 'label' : 'draw over other apps'}, 'com.android.cts.permissionNotUsedWithSignature' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.READ_SYNC_STATS' : {'permissionGroup' : 'android.permission-group.SYNC_SETTINGS', 'description' : 'Allows an app to read the sync stats for an account, including the history of sync events and how much data is synced.', 'protectionLevel' : 'normal', 'label' : 'read sync statistics'}, 'android.permission.START_ANY_ACTIVITY' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to start any activity, regardless of permission protection or exported state.', 'protectionLevel' : 'signature', 'label' : 'start any activity'}, 'android.permission.AUTHENTICATE_ACCOUNTS' : {'permissionGroup' : 'android.permission-group.ACCOUNTS', 'description' : 'Allows the app to use the account authenticator capabilities of the AccountManager, including creating accounts and getting and setting their passwords.', 'protectionLevel' : 'dangerous', 'label' : 'create accounts and set passwords'}, 'test_permission' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'normal', 'label' : ''}, 'android.permission.MODIFY_PHONE_STATE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to control the phone features of the device. An app with this permission can switch networks, turn the phone radio on and off and the like without ever notifying you.', 'protectionLevel' : 'signature|system', 'label' : 'modify phone state'}, 'android.permission.LAUNCH_TRUST_AGENT_SETTINGS' : {'permissionGroup' : '', 'description' : 'Allows an application to launch an activity that changes the trust agent behavior.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Launch trust agent settings menu.'}, 'android.permission.CAPTURE_SECURE_VIDEO_OUTPUT' : {'permissionGroup' : '', 'description' : 'Allows the app to capture and redirect secure video output.', 'protectionLevel' : 'signature|system', 'label' : 'capture secure video output'}, 'com.android.launcher.permission.PRELOAD_WORKSPACE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'android.permission.BIND_CONNECTION_SERVICE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to interact with telephony services to make/receive calls.', 'protectionLevel' : 'system|signature', 'label' : 'interact with telephony services'}, 'android.permission.WRITE_PROFILE' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to change or add to personal profile information stored on your device, such as your name and contact information. This means the app can identify you and may send your profile information to others.', 'protectionLevel' : 'dangerous', 'label' : 'modify your own contact card'}, 'android.permission.BIND_NFC_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to applications that are emulating NFC cards. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to NFC service'}, 'android.permission.GRANT_REVOKE_PERMISSIONS' : {'permissionGroup' : '', 'description' : 'Allows an application to grant or revoke specific permissions for it or other applications. Malicious applications may use this to access features you have not granted them.', 'protectionLevel' : 'signature', 'label' : 'grant or revoke permissions'}, 'android.permission.CAMERA' : {'permissionGroup' : 'android.permission-group.CAMERA', 'description' : 'Allows the app to take pictures and videos with the camera. This permission allows the app to use the camera at any time without your confirmation.', 'protectionLevel' : 'dangerous', 'label' : 'take pictures and videos'}, 'android.permission.START_PRINT_SERVICE_CONFIG_ACTIVITY' : {'permissionGroup' : '', 'description' : 'Allows the holder to start the configuration activities of a print service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'start print service configuration activities'}, 'android.permission.SET_WALLPAPER_HINTS' : {'permissionGroup' : 'android.permission-group.WALLPAPER', 'description' : 'Allows the app to set the system wallpaper size hints.', 'protectionLevel' : 'normal', 'label' : 'adjust your wallpaper size'}, 'android.permission.INVOKE_CARRIER_SETUP' : {'permissionGroup' : '', 'description' : 'Allows the holder to invoke the carrier-provided configuration app. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'invoke the carrier-provided configuration app'}, 'android.permission.BIND_NOTIFICATION_LISTENER_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a notification listener service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a notification listener service'}, 'android.permission.BIND_CARRIER_MESSAGING_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a carrier messaging service. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'bind to a carrier messaging service'}, 'android.permission.CONTROL_LOCATION_UPDATES' : {'permissionGroup' : '', 'description' : 'Allows the app to enable/disable location update notifications from the radio. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'control location update notifications'}, 'android.permission.REBOOT' : {'permissionGroup' : '', 'description' : 'Allows the app to force the phone to reboot.', 'protectionLevel' : 'signature|system', 'label' : 'force phone reboot'}, 'android.permission.BROADCAST_WAP_PUSH' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to broadcast a notification that a WAP PUSH message has been received. Malicious apps may use this to forge MMS message receipt or to silently replace the content of any webpage with malicious variants.', 'protectionLevel' : 'signature', 'label' : 'send WAP-PUSH-received broadcast'}, 'android.permission.ACCESS_NETWORK_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to view information about network connections such as which networks exist and are connected.', 'protectionLevel' : 'normal', 'label' : 'view network connections'}, 'android.permission.CAPTURE_TV_INPUT' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'android.permission.CHANGE_WIMAX_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to connect the phone to and disconnect the phone from WiMAX networks.', 'protectionLevel' : 'dangerous', 'label' : 'Change WiMAX state'}, 'com.foo.mypermission' : {'permissionGroup' : '', 'description' : 'MyActivity', 'protectionLevel' : '', 'label' : 'MyActivity'}, 'android.permission.MOUNT_FORMAT_FILESYSTEMS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to format removable storage.', 'protectionLevel' : 'system|signature', 'label' : 'erase SD Card'}, 'android.permission.SCORE_NETWORKS' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to rank networks and influence which networks the phone should prefer.', 'protectionLevel' : 'signature|system', 'label' : 'score networks'}, 'android.permission.BIND_APPWIDGET' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to tell the system which widgets can be used by which app. An app with this permission can give access to personal data to other apps. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'choose widgets'}, 'com.android.frameworks.coretests.permission.TEST_GRANTED' : {'permissionGroup' : '', 'description' : 'Used for running unit tests, for testing operations where we have the permission.', 'protectionLevel' : 'normal', 'label' : 'Test Granted'}, 'android.permission.ASEC_CREATE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to create internal storage.', 'protectionLevel' : 'signature', 'label' : 'create internal storage'}, 'android.permission.MANAGE_CA_CERTIFICATES' : {'permissionGroup' : '', 'description' : 'Allows the app to install and uninstall CA certificates as trusted credentials.', 'protectionLevel' : 'signature|system', 'label' : 'manage trusted credentials'}, 'android.permission.INSTALL_LOCATION_PROVIDER' : {'permissionGroup' : '', 'description' : 'Create mock location sources for testing or install a new location provider. This allows the app to override the location and/or status returned by other location sources such as GPS or location providers.', 'protectionLevel' : 'signature|system', 'label' : 'permission to install a location provider'}, 'android.permission.LOOP_RADIO' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.TRANSMIT_IR' : {'permissionGroup' : 'android.permission-group.AFFECTS_BATTERY', 'description' : 'Allows the app to use the phone\'s infrared transmitter.', 'protectionLevel' : 'normal', 'label' : 'transmit infrared'}, 'com.android.browser.permission.WRITE_HISTORY_BOOKMARKS' : {'permissionGroup' : 'android.permission-group.BOOKMARKS', 'description' : 'Allows the app to modify the Browser\'s history or bookmarks stored on your phone. This may allow the app to erase or modify Browser data. Note: this permission may note be enforced by third-party browsers or other applications with web browsing capabilities.', 'protectionLevel' : 'dangerous', 'label' : 'write web bookmarks and history'}, 'android.permission.WRITE_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to modify the system\'s settings data. Malicious apps may corrupt your system\'s configuration.', 'protectionLevel' : 'normal', 'label' : 'modify system settings'}, 'android.permission.MANAGE_APP_TOKENS' : {'permissionGroup' : '', 'description' : 'Allows the app to create and manage their own tokens, bypassing their normal Z-ordering. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'manage app tokens'}, 'android.permission.RESTART_PACKAGES' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to end background processes of other apps. This may cause other apps to stop running.', 'protectionLevel' : 'normal', 'label' : 'close other apps'}, 'android.permission.ACCESS_DRM_CERTIFICATES' : {'permissionGroup' : '', 'description' : 'Allows an application to provision and use DRM certficates. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'access DRM certificates'}, 'android.permission.PACKAGE_VERIFICATION_AGENT' : {'permissionGroup' : '', 'description' : 'Allows the app to verify a package is installable.', 'protectionLevel' : 'signature|system', 'label' : 'verify packages'}, 'android.permission.CONFIRM_FULL_BACKUP' : {'permissionGroup' : '', 'description' : 'Allows the app to launch the full backup confirmation UI. Not to be used by any app.', 'protectionLevel' : 'signature', 'label' : 'confirm a full backup or restore operation'}, 'com.android.smspush.WAPPUSH_MANAGER_BIND' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'com.android.gallery3d.filtershow.permission.READ' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.BIND_PRINT_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a print service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a print service'}, 'com.android.providers.tv.permission.ACCESS_WATCHED_PROGRAMS' : {'permissionGroup' : '', 'description' : 'Allows the app to read and write the list of TV programs you watched. Malicious apps may collect your private TV watch history.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'access watched TV program information'}, 'android.permission.ASEC_ACCESS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to get information on internal storage.', 'protectionLevel' : 'signature', 'label' : 'get information on internal storage'}, 'android.permission.USE_SIP' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to make and receive SIP calls.', 'protectionLevel' : 'dangerous', 'label' : 'make/receive SIP calls'}, 'android.permission.RECEIVE_DATA_ACTIVITY_CHANGE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.NET_ADMIN' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.CHANGE_BACKGROUND_DATA_SETTING' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to change the background data usage setting.', 'protectionLevel' : 'signature', 'label' : 'change background data usage setting'}, 'android.permission.PROCESS_CALLLOG_INFO' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.CAPTURE_AUDIO_HOTWORD' : {'permissionGroup' : '', 'description' : 'Allows the app to capture audio for Hotword detection. The capture can happen in the background but does not prevent other audio capture (e.g. Camcorder).', 'protectionLevel' : 'signature|system', 'label' : 'Hotword detection'}, 'android.permission.NFC' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to communicate with Near Field Communication (NFC) tags, cards, and readers.', 'protectionLevel' : 'dangerous', 'label' : 'control Near Field Communication'}, 'android.permission.SEND_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to send SMS messages. This may result in unexpected charges. Malicious apps may cost you money by sending messages without your confirmation.', 'protectionLevel' : 'dangerous', 'label' : 'send SMS messages'}, 'android.permission.INTERACT_ACROSS_USERS_FULL' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows all possible interactions across users.', 'protectionLevel' : 'signature', 'label' : 'full license to interact across users'}, 'android.permission.CLEAR_APP_USER_DATA' : {'permissionGroup' : '', 'description' : 'Allows the app to clear user data.', 'protectionLevel' : 'signature', 'label' : 'delete other apps\' data'}, 'android.permission.ACCESS_MOCK_LOCATION' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Create mock location sources for testing or install a new location provider. This allows the app to override the location and/or status returned by other location sources such as GPS or location providers.', 'protectionLevel' : 'dangerous', 'label' : 'mock location sources for testing'}, 'android.permission.CAPTURE_AUDIO_OUTPUT' : {'permissionGroup' : '', 'description' : 'Allows the app to capture and redirect audio output.', 'protectionLevel' : 'signature|system', 'label' : 'capture audio output'}, 'android.permission.GET_DETAILED_TASKS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to retrieve detailed information about currently and recently running tasks. Malicious apps may discover private information about other apps.', 'protectionLevel' : 'signature', 'label' : 'retrieve details of running apps'}, 'android.permission.ACCESS_ALL_DOWNLOADS' : {'permissionGroup' : '', 'description' : 'Allows the app to view and modify all downloads initiated by any app on the system.', 'protectionLevel' : 'signature', 'label' : 'Access all system downloads'}, 'android.permission.STATUS_BAR' : {'permissionGroup' : '', 'description' : 'Allows the app to disable the status bar or add and remove system icons.', 'protectionLevel' : 'signature|system', 'label' : 'disable or modify status bar'}, 'android.permission.TV_INPUT_HARDWARE' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'android.permission.MEDIA_CONTENT_CONTROL' : {'permissionGroup' : '', 'description' : 'Allows the app to control media playback and access the media information (title, author...).', 'protectionLevel' : 'signature|system', 'label' : 'control media playback and metadata access'}, 'android.permission.DOWNLOAD_WITHOUT_NOTIFICATION' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to download files through the download manager without any notification being shown to the user.', 'protectionLevel' : 'normal', 'label' : 'download files without notification'}, 'android.permission.RECOVERY' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows an application to interact with the recovery system and system updates.', 'protectionLevel' : 'signature|system', 'label' : 'Interact with update and recovery system'}, 'com.android.email.permission.READ_ATTACHMENT' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to read your email attachments.', 'protectionLevel' : 'dangerous', 'label' : 'Read email attachments'}, 'android.permission.NET_TUNNELING' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.SET_TIME' : {'permissionGroup' : '', 'description' : 'Allows the app to change the phone\'s clock time.', 'protectionLevel' : 'signature|system', 'label' : 'set time'}, 'android.permission.MANAGE_MEDIA_PROJECTION' : {'permissionGroup' : '', 'description' : 'Allows an application to manage media projection sessions. These sessions can provide applications the ability to capture display and audio contents. Should never be needed by normal apps.', 'protectionLevel' : 'signature', 'label' : 'Manage media projection sessions'}, 'android.permission.CALL_PHONE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to call phone numbers without your intervention. This may result in unexpected charges or calls. Note that this doesn\'t allow the app to call emergency numbers. Malicious apps may cost you money by making calls without your confirmation.', 'protectionLevel' : 'dangerous', 'label' : 'directly call phone numbers'}, 'android.permission.FLASHLIGHT' : {'permissionGroup' : 'android.permission-group.AFFECTS_BATTERY', 'description' : 'Allows the app to control the flashlight.', 'protectionLevel' : 'normal', 'label' : 'control flashlight'}, 'android.permission.READ_PHONE_STATE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to access the phone features of the device. This permission allows the app to determine the phone number and device IDs, whether a call is active, and the remote number connected by a call.', 'protectionLevel' : 'dangerous', 'label' : 'read phone status and identity'}, 'android.permission.MANAGE_DEVICE_ADMINS' : {'permissionGroup' : '', 'description' : 'Allows the holder to add or remove active device administrators. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'add or remove a device admin'}, 'com.android.voicemail.permission.ADD_VOICEMAIL' : {'permissionGroup' : 'android.permission-group.VOICEMAIL', 'description' : 'Allows the app to add messages to your voicemail inbox.', 'protectionLevel' : 'dangerous', 'label' : 'add voicemail'}, 'android.permission.REAL_GET_TASKS' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to retrieve information about currently and recently running tasks. This may allow the app to discover information about which applications are used on the device.', 'protectionLevel' : 'signature|system', 'label' : 'retrieve running apps'}, 'android.permission.KILL_BACKGROUND_PROCESSES' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to end background processes of other apps. This may cause other apps to stop running.', 'protectionLevel' : 'normal', 'label' : 'close other apps'}, 'android.permission.RECEIVE_MMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to receive and process MMS messages. This means the app could monitor or delete messages sent to your device without showing them to you.', 'protectionLevel' : 'dangerous', 'label' : 'receive text messages (MMS)'}, 'android.permission.WAKE_LOCK' : {'permissionGroup' : 'android.permission-group.AFFECTS_BATTERY', 'description' : 'Allows the app to prevent the phone from going to sleep.', 'protectionLevel' : 'normal', 'label' : 'prevent phone from sleeping'}, 'android.permission.BIND_VOICE_INTERACTION' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a voice interaction service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a voice interactor'}, 'android.permission.STATUS_BAR_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the app to be the status bar.', 'protectionLevel' : 'signature', 'label' : 'status bar'}, 'android.permission.DELETE_CACHE_FILES' : {'permissionGroup' : '', 'description' : 'Allows the app to delete cache files.', 'protectionLevel' : 'signature|system', 'label' : 'delete other apps\' caches'}, 'android.permission.MODIFY_NETWORK_ACCOUNTING' : {'permissionGroup' : '', 'description' : 'Allows the app to modify how network usage is accounted against apps. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'modify network usage accounting'}, 'android.permission.GET_ACCOUNTS' : {'permissionGroup' : 'android.permission-group.ACCOUNTS', 'description' : 'Allows the app to get the list of accounts known by the phone. This may include any accounts created by applications you have installed.', 'protectionLevel' : 'normal', 'label' : 'find accounts on the device'}, 'android.permission.CHANGE_NETWORK_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to change the state of network connectivity.', 'protectionLevel' : 'normal', 'label' : 'change network connectivity'}, 'android.permission.ACCESS_MTP' : {'permissionGroup' : 'android.permission-group.HARDWARE_CONTROLS', 'description' : 'Allows access to the kernel MTP driver to implement the MTP USB protocol.', 'protectionLevel' : 'signature|system', 'label' : 'implement MTP protocol'}, 'android.permission.DISABLE_KEYGUARD' : {'permissionGroup' : 'android.permission-group.SCREENLOCK', 'description' : 'Allows the app to disable the keylock and any associated password security. For example, the phone disables the keylock when receiving an incoming phone call, then re-enables the keylock when the call is finished.', 'protectionLevel' : 'dangerous', 'label' : 'disable your screen lock'}, 'android.permission.BIND_PACKAGE_VERIFIER' : {'permissionGroup' : '', 'description' : 'Allows the holder to make requests of package verifiers. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a package verifier'}, 'com.android.launcher.permission.UNINSTALL_SHORTCUT' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the application to remove Homescreen shortcuts without user intervention.', 'protectionLevel' : 'dangerous', 'label' : 'uninstall shortcuts'}, 'android.permission.USE_CREDENTIALS' : {'permissionGroup' : 'android.permission-group.ACCOUNTS', 'description' : 'Allows the app to request authentication tokens.', 'protectionLevel' : 'dangerous', 'label' : 'use accounts on the device'}, 'android.permission.WRITE_MEDIA_STORAGE' : {'permissionGroup' : 'android.permission-group.STORAGE', 'description' : 'Allows the app to modify the contents of the internal media storage.', 'protectionLevel' : 'signature|system', 'label' : 'modify/delete internal media storage contents'}, 'android.permission.ACCESS_COARSE_LOCATION' : {'permissionGroup' : 'android.permission-group.LOCATION', 'description' : 'Allows the app to get your approximate location. This location is derived by location services using network location sources such as cell towers and Wi-Fi. These location services must be turned on and available to your device for the app to use them. Apps may use this to determine approximately where you are.', 'protectionLevel' : 'dangerous', 'label' : 'approximate location (network-based)'}, 'android.permission.BIND_KEYGUARD_APPWIDGET' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.BIND_TRUST_AGENT' : {'permissionGroup' : '', 'description' : 'Allows an application to bind to a trust agent service.', 'protectionLevel' : 'signature', 'label' : 'Bind to a trust agent service'}, 'android.permission.CONTROL_VPN' : {'permissionGroup' : '', 'description' : 'Allows the app to control low-level features of Virtual Private Networks.', 'protectionLevel' : 'signature|system', 'label' : 'control Virtual Private Networks'}, 'android.permission.BLUETOOTH_ADMIN' : {'permissionGroup' : 'android.permission-group.BLUETOOTH_NETWORK', 'description' : 'Allows the app to configure the local Bluetooth phone, and to discover and pair with remote devices.', 'protectionLevel' : 'dangerous', 'label' : 'access Bluetooth settings'}, 'android.permission.PERSISTENT_ACTIVITY' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to make parts of itself persistent in memory. This can limit memory available to other apps slowing down the phone.', 'protectionLevel' : 'normal', 'label' : 'make app always run'}, 'android.permission.TRUST_LISTENER' : {'permissionGroup' : '', 'description' : 'Allows an application to listen for changes in trust state.', 'protectionLevel' : 'signature', 'label' : 'Listen to trust state changes.'}, 'android.permission.CARRIER_FILTER_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.BIND_TEXT_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a text service(e.g. SpellCheckerService). Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a text service'}, 'android.permission.RECEIVE_WAP_PUSH' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to receive and process WAP messages. This permission includes the ability to monitor or delete messages sent to you without showing them to you.', 'protectionLevel' : 'dangerous', 'label' : 'receive text messages (WAP)'}, 'com.foo.mypermission2' : {'permissionGroup' : '', 'description' : 'MyActivity', 'protectionLevel' : '', 'label' : 'MyActivity'}, 'android.permission.SET_WALLPAPER' : {'permissionGroup' : 'android.permission-group.WALLPAPER', 'description' : 'Allows the app to set the system wallpaper.', 'protectionLevel' : 'normal', 'label' : 'set wallpaper'}, 'android.permission.PROVIDE_TRUST_AGENT' : {'permissionGroup' : '', 'description' : 'Allows an application to provide a trust agent.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Provide a trust agent.'}, 'android' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : '', 'label' : ''}, 'android.permission.BIND_PRINT_SPOOLER_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a print spooler service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a print spooler service'}, 'android.permission.GET_PACKAGE_SIZE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to retrieve its code, data, and cache sizes', 'protectionLevel' : 'normal', 'label' : 'measure app storage space'}, 'android.permission.BIND_REMOTE_DISPLAY' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a remote display. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a remote display'}, 'android.permission.READ_EXTERNAL_STORAGE' : {'permissionGroup' : 'android.permission-group.STORAGE', 'description' : 'Allows the app to read the contents of your SD card.', 'protectionLevel' : 'normal', 'label' : 'read the contents of your SD card'}, 'android.permission.SET_KEYBOARD_LAYOUT' : {'permissionGroup' : '', 'description' : 'Allows the app to change the keyboard layout. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'change keyboard layout'}, 'com.android.frameworks.coretests.SIGNATURE' : {'permissionGroup' : 'android.permission-group.COST_MONEY', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.INTERNAL_SYSTEM_WINDOW' : {'permissionGroup' : '', 'description' : 'Allows the app to create windows that are intended to be used by the internal system user interface. Not for use by normal apps.', 'protectionLevel' : 'signature', 'label' : 'display unauthorized windows'}, 'com.android.cts.intent.sender.permission.SAMPLE' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : '', 'label' : ''}, 'com.android.launcher3.permission.RECEIVE_FIRST_LOAD_BROADCAST' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'android.permission.PERFORM_CDMA_PROVISIONING' : {'permissionGroup' : '', 'description' : 'Allows the app to start CDMA provisioning. Malicious apps may unnecessarily start CDMA provisioning.', 'protectionLevel' : 'signature|system', 'label' : 'directly start CDMA phone setup'}, 'com.android.browser.permission.PRELOAD' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Preload results'}, 'android.permission.MODIFY_AUDIO_SETTINGS' : {'permissionGroup' : 'android.permission-group.AUDIO_SETTINGS', 'description' : 'Allows the app to modify global audio settings such as volume and which speaker is used for output.', 'protectionLevel' : 'normal', 'label' : 'change your audio settings'}, 'android.permission.CONTROL_WIFI_DISPLAY' : {'permissionGroup' : '', 'description' : 'Allows the app to control low-level features of Wifi displays.', 'protectionLevel' : 'signature', 'label' : 'control Wifi displays'}, 'android.permission.SET_ACTIVITY_WATCHER' : {'permissionGroup' : '', 'description' : 'Allows the app to monitor and control how the system launches activities. Malicious apps may completely compromise the system. This permission is only needed for development, never for normal use.', 'protectionLevel' : 'signature', 'label' : 'monitor and control all app launching'}, 'com.android.frameworks.coretests.NORMAL' : {'permissionGroup' : 'android.permission-group.COST_MONEY', 'description' : '', 'protectionLevel' : 'normal', 'label' : ''}, 'android.permission.BROADCAST_NETWORK_PRIVILEGED' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to send privileged network broadcasts. Never needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'send privileged network broadcasts'}, 'android.permission.COPY_PROTECTED_DATA' : {'permissionGroup' : '', 'description' : 'copy content', 'protectionLevel' : 'signature', 'label' : 'copy content'}, 'android.permission.RETRIEVE_WINDOW_TOKEN' : {'permissionGroup' : '', 'description' : 'Allows an application to retrieve the window token. Malicious apps may perfrom unauthorized interaction with the application window impersonating the system.', 'protectionLevel' : 'signature', 'label' : 'retrieve window token'}, 'com.android.cts.permissionWithSignature' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.PACKAGE_USAGE_STATS' : {'permissionGroup' : '', 'description' : 'Allows the app to modify collected component usage statistics. Not for use by normal apps.', 'protectionLevel' : 'signature|development|appop', 'label' : 'update component usage statistics'}, 'android.permission.RECEIVE_BLUETOOTH_MAP' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to receive and process Bluetooth MAP messages. This means the app could monitor or delete messages sent to your device without showing them to you.', 'protectionLevel' : 'signature|system', 'label' : 'receive Bluetooth messages (MAP)'}, 'android.permission.BROADCAST_CALLLOG_INFO' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.CONTROL_INCALL_EXPERIENCE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to provide an in-call user experience.', 'protectionLevel' : 'system|signature', 'label' : 'provide an in-call user experience'}, 'android.permission.MOUNT_UNMOUNT_FILESYSTEMS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to mount and unmount filesystems for removable storage.', 'protectionLevel' : 'system|signature', 'label' : 'access SD Card filesystem'}, 'android.permission.BIND_WALLPAPER' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a wallpaper. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'bind to a wallpaper'}, 'android.permission.READ_DREAM_STATE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.NFC_HANDOVER_STATUS' : {'permissionGroup' : '', 'description' : 'Allows this application to receive information about current Android Beam transfers', 'protectionLevel' : 'signature|system', 'label' : 'Receive Android Beam transfer status'}, 'android.permission.FORCE_BACK' : {'permissionGroup' : '', 'description' : 'Allows the app to force any activity that is in the foreground to close and go back. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'force app to close'}, 'android.permission.READ_CALENDAR' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to read all calendar events stored on your phone, including those of friends or co-workers. This may allow the app to share or save your calendar data, regardless of confidentiality or sensitivity.', 'protectionLevel' : 'dangerous', 'label' : 'read calendar events plus confidential information'}, 'android.permission.DEVICE_POWER' : {'permissionGroup' : '', 'description' : 'Allows the app to turn the phone on or off.', 'protectionLevel' : 'signature', 'label' : 'power phone on or off'}, 'android.permission.SHUTDOWN' : {'permissionGroup' : '', 'description' : 'Puts the activity manager into a shutdown state. Does not perform a complete shutdown.', 'protectionLevel' : 'signature|system', 'label' : 'partial shutdown'}, 'android.os.cts.permission.TEST_GRANTED' : {'permissionGroup' : '', 'description' : 'Used for running CTS tests, for testing operations where we have the permission.', 'protectionLevel' : 'normal', 'label' : 'Test Granted'}, 'android.permission.CHANGE_CONFIGURATION' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to change the current configuration, such as the locale or overall font size.', 'protectionLevel' : 'signature|system|development', 'label' : 'change system display settings'}, 'android.permission.READ_CONTACTS' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to read data about your contacts stored on your phone, including the frequency with which you\'ve called, emailed, or communicated in other ways with specific individuals. This permission allows apps to save your contact data, and malicious apps may share contact data without your knowledge.', 'protectionLevel' : 'dangerous', 'label' : 'read your contacts'}, 'android.permission.BIND_DREAM_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a dream service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a dream service'}, 'android.permission.SEND_DOWNLOAD_COMPLETED_INTENTS' : {'permissionGroup' : '', 'description' : 'Allows the app to send notifications about completed downloads. Malicious apps can use this to confuse other apps that download files.', 'protectionLevel' : 'signature', 'label' : 'Send download notifications.'}, 'android.permission.READ_CALL_LOG' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to read your phone\'s call log, including data about incoming and outgoing calls. This permission allows apps to save your call log data, and malicious apps may share call log data without your knowledge.', 'protectionLevel' : 'dangerous', 'label' : 'read call log'}, 'android.permission.BLUETOOTH_PRIVILEGED' : {'permissionGroup' : 'android.permission-group.BLUETOOTH_NETWORK', 'description' : 'Allows the app to pair with remote devices without user interaction.', 'protectionLevel' : 'system|signature', 'label' : 'allow Bluetooth pairing by Application'}, 'android.permission.WRITE_CALL_LOG' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to modify your phone\'s call log, including data about incoming and outgoing calls. Malicious apps may use this to erase or modify your call log.', 'protectionLevel' : 'dangerous', 'label' : 'write call log'}, 'android.permission.CHANGE_WIFI_MULTICAST_STATE' : {'permissionGroup' : 'android.permission-group.AFFECTS_BATTERY', 'description' : 'Allows the app to receive packets sent to all devices on a Wi-Fi network using multicast addresses, not just your phone. It uses more power than the non-multicast mode.', 'protectionLevel' : 'dangerous', 'label' : 'allow Wi-Fi Multicast reception'}, 'android.permission.ACCESS_PDB_STATE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.MODIFY_APPWIDGET_BIND_PERMISSIONS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.SET_TIME_ZONE' : {'permissionGroup' : 'android.permission-group.SYSTEM_CLOCK', 'description' : 'Allows the app to change the phone\'s time zone.', 'protectionLevel' : 'normal', 'label' : 'set time zone'}, 'android.permission.HDMI_CEC' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signatureOrSystem', 'label' : ''}, 'android.permission.WRITE_SYNC_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYNC_SETTINGS', 'description' : 'Allows an app to modify the sync settings for an account. For example, this can be used to enable sync of the People app with an account.', 'protectionLevel' : 'normal', 'label' : 'toggle sync on and off'}, 'android.permission.CRYPT_KEEPER' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.READ_LOGS' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to read from the system\'s various log files. This allows it to discover general information about what you are doing with the phone, potentially including personal or private information.', 'protectionLevel' : 'signature|system|development', 'label' : 'read sensitive log data'}, 'android.permission.WRITE_GSERVICES' : {'permissionGroup' : '', 'description' : 'Allows the app to modify the Google services map. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'modify the Google services map'}, 'android.permission.SET_ORIENTATION' : {'permissionGroup' : '', 'description' : 'Allows the app to change the rotation of the screen at any time. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'change screen orientation'}, 'android.permission.BROADCAST_STICKY' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to send sticky broadcasts, which remain after the broadcast ends. Excessive use may make the phone slow or unstable by causing it to use too much memory.', 'protectionLevel' : 'normal', 'label' : 'send sticky broadcast'}, 'android.permission.FORCE_STOP_PACKAGES' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to forcibly stop other apps.', 'protectionLevel' : 'signature|system', 'label' : 'force stop other apps'}, 'com.android.frameworks.coretests.permission.TEST_DENIED' : {'permissionGroup' : '', 'description' : 'Used for running unit tests, for testing operations where we do not have the permission.', 'protectionLevel' : 'normal', 'label' : 'Test Denied'}, 'com.android.providers.tv.permission.READ_EPG_DATA' : {'permissionGroup' : '', 'description' : 'Allows the app to read the TV channel/program information stored on your device.', 'protectionLevel' : 'dangerous', 'label' : 'read TV channel/program information'}, 'android.permission.UPDATE_DEVICE_STATS' : {'permissionGroup' : '', 'description' : 'Allows the app to modify collected battery statistics. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'modify battery statistics'}, 'android.permission.ACCESS_LOCATION_EXTRA_COMMANDS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to access extra location provider commands. This may allow the app to interfere with the operation of the GPS or other location sources.', 'protectionLevel' : 'normal', 'label' : 'access extra location provider commands'}, 'android.permission.GET_TASKS' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to retrieve information about currently and recently running tasks. This may allow the app to discover information about which applications are used on the device.', 'protectionLevel' : 'normal', 'label' : 'retrieve running apps'}, 'android.permission.CHANGE_WIFI_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to connect to and disconnect from Wi-Fi access points and to make changes to device configuration for Wi-Fi networks.', 'protectionLevel' : 'dangerous', 'label' : 'connect and disconnect from Wi-Fi'}, 'android.permission.RECEIVE_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to receive and process SMS messages. This means the app could monitor or delete messages sent to your device without showing them to you.', 'protectionLevel' : 'dangerous', 'label' : 'receive text messages (SMS)'}, 'android.permission.READ_PROFILE' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to read personal profile information stored on your device, such as your name and contact information. This means the app can identify you and may send your profile information to others.', 'protectionLevel' : 'dangerous', 'label' : 'read your own contact card'}, 'android.permission.ACCESS_NETWORK_CONDITIONS' : {'permissionGroup' : '', 'description' : 'Allows an application to listen for observations on network conditions. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'listen for observations on network conditions'}, 'android.permission.ACCOUNT_MANAGER' : {'permissionGroup' : 'android.permission-group.ACCOUNTS', 'description' : 'Allows the app to make calls to AccountAuthenticators.', 'protectionLevel' : 'signature', 'label' : 'act as the AccountManagerService'}, 'android.permission.SET_ANIMATION_SCALE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to change the global animation speed (faster or slower animations) at any time.', 'protectionLevel' : 'signature|system|development', 'label' : 'modify global animation speed'}, 'com.android.certinstaller.INSTALL_AS_USER' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.BLUETOOTH_STACK' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.SET_PROCESS_LIMIT' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to control the maximum number of processes that will run. Never needed for normal apps.', 'protectionLevel' : 'signature|system|development', 'label' : 'limit number of running processes'}, 'android.permission.MOVE_PACKAGE' : {'permissionGroup' : '', 'description' : 'Allows the app to move app resources from internal to external media and vice versa.', 'protectionLevel' : 'signature|system', 'label' : 'move app resources'}, 'com.android.cts.permissionAllowedWithSignature' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.SET_DEBUG_APP' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to turn on debugging for another app. Malicious apps may use this to kill other apps.', 'protectionLevel' : 'signature|system|development', 'label' : 'enable app debugging'}, 'android.permission.BRICK' : {'permissionGroup' : '', 'description' : 'Allows the app to disable the entire phone permanently. This is very dangerous.', 'protectionLevel' : 'signature', 'label' : 'permanently disable phone'}, 'android.permission.BLUETOOTH' : {'permissionGroup' : 'android.permission-group.BLUETOOTH_NETWORK', 'description' : 'Allows the app to view the configuration of the Bluetooth on the phone, and to make and accept connections with paired devices.', 'protectionLevel' : 'dangerous', 'label' : 'pair with Bluetooth devices'}, 'com.android.launcher3.permission.RECEIVE_LAUNCH_BROADCASTS' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.MMS_SEND_OUTBOX_MSG' : {'permissionGroup' : '', 'description' : 'Sends out all MMSs from the outbox to the network', 'protectionLevel' : 'signatureOrSystem', 'label' : 'MMS Wakeup'}, 'android.permission.UPDATE_APP_OPS_STATS' : {'permissionGroup' : '', 'description' : 'Allows the app to modify collected application operation statistics. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'modify app ops statistics'}, 'android.permission.READ_PRIVILEGED_PHONE_STATE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.SET_WALLPAPER_COMPONENT' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'com.android.launcher3.permission.READ_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to read the settings and shortcuts in Home.', 'protectionLevel' : 'normal', 'label' : 'read Home settings and shortcuts'}, 'android.permission.WRITE_DREAM_STATE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.ACCESS_BLUETOOTH_SHARE' : {'permissionGroup' : '', 'description' : 'Allows the app to access the BluetoothShare manager and use it to transfer files.', 'protectionLevel' : 'signature', 'label' : 'Access download manager.'}, 'android.permission.READ_WIFI_CREDENTIAL' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.intent.category.MASTER_CLEAR.permission.C2D_MESSAGE' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.UPDATE_LOCK' : {'permissionGroup' : '', 'description' : 'Allows the holder to offer information to the system about when would be a good time for a noninteractive reboot to upgrade the device.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'discourage automatic device updates'}, 'com.android.cts.keysets_permdef.keysets_perm' : {'permissionGroup' : '', 'description' : 'keysets_perm_description', 'protectionLevel' : 'signature', 'label' : 'keysets_perm_label'}, 'android.permission.WRITE_USER_DICTIONARY' : {'permissionGroup' : 'android.permission-group.WRITE_USER_DICTIONARY', 'description' : 'Allows the app to write new words into the user dictionary.', 'protectionLevel' : 'normal', 'label' : 'add words to user-defined dictionary'}, 'com.android.browser.permission.READ_HISTORY_BOOKMARKS' : {'permissionGroup' : 'android.permission-group.BOOKMARKS', 'description' : 'Allows the app to read the history of all URLs that the Browser has visited, and all of the Browser\'s bookmarks. Note: this permission may not be enforced by third-party browsers or other applications with web browsing capabilities.', 'protectionLevel' : 'dangerous', 'label' : 'read your Web bookmarks and history'}, 'android.permission.RECORD_AUDIO' : {'permissionGroup' : 'android.permission-group.MICROPHONE', 'description' : 'Allows the app to record audio with the microphone. This permission allows the app to record audio at any time without your confirmation.', 'protectionLevel' : 'dangerous', 'label' : 'record audio'}, 'android.permission.WRITE_CONTACTS' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to modify the data about your contacts stored on your phone, including the frequency with which you\'ve called, emailed, or communicated in other ways with specific contacts. This permission allows apps to delete contact data.', 'protectionLevel' : 'dangerous', 'label' : 'modify your contacts'}, 'android.permission.REGISTER_CALL_PROVIDER' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to register new telecom connections.', 'protectionLevel' : 'system|signature', 'label' : 'register new telecom connections'}, 'android.permission.ACCESS_KEYGUARD_SECURE_STORAGE' : {'permissionGroup' : '', 'description' : 'Allows an application to access keguard secure storage.', 'protectionLevel' : 'signature', 'label' : 'Access keyguard secure storage'}, 'android.permission.BIND_CONDITION_PROVIDER_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a condition provider service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to a condition provider service'}, 'android.permission.SIGNAL_PERSISTENT_PROCESSES' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to request that the supplied signal be sent to all persistent processes.', 'protectionLevel' : 'signature|system|development', 'label' : 'send Linux signals to apps'}, 'android.permission.MANAGE_VOICE_KEYPHRASES' : {'permissionGroup' : '', 'description' : 'Allows the holder to manage the keyphrases for voice hotword detection. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'manage voice keyphrases'}, 'android.permission.MASTER_CLEAR' : {'permissionGroup' : '', 'description' : 'Allows the app to completely reset the system to its factory settings, erasing all data, configuration, and installed apps.', 'protectionLevel' : 'signature|system', 'label' : 'reset system to factory defaults'}, 'android.permission.READ_INPUT_STATE' : {'permissionGroup' : '', 'description' : 'Allows the app to watch the keys you press even when interacting with another app (such as typing a password). Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'record what you type and actions you take'}, 'android.permission.INJECT_EVENTS' : {'permissionGroup' : '', 'description' : 'Allows the app to deliver its own input events (key presses, etc.) to other apps. Malicious apps may use this to take over the phone.', 'protectionLevel' : 'signature', 'label' : 'press keys and control buttons'}, 'com.android.email.permission.ACCESS_PROVIDER' : {'permissionGroup' : '', 'description' : 'Allows the app to access your email database, including received messages, sent messages, usernames, and passwords.', 'protectionLevel' : 'signature', 'label' : 'Access email provider data'}, 'org.chromium.content_shell.permission.SANDBOX' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.ACCESS_WIMAX_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to determine whether WiMAX is enabled and information about any WiMAX networks that are connected.', 'protectionLevel' : 'normal', 'label' : 'connect and disconnect from WiMAX'}, 'com.android.launcher.permission.WRITE_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to change the settings and shortcuts in Home.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'write Home settings and shortcuts'}, 'android.permission.FREEZE_SCREEN' : {'permissionGroup' : '', 'description' : 'Allows the application to temporarily freeze the screen for a full-screen transition.', 'protectionLevel' : 'signature', 'label' : 'freeze screen'}, 'android.permission.GET_TOP_ACTIVITY_INFO' : {'permissionGroup' : '', 'description' : 'Allows the holder to retrieve private information about the current application in the foreground of the screen.', 'protectionLevel' : 'signature', 'label' : 'get current app info'}, 'android.permission.WRITE_APN_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to change network settings and to intercept and inspect all network traffic, for example to change the proxy and port of any APN. Malicious apps may monitor, redirect, or modify network packets without your knowledge.', 'protectionLevel' : 'signature|system', 'label' : 'change/intercept network settings and traffic'}, 'android.permission.ACCESS_SURFACE_FLINGER' : {'permissionGroup' : '', 'description' : 'Allows the app to use SurfaceFlinger low-level features.', 'protectionLevel' : 'signature', 'label' : 'access SurfaceFlinger'}, 'android.permission.USER_ACTIVITY' : {'permissionGroup' : '', 'description' : 'Allows the app to reset the display timeout.', 'protectionLevel' : 'signature|system', 'label' : 'reset display timeout'}, 'android.permission.SERIAL_PORT' : {'permissionGroup' : '', 'description' : 'Allows the holder to access serial ports using the SerialManager API.', 'protectionLevel' : 'signature|system', 'label' : 'access serial ports'}, 'android.permission.ALLOW_ANY_CODEC_FOR_PLAYBACK' : {'permissionGroup' : '', 'description' : 'Allows the app to use any installed media decoder to decode for playback.', 'protectionLevel' : 'signature|system', 'label' : 'use any media decoder for playback'}, 'android.permission.MANAGE_USB' : {'permissionGroup' : 'android.permission-group.HARDWARE_CONTROLS', 'description' : 'Allows the app to manage preferences and permissions for USB devices.', 'protectionLevel' : 'signature|system', 'label' : 'manage preferences and permissions for USB devices'}, 'android.permission.PROCESS_OUTGOING_CALLS' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to see the number being dialed during an outgoing call with the option to redirect the call to a different number or abort the call altogether.', 'protectionLevel' : 'dangerous', 'label' : 'reroute outgoing calls'}, 'android.permission.CALL_PRIVILEGED' : {'permissionGroup' : '', 'description' : 'Allows the app to call any phone number, including emergency numbers, without your intervention. Malicious apps may place unnecessary and illegal calls to emergency services.', 'protectionLevel' : 'signature|system', 'label' : 'directly call any phone numbers'}, 'com.android.gallery3d.filtershow.permission.WRITE' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.WRITE_CALENDAR' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to add, remove, change events that you can modify on your phone, including those of friends or co-workers. This may allow the app to send messages that appear to come from calendar owners, or modify events without the owners\' knowledge.', 'protectionLevel' : 'dangerous', 'label' : 'add or modify calendar events and send email to guests without owners\' knowledge'}, 'android.permission.ACCESS_CONTENT_PROVIDERS_EXTERNALLY' : {'permissionGroup' : '', 'description' : 'Allows the holder to access content providers from the shell. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'access content providers externally'}, 'android.permission.SUBSCRIBED_FEEDS_READ' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to get details about the currently synced feeds.', 'protectionLevel' : 'normal', 'label' : 'read subscribed feeds'}, 'android.permission.MANAGE_ACCOUNTS' : {'permissionGroup' : 'android.permission-group.ACCOUNTS', 'description' : 'Allows the app to perform operations like adding and removing accounts, and deleting their password.', 'protectionLevel' : 'dangerous', 'label' : 'add or remove accounts'}, 'android.permission.BIND_INCALL_SERVICE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to control when and how the user sees the in-call screen.', 'protectionLevel' : 'system|signature', 'label' : 'interact with in-call screen'}, 'android.permission.BIND_REMOTEVIEWS' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of a widget service. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'bind to a widget service'}, 'org.chromium.chrome.shell.permission.DEBUG' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.BIND_ACCESSIBILITY_SERVICE' : {'permissionGroup' : '', 'description' : 'Allows the holder to bind to the top-level interface of an accessibility service. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'bind to an accessibility service'}, 'android.permission.ACCESS_NOTIFICATIONS' : {'permissionGroup' : '', 'description' : 'Allows the app to retrieve, examine, and clear notifications, including those posted by other apps.', 'protectionLevel' : 'signature|system', 'label' : 'access notifications'}, 'android.permission.WRITE_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to write to SMS messages stored on your phone or SIM card. Malicious apps may delete your messages.', 'protectionLevel' : 'dangerous', 'label' : 'edit your text messages (SMS or MMS)'}, 'android.permission.DELETE_PACKAGES' : {'permissionGroup' : '', 'description' : 'Allows the app to delete Android packages. Malicious apps may use this to delete important apps.', 'protectionLevel' : 'signature|system', 'label' : 'delete apps'}, 'android.permission.FILTER_EVENTS' : {'permissionGroup' : '', 'description' : 'Allows an application to register an input filter which filters the stream of all user events before they are dispatched. Malicious app may control the system UI whtout user intervention.', 'protectionLevel' : 'signature', 'label' : 'filter events'}, 'android.permission.ACCESS_CHECKIN_PROPERTIES' : {'permissionGroup' : '', 'description' : 'Allows the app read/write access to properties uploaded by the checkin service. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'access checkin properties'}, 'android.permission.SEND_RESPOND_VIA_MESSAGE' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to send requests to other messaging apps to handle respond-via-message events for incoming calls.', 'protectionLevel' : 'signature|system', 'label' : 'send respond-via-message events'}, 'com.android.voicemail.permission.WRITE_VOICEMAIL' : {'permissionGroup' : 'android.permission-group.VOICEMAIL', 'description' : 'Allows the app to modify and remove messages from your voicemail inbox.', 'protectionLevel' : 'system|signature', 'label' : 'write voicemails'}, 'android.permission.RETRIEVE_WINDOW_CONTENT' : {'permissionGroup' : 'android.permission-group.PERSONAL_INFO', 'description' : 'Allows the app to retrieve the content of the active window. Malicious apps may retrieve the entire window content and examine all its text except passwords.', 'protectionLevel' : 'signature|system', 'label' : 'retrieve screen content'}, 'android.permission.SET_PREFERRED_APPLICATIONS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to modify your preferred apps. Malicious apps may silently change the apps that are run, spoofing your existing apps to collect private data from you.', 'protectionLevel' : 'signature', 'label' : 'set preferred apps'}, 'android.permission.VIBRATE' : {'permissionGroup' : 'android.permission-group.AFFECTS_BATTERY', 'description' : 'Allows the app to control the vibrator.', 'protectionLevel' : 'normal', 'label' : 'control vibration'}, 'android.app.cts.permission.TEST_GRANTED' : {'permissionGroup' : '', 'description' : 'Used for running CTS tests, for testing operations where we have the permission.', 'protectionLevel' : 'normal', 'label' : 'Test Granted'}, 'android.permission.ACCESS_FM_RADIO' : {'permissionGroup' : 'android.permission-group.HARDWARE_CONTROLS', 'description' : 'Allows the app to access FM radio to listen to programs.', 'protectionLevel' : 'signature|system', 'label' : 'access FM radio'}, 'android.permission.DIAGNOSTIC' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to read and write to any resource owned by the diag group; for example, files in /dev. This could potentially affect system stability and security. This should be ONLY be used for hardware-specific diagnostics by the manufacturer or operator.', 'protectionLevel' : 'signature', 'label' : 'read/write to resources owned by diag'}, 'android.permission.BIND_JOB_SERVICE' : {'permissionGroup' : '', 'description' : 'This permission allows the Android system to run the application in the background when requested.', 'protectionLevel' : 'signature', 'label' : 'run the application\'s scheduled background work'}, 'org.chromium.chrome.shell.permission.SANDBOX' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.INTERACT_ACROSS_USERS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to perform actions across different users on the device. Malicious apps may use this to violate the protection between users.', 'protectionLevel' : 'signature|system|development', 'label' : 'interact across users'}, 'android.permission.REGISTER_SIM_SUBSCRIPTION' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to register new telecom SIM connections.', 'protectionLevel' : 'system|signature', 'label' : 'register new telecom SIM connections'}, 'android.permission.BROADCAST_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to broadcast a notification that an SMS message has been received. Malicious apps may use this to forge incoming SMS messages.', 'protectionLevel' : 'signature', 'label' : 'send SMS-received broadcast'}, 'android.permission.BLUETOOTH_MAP' : {'permissionGroup' : 'android.permission-group.BLUETOOTH_NETWORK', 'description' : 'Allows the app to access Bluetooth MAP data.', 'protectionLevel' : 'signature', 'label' : 'access Bluetooth MAP data'}, 'android.permission.READ_FRAME_BUFFER' : {'permissionGroup' : '', 'description' : 'Allows the app to read the content of the frame buffer.', 'protectionLevel' : 'signature|system', 'label' : 'read frame buffer'}, 'android.permission.STOP_APP_SWITCHES' : {'permissionGroup' : '', 'description' : 'Prevents the user from switching to another app.', 'protectionLevel' : 'signature|system', 'label' : 'prevent app switches'}, 'android.permission.ACCESS_WIFI_STATE' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : 'Allows the app to view information about Wi-Fi networking, such as whether Wi-Fi is enabled and name of connected Wi-Fi devices.', 'protectionLevel' : 'normal', 'label' : 'view Wi-Fi connections'}, 'android.permission.GLOBAL_SEARCH_CONTROL' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.ACCESS_DOWNLOAD_MANAGER' : {'permissionGroup' : '', 'description' : 'Allows the app to access the download manager and to use it to download files. Malicious apps can use this to disrupt downloads and access private information.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'Access download manager.'}, 'com.android.launcher3.permission.WRITE_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to change the settings and shortcuts in Home.', 'protectionLevel' : 'signatureOrSystem', 'label' : 'write Home settings and shortcuts'}, 'android.permission.MODIFY_PARENTAL_CONTROLS' : {'permissionGroup' : '', 'description' : 'Allows the holder to modify the system\'s parental controls data. Should never be needed for normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'modify parental controls'}, 'org.chromium.chromecast.shell.permission.SANDBOX' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.READ_SYNC_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYNC_SETTINGS', 'description' : 'Allows the app to read the sync settings for an account. For example, this can determine whether the People app is synced with an account.', 'protectionLevel' : 'normal', 'label' : 'read sync settings'}, 'android.permission.OEM_UNLOCK_STATE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.SUBSCRIBED_FEEDS_WRITE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to modify your currently synced feeds. Malicious apps may change your synced feeds.', 'protectionLevel' : 'dangerous', 'label' : 'write subscribed feeds'}, 'android.permission.READ_USER_DICTIONARY' : {'permissionGroup' : 'android.permission-group.USER_DICTIONARY', 'description' : 'Allows the app to read all words, names and phrases that the user may have stored in the user dictionary.', 'protectionLevel' : 'dangerous', 'label' : 'read terms you added to the dictionary'}, 'android.permission.READ_INSTALL_SESSIONS' : {'permissionGroup' : '', 'description' : 'Allows an application to read install sessions. This allows it to see details about active package installations.', 'protectionLevel' : '', 'label' : 'Read install sessions'}, 'android.permission.MANAGE_USERS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows apps to manage users on the device, including query, creation and deletion.', 'protectionLevel' : 'signature|system', 'label' : 'manage users'}, 'android.permission.FACTORY_TEST' : {'permissionGroup' : '', 'description' : 'Run as a low-level manufacturer test, allowing complete access to the phone hardware. Only available when a phone is running in manufacturer test mode.', 'protectionLevel' : 'signature', 'label' : 'run in factory test mode'}, 'android.permission.CHANGE_COMPONENT_ENABLED_STATE' : {'permissionGroup' : '', 'description' : 'Allows the app to change whether a component of another app is enabled or not. Malicious apps may use this to disable important phone capabilities. Care must be used with this permission, as it is possible to get app components into an unusable, inconsistent, or unstable state.', 'protectionLevel' : 'signature|system', 'label' : 'enable or disable app components'}, 'android.permission.RECEIVE_BOOT_COMPLETED' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to have itself started as soon as the system has finished booting. This can make it take longer to start the phone and allow the app to slow down the overall phone by always running.', 'protectionLevel' : 'normal', 'label' : 'run at startup'}, 'android.permission.SET_POINTER_SPEED' : {'permissionGroup' : '', 'description' : 'Allows the app to change the mouse or trackpad pointer speed at any time. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'change pointer speed'}, 'android.permission.BACKUP' : {'permissionGroup' : '', 'description' : 'Allows the app to control the system\'s backup and restore mechanism. Not for use by normal apps.', 'protectionLevel' : 'signature|system', 'label' : 'control system backup and restore'}, 'android.permission.TEMPORARY_ENABLE_ACCESSIBILITY' : {'permissionGroup' : '', 'description' : 'Allows an application to temporarily enable accessibility on the device. Malicious apps may enable accessibility without user consent.', 'protectionLevel' : 'signature', 'label' : 'temporary enable accessibility'}, 'android.permission.EXPAND_STATUS_BAR' : {'permissionGroup' : 'android.permission-group.STATUS_BAR', 'description' : 'Allows the app to expand or collapse the status bar.', 'protectionLevel' : 'normal', 'label' : 'expand/collapse status bar'}, 'android.permission.ACCESS_FINE_LOCATION' : {'permissionGroup' : 'android.permission-group.LOCATION', 'description' : 'Allows the app to get your precise location using the Global Positioning System (GPS) or network location sources such as cell towers and Wi-Fi. These location services must be turned on and available to your device for the app to use them. Apps may use this to determine where you are, and may consume additional battery power.', 'protectionLevel' : 'dangerous', 'label' : 'precise location (GPS and network-based)'}, 'android.permission.ASEC_RENAME' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to rename internal storage.', 'protectionLevel' : 'signature', 'label' : 'rename internal storage'}, 'android.permission.LOCATION_HARDWARE' : {'permissionGroup' : 'android.permission-group.LOCATION', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'com.android.frameworks.coretests.keysets_permdef.keyset_perm' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.GET_APP_OPS_STATS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to retrieve collected application operation statistics. Not for use by normal apps.', 'protectionLevel' : 'signature|system|development', 'label' : 'retrieve app ops statistics'}, 'android.permission.REORDER_TASKS' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to move tasks to the foreground and background. The app may do this without your input.', 'protectionLevel' : 'normal', 'label' : 'reorder running apps'}, 'com.android.cts.permissionNormal' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : '', 'label' : ''}, 'android.permission.CONTROL_KEYGUARD' : {'permissionGroup' : '', 'description' : 'Allows an application to control keguard.', 'protectionLevel' : 'signature', 'label' : 'Control displaying and hiding keyguard'}, 'android.permission.ASEC_DESTROY' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to destroy internal storage.', 'protectionLevel' : 'signature', 'label' : 'destroy internal storage'}, 'android.permission.BROADCAST_PACKAGE_REMOVED' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to broadcast a notification that an app package has been removed. Malicious apps may use this to kill any other running app.', 'protectionLevel' : 'signature', 'label' : 'send package removed broadcast'}, 'android.permission.MANAGE_ACTIVITY_STACKS' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to add, remove, and modify the activity stacks in which other apps run. Malicious apps may disrupt the behavior of other apps.', 'protectionLevel' : 'signature|system', 'label' : 'manage activity stacks'}, 'android.permission.CONNECTIVITY_INTERNAL' : {'permissionGroup' : 'android.permission-group.NETWORK', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.SET_SCREEN_COMPATIBILITY' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to control the screen compatibility mode of other applications. Malicious applications may break the behavior of other applications.', 'protectionLevel' : 'signature', 'label' : 'set screen compatibility'}, 'android.permission.WRITE_EXTERNAL_STORAGE' : {'permissionGroup' : 'android.permission-group.STORAGE', 'description' : 'Allows the app to write to the SD card.', 'protectionLevel' : 'dangerous', 'label' : 'modify or delete the contents of your SD card'}, 'android.permission.CAMERA_DISABLE_TRANSMIT_LED' : {'permissionGroup' : 'android.permission-group.CAMERA', 'description' : 'Allows a pre-installed system application to disable the camera use indicator LED.', 'protectionLevel' : 'signature|system', 'label' : 'disable transmit indicator LED when camera is in use'}, 'com.android.frameworks.coretests.DANGEROUS' : {'permissionGroup' : 'android.permission-group.COST_MONEY', 'description' : '', 'protectionLevel' : 'dangerous', 'label' : ''}, 'android.permission.WRITE_SOCIAL_STREAM' : {'permissionGroup' : 'android.permission-group.SOCIAL_INFO', 'description' : 'Allows the app to display social updates from your friends. Be careful when sharing information -- this allows the app to produce messages that may appear to come from a friend. Note: this permission may not be enforced on all social networks.', 'protectionLevel' : 'dangerous', 'label' : 'write to your social stream'}, 'com.android.printspooler.permission.ACCESS_ALL_PRINT_JOBS' : {'permissionGroup' : '', 'description' : 'Allows the holder to access print jobs created by another app. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'access all print jobs'}, 'android.permission.ASEC_MOUNT_UNMOUNT' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to mount/unmount internal storage.', 'protectionLevel' : 'signature', 'label' : 'mount/unmount internal storage'}, 'android.permission.INSTALL_PACKAGES' : {'permissionGroup' : '', 'description' : 'Allows the app to install new or updated Android packages. Malicious apps may use this to add new apps with arbitrarily powerful permissions.', 'protectionLevel' : 'signature|system', 'label' : 'directly install apps'}, 'com.android.providers.tv.permission.WRITE_EPG_DATA' : {'permissionGroup' : '', 'description' : 'Allows the app to provide and modify the TV channel/program information on your device.', 'protectionLevel' : 'dangerous', 'label' : 'write TV channel/program information'}, 'android.permission.RECEIVE_EMERGENCY_BROADCAST' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to receive and process emergency broadcast messages. This permission is only available to system apps.', 'protectionLevel' : 'signature|system', 'label' : 'receive emergency broadcasts'}, 'com.android.launcher.permission.READ_SETTINGS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to read the settings and shortcuts in Home.', 'protectionLevel' : 'normal', 'label' : 'read Home settings and shortcuts'}, 'com.android.alarm.permission.SET_ALARM' : {'permissionGroup' : 'android.permission-group.DEVICE_ALARMS', 'description' : 'Allows the app to set an alarm in an installed alarm clock app. Some alarm clock apps may not implement this feature.', 'protectionLevel' : 'normal', 'label' : 'set an alarm'}, 'android.permission.CAPTURE_VIDEO_OUTPUT' : {'permissionGroup' : '', 'description' : 'Allows the app to capture and redirect video output.', 'protectionLevel' : 'signature|system', 'label' : 'capture video output'}, 'org.chromium.chrome.shell.permission.C2D_MESSAGE' : {'permissionGroup' : '', 'description' : '', 'protectionLevel' : 'signature', 'label' : ''}, 'android.permission.READ_PRECISE_PHONE_STATE' : {'permissionGroup' : 'android.permission-group.PHONE_CALLS', 'description' : 'Allows the app to access the precise phone states. This permission allows the app to determine the real call status, whether a call is active or in the background, call fails, precise data connection status and data connection fails.', 'protectionLevel' : 'signature|system', 'label' : 'read precise phone states'}, 'android.permission.READ_SMS' : {'permissionGroup' : 'android.permission-group.MESSAGES', 'description' : 'Allows the app to read SMS messages stored on your phone or SIM card. This allows the app to read all SMS messages, regardless of content or confidentiality.', 'protectionLevel' : 'dangerous', 'label' : 'read your text messages (SMS or MMS)'}, 'android.permission.ACCESS_INPUT_FLINGER' : {'permissionGroup' : '', 'description' : 'Allows the app to use InputFlinger low-level features.', 'protectionLevel' : 'signature', 'label' : 'access InputFlinger'}, 'android.permission.BATTERY_STATS' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows an application to read the current low-level battery use data. May allow the application to find out detailed information about which apps you use.', 'protectionLevel' : 'signature|system|development', 'label' : 'read battery statistics'}, 'android.permission.GLOBAL_SEARCH' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : '', 'protectionLevel' : 'signature|system', 'label' : ''}, 'android.permission.SET_INPUT_CALIBRATION' : {'permissionGroup' : '', 'description' : 'Allows the app to modify the calibration parameters of the touch screen. Should never be needed for normal apps.', 'protectionLevel' : 'signature', 'label' : 'change input device calibration'}, 'android.permission.REMOVE_TASKS' : {'permissionGroup' : 'android.permission-group.APP_INFO', 'description' : 'Allows the app to remove tasks and kill their apps. Malicious apps may disrupt the behavior of other apps.', 'protectionLevel' : 'signature', 'label' : 'stop running apps'}, 'android.permission.SET_ALWAYS_FINISH' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to control whether activities are always finished as soon as they go to the background. Never needed for normal apps.', 'protectionLevel' : 'signature|system|development', 'label' : 'force background apps to close'}, 'android.permission.ACCESS_ALL_EXTERNAL_STORAGE' : {'permissionGroup' : 'android.permission-group.DEVELOPMENT_TOOLS', 'description' : 'Allows the app to access external storage for all users.', 'protectionLevel' : 'signature', 'label' : 'access external storage of all users'}, 'android.permission.CLEAR_APP_CACHE' : {'permissionGroup' : 'android.permission-group.SYSTEM_TOOLS', 'description' : 'Allows the app to free phone storage by deleting files in the cache directories of other applications. This may cause other applications to start up more slowly as they need to re-retrieve their data.', 'protectionLevel' : 'dangerous', 'label' : 'delete all app cache data'}, 'android.permission.MANAGE_NETWORK_POLICY' : {'permissionGroup' : '', 'description' : 'Allows the app to manage network policies and define app-specific rules.', 'protectionLevel' : 'signature', 'label' : 'manage network policy'}, } AOSP_PERMISSION_GROUPS = { 'android.permission-group.AFFECTS_BATTERY' : {'description' : 'Use features that can quickly drain battery.', 'label' : 'Affects Battery'}, 'android.permission-group.PERSONAL_INFO' : {'description' : 'Direct access to information about you, stored in on your contact card.', 'label' : 'Your personal information'}, 'android.permission-group.WRITE_USER_DICTIONARY' : {'description' : 'Add words to the user dictionary.', 'label' : 'Write User Dictionary'}, 'android.permission-group.BLUETOOTH_NETWORK' : {'description' : 'Access devices and networks through Bluetooth.', 'label' : 'Bluetooth'}, 'android.permission-group.DISPLAY' : {'description' : 'Effect the UI of other applications.', 'label' : 'Other Application UI'}, 'android.permission-group.SCREENLOCK' : {'description' : 'Ability to affect behavior of the lock screen on your device.', 'label' : 'Lock screen'}, 'android.permission-group.STORAGE' : {'description' : 'Access the SD card.', 'label' : 'Storage'}, 'android.permission-group.APP_INFO' : {'description' : 'Ability to affect behavior of other applications on your device.', 'label' : 'Your applications information'}, 'android.permission-group.SYNC_SETTINGS' : {'description' : 'Access to the sync settings.', 'label' : 'Sync Settings'}, 'android.permission-group.AUDIO_SETTINGS' : {'description' : 'Change audio settings.', 'label' : 'Audio Settings'}, 'android.permission-group.WALLPAPER' : {'description' : 'Change the device wallpaper settings.', 'label' : 'Wallpaper'}, 'android.permission-group.CALENDAR' : {'description' : 'Direct access to calendar and events.', 'label' : 'Calendar'}, 'android.permission-group.DEVICE_ALARMS' : {'description' : 'Set the alarm clock.', 'label' : 'Alarm'}, 'android.permission-group.DEVELOPMENT_TOOLS' : {'description' : 'Features only needed for app developers.', 'label' : 'Development tools'}, 'android.permission-group.USER_DICTIONARY' : {'description' : 'Read words in user dictionary.', 'label' : 'Read User Dictionary'}, 'android.permission-group.VOICEMAIL' : {'description' : 'Direct access to voicemail.', 'label' : 'Voicemail'}, 'android.permission-group.LOCATION' : {'description' : 'Monitor your physical location.', 'label' : 'Your location'}, 'android.permission-group.STATUS_BAR' : {'description' : 'Change the device status bar settings.', 'label' : 'Status Bar'}, 'android.permission-group.SYSTEM_TOOLS' : {'description' : 'Lower-level access and control of the system.', 'label' : 'System tools'}, 'android.permission-group.SYSTEM_CLOCK' : {'description' : 'Change the device time or timezone.', 'label' : 'Clock'}, 'android.permission-group.SOCIAL_INFO' : {'description' : 'Direct access to information about your contacts and social connections.', 'label' : 'Your social information'}, 'android.permission-group.NETWORK' : {'description' : 'Access various network features.', 'label' : 'Network communication'}, 'android.permission-group.MESSAGES' : {'description' : 'Read and write your SMS, email, and other messages.', 'label' : 'Your messages'}, 'android.permission-group.MICROPHONE' : {'description' : 'Direct access to the microphone to record audio.', 'label' : 'Microphone'}, 'android.permission-group.ACCESSIBILITY_FEATURES' : {'description' : 'Features that assistive technology can request.', 'label' : 'Accessibility features'}, 'android.permission-group.ACCOUNTS' : {'description' : 'Access the available accounts.', 'label' : 'Your accounts'}, 'android.permission-group.COST_MONEY' : {'description' : '', 'label' : ''}, 'android.permission-group.CAMERA' : {'description' : 'Direct access to camera for image or video capture.', 'label' : 'Camera'}, 'android.permission-group.HARDWARE_CONTROLS' : {'description' : 'Direct access to hardware on the handset.', 'label' : 'Hardware controls'}, 'android.permission-group.PHONE_CALLS' : {'description' : 'Monitor, record, and process phone calls.', 'label' : 'Phone calls'}, 'android.permission-group.BOOKMARKS' : {'description' : 'Direct access to bookmarks and browser history.', 'label' : 'Bookmarks and History'}, } #################################################
hackday-profilers/flocker
refs/heads/master
flocker/route/_memory.py
15
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Objects related to an in-memory implementation of ``INetwork``. """ from zope.interface import implementer from eliot import Logger from ._interfaces import INetwork from ._model import Proxy, OpenPort @implementer(INetwork) class MemoryNetwork(object): """ An isolated, in-memory-only implementation of ``INetwork``. :ivar set _proxies: A ``set`` of ``Proxy`` instances representing all of the proxies supposedly configured on this network. """ logger = Logger() def __init__(self, used_ports): self._proxies = set() self._used_ports = used_ports self._open_ports = set() def create_proxy_to(self, ip, port): proxy = Proxy(ip=ip, port=port) self._proxies.add(proxy) return proxy def delete_proxy(self, proxy): self._proxies.remove(proxy) def open_port(self, port): open_port = OpenPort(port=port) self._open_ports.add(open_port) return open_port def delete_open_port(self, open_port): self._open_ports.remove(open_port) def enumerate_proxies(self): return list(self._proxies) def enumerate_open_ports(self): return list(self._open_ports) def enumerate_used_ports(self): proxy_ports = frozenset(proxy.port for proxy in self._proxies) open_ports = frozenset(open_port.port for open_port in self._open_ports) return proxy_ports | open_ports | self._used_ports def make_memory_network(used_ports=frozenset()): """ Create a new, isolated, in-memory-only provider of ``INetwork``. :param frozenset used_ports: Some port numbers which are to be considered already used and included in the result of ``enumerate_used_ports`` when called on the returned object. """ return MemoryNetwork(used_ports=used_ports)
timlinux/QGIS
refs/heads/master
python/plugins/processing/algs/grass7/Grass7Utils.py
6
# -*- coding: utf-8 -*- """ *************************************************************************** GrassUtils.py --------------------- Date : February 2015 Copyright : (C) 2014-2015 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'February 2015' __copyright__ = '(C) 2014-2015, Victor Olaya' import stat import shutil import shlex import subprocess import os import sys from qgis.core import (Qgis, QgsApplication, QgsProcessingUtils, QgsMessageLog) from qgis.PyQt.QtCore import QCoreApplication from processing.core.ProcessingConfig import ProcessingConfig from processing.tools.system import userFolder, isWindows, isMac, mkdir from processing.tests.TestData import points from processing.algs.gdal.GdalUtils import GdalUtils class Grass7Utils: GRASS_REGION_XMIN = 'GRASS7_REGION_XMIN' GRASS_REGION_YMIN = 'GRASS7_REGION_YMIN' GRASS_REGION_XMAX = 'GRASS7_REGION_XMAX' GRASS_REGION_YMAX = 'GRASS7_REGION_YMAX' GRASS_REGION_CELLSIZE = 'GRASS7_REGION_CELLSIZE' GRASS_LOG_COMMANDS = 'GRASS7_LOG_COMMANDS' GRASS_LOG_CONSOLE = 'GRASS7_LOG_CONSOLE' GRASS_HELP_PATH = 'GRASS_HELP_PATH' GRASS_USE_REXTERNAL = 'GRASS_USE_REXTERNAL' GRASS_USE_VEXTERNAL = 'GRASS_USE_VEXTERNAL' # TODO Review all default options formats GRASS_RASTER_FORMATS_CREATEOPTS = { 'GTiff': 'TFW=YES,COMPRESS=LZW', 'PNG': 'ZLEVEL=9', 'WEBP': 'QUALITY=85' } sessionRunning = False sessionLayers = {} projectionSet = False isGrassInstalled = False version = None path = None command = None @staticmethod def grassBatchJobFilename(): """ The Batch file is executed by GRASS binary. On GNU/Linux and MacOSX it will be executed by a shell. On MS-Windows, it will be executed by cmd.exe. """ gisdbase = Grass7Utils.grassDataFolder() if isWindows(): batchFile = os.path.join(gisdbase, 'grass_batch_job.cmd') else: batchFile = os.path.join(gisdbase, 'grass_batch_job.sh') return batchFile @staticmethod def installedVersion(run=False): """ Returns the installed version of GRASS by launching the GRASS command with -v parameter. """ if Grass7Utils.isGrassInstalled and not run: return Grass7Utils.version if Grass7Utils.grassBin() is None: return None # Launch GRASS command with -v parameter # For MS-Windows, hide the console if isWindows(): si = subprocess.STARTUPINFO() si.dwFlags |= subprocess.STARTF_USESHOWWINDOW si.wShowWindow = subprocess.SW_HIDE with subprocess.Popen( [Grass7Utils.command, '-v'], shell=False, stdout=subprocess.PIPE, stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, universal_newlines=True, startupinfo=si if isWindows() else None ) as proc: try: lines = proc.stdout.readlines() for line in lines: if "GRASS GIS " in line: line = line.split(" ")[-1].strip() if line.startswith("7."): Grass7Utils.version = line return Grass7Utils.version except: pass return None @staticmethod def grassBin(): """ Find GRASS binary path on the operating system. Sets global variable Grass7Utils.command """ def searchFolder(folder): """ Inline function to search for grass binaries into a folder with os.walk """ if os.path.exists(folder): for root, dirs, files in os.walk(folder): for cmd in cmdList: if cmd in files: return os.path.join(root, cmd) return None if Grass7Utils.command: return Grass7Utils.command path = Grass7Utils.grassPath() command = None vn = os.path.join(path, "etc", "VERSIONNUMBER") if os.path.isfile(vn): with open(vn, "r") as f: major, minor, patch = f.readlines()[0].split(' ')[0].split('.') if patch != 'svn': patch = '' cmdList = [ "grass{}{}{}".format(major, minor, patch), "grass", "grass{}{}{}.sh".format(major, minor, patch), "grass.sh" ] else: cmdList = [ "grass78", "grass76", "grass74", "grass72", "grass70", "grass", "grass78.sh", "grass76.sh", "grass74.sh", "grass72.sh", "grass70.sh", "grass.sh" ] # For MS-Windows there is a difference between GRASS Path and GRASS binary if isWindows(): # If nothing found, use OSGEO4W or QgsPrefix: if "OSGEO4W_ROOT" in os.environ: testFolder = str(os.environ['OSGEO4W_ROOT']) else: testFolder = str(QgsApplication.prefixPath()) testFolder = os.path.join(testFolder, 'bin') command = searchFolder(testFolder) elif isMac(): # Search in grassPath command = searchFolder(path) # If everything has failed, use shutil if not command: for cmd in cmdList: testBin = shutil.which(cmd) if testBin: command = os.path.abspath(testBin) break if command: Grass7Utils.command = command if path == '': Grass7Utils.path = os.path.dirname(command) return command @staticmethod def grassPath(): """ Find GRASS path on the operating system. Sets global variable Grass7Utils.path """ if Grass7Utils.path is not None: return Grass7Utils.path if not isWindows() and not isMac(): return '' folder = None # Under MS-Windows, we use GISBASE or QGIS Path for folder if isWindows(): if "GISBASE" in os.environ: folder = os.environ["GISBASE"] else: testfolder = os.path.join(os.path.dirname(QgsApplication.prefixPath()), 'grass') if os.path.isdir(testfolder): grassfolders = sorted([f for f in os.listdir(testfolder) if f.startswith("grass-7.") and os.path.isdir(os.path.join(testfolder, f))], reverse=True, key=lambda x: [int(v) for v in x[len("grass-"):].split('.') if v != 'svn']) if grassfolders: folder = os.path.join(testfolder, grassfolders[0]) elif isMac(): # For MacOSX, first check environment if "GISBASE" in os.environ: folder = os.environ["GISBASE"] else: # Find grass folder if it exists inside QGIS bundle for version in ['', '7', '78', '76', '74', '72', '71', '70']: testfolder = os.path.join(str(QgsApplication.prefixPath()), 'grass{}'.format(version)) if os.path.isdir(testfolder): folder = testfolder break # If nothing found, try standalone GRASS installation if folder is None: for version in ['8', '6', '4', '2', '1', '0']: testfolder = '/Applications/GRASS-7.{}.app/Contents/MacOS'.format(version) if os.path.isdir(testfolder): folder = testfolder break if folder is not None: Grass7Utils.path = folder return folder or '' @staticmethod def grassDescriptionPath(): return os.path.join(os.path.dirname(__file__), 'description') @staticmethod def getWindowsCodePage(): """ Determines MS-Windows CMD.exe shell codepage. Used into GRASS exec script under MS-Windows. """ from ctypes import cdll return str(cdll.kernel32.GetACP()) @staticmethod def createGrassBatchJobFileFromGrassCommands(commands): with open(Grass7Utils.grassBatchJobFilename(), 'w') as fout: if not isWindows(): fout.write('#!/bin/sh\n') else: fout.write('chcp {}>NUL\n'.format(Grass7Utils.getWindowsCodePage())) for command in commands: Grass7Utils.writeCommand(fout, command) fout.write('exit') @staticmethod def grassMapsetFolder(): """ Creates and returns the GRASS temporary DB LOCATION directory. """ folder = os.path.join(Grass7Utils.grassDataFolder(), 'temp_location') mkdir(folder) return folder @staticmethod def grassDataFolder(): """ Creates and returns the GRASS temporary DB directory. """ tempfolder = os.path.normpath( os.path.join(QgsProcessingUtils.tempFolder(), 'grassdata')) mkdir(tempfolder) return tempfolder @staticmethod def createTempMapset(): """ Creates a temporary location and mapset(s) for GRASS data processing. A minimal set of folders and files is created in the system's default temporary directory. The settings files are written with sane defaults, so GRASS can do its work. The mapset projection will be set later, based on the projection of the first input image or vector """ folder = Grass7Utils.grassMapsetFolder() mkdir(os.path.join(folder, 'PERMANENT')) mkdir(os.path.join(folder, 'PERMANENT', '.tmp')) Grass7Utils.writeGrassWindow(os.path.join(folder, 'PERMANENT', 'DEFAULT_WIND')) with open(os.path.join(folder, 'PERMANENT', 'MYNAME'), 'w') as outfile: outfile.write( 'QGIS GRASS GIS 7 interface: temporary data processing location.\n') Grass7Utils.writeGrassWindow(os.path.join(folder, 'PERMANENT', 'WIND')) mkdir(os.path.join(folder, 'PERMANENT', 'sqlite')) with open(os.path.join(folder, 'PERMANENT', 'VAR'), 'w') as outfile: outfile.write('DB_DRIVER: sqlite\n') outfile.write('DB_DATABASE: $GISDBASE/$LOCATION_NAME/$MAPSET/sqlite/sqlite.db\n') @staticmethod def writeGrassWindow(filename): """ Creates the GRASS Window file """ with open(filename, 'w') as out: out.write('proj: 0\n') out.write('zone: 0\n') out.write('north: 1\n') out.write('south: 0\n') out.write('east: 1\n') out.write('west: 0\n') out.write('cols: 1\n') out.write('rows: 1\n') out.write('e-w resol: 1\n') out.write('n-s resol: 1\n') out.write('top: 1\n') out.write('bottom: 0\n') out.write('cols3: 1\n') out.write('rows3: 1\n') out.write('depths: 1\n') out.write('e-w resol3: 1\n') out.write('n-s resol3: 1\n') out.write('t-b resol: 1\n') @staticmethod def prepareGrassExecution(commands): """ Prepare GRASS batch job in a script and returns it as a command ready for subprocess. """ if Grass7Utils.command is None: Grass7Utils.grassBin() env = os.environ.copy() env['GRASS_MESSAGE_FORMAT'] = 'plain' if 'GISBASE' in env: del env['GISBASE'] Grass7Utils.createGrassBatchJobFileFromGrassCommands(commands) os.chmod(Grass7Utils.grassBatchJobFilename(), stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE) command = [Grass7Utils.command, os.path.join(Grass7Utils.grassMapsetFolder(), 'PERMANENT'), '--exec', Grass7Utils.grassBatchJobFilename()] return command, env @staticmethod def executeGrass(commands, feedback, outputCommands=None): loglines = [] loglines.append(Grass7Utils.tr('GRASS GIS 7 execution console output')) grassOutDone = False command, grassenv = Grass7Utils.prepareGrassExecution(commands) # QgsMessageLog.logMessage('exec: {}'.format(command), 'DEBUG', Qgis.Info) # For MS-Windows, we need to hide the console window. kw = {} if isWindows(): si = subprocess.STARTUPINFO() si.dwFlags |= subprocess.STARTF_USESHOWWINDOW si.wShowWindow = subprocess.SW_HIDE kw['startupinfo'] = si if sys.version_info >= (3, 6): kw['encoding'] = "cp{}".format(Grass7Utils.getWindowsCodePage()) with subprocess.Popen( command, shell=False, stdout=subprocess.PIPE, stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, universal_newlines=True, env=grassenv, **kw ) as proc: for line in iter(proc.stdout.readline, ''): if 'GRASS_INFO_PERCENT' in line: try: feedback.setProgress(int(line[len('GRASS_INFO_PERCENT') + 2:])) except: pass else: if 'r.out' in line or 'v.out' in line: grassOutDone = True loglines.append(line) if any([l in line for l in ['WARNING', 'ERROR']]): feedback.reportError(line.strip()) elif 'Segmentation fault' in line: feedback.reportError(line.strip()) feedback.reportError('\n' + Grass7Utils.tr('GRASS command crashed :( Try a different set of input parameters and consult the GRASS algorithm manual for more information.') + '\n') if ProcessingConfig.getSetting(Grass7Utils.GRASS_USE_REXTERNAL): feedback.reportError(Grass7Utils.tr( 'Suggest disabling the experimental "use r.external" option from the Processing GRASS Provider options.') + '\n') if ProcessingConfig.getSetting(Grass7Utils.GRASS_USE_VEXTERNAL): feedback.reportError(Grass7Utils.tr( 'Suggest disabling the experimental "use v.external" option from the Processing GRASS Provider options.') + '\n') elif line.strip(): feedback.pushConsoleInfo(line.strip()) # Some GRASS scripts, like r.mapcalculator or r.fillnulls, call # other GRASS scripts during execution. This may override any # commands that are still to be executed by the subprocess, which # are usually the output ones. If that is the case runs the output # commands again. if not grassOutDone and outputCommands: command, grassenv = Grass7Utils.prepareGrassExecution(outputCommands) # For MS-Windows, we need to hide the console window. kw = {} if isWindows(): si = subprocess.STARTUPINFO() si.dwFlags |= subprocess.STARTF_USESHOWWINDOW si.wShowWindow = subprocess.SW_HIDE kw['startupinfo'] = si if sys.version_info >= (3, 6): kw['encoding'] = "cp{}".format(Grass7Utils.getWindowsCodePage()) with subprocess.Popen( command, shell=False, stdout=subprocess.PIPE, stdin=subprocess.DEVNULL, stderr=subprocess.STDOUT, universal_newlines=True, env=grassenv, **kw ) as proc: for line in iter(proc.stdout.readline, ''): if 'GRASS_INFO_PERCENT' in line: try: feedback.setProgress(int( line[len('GRASS_INFO_PERCENT') + 2:])) except: pass if any([l in line for l in ['WARNING', 'ERROR']]): loglines.append(line.strip()) feedback.reportError(line.strip()) elif line.strip(): loglines.append(line.strip()) feedback.pushConsoleInfo(line.strip()) if ProcessingConfig.getSetting(Grass7Utils.GRASS_LOG_CONSOLE): QgsMessageLog.logMessage('\n'.join(loglines), 'Processing', Qgis.Info) # GRASS session is used to hold the layers already exported or # produced in GRASS between multiple calls to GRASS algorithms. # This way they don't have to be loaded multiple times and # following algorithms can use the results of the previous ones. # Starting a session just involves creating the temp mapset # structure @staticmethod def startGrassSession(): if not Grass7Utils.sessionRunning: Grass7Utils.createTempMapset() Grass7Utils.sessionRunning = True # End session by removing the temporary GRASS mapset and all # the layers. @staticmethod def endGrassSession(): # shutil.rmtree(Grass7Utils.grassMapsetFolder(), True) Grass7Utils.sessionRunning = False Grass7Utils.sessionLayers = {} Grass7Utils.projectionSet = False @staticmethod def getSessionLayers(): return Grass7Utils.sessionLayers @staticmethod def addSessionLayers(exportedLayers): Grass7Utils.sessionLayers = dict( list(Grass7Utils.sessionLayers.items()) + list(exportedLayers.items())) @staticmethod def checkGrassIsInstalled(ignorePreviousState=False): if not ignorePreviousState: if Grass7Utils.isGrassInstalled: return # We check the version of Grass7 if Grass7Utils.installedVersion() is not None: # For Ms-Windows, we check GRASS binaries if isWindows(): cmdpath = os.path.join(Grass7Utils.path, 'bin', 'r.out.gdal.exe') if not os.path.exists(cmdpath): return Grass7Utils.tr( 'The specified GRASS 7 folder "{}" does not contain ' 'a valid set of GRASS 7 modules.\nPlease, go to the ' 'Processing settings dialog, and check that the ' 'GRASS 7\nfolder is correctly configured'.format(os.path.join(Grass7Utils.path, 'bin'))) Grass7Utils.isGrassInstalled = True return # Return error messages else: # MS-Windows or MacOSX if isWindows() or isMac(): if Grass7Utils.path is None: return Grass7Utils.tr( 'GRASS GIS 7 folder is not configured. Please configure ' 'it before running GRASS GIS 7 algorithms.') if Grass7Utils.command is None: return Grass7Utils.tr( 'GRASS GIS 7 binary {0} can\'t be found on this system from a shell. ' 'Please install it or configure your PATH {1} environment variable.'.format( '(grass.bat)' if isWindows() else '(grass.sh)', 'or OSGEO4W_ROOT' if isWindows() else '')) # GNU/Linux else: return Grass7Utils.tr( 'GRASS 7 can\'t be found on this system from a shell. ' 'Please install it or configure your PATH environment variable.') @staticmethod def tr(string, context=''): if context == '': context = 'Grass7Utils' return QCoreApplication.translate(context, string) @staticmethod def writeCommand(output, command): try: # Python 2 output.write(command.encode('utf8') + '\n') except TypeError: # Python 3 output.write(command + '\n') @staticmethod def grassHelpPath(): helpPath = ProcessingConfig.getSetting(Grass7Utils.GRASS_HELP_PATH) if helpPath is None: if isWindows() or isMac(): if Grass7Utils.path is not None: localPath = os.path.join(Grass7Utils.path, 'docs/html') if os.path.exists(localPath): helpPath = os.path.abspath(localPath) else: searchPaths = ['/usr/share/doc/grass-doc/html', '/opt/grass/docs/html', '/usr/share/doc/grass/docs/html'] for path in searchPaths: if os.path.exists(path): helpPath = os.path.abspath(path) break if helpPath is not None: return helpPath elif Grass7Utils.version: version = Grass7Utils.version.replace('.', '')[:2] return 'https://grass.osgeo.org/grass{}/manuals/'.format(version) else: # GRASS not available! return 'https://grass.osgeo.org/grass78/manuals/' @staticmethod def getSupportedOutputRasterExtensions(): # We use the same extensions than GDAL because: # - GRASS is also using GDAL for raster imports. # - Chances that GRASS is compiled with another version of # GDAL than QGIS are very limited! return GdalUtils.getSupportedOutputRasterExtensions() @staticmethod def getRasterFormatFromFilename(filename): """ Returns Raster format name from a raster filename. :param filename: The name with extension of the raster. :return: The Gdal short format name for extension. """ ext = os.path.splitext(filename)[1].lower() ext = ext.lstrip('.') if ext: supported = GdalUtils.getSupportedRasters() for name in list(supported.keys()): exts = supported[name] if ext in exts: return name return 'GTiff'
amith01994/intellij-community
refs/heads/master
python/helpers/profiler/thrift/TSerialization.py
36
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # from thrift.protocol import TBinaryProtocol from thrift.transport import TTransport def serialize(thrift_object, protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): transport = TTransport.TMemoryBuffer() protocol = protocol_factory.getProtocol(transport) thrift_object.write(protocol) return transport.getvalue() def deserialize(base, buf, protocol_factory=TBinaryProtocol.TBinaryProtocolFactory()): transport = TTransport.TMemoryBuffer(buf) protocol = protocol_factory.getProtocol(transport) base.read(protocol) return base
ncos/hometasks
refs/heads/master
CMSC733/amitrokh_P2/Code/FaceDetectorCodes/DLib/dist/dlib/examples/train_shape_predictor.py
10
#!/usr/bin/python # The contents of this file are in the public domain. See LICENSE_FOR_EXAMPLE_PROGRAMS.txt # # This example program shows how to use dlib's implementation of the paper: # One Millisecond Face Alignment with an Ensemble of Regression Trees by # Vahid Kazemi and Josephine Sullivan, CVPR 2014 # # In particular, we will train a face landmarking model based on a small # dataset and then evaluate it. If you want to visualize the output of the # trained model on some images then you can run the # face_landmark_detection.py example program with predictor.dat as the input # model. # # It should also be noted that this kind of model, while often used for face # landmarking, is quite general and can be used for a variety of shape # prediction tasks. But here we demonstrate it only on a simple face # landmarking task. # # COMPILING/INSTALLING THE DLIB PYTHON INTERFACE # You can install dlib using the command: # pip install dlib # # Alternatively, if you want to compile dlib yourself then go into the dlib # root folder and run: # python setup.py install # or # python setup.py install --yes USE_AVX_INSTRUCTIONS # if you have a CPU that supports AVX instructions, since this makes some # things run faster. # # Compiling dlib should work on any operating system so long as you have # CMake and boost-python installed. On Ubuntu, this can be done easily by # running the command: # sudo apt-get install libboost-python-dev cmake # # Also note that this example requires scikit-image which can be installed # via the command: # pip install scikit-image # Or downloaded from http://scikit-image.org/download.html. import os import sys import glob import dlib from skimage import io # In this example we are going to train a face detector based on the small # faces dataset in the examples/faces directory. This means you need to supply # the path to this faces folder as a command line argument so we will know # where it is. if len(sys.argv) != 2: print( "Give the path to the examples/faces directory as the argument to this " "program. For example, if you are in the python_examples folder then " "execute this program by running:\n" " ./train_shape_predictor.py ../examples/faces") exit() faces_folder = sys.argv[1] options = dlib.shape_predictor_training_options() # Now make the object responsible for training the model. # This algorithm has a bunch of parameters you can mess with. The # documentation for the shape_predictor_trainer explains all of them. # You should also read Kazemi's paper which explains all the parameters # in great detail. However, here I'm just setting three of them # differently than their default values. I'm doing this because we # have a very small dataset. In particular, setting the oversampling # to a high amount (300) effectively boosts the training set size, so # that helps this example. options.oversampling_amount = 300 # I'm also reducing the capacity of the model by explicitly increasing # the regularization (making nu smaller) and by using trees with # smaller depths. options.nu = 0.05 options.tree_depth = 2 options.be_verbose = True # dlib.train_shape_predictor() does the actual training. It will save the # final predictor to predictor.dat. The input is an XML file that lists the # images in the training dataset and also contains the positions of the face # parts. training_xml_path = os.path.join(faces_folder, "training_with_face_landmarks.xml") dlib.train_shape_predictor(training_xml_path, "predictor.dat", options) # Now that we have a model we can test it. dlib.test_shape_predictor() # measures the average distance between a face landmark output by the # shape_predictor and where it should be according to the truth data. print("\nTraining accuracy: {}".format( dlib.test_shape_predictor(training_xml_path, "predictor.dat"))) # The real test is to see how well it does on data it wasn't trained on. We # trained it on a very small dataset so the accuracy is not extremely high, but # it's still doing quite good. Moreover, if you train it on one of the large # face landmarking datasets you will obtain state-of-the-art results, as shown # in the Kazemi paper. testing_xml_path = os.path.join(faces_folder, "testing_with_face_landmarks.xml") print("Testing accuracy: {}".format( dlib.test_shape_predictor(testing_xml_path, "predictor.dat"))) # Now let's use it as you would in a normal application. First we will load it # from disk. We also need to load a face detector to provide the initial # estimate of the facial location. predictor = dlib.shape_predictor("predictor.dat") detector = dlib.get_frontal_face_detector() # Now let's run the detector and shape_predictor over the images in the faces # folder and display the results. print("Showing detections and predictions on the images in the faces folder...") win = dlib.image_window() for f in glob.glob(os.path.join(faces_folder, "*.jpg")): print("Processing file: {}".format(f)) img = io.imread(f) win.clear_overlay() win.set_image(img) # Ask the detector to find the bounding boxes of each face. The 1 in the # second argument indicates that we should upsample the image 1 time. This # will make everything bigger and allow us to detect more faces. dets = detector(img, 1) print("Number of faces detected: {}".format(len(dets))) for k, d in enumerate(dets): print("Detection {}: Left: {} Top: {} Right: {} Bottom: {}".format( k, d.left(), d.top(), d.right(), d.bottom())) # Get the landmarks/parts for the face in box d. shape = predictor(img, d) print("Part 0: {}, Part 1: {} ...".format(shape.part(0), shape.part(1))) # Draw the face landmarks on the screen. win.add_overlay(shape) win.add_overlay(dets) dlib.hit_enter_to_continue()
unseenlaser/python-for-android
refs/heads/master
python3-alpha/python3-src/PC/VS7.1/field3.py
96
# An absurd workaround for the lack of arithmetic in MS's resource compiler. # After building Python, run this, then paste the output into the appropriate # part of PC\python_nt.rc. # Example output: # # * For 2.3a0, # * PY_MICRO_VERSION = 0 # * PY_RELEASE_LEVEL = 'alpha' = 0xA # * PY_RELEASE_SERIAL = 1 # * # * and 0*1000 + 10*10 + 1 = 101. # */ # #define FIELD3 101 import sys major, minor, micro, level, serial = sys.version_info levelnum = {'alpha': 0xA, 'beta': 0xB, 'candidate': 0xC, 'final': 0xF, }[level] string = sys.version.split()[0] # like '2.3a0' print " * For %s," % string print " * PY_MICRO_VERSION = %d" % micro print " * PY_RELEASE_LEVEL = %r = %s" % (level, hex(levelnum)) print " * PY_RELEASE_SERIAL = %d" % serial print " *" field3 = micro * 1000 + levelnum * 10 + serial print " * and %d*1000 + %d*10 + %d = %d" % (micro, levelnum, serial, field3) print " */" print "#define FIELD3", field3
i-maravic/ns-3
refs/heads/master
utils/python-unit-tests.py
155
import unittest from ns.core import Simulator, Seconds, Config, int64x64_t import ns.core import ns.network import ns.internet import ns.mobility import ns.csma import ns.applications class TestSimulator(unittest.TestCase): def testScheduleNow(self): def callback(args): self._args_received = args self._cb_time = Simulator.Now() Simulator.Destroy() self._args_received = None self._cb_time = None Simulator.ScheduleNow(callback, "args") Simulator.Run() self.assertEqual(self._args_received, "args") self.assertEqual(self._cb_time.GetSeconds(), 0.0) def testSchedule(self): def callback(args): self._args_received = args self._cb_time = Simulator.Now() Simulator.Destroy() self._args_received = None self._cb_time = None Simulator.Schedule(Seconds(123), callback, "args") Simulator.Run() self.assertEqual(self._args_received, "args") self.assertEqual(self._cb_time.GetSeconds(), 123.0) def testScheduleDestroy(self): def callback(args): self._args_received = args self._cb_time = Simulator.Now() Simulator.Destroy() self._args_received = None self._cb_time = None def null(): pass Simulator.Schedule(Seconds(123), null) Simulator.ScheduleDestroy(callback, "args") Simulator.Run() Simulator.Destroy() self.assertEqual(self._args_received, "args") self.assertEqual(self._cb_time.GetSeconds(), 123.0) def testScheduleWithContext(self): def callback(context, args): self._context_received = context self._args_received = args self._cb_time = Simulator.Now() Simulator.Destroy() self._args_received = None self._cb_time = None self._context_received = None Simulator.ScheduleWithContext(54321, Seconds(123), callback, "args") Simulator.Run() self.assertEqual(self._context_received, 54321) self.assertEqual(self._args_received, "args") self.assertEqual(self._cb_time.GetSeconds(), 123.0) def testTimeComparison(self): self.assert_(Seconds(123) == Seconds(123)) self.assert_(Seconds(123) >= Seconds(123)) self.assert_(Seconds(123) <= Seconds(123)) self.assert_(Seconds(124) > Seconds(123)) self.assert_(Seconds(123) < Seconds(124)) def testTimeNumericOperations(self): self.assertEqual(Seconds(10) + Seconds(5), Seconds(15)) self.assertEqual(Seconds(10) - Seconds(5), Seconds(5)) v1 = int64x64_t(5.0)*int64x64_t(10) self.assertEqual(v1, int64x64_t(50)) def testConfig(self): Config.SetDefault("ns3::OnOffApplication::PacketSize", ns.core.UintegerValue(123)) # hm.. no Config.Get? def testSocket(self): node = ns.network.Node() internet = ns.internet.InternetStackHelper() internet.Install(node) self._received_packet = None def rx_callback(socket): assert self._received_packet is None self._received_packet = socket.Recv() sink = ns.network.Socket.CreateSocket(node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")) sink.Bind(ns.network.InetSocketAddress(ns.network.Ipv4Address.GetAny(), 80)) sink.SetRecvCallback(rx_callback) source = ns.network.Socket.CreateSocket(node, ns.core.TypeId.LookupByName("ns3::UdpSocketFactory")) source.SendTo(ns.network.Packet(19), 0, ns.network.InetSocketAddress(ns.network.Ipv4Address("127.0.0.1"), 80)) Simulator.Run() self.assert_(self._received_packet is not None) self.assertEqual(self._received_packet.GetSize(), 19) def testAttributes(self): ## ## Yes, I know, the GetAttribute interface for Python is ## horrible, we should fix this soon, I hope. ## queue = ns.network.DropTailQueue() queue.SetAttribute("MaxPackets", ns.core.UintegerValue(123456)) limit = ns.core.UintegerValue() queue.GetAttribute("MaxPackets", limit) self.assertEqual(limit.Get(), 123456) ## -- object pointer values mobility = ns.mobility.RandomWaypointMobilityModel() ptr = ns.core.PointerValue() mobility.GetAttribute("PositionAllocator", ptr) self.assertEqual(ptr.GetObject(), None) pos = ns.mobility.ListPositionAllocator() mobility.SetAttribute("PositionAllocator", ns.core.PointerValue(pos)) ptr = ns.core.PointerValue() mobility.GetAttribute("PositionAllocator", ptr) self.assert_(ptr.GetObject() is not None) def testIdentity(self): csma = ns.csma.CsmaNetDevice() channel = ns.csma.CsmaChannel() csma.Attach(channel) c1 = csma.GetChannel() c2 = csma.GetChannel() self.assert_(c1 is c2) def testTypeId(self): typeId1 = ns.core.TypeId.LookupByNameFailSafe("ns3::UdpSocketFactory") self.assertEqual(typeId1.GetName (), "ns3::UdpSocketFactory") self.assertRaises(KeyError, ns.core.TypeId.LookupByNameFailSafe, "__InvalidTypeName__") def testCommandLine(self): cmd = ns.core.CommandLine() cmd.AddValue("Test1", "this is a test option") cmd.AddValue("Test2", "this is a test option") cmd.AddValue("Test3", "this is a test option", variable="test_xxx") cmd.Test1 = None cmd.Test2 = None cmd.test_xxx = None class Foo: pass foo = Foo() foo.test_foo = None cmd.AddValue("Test4", "this is a test option", variable="test_foo", namespace=foo) cmd.Parse(["python", "--Test1=value1", "--Test2=value2", "--Test3=123", "--Test4=xpto"]) self.assertEqual(cmd.Test1, "value1") self.assertEqual(cmd.Test2, "value2") self.assertEqual(cmd.test_xxx, "123") self.assertEqual(foo.test_foo, "xpto") def testSubclass(self): class MyNode(ns.network.Node): def __init__(self): super(MyNode, self).__init__() node = MyNode() if __name__ == '__main__': unittest.main()
elcritch/csvkit
refs/heads/master
tests/test_utilities/test_csvsql.py
21
#!/usr/bin/env python import six try: import unittest2 as unittest except ImportError: import unittest from csvkit.utilities.csvsql import CSVSQL from tests.utils import stdin_as_string class TestCSVSQL(unittest.TestCase): def test_create_table(self): args = ['--table', 'foo', 'examples/testfixed_converted.csv'] output_file = six.StringIO() utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() self.assertTrue('CREATE TABLE foo' in sql) self.assertTrue('text VARCHAR(17) NOT NULL' in sql) self.assertTrue('date DATE' in sql) self.assertTrue('integer INTEGER' in sql) self.assertTrue('boolean BOOLEAN' in sql) self.assertTrue('float FLOAT' in sql) self.assertTrue('time TIME' in sql) self.assertTrue('datetime DATETIME' in sql) def test_no_inference(self): args = ['--table', 'foo', '--no-inference', 'examples/testfixed_converted.csv'] output_file = six.StringIO() utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() self.assertTrue('CREATE TABLE foo' in sql) self.assertTrue('text VARCHAR(17) NOT NULL' in sql) self.assertTrue('date VARCHAR(10) NOT NULL' in sql) self.assertTrue('integer VARCHAR(3) NOT NULL' in sql) self.assertTrue('boolean VARCHAR(5) NOT NULL' in sql) self.assertTrue('float VARCHAR(11) NOT NULL' in sql) self.assertTrue('time VARCHAR(8) NOT NULL' in sql) self.assertTrue('datetime VARCHAR(19) NOT NULL' in sql) def test_no_header_row(self): args = ['--table', 'foo', '--no-header-row', 'examples/no_header_row.csv'] output_file = six.StringIO() utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() self.assertTrue('CREATE TABLE foo' in sql) self.assertTrue('column1 INTEGER NOT NULL' in sql) self.assertTrue('column2 INTEGER NOT NULL' in sql) self.assertTrue('column3 INTEGER NOT NULL' in sql) def test_stdin(self): args = ['--table', 'foo'] output_file = six.StringIO() input_file = six.StringIO('a,b,c\n1,2,3\n') with stdin_as_string(input_file): utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() self.assertTrue('CREATE TABLE foo' in sql) self.assertTrue('a INTEGER NOT NULL' in sql) self.assertTrue('b INTEGER NOT NULL' in sql) self.assertTrue('c INTEGER NOT NULL' in sql) def test_stdin_and_filename(self): args = ['examples/dummy.csv'] output_file = six.StringIO() input_file = six.StringIO("a,b,c\n1,2,3\n") with stdin_as_string(input_file): utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() self.assertTrue('CREATE TABLE stdin' in sql) self.assertTrue('CREATE TABLE dummy' in sql) def test_query(self): args = ['--query', 'select m.usda_id, avg(i.sepal_length) as mean_sepal_length from iris as i join irismeta as m on (i.species = m.species) group by m.species', 'examples/iris.csv', 'examples/irismeta.csv'] output_file = six.StringIO() input_file = six.StringIO("a,b,c\n1,2,3\n") with stdin_as_string(input_file): utility = CSVSQL(args, output_file) utility.main() sql = output_file.getvalue() if six.PY2: self.assertTrue('usda_id,mean_sepal_length' in sql) self.assertTrue('IRSE,5.006' in sql) self.assertTrue('IRVE2,5.936' in sql) self.assertTrue('IRVI,6.588' in sql) else: self.assertTrue('usda_id,mean_sepal_length' in sql) self.assertTrue('IRSE,5.005' in sql) self.assertTrue('IRVE2,5.936' in sql) self.assertTrue('IRVI,6.587' in sql)
gpospelov/BornAgain
refs/heads/develop
Doc/FFCatalog/fig/ff2/sim_Cone.py
1
#!/usr/bin/env python3 """ Plot form factor. """ import bornagain as ba from bornagain import nm, deg import bornplot as bp det = ba.SphericalDetector(200, 5*deg, 2.5*deg, 2.5*deg) n = 4 results = [] for i in range(n): theta = 30*i/(n - 1) title = r'$\vartheta=%d^\circ$' % theta ff = ba.FormFactorCone(4*nm, 11*nm, 75*deg) trafo = ba.RotationY(theta*deg) data = bp.run_simulation(det, ff, trafo) results.append(bp.Result(i, data, title)) bp.make_plot(results, det, "ff_Cone")
PayloadSecurity/VxAPI
refs/heads/master
cli/wrappers/scan/cli_scan_url_to_file.py
1
from cli.wrappers.cli_caller import CliCaller from constants import ACTION_OVERVIEW_GET from cli.arguments_builders.submission_cli_arguments import SubmissionCliArguments class CliScanUrlToFile(CliCaller): help_description = 'Submit a file by url for quick scan (you can check results by \'' + ACTION_OVERVIEW_GET + '\' action) by \'{}\'' def build_argument_builder(self, child_parser): return SubmissionCliArguments(child_parser) def add_parser_args(self, child_parser): parser_argument_builder = super(CliScanUrlToFile, self).add_parser_args(child_parser) parser_argument_builder.add_url_arg('Url of file to submit') parser_argument_builder.add_scan_type_arg() parser_argument_builder.add_submission_no_share_third_party_opt() parser_argument_builder.add_submission_allow_community_access_opt() parser_argument_builder.add_submission_comment_opt() parser_argument_builder.add_submission_submit_name_opt()
peterayeni/rapidsms
refs/heads/develop
run_tests.py
1
#!/usr/bin/env python # vim: ai ts=4 sts=4 et sw=4 import os import sys import django from django.conf import settings from django.test.utils import get_runner def run_tests(options, args): if django.VERSION > (1, 7): # http://django.readthedocs.org/en/latest/releases/1.7.html#standalone-scripts django.setup() TestRunner = get_runner(settings) test_runner = TestRunner(verbosity=int(options.verbosity), interactive=options.interactive, failfast=False) if not args: args = ['rapidsms'] failures = test_runner.run_tests(args) sys.exit(failures) def main(): from optparse import OptionParser usage = "%prog [options] [module module module ...]" parser = OptionParser(usage=usage) parser.add_option('-v', '--verbosity', action='store', dest='verbosity', default=1, type='choice', choices=['0', '1', '2', '3'], help='Verbosity level; 0=minimal output, 1=normal ' 'output, 2=all output') parser.add_option('--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of ' 'any kind.') parser.add_option('--settings', help='Python path to settings module, e.g. ' '"myproject.settings". If this isn\'t provided, ' 'the DJANGO_SETTINGS_MODULE environment variable ' 'will be used.') options, args = parser.parse_args() if options.settings: os.environ['DJANGO_SETTINGS_MODULE'] = options.settings elif "DJANGO_SETTINGS_MODULE" not in os.environ: parser.error("DJANGO_SETTINGS_MODULE is not set in the environment. " "Set it or use --settings.") else: options.settings = os.environ['DJANGO_SETTINGS_MODULE'] run_tests(options, args=args) if __name__ == '__main__': main()
doduytrung/odoo-8.0
refs/heads/master
addons/purchase_double_validation/__openerp__.py
260
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name' : 'Double Validation on Purchases', 'version' : '1.1', 'category': 'Purchase Management', 'depends' : ['base','purchase'], 'author' : 'OpenERP SA', 'description': """ Double-validation for purchases exceeding minimum amount. ========================================================= This module modifies the purchase workflow in order to validate purchases that exceeds minimum amount set by configuration wizard. """, 'website': 'https://www.odoo.com/page/purchase', 'data': [ 'purchase_double_validation_workflow.xml', 'purchase_double_validation_installer.xml', 'purchase_double_validation_view.xml', ], 'test': [ 'test/purchase_double_validation_demo.yml', 'test/purchase_double_validation_test.yml' ], 'demo': [], 'installable': True, 'auto_install': False } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
arcean/pyopenssl
refs/heads/master
setup.py
1
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) AB Strakt 2001, All rights reserved # Copyright (C) Jean-Paul Calderone 2008, All rights reserved # # @(#) $Id: setup.py,v 1.28 2004/08/10 10:59:01 martin Exp $ # """ Installation script for the OpenSSL module """ import distutils.log distutils.log.set_verbosity(3) import sys, os from distutils.core import Extension, setup from distutils.errors import DistutilsFileError from distutils.command.build_ext import build_ext from version import __version__ crypto_src = ['src/crypto/crypto.c', 'src/crypto/x509.c', 'src/crypto/x509name.c', 'src/crypto/pkey.c', 'src/crypto/x509store.c', 'src/crypto/x509req.c', 'src/crypto/x509ext.c', 'src/crypto/pkcs7.c', 'src/crypto/pkcs12.c', 'src/crypto/netscape_spki.c', 'src/util.c'] crypto_dep = ['src/crypto/crypto.h', 'src/crypto/x509.h', 'src/crypto/x509name.h', 'src/crypto/pkey.h', 'src/crypto/x509store.h', 'src/crypto/x509req.h', 'src/crypto/x509ext.h', 'src/crypto/pkcs7.h', 'src/crypto/pkcs12.h', 'src/crypto/netscape_spki.h', 'src/util.h'] rand_src = ['src/rand/rand.c', 'src/util.c'] rand_dep = ['src/util.h'] ssl_src = ['src/ssl/connection.c', 'src/ssl/context.c', 'src/ssl/ssl.c', 'src/util.c'] ssl_dep = ['src/ssl/connection.h', 'src/ssl/context.h', 'src/ssl/ssl.h', 'src/util.h'] IncludeDirs = None LibraryDirs = None # Add more platforms here when needed if os.name == 'nt' or sys.platform == 'win32': Libraries = ['Ws2_32'] class BuildExtension(build_ext): """ A custom command that semiautomatically finds dependencies required by PyOpenSSL. """ user_options = (build_ext.user_options + [("with-openssl=", None, "directory where OpenSSL is installed")]) with_openssl = None openssl_dlls = () openssl_mingw = False def finalize_options(self): """ Update build options with details about OpenSSL. """ build_ext.finalize_options(self) if self.with_openssl is None: self.find_openssl() self.find_openssl_dlls() self.add_openssl_compile_info() def find_openssl(self): """ Find OpenSSL's install directory. """ potentials = [] dirs = os.environ.get("PATH").split(os.pathsep) for d in dirs: if os.path.exists(os.path.join(d, "openssl.exe")): ssldir, bin = os.path.split(d) if not bin: ssldir, bin = os.path.split(ssldir) potentials.append(ssldir) childdirs = os.listdir(ssldir) if "lib" in childdirs and "include" in childdirs: self.with_openssl = ssldir return if potentials: raise DistutilsFileError( "Only found improper OpenSSL directories: %r" % ( potentials,)) else: raise DistutilsFileError("Could not find 'openssl.exe'") def find_openssl_dlls(self): """ Find OpenSSL's shared libraries. """ self.openssl_dlls = [] self.find_openssl_dll("libssl32.dll", False) if self.openssl_dlls: self.openssl_mingw = True else: self.find_openssl_dll("ssleay32.dll", True) self.find_openssl_dll("libeay32.dll", True) # add zlib to the mix if it looks like OpenSSL # was linked with a private copy of it self.find_openssl_dll("zlib1.dll", False) def find_openssl_dll(self, name, required): """ Find OpenSSL's shared library and its path after installation. """ dllpath = os.path.join(self.with_openssl, "bin", name) if not os.path.exists(dllpath): if required: raise DistutilsFileError("could not find '%s'" % name) else: return newpath = os.path.join(self.build_lib, "OpenSSL", name) self.openssl_dlls.append((dllpath, newpath)) def add_openssl_compile_info(self): """ Set up various compile and link parameters. """ if self.compiler == "mingw32": if self.openssl_mingw: # Library path and library names are sane when OpenSSL is # built with MinGW . libdir = "lib" libs = ["eay32", "ssl32"] else: libdir = "" libs = [] # Unlike when using the binary installer, which creates # an atypical shared library name 'ssleay32', so we have # to use this workaround. if self.link_objects is None: self.link_objects = [] for dllpath, _ in self.openssl_dlls: dllname = os.path.basename(dllpath) libname = os.path.splitext(dllname)[0] + ".a" libpath = os.path.join(self.with_openssl, "lib", "MinGW", libname) self.link_objects.append(libpath) else: libdir = "lib" libs = ["libeay32", "ssleay32"] self.include_dirs.append(os.path.join(self.with_openssl, "include")) self.library_dirs.append(os.path.join(self.with_openssl, libdir)) self.libraries.extend(libs) def run(self): """ Build extension modules and copy shared libraries. """ build_ext.run(self) for dllpath, newpath in self.openssl_dlls: self.copy_file(dllpath, newpath) def get_outputs(self): """ Return a list of file paths built by this comand. """ output = [pathpair[1] for pathpair in self.openssl_dlls] output.extend(build_ext.get_outputs(self)) return output else: Libraries = ['ssl', 'crypto'] BuildExtension = build_ext def mkExtension(name): modname = 'OpenSSL.' + name src = globals()[name.lower() + '_src'] dep = globals()[name.lower() + '_dep'] return Extension(modname, src, libraries=Libraries, depends=dep, include_dirs=IncludeDirs, library_dirs=LibraryDirs) setup(name='pyOpenSSL', version=__version__, packages = ['OpenSSL'], package_dir = {'OpenSSL': '.'}, ext_modules = [mkExtension('crypto'), mkExtension('rand'), mkExtension('SSL')], py_modules = ['OpenSSL.__init__', 'OpenSSL.tsafe', 'OpenSSL.version', 'OpenSSL.test.__init__', 'OpenSSL.test.util', 'OpenSSL.test.test_crypto', 'OpenSSL.test.test_rand', 'OpenSSL.test.test_ssl'], zip_safe = False, cmdclass = {"build_ext": BuildExtension}, description = 'Python wrapper module around the OpenSSL library', author = 'Martin Sjögren, AB Strakt', author_email = '[email protected]', maintainer = 'Jean-Paul Calderone', maintainer_email = '[email protected]', url = 'http://pyopenssl.sourceforge.net/', license = 'LGPL', long_description = """\ High-level wrapper around a subset of the OpenSSL library, includes * SSL.Connection objects, wrapping the methods of Python's portable sockets * Callbacks written in Python * Extensive error-handling mechanism, mirroring OpenSSL's error codes ... and much more ;)""" )
davide-ceretti/DEPRECATED-googleappengine-djangae-skeleton
refs/heads/master
application/crud/tests.py
24123
from django.test import TestCase # Create your tests here.
thnee/ansible
refs/heads/devel
hacking/build_library/build_ansible/jinja2/filters.py
31
# Copyright: (c) 2019, Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import re try: from html import escape as html_escape except ImportError: # Python-3.2 or later import cgi def html_escape(text, quote=True): return cgi.escape(text, quote) from jinja2.runtime import Undefined from ansible.errors import AnsibleError from ansible.module_utils._text import to_text from ansible.module_utils.six import string_types _ITALIC = re.compile(r"I\(([^)]+)\)") _BOLD = re.compile(r"B\(([^)]+)\)") _MODULE = re.compile(r"M\(([^)]+)\)") _URL = re.compile(r"U\(([^)]+)\)") _LINK = re.compile(r"L\(([^)]+), *([^)]+)\)") _CONST = re.compile(r"C\(([^)]+)\)") _RULER = re.compile(r"HORIZONTALLINE") def html_ify(text): ''' convert symbols like I(this is in italics) to valid HTML ''' if not isinstance(text, string_types): text = to_text(text) t = html_escape(text) t = _ITALIC.sub(r"<em>\1</em>", t) t = _BOLD.sub(r"<b>\1</b>", t) t = _MODULE.sub(r"<span class='module'>\1</span>", t) t = _URL.sub(r"<a href='\1'>\1</a>", t) t = _LINK.sub(r"<a href='\2'>\1</a>", t) t = _CONST.sub(r"<code>\1</code>", t) t = _RULER.sub(r"<hr/>", t) return t.strip() def documented_type(text): ''' Convert any python type to a type for documentation ''' if isinstance(text, Undefined): return '-' if text == 'str': return 'string' if text == 'bool': return 'boolean' if text == 'int': return 'integer' if text == 'dict': return 'dictionary' return text # The max filter was added in Jinja2-2.10. Until we can require that version, use this def do_max(seq): return max(seq) def rst_ify(text): ''' convert symbols like I(this is in italics) to valid restructured text ''' try: t = _ITALIC.sub(r"*\1*", text) t = _BOLD.sub(r"**\1**", t) t = _MODULE.sub(r":ref:`\1 <\1_module>`", t) t = _LINK.sub(r"`\1 <\2>`_", t) t = _URL.sub(r"\1", t) t = _CONST.sub(r"``\1``", t) t = _RULER.sub(r"------------", t) except Exception as e: raise AnsibleError("Could not process (%s) : %s" % (text, e)) return t def rst_fmt(text, fmt): ''' helper for Jinja2 to do format strings ''' return fmt % (text) def rst_xline(width, char="="): ''' return a restructured text line of a given length ''' return char * width
maackle/ILC-app
refs/heads/master
lib/pyspatialite-3.0.1/doc/includes/sqlite3/progress.py
49
from pysqlite2 import dbapi2 as sqlite3 def progress(): print "Query still executing. Please wait ..." con = sqlite3.connect(":memory:") con.execute("create table test(x)") # Let's create some data con.executemany("insert into test(x) values (?)", [(x,) for x in xrange(300)]) # A progress handler, executed every 10 million opcodes con.set_progress_handler(progress, 10000000) # A particularly long-running query killer_stament = """ select count(*) from ( select t1.x from test t1, test t2, test t3 ) """ con.execute(killer_stament) print "-" * 50 # Clear the progress handler con.set_progress_handler(None, 0) con.execute(killer_stament)
midroid/mediadrop
refs/heads/master
mediacore/lib/compat/functional.py
14
from mediadrop.lib.compat.functional import *
getsmap/smap4
refs/heads/master
WS/print/printIt.py
1
#!/usr/bin/env python # -*- coding: utf-8 -*- #XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX # # Author: Johan Lahti <johanlahti at gmail com> # Copyright: Malmö Stad (City of Malmö, Sweden) # License: MIT license # Date: August 10, 2010. # # About: This code is meant to be used with the # OpenLayers library. It creates an image # of the current map, and thereby makes it possible # to save the map as an image or pdf. There # is some associated client code (javascript) which provides # the map configuration. This server side code uses # the given map configuration to create an image and # to paste icons into it, and draw features on it, at # the right place and with provided style, so that it # appears the same as the map you see in the browser. # Colours could differ a bit between the map in the browser # and the map in image. # # This code (including client code) is OpenSource limited # by the regulations provided by the libraries used. # Feel free to improve the code. # # Dependencies (this code): Requires external libraries: # - json # http://pypi.python.org/pypi/python-json # - Python Imaging Library (PIL) # http://www.pythonware.com/products/pil/ # - aggdraw # http://effbot.org/zone/aggdraw-index.htm # - ReportLab Toolkit # http://www.reportlab.com/software/opensource/rl-toolkit/ # http://www.reportlab.com/ # # ---------------- Licenses for used libraries: ------------------------------------------------------------------------------------------ # # The Python Imaging Library (PIL) is # # Copyright © 1997-2006 by Secret Labs AB # Copyright © 1995-2006 by Fredrik Lundh # # By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read, understood, and will comply with the following terms and conditions: # # Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or the author not be used in advertising or publicity pertaining to distribution of the software without specific, written prior permission. # # SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. # # --------------------------- # # Aggdraw # # Copyright © Fredrik Lundh # License: Python (MIT style) ### # --------------------------- # # JSON # # Author: Patrick Dlogan <patrickdlogan at stardecisions com> # Copyright: # License: LGPL # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # --------------------------- # # ReportLab Toolkit # # About: A library for programatically creating documents in PDF format. # Copyright: © ReportLab 2010 # License: BSD license # # ----------------------------------------------------- #XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX try: import cgi, cgitb cgitb.enable() import json import Image import aggdraw import urllib2 import cStringIO import sys, os import random from operator import itemgetter except: print "Problem importing libs" ## webContentPath will added before the local file (image) paths in order ## to get the URL right. For some reason the home dir is set ## to wwwroot when this script is run. class Storage: pass # ------- "Help" functions ----------------------------------- def makeBlankImage(width, height, savePath=None): ''' Make a blank image into which everything will be pasted. If path, save it at path. ''' blankImage = Image.new("RGBA", (width, height), (255,255,255,0)) if savePath!=None: ext = savePath.split(".")[-1].upper() savePath = os.path.normpath(savePath) # normalize path blankImage.save(savePath, ext) return blankImage def fetchImageWithUrl(url): #req = urllib2.Request(url) # Make a temporary file without saving it on the harddrive. #url = "http://tilecache.smap.se/skane_karta_sr99tm/00/000/000/000/000/000/002.jpeg" #url = "http://malmo.se/assets-2.0/img/malmo-stad-logo.png" #"http://www.smap.se/tilecache/skane_karta_sr99tm/00/000/000/000/000/000/002.jpeg" #url = "http://xyz.malmo.se/data_e/tilecache/malmo/malmo_karta/00/000/000/001/000/000/001.png" url = url.replace("tilecache.smap.se", "localhost/tilecache") #content = urllib2.urlopen(url).read() #fWrite = open("/var/www/kartor/temp/print/test.png", "w") #fWrite.write(content) #fWrite.close() #print len(content) f = cStringIO.StringIO(urllib2.urlopen(url).read()) img = Image.open(f) return img def pasteImageOnPositions(bgImage, topImage, positions, opacity=None): # Make a new transparent image im = Image.new("RGBA", bgImage.size, (255,255,255,0)) # We need to keep all images in the same mode. if topImage.mode=="P": topImage = topImage.convert("RGBA") # First, paste topImage into new image at given positions w = int(topImage.size[0]) h = int(topImage.size[1]) # A layer in OpenLayers which has opacity will no be saved in # the image. Therefore we have to add the opacity ourselves to # the image. if opacity!=None and opacity!=255: # Explained: We iterate through all pixels in the transparency/alpha band (no. 3). # If alpha is 0 (transparent) we let it be so, otherwise we add the given 'opacity' # to all other pixels. if topImage.mode=="RGB": newAlpha = Image.new("L", topImage.size, opacity) elif topImage.mode=="RGBA": newAlpha = topImage.split()[3].point(lambda a: 0 if a==0 else opacity) topImage.putalpha(newAlpha) for p in positions: bbox = (p[0], p[1], p[0]+int(w), p[1]+int(h)) im.paste(topImage, bbox) # Then, make a mask which protects non-transparent areas (which we want to keep). # 0-value in mask means it will be protected, i.e. not be pasted over. alpha = im.split()[3] mask = alpha.point(lambda a: 255-a) # Invert numbers #alphaBG = bgImage.split()[3] #alphaBG = alphaBG.point(lambda a: 255-a) #alphaBG.paste(mask) # Lastly, paste the background image on top of the new image using the mask. im.paste(bgImage, None, mask) return im def cropImageToFit(bgImgSize, cropImg, left, top): imgW, imgH = bgImgSize cropImgW, cropImgH = cropImg.size # 1. Find out if (and how much) to crop from top and left bboxLeft = 0 bboxTop = 0 # The tile wont be visible... if left<(-cropImgW) or top<(-cropImgH) or left>imgW or top>imgH: return None # Cut from the left and top - invert numbers since # we are using a bbox (counting from left and top). if left<0: bboxLeft = -left # cut this much from left if top<0: bboxTop = -top # cut this much from top # 2. Find out if (and how much) we need to crop on the # other side (right or bottom) # Count overlap on right and bottom side # Negative means there is this amount of overlap # (-10 remaining width or height means 10 pixels overlap) #print "imgH, top, cropImgH", imgH, top, cropImgH #print "imgW, left, cropImgW", imgW, left, cropImgW remainingWidth = imgW - (left + cropImgW) remainingHeight = imgH - (top + cropImgH) bboxRight, bboxBottom = cropImg.size # if no cut is necessary these values will remain... # If overlap (remaining width/height is negative) - alter # the bbox so that this much will be cropped on the right/bottom # side. if remainingWidth<0: bboxRight = cropImgW - remainingWidth.__abs__() if remainingHeight<0: bboxBottom = cropImgH - remainingHeight.__abs__() #print "bboxBottom", bboxBottom, remainingHeight bbox = (bboxLeft, bboxTop, bboxRight, bboxBottom) croppedImg = cropImg.crop(bbox) return croppedImg def flatten2dList(lst): flatList = [] for d in lst: for dd in d: flatList.append(dd) return flatList # ------- "Help" functions END ----------------------------------- def getMapConfig(): f = cgi.FieldStorage() width = f["width"].value height = f["height"].value layers = f["layers"].value outputPath = f["outputPath"].value quality = f["quality"].value headerText = f["headerText"].value scale = f["scale"].value outputPath = os.path.normpath(outputPath) try: root = f["webContentPath"].value # The "home dir" => path to webContent folder if root==None or root=="null": root = "" except: root = "" Storage.webContentPath = root headerText = headerText if headerText!="null" else None dec = json.JSONDecoder() layersDict = dec.decode(layers) return int(width), int(height), layersDict, outputPath, int(quality), headerText, float(scale) def pasteTile(img, t): ''' Paste the tiles into a blank image. ''' x = t["x"] y = t["y"] opacity = t["opacity"] opacity = None if opacity=="null" else opacity #Make null into None... url = t["url"] try: tile = fetchImageWithUrl(url) except: return img croppedTile = cropImageToFit(img.size, tile, x, y) if croppedTile==None: return img # Normalize it to 0 so we can paste the image if x<0: x=0 if y<0: y=0 img = pasteImageOnPositions(img, croppedTile, [[x, y]], opacity=opacity) return img def pasteVector(img, t): url = t["url"] features = t["features"] if url==None: # Draw the features # Extract the values from the dictionary strokeColor = t["strokeColor"] strokeOpacity = t["strokeOpacity"] strokeWidth = t["strokeWidth"] fillOpacity = t["fillOpacity"] fillColor = t["fillColor"] pointRadius = t["pointRadius"] # If fill color -> Create a brush if fillColor!=None: brush = aggdraw.Brush(fillColor, opacity=fillOpacity) # Set pen's draw width if t["strokeColor"]!=None: pen = aggdraw.Pen(strokeColor,\ width=strokeWidth,\ opacity=strokeOpacity) draw = aggdraw.Draw(img) # Iterate thorough features to find what geomType # to draw (point=ellipse, line=line, polygon=polygon) for f in features: geomType = f["geomType"] nodes = f["nodes"] nodes = flatten2dList(nodes) if geomType=="point": x0 = nodes[0] - pointRadius y0 = nodes[1] - pointRadius x1 = nodes[0] + pointRadius y1 = nodes[1] + pointRadius draw.ellipse((x0, y0, x1, y1), pen, brush) elif geomType=="line": draw.line(nodes, pen) elif geomType=="polygon": draw.polygon(nodes, pen, brush) draw.flush() del draw else: if url.find("http")==-1: # If the URL is a local path, make sure to adapt it to the # location of this script so it can find the image folder. # preImageFolder is set on the top of this script. url = str(Storage.webContentPath) + str(url) ## s = "" ## from glob import glob ## for p in glob(Storage.webContentPath+"*"): ## s += (p + "\n") ## print s try: w = t["graphicWidth"] h = t["graphicHeight"] size = (w, h) except: pass topImg = Image.open(url) for f in features: # Paste the features positions = f["nodes"] geomType = f["geomType"] fillOpacity = t["fillOpacity"] # Resize image to given graphic width and height try: a = size[0] + size[1] except: pass else: topImg = topImg.resize(size) img = pasteImageOnPositions(img, topImg, positions, fillOpacity) return img def sortList(layersList): tempList = [] for t in layersList: zIndex = t["zIndex"] tempList.append([zIndex, t]) tempList.sort(key=itemgetter(0)) sortedLayersList = [] for i in tempList: sortedLayersList.append(i[1]) return sortedLayersList def pasteAllLayers(img, layersList): for t in layersList: if t["layerType"]=="tile": img = pasteTile(img, t) elif t["layerType"]=="vector": img = pasteVector(img, t) return img def makeLegend(layersList): from PIL import ImageFont, ImageDraw, ImageOps legend = Image.new("RGBA", (400, 200), (255,255,255,255) ) globX = 0 globY = 0 for t in layersList: layerName = t["layerName"] legendImageURL = t["legendImage"] if legendImageURL not in ["null", None]: icon = Image.open(Storage.webContentPath + "/" + legendImageURL) legend.paste(icon, (globX, globY, globX+icon.size[0], globY+icon.size[1])) legend = pasteImageOnPositions(legend, icon, [[globX, globY]], opacity=None) #font = ImageFont.load_path("C:/WINDOWS/Fonts/ARIAL.TTF") #font = ImageFont.truetype("arial.ttf", 15) #font = ImageFont.load("C:/WINDOWS/Fonts/ARIAL.TTF") #font = ImageFont.load_default() draw = ImageDraw.Draw(legend) draw.text( (globX+40, globY), layerName.encode("utf8"), font=None, fill=(0,0,0)) globY += 15 return legend def makeScaleBar(img, pxRatio): import ImageFont windir = os.environ.get("WINDIR") fontfile = os.path.join(windir, "Fonts", "ProFontWindows.ttf") font = ImageFont.load_default() #ImageFont.truetype(fontfile, 14) width = 100 # pixel width for scalebar margin = 10 draw = aggdraw.Draw(img) pen = aggdraw.Pen("000000", width=2, opacity=255) xMax, yMax = img.size x = xMax-margin y = yMax-margin draw.line([x, y, x-width, y], pen) draw.line([x, y-5, x, y+5], pen) draw.line([x-width, y-5, x-width, y+5], pen) textVal = str(round(pxRatio/1000.0 * width)) + " km" draw.textsize(textVal, font.font) #[10, 10], textVal, None) #[x-width/2, y-15] draw.flush() del draw return img def makeRandomFileName(outputPath): # Add a random no. in the end of the filename to prevent # (minimize the risk of the same name for to images. directory = os.path.abspath(os.path.dirname(outputPath)) + "/" fileName = os.path.basename(outputPath) ext = fileName.split(".")[-1].upper() r = str(random.randint(0, 1000000000)) fileNameArr = fileName.split(".")[:-1] fileNameArr.append("_"+r) outputPath = directory + "".join(fileNameArr) + "." + ext return outputPath def replaceExt(path, newExt): if newExt[0]!=".": newExt = "." + newExt pathList = path.split(".") newPath = ".".join(pathList[:-1]) + newExt return newPath def main(): width, height, layersList, outputPath, quality, headerText, scale = getMapConfig() img = makeBlankImage(width, height, None) layersList = sortList(layersList) img = pasteAllLayers(img, layersList) # IMPORTANT! Files older than given number (seconds) will be delete in this folder. deleteFolder = os.path.dirname(outputPath) import deleteFiles deleteFiles.deleteAllFiles(deleteFolder, ["png", "jpg", "jpeg", "pdf"], maxAge=120) # Make a random file name and extract the file name extension. outputPath = makeRandomFileName(outputPath) ext = outputPath.split(".")[-1].upper() fileName = os.path.basename(outputPath) # ---- Log time and IP ------------------------- import time now = time.ctime() ip = cgi.escape(os.environ["REMOTE_ADDR"]) content = str(now) + "\t" + str(ip) + "\t" + ext + "\n" deleteFiles.writeLog(os.path.normpath(deleteFolder) + "/log.txt", content) # If PDF - paste headerText and image into a new PDF document. if ext=="PDF": # Replace PDF-extension in outpath by PNG so we can insert the PNG later. outputPath = replaceExt(outputPath, "PNG") quality = 100 # These formats cannot be written in mode RGBA (incl. PDF...) if ext=="BMP" and img.mode=="RGBA": img = img.convert("P") img.save(outputPath) if ext=="PDF": import toPDF pdfOutputPath = replaceExt(outputPath, "PDF") toPDF.makePdf(outputPath, pdfOutputPath, headerText) fileName = replaceExt(fileName, "PDF") print fileName if __name__=='__main__': print "Content-Type: text/plain" print main()
sharifmamun/ansible
refs/heads/devel
lib/ansible/plugins/callback/hipchat.py
101
# (C) 2014, Matt Martz <[email protected]> # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import os import urllib import urllib2 try: import prettytable HAS_PRETTYTABLE = True except ImportError: HAS_PRETTYTABLE = False from ansible.plugins.callback import CallbackBase class CallbackModule(CallbackBase): """This is an example ansible callback plugin that sends status updates to a HipChat channel during playbook execution. This plugin makes use of the following environment variables: HIPCHAT_TOKEN (required): HipChat API token HIPCHAT_ROOM (optional): HipChat room to post in. Default: ansible HIPCHAT_FROM (optional): Name to post as. Default: ansible HIPCHAT_NOTIFY (optional): Add notify flag to important messages ("true" or "false"). Default: true Requires: prettytable """ CALLBACK_VERSION = 2.0 CALLBACK_VERSION = 2.0 CALLBACK_NAME = 'hipchat' def __init__(self, display): super(CallbackModule, self).__init__(display) if not HAS_PRETTYTABLE: self.disabled = True self.display.warning('The `prettytable` python module is not installed. ' 'Disabling the HipChat callback plugin.') self.msg_uri = 'https://api.hipchat.com/v1/rooms/message' self.token = os.getenv('HIPCHAT_TOKEN') self.room = os.getenv('HIPCHAT_ROOM', 'ansible') self.from_name = os.getenv('HIPCHAT_FROM', 'ansible') self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false') if self.token is None: self.disabled = True self.display.warning('HipChat token could not be loaded. The HipChat ' 'token can be provided using the `HIPCHAT_TOKEN` ' 'environment variable.') self.printed_playbook = False self.playbook_name = None def send_msg(self, msg, msg_format='text', color='yellow', notify=False): """Method for sending a message to HipChat""" params = {} params['room_id'] = self.room params['from'] = self.from_name[:15] # max length is 15 params['message'] = msg params['message_format'] = msg_format params['color'] = color params['notify'] = int(self.allow_notify and notify) url = ('%s?auth_token=%s' % (self.msg_uri, self.token)) try: response = urllib2.urlopen(url, urllib.urlencode(params)) return response.read() except: self.display.warning('Could not submit message to hipchat') def playbook_on_play_start(self, name): """Display Playbook and play start messages""" # This block sends information about a playbook when it starts # The playbook object is not immediately available at # playbook_on_start so we grab it via the play # # Displays info about playbook being started by a person on an # inventory, as well as Tags, Skip Tags and Limits if not self.printed_playbook: self.playbook_name, _ = os.path.splitext( os.path.basename(self.play.playbook.filename)) host_list = self.play.playbook.inventory.host_list inventory = os.path.basename(os.path.realpath(host_list)) self.send_msg("%s: Playbook initiated by %s against %s" % (self.playbook_name, self.play.playbook.remote_user, inventory), notify=True) self.printed_playbook = True subset = self.play.playbook.inventory._subset skip_tags = self.play.playbook.skip_tags self.send_msg("%s:\nTags: %s\nSkip Tags: %s\nLimit: %s" % (self.playbook_name, ', '.join(self.play.playbook.only_tags), ', '.join(skip_tags) if skip_tags else None, ', '.join(subset) if subset else subset)) # This is where we actually say we are starting a play self.send_msg("%s: Starting play: %s" % (self.playbook_name, name)) def playbook_on_stats(self, stats): """Display info about playbook statistics""" hosts = sorted(stats.processed.keys()) t = prettytable.PrettyTable(['Host', 'Ok', 'Changed', 'Unreachable', 'Failures']) failures = False unreachable = False for h in hosts: s = stats.summarize(h) if s['failures'] > 0: failures = True if s['unreachable'] > 0: unreachable = True t.add_row([h] + [s[k] for k in ['ok', 'changed', 'unreachable', 'failures']]) self.send_msg("%s: Playbook complete" % self.playbook_name, notify=True) if failures or unreachable: color = 'red' self.send_msg("%s: Failures detected" % self.playbook_name, color=color, notify=True) else: color = 'green' self.send_msg("/code %s:\n%s" % (self.playbook_name, t), color=color)
eduNEXT/edunext-platform
refs/heads/master
cms/envs/common.py
1
# -*- coding: utf-8 -*- """ This is the common settings file, intended to set sane defaults. If you have a piece of configuration that's dependent on a set of feature flags being set, then create a function that returns the calculated value based on the value of FEATURES[...]. Modules that extend this one can change the feature configuration in an environment specific config file and re-calculate those values. We should make a method that calls all these config methods so that you just make one call at the end of your site-specific dev file to reset all the dependent variables (like INSTALLED_APPS) for you. Longer TODO: 1. Right now our treatment of static content in general and in particular course-specific static content is haphazard. 2. We should have a more disciplined approach to feature flagging, even if it just means that we stick them in a dict called FEATURES. 3. We need to handle configuration for multiple courses. This could be as multiple sites, but we do need a way to map their data assets. When refering to XBlocks, we use the entry-point name. For example, | setup( | name='xblock-foobar', | version='0.1', | packages=[ | 'foobar_xblock', | ], | entry_points={ | 'xblock.v1': [ | 'foobar-block = foobar_xblock:FoobarBlock', | # ^^^^^^^^^^^^ This is the one you want. | ] | }, | ) """ # We intentionally define lots of variables that aren't used, and # want to import all variables from base settings files # pylint: disable=unused-import, useless-suppression, wrong-import-order, wrong-import-position import importlib.util import os import sys from datetime import timedelta import lms.envs.common # Although this module itself may not use these imported variables, other dependent modules may. from lms.envs.common import ( USE_TZ, ALL_LANGUAGES, update_module_store_settings, ASSET_IGNORE_REGEX, PARENTAL_CONSENT_AGE_LIMIT, REGISTRATION_EMAIL_PATTERNS_ALLOWED, # The following PROFILE_IMAGE_* settings are included as they are # indirectly accessed through the email opt-in API, which is # technically accessible through the CMS via legacy URLs. PROFILE_IMAGE_BACKEND, PROFILE_IMAGE_DEFAULT_FILENAME, PROFILE_IMAGE_DEFAULT_FILE_EXTENSION, PROFILE_IMAGE_HASH_SEED, PROFILE_IMAGE_MIN_BYTES, PROFILE_IMAGE_MAX_BYTES, PROFILE_IMAGE_SIZES_MAP, # The following setting is included as it is used to check whether to # display credit eligibility table on the CMS or not. COURSE_MODE_DEFAULTS, DEFAULT_COURSE_ABOUT_IMAGE_URL, # User-uploaded content MEDIA_ROOT, MEDIA_URL, # Lazy Gettext _, # Django REST framework configuration REST_FRAMEWORK, STATICI18N_OUTPUT_DIR, # Heartbeat HEARTBEAT_CHECKS, HEARTBEAT_EXTENDED_CHECKS, HEARTBEAT_CELERY_TIMEOUT, # Default site to use if no site exists matching request headers SITE_ID, # constants for redirects app REDIRECT_CACHE_TIMEOUT, REDIRECT_CACHE_KEY_PREFIX, # This is required for the migrations in oauth_dispatch.models # otherwise it fails saying this attribute is not present in Settings # Although Studio does not enable OAuth2 Provider capability, the new approach # to generating test databases will discover and try to create all tables # and this setting needs to be present OAUTH2_PROVIDER_APPLICATION_MODEL, JWT_AUTH, USERNAME_REGEX_PARTIAL, USERNAME_PATTERN, # django-debug-toolbar DEBUG_TOOLBAR_PATCH_SETTINGS, COURSE_ENROLLMENT_MODES, CONTENT_TYPE_GATE_GROUP_IDS, DISABLE_ACCOUNT_ACTIVATION_REQUIREMENT_SWITCH, GENERATE_PROFILE_SCORES, # Enterprise service settings ENTERPRISE_CATALOG_INTERNAL_ROOT_URL, # Methods to derive settings _make_mako_template_dirs, _make_locale_paths, ) from path import Path as path from django.urls import reverse_lazy from lms.djangoapps.lms_xblock.mixin import LmsBlockMixin from cms.lib.xblock.authoring_mixin import AuthoringMixin from xmodule.modulestore.edit_info import EditInfoMixin from openedx.core.djangoapps.theming.helpers_dirs import ( get_themes_unchecked, get_theme_base_dirs_from_settings ) from openedx.core.lib.license import LicenseMixin from openedx.core.lib.derived import derived, derived_collection_entry from openedx.core.release import doc_version # pylint: enable=useless-suppression ################ Enable credit eligibility feature #################### ENABLE_CREDIT_ELIGIBILITY = True ################################ Block Structures ################################### BLOCK_STRUCTURES_SETTINGS = dict( # Delay, in seconds, after a new edit of a course is published # before updating the block structures cache. This is needed # for a better chance at getting the latest changes when there # are secondary reads in sharded mongoDB clusters. See TNL-5041 # for more info. COURSE_PUBLISH_TASK_DELAY=30, # Delay, in seconds, between retry attempts if a task fails. TASK_DEFAULT_RETRY_DELAY=30, # Maximum number of retries per task. TASK_MAX_RETRIES=5, # Backend storage options PRUNING_ACTIVE=False, ) ############################ FEATURE CONFIGURATION ############################# PLATFORM_NAME = _('Your Platform Name Here') PLATFORM_DESCRIPTION = _('Your Platform Description Here') PLATFORM_FACEBOOK_ACCOUNT = "http://www.facebook.com/YourPlatformFacebookAccount" PLATFORM_TWITTER_ACCOUNT = "@YourPlatformTwitterAccount" # Dummy secret key for dev/test SECRET_KEY = 'dev key' FAVICON_PATH = 'images/favicon.ico' STUDIO_NAME = _("Your Platform Studio") STUDIO_SHORT_NAME = _("Studio") FEATURES = { 'GITHUB_PUSH': False, # for consistency in user-experience, keep the value of the following 3 settings # in sync with the ones in lms/envs/common.py 'ENABLE_DISCUSSION_SERVICE': True, 'ENABLE_TEXTBOOK': True, # DO NOT SET TO True IN THIS FILE # Doing so will cause all courses to be released on production 'DISABLE_START_DATES': False, # When True, all courses will be active, regardless of start date # email address for studio staff (eg to request course creation) 'STUDIO_REQUEST_EMAIL': '', # Segment - must explicitly turn it on for production 'CMS_SEGMENT_KEY': None, # Enable URL that shows information about the status of various services 'ENABLE_SERVICE_STATUS': False, # Don't autoplay videos for course authors 'AUTOPLAY_VIDEOS': False, # Move the course author to next page when a video finishes. Set to True to # show an auto-advance button in videos. If False, videos never auto-advance. 'ENABLE_AUTOADVANCE_VIDEOS': False, # If set to True, new Studio users won't be able to author courses unless # an Open edX admin has added them to the course creator group. 'ENABLE_CREATOR_GROUP': True, # Turn off account locking if failed login attempts exceeds a limit 'ENABLE_MAX_FAILED_LOGIN_ATTEMPTS': False, # Allow editing of short description in course settings in cms 'EDITABLE_SHORT_DESCRIPTION': True, # Hide any Personally Identifiable Information from application logs 'SQUELCH_PII_IN_LOGS': False, # Toggles the embargo functionality, which blocks users # based on their location. 'EMBARGO': False, # Allow creating courses with non-ascii characters in the course id 'ALLOW_UNICODE_COURSE_ID': False, # Prevent concurrent logins per user 'PREVENT_CONCURRENT_LOGINS': False, # Turn off Video Upload Pipeline through Studio, by default 'ENABLE_VIDEO_UPLOAD_PIPELINE': False, # let students save and manage their annotations # for consistency in user-experience, keep the value of this feature flag # in sync with the one in lms/envs/common.py 'ENABLE_EDXNOTES': False, # Toggle to enable coordination with the Publisher tool (keep in sync with lms/envs/common.py) 'ENABLE_PUBLISHER': False, # Show a new field in "Advanced settings" that can store custom data about a # course and that can be read from themes 'ENABLE_OTHER_COURSE_SETTINGS': False, # Write new CSM history to the extended table. # This will eventually default to True and may be # removed since all installs should have the separate # extended history table. This is needed in the LMS and CMS # for migration consistency. 'ENABLE_CSMH_EXTENDED': True, # Enable support for content libraries. Note that content libraries are # only supported in courses using split mongo. 'ENABLE_CONTENT_LIBRARIES': True, # Milestones application flag 'MILESTONES_APP': False, # Prerequisite courses feature flag 'ENABLE_PREREQUISITE_COURSES': False, # Toggle course entrance exams feature 'ENTRANCE_EXAMS': False, # Toggle platform-wide course licensing 'LICENSING': False, # Enable the courseware search functionality 'ENABLE_COURSEWARE_INDEX': False, # Enable content libraries search functionality 'ENABLE_LIBRARY_INDEX': False, # Enable course reruns, which will always use the split modulestore 'ALLOW_COURSE_RERUNS': True, # Certificates Web/HTML Views 'CERTIFICATES_HTML_VIEW': False, # Teams feature 'ENABLE_TEAMS': True, # Show video bumper in Studio 'ENABLE_VIDEO_BUMPER': False, # Show issue open badges in Studio 'ENABLE_OPENBADGES': False, # How many seconds to show the bumper again, default is 7 days: 'SHOW_BUMPER_PERIODICITY': 7 * 24 * 3600, # Enable credit eligibility feature 'ENABLE_CREDIT_ELIGIBILITY': ENABLE_CREDIT_ELIGIBILITY, # Special Exams, aka Timed and Proctored Exams 'ENABLE_SPECIAL_EXAMS': False, 'ORGANIZATIONS_APP': False, # Show the language selector in the header 'SHOW_HEADER_LANGUAGE_SELECTOR': False, # At edX it's safe to assume that English transcripts are always available # This is not the case for all installations. # The default value in {lms,cms}/envs/common.py and xmodule/tests/test_video.py should be consistent. 'FALLBACK_TO_ENGLISH_TRANSCRIPTS': True, # Set this to False to facilitate cleaning up invalid xml from your modulestore. 'ENABLE_XBLOCK_XML_VALIDATION': True, # Allow public account creation 'ALLOW_PUBLIC_ACCOUNT_CREATION': True, # Whether or not the dynamic EnrollmentTrackUserPartition should be registered. 'ENABLE_ENROLLMENT_TRACK_USER_PARTITION': True, # Whether to send an email for failed password reset attempts or not. This is mainly useful for notifying users # that they don't have an account associated with email addresses they believe they've registered with. 'ENABLE_PASSWORD_RESET_FAILURE_EMAIL': False, # Whether archived courses (courses with end dates in the past) should be # shown in Studio in a separate list. 'ENABLE_SEPARATE_ARCHIVED_COURSES': True, # For acceptance and load testing 'AUTOMATIC_AUTH_FOR_TESTING': False, # Prevent auto auth from creating superusers or modifying existing users 'RESTRICT_AUTOMATIC_AUTH': True, 'ENABLE_INSTRUCTOR_ANALYTICS': False, 'PREVIEW_LMS_BASE': "preview.localhost:18000", 'ENABLE_GRADE_DOWNLOADS': True, 'ENABLE_MKTG_SITE': False, 'ENABLE_DISCUSSION_HOME_PANEL': True, 'ENABLE_CORS_HEADERS': False, 'ENABLE_CROSS_DOMAIN_CSRF_COOKIE': False, 'ENABLE_COUNTRY_ACCESS': False, 'ENABLE_CREDIT_API': False, 'ENABLE_OAUTH2_PROVIDER': False, 'ENABLE_SYSADMIN_DASHBOARD': False, 'ENABLE_MOBILE_REST_API': False, 'CUSTOM_COURSES_EDX': False, 'ENABLE_READING_FROM_MULTIPLE_HISTORY_TABLES': True, 'SHOW_FOOTER_LANGUAGE_SELECTOR': False, 'ENABLE_ENROLLMENT_RESET': False, 'DISABLE_MOBILE_COURSE_AVAILABLE': False, # .. toggle_name: ENABLE_CHANGE_USER_PASSWORD_ADMIN # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: Set to True to enable changing a user password through django admin. This is disabled by default because enabling allows a method to bypass password policy. # .. toggle_category: admin # .. toggle_use_cases: open_edx # .. toggle_creation_date: 2020-02-21 # .. toggle_expiration_date: None # .. toggle_tickets: 'https://github.com/edx/edx-platform/pull/21616' # .. toggle_status: supported # .. toggle_warnings: None 'ENABLE_CHANGE_USER_PASSWORD_ADMIN': False, ### ORA Feature Flags ### # .. toggle_name: ENABLE_ORA_TEAM_SUBMISSIONS # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: Set to True to enable team-based ORA submissions. # .. toggle_category: ora # .. toggle_use_cases: incremental_release # .. toggle_creation_date: 2020-03-03 # .. toggle_expiration_date: None # .. toggle_tickets: https://openedx.atlassian.net/browse/EDUCATOR-4951 # .. toggle_status: supported # .. toggle_warnings: None 'ENABLE_ORA_TEAM_SUBMISSIONS': False, # .. toggle_name: ENABLE_ORA_ALL_FILE_URLS # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: A "work-around" feature toggle meant to help in cases where some file uploads are not # discoverable. If enabled, will iterate through all possible file key suffixes up to the max for displaying # file metadata in staff assessments. # .. toggle_category: ora # .. toggle_use_cases: graceful_degradation # .. toggle_creation_date: 2020-03-03 # .. toggle_expiration_date: None # .. toggle_tickets: https://openedx.atlassian.net/browse/EDUCATOR-4951 # .. toggle_status: supported # .. toggle_warnings: None 'ENABLE_ORA_ALL_FILE_URLS': False, # .. toggle_name: ENABLE_ORA_USER_STATE_UPLOAD_DATA # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: A "work-around" feature toggle meant to help in cases where some file uploads are not # discoverable. If enabled, will pull file metadata from StudentModule.state for display in staff assessments. # .. toggle_category: ora # .. toggle_use_cases: graceful_degradation # .. toggle_creation_date: 2020-03-03 # .. toggle_expiration_date: None # .. toggle_tickets: https://openedx.atlassian.net/browse/EDUCATOR-4951 # .. toggle_status: supported # .. toggle_warnings: None 'ENABLE_ORA_USER_STATE_UPLOAD_DATA': False, # .. toggle_name: DEPRECATE_OLD_COURSE_KEYS_IN_STUDIO # .. toggle_implementation: DjangoSetting # .. toggle_default: True # .. toggle_description: Warn about removing support for deprecated course keys. # To enable, set to True. # To disable, set to False. # To enable with a custom support deadline, set to an ISO-8601 date string: # eg: '2020-09-01' # .. toggle_category: n/a # .. toggle_use_cases: incremental_release # .. toggle_creation_date: 2020-06-12 # .. toggle_expiration_date: 2020-09-01 # .. toggle_warnings: This can be removed once support is removed for deprecated course keys. # .. toggle_tickets: https://openedx.atlassian.net/browse/DEPR-58 # .. toggle_status: supported 'DEPRECATE_OLD_COURSE_KEYS_IN_STUDIO': True, # .. toggle_name: ENABLE_COURSE_MODE_CREATION # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: Set to True to enable course mode creation through studio. # .. toggle_category: n/a # .. toggle_use_cases: open_edx # .. toggle_creation_date: 2021-06-21 # .. toggle_expiration_date: None # .. toggle_warnings: None # .. toggle_tickets: https://github.com/eduNEXT/edunext-platform/pull/524 # .. toggle_status: supported 'ENABLE_COURSE_MODE_CREATION': False, } ENABLE_JASMINE = False # List of logout URIs for each IDA that the learner should be logged out of when they logout of the LMS. Only applies to # IDA for which the social auth flow uses DOT (Django OAuth Toolkit). IDA_LOGOUT_URI_LIST = [] ############################# SOCIAL MEDIA SHARING ############################# SOCIAL_SHARING_SETTINGS = { # Note: Ensure 'CUSTOM_COURSE_URLS' has a matching value in lms/envs/common.py 'CUSTOM_COURSE_URLS': False, 'DASHBOARD_FACEBOOK': False, 'CERTIFICATE_FACEBOOK': False, 'CERTIFICATE_TWITTER': False, 'DASHBOARD_TWITTER': False } SOCIAL_MEDIA_FOOTER_URLS = {} # This is just a placeholder image. # Site operators can customize this with their organization's image. FOOTER_ORGANIZATION_IMAGE = "images/logo.png" ############################# SET PATH INFORMATION ############################# PROJECT_ROOT = path(__file__).abspath().dirname().dirname() # /edx-platform/cms REPO_ROOT = PROJECT_ROOT.dirname() COMMON_ROOT = REPO_ROOT / "common" OPENEDX_ROOT = REPO_ROOT / "openedx" CMS_ROOT = REPO_ROOT / "cms" LMS_ROOT = REPO_ROOT / "lms" ENV_ROOT = REPO_ROOT.dirname() # virtualenv dir /edx-platform is in COURSES_ROOT = ENV_ROOT / "data" GITHUB_REPO_ROOT = ENV_ROOT / "data" sys.path.append(REPO_ROOT) sys.path.append(PROJECT_ROOT / 'djangoapps') sys.path.append(COMMON_ROOT / 'djangoapps') # For geolocation ip database GEOIP_PATH = REPO_ROOT / "common/static/data/geoip/GeoLite2-Country.mmdb" DATA_DIR = COURSES_ROOT DJFS = { 'type': 'osfs', 'directory_root': '/edx/var/edxapp/django-pyfs/static/django-pyfs', 'url_root': '/static/django-pyfs', } ######################## BRANCH.IO ########################### BRANCH_IO_KEY = '' ######################## GOOGLE ANALYTICS ########################### GOOGLE_ANALYTICS_ACCOUNT = None ############################# TEMPLATE CONFIGURATION ############################# # Mako templating import tempfile MAKO_MODULE_DIR = os.path.join(tempfile.gettempdir(), 'mako_cms') MAKO_TEMPLATE_DIRS_BASE = [ PROJECT_ROOT / 'templates', COMMON_ROOT / 'templates', COMMON_ROOT / 'djangoapps' / 'pipeline_mako' / 'templates', COMMON_ROOT / 'static', # required to statically include common Underscore templates OPENEDX_ROOT / 'core' / 'djangoapps' / 'cors_csrf' / 'templates', OPENEDX_ROOT / 'core' / 'djangoapps' / 'dark_lang' / 'templates', OPENEDX_ROOT / 'core' / 'lib' / 'license' / 'templates', CMS_ROOT / 'djangoapps' / 'pipeline_js' / 'templates', ] CONTEXT_PROCESSORS = ( 'django.template.context_processors.request', 'django.template.context_processors.static', 'django.contrib.messages.context_processors.messages', 'django.template.context_processors.i18n', 'django.contrib.auth.context_processors.auth', # this is required for admin 'django.template.context_processors.csrf', 'help_tokens.context_processor', 'openedx.core.djangoapps.site_configuration.context_processors.configuration_context', ) # Django templating TEMPLATES = [ { 'NAME': 'django', 'BACKEND': 'django.template.backends.django.DjangoTemplates', # Don't look for template source files inside installed applications. 'APP_DIRS': False, # Instead, look for template source files in these dirs. 'DIRS': _make_mako_template_dirs, # Options specific to this backend. 'OPTIONS': { 'loaders': ( # We have to use mako-aware template loaders to be able to include # mako templates inside django templates (such as main_django.html). 'openedx.core.djangoapps.theming.template_loaders.ThemeTemplateLoader', 'edxmako.makoloader.MakoFilesystemLoader', 'edxmako.makoloader.MakoAppDirectoriesLoader', ), 'context_processors': CONTEXT_PROCESSORS, # Change 'debug' in your environment settings files - not here. 'debug': False } }, { 'NAME': 'mako', 'BACKEND': 'edxmako.backend.Mako', 'APP_DIRS': False, 'DIRS': _make_mako_template_dirs, 'OPTIONS': { 'context_processors': CONTEXT_PROCESSORS, 'debug': False, } }, { # This separate copy of the Mako backend is used to render previews using the LMS templates 'NAME': 'preview', 'BACKEND': 'edxmako.backend.Mako', 'APP_DIRS': False, 'DIRS': lms.envs.common.MAKO_TEMPLATE_DIRS_BASE, 'OPTIONS': { 'context_processors': CONTEXT_PROCESSORS, 'debug': False, 'namespace': 'lms.main', } }, ] derived_collection_entry('TEMPLATES', 0, 'DIRS') derived_collection_entry('TEMPLATES', 1, 'DIRS') DEFAULT_TEMPLATE_ENGINE = TEMPLATES[0] #################################### AWS ####################################### # S3BotoStorage insists on a timeout for uploaded assets. We should make it # permanent instead, but rather than trying to figure out exactly where that # setting is, I'm just bumping the expiration time to something absurd (100 # years). This is only used if DEFAULT_FILE_STORAGE is overriden to use S3 # in the global settings.py AWS_SES_REGION_NAME = 'us-east-1' AWS_SES_REGION_ENDPOINT = 'email.us-east-1.amazonaws.com' AWS_ACCESS_KEY_ID = None AWS_SECRET_ACCESS_KEY = None AWS_QUERYSTRING_AUTH = False AWS_STORAGE_BUCKET_NAME = 'SET-ME-PLEASE (ex. bucket-name)' AWS_S3_CUSTOM_DOMAIN = 'SET-ME-PLEASE (ex. bucket-name.s3.amazonaws.com)' ############################################################################## EDX_ROOT_URL = '' # use the ratelimit backend to prevent brute force attacks AUTHENTICATION_BACKENDS = [ 'rules.permissions.ObjectPermissionBackend', 'openedx.core.djangoapps.oauth_dispatch.dot_overrides.backends.EdxRateLimitedAllowAllUsersModelBackend', 'bridgekeeper.backends.RulePermissionBackend', ] STATIC_ROOT_BASE = '/edx/var/edxapp/staticfiles' # License for serving content in China ICP_LICENSE = None ICP_LICENSE_INFO = {} LOGGING_ENV = 'sandbox' LMS_BASE = 'localhost:18000' LMS_ROOT_URL = "https://localhost:18000" LMS_INTERNAL_ROOT_URL = LMS_ROOT_URL LOGIN_REDIRECT_URL = EDX_ROOT_URL + '/home/' # TODO: Determine if LOGIN_URL could be set to the FRONTEND_LOGIN_URL value instead. LOGIN_URL = reverse_lazy('login_redirect_to_lms') FRONTEND_LOGIN_URL = lambda settings: settings.LMS_ROOT_URL + '/login' derived('FRONTEND_LOGIN_URL') FRONTEND_LOGOUT_URL = lambda settings: settings.LMS_ROOT_URL + '/logout' derived('FRONTEND_LOGOUT_URL') FRONTEND_REGISTER_URL = lambda settings: settings.LMS_ROOT_URL + '/register' derived('FRONTEND_REGISTER_URL') LMS_ENROLLMENT_API_PATH = "/api/enrollment/v1/" ENTERPRISE_API_URL = LMS_INTERNAL_ROOT_URL + '/enterprise/api/v1/' ENTERPRISE_CONSENT_API_URL = LMS_INTERNAL_ROOT_URL + '/consent/api/v1/' ENTERPRISE_MARKETING_FOOTER_QUERY_PARAMS = {} # Public domain name of Studio (should be resolvable from the end-user's browser) CMS_BASE = 'localhost:18010' LOG_DIR = '/edx/var/log/edx' LOCAL_LOGLEVEL = "INFO" MAINTENANCE_BANNER_TEXT = 'Sample banner message' WIKI_ENABLED = True CERT_QUEUE = 'certificates' # List of logout URIs for each IDA that the learner should be logged out of when they logout of # Studio. Only applies to IDA for which the social auth flow uses DOT (Django OAuth Toolkit). IDA_LOGOUT_URI_LIST = [] ELASTIC_SEARCH_CONFIG = [ { 'use_ssl': False, 'host': 'localhost', 'port': 9200 } ] # These are standard regexes for pulling out info like course_ids, usage_ids, etc. # They are used so that URLs with deprecated-format strings still work. from lms.envs.common import ( COURSE_KEY_PATTERN, COURSE_KEY_REGEX, COURSE_ID_PATTERN, USAGE_KEY_PATTERN, ASSET_KEY_PATTERN ) ######################### CSRF ######################################### # Forwards-compatibility with Django 1.7 CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52 # It is highly recommended that you override this in any environment accessed by # end users CSRF_COOKIE_SECURE = False CROSS_DOMAIN_CSRF_COOKIE_DOMAIN = '' CROSS_DOMAIN_CSRF_COOKIE_NAME = '' CSRF_TRUSTED_ORIGINS = [] #################### CAPA External Code Evaluation ############################# XQUEUE_INTERFACE = { 'url': 'http://localhost:18040', 'basic_auth': ['edx', 'edx'], 'django_auth': { 'username': 'lms', 'password': 'password' } } ################################# Middleware ################################### MIDDLEWARE = [ 'openedx.core.lib.x_forwarded_for.middleware.XForwardedForMiddleware', 'crum.CurrentRequestUserMiddleware', # A newer and safer request cache. 'edx_django_utils.cache.middleware.RequestCacheMiddleware', 'edx_django_utils.monitoring.middleware.MonitoringMemoryMiddleware', # Cookie monitoring 'openedx.core.lib.request_utils.CookieMetricsMiddleware', 'openedx.core.djangoapps.header_control.middleware.HeaderControlMiddleware', 'django.middleware.cache.UpdateCacheMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.sites.middleware.CurrentSiteMiddleware', 'edx_rest_framework_extensions.auth.jwt.middleware.JwtAuthCookieMiddleware', # Allows us to define redirects via Django admin 'django_sites_extensions.middleware.RedirectMiddleware', # Instead of SessionMiddleware, we use a more secure version # 'django.contrib.sessions.middleware.SessionMiddleware', 'openedx.core.djangoapps.safe_sessions.middleware.SafeSessionMiddleware', 'method_override.middleware.MethodOverrideMiddleware', # Instead of AuthenticationMiddleware, we use a cache-backed version 'openedx.core.djangoapps.cache_toolbox.middleware.CacheBackedAuthenticationMiddleware', 'student.middleware.UserStandingMiddleware', 'openedx.core.djangoapps.contentserver.middleware.StaticContentServer', 'django.contrib.messages.middleware.MessageMiddleware', 'track.middleware.TrackMiddleware', # This is used to set or update the user language preferences. 'openedx.core.djangoapps.lang_pref.middleware.LanguagePreferenceMiddleware', # Allows us to dark-launch particular languages 'openedx.core.djangoapps.dark_lang.middleware.DarkLangMiddleware', 'openedx.core.djangoapps.embargo.middleware.EmbargoMiddleware', # Detects user-requested locale from 'accept-language' header in http request 'django.middleware.locale.LocaleMiddleware', 'codejail.django_integration.ConfigureCodeJailMiddleware', # catches any uncaught RateLimitExceptions and returns a 403 instead of a 500 'ratelimitbackend.middleware.RateLimitMiddleware', # for expiring inactive sessions 'openedx.core.djangoapps.session_inactivity_timeout.middleware.SessionInactivityTimeout', 'openedx.core.djangoapps.theming.middleware.CurrentSiteThemeMiddleware', # use Django built in clickjacking protection 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'waffle.middleware.WaffleMiddleware', # Enables force_django_cache_miss functionality for TieredCache. 'edx_django_utils.cache.middleware.TieredCacheMiddleware', # Outputs monitoring metrics for a request. 'edx_rest_framework_extensions.middleware.RequestMetricsMiddleware', 'edx_rest_framework_extensions.auth.jwt.middleware.EnsureJWTAuthSettingsMiddleware', # Handles automatically storing user ids in django-simple-history tables when possible. 'simple_history.middleware.HistoryRequestMiddleware', # This must be last so that it runs first in the process_response chain 'openedx.core.djangoapps.site_configuration.middleware.SessionCookieDomainOverrideMiddleware', ] EXTRA_MIDDLEWARE_CLASSES = [] # Clickjacking protection can be disabled by setting this to 'ALLOW' X_FRAME_OPTIONS = 'DENY' # Platform for Privacy Preferences header P3P_HEADER = 'CP="Open EdX does not have a P3P policy."' ############# XBlock Configuration ########## # Import after sys.path fixup from xmodule.modulestore.inheritance import InheritanceMixin from xmodule.modulestore import prefer_xmodules from xmodule.x_module import XModuleMixin # These are the Mixins that should be added to every XBlock. # This should be moved into an XBlock Runtime/Application object # once the responsibility of XBlock creation is moved out of modulestore - cpennington XBLOCK_MIXINS = ( LmsBlockMixin, InheritanceMixin, XModuleMixin, EditInfoMixin, AuthoringMixin, ) XBLOCK_SELECT_FUNCTION = prefer_xmodules # Paths to wrapper methods which should be applied to every XBlock's FieldData. XBLOCK_FIELD_DATA_WRAPPERS = () ############################ ORA 2 ############################################ # By default, don't use a file prefix ORA2_FILE_PREFIX = 'default_env-default_deployment/ora2' # Default File Upload Storage bucket and prefix. Used by the FileUpload Service. FILE_UPLOAD_STORAGE_BUCKET_NAME = 'SET-ME-PLEASE (ex. bucket-name)' FILE_UPLOAD_STORAGE_PREFIX = 'submissions_attachments' ############################ Modulestore Configuration ################################ DOC_STORE_CONFIG = { 'db': 'edxapp', 'host': 'localhost', 'replicaSet': '', 'user': 'edxapp', 'port': 27017, 'collection': 'modulestore', 'ssl': False, # https://api.mongodb.com/python/2.9.1/api/pymongo/mongo_client.html#module-pymongo.mongo_client # default is never timeout while the connection is open, #this means it needs to explicitly close raising pymongo.errors.NetworkTimeout 'socketTimeoutMS': 6000, 'connectTimeoutMS': 2000, # default is 20000, I believe raises pymongo.errors.ConnectionFailure # Not setting waitQueueTimeoutMS and waitQueueMultiple since pymongo defaults to nobody being allowed to wait 'auth_source': None, 'read_preference': 'PRIMARY' # If 'asset_collection' defined, it'll be used # as the collection name for asset metadata. # Otherwise, a default collection name will be used. } CONTENTSTORE = { 'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore', # connection strings are duplicated temporarily for # backward compatibility 'OPTIONS': { 'db': 'edxapp', 'host': 'localhost', 'password': 'password', 'port': 27017, 'user': 'edxapp', 'ssl': False, 'auth_source': None }, 'ADDITIONAL_OPTIONS': {}, 'DOC_STORE_CONFIG': DOC_STORE_CONFIG } MODULESTORE_BRANCH = 'draft-preferred' MODULESTORE = { 'default': { 'ENGINE': 'xmodule.modulestore.mixed.MixedModuleStore', 'OPTIONS': { 'mappings': {}, 'stores': [ { 'NAME': 'split', 'ENGINE': 'xmodule.modulestore.split_mongo.split_draft.DraftVersioningModuleStore', 'DOC_STORE_CONFIG': DOC_STORE_CONFIG, 'OPTIONS': { 'default_class': 'xmodule.hidden_module.HiddenDescriptor', 'fs_root': DATA_DIR, 'render_template': 'edxmako.shortcuts.render_to_string', } }, { 'NAME': 'draft', 'ENGINE': 'xmodule.modulestore.mongo.DraftMongoModuleStore', 'DOC_STORE_CONFIG': DOC_STORE_CONFIG, 'OPTIONS': { 'default_class': 'xmodule.hidden_module.HiddenDescriptor', 'fs_root': DATA_DIR, 'render_template': 'edxmako.shortcuts.render_to_string', } } ] } } } # Modulestore-level field override providers. These field override providers don't # require student context. MODULESTORE_FIELD_OVERRIDE_PROVIDERS = () DATABASES = { # edxapp's edxapp-migrate scripts and the edxapp_migrate play # will ensure that any DB not named read_replica will be migrated # for both the lms and cms. 'default': { 'ATOMIC_REQUESTS': True, 'CONN_MAX_AGE': 0, 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'NAME': 'edxapp', 'OPTIONS': {}, 'PASSWORD': 'password', 'PORT': '3306', 'USER': 'edxapp001' }, 'read_replica': { 'CONN_MAX_AGE': 0, 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'NAME': 'edxapp', 'OPTIONS': {}, 'PASSWORD': 'password', 'PORT': '3306', 'USER': 'edxapp001' }, 'student_module_history': { 'CONN_MAX_AGE': 0, 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'NAME': 'edxapp_csmh', 'OPTIONS': {}, 'PASSWORD': 'password', 'PORT': '3306', 'USER': 'edxapp001' } } #################### Python sandbox ############################################ CODE_JAIL = { # from https://github.com/edx/codejail/blob/master/codejail/django_integration.py#L24, '' should be same as None 'python_bin': '/edx/app/edxapp/venvs/edxapp-sandbox/bin/python', # User to run as in the sandbox. 'user': 'sandbox', # Configurable limits. 'limits': { # How many CPU seconds can jailed code use? 'CPU': 1, # Limit the memory of the jailed process to something high but not # infinite (512MiB in bytes) 'VMEM': 536870912, # Time in seconds that the jailed process has to run. 'REALTIME': 3, 'PROXY': 0, # Needs to be non-zero so that jailed code can use it as their temp directory.(1MiB in bytes) 'FSIZE': 1048576, }, } # Some courses are allowed to run unsafe code. This is a list of regexes, one # of them must match the course id for that course to run unsafe code. # # For example: # # COURSES_WITH_UNSAFE_CODE = [ # r"Harvard/XY123.1/.*" # ] COURSES_WITH_UNSAFE_CODE = [] ############################ DJANGO_BUILTINS ################################ # Change DEBUG in your environment settings files, not here DEBUG = False SESSION_COOKIE_SECURE = False SESSION_SAVE_EVERY_REQUEST = False SESSION_SERIALIZER = 'openedx.core.lib.session_serializers.PickleSerializer' SESSION_COOKIE_DOMAIN = "" SESSION_COOKIE_NAME = 'sessionid' # Site info SITE_NAME = "localhost" HTTPS = 'on' ROOT_URLCONF = 'cms.urls' COURSE_IMPORT_EXPORT_BUCKET = '' ALTERNATE_WORKER_QUEUES = 'lms' STATIC_URL_BASE = '/static/' X_FRAME_OPTIONS = 'DENY' GIT_REPO_EXPORT_DIR = '/edx/var/edxapp/export_course_repos' # Email TECH_SUPPORT_EMAIL = '[email protected]' EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST = 'localhost' EMAIL_PORT = 25 EMAIL_USE_TLS = False EMAIL_HOST_USER = '' EMAIL_HOST_PASSWORD = '' DEFAULT_FROM_EMAIL = '[email protected]' DEFAULT_FEEDBACK_EMAIL = '[email protected]' TECH_SUPPORT_EMAIL = '[email protected]' CONTACT_EMAIL = '[email protected]' BUGS_EMAIL = '[email protected]' SERVER_EMAIL = '[email protected]' UNIVERSITY_EMAIL = '[email protected]' PRESS_EMAIL = '[email protected]' ADMINS = [] MANAGERS = ADMINS # Initialize to 'release', but read from JSON in production.py EDX_PLATFORM_REVISION = 'release' # Static content STATIC_URL = '/static/studio/' STATIC_ROOT = ENV_ROOT / "staticfiles" / 'studio' STATICFILES_DIRS = [ COMMON_ROOT / "static", PROJECT_ROOT / "static", # This is how you would use the textbook images locally # ("book", ENV_ROOT / "book_images"), ] # Locale/Internationalization CELERY_TIMEZONE = 'UTC' TIME_ZONE = 'UTC' LANGUAGE_CODE = 'en' # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGES_BIDI = lms.envs.common.LANGUAGES_BIDI LANGUAGE_COOKIE = lms.envs.common.LANGUAGE_COOKIE LANGUAGES = lms.envs.common.LANGUAGES LANGUAGE_DICT = dict(LANGUAGES) # Languages supported for custom course certificate templates CERTIFICATE_TEMPLATE_LANGUAGES = { 'en': 'English', 'es': 'Español', } USE_I18N = True USE_L10N = True STATICI18N_FILENAME_FUNCTION = 'statici18n.utils.legacy_filename' STATICI18N_ROOT = PROJECT_ROOT / "static" LOCALE_PATHS = _make_locale_paths derived('LOCALE_PATHS') # Messages MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage' COURSE_IMPORT_EXPORT_STORAGE = 'django.core.files.storage.FileSystemStorage' ##### EMBARGO ##### EMBARGO_SITE_REDIRECT_URL = None ############################### PIPELINE ####################################### PIPELINE = { 'PIPELINE_ENABLED': True, # Don't use compression by default 'CSS_COMPRESSOR': None, 'JS_COMPRESSOR': None, # Don't wrap JavaScript as there is code that depends upon updating the global namespace 'DISABLE_WRAPPER': True, # Specify the UglifyJS binary to use 'UGLIFYJS_BINARY': 'node_modules/.bin/uglifyjs', 'COMPILERS': (), 'YUI_BINARY': 'yui-compressor', } STATICFILES_STORAGE = 'openedx.core.storage.ProductionStorage' STATICFILES_STORAGE_KWARGS = {} # List of finder classes that know how to find static files in various locations. # Note: the pipeline finder is included to be able to discover optimized files STATICFILES_FINDERS = [ 'openedx.core.djangoapps.theming.finders.ThemeFilesFinder', 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', 'openedx.core.lib.xblock_pipeline.finder.XBlockPipelineFinder', 'pipeline.finders.PipelineFinder', ] from openedx.core.lib.rooted_paths import rooted_glob PIPELINE['STYLESHEETS'] = { 'style-vendor': { 'source_filenames': [ 'css/vendor/normalize.css', 'css/vendor/font-awesome.css', 'css/vendor/html5-input-polyfills/number-polyfill.css', 'js/vendor/CodeMirror/codemirror.css', 'css/vendor/ui-lightness/jquery-ui-1.8.22.custom.css', 'css/vendor/jquery.qtip.min.css', 'js/vendor/markitup/skins/simple/style.css', 'js/vendor/markitup/sets/wiki/style.css', ], 'output_filename': 'css/cms-style-vendor.css', }, 'style-vendor-tinymce-content': { 'source_filenames': [ 'css/tinymce-studio-content-fonts.css', 'js/vendor/tinymce/js/tinymce/skins/studio-tmce4/content.min.css', 'css/tinymce-studio-content.css' ], 'output_filename': 'css/cms-style-vendor-tinymce-content.css', }, 'style-vendor-tinymce-skin': { 'source_filenames': [ 'js/vendor/tinymce/js/tinymce/skins/studio-tmce4/skin.min.css' ], 'output_filename': 'css/cms-style-vendor-tinymce-skin.css', }, 'style-main-v1': { 'source_filenames': [ 'css/studio-main-v1.css', ], 'output_filename': 'css/studio-main-v1.css', }, 'style-main-v1-rtl': { 'source_filenames': [ 'css/studio-main-v1-rtl.css', ], 'output_filename': 'css/studio-main-v1-rtl.css', }, 'style-main-v2': { 'source_filenames': [ 'css/studio-main-v2.css', ], 'output_filename': 'css/studio-main-v2.css', }, 'style-main-v2-rtl': { 'source_filenames': [ 'css/studio-main-v2-rtl.css', ], 'output_filename': 'css/studio-main-v2-rtl.css', }, 'style-xmodule-annotations': { 'source_filenames': [ 'css/vendor/ova/annotator.css', 'css/vendor/ova/edx-annotator.css', 'css/vendor/ova/video-js.min.css', 'css/vendor/ova/rangeslider.css', 'css/vendor/ova/share-annotator.css', 'css/vendor/ova/richText-annotator.css', 'css/vendor/ova/tags-annotator.css', 'css/vendor/ova/flagging-annotator.css', 'css/vendor/ova/diacritic-annotator.css', 'css/vendor/ova/grouping-annotator.css', 'css/vendor/ova/ova.css', 'js/vendor/ova/catch/css/main.css' ], 'output_filename': 'css/cms-style-xmodule-annotations.css', }, } base_vendor_js = [ 'js/src/utility.js', 'js/src/logger.js', 'common/js/vendor/jquery.js', 'common/js/vendor/jquery-migrate.js', 'js/vendor/jquery.cookie.js', 'js/vendor/url.min.js', 'common/js/vendor/underscore.js', 'common/js/vendor/underscore.string.js', 'common/js/vendor/backbone.js', 'js/vendor/URI.min.js', # Make some edX UI Toolkit utilities available in the global "edx" namespace 'edx-ui-toolkit/js/utils/global-loader.js', 'edx-ui-toolkit/js/utils/string-utils.js', 'edx-ui-toolkit/js/utils/html-utils.js', # Load Bootstrap and supporting libraries 'common/js/vendor/popper.js', 'common/js/vendor/bootstrap.js', # Finally load RequireJS 'common/js/vendor/require.js' ] # test_order: Determines the position of this chunk of javascript on # the jasmine test page PIPELINE['JAVASCRIPT'] = { 'base_vendor': { 'source_filenames': base_vendor_js, 'output_filename': 'js/cms-base-vendor.js', }, 'module-js': { 'source_filenames': ( rooted_glob(COMMON_ROOT / 'static/', 'xmodule/descriptors/js/*.js') + rooted_glob(COMMON_ROOT / 'static/', 'xmodule/modules/js/*.js') + rooted_glob(COMMON_ROOT / 'static/', 'common/js/discussion/*.js') ), 'output_filename': 'js/cms-modules.js', 'test_order': 1 }, } STATICFILES_IGNORE_PATTERNS = ( "*.py", "*.pyc", # It would be nice if we could do, for example, "**/*.scss", # but these strings get passed down to the `fnmatch` module, # which doesn't support that. :( # http://docs.python.org/2/library/fnmatch.html "sass/*.scss", "sass/*/*.scss", "sass/*/*/*.scss", "sass/*/*/*/*.scss", # Ignore tests "spec", "spec_helpers", # Symlinks used by js-test-tool "xmodule_js", "common_static", ) ################################# DJANGO-REQUIRE ############################### # The baseUrl to pass to the r.js optimizer, relative to STATIC_ROOT. REQUIRE_BASE_URL = "./" # The name of a build profile to use for your project, relative to REQUIRE_BASE_URL. # A sensible value would be 'app.build.js'. Leave blank to use the built-in default build profile. # Set to False to disable running the default profile (e.g. if only using it to build Standalone # Modules) REQUIRE_BUILD_PROFILE = "cms/js/build.js" # The name of the require.js script used by your project, relative to REQUIRE_BASE_URL. REQUIRE_JS = "js/vendor/requiresjs/require.js" # Whether to run django-require in debug mode. REQUIRE_DEBUG = False ########################## DJANGO WEBPACK LOADER ############################## WEBPACK_LOADER = { 'DEFAULT': { 'BUNDLE_DIR_NAME': 'bundles/', 'STATS_FILE': os.path.join(STATIC_ROOT, 'webpack-stats.json') }, 'WORKERS': { 'BUNDLE_DIR_NAME': 'bundles/', 'STATS_FILE': os.path.join(STATIC_ROOT, 'webpack-worker-stats.json') } } WEBPACK_CONFIG_PATH = 'webpack.prod.config.js' ################################# CELERY ###################################### # Auto discover tasks fails to detect contentstore tasks CELERY_IMPORTS = ( 'cms.djangoapps.contentstore.tasks', 'openedx.core.djangoapps.bookmarks.tasks', 'openedx.core.djangoapps.ccxcon.tasks', ) # Message configuration CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_MESSAGE_COMPRESSION = 'gzip' # Results configuration CELERY_IGNORE_RESULT = False CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True # Events configuration CELERY_TRACK_STARTED = True CELERY_SEND_EVENTS = True CELERY_SEND_TASK_SENT_EVENT = True # Exchange configuration CELERY_DEFAULT_EXCHANGE = 'edx.core' CELERY_DEFAULT_EXCHANGE_TYPE = 'direct' # Queues configuration HIGH_PRIORITY_QUEUE = 'edx.core.high' DEFAULT_PRIORITY_QUEUE = 'edx.core.default' CELERY_QUEUE_HA_POLICY = 'all' CELERY_CREATE_MISSING_QUEUES = True CELERY_DEFAULT_QUEUE = DEFAULT_PRIORITY_QUEUE CELERY_DEFAULT_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE CELERY_QUEUES = [ 'edx.cms.core.default', 'edx.cms.core.high', ] CELERY_BROKER_TRANSPORT = 'amqp' CELERY_BROKER_HOSTNAME = 'localhost' CELERY_BROKER_USER = 'celery' CELERY_BROKER_PASSWORD = 'celery' CELERY_BROKER_VHOST = '' CELERY_BROKER_USE_SSL = False CELERY_EVENT_QUEUE_TTL = None ############################## Video ########################################## YOUTUBE = { # YouTube JavaScript API 'API': 'https://www.youtube.com/iframe_api', 'TEST_TIMEOUT': 1500, # URL to get YouTube metadata 'METADATA_URL': 'https://www.googleapis.com/youtube/v3/videos', # Current youtube api for requesting transcripts. # For example: http://video.google.com/timedtext?lang=en&v=j_jEn79vS3g. 'TEXT_API': { 'url': 'video.google.com/timedtext', 'params': { 'lang': 'en', 'v': 'set_youtube_id_of_11_symbols_here', }, }, 'IMAGE_API': 'http://img.youtube.com/vi/{youtube_id}/0.jpg', # /maxresdefault.jpg for 1920*1080 } YOUTUBE_API_KEY = 'PUT_YOUR_API_KEY_HERE' ############################# VIDEO UPLOAD PIPELINE ############################# VIDEO_UPLOAD_PIPELINE = { 'BUCKET': '', 'ROOT_PATH': '', 'CONCURRENT_UPLOAD_LIMIT': 4, } ############################ APPS ##################################### # The order of INSTALLED_APPS is important, when adding new apps here # remember to check that you are not creating new # RemovedInDjango19Warnings in the test logs. INSTALLED_APPS = [ # Standard apps 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.humanize', 'django.contrib.redirects', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.messages', 'django.contrib.staticfiles', 'djcelery', 'method_override', # Common Initialization 'openedx.core.djangoapps.common_initialization.apps.CommonInitializationConfig', # Common views 'openedx.core.djangoapps.common_views', # API access administration 'openedx.core.djangoapps.api_admin', # History tables 'simple_history', # Database-backed configuration 'config_models', 'openedx.core.djangoapps.config_model_utils', 'waffle', # Monitor the status of services 'openedx.core.djangoapps.service_status', # Video module configs (This will be moved to Video once it becomes an XBlock) 'openedx.core.djangoapps.video_config', # edX Video Pipeline integration 'openedx.core.djangoapps.video_pipeline', # For CMS 'contentstore.apps.ContentstoreConfig', 'openedx.core.djangoapps.contentserver', 'course_creators', 'student.apps.StudentConfig', # misleading name due to sharing with lms 'openedx.core.djangoapps.course_groups', # not used in cms (yet), but tests run 'xblock_config.apps.XBlockConfig', # New (Blockstore-based) XBlock runtime 'openedx.core.djangoapps.xblock.apps.StudioXBlockAppConfig', # Maintenance tools 'maintenance', 'openedx.core.djangoapps.util.apps.UtilConfig', # Tracking 'track', 'eventtracking.django.apps.EventTrackingConfig', # For asset pipelining 'edxmako.apps.EdxMakoConfig', 'pipeline', 'static_replace', 'require', 'webpack_loader', # Site configuration for theming and behavioral modification 'openedx.core.djangoapps.site_configuration', # Ability to detect and special-case crawler behavior 'openedx.core.djangoapps.crawlers', # Discussion 'openedx.core.djangoapps.django_comment_common', # for course creator table 'django.contrib.admin', # for managing course modes 'course_modes.apps.CourseModesConfig', # Verified Track Content Cohorting (Beta feature that will hopefully be removed) 'openedx.core.djangoapps.verified_track_content', # Dark-launching languages 'openedx.core.djangoapps.dark_lang', # # User preferences 'wiki', 'django_notify', 'course_wiki', # Our customizations 'mptt', 'sekizai', 'openedx.core.djangoapps.user_api', # Country embargo support 'openedx.core.djangoapps.embargo', # Course action state 'course_action_state', # Additional problem types 'edx_jsme', # Molecular Structure 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig', 'openedx.core.djangoapps.content.block_structure.apps.BlockStructureConfig', # edx-milestones service 'milestones', # Self-paced course configuration 'openedx.core.djangoapps.self_paced', # Coursegraph 'openedx.core.djangoapps.coursegraph.apps.CoursegraphConfig', # Credit courses 'openedx.core.djangoapps.credit.apps.CreditConfig', 'xblock_django', # Catalog integration 'openedx.core.djangoapps.catalog', # django-oauth-toolkit 'oauth2_provider', # These are apps that aren't strictly needed by Studio, but are imported by # other apps that are. Django 1.8 wants to have imported models supported # by installed apps. 'openedx.core.djangoapps.oauth_dispatch.apps.OAuthDispatchAppConfig', 'lms.djangoapps.courseware', 'coursewarehistoryextended', 'survey.apps.SurveyConfig', 'lms.djangoapps.verify_student.apps.VerifyStudentConfig', 'completion', # System Wide Roles 'openedx.core.djangoapps.system_wide_roles', # Static i18n support 'statici18n', # Tagging 'cms.lib.xblock.tagging', # Enables default site and redirects 'django_sites_extensions', # additional release utilities to ease automation 'release_util', # rule-based authorization 'rules.apps.AutodiscoverRulesConfig', 'bridgekeeper', # management of user-triggered async tasks (course import/export, etc.) 'user_tasks', # CMS specific user task handling 'cms_user_tasks.apps.CmsUserTasksConfig', # Unusual migrations 'database_fixups', # Customized celery tasks, including persisting failed tasks so they can # be retried 'celery_utils', # Waffle related utilities 'openedx.core.djangoapps.waffle_utils', # DRF filters 'django_filters', 'cms.djangoapps.api', # Entitlements, used in openedx tests 'entitlements', # Asset management for mako templates 'pipeline_mako', # API Documentation 'drf_yasg', 'openedx.features.course_duration_limits', 'openedx.features.content_type_gating', 'openedx.features.discounts', 'experiments', 'openedx.core.djangoapps.external_user_ids', # so sample_task is available to celery workers 'openedx.core.djangoapps.heartbeat', # signal handlers to capture course dates into edx-when 'openedx.core.djangoapps.course_date_signals', # Management of per-user schedules 'openedx.core.djangoapps.schedules', 'rest_framework_jwt', ] ################# EDX MARKETING SITE ################################## EDXMKTG_LOGGED_IN_COOKIE_NAME = 'edxloggedin' EDXMKTG_USER_INFO_COOKIE_NAME = 'edx-user-info' EDXMKTG_USER_INFO_COOKIE_VERSION = 1 MKTG_URLS = {} MKTG_URL_OVERRIDES = {} MKTG_URL_LINK_MAP = { } SUPPORT_SITE_LINK = '' ID_VERIFICATION_SUPPORT_LINK = '' PASSWORD_RESET_SUPPORT_LINK = '' ACTIVATION_EMAIL_SUPPORT_LINK = '' ############################## EVENT TRACKING ################################# CMS_SEGMENT_KEY = None TRACK_MAX_EVENT = 50000 TRACKING_BACKENDS = { 'logger': { 'ENGINE': 'track.backends.logger.LoggerBackend', 'OPTIONS': { 'name': 'tracking' } } } # We're already logging events, and we don't want to capture user # names/passwords. Heartbeat events are likely not interesting. TRACKING_IGNORE_URL_PATTERNS = [r'^/event', r'^/login', r'^/heartbeat'] EVENT_TRACKING_ENABLED = True EVENT_TRACKING_BACKENDS = { 'tracking_logs': { 'ENGINE': 'eventtracking.backends.routing.RoutingBackend', 'OPTIONS': { 'backends': { 'logger': { 'ENGINE': 'eventtracking.backends.logger.LoggerBackend', 'OPTIONS': { 'name': 'tracking', 'max_event_size': TRACK_MAX_EVENT, } } }, 'processors': [ {'ENGINE': 'track.shim.LegacyFieldMappingProcessor'}, {'ENGINE': 'track.shim.PrefixedEventProcessor'} ] } }, 'segmentio': { 'ENGINE': 'eventtracking.backends.routing.RoutingBackend', 'OPTIONS': { 'backends': { 'segment': {'ENGINE': 'eventtracking.backends.segment.SegmentBackend'} }, 'processors': [ { 'ENGINE': 'eventtracking.processors.whitelist.NameWhitelistProcessor', 'OPTIONS': { 'whitelist': [] } }, { 'ENGINE': 'track.shim.GoogleAnalyticsProcessor' } ] } } } EVENT_TRACKING_PROCESSORS = [] EVENT_TRACKING_SEGMENTIO_EMIT_WHITELIST = [] #### PASSWORD POLICY SETTINGS ##### AUTH_PASSWORD_VALIDATORS = [ { "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { "NAME": "util.password_policy_validators.MinimumLengthValidator", "OPTIONS": { "min_length": 2 } }, { "NAME": "util.password_policy_validators.MaximumLengthValidator", "OPTIONS": { "max_length": 75 } }, ] PASSWORD_POLICY_COMPLIANCE_ROLLOUT_CONFIG = { 'ENFORCE_COMPLIANCE_ON_LOGIN': False } ##### ACCOUNT LOCKOUT DEFAULT PARAMETERS ##### MAX_FAILED_LOGIN_ATTEMPTS_ALLOWED = 6 MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS = 30 * 60 ### Apps only installed in some instances # The order of INSTALLED_APPS matters, so this tuple is the app name and the item in INSTALLED_APPS # that this app should be inserted *before*. A None here means it should be appended to the list. OPTIONAL_APPS = ( ('problem_builder', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('edx_sga', None), # edx-ora2 ('submissions', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('openassessment', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('openassessment.assessment', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('openassessment.fileupload', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('openassessment.workflow', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), ('openassessment.xblock', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), # edxval ('edxval', 'openedx.core.djangoapps.content.course_overviews.apps.CourseOverviewsConfig'), # Organizations App (http://github.com/edx/edx-organizations) ('organizations', None), # Enterprise App (http://github.com/edx/edx-enterprise) ('enterprise', None), ('consent', None), ('integrated_channels.integrated_channel', None), ('integrated_channels.degreed', None), ('integrated_channels.sap_success_factors', None), ('integrated_channels.xapi', None), ('integrated_channels.cornerstone', None), ) for app_name, insert_before in OPTIONAL_APPS: # First attempt to only find the module rather than actually importing it, # to avoid circular references - only try to import if it can't be found # by find_spec, which doesn't work with import hooks if importlib.util.find_spec(app_name) is None: try: __import__(app_name) except ImportError: continue try: INSTALLED_APPS.insert(INSTALLED_APPS.index(insert_before), app_name) except (IndexError, ValueError): INSTALLED_APPS.append(app_name) ### External auth usage -- prefixes for ENROLLMENT_DOMAIN SHIBBOLETH_DOMAIN_PREFIX = 'shib:' # Set request limits for maximum size of a request body and maximum number of GET/POST parameters. (>=Django 1.10) # Limits are currently disabled - but can be used for finer-grained denial-of-service protection. DATA_UPLOAD_MAX_MEMORY_SIZE = None DATA_UPLOAD_MAX_NUMBER_FIELDS = None ### Size of chunks into which asset uploads will be divided UPLOAD_CHUNK_SIZE_IN_MB = 10 ### Max size of asset uploads to GridFS MAX_ASSET_UPLOAD_FILE_SIZE_IN_MB = 10 # FAQ url to direct users to if they upload # a file that exceeds the above size MAX_ASSET_UPLOAD_FILE_SIZE_URL = "" ### Default value for entrance exam minimum score ENTRANCE_EXAM_MIN_SCORE_PCT = 50 ### Default language for a new course DEFAULT_COURSE_LANGUAGE = "en" # Specify XBlocks that should be treated as advanced problems. Each entry is a # dict: # 'component': the entry-point name of the XBlock. # 'boilerplate_name': an optional YAML template to be used. Specify as # None to omit. # ADVANCED_PROBLEM_TYPES = [ { 'component': 'openassessment', 'boilerplate_name': None, }, { 'component': 'drag-and-drop-v2', 'boilerplate_name': None }, { 'component': 'staffgradedxblock', 'boilerplate_name': None } ] ############### Settings for Retirement ##################### RETIRED_USERNAME_PREFIX = 'retired__user_' RETIRED_EMAIL_PREFIX = 'retired__user_' RETIRED_EMAIL_DOMAIN = 'retired.invalid' RETIRED_USERNAME_FMT = lambda settings: settings.RETIRED_USERNAME_PREFIX + '{}' RETIRED_EMAIL_FMT = lambda settings: settings.RETIRED_EMAIL_PREFIX + '{}@' + settings.RETIRED_EMAIL_DOMAIN derived('RETIRED_USERNAME_FMT', 'RETIRED_EMAIL_FMT') RETIRED_USER_SALTS = ['abc', '123'] RETIREMENT_SERVICE_WORKER_USERNAME = 'RETIREMENT_SERVICE_USER' RETIREMENT_SERVICE_USER_EMAIL = "[email protected]" RETIREMENT_SERVICE_USER_NAME = "retirement_worker" # These states are the default, but are designed to be overridden in configuration. RETIREMENT_STATES = [ 'PENDING', 'LOCKING_ACCOUNT', 'LOCKING_COMPLETE', # Use these states only when ENABLE_DISCUSSION_SERVICE is True. 'RETIRING_FORUMS', 'FORUMS_COMPLETE', # TODO - Change these states to be the LMS-only email opt-out - PLAT-2189 'RETIRING_EMAIL_LISTS', 'EMAIL_LISTS_COMPLETE', 'RETIRING_ENROLLMENTS', 'ENROLLMENTS_COMPLETE', # Use these states only when ENABLE_STUDENT_NOTES is True. 'RETIRING_NOTES', 'NOTES_COMPLETE', 'RETIRING_LMS', 'LMS_COMPLETE', 'ERRORED', 'ABORTED', 'COMPLETE', ] USERNAME_REPLACEMENT_WORKER = "REPLACE WITH VALID USERNAME" # Files and Uploads type filter values FILES_AND_UPLOAD_TYPE_FILTERS = { "Images": ['image/png', 'image/jpeg', 'image/jpg', 'image/gif', 'image/tiff', 'image/tif', 'image/x-icon', 'image/svg+xml', 'image/bmp', 'image/x-ms-bmp', ], "Documents": [ 'application/pdf', 'text/plain', 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', 'application/vnd.openxmlformats-officedocument.wordprocessingml.template', 'application/vnd.openxmlformats-officedocument.presentationml.presentation', 'application/vnd.openxmlformats-officedocument.presentationml.slideshow', 'application/vnd.openxmlformats-officedocument.presentationml.template', 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', 'application/vnd.openxmlformats-officedocument.spreadsheetml.template', 'application/msword', 'application/vnd.ms-excel', 'application/vnd.ms-powerpoint', 'application/csv', 'application/vnd.ms-excel.sheet.macroEnabled.12', 'text/x-tex', 'application/x-pdf', 'application/vnd.ms-excel.sheet.macroenabled.12', 'file/pdf', 'image/pdf', 'text/csv', 'text/pdf', 'text/x-sh', '\"application/pdf\"', ], "Audio": ['audio/mpeg', 'audio/mp3', 'audio/x-wav', 'audio/ogg', 'audio/wav', 'audio/aac', 'audio/x-m4a', 'audio/mp4', 'audio/x-ms-wma', ], "Code": ['application/json', 'text/html', 'text/javascript', 'application/javascript', 'text/css', 'text/x-python', 'application/x-java-jnlp-file', 'application/xml', 'application/postscript', 'application/x-javascript', 'application/java-vm', 'text/x-c++src', 'text/xml', 'text/x-scss', 'application/x-python-code', 'application/java-archive', 'text/x-python-script', 'application/x-ruby', 'application/mathematica', 'text/coffeescript', 'text/x-matlab', 'application/sql', 'text/php', ] } # Default to no Search Engine SEARCH_ENGINE = None ELASTIC_FIELD_MAPPINGS = { "start_date": { "type": "date" } } XBLOCK_SETTINGS = {} XBLOCK_FS_STORAGE_BUCKET = None XBLOCK_FS_STORAGE_PREFIX = None STUDIO_FRONTEND_CONTAINER_URL = None ################################ Settings for Credit Course Requirements ################################ # Initial delay used for retrying tasks. # Additional retries use longer delays. # Value is in seconds. CREDIT_TASK_DEFAULT_RETRY_DELAY = 30 # Maximum number of retries per task for errors that are not related # to throttling. CREDIT_TASK_MAX_RETRIES = 5 # Maximum age in seconds of timestamps we will accept # when a credit provider notifies us that a student has been approved # or denied for credit. CREDIT_PROVIDER_TIMESTAMP_EXPIRATION = 15 * 60 CREDIT_PROVIDER_SECRET_KEYS = {} # dir containing all themes COMPREHENSIVE_THEME_DIRS = [] # Theme directory locale paths COMPREHENSIVE_THEME_LOCALE_PATHS = [] # Theme to use when no site or site theme is defined, # set to None if you want to use openedx theme DEFAULT_SITE_THEME = None ENABLE_COMPREHENSIVE_THEMING = False ############################ Global Database Configuration ##################### DATABASE_ROUTERS = [ 'openedx.core.lib.django_courseware_routers.StudentModuleHistoryExtendedRouter', ] ############################ Cache Configuration ############################### CACHES = { 'blockstore': { 'KEY_PREFIX': 'blockstore', 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'TIMEOUT': '86400', # This data should be long-lived for performance, BundleCache handles invalidation 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'course_structure_cache': { 'KEY_PREFIX': 'course_structure', 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'TIMEOUT': '7200', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'celery': { 'KEY_PREFIX': 'celery', 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'TIMEOUT': '7200', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'mongo_metadata_inheritance': { 'KEY_PREFIX': 'mongo_metadata_inheritance', 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'TIMEOUT': 300, 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'staticfiles': { 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'KEY_PREFIX': 'staticfiles_general', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'default': { 'VERSION': '1', 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'KEY_PREFIX': 'default', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'configuration': { 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'KEY_PREFIX': 'configuration', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, 'general': { 'KEY_FUNCTION': 'util.memcache.safe_key', 'LOCATION': ['localhost:11211'], 'KEY_PREFIX': 'general', 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', }, } ############################ OAUTH2 Provider ################################### # 5 minute expiration time for JWT id tokens issued for external API requests. OAUTH_ID_TOKEN_EXPIRATION = 5 * 60 # Partner support link for CMS footer PARTNER_SUPPORT_EMAIL = '' # Affiliate cookie tracking AFFILIATE_COOKIE_NAME = 'dev_affiliate_id' ############## Settings for Studio Context Sensitive Help ############## HELP_TOKENS_INI_FILE = REPO_ROOT / "cms" / "envs" / "help_tokens.ini" HELP_TOKENS_LANGUAGE_CODE = lambda settings: settings.LANGUAGE_CODE HELP_TOKENS_VERSION = lambda settings: doc_version() HELP_TOKENS_BOOKS = { 'learner': 'https://edx.readthedocs.io/projects/open-edx-learner-guide', 'course_author': 'https://edx.readthedocs.io/projects/open-edx-building-and-running-a-course', } derived('HELP_TOKENS_LANGUAGE_CODE', 'HELP_TOKENS_VERSION') # Used with Email sending RETRY_ACTIVATION_EMAIL_MAX_ATTEMPTS = 5 RETRY_ACTIVATION_EMAIL_TIMEOUT = 0.5 # Software Secure request retry settings # Time in seconds before a retry of the task should be 60 mints. SOFTWARE_SECURE_REQUEST_RETRY_DELAY = 60 * 60 # Maximum of 6 retries before giving up. SOFTWARE_SECURE_RETRY_MAX_ATTEMPTS = 6 ############## DJANGO-USER-TASKS ############## # How long until database records about the outcome of a task and its artifacts get deleted? USER_TASKS_MAX_AGE = timedelta(days=7) ############## Settings for the Enterprise App ###################### ENTERPRISE_ENROLLMENT_API_URL = LMS_ROOT_URL + LMS_ENROLLMENT_API_PATH ENTERPRISE_SERVICE_WORKER_USERNAME = 'enterprise_worker' ENTERPRISE_API_CACHE_TIMEOUT = 3600 # Value is in seconds # The default value of this needs to be a 16 character string ENTERPRISE_CUSTOMER_CATALOG_DEFAULT_CONTENT_FILTER = {} BASE_COOKIE_DOMAIN = 'localhost' # This limits the type of roles that are submittable via the `student` app's manual enrollment # audit API. While this isn't used in CMS, it is used via Enterprise which is installed in # the CMS. Without this, we get errors. MANUAL_ENROLLMENT_ROLE_CHOICES = ['Learner', 'Support', 'Partner'] ############## Settings for the Discovery App ###################### COURSE_CATALOG_API_URL = 'http://localhost:8008/api/v1' # which access.py permission name to check in order to determine if a course is visible in # the course catalog. We default this to the legacy permission 'see_exists'. COURSE_CATALOG_VISIBILITY_PERMISSION = 'see_exists' # which access.py permission name to check in order to determine if a course about page is # visible. We default this to the legacy permission 'see_exists'. COURSE_ABOUT_VISIBILITY_PERMISSION = 'see_exists' DEFAULT_COURSE_VISIBILITY_IN_CATALOG = "both" DEFAULT_MOBILE_AVAILABLE = False # How long to cache OpenAPI schemas and UI, in seconds. OPENAPI_CACHE_TIMEOUT = 0 ################# Mobile URLS ########################## # These are URLs to the app store for mobile. MOBILE_STORE_URLS = {} ############################# Persistent Grades #################################### # Queue to use for updating persistent grades RECALCULATE_GRADES_ROUTING_KEY = DEFAULT_PRIORITY_QUEUE # Queue to use for updating grades due to grading policy change POLICY_CHANGE_GRADES_ROUTING_KEY = 'edx.lms.core.default' SOFTWARE_SECURE_VERIFICATION_ROUTING_KEY = 'edx.lms.core.default' # Rate limit for regrading tasks that a grading policy change can kick off POLICY_CHANGE_TASK_RATE_LIMIT = '300/h' ############## Settings for CourseGraph ############################ COURSEGRAPH_JOB_QUEUE = DEFAULT_PRIORITY_QUEUE ########## Settings for video transcript migration tasks ############ VIDEO_TRANSCRIPT_MIGRATIONS_JOB_QUEUE = DEFAULT_PRIORITY_QUEUE ########## Settings youtube thumbnails scraper tasks ############ SCRAPE_YOUTUBE_THUMBNAILS_JOB_QUEUE = DEFAULT_PRIORITY_QUEUE ########## Settings update search index task ############ UPDATE_SEARCH_INDEX_JOB_QUEUE = DEFAULT_PRIORITY_QUEUE ###################### VIDEO IMAGE STORAGE ###################### VIDEO_IMAGE_DEFAULT_FILENAME = 'images/video-images/default_video_image.png' VIDEO_IMAGE_SUPPORTED_FILE_FORMATS = { '.bmp': 'image/bmp', '.bmp2': 'image/x-ms-bmp', # PIL gives x-ms-bmp format '.gif': 'image/gif', '.jpg': 'image/jpeg', '.jpeg': 'image/jpeg', '.png': 'image/png' } VIDEO_IMAGE_MAX_FILE_SIZE_MB = '2 MB' VIDEO_IMAGE_MIN_FILE_SIZE_KB = '2 KB' VIDEO_IMAGE_MAX_WIDTH = 1280 VIDEO_IMAGE_MAX_HEIGHT = 720 VIDEO_IMAGE_MIN_WIDTH = 640 VIDEO_IMAGE_MIN_HEIGHT = 360 VIDEO_IMAGE_ASPECT_RATIO = 16 / 9.0 VIDEO_IMAGE_ASPECT_RATIO_TEXT = '16:9' VIDEO_IMAGE_ASPECT_RATIO_ERROR_MARGIN = 0.1 ###################### ZENDESK ###################### ZENDESK_URL = '' ZENDESK_USER = '' ZENDESK_API_KEY = '' ZENDESK_CUSTOM_FIELDS = {} ZENDESK_OAUTH_ACCESS_TOKEN = '' # A mapping of string names to Zendesk Group IDs # To get the IDs of your groups you can go to # {zendesk_url}/api/v2/groups.json ZENDESK_GROUP_ID_MAPPING = {} ############## Settings for Completion API ######################### # Once a user has watched this percentage of a video, mark it as complete: # (0.0 = 0%, 1.0 = 100%) COMPLETION_VIDEO_COMPLETE_PERCENTAGE = 0.95 ############### Settings for edx-rbac ############### SYSTEM_WIDE_ROLE_CLASSES = [] ############## Installed Django Apps ######################### from openedx.core.djangoapps.plugins import plugin_apps, plugin_settings, constants as plugin_constants INSTALLED_APPS.extend(plugin_apps.get_apps(plugin_constants.ProjectType.CMS)) plugin_settings.add_plugins(__name__, plugin_constants.ProjectType.CMS, plugin_constants.SettingsType.COMMON) # Course exports streamed in blocks of this size. 8192 or 8kb is the default # setting for the FileWrapper class used to iterate over the export file data. # See: https://docs.python.org/2/library/wsgiref.html#wsgiref.util.FileWrapper COURSE_EXPORT_DOWNLOAD_CHUNK_SIZE = 8192 # E-Commerce API Configuration ECOMMERCE_PUBLIC_URL_ROOT = 'http://localhost:8002' ECOMMERCE_API_URL = 'http://localhost:8002/api/v2' ECOMMERCE_API_SIGNING_KEY = 'SET-ME-PLEASE' CREDENTIALS_INTERNAL_SERVICE_URL = 'http://localhost:8005' CREDENTIALS_PUBLIC_SERVICE_URL = 'http://localhost:8005' ANALYTICS_DASHBOARD_URL = 'http://localhost:18110/courses' ANALYTICS_DASHBOARD_NAME = 'Your Platform Name Here Insights' COMMENTS_SERVICE_URL = 'http://localhost:18080' COMMENTS_SERVICE_KEY = 'password' CAS_SERVER_URL = "" CAS_EXTRA_LOGIN_PARAMS = "" CAS_ATTRIBUTE_CALLBACK = "" FINANCIAL_REPORTS = { 'STORAGE_TYPE': 'localfs', 'BUCKET': None, 'ROOT_PATH': 'sandbox', } CORS_ORIGIN_WHITELIST = [] CORS_ORIGIN_ALLOW_ALL = False LOGIN_REDIRECT_WHITELIST = [] ############### Settings for video pipeline ################## VIDEO_UPLOAD_PIPELINE = { 'BUCKET': '', 'ROOT_PATH': '', } DEPRECATED_ADVANCED_COMPONENT_TYPES = [] ########################## VIDEO IMAGE STORAGE ############################ VIDEO_IMAGE_SETTINGS = dict( VIDEO_IMAGE_MAX_BYTES=2 * 1024 * 1024, # 2 MB VIDEO_IMAGE_MIN_BYTES=2 * 1024, # 2 KB # Backend storage # STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage', # STORAGE_KWARGS=dict(bucket='video-image-bucket'), STORAGE_KWARGS=dict( location=MEDIA_ROOT, base_url=MEDIA_URL, ), DIRECTORY_PREFIX='video-images/', ) VIDEO_IMAGE_MAX_AGE = 31536000 ########################## VIDEO TRANSCRIPTS STORAGE ############################ VIDEO_TRANSCRIPTS_SETTINGS = dict( VIDEO_TRANSCRIPTS_MAX_BYTES=3 * 1024 * 1024, # 3 MB # Backend storage # STORAGE_CLASS='storages.backends.s3boto.S3BotoStorage', # STORAGE_KWARGS=dict(bucket='video-transcripts-bucket'), STORAGE_KWARGS=dict( location=MEDIA_ROOT, base_url=MEDIA_URL, ), DIRECTORY_PREFIX='video-transcripts/', ) VIDEO_TRANSCRIPTS_MAX_AGE = 31536000 ############################ TRANSCRIPT PROVIDERS SETTINGS ######################## # Note: These settings will also exist in video-encode-manager, so any update here # should also be done there. CIELO24_SETTINGS = { 'CIELO24_API_VERSION': 1, 'CIELO24_BASE_API_URL': "https://api.cielo24.com/api", 'CIELO24_LOGIN_URL': "https://api.cielo24.com/api/account/login" } ##### shoppingcart Payment ##### PAYMENT_SUPPORT_EMAIL = '[email protected]' ################################ Bulk Email ################################### # Parameters for breaking down course enrollment into subtasks. BULK_EMAIL_EMAILS_PER_TASK = 500 # Suffix used to construct 'from' email address for bulk emails. # A course-specific identifier is prepended. BULK_EMAIL_DEFAULT_FROM_EMAIL = '[email protected]' # Flag to indicate if individual email addresses should be logged as they are sent # a bulk email message. BULK_EMAIL_LOG_SENT_EMAILS = False ############### Settings for django file storage ################## DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' ###################### Grade Downloads ###################### # These keys are used for all of our asynchronous downloadable files, including # the ones that contain information other than grades. GRADES_DOWNLOAD = { 'STORAGE_CLASS': 'django.core.files.storage.FileSystemStorage', 'STORAGE_KWARGS': { 'location': '/tmp/edx-s3/grades', }, 'STORAGE_TYPE': None, 'BUCKET': None, 'ROOT_PATH': None, } ############### Settings swift ##################################### SWIFT_USERNAME = None SWIFT_KEY = None SWIFT_TENANT_ID = None SWIFT_TENANT_NAME = None SWIFT_AUTH_URL = None SWIFT_AUTH_VERSION = None SWIFT_REGION_NAME = None SWIFT_USE_TEMP_URLS = False SWIFT_TEMP_URL_KEY = None SWIFT_TEMP_URL_DURATION = 1800 # seconds ############### The SAML private/public key values ################ SOCIAL_AUTH_SAML_SP_PRIVATE_KEY = "" SOCIAL_AUTH_SAML_SP_PUBLIC_CERT = "" SOCIAL_AUTH_SAML_SP_PRIVATE_KEY_DICT = {} SOCIAL_AUTH_SAML_SP_PUBLIC_CERT_DICT = {} ############### Settings for facebook ############################## FACEBOOK_APP_ID = 'FACEBOOK_APP_ID' FACEBOOK_APP_SECRET = 'FACEBOOK_APP_SECRET' FACEBOOK_API_VERSION = 'v2.1' ############### Settings for django-fernet-fields ################## FERNET_KEYS = [ 'DUMMY KEY CHANGE BEFORE GOING TO PRODUCTION', ] ### Proctoring configuration (redirct URLs and keys shared between systems) #### PROCTORING_BACKENDS = { 'DEFAULT': 'null', # The null key needs to be quoted because # null is a language independent type in YAML 'null': {} } PROCTORING_SETTINGS = {} ################## BLOCKSTORE RELATED SETTINGS ######################### BLOCKSTORE_PUBLIC_URL_ROOT = 'http://localhost:18250' BLOCKSTORE_API_URL = 'http://localhost:18250/api/v1/' # Which of django's caches to use for storing anonymous user state for XBlocks # in the blockstore-based XBlock runtime XBLOCK_RUNTIME_V2_EPHEMERAL_DATA_CACHE = 'default' ###################### LEARNER PORTAL ################################ LEARNER_PORTAL_URL_ROOT = 'https://learner-portal-localhost:18000' ######################### MICROSITE ############################### MICROSITE_ROOT_DIR = '/edx/app/edxapp/edx-microsite' MICROSITE_CONFIGURATION = {} ############################ JWT ################################# JWT_ISSUER = 'http://127.0.0.1:8000/oauth2' DEFAULT_JWT_ISSUER = { 'ISSUER': 'http://127.0.0.1:8000/oauth2', 'AUDIENCE': 'SET-ME-PLEASE', 'SECRET_KEY': 'SET-ME-PLEASE' } JWT_EXPIRATION = 30 JWT_PRIVATE_SIGNING_KEY = None SYSLOG_SERVER = '' FEEDBACK_SUBMISSION_EMAIL = '' REGISTRATION_EXTRA_FIELDS = { 'confirm_email': 'hidden', 'level_of_education': 'optional', 'gender': 'optional', 'year_of_birth': 'optional', 'mailing_address': 'optional', 'goals': 'optional', 'honor_code': 'required', 'terms_of_service': 'hidden', 'city': 'hidden', 'country': 'hidden', } EDXAPP_PARSE_KEYS = {} ###################### DEPRECATED URLS ########################## # .. toggle_name: DISABLE_DEPRECATED_SIGNIN_URL # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: Toggle for removing the deprecated /signin url. # .. toggle_category: n/a # .. toggle_use_cases: incremental_release # .. toggle_creation_date: 2019-12-02 # .. toggle_expiration_date: 2020-06-01 # .. toggle_warnings: This url can be removed once it no longer has any real traffic. # .. toggle_tickets: ARCH-1253 # .. toggle_status: supported DISABLE_DEPRECATED_SIGNIN_URL = False # .. toggle_name: DISABLE_DEPRECATED_SIGNUP_URL # .. toggle_implementation: DjangoSetting # .. toggle_default: False # .. toggle_description: Toggle for removing the deprecated /signup url. # .. toggle_category: n/a # .. toggle_use_cases: incremental_release # .. toggle_creation_date: 2019-12-02 # .. toggle_expiration_date: 2020-06-01 # .. toggle_warnings: This url can be removed once it no longer has any real traffic. # .. toggle_tickets: ARCH-1253 # .. toggle_status: supported DISABLE_DEPRECATED_SIGNUP_URL = False
hoosteeno/mozillians
refs/heads/master
vendor-local/lib/python/rest_framework/negotiation.py
18
""" Content negotiation deals with selecting an appropriate renderer given the incoming request. Typically this will be based on the request's Accept header. """ from __future__ import unicode_literals from django.http import Http404 from rest_framework import exceptions from rest_framework.settings import api_settings from rest_framework.utils.mediatypes import order_by_precedence, media_type_matches from rest_framework.utils.mediatypes import _MediaType class BaseContentNegotiation(object): def select_parser(self, request, parsers): raise NotImplementedError('.select_parser() must be implemented') def select_renderer(self, request, renderers, format_suffix=None): raise NotImplementedError('.select_renderer() must be implemented') class DefaultContentNegotiation(BaseContentNegotiation): settings = api_settings def select_parser(self, request, parsers): """ Given a list of parsers and a media type, return the appropriate parser to handle the incoming request. """ for parser in parsers: if media_type_matches(parser.media_type, request.content_type): return parser return None def select_renderer(self, request, renderers, format_suffix=None): """ Given a request and a list of renderers, return a two-tuple of: (renderer, media type). """ # Allow URL style format override. eg. "?format=json format_query_param = self.settings.URL_FORMAT_OVERRIDE format = format_suffix or request.QUERY_PARAMS.get(format_query_param) if format: renderers = self.filter_renderers(renderers, format) accepts = self.get_accept_list(request) # Check the acceptable media types against each renderer, # attempting more specific media types first # NB. The inner loop here isn't as bad as it first looks :) # Worst case is we're looping over len(accept_list) * len(self.renderers) for media_type_set in order_by_precedence(accepts): for renderer in renderers: for media_type in media_type_set: if media_type_matches(renderer.media_type, media_type): # Return the most specific media type as accepted. if (_MediaType(renderer.media_type).precedence > _MediaType(media_type).precedence): # Eg client requests '*/*' # Accepted media type is 'application/json' return renderer, renderer.media_type else: # Eg client requests 'application/json; indent=8' # Accepted media type is 'application/json; indent=8' return renderer, media_type raise exceptions.NotAcceptable(available_renderers=renderers) def filter_renderers(self, renderers, format): """ If there is a '.json' style format suffix, filter the renderers so that we only negotiation against those that accept that format. """ renderers = [renderer for renderer in renderers if renderer.format == format] if not renderers: raise Http404 return renderers def get_accept_list(self, request): """ Given the incoming request, return a tokenised list of media type strings. Allows URL style accept override. eg. "?accept=application/json" """ header = request.META.get('HTTP_ACCEPT', '*/*') header = request.QUERY_PARAMS.get(self.settings.URL_ACCEPT_OVERRIDE, header) return [token.strip() for token in header.split(',')]
jkyeung/XlsxWriter
refs/heads/master
xlsxwriter/test/comparison/test_types01.py
1
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2016, John McNamara, [email protected] # from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'types01.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {} def test_write_number_as_text(self): """Test writing numbers as text.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() worksheet.write_string(0, 0, 'Hello') worksheet.write_string(1, 0, '123') workbook.close() self.assertExcelEqual() def test_write_number_as_text_with_write(self): """Test writing numbers as text using write() without conversion.""" workbook = Workbook(self.got_filename, {'strings_to_numbers': False}) worksheet = workbook.add_worksheet() worksheet.write(0, 0, 'Hello') worksheet.write(1, 0, '123') workbook.close() self.assertExcelEqual()
stephensong/runtime
refs/heads/master
deps/v8/tools/gc-nvp-trace-processor.py
31
#!/usr/bin/env python # # Copyright 2010 the V8 project authors. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # # This is an utility for plotting charts based on GC traces produced by V8 when # run with flags --trace-gc --trace-gc-nvp. Relies on gnuplot for actual # plotting. # # Usage: gc-nvp-trace-processor.py <GC-trace-filename> # from __future__ import with_statement import sys, types, subprocess, math import gc_nvp_common def flatten(l): flat = [] for i in l: flat.extend(i) return flat def gnuplot(script): gnuplot = subprocess.Popen(["gnuplot"], stdin=subprocess.PIPE) gnuplot.stdin.write(script) gnuplot.stdin.close() gnuplot.wait() x1y1 = 'x1y1' x1y2 = 'x1y2' x2y1 = 'x2y1' x2y2 = 'x2y2' class Item(object): def __init__(self, title, field, axis = x1y1, **keywords): self.title = title self.axis = axis self.props = keywords if type(field) is types.ListType: self.field = field else: self.field = [field] def fieldrefs(self): return self.field def to_gnuplot(self, context): args = ['"%s"' % context.datafile, 'using %s' % context.format_fieldref(self.field), 'title "%s"' % self.title, 'axis %s' % self.axis] if 'style' in self.props: args.append('with %s' % self.props['style']) if 'lc' in self.props: args.append('lc rgb "%s"' % self.props['lc']) if 'fs' in self.props: args.append('fs %s' % self.props['fs']) return ' '.join(args) class Plot(object): def __init__(self, *items): self.items = items def fieldrefs(self): return flatten([item.fieldrefs() for item in self.items]) def to_gnuplot(self, ctx): return 'plot ' + ', '.join([item.to_gnuplot(ctx) for item in self.items]) class Set(object): def __init__(self, value): self.value = value def to_gnuplot(self, ctx): return 'set ' + self.value def fieldrefs(self): return [] class Context(object): def __init__(self, datafile, field_to_index): self.datafile = datafile self.field_to_index = field_to_index def format_fieldref(self, fieldref): return ':'.join([str(self.field_to_index[field]) for field in fieldref]) def collect_fields(plot): field_to_index = {} fields = [] def add_field(field): if field not in field_to_index: fields.append(field) field_to_index[field] = len(fields) for field in flatten([item.fieldrefs() for item in plot]): add_field(field) return (fields, field_to_index) def is_y2_used(plot): for subplot in plot: if isinstance(subplot, Plot): for item in subplot.items: if item.axis == x1y2 or item.axis == x2y2: return True return False def get_field(trace_line, field): t = type(field) if t is types.StringType: return trace_line[field] elif t is types.FunctionType: return field(trace_line) def generate_datafile(datafile_name, trace, fields): with open(datafile_name, 'w') as datafile: for line in trace: data_line = [str(get_field(line, field)) for field in fields] datafile.write('\t'.join(data_line)) datafile.write('\n') def generate_script_and_datafile(plot, trace, datafile, output): (fields, field_to_index) = collect_fields(plot) generate_datafile(datafile, trace, fields) script = [ 'set terminal png', 'set output "%s"' % output, 'set autoscale', 'set ytics nomirror', 'set xtics nomirror', 'set key below' ] if is_y2_used(plot): script.append('set autoscale y2') script.append('set y2tics') context = Context(datafile, field_to_index) for item in plot: script.append(item.to_gnuplot(context)) return '\n'.join(script) def plot_all(plots, trace, prefix): charts = [] for plot in plots: outfilename = "%s_%d.png" % (prefix, len(charts)) charts.append(outfilename) script = generate_script_and_datafile(plot, trace, '~datafile', outfilename) print 'Plotting %s...' % outfilename gnuplot(script) return charts def reclaimed_bytes(row): return row['total_size_before'] - row['total_size_after'] def other_scope(r): if r['gc'] == 's': # there is no 'other' scope for scavenging collections. return 0 return r['pause'] - r['mark'] - r['sweep'] - r['external'] def scavenge_scope(r): if r['gc'] == 's': return r['pause'] - r['external'] return 0 def real_mutator(r): return r['mutator'] - r['steps_took'] plots = [ [ Set('style fill solid 0.5 noborder'), Set('style histogram rowstacked'), Set('style data histograms'), Plot(Item('Scavenge', scavenge_scope, lc = 'green'), Item('Marking', 'mark', lc = 'purple'), Item('Sweep', 'sweep', lc = 'blue'), Item('External', 'external', lc = '#489D43'), Item('Other', other_scope, lc = 'grey'), Item('IGC Steps', 'steps_took', lc = '#FF6347')) ], [ Set('style fill solid 0.5 noborder'), Set('style histogram rowstacked'), Set('style data histograms'), Plot(Item('Scavenge', scavenge_scope, lc = 'green'), Item('Marking', 'mark', lc = 'purple'), Item('Sweep', 'sweep', lc = 'blue'), Item('External', 'external', lc = '#489D43'), Item('Other', other_scope, lc = '#ADD8E6'), Item('External', 'external', lc = '#D3D3D3')) ], [ Plot(Item('Mutator', real_mutator, lc = 'black', style = 'lines')) ], [ Set('style histogram rowstacked'), Set('style data histograms'), Plot(Item('Heap Size (before GC)', 'total_size_before', x1y2, fs = 'solid 0.4 noborder', lc = 'green'), Item('Total holes (after GC)', 'holes_size_before', x1y2, fs = 'solid 0.4 noborder', lc = 'red'), Item('GC Time', ['i', 'pause'], style = 'lines', lc = 'red')) ], [ Set('style histogram rowstacked'), Set('style data histograms'), Plot(Item('Heap Size (after GC)', 'total_size_after', x1y2, fs = 'solid 0.4 noborder', lc = 'green'), Item('Total holes (after GC)', 'holes_size_after', x1y2, fs = 'solid 0.4 noborder', lc = 'red'), Item('GC Time', ['i', 'pause'], style = 'lines', lc = 'red')) ], [ Set('style fill solid 0.5 noborder'), Set('style data histograms'), Plot(Item('Allocated', 'allocated'), Item('Reclaimed', reclaimed_bytes), Item('Promoted', 'promoted', style = 'lines', lc = 'black')) ], ] def freduce(f, field, trace, init): return reduce(lambda t,r: f(t, r[field]), trace, init) def calc_total(trace, field): return freduce(lambda t,v: t + long(v), field, trace, long(0)) def calc_max(trace, field): return freduce(lambda t,r: max(t, r), field, trace, 0) def count_nonzero(trace, field): return freduce(lambda t,r: t if r == 0 else t + 1, field, trace, 0) def process_trace(filename): trace = gc_nvp_common.parse_gc_trace(filename) marksweeps = filter(lambda r: r['gc'] == 'ms', trace) scavenges = filter(lambda r: r['gc'] == 's', trace) globalgcs = filter(lambda r: r['gc'] != 's', trace) charts = plot_all(plots, trace, filename) def stats(out, prefix, trace, field): n = len(trace) total = calc_total(trace, field) max = calc_max(trace, field) if n > 0: avg = total / n else: avg = 0 if n > 1: dev = math.sqrt(freduce(lambda t,r: t + (r - avg) ** 2, field, trace, 0) / (n - 1)) else: dev = 0 out.write('<tr><td>%s</td><td>%d</td><td>%d</td>' '<td>%d</td><td>%d [dev %f]</td></tr>' % (prefix, n, total, max, avg, dev)) def HumanReadable(size): suffixes = ['bytes', 'kB', 'MB', 'GB'] power = 1 for i in range(len(suffixes)): if size < power*1024: return "%.1f" % (float(size) / power) + " " + suffixes[i] power *= 1024 def throughput(name, trace): total_live_after = calc_total(trace, 'total_size_after') total_live_before = calc_total(trace, 'total_size_before') total_gc = calc_total(trace, 'pause') if total_gc == 0: return out.write('GC %s Throughput (after): %s / %s ms = %s/ms<br/>' % (name, HumanReadable(total_live_after), total_gc, HumanReadable(total_live_after / total_gc))) out.write('GC %s Throughput (before): %s / %s ms = %s/ms<br/>' % (name, HumanReadable(total_live_before), total_gc, HumanReadable(total_live_before / total_gc))) with open(filename + '.html', 'w') as out: out.write('<html><body>') out.write('<table>') out.write('<tr><td>Phase</td><td>Count</td><td>Time (ms)</td>') out.write('<td>Max</td><td>Avg</td></tr>') stats(out, 'Total in GC', trace, 'pause') stats(out, 'Scavenge', scavenges, 'pause') stats(out, 'MarkSweep', marksweeps, 'pause') stats(out, 'Mark', filter(lambda r: r['mark'] != 0, trace), 'mark') stats(out, 'Sweep', filter(lambda r: r['sweep'] != 0, trace), 'sweep') stats(out, 'External', filter(lambda r: r['external'] != 0, trace), 'external') out.write('</table>') throughput('TOTAL', trace) throughput('MS', marksweeps) throughput('OLDSPACE', globalgcs) out.write('<br/>') for chart in charts: out.write('<img src="%s">' % chart) out.write('</body></html>') print "%s generated." % (filename + '.html') if len(sys.argv) != 2: print "Usage: %s <GC-trace-filename>" % sys.argv[0] sys.exit(1) process_trace(sys.argv[1])
r0k3/arctic
refs/heads/master
arctic/store/_version_store_utils.py
1
from bson import Binary import hashlib import numpy as np import pickle import pandas as pd import functools import six from pandas.compat import pickle_compat def _split_arrs(array_2d, slices): """ Equivalent to numpy.split(array_2d, slices), but avoids fancy indexing """ if len(array_2d) == 0: return np.empty(0, dtype=np.object) rtn = np.empty(len(slices) + 1, dtype=np.object) start = 0 for i, s in enumerate(slices): rtn[i] = array_2d[start:s] start = s rtn[-1] = array_2d[start:] return rtn def checksum(symbol, doc): """ Checksum the passed in dictionary """ sha = hashlib.sha1() sha.update(symbol.encode('ascii')) for k in sorted(iter(doc.keys()), reverse=True): v = doc[k] if isinstance(v, six.binary_type): sha.update(doc[k]) else: sha.update(str(doc[k]).encode('ascii')) return Binary(sha.digest()) def cleanup(arctic_lib, symbol, version_ids): """ Helper method for cleaning up chunks from a version store """ collection = arctic_lib.get_top_level_collection() # Remove any chunks which contain just the parents, at the outset # We do this here, because $pullALL will make an empty array: [] # and the index which contains the parents field will fail the unique constraint. for v in version_ids: # Remove all documents which only contain the parent collection.delete_many({'symbol': symbol, 'parent': {'$all': [v], '$size': 1}, }) # Pull the parent from the parents field collection.update_many({'symbol': symbol, 'parent': v}, {'$pull': {'parent': v}}) # Now remove all chunks which aren't parented - this is unlikely, as they will # have been removed by the above collection.delete_one({'symbol': symbol, 'parent': {'$size': 0}}) def _define_compat_pickle_load(): """Factory function to initialise the correct Pickle load function based on the Pandas version. """ if pd.__version__.startswith("0.14"): return pickle.load return functools.partial(pickle_compat.load, compat=True) # Initialise the pickle load function and delete the factory function. pickle_compat_load = _define_compat_pickle_load() del _define_compat_pickle_load
kalahbrown/HueBigSQL
refs/heads/master
desktop/core/ext-py/Paste-2.0.1/tests/test_multidict.py
47
# -*- coding: utf-8 -*- # (c) 2007 Ian Bicking and Philip Jenvey; written for Paste (http://pythonpaste.org) # Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php import cgi import six from six.moves import StringIO from nose.tools import assert_raises from paste.util.multidict import MultiDict, UnicodeMultiDict def test_dict(): d = MultiDict({'a': 1}) assert d.items() == [('a', 1)] d['b'] = 2 d['c'] = 3 assert d.items() == [('a', 1), ('b', 2), ('c', 3)] d['b'] = 4 assert d.items() == [('a', 1), ('c', 3), ('b', 4)] d.add('b', 5) assert_raises(KeyError, d.getone, "b") assert d.getall('b') == [4, 5] assert d.items() == [('a', 1), ('c', 3), ('b', 4), ('b', 5)] del d['b'] assert d.items() == [('a', 1), ('c', 3)] assert d.pop('xxx', 5) == 5 assert d.getone('a') == 1 assert d.popitem() == ('c', 3) assert d.items() == [('a', 1)] item = [] assert d.setdefault('z', item) is item assert d.items() == [('a', 1), ('z', item)] assert d.setdefault('y', 6) == 6 assert d.mixed() == {'a': 1, 'y': 6, 'z': item} assert d.dict_of_lists() == {'a': [1], 'y': [6], 'z': [item]} assert 'a' in d dcopy = d.copy() assert dcopy is not d assert dcopy == d d['x'] = 'x test' assert dcopy != d d[(1, None)] = (None, 1) assert d.items() == [('a', 1), ('z', []), ('y', 6), ('x', 'x test'), ((1, None), (None, 1))] def test_unicode_dict(): _test_unicode_dict() _test_unicode_dict(decode_param_names=True) def _test_unicode_dict(decode_param_names=False): d = UnicodeMultiDict(MultiDict({b'a': 'a test'})) d.encoding = 'utf-8' d.errors = 'ignore' if decode_param_names: key_str = six.text_type k = lambda key: key d.decode_keys = True else: key_str = six.binary_type k = lambda key: key.encode() def assert_unicode(obj): assert isinstance(obj, six.text_type) def assert_key_str(obj): assert isinstance(obj, key_str) def assert_unicode_item(obj): key, value = obj assert isinstance(key, key_str) assert isinstance(value, six.text_type) assert d.items() == [(k('a'), u'a test')] map(assert_key_str, d.keys()) map(assert_unicode, d.values()) d[b'b'] = b'2 test' d[b'c'] = b'3 test' assert d.items() == [(k('a'), u'a test'), (k('b'), u'2 test'), (k('c'), u'3 test')] list(map(assert_unicode_item, d.items())) d[k('b')] = b'4 test' assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'), (k('b'), u'4 test')], d.items() list(map(assert_unicode_item, d.items())) d.add(k('b'), b'5 test') assert_raises(KeyError, d.getone, k("b")) assert d.getall(k('b')) == [u'4 test', u'5 test'] map(assert_unicode, d.getall('b')) assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test'), (k('b'), u'4 test'), (k('b'), u'5 test')] list(map(assert_unicode_item, d.items())) del d[k('b')] assert d.items() == [(k('a'), u'a test'), (k('c'), u'3 test')] list(map(assert_unicode_item, d.items())) assert d.pop('xxx', u'5 test') == u'5 test' assert isinstance(d.pop('xxx', u'5 test'), six.text_type) assert d.getone(k('a')) == u'a test' assert isinstance(d.getone(k('a')), six.text_type) assert d.popitem() == (k('c'), u'3 test') d[k('c')] = b'3 test' assert_unicode_item(d.popitem()) assert d.items() == [(k('a'), u'a test')] list(map(assert_unicode_item, d.items())) item = [] assert d.setdefault(k('z'), item) is item items = d.items() assert items == [(k('a'), u'a test'), (k('z'), item)] assert isinstance(items[1][0], key_str) assert isinstance(items[1][1], list) assert isinstance(d.setdefault(k('y'), b'y test'), six.text_type) assert isinstance(d[k('y')], six.text_type) assert d.mixed() == {k('a'): u'a test', k('y'): u'y test', k('z'): item} assert d.dict_of_lists() == {k('a'): [u'a test'], k('y'): [u'y test'], k('z'): [item]} del d[k('z')] list(map(assert_unicode_item, six.iteritems(d.mixed()))) list(map(assert_unicode_item, [(key, value[0]) for \ key, value in six.iteritems(d.dict_of_lists())])) assert k('a') in d dcopy = d.copy() assert dcopy is not d assert dcopy == d d[k('x')] = 'x test' assert dcopy != d d[(1, None)] = (None, 1) assert d.items() == [(k('a'), u'a test'), (k('y'), u'y test'), (k('x'), u'x test'), ((1, None), (None, 1))] item = d.items()[-1] assert isinstance(item[0], tuple) assert isinstance(item[1], tuple) fs = cgi.FieldStorage() fs.name = 'thefile' fs.filename = 'hello.txt' fs.file = StringIO('hello') d[k('f')] = fs ufs = d[k('f')] assert isinstance(ufs, cgi.FieldStorage) assert ufs is not fs assert ufs.name == fs.name assert isinstance(ufs.name, str if six.PY3 else key_str) assert ufs.filename == fs.filename assert isinstance(ufs.filename, six.text_type) assert isinstance(ufs.value, str) assert ufs.value == 'hello'
tashaband/RYU295
refs/heads/master
ryu/contrib/ncclient/transport/__init__.py
69
# Copyright 2009 Shikhar Bhushan # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. "Transport layer" from session import Session, SessionListener from ssh import SSHSession from errors import * __all__ = [ 'Session', 'SessionListener', 'SSHSession', 'TransportError', 'AuthenticationError', 'SessionCloseError', 'SSHError', 'SSHUnknownHostError' ]
tobsan/softwarecontainer
refs/heads/master
servicetest/filesystem/fileapp.py
6
#!/usr/bin/env python # Copyright (C) 2016-2017 Pelagicore AB # # Permission to use, copy, modify, and/or distribute this software for # any purpose with or without fee is hereby granted, provided that the # above copyright notice and this permission notice appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL # WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR # BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES # OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, # WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, # ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS # SOFTWARE. # # For further information see LICENSE import os import argparse import sys if __name__ == '__main__': parser = argparse.ArgumentParser(description='Process some integers.') parser.add_argument('mode', choices=["create", "check", "delete"], help='Mode supported: create, check, delete') parser.add_argument('name', type=str, default='/lala.txt', help='Filename to be used') args = parser.parse_args() if args.mode == 'create': f = open(args.name, 'w') f.write('eriojweroijerfioerjf') f.close() exit(0) elif args.mode == 'check': retval = os.path.exists(args.name) exit(retval) elif args.mode == 'delete': if os.path.exists(args.name): os.remove(args.name) exit(1) exit(0)
ibinti/intellij-community
refs/heads/master
python/testData/refactoring/rename/docstringParams.py
80
class SomeClass(object): """ Awesome class @ivar someVar: great stuff @type someVar: string """ def __init__(self): self.some<caret>Var = None
mcella/django
refs/heads/master
tests/urlpatterns_reverse/nonimported_module.py
633
def view(request): """Stub view""" pass
brstra/ozvuchka
refs/heads/master
ozvuchka/default_settings.py
1
""" Django settings for ozvuchka project. Generated by 'django-admin startproject' using Django 1.9.1. For more information on this file, see https://docs.djangoproject.com/en/1.9/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.9/ref/settings/ """ import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'Asdga$t5agqhAFDBzdBW64hw$Nsdgbhs' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', ] MIDDLEWARE_CLASSES = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'ozvuchka.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'ozvuchka.wsgi.application' # Database # https://docs.djangoproject.com/en/1.9/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Password validation # https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/1.9/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.9/howto/static-files/ STATIC_URL = '/static/'
kilon/sverchok
refs/heads/master
nodes/modifier_make/wireframe.py
1
# ##### BEGIN GPL LICENSE BLOCK ##### # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software Foundation, # Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # ##### END GPL LICENSE BLOCK ##### import bpy from bpy.props import FloatProperty, BoolProperty import bmesh from sverchok.node_tree import SverchCustomTreeNode from sverchok.data_structure import (updateNode, Vector_generate, repeat_last, SvSetSocketAnyType, SvGetSocketAnyType) def wireframe(vertices, faces, t, o, replace, boundary, even_offset, relative_offset): if not faces or not vertices: return False if len(faces[0]) == 2: return False bm = bmesh.new() bm_verts = [bm.verts.new(v) for v in vertices] for face in faces: bm.faces.new([bm_verts[i] for i in face]) bmesh.ops.recalc_face_normals(bm, faces=bm.faces[:]) res = bmesh.ops.wireframe(bm, faces=bm.faces[:], thickness=t, offset=o, use_replace=replace, use_boundary=boundary, use_even_offset=even_offset, use_relative_offset=relative_offset) #bmesh.ops.wireframe(bm, faces, thickness, offset, use_replace, # use_boundary, use_even_offset, use_crease, crease_weight, thickness, use_relative_offset, material_offset) edges = [] faces = [] bm.verts.index_update() bm.edges.index_update() bm.faces.index_update() for edge in bm.edges[:]: edges.append([v.index for v in edge.verts[:]]) verts = [vert.co[:] for vert in bm.verts[:]] for face in bm.faces: faces.append([v.index for v in face.verts[:]]) bm.clear() bm.free() return (verts, edges, faces) class SvWireframeNode(bpy.types.Node, SverchCustomTreeNode): '''Wireframe''' bl_idname = 'SvWireframeNode' bl_label = 'Wireframe' bl_icon = 'OUTLINER_OB_EMPTY' thickness = FloatProperty(name='thickness', description='thickness', default=0.01, min=0.0, update=updateNode) offset = FloatProperty(name='offset', description='offset', default=0.01, min=0.0, update=updateNode) replace = BoolProperty(name='replace', description='replace', default=True, update=updateNode) even_offset = BoolProperty(name='even_offset', description='even_offset', default=True, update=updateNode) relative_offset = BoolProperty(name='relative_offset', description='even_offset', default=False, update=updateNode) boundary = BoolProperty(name='boundary', description='boundry', default=True, update=updateNode) def sv_init(self, context): self.inputs.new('StringsSocket', 'thickness').prop_name = 'thickness' self.inputs.new('StringsSocket', 'Offset').prop_name = 'offset' self.inputs.new('VerticesSocket', 'vertices', 'vertices') self.inputs.new('StringsSocket', 'polygons', 'polygons') self.outputs.new('VerticesSocket', 'vertices', 'vertices') self.outputs.new('StringsSocket', 'edges', 'edges') self.outputs.new('StringsSocket', 'polygons', 'polygons') def draw_buttons(self, context, layout): layout.prop(self, 'boundary', text="Boundary") layout.prop(self, 'even_offset', text="Offset even") layout.prop(self, 'relative_offset', text="Offset relative") layout.prop(self, 'replace', text="Replace") def process(self): if not ('vertices' in self.outputs and self.outputs['vertices'].links or 'edges' in self.outputs and self.outputs['edges'].links or 'polygons' in self.outputs and self.outputs['polygons'].links): return if 'vertices' in self.inputs and self.inputs['vertices'].links and \ 'polygons' in self.inputs and self.inputs['polygons'].links: verts = Vector_generate(SvGetSocketAnyType(self, self.inputs['vertices'])) polys = SvGetSocketAnyType(self, self.inputs['polygons']) if 'thickness' in self.inputs: thickness = self.inputs['thickness'].sv_get()[0] else: thickness = [self.thickness] verts_out = [] edges_out = [] polys_out = [] for v, p, t in zip(verts, polys, repeat_last(thickness)): res = wireframe(v, p, t, self.offset, self.replace, self.boundary, self.even_offset, self.relative_offset) if not res: return verts_out.append(res[0]) edges_out.append(res[1]) polys_out.append(res[2]) if 'vertices' in self.outputs and self.outputs['vertices'].links: SvSetSocketAnyType(self, 'vertices', verts_out) if 'edges' in self.outputs and self.outputs['edges'].links: SvSetSocketAnyType(self, 'edges', edges_out) if 'polygons' in self.outputs and self.outputs['polygons'].links: SvSetSocketAnyType(self, 'polygons', polys_out) def update_socket(self, context): self.update() def register(): bpy.utils.register_class(SvWireframeNode) def unregister(): bpy.utils.unregister_class(SvWireframeNode)
deserted/letsencrypt
refs/heads/master
letsencrypt/tests/crypto_util_test.py
3
"""Tests for letsencrypt.crypto_util.""" import logging import shutil import tempfile import unittest import OpenSSL import mock import zope.component from letsencrypt import interfaces from letsencrypt.tests import test_util RSA256_KEY = test_util.load_vector('rsa256_key.pem') RSA512_KEY = test_util.load_vector('rsa512_key.pem') CERT = test_util.load_vector('cert.pem') SAN_CERT = test_util.load_vector('cert-san.pem') class InitSaveKeyTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.init_save_key.""" def setUp(self): logging.disable(logging.CRITICAL) zope.component.provideUtility( mock.Mock(strict_permissions=True), interfaces.IConfig) self.key_dir = tempfile.mkdtemp('key_dir') def tearDown(self): logging.disable(logging.NOTSET) shutil.rmtree(self.key_dir) @classmethod def _call(cls, key_size, key_dir): from letsencrypt.crypto_util import init_save_key return init_save_key(key_size, key_dir, 'key-letsencrypt.pem') @mock.patch('letsencrypt.crypto_util.make_key') def test_success(self, mock_make): mock_make.return_value = 'key_pem' key = self._call(1024, self.key_dir) self.assertEqual(key.pem, 'key_pem') self.assertTrue('key-letsencrypt.pem' in key.file) @mock.patch('letsencrypt.crypto_util.make_key') def test_key_failure(self, mock_make): mock_make.side_effect = ValueError self.assertRaises(ValueError, self._call, 431, self.key_dir) class InitSaveCSRTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.init_save_csr.""" def setUp(self): zope.component.provideUtility( mock.Mock(strict_permissions=True), interfaces.IConfig) self.csr_dir = tempfile.mkdtemp('csr_dir') def tearDown(self): shutil.rmtree(self.csr_dir) @mock.patch('letsencrypt.crypto_util.make_csr') @mock.patch('letsencrypt.crypto_util.le_util.make_or_verify_dir') def test_it(self, unused_mock_verify, mock_csr): from letsencrypt.crypto_util import init_save_csr mock_csr.return_value = ('csr_pem', 'csr_der') csr = init_save_csr( mock.Mock(pem='dummy_key'), 'example.com', self.csr_dir, 'csr-letsencrypt.pem') self.assertEqual(csr.data, 'csr_der') self.assertTrue('csr-letsencrypt.pem' in csr.file) class MakeCSRTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.make_csr.""" @classmethod def _call(cls, *args, **kwargs): from letsencrypt.crypto_util import make_csr return make_csr(*args, **kwargs) def test_san(self): from letsencrypt.crypto_util import get_sans_from_csr # TODO: Fails for RSA256_KEY csr_pem, csr_der = self._call( RSA512_KEY, ['example.com', 'www.example.com']) self.assertEqual( ['example.com', 'www.example.com'], get_sans_from_csr(csr_pem)) self.assertEqual( ['example.com', 'www.example.com'], get_sans_from_csr( csr_der, OpenSSL.crypto.FILETYPE_ASN1)) class ValidCSRTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.valid_csr.""" @classmethod def _call(cls, csr): from letsencrypt.crypto_util import valid_csr return valid_csr(csr) def test_valid_pem_true(self): self.assertTrue(self._call(test_util.load_vector('csr.pem'))) def test_valid_pem_san_true(self): self.assertTrue(self._call(test_util.load_vector('csr-san.pem'))) def test_valid_der_false(self): self.assertFalse(self._call(test_util.load_vector('csr.der'))) def test_valid_der_san_false(self): self.assertFalse(self._call(test_util.load_vector('csr-san.der'))) def test_empty_false(self): self.assertFalse(self._call('')) def test_random_false(self): self.assertFalse(self._call('foo bar')) class CSRMatchesPubkeyTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.csr_matches_pubkey.""" @classmethod def _call(cls, *args, **kwargs): from letsencrypt.crypto_util import csr_matches_pubkey return csr_matches_pubkey(*args, **kwargs) def test_valid_true(self): self.assertTrue(self._call( test_util.load_vector('csr.pem'), RSA512_KEY)) def test_invalid_false(self): self.assertFalse(self._call( test_util.load_vector('csr.pem'), RSA256_KEY)) class MakeKeyTest(unittest.TestCase): # pylint: disable=too-few-public-methods """Tests for letsencrypt.crypto_util.make_key.""" def test_it(self): # pylint: disable=no-self-use from letsencrypt.crypto_util import make_key # Do not test larger keys as it takes too long. OpenSSL.crypto.load_privatekey( OpenSSL.crypto.FILETYPE_PEM, make_key(1024)) class ValidPrivkeyTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.valid_privkey.""" @classmethod def _call(cls, privkey): from letsencrypt.crypto_util import valid_privkey return valid_privkey(privkey) def test_valid_true(self): self.assertTrue(self._call(RSA256_KEY)) def test_empty_false(self): self.assertFalse(self._call('')) def test_random_false(self): self.assertFalse(self._call('foo bar')) class GetSANsFromCertTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.get_sans_from_cert.""" @classmethod def _call(cls, *args, **kwargs): from letsencrypt.crypto_util import get_sans_from_cert return get_sans_from_cert(*args, **kwargs) def test_single(self): self.assertEqual([], self._call(test_util.load_vector('cert.pem'))) def test_san(self): self.assertEqual( ['example.com', 'www.example.com'], self._call(test_util.load_vector('cert-san.pem'))) class GetSANsFromCSRTest(unittest.TestCase): """Tests for letsencrypt.crypto_util.get_sans_from_csr.""" @classmethod def _call(cls, *args, **kwargs): from letsencrypt.crypto_util import get_sans_from_csr return get_sans_from_csr(*args, **kwargs) def test_extract_one_san(self): self.assertEqual(['example.com'], self._call( test_util.load_vector('csr.pem'))) def test_extract_two_sans(self): self.assertEqual(['example.com', 'www.example.com'], self._call( test_util.load_vector('csr-san.pem'))) def test_extract_six_sans(self): self.assertEqual(self._call(test_util.load_vector('csr-6sans.pem')), ["example.com", "example.org", "example.net", "example.info", "subdomain.example.com", "other.subdomain.example.com"]) def test_parse_non_csr(self): self.assertRaises(OpenSSL.crypto.Error, self._call, "hello there") def test_parse_no_sans(self): self.assertEqual( [], self._call(test_util.load_vector('csr-nosans.pem'))) if __name__ == '__main__': unittest.main() # pragma: no cover
incuna/django-denorm
refs/heads/master
test_denorm_project/test_denorm_project/urls.py
27
from django.conf.urls.defaults import patterns, include, url # Uncomment the next two lines to enable the admin: # from django.contrib import admin # admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^$', 'test_project.views.home', name='home'), # url(r'^test_project/', include('test_project.foo.urls')), # Uncomment the admin/doc line below to enable admin documentation: # url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: # url(r'^admin/', include(admin.site.urls)), )
ivanhorvath/openshift-tools
refs/heads/prod
ansible/roles/lib_oa_openshift/src/ansible/oc_service.py
50
# pylint: skip-file # flake8: noqa def main(): ''' ansible oc module for services ''' module = AnsibleModule( argument_spec=dict( kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), state=dict(default='present', type='str', choices=['present', 'absent', 'list']), debug=dict(default=False, type='bool'), namespace=dict(default='default', type='str'), name=dict(default=None, type='str'), annotations=dict(default=None, type='dict'), labels=dict(default=None, type='dict'), selector=dict(default=None, type='dict'), clusterip=dict(default=None, type='str'), portalip=dict(default=None, type='str'), ports=dict(default=None, type='list'), session_affinity=dict(default='None', type='str'), service_type=dict(default='ClusterIP', type='str'), external_ips=dict(default=None, type='list'), ), supports_check_mode=True, ) rval = OCService.run_ansible(module.params, module.check_mode) if 'failed' in rval: return module.fail_json(**rval) return module.exit_json(**rval) if __name__ == '__main__': main()
gdub/django
refs/heads/master
django/core/cache/utils.py
585
from __future__ import unicode_literals import hashlib from django.utils.encoding import force_bytes from django.utils.http import urlquote TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s' def make_template_fragment_key(fragment_name, vary_on=None): if vary_on is None: vary_on = () key = ':'.join(urlquote(var) for var in vary_on) args = hashlib.md5(force_bytes(key)) return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, args.hexdigest())
mapr/hue
refs/heads/hue-3.9.0-mapr
desktop/libs/hadoop/src/hadoop/mini_cluster.py
23
#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ####################################################### ## WARNING!!! ## ## This file is stale. Hadoop 0.23 and CDH4 ## ## do not support minicluster. This is replaced ## ## by webhdfs.py, to set up a running cluster. ## ####################################################### # A Python-side driver for MiniHadoopClusterManager # # See README.testing for hints on how to use this, # and also look for other examples. # # If you have one of these running and want to figure out what ports # are open, one way to do so is something like: # for p in $(lsof -p 63564 | grep LISTEN | sed -e 's/.*:\([0-9][0-9]*\).*/\1/') # do # echo $p # echo "GET /" | nc -w 1 localhost $p # done import atexit import subprocess import os import pwd import logging import sys import signal import shutil import socket import time import tempfile import json import lxml.etree import urllib2 from desktop.lib import python_util from hadoop.fs.hadoopfs import HadoopFileSystem from hadoop.job_tracker import LiveJobTracker import hadoop.cluster # Starts mini cluster suspended until a debugger attaches to it. DEBUG_HADOOP=False # Redirects mini cluster stderr to stderr. (Default is to put it in a file.) USE_STDERR=os.environ.get("MINI_CLUSTER_USE_STDERR", False) # Whether to clean up temp dir at exit CLEANUP_TMP_DIR=os.environ.get("MINI_CLUSTER_CLEANUP", True) # How long to wait for cluster to start up. (seconds) MAX_CLUSTER_STARTUP_TIME = 120.0 # List of classes to be used as plugins for the JT of the cluster. CLUSTER_JT_PLUGINS = 'org.apache.hadoop.thriftfs.ThriftJobTrackerPlugin' # MR Task Scheduler. By default use the FIFO scheduler CLUSTER_TASK_SCHEDULER='org.apache.hadoop.mapred.JobQueueTaskScheduler' # MR queue names CLUSTER_QUEUE_NAMES='default' STARTUP_CONFIGS={} # users and their groups which are used in Hue tests. TEST_USER_GROUP_MAPPING = { 'test': ['test','users','supergroup'], 'chown_test': ['chown_test'], 'notsuperuser': ['notsuperuser'], 'gamma': ['gamma'], 'webui': ['webui'], 'hue': ['supergroup'] } LOGGER=logging.getLogger(__name__) class MiniHadoopCluster(object): """ Manages the invocation of a MiniHadoopClusterManager from Python. """ def __init__(self, num_datanodes=1, num_tasktrackers=1): # These are cached self._jt, self._fs = None, None self.num_datanodes = num_datanodes self.num_tasktrackers = num_tasktrackers def start(self, extra_configs=None): """ Start a cluster as a subprocess. """ self.tmpdir = tempfile.mkdtemp() if not extra_configs: extra_configs = {} def tmppath(filename): """Creates paths in tmpdir.""" return os.path.join(self.tmpdir, filename) LOGGER.info("Using temporary directory: %s" % self.tmpdir) in_conf_dir = tmppath("in-conf") os.mkdir(in_conf_dir) self.log_dir = tmppath("logs") os.mkdir(self.log_dir) f = file(os.path.join(in_conf_dir, "hadoop-metrics.properties"), "w") try: f.write(""" dfs.class=org.apache.hadoop.metrics.spi.NoEmitMetricsContext mapred.class=org.apache.hadoop.metrics.spi.NoEmitMetricsContext jvm.class=org.apache.hadoop.metrics.spi.NoEmitMetricsContext rpc.class=org.apache.hadoop.metrics.spi.NoEmitMetricsContext """) finally: f.close() if self.superuser not in TEST_USER_GROUP_MAPPING: TEST_USER_GROUP_MAPPING[self.superuser] = [self.superuser] _write_static_group_mapping(TEST_USER_GROUP_MAPPING, tmppath('ugm.properties')) core_configs = { 'hadoop.proxyuser.%s.groups' % (self.superuser,): 'users,supergroup', 'hadoop.proxyuser.%s.hosts' % (self.superuser,): 'localhost', 'mapred.jobtracker.plugins': CLUSTER_JT_PLUGINS} extra_configs.update(STARTUP_CONFIGS) write_config(core_configs, tmppath('in-conf/core-site.xml')) write_config({'mapred.jobtracker.taskScheduler': CLUSTER_TASK_SCHEDULER, 'mapred.queue.names': CLUSTER_QUEUE_NAMES}, tmppath('in-conf/mapred-site.xml')) hadoop_policy_keys = ['client', 'client.datanode', 'datanode', 'inter.datanode', 'namenode', 'inter.tracker', 'job.submission', 'task.umbilical', 'refresh.policy', 'admin.operations'] hadoop_policy_config = {} for policy in hadoop_policy_keys: hadoop_policy_config['security.' + policy + '.protocol.acl'] = '*' write_config(hadoop_policy_config, tmppath('in-conf/hadoop-policy.xml')) details_file = file(tmppath("details.json"), "w+") try: args = [ os.path.join(hadoop.conf.HADOOP_MR1_HOME.get(), 'bin', 'hadoop'), "jar", hadoop.conf.HADOOP_TEST_JAR.get(), "minicluster", "-writeConfig", tmppath("config.xml"), "-writeDetails", tmppath("details.json"), "-datanodes", str(self.num_datanodes), "-tasktrackers", str(self.num_tasktrackers), "-useloopbackhosts", "-D", "hadoop.tmp.dir=%s" % self.tmpdir, "-D", "mapred.local.dir=%s/mapred/local" % self.tmpdir, "-D", "mapred.system.dir=/mapred/system", "-D", "mapred.temp.dir=/mapred/temp", "-D", "jobclient.completion.poll.interval=100", "-D", "jobclient.progress.monitor.poll.interval=100", "-D", "fs.checkpoint.period=1", # For a reason I don't fully understand, this must be 0.0.0.0 and not 'localhost' "-D", "dfs.secondary.http.address=0.0.0.0:%d" % python_util.find_unused_port(), # We bind the NN's thrift interface to a port we find here. # This is suboptimal, since there's a race. Alas, if we don't # do this here, the datanodes fail to discover the namenode's thrift # address, and there's a race there "-D", "dfs.thrift.address=localhost:%d" % python_util.find_unused_port(), "-D", "jobtracker.thrift.address=localhost:%d" % python_util.find_unused_port(), # Jobs realize they have finished faster with this timeout. "-D", "jobclient.completion.poll.interval=50", "-D", "hadoop.security.authorization=true", "-D", "hadoop.policy.file=%s/hadoop-policy.xml" % in_conf_dir, ] for key,value in extra_configs.iteritems(): args.append("-D") args.append(key + "=" + value) env = {} env["HADOOP_CONF_DIR"] = in_conf_dir env["HADOOP_OPTS"] = "-Dtest.build.data=%s" % (self.tmpdir, ) env["HADOOP_CLASSPATH"] = ':'.join([ # -- BEGIN JAVA TRIVIA -- # Add the -test- jar to the classpath to work around a subtle issue # involving Java classloaders. In brief, hadoop's RunJar class creates # a child classloader with the test jar on it, but the core classes # are loaded by the system classloader. This is fine except that # some classes in the test jar extend package-protected classes in the # core jar. Even though the classes are in the same package name, they # are thus loaded by different classloaders and therefore an IllegalAccessError # prevents the MiniMRCluster from starting. Adding the test jar to the system # classpath prevents this error since then both the MiniMRCluster and the # core classes are loaded by the system classloader. hadoop.conf.HADOOP_TEST_JAR.get(), # -- END JAVA TRIVIA -- hadoop.conf.HADOOP_PLUGIN_CLASSPATH.get(), # Due to CDH-4537, we need to add test dependencies to run minicluster os.path.join(os.path.dirname(__file__), 'test_jars', '*'), ]) env["HADOOP_HEAPSIZE"] = "128" env["HADOOP_HOME"] = hadoop.conf.HADOOP_MR1_HOME.get() env["HADOOP_LOG_DIR"] = self.log_dir env["USER"] = self.superuser if "JAVA_HOME" in os.environ: env["JAVA_HOME"] = os.environ["JAVA_HOME"] # Wait for the debugger to attach if DEBUG_HADOOP: env["HADOOP_OPTS"] = env.get("HADOOP_OPTS", "") + " -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=9999" if USE_STDERR: stderr=sys.stderr else: stderr=file(tmppath("stderr"), "w") LOGGER.debug("Starting minicluster: %s env: %s" % (repr(args), repr(env))) self.clusterproc = subprocess.Popen( args=args, stdout=file(tmppath("stdout"), "w"), stderr=stderr, env=env) details = {} start = time.time() # We consider the cluster started when the details file parses correct JSON. # MiniHadoopCluster currently writes the details file last, and this depends # on that. while not details: try: details_file.seek(0) details = json.load(details_file) except ValueError: pass if self.clusterproc.poll() is not None or (not DEBUG_HADOOP and (time.time() - start) > MAX_CLUSTER_STARTUP_TIME): LOGGER.debug("stdout:" + file(tmppath("stdout")).read()) if not USE_STDERR: LOGGER.debug("stderr:" + file(tmppath("stderr")).read()) self.stop() raise Exception("Cluster process quit or is taking too long to start. Aborting.") finally: details_file.close() LOGGER.debug("Successfully started minicluster") # Place all the details as attributes on self. for k, v in details.iteritems(): setattr(self, k, v) # Parse the configuration using XPath and place into self.config. config = lxml.etree.parse(tmppath("config.xml")) self.config = dict( (property.find("./name").text, property.find("./value").text) for property in config.xpath("/configuration/property")) # Write out Hadoop-style configuration directory, # which can, in turn, be used for /bin/hadoop. self.config_dir = tmppath("conf") os.mkdir(self.config_dir) hadoop.conf.HADOOP_CONF_DIR.set_for_testing(self.config_dir) write_config(self.config, tmppath("conf/core-site.xml"), ["fs.defaultFS", "jobclient.completion.poll.interval", "dfs.namenode.checkpoint.period", "dfs.namenode.checkpoint.dir", 'hadoop.proxyuser.'+self.superuser+'.groups', 'hadoop.proxyuser.'+self.superuser+'.hosts']) write_config(self.config, tmppath("conf/hdfs-site.xml"), ["fs.defaultFS", "dfs.namenode.http-address", "dfs.namenode.secondary.http-address"]) # mapred.job.tracker isn't written out into self.config, so we fill # that one out more manually. write_config({ 'mapred.job.tracker': 'localhost:%d' % self.jobtracker_port }, tmppath("conf/mapred-site.xml")) write_config(hadoop_policy_config, tmppath('conf/hadoop-policy.xml')) # Once the config is written out, we can start the 2NN. args = [hadoop.conf.HADOOP_BIN.get(), '--config', self.config_dir, 'secondarynamenode'] LOGGER.debug("Starting 2NN at: " + self.config['dfs.secondary.http.address']) LOGGER.debug("2NN command: %s env: %s" % (repr(args), repr(env))) self.secondary_proc = subprocess.Popen( args=args, stdout=file(tmppath("stdout.2nn"), "w"), stderr=file(tmppath("stderr.2nn"), "w"), env=env) while True: try: response = urllib2.urlopen(urllib2.Request('http://' + self.config['dfs.secondary.http.address'])) except urllib2.URLError: # If we should abort startup. if self.secondary_proc.poll() is not None or (not DEBUG_HADOOP and (time.time() - start) > MAX_CLUSTER_STARTUP_TIME): LOGGER.debug("stdout:" + file(tmppath("stdout")).read()) if not USE_STDERR: LOGGER.debug("stderr:" + file(tmppath("stderr")).read()) self.stop() raise Exception("2nn process quit or is taking too long to start. Aborting.") break else: time.sleep(1) continue # We didn't get a URLError. 2NN started successfully. response.close() break LOGGER.debug("Successfully started 2NN") def stop(self): """ Kills the cluster ungracefully. """ if self.clusterproc and self.clusterproc.poll() is None: os.kill(self.clusterproc.pid, signal.SIGKILL) self.clusterproc.wait() if self.secondary_proc and self.secondary_proc.poll() is None: os.kill(self.secondary_proc.pid, signal.SIGKILL) self.secondary_proc.wait() if CLEANUP_TMP_DIR != 'false': logging.info("Cleaning up self.tmpdir. Use $MINI_CLUSTER_CLEANUP to avoid.") shutil.rmtree(self.tmpdir) @property def fs(self): """Creates a HadoopFileSystem object configured for this cluster.""" if self._fs is None: self._fs = HadoopFileSystem("localhost", thrift_port=self.namenode_thrift_port, hdfs_port=self.namenode_port, hadoop_bin_path=hadoop.conf.HADOOP_BIN.get()) return self._fs @property def jt(self): """Creates a LiveJobTracker object configured for this cluster.""" if self._jt is None: self._jt = LiveJobTracker("localhost", self.jobtracker_thrift_port) return self._jt @property def superuser(self): """ Returns the "superuser" of this cluster. This is essentially the user that the cluster was started with. """ return pwd.getpwuid(os.getuid()).pw_name @property def namenode_thrift_port(self): """ Return the namenode thrift port. """ _, port = self.config["dfs.thrift.address"].split(":") return int(port) @property def jobtracker_thrift_port(self): """ Return the jobtracker thrift port. """ _, port = self.config["jobtracker.thrift.address"].split(":") return int(port) def dump_ini(self, fd=sys.stdout): """ Dumps an ini-style configuration suitable for configuring desktop to talk to this cluster. TODO(todd) eventually this should use config framework 'writeback' support @param fd: a file-like writable object """ print >>fd, "[hadoop]" print >>fd, "[[hdfs_clusters]]" print >>fd, "[[[default]]]" print >>fd, "thrift_port=%d" % self.namenode_thrift_port print >>fd, "[[mapred_clusters]]" print >>fd, "[[[default]]]" print >>fd, "thrift_port=%d" % self.jobtracker_thrift_port # Shared global cluster returned by shared_cluster context manager. _shared_cluster = None def shared_cluster(conf=False): """ Use a shared cluster that is initialized on demand, and that is torn down at process exit. If conf is True, then configuration is updated to reference the cluster, and relevant caches are cleared. Returns a lambda which must be called when you are done with the shared cluster. """ cluster = shared_cluster_internal() closers = [ ] if conf: closers.extend([ hadoop.conf.HDFS_CLUSTERS["default"].NN_HOST.set_for_testing("localhost"), hadoop.conf.HDFS_CLUSTERS["default"].NN_HDFS_PORT.set_for_testing(cluster.namenode_port), hadoop.conf.MR_CLUSTERS["default"].HOST.set_for_testing("localhost"), hadoop.conf.MR_CLUSTERS["default"].JT_THRIFT_PORT.set_for_testing(cluster.jt.thrift_port), ]) # Clear the caches # This is djanky (that's django for "janky"). # Caches are tricky w.r.t. to to testing; # perhaps there are better patterns? old = hadoop.cluster.clear_caches() def finish(): if conf: hadoop.cluster.restore_caches(old) for x in closers: x() # We don't run the cluster's real stop method, # because a shared cluster should be shutdown at # exit. cluster.shutdown = finish return cluster def write_config(config, path, variables=None): """ Minimal utility to write Hadoop-style configuration from a configuration map (config), into a new file called path. """ f = file(path, "w") try: f.write("""<?xml version="1.0"?> <?xml-stylesheet type="text/xsl" href="configuration.xsl"?> <configuration> """) keys = (variables and (variables,) or (config.keys(),))[0] for name in keys: value = config[name] f.write(" <property>\n") f.write(" <name>%s</name>\n" % name) f.write(" <value>%s</value>\n" % value) f.write(" </property>\n") f.write("</configuration>\n") finally: f.close() def _write_static_group_mapping(user_group_mapping, path): """ Create a Java-style .properties file to contain the static user -> group mapping used by tests. """ f = file(path, 'w') try: for user, groups in user_group_mapping.iteritems(): f.write('%s = %s\n' % (user, ','.join(groups))) finally: f.close() def shared_cluster_internal(): """ Manages _shared_cluster. """ global _shared_cluster if _shared_cluster is None: _shared_cluster = MiniHadoopCluster() _shared_cluster.start() atexit.register(_shared_cluster.stop) return _shared_cluster if __name__ == '__main__': """ It's poor form to write tests for tests (the world-wide stack overflow exception), so this merely tries the code. """ logging.basicConfig(level=logging.DEBUG) import desktop desktop.lib.conf.initialize([hadoop.conf]) if True: cluster = MiniHadoopCluster(num_datanodes=5, num_tasktrackers=5) cluster.start() print cluster.namenode_port print cluster.jobtracker_port print cluster.config.get("dfs.thrift.address") cluster.dump_ini(sys.stdout) from IPython.Shell import IPShellEmbed IPShellEmbed()() cluster.stop()
saghul/gyn
refs/heads/master
test/win/gyptest-link-subsystem.py
239
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Make sure subsystem setting is extracted properly. """ import TestGyp import sys if sys.platform == 'win32': test = TestGyp.TestGyp(formats=['msvs', 'ninja']) CHDIR = 'linker-flags' test.run_gyp('subsystem.gyp', chdir=CHDIR) test.build('subsystem.gyp', 'test_console_ok', chdir=CHDIR) test.build('subsystem.gyp', 'test_console_fail', chdir=CHDIR, status=1) test.build('subsystem.gyp', 'test_windows_ok', chdir=CHDIR) test.build('subsystem.gyp', 'test_windows_fail', chdir=CHDIR, status=1) test.build('subsystem.gyp', 'test_console_xp', chdir=CHDIR) test.build('subsystem.gyp', 'test_windows_xp', chdir=CHDIR) # Make sure we are targeting XP. def GetHeaders(exe): return test.run_dumpbin('/headers', test.built_file_path(exe, chdir=CHDIR)) if '5.01 subsystem version' not in GetHeaders('test_console_xp.exe'): test.fail_test() if '5.01 subsystem version' not in GetHeaders('test_windows_xp.exe'): test.fail_test() # TODO(scottmg): There are other subsystems (WinCE, etc.) that we don't use. test.pass_test()
gitromand/phantomjs
refs/heads/master
src/breakpad/src/tools/gyp/test/generator-output/actions/subdir2/make-file.py
973
#!/usr/bin/env python # Copyright (c) 2009 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import sys contents = "Hello from make-file.py\n" open(sys.argv[1], 'wb').write(contents)
atyndall/cits4211
refs/heads/master
testdata/GenerateTetris.py
1
from random import randint fn = input("What's the input filename? ") ls = open(fn).read().split("\n") n = int(ls[0]) #total no. of pieces ns = [(max(0, float(ls[k])), k) for k in range(1, 8)] #ratios t = sum([x for (x, k) in ns]) #total of ratios ps = [k for (x, k) in ns for i in range(int(x / t * n))] #the pieces ps += [max(ns)[1] for k in range(n - len(ps))] #top up for rounding qs = [] for k in range(len(ps)): x = randint(k, len(ps) - 1) #randomise qs.append(ps[x]) ps[x] = ps[k] fx = open("z" + fn, 'w') while qs != []: s = str(qs[:50]).replace(", ", "")[1:-1] fx.write(s) fx.write("\n") qs = qs[50:] input("Hit Enter to finish")
yongwen/makahiki
refs/heads/master
makahiki/apps/widgets/wallpost/forms.py
27
"""post form""" from django import forms class WallForm(forms.Form): """Wall post form""" post = forms.CharField(widget=forms.widgets.Textarea()) page_name = forms.CharField(widget=forms.HiddenInput(), required=False)
MaximeGLegault/StrategyIA
refs/heads/dev
tests/Algorithm/test_evaluation_module.py
1
import unittest import numpy as np from unittest.mock import create_autospec from RULEngine.Util.Position import Position from RULEngine.Util.Pose import Pose from RULEngine.Game.Player import Player from RULEngine.Game.Team import Team from ai.states.game_state import GameState from ai.Algorithm.evaluation_module import line_of_sight_clearance, trajectory_score class TestEvaluationModule(unittest.TestCase): MULTIPLICATIVE_NULL_VALUE = 1 ADDITIVE_NULL_VALUE = 0 MAX_VALUE = 15 def setUp(self): self.start_point = Position(0, 0) self.goal = Position(0, 0) self.obstacle = Position(0, 0) def test_givenObstacleBehindPlayer_thenReturnsMultiplicativeNullValue(self): self._define_points_obstacle((100, 100), (200, 200), (50, 50)) assert trajectory_score(self.start_point, self.goal, self.obstacle) == self.MULTIPLICATIVE_NULL_VALUE def test_givenObstacleVeryFarFromPlayer_thenTrajectoryScoreReturnsMultiplicativeNullValue(self): self._define_points_obstacle((100, 100), (200, 200), (1500, 1500)) assert trajectory_score(self.start_point, self.goal, self.obstacle) == self.MULTIPLICATIVE_NULL_VALUE def test_givenObstacleOnPath_thenTrajectoryScoreReturnsMaxValue(self): self._define_points_obstacle((100, 100), (200, 200), (150, 150)) assert trajectory_score(self.start_point, self.goal, self.obstacle) == self.MAX_VALUE def test_givenOnePlayerInMyTeamFarFromGoal_thenLineOfSightClearanceIsDistanceToTarget(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(1500, 1500), 2) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1, player2.id: player2}, {}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) assert line_of_sight_clearance(player1, self.goal) == distance_to_target def test_givenOnePlayerInMyTeamNearFromGoal_thenLineOfSightClearanceIsDistanceToTargetTimesPathScore(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(130, 130), 2) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1, player2.id: player2}, {}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) path_score = trajectory_score(player1.pose.position, self.goal, player2.pose.position) assert line_of_sight_clearance(player1, self.goal) == distance_to_target * path_score def test_givenTwoPlayerInMyTeamNearFromGoal_thenLineOfSightClearanceIsDistanceToTargetTimesBothPathScores(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(130, 130), 2) player3 = self._build_mock_player(Position(160, 170), 3) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1, player2.id: player2, player3.id: player3}, {}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) path_score_to_p2 = trajectory_score(player1.pose.position, self.goal, player2.pose.position) path_score_to_p3 = trajectory_score(player1.pose.position, self.goal, player3.pose.position) assert line_of_sight_clearance(player1, self.goal) == distance_to_target * path_score_to_p2 * path_score_to_p3 def test_givenOnePlayerInOtherTeamFarFromGoal_thenLineOfSightClearanceIsDistanceToTarget(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(1500, 1500), 2) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1}, {2: player2}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) assert line_of_sight_clearance(player1, self.goal) == distance_to_target def test_givenOnePlayerInOtherTeamNearGoal_thenLineOfSightClearanceIsDistanceToTargetTimesPathScore(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(130, 130), 2) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1}, {2: player2}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) path_score = trajectory_score(player1.pose.position, self.goal, player2.pose.position) assert line_of_sight_clearance(player1, self.goal) == distance_to_target * path_score def test_givenTwoPlayerInOtherTeamNearGoal_thenLineOfSightClearanceIsDistanceToTargetTimesBothPathScores(self): player1 = self._build_mock_player(Position(100, 100), 1) player2 = self._build_mock_player(Position(130, 130), 2) player3 = self._build_mock_player(Position(160, 170), 3) self.goal.x, self.goal.y = (200, 200) self._create_mock_teams({player1.id: player1}, {player2.id: player2, player3.id: player3}) distance_to_target = np.linalg.norm(player1.pose.position - self.goal) path_score_to_p2 = trajectory_score(player1.pose.position, self.goal, player2.pose.position) path_score_to_p3 = trajectory_score(player1.pose.position, self.goal, player3.pose.position) assert line_of_sight_clearance(player1, self.goal) == distance_to_target * path_score_to_p2 * path_score_to_p3 def _build_mock_player(self, position, id): player = create_autospec(Player) pose = create_autospec(Pose) pose.position = position player.pose = pose player.id = id return player def _create_mock_teams(self, allies, opponents): team1 = create_autospec(Team) team1.available_players = allies GameState().my_team = team1 team2 = create_autospec(Team) team2.available_players = opponents GameState().other_team = team2 def _define_points_obstacle(self, start_point, goal, obstacle): self.start_point.x, self.start_point.y = start_point self.goal.x, self.goal.y = goal self.obstacle.x, self.obstacle.y = obstacle
edx/ecommerce
refs/heads/master
ecommerce/extensions/dashboard/forms.py
1
from django import forms from django.utils.translation import ugettext_lazy as _ class UserFormMixin(forms.Form): """ Mixin for user field filtering. """ username = forms.CharField(required=False, label=_("Username")) email = forms.CharField(required=False, label=_("Email"))
robmagee/django-cms
refs/heads/develop
cms/forms/fields.py
35
# -*- coding: utf-8 -*- import six from django import forms from django.contrib.admin.widgets import RelatedFieldWidgetWrapper from django.forms.fields import EMPTY_VALUES from django.utils.translation import ugettext_lazy as _ from cms.forms.utils import get_site_choices, get_page_choices from cms.forms.widgets import PageSelectWidget, PageSmartLinkWidget from cms.models.pagemodel import Page class SuperLazyIterator(object): def __init__(self, func): self.func = func def __iter__(self): return iter(self.func()) class LazyChoiceField(forms.ChoiceField): def _set_choices(self, value): # we overwrite this function so no list(value) is called self._choices = self.widget.choices = value choices = property(forms.ChoiceField._get_choices, _set_choices) class PageSelectFormField(forms.MultiValueField): widget = PageSelectWidget default_error_messages = { 'invalid_site': _(u'Select a valid site'), 'invalid_page': _(u'Select a valid page'), } def __init__(self, queryset=None, empty_label=u"---------", cache_choices=False, required=True, widget=None, to_field_name=None, limit_choices_to=None, *args, **kwargs): errors = self.default_error_messages.copy() if 'error_messages' in kwargs: errors.update(kwargs['error_messages']) site_choices = SuperLazyIterator(get_site_choices) page_choices = SuperLazyIterator(get_page_choices) self.limit_choices_to = limit_choices_to kwargs['required'] = required fields = ( LazyChoiceField(choices=site_choices, required=False, error_messages={'invalid': errors['invalid_site']}), LazyChoiceField(choices=page_choices, required=False, error_messages={'invalid': errors['invalid_page']}), ) super(PageSelectFormField, self).__init__(fields, *args, **kwargs) def compress(self, data_list): if data_list: page_id = data_list[1] if page_id in EMPTY_VALUES: if not self.required: return None raise forms.ValidationError(self.error_messages['invalid_page']) return Page.objects.get(pk=page_id) return None def _has_changed(self, initial, data): is_empty = data and (len(data) >= 2 and data[1] in [None, '']) if isinstance(self.widget, RelatedFieldWidgetWrapper): self.widget.decompress = self.widget.widget.decompress if is_empty and initial is None: # when empty data will have [u'1', u'', u''] as value # this will cause django to always return True because of the '1' # so we simply follow django's default behavior when initial is None and data is "empty" data = ['' for x in range(0, len(data))] return super(PageSelectFormField, self)._has_changed(initial, data) class PageSmartLinkField(forms.CharField): widget = PageSmartLinkWidget def __init__(self, max_length=None, min_length=None, placeholder_text=None, ajax_view=None, *args, **kwargs): self.placeholder_text = placeholder_text widget = self.widget(ajax_view=ajax_view) super(PageSmartLinkField, self).__init__(max_length, min_length, widget=widget, *args, **kwargs) def widget_attrs(self, widget): attrs = super(PageSmartLinkField, self).widget_attrs(widget) attrs.update({'placeholder_text': six.text_type(self.placeholder_text)}) return attrs
MFoster/breeze
refs/heads/master
django/conf/locale/is/formats.py
433
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'j. F Y' TIME_FORMAT = 'H:i:s' # DATETIME_FORMAT = YEAR_MONTH_FORMAT = 'F Y' MONTH_DAY_FORMAT = 'j. F' SHORT_DATE_FORMAT = 'j.n.Y' # SHORT_DATETIME_FORMAT = # FIRST_DAY_OF_WEEK = # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior # DATE_INPUT_FORMATS = # TIME_INPUT_FORMATS = # DATETIME_INPUT_FORMATS = DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = '.' # NUMBER_GROUPING =
YYWen0o0/python-frame-django
refs/heads/master
django/contrib/flatpages/admin.py
80
from django.contrib import admin from django.contrib.flatpages.models import FlatPage from django.utils.translation import ugettext_lazy as _ from django.contrib.flatpages.forms import FlatpageForm class FlatPageAdmin(admin.ModelAdmin): form = FlatpageForm fieldsets = ( (None, {'fields': ('url', 'title', 'content', 'sites')}), (_('Advanced options'), {'classes': ('collapse',), 'fields': ('enable_comments', 'registration_required', 'template_name')}), ) list_display = ('url', 'title') list_filter = ('sites', 'enable_comments', 'registration_required') search_fields = ('url', 'title') admin.site.register(FlatPage, FlatPageAdmin)
panchenji/Ryu_modified
refs/heads/master
ryu/app/rest_router.py
2
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import logging import socket import struct import json from webob import Response from ryu.app.wsgi import ControllerBase from ryu.app.wsgi import WSGIApplication from ryu.base import app_manager from ryu.controller import dpset from ryu.controller import ofp_event from ryu.controller.handler import set_ev_cls from ryu.controller.handler import MAIN_DISPATCHER from ryu.exception import OFPUnknownVersion from ryu.exception import RyuException from ryu.lib import dpid as dpid_lib from ryu.lib import hub from ryu.lib import mac as mac_lib from ryu.lib import addrconv from ryu.lib.packet import arp from ryu.lib.packet import ethernet from ryu.lib.packet import icmp from ryu.lib.packet import ipv4 from ryu.lib.packet import packet from ryu.lib.packet import tcp from ryu.lib.packet import udp from ryu.lib.packet import vlan from ryu.ofproto import ether from ryu.ofproto import inet from ryu.ofproto import ofproto_v1_0 from ryu.ofproto import ofproto_v1_2 #============================= # REST API #============================= # # Note: specify switch and vlan group, as follows. # {switch_id} : 'all' or switchID # {vlan_id} : 'all' or vlanID # # ## 1. get address data and routing data. # # * get data of no vlan # GET /router/{switch_id} # # * get data of specific vlan group # GET /router/{switch_id}/{vlan_id} # # ## 2. set address data or routing data. # # * set data of no vlan # POST /router/{switch_id} # # * set data of specific vlan group # POST /router/{switch_id}/{vlan_id} # # case1: set address data. # parameter = {"address": "A.B.C.D/M"} # case2-1: set static route. # parameter = {"destination": "A.B.C.D/M", "gateway": "E.F.G.H"} # case2-2: set default route. # parameter = {"gateway": "E.F.G.H"} # # ## 3. delete address data or routing data. # # * delete data of no vlan # DELETE /router/{switch_id} # # * delete data of specific vlan group # DELETE /router/{switch_id}/{vlan_id} # # case1: delete address data. # parameter = {"address_id": "<int>"} or {"address_id": "all"} # case2: delete routing data. # parameter = {"route_id": "<int>"} or {"route_id": "all"} # # UINT16_MAX = 0xffff UINT32_MAX = 0xffffffff UINT64_MAX = 0xffffffffffffffff ETHERNET = ethernet.ethernet.__name__ VLAN = vlan.vlan.__name__ IPV4 = ipv4.ipv4.__name__ ARP = arp.arp.__name__ ICMP = icmp.icmp.__name__ TCP = tcp.tcp.__name__ UDP = udp.udp.__name__ MAX_SUSPENDPACKETS = 50 # Threshold of the packet suspends thread count. ARP_REPLY_TIMER = 2 # sec OFP_REPLY_TIMER = 1.0 # sec CHK_ROUTING_TBL_INTERVAL = 1800 # sec SWITCHID_PATTERN = dpid_lib.DPID_PATTERN + r'|all' VLANID_PATTERN = r'[0-9]{1,4}|all' VLANID_NONE = 0 VLANID_MIN = 2 VLANID_MAX = 4094 COOKIE_DEFAULT_ID = 0 COOKIE_SHIFT_VLANID = 32 COOKIE_SHIFT_ROUTEID = 16 DEFAULT_ROUTE = '0.0.0.0/0' IDLE_TIMEOUT = 1800 # sec DEFAULT_TTL = 64 REST_COMMAND_RESULT = 'command_result' REST_RESULT = 'result' REST_DETAILS = 'details' REST_OK = 'success' REST_NG = 'failure' REST_ALL = 'all' REST_SWITCHID = 'switch_id' REST_VLANID = 'vlan_id' REST_NW = 'internal_network' REST_ADDRESSID = 'address_id' REST_ADDRESS = 'address' REST_ROUTEID = 'route_id' REST_ROUTE = 'route' REST_DESTINATION = 'destination' REST_GATEWAY = 'gateway' PRIORITY_VLAN_SHIFT = 1000 PRIORITY_NETMASK_SHIFT = 32 PRIORITY_NORMAL = 0 PRIORITY_ARP_HANDLING = 1 PRIORITY_DEFAULT_ROUTING = 1 PRIORITY_MAC_LEARNING = 2 PRIORITY_STATIC_ROUTING = 2 PRIORITY_IMPLICIT_ROUTING = 3 PRIORITY_L2_SWITCHING = 4 PRIORITY_IP_HANDLING = 5 PRIORITY_TYPE_ROUTE = 'priority_route' def get_priority(priority_type, vid=0, route=None): log_msg = None priority = priority_type if priority_type == PRIORITY_TYPE_ROUTE: assert route is not None if route.dst_ip: priority_type = PRIORITY_STATIC_ROUTING priority = priority_type + route.netmask log_msg = 'static routing' else: priority_type = PRIORITY_DEFAULT_ROUTING priority = priority_type log_msg = 'default routing' if vid or priority_type == PRIORITY_IP_HANDLING: priority += PRIORITY_VLAN_SHIFT if priority_type > PRIORITY_STATIC_ROUTING: priority += PRIORITY_NETMASK_SHIFT if log_msg is None: return priority else: return priority, log_msg def get_priority_type(priority, vid): if vid: priority -= PRIORITY_VLAN_SHIFT return priority class NotFoundError(RyuException): message = 'Router SW is not connected. : switch_id=%(switch_id)s' class CommandFailure(RyuException): pass class RestRouterAPI(app_manager.RyuApp): OFP_VERSIONS = [ofproto_v1_0.OFP_VERSION, ofproto_v1_2.OFP_VERSION] _CONTEXTS = {'dpset': dpset.DPSet, 'wsgi': WSGIApplication} def __init__(self, *args, **kwargs): super(RestRouterAPI, self).__init__(*args, **kwargs) # logger configure RouterController.set_logger(self.logger) wsgi = kwargs['wsgi'] self.waiters = {} self.data = {'waiters': self.waiters} mapper = wsgi.mapper wsgi.registory['RouterController'] = self.data requirements = {'switch_id': SWITCHID_PATTERN, 'vlan_id': VLANID_PATTERN} # For no vlan data path = '/router/{switch_id}' mapper.connect('router', path, controller=RouterController, requirements=requirements, action='get_data', conditions=dict(method=['GET'])) mapper.connect('router', path, controller=RouterController, requirements=requirements, action='set_data', conditions=dict(method=['POST'])) mapper.connect('router', path, controller=RouterController, requirements=requirements, action='delete_data', conditions=dict(method=['DELETE'])) # For vlan data path = '/router/{switch_id}/{vlan_id}' mapper.connect('router', path, controller=RouterController, requirements=requirements, action='get_vlan_data', conditions=dict(method=['GET'])) mapper.connect('router', path, controller=RouterController, requirements=requirements, action='set_vlan_data', conditions=dict(method=['POST'])) mapper.connect('router', path, controller=RouterController, requirements=requirements, action='delete_vlan_data', conditions=dict(method=['DELETE'])) @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER) def datapath_handler(self, ev): if ev.enter: RouterController.register_router(ev.dp) else: RouterController.unregister_router(ev.dp) @set_ev_cls(ofp_event.EventOFPPacketIn, MAIN_DISPATCHER) def packet_in_handler(self, ev): RouterController.packet_in_handler(ev.msg) def _stats_reply_handler(self, ev): msg = ev.msg dp = msg.datapath if (dp.id not in self.waiters or msg.xid not in self.waiters[dp.id]): return event, msgs = self.waiters[dp.id][msg.xid] msgs.append(msg) if msg.flags & dp.ofproto.OFPSF_REPLY_MORE: return del self.waiters[dp.id][msg.xid] event.set() # for OpenFlow version1.0 @set_ev_cls(ofp_event.EventOFPFlowStatsReply, MAIN_DISPATCHER) def stats_reply_handler_v1_0(self, ev): self._stats_reply_handler(ev) # for OpenFlow version1.2 @set_ev_cls(ofp_event.EventOFPStatsReply, MAIN_DISPATCHER) def stats_reply_handler_v1_2(self, ev): self._stats_reply_handler(ev) #TODO: Update routing table when port status is changed. # REST command template def rest_command(func): def _rest_command(*args, **kwargs): try: msg = func(*args, **kwargs) return Response(content_type='application/json', body=json.dumps(msg)) except SyntaxError as e: status = 400 details = e.msg except (ValueError, NameError) as e: status = 400 details = e.message except NotFoundError as msg: status = 404 details = str(msg) msg = {REST_RESULT: REST_NG, REST_DETAILS: details} return Response(status=status, body=json.dumps(msg)) return _rest_command class RouterController(ControllerBase): _ROUTER_LIST = {} _LOGGER = None def __init__(self, req, link, data, **config): super(RouterController, self).__init__(req, link, data, **config) self.waiters = data['waiters'] @classmethod def set_logger(cls, logger): cls._LOGGER = logger cls._LOGGER.propagate = False hdlr = logging.StreamHandler() fmt_str = '[RT][%(levelname)s] switch_id=%(sw_id)s: %(message)s' hdlr.setFormatter(logging.Formatter(fmt_str)) cls._LOGGER.addHandler(hdlr) @classmethod def register_router(cls, dp): dpid = {'sw_id': dpid_lib.dpid_to_str(dp.id)} try: router = Router(dp, cls._LOGGER) except OFPUnknownVersion as message: cls._LOGGER.error(str(message), extra=dpid) return cls._ROUTER_LIST.setdefault(dp.id, router) cls._LOGGER.info('Join as router.', extra=dpid) @classmethod def unregister_router(cls, dp): if dp.id in cls._ROUTER_LIST: cls._ROUTER_LIST[dp.id].delete() del cls._ROUTER_LIST[dp.id] dpid = {'sw_id': dpid_lib.dpid_to_str(dp.id)} cls._LOGGER.info('Leave router.', extra=dpid) @classmethod def packet_in_handler(cls, msg): dp_id = msg.datapath.id if dp_id in cls._ROUTER_LIST: router = cls._ROUTER_LIST[dp_id] router.packet_in_handler(msg) # GET /router/{switch_id} @rest_command def get_data(self, req, switch_id, **_kwargs): return self._access_router(switch_id, VLANID_NONE, 'get_data', req.body) # GET /router/{switch_id}/{vlan_id} @rest_command def get_vlan_data(self, req, switch_id, vlan_id, **_kwargs): return self._access_router(switch_id, vlan_id, 'get_data', req.body) # POST /router/{switch_id} @rest_command def set_data(self, req, switch_id, **_kwargs): return self._access_router(switch_id, VLANID_NONE, 'set_data', req.body) # POST /router/{switch_id}/{vlan_id} @rest_command def set_vlan_data(self, req, switch_id, vlan_id, **_kwargs): return self._access_router(switch_id, vlan_id, 'set_data', req.body) # DELETE /router/{switch_id} @rest_command def delete_data(self, req, switch_id, **_kwargs): return self._access_router(switch_id, VLANID_NONE, 'delete_data', req.body) # DELETE /router/{switch_id}/{vlan_id} @rest_command def delete_vlan_data(self, req, switch_id, vlan_id, **_kwargs): return self._access_router(switch_id, vlan_id, 'delete_data', req.body) def _access_router(self, switch_id, vlan_id, func, rest_param): rest_message = [] routers = self._get_router(switch_id) param = eval(rest_param) if rest_param else {} for router in routers.values(): function = getattr(router, func) data = function(vlan_id, param, self.waiters) rest_message.append(data) return rest_message def _get_router(self, switch_id): routers = {} if switch_id == REST_ALL: routers = self._ROUTER_LIST else: sw_id = dpid_lib.str_to_dpid(switch_id) if sw_id in self._ROUTER_LIST: routers = {sw_id: self._ROUTER_LIST[sw_id]} if routers: return routers else: raise NotFoundError(switch_id=switch_id) class Router(dict): def __init__(self, dp, logger): super(Router, self).__init__() self.dp = dp self.dpid_str = dpid_lib.dpid_to_str(dp.id) self.sw_id = {'sw_id': self.dpid_str} self.logger = logger self.port_data = PortData(dp.ports) ofctl = OfCtl.factory(dp, logger) cookie = COOKIE_DEFAULT_ID # Set SW config: TTL error packet in (only OFPv1.2) ofctl.set_sw_config_for_ttl() # Set flow: ARP handling (packet in) priority = get_priority(PRIORITY_ARP_HANDLING) ofctl.set_packetin_flow(cookie, priority, dl_type=ether.ETH_TYPE_ARP) self.logger.info('Set ARP handling (packet in) flow [cookie=0x%x]', cookie, extra=self.sw_id) # Set flow: L2 switching (normal) priority = get_priority(PRIORITY_NORMAL) ofctl.set_normal_flow(cookie, priority) self.logger.info('Set L2 switching (normal) flow [cookie=0x%x]', cookie, extra=self.sw_id) # Set VlanRouter for vid=None. vlan_router = VlanRouter(VLANID_NONE, dp, self.port_data, logger) self[VLANID_NONE] = vlan_router # Start cyclic routing table check. self.thread = hub.spawn(self._cyclic_update_routing_tbl) self.logger.info('Start cyclic routing table update.', extra=self.sw_id) def delete(self): hub.kill(self.thread) self.thread.wait() self.logger.info('Stop cyclic routing table update.', extra=self.sw_id) def _get_vlan_router(self, vlan_id): vlan_routers = [] if vlan_id == REST_ALL: vlan_routers = self.values() else: vlan_id = int(vlan_id) if (vlan_id != VLANID_NONE and (vlan_id < VLANID_MIN or VLANID_MAX < vlan_id)): msg = 'Invalid {vlan_id} value. Set [%d-%d]' raise ValueError(msg % (VLANID_MIN, VLANID_MAX)) elif vlan_id in self: vlan_routers = [self[vlan_id]] return vlan_routers def _add_vlan_router(self, vlan_id): vlan_id = int(vlan_id) if vlan_id not in self: vlan_router = VlanRouter(vlan_id, self.dp, self.port_data, self.logger) self[vlan_id] = vlan_router return self[vlan_id] def _del_vlan_router(self, vlan_id, waiters): # Remove unnecessary VlanRouter. if vlan_id == VLANID_NONE: return vlan_router = self[vlan_id] if (len(vlan_router.address_data) == 0 and len(vlan_router.routing_tbl) == 0): vlan_router.delete(waiters) del self[vlan_id] def get_data(self, vlan_id, dummy1, dummy2): vlan_routers = self._get_vlan_router(vlan_id) if vlan_routers: msgs = [vlan_router.get_data() for vlan_router in vlan_routers] else: msgs = [{REST_VLANID: vlan_id}] return {REST_SWITCHID: self.dpid_str, REST_NW: msgs} def set_data(self, vlan_id, param, waiters): vlan_routers = self._get_vlan_router(vlan_id) if not vlan_routers: vlan_routers = [self._add_vlan_router(vlan_id)] msgs = [] for vlan_router in vlan_routers: try: msg = vlan_router.set_data(param) msgs.append(msg) if msg[REST_RESULT] == REST_NG: # Data setting is failure. self._del_vlan_router(vlan_router.vlan_id, waiters) except ValueError as err_msg: # Data setting is failure. self._del_vlan_router(vlan_router.vlan_id, waiters) raise err_msg return {REST_SWITCHID: self.dpid_str, REST_COMMAND_RESULT: msgs} def delete_data(self, vlan_id, param, waiters): msgs = [] vlan_routers = self._get_vlan_router(vlan_id) if vlan_routers: for vlan_router in vlan_routers: msg = vlan_router.delete_data(param, waiters) if msg: msgs.append(msg) # Check unnecessary VlanRouter. self._del_vlan_router(vlan_router.vlan_id, waiters) if not msgs: msgs = [{REST_RESULT: REST_NG, REST_DETAILS: 'Data is nothing.'}] return {REST_SWITCHID: self.dpid_str, REST_COMMAND_RESULT: msgs} def packet_in_handler(self, msg): pkt = packet.Packet(msg.data) #TODO: Packet library convert to string #self.logger.debug('Packet in = %s', str(pkt), self.sw_id) header_list = dict((p.protocol_name, p) for p in pkt.protocols if type(p) != str) if header_list: # Check vlan-tag vlan_id = VLANID_NONE if VLAN in header_list: vlan_id = header_list[VLAN].vid # Event dispatch if vlan_id in self: self[vlan_id].packet_in_handler(msg, header_list) else: self.logger.debug('Drop unknown vlan packet. [vlan_id=%d]', vlan_id, extra=self.sw_id) def _cyclic_update_routing_tbl(self): while True: # send ARP to all gateways. for vlan_router in self.values(): vlan_router.send_arp_all_gw() hub.sleep(1) hub.sleep(CHK_ROUTING_TBL_INTERVAL) class VlanRouter(object): def __init__(self, vlan_id, dp, port_data, logger): super(VlanRouter, self).__init__() self.vlan_id = vlan_id self.dp = dp self.sw_id = {'sw_id': dpid_lib.dpid_to_str(dp.id)} self.logger = logger self.port_data = port_data self.address_data = AddressData() self.routing_tbl = RoutingTable() self.packet_buffer = SuspendPacketList(self.send_icmp_unreach_error) self.ofctl = OfCtl.factory(dp, logger) # Set flow: default route (drop) self._set_defaultroute_drop() def delete(self, waiters): # Delete flow. msgs = self.ofctl.get_all_flow(waiters) for msg in msgs: for stats in msg.body: vlan_id = VlanRouter._cookie_to_id(REST_VLANID, stats.cookie) if vlan_id == self.vlan_id: self.ofctl.delete_flow(stats) assert len(self.packet_buffer) == 0 @staticmethod def _cookie_to_id(id_type, cookie): if id_type == REST_VLANID: rest_id = cookie >> COOKIE_SHIFT_VLANID elif id_type == REST_ADDRESSID: rest_id = cookie & UINT32_MAX else: assert id_type == REST_ROUTEID rest_id = (cookie & UINT32_MAX) >> COOKIE_SHIFT_ROUTEID return rest_id def _id_to_cookie(self, id_type, rest_id): vid = self.vlan_id << COOKIE_SHIFT_VLANID if id_type == REST_VLANID: cookie = rest_id << COOKIE_SHIFT_VLANID elif id_type == REST_ADDRESSID: cookie = vid + rest_id else: assert id_type == REST_ROUTEID cookie = vid + (rest_id << COOKIE_SHIFT_ROUTEID) return cookie def _get_priority(self, priority_type, route=None): return get_priority(priority_type, vid=self.vlan_id, route=route) def _response(self, msg): if msg and self.vlan_id: msg.setdefault(REST_VLANID, self.vlan_id) return msg def get_data(self): address_data = self._get_address_data() routing_data = self._get_routing_data() data = {} if address_data[REST_ADDRESS]: data.update(address_data) if routing_data[REST_ROUTE]: data.update(routing_data) return self._response(data) def _get_address_data(self): address_data = [] for value in self.address_data.values(): default_gw = ip_addr_ntoa(value.default_gw) address = '%s/%d' % (default_gw, value.netmask) data = {REST_ADDRESSID: value.address_id, REST_ADDRESS: address} address_data.append(data) return {REST_ADDRESS: address_data} def _get_routing_data(self): routing_data = [] for key, value in self.routing_tbl.items(): if value.gateway_mac is not None: gateway = ip_addr_ntoa(value.gateway_ip) data = {REST_ROUTEID: value.route_id, REST_DESTINATION: key, REST_GATEWAY: gateway} routing_data.append(data) return {REST_ROUTE: routing_data} def set_data(self, data): details = None try: # Set address data if REST_ADDRESS in data: address = data[REST_ADDRESS] address_id = self._set_address_data(address) details = 'Add address [address_id=%d]' % address_id # Set routing data elif REST_GATEWAY in data: gateway = data[REST_GATEWAY] if REST_DESTINATION in data: destination = data[REST_DESTINATION] else: destination = DEFAULT_ROUTE route_id = self._set_routing_data(destination, gateway) details = 'Add route [route_id=%d]' % route_id except CommandFailure as err_msg: msg = {REST_RESULT: REST_NG, REST_DETAILS: str(err_msg)} return self._response(msg) if details is not None: msg = {REST_RESULT: REST_OK, REST_DETAILS: details} return self._response(msg) else: raise ValueError('Invalid parameter.') def _set_address_data(self, address): address = self.address_data.add(address) cookie = self._id_to_cookie(REST_ADDRESSID, address.address_id) # Set flow: host MAC learning (packet in) priority = self._get_priority(PRIORITY_MAC_LEARNING) self.ofctl.set_packetin_flow(cookie, priority, dl_type=ether.ETH_TYPE_IP, dl_vlan=self.vlan_id, dst_ip=address.nw_addr, dst_mask=address.netmask) log_msg = 'Set host MAC learning (packet in) flow [cookie=0x%x]' self.logger.info(log_msg, cookie, extra=self.sw_id) # set Flow: IP handling(PacketIn) priority = self._get_priority(PRIORITY_IP_HANDLING) self.ofctl.set_packetin_flow(cookie, priority, dl_type=ether.ETH_TYPE_IP, dl_vlan=self.vlan_id, dst_ip=address.default_gw) self.logger.info('Set IP handling (packet in) flow [cookie=0x%x]', cookie, extra=self.sw_id) # Set flow: L2 switching (normal) outport = self.ofctl.dp.ofproto.OFPP_NORMAL priority = self._get_priority(PRIORITY_L2_SWITCHING) self.ofctl.set_routing_flow( cookie, priority, outport, dl_vlan=self.vlan_id, nw_src=address.nw_addr, src_mask=address.netmask, nw_dst=address.nw_addr, dst_mask=address.netmask) self.logger.info('Set L2 switching (normal) flow [cookie=0x%x]', cookie, extra=self.sw_id) # Send GARP self.send_arp_request(address.default_gw, address.default_gw) return address.address_id def _set_routing_data(self, destination, gateway): err_msg = 'Invalid [%s] value.' % REST_GATEWAY dst_ip = ip_addr_aton(gateway, err_msg=err_msg) address = self.address_data.get_data(ip=dst_ip) if address is None: msg = 'Gateway=%s\'s address is not registered.' % gateway raise CommandFailure(msg=msg) elif dst_ip == address.default_gw: msg = 'Gateway=%s is used as default gateway of address_id=%d'\ % (gateway, address.address_id) raise CommandFailure(msg=msg) else: src_ip = address.default_gw route = self.routing_tbl.add(destination, gateway) self._set_route_packetin(route) self.send_arp_request(src_ip, dst_ip) return route.route_id def _set_defaultroute_drop(self): cookie = self._id_to_cookie(REST_VLANID, self.vlan_id) priority = self._get_priority(PRIORITY_DEFAULT_ROUTING) outport = None # for drop self.ofctl.set_routing_flow(cookie, priority, outport, dl_vlan=self.vlan_id) self.logger.info('Set default route (drop) flow [cookie=0x%x]', cookie, extra=self.sw_id) def _set_route_packetin(self, route): cookie = self._id_to_cookie(REST_ROUTEID, route.route_id) priority, log_msg = self._get_priority(PRIORITY_TYPE_ROUTE, route=route) self.ofctl.set_packetin_flow(cookie, priority, dl_type=ether.ETH_TYPE_IP, dl_vlan=self.vlan_id, dst_ip=route.dst_ip, dst_mask=route.netmask) self.logger.info('Set %s (packet in) flow [cookie=0x%x]', log_msg, cookie, extra=self.sw_id) def delete_data(self, data, waiters): if REST_ROUTEID in data: route_id = data[REST_ROUTEID] msg = self._delete_routing_data(route_id, waiters) elif REST_ADDRESSID in data: address_id = data[REST_ADDRESSID] msg = self._delete_address_data(address_id, waiters) else: raise ValueError('Invalid parameter.') return self._response(msg) def _delete_address_data(self, address_id, waiters): if address_id != REST_ALL: try: address_id = int(address_id) except ValueError as e: err_msg = 'Invalid [%s] value. %s' raise ValueError(err_msg % (REST_ADDRESSID, e.message)) skip_ids = self._chk_addr_relation_route(address_id) # Get all flow. delete_list = [] msgs = self.ofctl.get_all_flow(waiters) max_id = UINT16_MAX for msg in msgs: for stats in msg.body: vlan_id = VlanRouter._cookie_to_id(REST_VLANID, stats.cookie) if vlan_id != self.vlan_id: continue addr_id = VlanRouter._cookie_to_id(REST_ADDRESSID, stats.cookie) if addr_id in skip_ids: continue elif address_id == REST_ALL: if addr_id <= COOKIE_DEFAULT_ID or max_id < addr_id: continue elif address_id != addr_id: continue delete_list.append(stats) delete_ids = [] for flow_stats in delete_list: # Delete flow self.ofctl.delete_flow(flow_stats) address_id = VlanRouter._cookie_to_id(REST_ADDRESSID, flow_stats.cookie) del_address = self.address_data.get_data(addr_id=address_id) if del_address is not None: # Clean up suspend packet threads. self.packet_buffer.delete(del_addr=del_address) # Delete data. self.address_data.delete(address_id) if address_id not in delete_ids: delete_ids.append(address_id) msg = {} if delete_ids: delete_ids = ','.join(str(addr_id) for addr_id in delete_ids) details = 'Delete address [address_id=%s]' % delete_ids msg = {REST_RESULT: REST_OK, REST_DETAILS: details} if skip_ids: skip_ids = ','.join(str(addr_id) for addr_id in skip_ids) details = 'Skip delete (related route exist) [address_id=%s]'\ % skip_ids if msg: msg[REST_DETAILS] += ', %s' % details else: msg = {REST_RESULT: REST_NG, REST_DETAILS: details} return msg def _delete_routing_data(self, route_id, waiters): if route_id != REST_ALL: try: route_id = int(route_id) except ValueError as e: err_msg = 'Invalid [%s] value. %s' raise ValueError(err_msg % (REST_ROUTEID, e.message)) # Get all flow. msgs = self.ofctl.get_all_flow(waiters) delete_list = [] for msg in msgs: for stats in msg.body: vlan_id = VlanRouter._cookie_to_id(REST_VLANID, stats.cookie) if vlan_id != self.vlan_id: continue rt_id = VlanRouter._cookie_to_id(REST_ROUTEID, stats.cookie) if route_id == REST_ALL: if rt_id == COOKIE_DEFAULT_ID: continue elif route_id != rt_id: continue delete_list.append(stats) # Delete flow. delete_ids = [] for flow_stats in delete_list: self.ofctl.delete_flow(flow_stats) route_id = VlanRouter._cookie_to_id(REST_ROUTEID, flow_stats.cookie) self.routing_tbl.delete(route_id) if route_id not in delete_ids: delete_ids.append(route_id) # case: Default route deleted. -> set flow (drop) route_type = get_priority_type(flow_stats.priority, vid=self.vlan_id) if route_type == PRIORITY_DEFAULT_ROUTING: self._set_defaultroute_drop() msg = {} if delete_ids: delete_ids = ','.join(str(route_id) for route_id in delete_ids) details = 'Delete route [route_id=%s]' % delete_ids msg = {REST_RESULT: REST_OK, REST_DETAILS: details} return msg def _chk_addr_relation_route(self, address_id): # Check exist of related routing data. relate_list = [] gateways = self.routing_tbl.get_gateways() for gateway in gateways: address = self.address_data.get_data(ip=gateway) if address is not None: if (address_id == REST_ALL and address.address_id not in relate_list): relate_list.append(address.address_id) elif address.address_id == address_id: relate_list = [address_id] break return relate_list def packet_in_handler(self, msg, header_list): # Check invalid TTL (only OpenFlow V1.2) ofproto = self.dp.ofproto if ofproto.OFP_VERSION == ofproto_v1_2.OFP_VERSION: if msg.reason == ofproto.OFPR_INVALID_TTL: self._packetin_invalid_ttl(msg, header_list) return # Analyze event type. if ARP in header_list: self._packetin_arp(msg, header_list) return if IPV4 in header_list: rt_ports = self.address_data.get_default_gw() if header_list[IPV4].dst in rt_ports: # Packet to router's port. if ICMP in header_list: if header_list[ICMP].type == icmp.ICMP_ECHO_REQUEST: self._packetin_icmp_req(msg, header_list) return elif TCP in header_list or UDP in header_list: self._packetin_tcp_udp(msg, header_list) return else: # Packet to internal host or gateway router. self._packetin_to_node(msg, header_list) return def _packetin_arp(self, msg, header_list): src_addr = self.address_data.get_data(ip=header_list[ARP].src_ip) if src_addr is None: return # case: Receive ARP from the gateway # Update routing table. # case: Receive ARP from an internal host # Learning host MAC. gw_flg = self._update_routing_tbl(msg, header_list) if gw_flg is False: self._learning_host_mac(msg, header_list) # ARP packet handling. in_port = self.ofctl.get_packetin_inport(msg) src_ip = header_list[ARP].src_ip dst_ip = header_list[ARP].dst_ip srcip = ip_addr_ntoa(src_ip) dstip = ip_addr_ntoa(dst_ip) rt_ports = self.address_data.get_default_gw() if src_ip == dst_ip: # GARP -> packet forward (normal) output = self.ofctl.dp.ofproto.OFPP_NORMAL self.ofctl.send_packet_out(in_port, output, msg.data) self.logger.info('Receive GARP from [%s].', srcip, extra=self.sw_id) self.logger.info('Send GARP (normal).', extra=self.sw_id) elif dst_ip not in rt_ports: dst_addr = self.address_data.get_data(ip=dst_ip) if (dst_addr is not None and src_addr.address_id == dst_addr.address_id): # ARP from internal host -> packet forward (normal) output = self.ofctl.dp.ofproto.OFPP_NORMAL self.ofctl.send_packet_out(in_port, output, msg.data) self.logger.info('Receive ARP from an internal host [%s].', srcip, extra=self.sw_id) self.logger.info('Send ARP (normal)', extra=self.sw_id) else: if header_list[ARP].opcode == arp.ARP_REQUEST: # ARP request to router port -> send ARP reply src_mac = header_list[ARP].src_mac dst_mac = self.port_data[in_port].mac arp_target_mac = dst_mac output = in_port in_port = self.ofctl.dp.ofproto.OFPP_CONTROLLER self.ofctl.send_arp(arp.ARP_REPLY, self.vlan_id, dst_mac, src_mac, dst_ip, src_ip, arp_target_mac, in_port, output) log_msg = 'Receive ARP request from [%s] to router port [%s].' self.logger.info(log_msg, srcip, dstip, extra=self.sw_id) self.logger.info('Send ARP reply to [%s]', srcip, extra=self.sw_id) elif header_list[ARP].opcode == arp.ARP_REPLY: # ARP reply to router port -> suspend packets forward log_msg = 'Receive ARP reply from [%s] to router port [%s].' self.logger.info(log_msg, srcip, dstip, extra=self.sw_id) packet_list = self.packet_buffer.get_data(src_ip) if packet_list: # stop ARP reply wait thread. for suspend_packet in packet_list: self.packet_buffer.delete(pkt=suspend_packet) # send suspend packet. output = self.ofctl.dp.ofproto.OFPP_TABLE for suspend_packet in packet_list: self.ofctl.send_packet_out(suspend_packet.in_port, output, suspend_packet.data) self.logger.info('Send suspend packet to [%s].', srcip, extra=self.sw_id) def _packetin_icmp_req(self, msg, header_list): # Send ICMP echo reply. in_port = self.ofctl.get_packetin_inport(msg) self.ofctl.send_icmp(in_port, header_list, self.vlan_id, icmp.ICMP_ECHO_REPLY, icmp.ICMP_ECHO_REPLY_CODE, icmp_data=header_list[ICMP].data) srcip = ip_addr_ntoa(header_list[IPV4].src) dstip = ip_addr_ntoa(header_list[IPV4].dst) log_msg = 'Receive ICMP echo request from [%s] to router port [%s].' self.logger.info(log_msg, srcip, dstip, extra=self.sw_id) self.logger.info('Send ICMP echo reply to [%s].', srcip, extra=self.sw_id) def _packetin_tcp_udp(self, msg, header_list): # Send ICMP port unreach error. in_port = self.ofctl.get_packetin_inport(msg) self.ofctl.send_icmp(in_port, header_list, self.vlan_id, icmp.ICMP_DEST_UNREACH, icmp.ICMP_PORT_UNREACH_CODE, msg_data=msg.data) srcip = ip_addr_ntoa(header_list[IPV4].src) dstip = ip_addr_ntoa(header_list[IPV4].dst) self.logger.info('Receive TCP/UDP from [%s] to router port [%s].', srcip, dstip, extra=self.sw_id) self.logger.info('Send ICMP destination unreachable to [%s].', srcip, extra=self.sw_id) def _packetin_to_node(self, msg, header_list): if len(self.packet_buffer) >= MAX_SUSPENDPACKETS: self.logger.info('Packet is dropped, MAX_SUSPENDPACKETS exceeded.', extra=self.sw_id) return # Send ARP request to get node MAC address. in_port = self.ofctl.get_packetin_inport(msg) src_ip = None dst_ip = header_list[IPV4].dst srcip = ip_addr_ntoa(header_list[IPV4].src) dstip = ip_addr_ntoa(dst_ip) address = self.address_data.get_data(ip=dst_ip) if address is not None: log_msg = 'Receive IP packet from [%s] to an internal host [%s].' self.logger.info(log_msg, srcip, dstip, extra=self.sw_id) src_ip = address.default_gw else: route = self.routing_tbl.get_data(dst_ip=dst_ip) if route is not None: log_msg = 'Receive IP packet from [%s] to [%s].' self.logger.info(log_msg, srcip, dstip, extra=self.sw_id) gw_address = self.address_data.get_data(ip=route.gateway_ip) if gw_address is not None: src_ip = gw_address.default_gw dst_ip = route.gateway_ip if src_ip is not None: self.packet_buffer.add(in_port, header_list, msg.data) self.send_arp_request(src_ip, dst_ip, in_port=in_port) self.logger.info('Send ARP request (flood)', extra=self.sw_id) def _packetin_invalid_ttl(self, msg, header_list): # Send ICMP TTL error. srcip = ip_addr_ntoa(header_list[IPV4].src) self.logger.info('Receive invalid ttl packet from [%s].', srcip, extra=self.sw_id) in_port = self.ofctl.get_packetin_inport(msg) src_ip = self._get_send_port_ip(header_list) if src_ip is not None: self.ofctl.send_icmp(in_port, header_list, self.vlan_id, icmp.ICMP_TIME_EXCEEDED, icmp.ICMP_TTL_EXPIRED_CODE, msg_data=msg.data, src_ip=src_ip) self.logger.info('Send ICMP time exceeded to [%s].', srcip, extra=self.sw_id) def send_arp_all_gw(self): gateways = self.routing_tbl.get_gateways() for gateway in gateways: address = self.address_data.get_data(ip=gateway) self.send_arp_request(address.default_gw, gateway) def send_arp_request(self, src_ip, dst_ip, in_port=None): # Send ARP request from all ports. for send_port in self.port_data.values(): if in_port is None or in_port != send_port.port_no: src_mac = send_port.mac dst_mac = mac_lib.BROADCAST_STR arp_target_mac = mac_lib.DONTCARE_STR inport = self.ofctl.dp.ofproto.OFPP_CONTROLLER output = send_port.port_no self.ofctl.send_arp(arp.ARP_REQUEST, self.vlan_id, src_mac, dst_mac, src_ip, dst_ip, arp_target_mac, inport, output) def send_icmp_unreach_error(self, packet_buffer): # Send ICMP host unreach error. self.logger.info('ARP reply wait timer was timed out.', extra=self.sw_id) src_ip = self._get_send_port_ip(packet_buffer.header_list) if src_ip is not None: self.ofctl.send_icmp(packet_buffer.in_port, packet_buffer.header_list, self.vlan_id, icmp.ICMP_DEST_UNREACH, icmp.ICMP_HOST_UNREACH_CODE, msg_data=packet_buffer.data, src_ip=src_ip) dstip = ip_addr_ntoa(packet_buffer.dst_ip) self.logger.info('Send ICMP destination unreachable to [%s].', dstip, extra=self.sw_id) def _update_routing_tbl(self, msg, header_list): # Set flow: routing to gateway. out_port = self.ofctl.get_packetin_inport(msg) src_mac = header_list[ARP].src_mac dst_mac = self.port_data[out_port].mac src_ip = header_list[ARP].src_ip gateway_flg = False for key, value in self.routing_tbl.items(): if value.gateway_ip == src_ip: gateway_flg = True if value.gateway_mac == src_mac: continue self.routing_tbl[key].gateway_mac = src_mac cookie = self._id_to_cookie(REST_ROUTEID, value.route_id) priority, log_msg = self._get_priority(PRIORITY_TYPE_ROUTE, route=value) self.ofctl.set_routing_flow(cookie, priority, out_port, dl_vlan=self.vlan_id, src_mac=dst_mac, dst_mac=src_mac, nw_dst=value.dst_ip, dst_mask=value.netmask, dec_ttl=True) self.logger.info('Set %s flow [cookie=0x%x]', log_msg, cookie, extra=self.sw_id) return gateway_flg def _learning_host_mac(self, msg, header_list): # Set flow: routing to internal Host. out_port = self.ofctl.get_packetin_inport(msg) src_mac = header_list[ARP].src_mac dst_mac = self.port_data[out_port].mac src_ip = header_list[ARP].src_ip gateways = self.routing_tbl.get_gateways() if src_ip not in gateways: address = self.address_data.get_data(ip=src_ip) if address is not None: cookie = self._id_to_cookie(REST_ADDRESSID, address.address_id) priority = self._get_priority(PRIORITY_IMPLICIT_ROUTING) self.ofctl.set_routing_flow(cookie, priority, out_port, dl_vlan=self.vlan_id, src_mac=dst_mac, dst_mac=src_mac, nw_dst=src_ip, idle_timeout=IDLE_TIMEOUT, dec_ttl=True) self.logger.info('Set implicit routing flow [cookie=0x%x]', cookie, extra=self.sw_id) def _get_send_port_ip(self, header_list): try: src_mac = header_list[ETHERNET].src if IPV4 in header_list: src_ip = header_list[IPV4].src else: src_ip = header_list[ARP].src_ip except KeyError: self.logger.debug('Receive unsupported packet.', extra=self.sw_id) return None address = self.address_data.get_data(ip=src_ip) if address is not None: return address.default_gw else: route = self.routing_tbl.get_data(gw_mac=src_mac) if route is not None: address = self.address_data.get_data(ip=route.gateway_ip) if address is not None: return address.default_gw self.logger.debug('Receive packet from unknown IP[%s].', ip_addr_ntoa(src_ip), extra=self.sw_id) return None class PortData(dict): def __init__(self, ports): super(PortData, self).__init__() for port in ports.values(): data = Port(port.port_no, port.hw_addr) self[port.port_no] = data class Port(object): def __init__(self, port_no, hw_addr): super(Port, self).__init__() self.port_no = port_no self.mac = hw_addr class AddressData(dict): def __init__(self): super(AddressData, self).__init__() self.address_id = 1 def add(self, address): err_msg = 'Invalid [%s] value.' % REST_ADDRESS nw_addr, mask, default_gw = nw_addr_aton(address, err_msg=err_msg) # Check overlaps for other in self.values(): other_mask = mask_ntob(other.netmask) add_mask = mask_ntob(mask, err_msg=err_msg) if (other.nw_addr == ipv4_apply_mask(default_gw, other.netmask) or nw_addr == ipv4_apply_mask(other.default_gw, mask, err_msg)): msg = 'Address overlaps [address_id=%d]' % other.address_id raise CommandFailure(msg=msg) address = Address(self.address_id, nw_addr, mask, default_gw) ip_str = ip_addr_ntoa(nw_addr) key = '%s/%d' % (ip_str, mask) self[key] = address self.address_id += 1 self.address_id &= UINT32_MAX if self.address_id == COOKIE_DEFAULT_ID: self.address_id = 1 return address def delete(self, address_id): for key, value in self.items(): if value.address_id == address_id: del self[key] return def get_default_gw(self): return [address.default_gw for address in self.values()] def get_data(self, addr_id=None, ip=None): for address in self.values(): if addr_id is not None: if addr_id == address.address_id: return address else: assert ip is not None if ipv4_apply_mask(ip, address.netmask) == address.nw_addr: return address return None class Address(object): def __init__(self, address_id, nw_addr, netmask, default_gw): super(Address, self).__init__() self.address_id = address_id self.nw_addr = nw_addr self.netmask = netmask self.default_gw = default_gw def __contains__(self, ip): return bool(ipv4_apply_mask(ip, self.netmask) == self.nw_addr) class RoutingTable(dict): def __init__(self): super(RoutingTable, self).__init__() self.route_id = 1 def add(self, dst_nw_addr, gateway_ip): err_msg = 'Invalid [%s] value.' if dst_nw_addr == DEFAULT_ROUTE: dst_ip = 0 netmask = 0 else: dst_ip, netmask, dummy = nw_addr_aton( dst_nw_addr, err_msg=err_msg % REST_DESTINATION) gateway_ip = ip_addr_aton(gateway_ip, err_msg=err_msg % REST_GATEWAY) # Check overlaps overlap_route = None if dst_nw_addr == DEFAULT_ROUTE: if DEFAULT_ROUTE in self: overlap_route = self[DEFAULT_ROUTE].route_id elif dst_nw_addr in self: overlap_route = self[dst_nw_addr].route_id if overlap_route is not None: msg = 'Destination overlaps [route_id=%d]' % overlap_route raise CommandFailure(msg=msg) routing_data = Route(self.route_id, dst_ip, netmask, gateway_ip) ip_str = ip_addr_ntoa(dst_ip) key = '%s/%d' % (ip_str, netmask) self[key] = routing_data self.route_id += 1 self.route_id &= UINT32_MAX if self.route_id == COOKIE_DEFAULT_ID: self.route_id = 1 return routing_data def delete(self, route_id): for key, value in self.items(): if value.route_id == route_id: del self[key] return def get_gateways(self): return [routing_data.gateway_ip for routing_data in self.values()] def get_data(self, gw_mac=None, dst_ip=None): if gw_mac is not None: for route in self.values(): if gw_mac == route.gateway_mac: return route return None elif dst_ip is not None: get_route = None mask = 0 for route in self.values(): if ipv4_apply_mask(dst_ip, route.netmask) == route.dst_ip: # For longest match if mask < route.netmask: get_route = route mask = route.netmask if get_route is None: get_route = self.get(DEFAULT_ROUTE, None) return get_route else: return None class Route(object): def __init__(self, route_id, dst_ip, netmask, gateway_ip): super(Route, self).__init__() self.route_id = route_id self.dst_ip = dst_ip self.netmask = netmask self.gateway_ip = gateway_ip self.gateway_mac = None class SuspendPacketList(list): def __init__(self, timeout_function): super(SuspendPacketList, self).__init__() self.timeout_function = timeout_function def add(self, in_port, header_list, data): suspend_pkt = SuspendPacket(in_port, header_list, data, self.wait_arp_reply_timer) self.append(suspend_pkt) def delete(self, pkt=None, del_addr=None): if pkt is not None: del_list = [pkt] else: assert del_addr is not None del_list = [pkt for pkt in self if pkt.dst_ip in del_addr] for pkt in del_list: self.remove(pkt) hub.kill(pkt.wait_thread) pkt.wait_thread.wait() def get_data(self, dst_ip): return [pkt for pkt in self if pkt.dst_ip == dst_ip] def wait_arp_reply_timer(self, suspend_pkt): hub.sleep(ARP_REPLY_TIMER) if suspend_pkt in self: self.timeout_function(suspend_pkt) self.delete(pkt=suspend_pkt) class SuspendPacket(object): def __init__(self, in_port, header_list, data, timer): super(SuspendPacket, self).__init__() self.in_port = in_port self.dst_ip = header_list[IPV4].dst self.header_list = header_list self.data = data # Start ARP reply wait timer. self.wait_thread = hub.spawn(timer, self) class OfCtl(object): _OF_VERSIONS = {} @staticmethod def register_of_version(version): def _register_of_version(cls): OfCtl._OF_VERSIONS.setdefault(version, cls) return cls return _register_of_version @staticmethod def factory(dp, logger): of_version = dp.ofproto.OFP_VERSION if of_version in OfCtl._OF_VERSIONS: ofctl = OfCtl._OF_VERSIONS[of_version](dp, logger) else: raise OFPUnknownVersion(version=of_version) return ofctl def __init__(self, dp, logger): super(OfCtl, self).__init__() self.dp = dp self.sw_id = {'sw_id': dpid_lib.dpid_to_str(dp.id)} self.logger = logger def set_sw_config_for_ttl(self): # OpenFlow v1_2 only. pass def set_flow(self, cookie, priority, dl_type=0, dl_dst=0, dl_vlan=0, nw_src=0, src_mask=32, nw_dst=0, dst_mask=32, nw_proto=0, idle_timeout=0, actions=None): # Abstract method raise NotImplementedError() def send_arp(self, arp_opcode, vlan_id, src_mac, dst_mac, src_ip, dst_ip, arp_target_mac, in_port, output): # Generate ARP packet if vlan_id != VLANID_NONE: ether_proto = ether.ETH_TYPE_8021Q pcp = 0 cfi = 0 vlan_ether = ether.ETH_TYPE_ARP v = vlan.vlan(pcp, cfi, vlan_id, vlan_ether) else: ether_proto = ether.ETH_TYPE_ARP hwtype = 1 arp_proto = ether.ETH_TYPE_IP hlen = 6 plen = 4 pkt = packet.Packet() e = ethernet.ethernet(dst_mac, src_mac, ether_proto) a = arp.arp(hwtype, arp_proto, hlen, plen, arp_opcode, src_mac, src_ip, arp_target_mac, dst_ip) pkt.add_protocol(e) if vlan_id != VLANID_NONE: pkt.add_protocol(v) pkt.add_protocol(a) pkt.serialize() # Send packet out self.send_packet_out(in_port, output, pkt.data, data_str=str(pkt)) def send_icmp(self, in_port, protocol_list, vlan_id, icmp_type, icmp_code, icmp_data=None, msg_data=None, src_ip=None): # Generate ICMP reply packet csum = 0 offset = ethernet.ethernet._MIN_LEN if vlan_id != VLANID_NONE: ether_proto = ether.ETH_TYPE_8021Q pcp = 0 cfi = 0 vlan_ether = ether.ETH_TYPE_IP v = vlan.vlan(pcp, cfi, vlan_id, vlan_ether) offset += vlan.vlan._MIN_LEN else: ether_proto = ether.ETH_TYPE_IP eth = protocol_list[ETHERNET] e = ethernet.ethernet(eth.src, eth.dst, ether_proto) if icmp_data is None and msg_data is not None: ip_datagram = msg_data[offset:] if icmp_type == icmp.ICMP_DEST_UNREACH: icmp_data = icmp.dest_unreach(data_len=len(ip_datagram), data=ip_datagram) elif icmp_type == icmp.ICMP_TIME_EXCEEDED: icmp_data = icmp.TimeExceeded(data_len=len(ip_datagram), data=ip_datagram) ic = icmp.icmp(icmp_type, icmp_code, csum, data=icmp_data) ip = protocol_list[IPV4] if src_ip is None: src_ip = ip.dst ip_total_length = ip.header_length * 4 + ic._MIN_LEN if ic.data is not None: ip_total_length += ic.data._MIN_LEN if ic.data.data is not None: ip_total_length += + len(ic.data.data) i = ipv4.ipv4(ip.version, ip.header_length, ip.tos, ip_total_length, ip.identification, ip.flags, ip.offset, DEFAULT_TTL, inet.IPPROTO_ICMP, csum, src_ip, ip.src) pkt = packet.Packet() pkt.add_protocol(e) if vlan_id != VLANID_NONE: pkt.add_protocol(v) pkt.add_protocol(i) pkt.add_protocol(ic) pkt.serialize() # Send packet out self.send_packet_out(in_port, self.dp.ofproto.OFPP_IN_PORT, pkt.data, data_str=str(pkt)) def send_packet_out(self, in_port, output, data, data_str=None): actions = [self.dp.ofproto_parser.OFPActionOutput(output, 0)] self.dp.send_packet_out(buffer_id=UINT32_MAX, in_port=in_port, actions=actions, data=data) #TODO: Packet library convert to string #if data_str is None: # data_str = str(packet.Packet(data)) #self.logger.debug('Packet out = %s', data_str, extra=self.sw_id) def set_normal_flow(self, cookie, priority): out_port = self.dp.ofproto.OFPP_NORMAL actions = [self.dp.ofproto_parser.OFPActionOutput(out_port, 0)] self.set_flow(cookie, priority, actions=actions) def set_packetin_flow(self, cookie, priority, dl_type=0, dl_dst=0, dl_vlan=0, dst_ip=0, dst_mask=32, nw_proto=0): miss_send_len = UINT16_MAX actions = [self.dp.ofproto_parser.OFPActionOutput( self.dp.ofproto.OFPP_CONTROLLER, miss_send_len)] self.set_flow(cookie, priority, dl_type=dl_type, dl_dst=dl_dst, dl_vlan=dl_vlan, nw_dst=dst_ip, dst_mask=dst_mask, nw_proto=nw_proto, actions=actions) def send_stats_request(self, stats, waiters): self.dp.set_xid(stats) waiters_per_dp = waiters.setdefault(self.dp.id, {}) event = hub.Event() msgs = [] waiters_per_dp[stats.xid] = (event, msgs) self.dp.send_msg(stats) try: event.wait(timeout=OFP_REPLY_TIMER) except hub.Timeout: del waiters_per_dp[stats.xid] return msgs @OfCtl.register_of_version(ofproto_v1_0.OFP_VERSION) class OfCtl_v1_0(OfCtl): def __init__(self, dp, logger): super(OfCtl_v1_0, self).__init__(dp, logger) def get_packetin_inport(self, msg): return msg.in_port def get_all_flow(self, waiters): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser match = ofp_parser.OFPMatch(ofp.OFPFW_ALL, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) stats = ofp_parser.OFPFlowStatsRequest(self.dp, 0, match, 0xff, ofp.OFPP_NONE) return self.send_stats_request(stats, waiters) def set_flow(self, cookie, priority, dl_type=0, dl_dst=0, dl_vlan=0, nw_src=0, src_mask=32, nw_dst=0, dst_mask=32, nw_proto=0, idle_timeout=0, actions=None): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser cmd = ofp.OFPFC_ADD # Match wildcards = ofp.OFPFW_ALL if dl_type: wildcards &= ~ofp.OFPFW_DL_TYPE if dl_dst: wildcards &= ~ofp.OFPFW_DL_DST if dl_vlan: wildcards &= ~ofp.OFPFW_DL_VLAN if nw_src: v = (32 - src_mask) << ofp.OFPFW_NW_SRC_SHIFT | \ ~ofp.OFPFW_NW_SRC_MASK wildcards &= v nw_src = ipv4_text_to_int(nw_src) if nw_dst: v = (32 - dst_mask) << ofp.OFPFW_NW_DST_SHIFT | \ ~ofp.OFPFW_NW_DST_MASK wildcards &= v nw_dst = ipv4_text_to_int(nw_dst) if nw_proto: wildcards &= ~ofp.OFPFW_NW_PROTO match = ofp_parser.OFPMatch(wildcards, 0, 0, dl_dst, dl_vlan, 0, dl_type, 0, nw_proto, nw_src, nw_dst, 0, 0) actions = actions or [] m = ofp_parser.OFPFlowMod(self.dp, match, cookie, cmd, idle_timeout=idle_timeout, priority=priority, actions=actions) self.dp.send_msg(m) def set_routing_flow(self, cookie, priority, outport, dl_vlan=0, nw_src=0, src_mask=32, nw_dst=0, dst_mask=32, src_mac=0, dst_mac=0, idle_timeout=0, **dummy): ofp_parser = self.dp.ofproto_parser dl_type = ether.ETH_TYPE_IP # Decrement TTL value is not supported at OpenFlow V1.0 actions = [] if src_mac: actions.append(ofp_parser.OFPActionSetDlSrc( mac_lib.haddr_to_bin(src_mac))) if dst_mac: actions.append(ofp_parser.OFPActionSetDlDst( mac_lib.haddr_to_bin(dst_mac))) if outport is not None: actions.append(ofp_parser.OFPActionOutput(outport)) self.set_flow(cookie, priority, dl_type=dl_type, dl_vlan=dl_vlan, nw_src=nw_src, src_mask=src_mask, nw_dst=nw_dst, dst_mask=dst_mask, idle_timeout=idle_timeout, actions=actions) def delete_flow(self, flow_stats): match = flow_stats.match cookie = flow_stats.cookie cmd = self.dp.ofproto.OFPFC_DELETE_STRICT priority = flow_stats.priority actions = [] flow_mod = self.dp.ofproto_parser.OFPFlowMod( self.dp, match, cookie, cmd, priority=priority, actions=actions) self.dp.send_msg(flow_mod) self.logger.info('Delete flow [cookie=0x%x]', cookie, extra=self.sw_id) @OfCtl.register_of_version(ofproto_v1_2.OFP_VERSION) class OfCtl_v1_2(OfCtl): def __init__(self, dp, logger): super(OfCtl_v1_2, self).__init__(dp, logger) def set_sw_config_for_ttl(self): flags = self.dp.ofproto.OFPC_INVALID_TTL_TO_CONTROLLER miss_send_len = UINT16_MAX m = self.dp.ofproto_parser.OFPSetConfig(self.dp, flags, miss_send_len) self.dp.send_msg(m) self.logger.info('Set SW config for TTL error packet in.', extra=self.sw_id) def get_packetin_inport(self, msg): in_port = self.dp.ofproto.OFPP_ANY for match_field in msg.match.fields: if match_field.header == self.dp.ofproto.OXM_OF_IN_PORT: in_port = match_field.value break return in_port def get_all_flow(self, waiters): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser match = ofp_parser.OFPMatch() stats = ofp_parser.OFPFlowStatsRequest(self.dp, 0, ofp.OFPP_ANY, ofp.OFPG_ANY, 0, 0, match) return self.send_stats_request(stats, waiters) def set_flow(self, cookie, priority, dl_type=0, dl_dst=0, dl_vlan=0, nw_src=0, src_mask=32, nw_dst=0, dst_mask=32, nw_proto=0, idle_timeout=0, actions=None): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser cmd = ofp.OFPFC_ADD # Match match = ofp_parser.OFPMatch() if dl_type: match.set_dl_type(dl_type) if dl_dst: match.set_dl_dst(dl_dst) if dl_vlan: match.set_vlan_vid(dl_vlan) if nw_src: match.set_ipv4_src_masked(ipv4_text_to_int(nw_src), mask_ntob(src_mask)) if nw_dst: match.set_ipv4_dst_masked(ipv4_text_to_int(nw_dst), mask_ntob(dst_mask)) if nw_proto: if dl_type == ether.ETH_TYPE_IP: match.set_ip_proto(nw_proto) elif dl_type == ether.ETH_TYPE_ARP: match.set_arp_opcode(nw_proto) # Instructions actions = actions or [] inst = [ofp_parser.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions)] m = ofp_parser.OFPFlowMod(self.dp, cookie, 0, 0, cmd, idle_timeout, 0, priority, UINT32_MAX, ofp.OFPP_ANY, ofp.OFPG_ANY, 0, match, inst) self.dp.send_msg(m) def set_routing_flow(self, cookie, priority, outport, dl_vlan=0, nw_src=0, src_mask=32, nw_dst=0, dst_mask=32, src_mac=0, dst_mac=0, idle_timeout=0, dec_ttl=False): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser dl_type = ether.ETH_TYPE_IP actions = [] if dec_ttl: actions.append(ofp_parser.OFPActionDecNwTtl()) if src_mac: actions.append(ofp_parser.OFPActionSetField(eth_src=src_mac)) if dst_mac: actions.append(ofp_parser.OFPActionSetField(eth_dst=dst_mac)) if outport is not None: actions.append(ofp_parser.OFPActionOutput(outport, 0)) self.set_flow(cookie, priority, dl_type=dl_type, dl_vlan=dl_vlan, nw_src=nw_src, src_mask=src_mask, nw_dst=nw_dst, dst_mask=dst_mask, idle_timeout=idle_timeout, actions=actions) def delete_flow(self, flow_stats): ofp = self.dp.ofproto ofp_parser = self.dp.ofproto_parser cmd = ofp.OFPFC_DELETE cookie = flow_stats.cookie cookie_mask = UINT64_MAX match = ofp_parser.OFPMatch() inst = [] flow_mod = ofp_parser.OFPFlowMod(self.dp, cookie, cookie_mask, 0, cmd, 0, 0, 0, UINT32_MAX, ofp.OFPP_ANY, ofp.OFPG_ANY, 0, match, inst) self.dp.send_msg(flow_mod) self.logger.info('Delete flow [cookie=0x%x]', cookie, extra=self.sw_id) def ip_addr_aton(ip_str, err_msg=None): try: return addrconv.ipv4.bin_to_text(socket.inet_aton(ip_str)) except (struct.error, socket.error) as e: if err_msg is not None: e.message = '%s %s' % (err_msg, e.message) raise ValueError(e.message) def ip_addr_ntoa(ip): return socket.inet_ntoa(addrconv.ipv4.text_to_bin(ip)) def mask_ntob(mask, err_msg=None): try: return (UINT32_MAX << (32 - mask)) & UINT32_MAX except ValueError: msg = 'illegal netmask' if err_msg is not None: msg = '%s %s' % (err_msg, msg) raise ValueError(msg) def ipv4_apply_mask(address, prefix_len, err_msg=None): import itertools assert isinstance(address, str) address_int = ipv4_text_to_int(address) return ipv4_int_to_text(address_int & mask_ntob(prefix_len, err_msg)) def ipv4_int_to_text(ip_int): assert isinstance(ip_int, (int, long)) return addrconv.ipv4.bin_to_text(struct.pack('!I', ip_int)) def ipv4_text_to_int(ip_text): if ip_text == 0: return ip_text assert isinstance(ip_text, str) return struct.unpack('!I', addrconv.ipv4.text_to_bin(ip_text))[0] def nw_addr_aton(nw_addr, err_msg=None): ip_mask = nw_addr.split('/') default_route = ip_addr_aton(ip_mask[0], err_msg=err_msg) netmask = 32 if len(ip_mask) == 2: try: netmask = int(ip_mask[1]) except ValueError as e: if err_msg is not None: e.message = '%s %s' % (err_msg, e.message) raise ValueError(e.message) if netmask < 0: msg = 'illegal netmask' if err_msg is not None: msg = '%s %s' % (err_msg, msg) raise ValueError(msg) nw_addr = ipv4_apply_mask(default_route, netmask, err_msg) return nw_addr, netmask, default_route
zenn1989/scoria-interlude
refs/heads/master
L2Jscoria-Game/data/scripts/quests/378_MagnificentFeast/__init__.py
1
# Magnificent Feast - v0.1 by DrLecter import sys from com.l2scoria.gameserver.model.quest import State from com.l2scoria.gameserver.model.quest import QuestState from com.l2scoria.gameserver.model.quest.jython import QuestJython as JQuest qn = "378_MagnificentFeast" #NPC RANSPO = 30594 #ITEMS WINE_15,WINE_30,WINE_60 = range(5956,5959) SCORE = 4421 RP_SALAD,RP_SAUCE,RP_STEAK = range(1455,1458) RP_DESSERT = 5959 #REWARDS REWARDS={ 9:[847,1,5700], 10:[846,2,0], 12:[909,1,25400], 17:[846,2,1200], 18:[879,1,6900], 20:[890,2,8500], 33:[879,1,8100], 34:[910,1,0], 36:[848,1,2200], } class Quest (JQuest) : def __init__(self,id,name,descr): JQuest.__init__(self,id,name,descr) def onEvent (self,event,st) : htmltext = event score = st.getInt("score") cond = st.getInt("cond") if event == "30594-2.htm" and cond == 0 : st.set("cond","1") st.setState(STARTED) st.playSound("ItemSound.quest_accept") elif event == "30594-4a.htm" : if st.getQuestItemsCount(WINE_15) and cond == 1 : st.takeItems(WINE_15,1) st.set("cond","2") st.set("score",str(score+1)) else : htmltext = "30594-4.htm" elif event == "30594-4b.htm" : if st.getQuestItemsCount(WINE_30) and cond == 1 : st.takeItems(WINE_30,1) st.set("cond","2") st.set("score",str(score+2)) else : htmltext = "30594-4.htm" elif event == "30594-4c.htm" : if st.getQuestItemsCount(WINE_60) and cond == 1 : st.takeItems(WINE_60,1) st.set("cond","2") st.set("score",str(score+4)) else : htmltext = "30594-4.htm" elif event == "30594-6.htm" : if st.getQuestItemsCount(SCORE) and cond == 2 : st.takeItems(SCORE,1) st.set("cond","3") else : htmltext = "30594-5.htm" elif event == "30594-8a.htm" : if st.getQuestItemsCount(RP_SALAD) and cond == 3 : st.takeItems(RP_SALAD,1) st.set("cond","4") st.set("score",str(score+8)) else : htmltext = "30594-8.htm" elif event == "30594-8b.htm" : if st.getQuestItemsCount(RP_SAUCE) and cond == 3 : st.takeItems(RP_SAUCE,1) st.set("cond","4") st.set("score",str(score+16)) else : htmltext = "30594-8.htm" elif event == "30594-8c.htm" : if st.getQuestItemsCount(RP_STEAK) and cond == 3 : st.takeItems(RP_STEAK,1) st.set("cond","4") st.set("score",str(score+32)) else : htmltext = "30594-8.htm" return htmltext def onTalk (self,npc,player): htmltext = "<html><body>You are either not carrying out your quest or don't meet the criteria.</body></html>" st = player.getQuestState(qn) if not st : return htmltext npcId = npc.getNpcId() id = st.getState() cond=st.getInt("cond") if cond == 0 : if player.getLevel() >= 20 : htmltext = "30594-1.htm" else: htmltext = "30594-0.htm" st.exitQuest(1) elif cond == 1 : htmltext = "30594-3.htm" elif cond == 2 : if st.getQuestItemsCount(SCORE) : htmltext = "30594-5a.htm" else : htmltext = "30594-5.htm" elif cond == 3 : htmltext = "30594-7.htm" elif cond == 4 : score = st.getInt("score") if st.getQuestItemsCount(RP_DESSERT) and score in REWARDS.keys() : item,qty,adena=REWARDS[score] st.giveItems(item,qty) if adena : st.giveItems(57,adena) st.takeItems(RP_DESSERT,1) st.playSound("ItemSound.quest_finish") htmltext = "30594-10.htm" st.exitQuest(1) else : htmltext = "30594-9.htm" return htmltext QUEST = Quest(378,qn,"Magnificent Feast") CREATED = State('Start', QUEST) STARTED = State('Started', QUEST) QUEST.setInitialState(CREATED) QUEST.addStartNpc(RANSPO) QUEST.addTalkId(RANSPO)
erikld/Bobo
refs/heads/master
python3/koans/a_package_folder/__init__.py
279
#!/usr/bin/env python # -*- coding: utf-8 -*- an_attribute = 1984
mrkm4ntr/incubator-airflow
refs/heads/master
tests/test_utils/asserts.py
7
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. import logging import re import traceback from collections import Counter from contextlib import contextmanager from sqlalchemy import event # Long import to not create a copy of the reference, but to refer to one place. import airflow.settings log = logging.getLogger(__name__) def assert_equal_ignore_multiple_spaces(case, first, second, msg=None): def _trim(s): return re.sub(r"\s+", " ", s.strip()) return case.assertEqual(_trim(first), _trim(second), msg) class CountQueries: """ Counts the number of queries sent to Airflow Database in a given context. Does not support multiple processes. When a new process is started in context, its queries will not be included. """ def __init__(self): self.result = Counter() def __enter__(self): event.listen(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) return self.result def __exit__(self, type_, value, tb): event.remove(airflow.settings.engine, "after_cursor_execute", self.after_cursor_execute) log.debug("Queries count: %d", sum(self.result.values())) def after_cursor_execute(self, *args, **kwargs): stack = [ f for f in traceback.extract_stack() if 'sqlalchemy' not in f.filename and __file__ != f.filename and ('session.py' not in f.filename and f.name != 'wrapper') ] stack_info = ">".join([f"{f.filename.rpartition('/')[-1]}:{f.name}" for f in stack][-3:]) lineno = stack[-1].lineno self.result[f"{stack_info}:{lineno}"] += 1 count_queries = CountQueries # pylint: disable=invalid-name @contextmanager def assert_queries_count(expected_count, message_fmt=None): with count_queries() as result: yield None count = sum(result.values()) if expected_count != count: message_fmt = ( message_fmt or "The expected number of db queries is {expected_count}. " "The current number is {current_count}.\n\n" "Recorded query locations:" ) message = message_fmt.format(current_count=count, expected_count=expected_count) for location, count in result.items(): message += f'\n\t{location}:\t{count}' raise AssertionError(message)
samedder/azure-cli
refs/heads/master
src/azure-cli-core/azure/cli/core/_profile.py
3
# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # -------------------------------------------------------------------------------------------- from __future__ import print_function import collections import errno import json import os.path from copy import deepcopy from enum import Enum import azure.cli.core.azlogging as azlogging from azure.cli.core._environment import get_config_dir from azure.cli.core._session import ACCOUNT from azure.cli.core.util import CLIError, get_file_json, in_cloud_console from azure.cli.core.cloud import get_active_cloud, set_cloud_subscription, init_known_clouds logger = azlogging.get_az_logger(__name__) # Names below are used by azure-xplat-cli to persist account information into # ~/.azure/azureProfile.json or osx/keychainer or windows secure storage, # which azure-cli will share. # Please do not rename them unless you know what you are doing. _IS_DEFAULT_SUBSCRIPTION = 'isDefault' _SUBSCRIPTION_ID = 'id' _SUBSCRIPTION_NAME = 'name' _TENANT_ID = 'tenantId' _USER_ENTITY = 'user' _USER_NAME = 'name' _SUBSCRIPTIONS = 'subscriptions' _INSTALLATION_ID = 'installationId' _ENVIRONMENT_NAME = 'environmentName' _STATE = 'state' _USER_TYPE = 'type' _USER = 'user' _SERVICE_PRINCIPAL = 'servicePrincipal' _SERVICE_PRINCIPAL_ID = 'servicePrincipalId' _SERVICE_PRINCIPAL_TENANT = 'servicePrincipalTenant' _SERVICE_PRINCIPAL_CERT_FILE = 'certificateFile' _SERVICE_PRINCIPAL_CERT_THUMBPRINT = 'thumbprint' _TOKEN_ENTRY_USER_ID = 'userId' _TOKEN_ENTRY_TOKEN_TYPE = 'tokenType' # This could mean either real access token, or client secret of a service principal # This naming is no good, but can't change because xplat-cli does so. _ACCESS_TOKEN = 'accessToken' _REFRESH_TOKEN = 'refreshToken' TOKEN_FIELDS_EXCLUDED_FROM_PERSISTENCE = ['familyName', 'givenName', 'isUserIdDisplayable', 'tenantId'] _CLIENT_ID = '04b07795-8ddb-461a-bbee-02f9e1bf7b46' _COMMON_TENANT = 'common' _MSI_ACCOUNT_NAME = 'MSI@' _TENANT_LEVEL_ACCOUNT_NAME = 'N/A(tenant level account)' def _authentication_context_factory(tenant, cache): import adal authority_url = CLOUD.endpoints.active_directory is_adfs = authority_url.lower().endswith('/adfs') if not is_adfs: authority_url = authority_url + '/' + (tenant or _COMMON_TENANT) return adal.AuthenticationContext(authority_url, cache=cache, api_version=None, validate_authority=(not is_adfs)) _AUTH_CTX_FACTORY = _authentication_context_factory init_known_clouds(force=True) CLOUD = get_active_cloud() logger.debug('Current cloud config:\n%s', str(CLOUD)) def _load_tokens_from_file(file_path): all_entries = [] if os.path.isfile(file_path): all_entries = get_file_json(file_path, throw_on_empty=False) or [] return all_entries def _delete_file(file_path): try: os.remove(file_path) except OSError as e: if e.errno != errno.ENOENT: raise class CredentialType(Enum): # pylint: disable=too-few-public-methods management = CLOUD.endpoints.management rbac = CLOUD.endpoints.active_directory_graph_resource_id class Profile(object): def __init__(self, storage=None, auth_ctx_factory=None, use_global_creds_cache=True): self._storage = storage or ACCOUNT self.auth_ctx_factory = auth_ctx_factory or _AUTH_CTX_FACTORY if use_global_creds_cache: self._creds_cache = _GLOBAL_CREDS_CACHE else: self._creds_cache = CredsCache(self.auth_ctx_factory, async_persist=False) self._management_resource_uri = CLOUD.endpoints.management self._ad_resource_uri = CLOUD.endpoints.active_directory_resource_id self._msi_creds = None def find_subscriptions_on_login(self, interactive, username, password, is_service_principal, tenant, allow_no_subscriptions=False, subscription_finder=None): from azure.cli.core._debug import allow_debug_adal_connection allow_debug_adal_connection() subscriptions = [] if not subscription_finder: subscription_finder = SubscriptionFinder(self.auth_ctx_factory, self._creds_cache.adal_token_cache) if interactive: subscriptions = subscription_finder.find_through_interactive_flow( tenant, self._ad_resource_uri) else: if is_service_principal: if not tenant: raise CLIError('Please supply tenant using "--tenant"') sp_auth = ServicePrincipalAuth(password) subscriptions = subscription_finder.find_from_service_principal_id( username, sp_auth, tenant, self._ad_resource_uri) else: subscriptions = subscription_finder.find_from_user_account( username, password, tenant, self._ad_resource_uri) if not allow_no_subscriptions and not subscriptions: raise CLIError("No subscriptions were found for '{}'. If this is expected, use " "'--allow-no-subscriptions' to have tenant level accesses".format( username)) if is_service_principal: self._creds_cache.save_service_principal_cred(sp_auth.get_entry_to_persist(username, tenant)) if self._creds_cache.adal_token_cache.has_state_changed: self._creds_cache.persist_cached_creds() if allow_no_subscriptions: t_list = [s.tenant_id for s in subscriptions] bare_tenants = [t for t in subscription_finder.tenants if t not in t_list] subscriptions = Profile._build_tenant_level_accounts(bare_tenants) if not subscriptions: return [] consolidated = Profile._normalize_properties(subscription_finder.user_id, subscriptions, is_service_principal) self._set_subscriptions(consolidated) # use deepcopy as we don't want to persist these changes to file. return deepcopy(consolidated) def find_subscriptions_in_cloud_console(self, tokens): from datetime import datetime, timedelta import jwt arm_token = tokens[0] # cloud shell gurantees that the 1st is for ARM arm_token_decoded = jwt.decode(arm_token, verify=False, algorithms=['RS256']) tenant = arm_token_decoded['tid'] user_id = arm_token_decoded['unique_name'].split('#')[-1] subscription_finder = SubscriptionFinder(self.auth_ctx_factory, None) subscriptions = subscription_finder.find_from_raw_token(tenant, arm_token) consolidated = Profile._normalize_properties(user_id, subscriptions, is_service_principal=False) self._set_subscriptions(consolidated) # construct token entries to cache decoded_tokens = [arm_token_decoded] for t in tokens[1:]: decoded_tokens.append(jwt.decode(t, verify=False, algorithms=['RS256'])) final_tokens = [] for t in decoded_tokens: final_tokens.append({ '_clientId': _CLIENT_ID, 'expiresIn': '3600', 'expiresOn': str(datetime.utcnow() + timedelta(seconds=3600 * 24)), 'userId': t['unique_name'].split('#')[-1], '_authority': CLOUD.endpoints.active_directory.rstrip('/') + '/' + t['tid'], 'resource': t['aud'], 'isMRRT': True, 'accessToken': tokens[decoded_tokens.index(t)], 'tokenType': 'Bearer', 'oid': t['oid'] }) # merging with existing cached ones for t in final_tokens: cached_tokens = [entry for _, entry in self._creds_cache.adal_token_cache.read_items()] to_delete = [c for c in cached_tokens if (c['_clientId'].lower() == t['_clientId'].lower() and c['resource'].lower() == t['resource'].lower() and c['_authority'].lower() == t['_authority'].lower() and c['userId'].lower() == t['userId'].lower())] if to_delete: self._creds_cache.adal_token_cache.remove(to_delete) self._creds_cache.adal_token_cache.add(final_tokens) self._creds_cache.persist_cached_creds() return deepcopy(consolidated) @staticmethod def _normalize_properties(user, subscriptions, is_service_principal): consolidated = [] for s in subscriptions: consolidated.append({ _SUBSCRIPTION_ID: s.id.rpartition('/')[2], _SUBSCRIPTION_NAME: s.display_name, _STATE: s.state.value, _USER_ENTITY: { _USER_NAME: user, _USER_TYPE: _SERVICE_PRINCIPAL if is_service_principal else _USER }, _IS_DEFAULT_SUBSCRIPTION: False, _TENANT_ID: s.tenant_id, _ENVIRONMENT_NAME: CLOUD.name }) return consolidated @staticmethod def _build_tenant_level_accounts(tenants): result = [] for t in tenants: s = Profile._new_account() s.id = '/subscriptions/' + t s.subscription = t s.tenant_id = t s.display_name = _TENANT_LEVEL_ACCOUNT_NAME result.append(s) return result @staticmethod def _new_account(): from azure.cli.core.profiles import get_sdk, ResourceType SubscriptionType, StateType = get_sdk(ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS, 'Subscription', 'SubscriptionState', mod='models') s = SubscriptionType() s.state = StateType.enabled return s def find_subscriptions_in_vm_with_msi(self, msi_port): import jwt _, token, _ = Profile.get_msi_token(CLOUD.endpoints.active_directory_resource_id, msi_port) logger.info('MSI: token was retrieved. Now trying to initialize local accounts...') decode = jwt.decode(token, verify=False, algorithms=['RS256']) tenant = decode['tid'] subscription_finder = SubscriptionFinder(self.auth_ctx_factory, None) subscriptions = subscription_finder.find_from_raw_token(tenant, token) if not subscriptions: raise CLIError('No access was configured for the VM, hence no subscriptions were found') consolidated = Profile._normalize_properties('VM', subscriptions, is_service_principal=True) for s in consolidated: # use a special name to trigger a special token acquisition s[_SUBSCRIPTION_NAME] = "{}{}".format(_MSI_ACCOUNT_NAME, msi_port) self._set_subscriptions(consolidated) return deepcopy(consolidated) def _set_subscriptions(self, new_subscriptions, merge=True): existing_ones = self.load_cached_subscriptions(all_clouds=True) active_one = next((x for x in existing_ones if x.get(_IS_DEFAULT_SUBSCRIPTION)), None) active_subscription_id = active_one[_SUBSCRIPTION_ID] if active_one else None active_cloud = get_active_cloud() default_sub_id = None # merge with existing ones if merge: dic = collections.OrderedDict((x[_SUBSCRIPTION_ID], x) for x in existing_ones) else: dic = collections.OrderedDict() dic.update((x[_SUBSCRIPTION_ID], x) for x in new_subscriptions) subscriptions = list(dic.values()) if subscriptions: if active_one: new_active_one = next( (x for x in new_subscriptions if x[_SUBSCRIPTION_ID] == active_subscription_id), None) for s in subscriptions: s[_IS_DEFAULT_SUBSCRIPTION] = False if not new_active_one: new_active_one = Profile._pick_working_subscription(new_subscriptions) else: new_active_one = Profile._pick_working_subscription(new_subscriptions) new_active_one[_IS_DEFAULT_SUBSCRIPTION] = True default_sub_id = new_active_one[_SUBSCRIPTION_ID] set_cloud_subscription(active_cloud.name, default_sub_id) self._storage[_SUBSCRIPTIONS] = subscriptions @staticmethod def _pick_working_subscription(subscriptions): from azure.mgmt.resource.subscriptions.models import SubscriptionState s = next((x for x in subscriptions if x['state'] == SubscriptionState.enabled.value), None) return s or subscriptions[0] def set_active_subscription(self, subscription): # take id or name subscriptions = self.load_cached_subscriptions(all_clouds=True) active_cloud = get_active_cloud() subscription = subscription.lower() result = [x for x in subscriptions if subscription in [x[_SUBSCRIPTION_ID].lower(), x[_SUBSCRIPTION_NAME].lower()] and x[_ENVIRONMENT_NAME] == active_cloud.name] if len(result) != 1: raise CLIError("The subscription of '{}' does not exist or has more than" " one match in cloud '{}'.".format(subscription, active_cloud.name)) for s in subscriptions: s[_IS_DEFAULT_SUBSCRIPTION] = False result[0][_IS_DEFAULT_SUBSCRIPTION] = True set_cloud_subscription(active_cloud.name, result[0][_SUBSCRIPTION_ID]) self._storage[_SUBSCRIPTIONS] = subscriptions def logout(self, user_or_sp): subscriptions = self.load_cached_subscriptions(all_clouds=True) result = [x for x in subscriptions if user_or_sp.lower() == x[_USER_ENTITY][_USER_NAME].lower()] subscriptions = [x for x in subscriptions if x not in result] self._storage[_SUBSCRIPTIONS] = subscriptions self._creds_cache.remove_cached_creds(user_or_sp) def logout_all(self): self._storage[_SUBSCRIPTIONS] = [] self._creds_cache.remove_all_cached_creds() def load_cached_subscriptions(self, all_clouds=False): subscriptions = self._storage.get(_SUBSCRIPTIONS) or [] active_cloud = get_active_cloud() cached_subscriptions = [sub for sub in subscriptions if all_clouds or sub[_ENVIRONMENT_NAME] == active_cloud.name] # use deepcopy as we don't want to persist these changes to file. return deepcopy(cached_subscriptions) def get_current_account_user(self): try: active_account = self.get_subscription() except CLIError: raise CLIError('There are no active accounts.') return active_account[_USER_ENTITY][_USER_NAME] def get_subscription(self, subscription=None): # take id or name subscriptions = self.load_cached_subscriptions() if not subscriptions: raise CLIError("Please run 'az login' to setup account.") result = [x for x in subscriptions if ( not subscription and x.get(_IS_DEFAULT_SUBSCRIPTION) or subscription and subscription.lower() in [x[_SUBSCRIPTION_ID].lower(), x[ _SUBSCRIPTION_NAME].lower()])] if len(result) != 1: raise CLIError("Please run 'az account set' to select active account.") return result[0] def get_subscription_id(self): return self.get_subscription()[_SUBSCRIPTION_ID] def get_access_token_for_resource(self, username, tenant, resource): tenant = tenant or 'common' _, access_token, _ = self._creds_cache.retrieve_token_for_user( username, tenant, resource) return access_token def get_login_credentials(self, resource=CLOUD.endpoints.active_directory_resource_id, subscription_id=None): account = self.get_subscription(subscription_id) user_type = account[_USER_ENTITY][_USER_TYPE] username_or_sp_id = account[_USER_ENTITY][_USER_NAME] def _retrieve_token(): if account[_SUBSCRIPTION_NAME].startswith(_MSI_ACCOUNT_NAME): return Profile.get_msi_token(resource, account[_SUBSCRIPTION_NAME][len(_MSI_ACCOUNT_NAME):]) elif user_type == _USER: return self._creds_cache.retrieve_token_for_user(username_or_sp_id, account[_TENANT_ID], resource) return self._creds_cache.retrieve_token_for_service_principal(username_or_sp_id, resource) from azure.cli.core.adal_authentication import AdalAuthentication auth_object = AdalAuthentication(_retrieve_token) return (auth_object, str(account[_SUBSCRIPTION_ID]), str(account[_TENANT_ID])) def get_refresh_token(self, resource=CLOUD.endpoints.active_directory_resource_id, subscription=None): account = self.get_subscription(subscription) user_type = account[_USER_ENTITY][_USER_TYPE] username_or_sp_id = account[_USER_ENTITY][_USER_NAME] if user_type == _USER: _, _, token_entry = self._creds_cache.retrieve_token_for_user( username_or_sp_id, account[_TENANT_ID], resource) return None, token_entry.get(_REFRESH_TOKEN), token_entry[_ACCESS_TOKEN], str(account[_TENANT_ID]) sp_secret = self._creds_cache.retrieve_secret_of_service_principal(username_or_sp_id) return username_or_sp_id, sp_secret, None, str(account[_TENANT_ID]) def get_raw_token(self, resource, subscription=None): account = self.get_subscription(subscription) user_type = account[_USER_ENTITY][_USER_TYPE] username_or_sp_id = account[_USER_ENTITY][_USER_NAME] if user_type == _USER: creds = self._creds_cache.retrieve_token_for_user(username_or_sp_id, account[_TENANT_ID], resource) else: creds = self._creds_cache.retrieve_token_for_service_principal(username_or_sp_id, resource) return (creds, str(account[_SUBSCRIPTION_ID]), str(account[_TENANT_ID])) def refresh_accounts(self, subscription_finder=None): subscriptions = self.load_cached_subscriptions() to_refresh = [s for s in subscriptions if not s[_SUBSCRIPTION_NAME].startswith(_MSI_ACCOUNT_NAME)] not_to_refresh = [s for s in subscriptions if s not in to_refresh] from azure.cli.core._debug import allow_debug_adal_connection allow_debug_adal_connection() subscription_finder = subscription_finder or SubscriptionFinder(self.auth_ctx_factory, self._creds_cache.adal_token_cache) refreshed_list = set() result = [] for s in to_refresh: user_name = s[_USER_ENTITY][_USER_NAME] if user_name in refreshed_list: continue refreshed_list.add(user_name) is_service_principal = (s[_USER_ENTITY][_USER_TYPE] == _SERVICE_PRINCIPAL) tenant = s[_TENANT_ID] subscriptions = [] try: if is_service_principal: sp_auth = ServicePrincipalAuth(self._creds_cache.retrieve_secret_of_service_principal(user_name)) subscriptions = subscription_finder.find_from_service_principal_id(user_name, sp_auth, tenant, self._ad_resource_uri) else: subscriptions = subscription_finder.find_from_user_account(user_name, None, None, self._ad_resource_uri) except Exception as ex: # pylint: disable=broad-except logger.warning("Refreshing for '%s' failed with an error '%s'. The existing accounts were not " "modified. You can run 'az login' later to explictly refresh them", user_name, ex) result += deepcopy([r for r in to_refresh if r[_USER_ENTITY][_USER_NAME] == user_name]) continue if not subscriptions: if s[_SUBSCRIPTION_NAME] == _TENANT_LEVEL_ACCOUNT_NAME: subscriptions = Profile._build_tenant_level_accounts([s[_TENANT_ID]]) if not subscriptions: continue consolidated = Profile._normalize_properties(subscription_finder.user_id, subscriptions, is_service_principal) result += consolidated if self._creds_cache.adal_token_cache.has_state_changed: self._creds_cache.persist_cached_creds() result = result + not_to_refresh self._set_subscriptions(result, merge=False) def get_sp_auth_info(self, subscription_id=None, name=None, password=None, cert_file=None): from collections import OrderedDict account = self.get_subscription(subscription_id) # is the credential created through command like 'create-for-rbac'? result = OrderedDict() if name and (password or cert_file): result['clientId'] = name if password: result['clientSecret'] = password else: result['clientCertificate'] = cert_file result['subscriptionId'] = subscription_id or account[_SUBSCRIPTION_ID] else: # has logged in through cli user_type = account[_USER_ENTITY].get(_USER_TYPE) if user_type == _SERVICE_PRINCIPAL: result['clientId'] = account[_USER_ENTITY][_USER_NAME] sp_auth = ServicePrincipalAuth(self._creds_cache.retrieve_secret_of_service_principal( account[_USER_ENTITY][_USER_NAME])) secret = getattr(sp_auth, 'secret', None) if secret: result['clientSecret'] = secret else: # we can output 'clientCertificateThumbprint' if asked result['clientCertificate'] = sp_auth.certificate_file result['subscriptionId'] = account[_SUBSCRIPTION_ID] else: raise CLIError('SDK Auth file is only applicable on service principals') result[_TENANT_ID] = account[_TENANT_ID] endpoint_mappings = OrderedDict() # use OrderedDict to control the output sequence endpoint_mappings['active_directory'] = 'activeDirectoryEndpointUrl' endpoint_mappings['resource_manager'] = 'resourceManagerEndpointUrl' endpoint_mappings['active_directory_graph_resource_id'] = 'activeDirectoryGraphResourceId' endpoint_mappings['sql_management'] = 'sqlManagementEndpointUrl' endpoint_mappings['gallery'] = 'galleryEndpointUrl' endpoint_mappings['management'] = 'managementEndpointUrl' for e in endpoint_mappings: result[endpoint_mappings[e]] = getattr(CLOUD.endpoints, e) return result def get_installation_id(self): installation_id = self._storage.get(_INSTALLATION_ID) if not installation_id: import uuid installation_id = str(uuid.uuid1()) self._storage[_INSTALLATION_ID] = installation_id return installation_id @staticmethod def get_msi_token(resource, port): import requests import time request_uri = 'http://localhost:{}/oauth2/token'.format(port) payload = { 'resource': resource } # retry as the token endpoint might not be available yet, one example is you use CLI in a # custom script extension of VMSS, which might get provisioned before the MSI extensioon while True: err = None try: result = requests.post(request_uri, data=payload, headers={'Metadata': 'true'}) logger.debug("MSI: Retrieving a token from %s, with payload %s", request_uri, payload) if result.status_code != 200: err = result.text except Exception as ex: # pylint: disable=broad-except err = str(ex) if err: # we might need some error code checking to avoid silly waiting. The bottom line is users can # always press ctrl+c to stop it logger.warning("MSI: Failed to retrieve a token from '%s' with an error of '%s'. This could be caused " "by the MSI extension not yet fullly provisioned. Will retry in 60 seconds...", request_uri, err) time.sleep(60) else: logger.debug('MSI: token retrieved') break token_entry = json.loads(result.content.decode()) return token_entry['token_type'], token_entry['access_token'], token_entry class SubscriptionFinder(object): '''finds all subscriptions for a user or service principal''' def __init__(self, auth_context_factory, adal_token_cache, arm_client_factory=None): self._adal_token_cache = adal_token_cache self._auth_context_factory = auth_context_factory self.user_id = None # will figure out after log user in def create_arm_client_factory(credentials): if arm_client_factory: return arm_client_factory(credentials) from azure.cli.core.profiles._shared import get_client_class from azure.cli.core.profiles import get_api_version, ResourceType from azure.cli.core._debug import change_ssl_cert_verification client_type = get_client_class(ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS) api_version = get_api_version(ResourceType.MGMT_RESOURCE_SUBSCRIPTIONS) return change_ssl_cert_verification(client_type(credentials, api_version=api_version, base_url=CLOUD.endpoints.resource_manager)) self._arm_client_factory = create_arm_client_factory self.tenants = [] def find_from_user_account(self, username, password, tenant, resource): context = self._create_auth_context(tenant) if password: token_entry = context.acquire_token_with_username_password(resource, username, password, _CLIENT_ID) else: # when refresh account, we will leverage local cached tokens token_entry = context.acquire_token(resource, username, _CLIENT_ID) self.user_id = token_entry[_TOKEN_ENTRY_USER_ID] if tenant is None: result = self._find_using_common_tenant(token_entry[_ACCESS_TOKEN], resource) else: result = self._find_using_specific_tenant(tenant, token_entry[_ACCESS_TOKEN]) return result def find_through_interactive_flow(self, tenant, resource): context = self._create_auth_context(tenant) code = context.acquire_user_code(resource, _CLIENT_ID) logger.warning(code['message']) token_entry = context.acquire_token_with_device_code(resource, code, _CLIENT_ID) self.user_id = token_entry[_TOKEN_ENTRY_USER_ID] if tenant is None: result = self._find_using_common_tenant(token_entry[_ACCESS_TOKEN], resource) else: result = self._find_using_specific_tenant(tenant, token_entry[_ACCESS_TOKEN]) return result def find_from_service_principal_id(self, client_id, sp_auth, tenant, resource): context = self._create_auth_context(tenant, False) token_entry = sp_auth.acquire_token(context, resource, client_id) self.user_id = client_id result = self._find_using_specific_tenant(tenant, token_entry[_ACCESS_TOKEN]) self.tenants = [tenant] return result # only occur inside cloud console or VM with identity def find_from_raw_token(self, tenant, token): # decode the token, so we know the tenant result = self._find_using_specific_tenant(tenant, token) self.tenants = [tenant] return result def _create_auth_context(self, tenant, use_token_cache=True): token_cache = self._adal_token_cache if use_token_cache else None return self._auth_context_factory(tenant, token_cache) def _find_using_common_tenant(self, access_token, resource): import adal from msrest.authentication import BasicTokenAuthentication all_subscriptions = [] token_credential = BasicTokenAuthentication({'access_token': access_token}) client = self._arm_client_factory(token_credential) tenants = client.tenants.list() for t in tenants: tenant_id = t.tenant_id temp_context = self._create_auth_context(tenant_id) try: temp_credentials = temp_context.acquire_token(resource, self.user_id, _CLIENT_ID) except adal.AdalError as ex: # because user creds went through the 'common' tenant, the error here must be # tenant specific, like the account was disabled. For such errors, we will continue # with other tenants. logger.warning("Failed to authenticate '%s' due to error '%s'", t, ex) continue subscriptions = self._find_using_specific_tenant( tenant_id, temp_credentials[_ACCESS_TOKEN]) all_subscriptions.extend(subscriptions) return all_subscriptions def _find_using_specific_tenant(self, tenant, access_token): from msrest.authentication import BasicTokenAuthentication token_credential = BasicTokenAuthentication({'access_token': access_token}) client = self._arm_client_factory(token_credential) subscriptions = client.subscriptions.list() all_subscriptions = [] for s in subscriptions: setattr(s, 'tenant_id', tenant) all_subscriptions.append(s) self.tenants.append(tenant) return all_subscriptions class CredsCache(object): '''Caches AAD tokena and service principal secrets, and persistence will also be handled ''' def __init__(self, auth_ctx_factory=None, async_persist=True): # AZURE_ACCESS_TOKEN_FILE is used by Cloud Console and not meant to be user configured self._token_file = (os.environ.get('AZURE_ACCESS_TOKEN_FILE', None) or os.path.join(get_config_dir(), 'accessTokens.json')) self._service_principal_creds = [] self._auth_ctx_factory = auth_ctx_factory self._adal_token_cache_attr = None self._should_flush_to_disk = False self._async_persist = async_persist if async_persist: import atexit atexit.register(self.flush_to_disk) def persist_cached_creds(self): self._should_flush_to_disk = True if not self._async_persist: self.flush_to_disk() self.adal_token_cache.has_state_changed = False def flush_to_disk(self): if self._should_flush_to_disk: with os.fdopen(os.open(self._token_file, os.O_RDWR | os.O_CREAT | os.O_TRUNC, 0o600), 'w+') as cred_file: items = self.adal_token_cache.read_items() all_creds = [entry for _, entry in items] # trim away useless fields (needed for cred sharing with xplat) for i in all_creds: for key in TOKEN_FIELDS_EXCLUDED_FROM_PERSISTENCE: i.pop(key, None) all_creds.extend(self._service_principal_creds) cred_file.write(json.dumps(all_creds)) def retrieve_token_for_user(self, username, tenant, resource): context = self._auth_ctx_factory(tenant, cache=self.adal_token_cache) token_entry = context.acquire_token(resource, username, _CLIENT_ID) if not token_entry: raise CLIError("Could not retrieve token from local cache.{}".format( " Please run 'az login'." if not in_cloud_console() else '')) if self.adal_token_cache.has_state_changed: self.persist_cached_creds() return (token_entry[_TOKEN_ENTRY_TOKEN_TYPE], token_entry[_ACCESS_TOKEN], token_entry) def retrieve_token_for_service_principal(self, sp_id, resource): self.load_adal_token_cache() matched = [x for x in self._service_principal_creds if sp_id == x[_SERVICE_PRINCIPAL_ID]] if not matched: raise CLIError("Please run 'az account set' to select active account.") cred = matched[0] context = self._auth_ctx_factory(cred[_SERVICE_PRINCIPAL_TENANT], None) sp_auth = ServicePrincipalAuth(cred.get(_ACCESS_TOKEN, None) or cred.get(_SERVICE_PRINCIPAL_CERT_FILE, None)) token_entry = sp_auth.acquire_token(context, resource, sp_id) return (token_entry[_TOKEN_ENTRY_TOKEN_TYPE], token_entry[_ACCESS_TOKEN], token_entry) def retrieve_secret_of_service_principal(self, sp_id): self.load_adal_token_cache() matched = [x for x in self._service_principal_creds if sp_id == x[_SERVICE_PRINCIPAL_ID]] if not matched: raise CLIError("No matched service principal found") cred = matched[0] return cred[_ACCESS_TOKEN] @property def adal_token_cache(self): return self.load_adal_token_cache() def load_adal_token_cache(self): if self._adal_token_cache_attr is None: import adal all_entries = _load_tokens_from_file(self._token_file) self._load_service_principal_creds(all_entries) real_token = [x for x in all_entries if x not in self._service_principal_creds] self._adal_token_cache_attr = adal.TokenCache(json.dumps(real_token)) return self._adal_token_cache_attr def save_service_principal_cred(self, sp_entry): self.load_adal_token_cache() matched = [x for x in self._service_principal_creds if sp_entry[_SERVICE_PRINCIPAL_ID] == x[_SERVICE_PRINCIPAL_ID] and sp_entry[_SERVICE_PRINCIPAL_TENANT] == x[_SERVICE_PRINCIPAL_TENANT]] state_changed = False if matched: # pylint: disable=line-too-long if (sp_entry.get(_ACCESS_TOKEN, None) != getattr(matched[0], _ACCESS_TOKEN, None) or sp_entry.get(_SERVICE_PRINCIPAL_CERT_FILE, None) != getattr(matched[0], _SERVICE_PRINCIPAL_CERT_FILE, None)): self._service_principal_creds.remove(matched[0]) self._service_principal_creds.append(matched[0]) state_changed = True else: self._service_principal_creds.append(sp_entry) state_changed = True if state_changed: self.persist_cached_creds() def _load_service_principal_creds(self, creds): for c in creds: if c.get(_SERVICE_PRINCIPAL_ID): self._service_principal_creds.append(c) return self._service_principal_creds def remove_cached_creds(self, user_or_sp): state_changed = False # clear AAD tokens tokens = self.adal_token_cache.find({_TOKEN_ENTRY_USER_ID: user_or_sp}) if tokens: state_changed = True self.adal_token_cache.remove(tokens) # clear service principal creds matched = [x for x in self._service_principal_creds if x[_SERVICE_PRINCIPAL_ID] == user_or_sp] if matched: state_changed = True self._service_principal_creds = [x for x in self._service_principal_creds if x not in matched] if state_changed: self.persist_cached_creds() def remove_all_cached_creds(self): # we can clear file contents, but deleting it is simpler _delete_file(self._token_file) _GLOBAL_CREDS_CACHE = CredsCache(_AUTH_CTX_FACTORY, async_persist=True) class ServicePrincipalAuth(object): def __init__(self, password_arg_value): if not password_arg_value: raise CLIError('missing secret or certificate in order to ' 'authnenticate through a service principal') if os.path.isfile(password_arg_value): certificate_file = password_arg_value from OpenSSL.crypto import load_certificate, FILETYPE_PEM self.certificate_file = certificate_file with open(certificate_file, 'r') as file_reader: self.cert_file_string = file_reader.read() cert = load_certificate(FILETYPE_PEM, self.cert_file_string) self.thumbprint = cert.digest("sha1").decode() else: self.secret = password_arg_value def acquire_token(self, authentication_context, resource, client_id): if hasattr(self, 'secret'): return authentication_context.acquire_token_with_client_credentials(resource, client_id, self.secret) return authentication_context.acquire_token_with_client_certificate(resource, client_id, self.cert_file_string, self.thumbprint) def get_entry_to_persist(self, sp_id, tenant): entry = { _SERVICE_PRINCIPAL_ID: sp_id, _SERVICE_PRINCIPAL_TENANT: tenant, } if hasattr(self, 'secret'): entry[_ACCESS_TOKEN] = self.secret else: entry[_SERVICE_PRINCIPAL_CERT_FILE] = self.certificate_file entry[_SERVICE_PRINCIPAL_CERT_THUMBPRINT] = self.thumbprint return entry
gpapaz/eve-wspace
refs/heads/develop
evewspace/POS/urls.py
9
# Eve W-Space # Copyright 2014 Andrew Austin and contributors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django.conf.urls import patterns, include, url pospatterns = patterns('POS.views', url(r'remove/$', 'remove_pos'), url(r'edit/$', 'edit_pos'), ) syspatterns = patterns('POS.views', url(r'(?P<posID>\d+)/', include(pospatterns)), url(r'add/$', 'add_pos'), url(r'$', 'get_pos_list'), ) urlpatterns = patterns('POS.views', url(r'(?P<msID>\d+)/', include(syspatterns)), )
mantl/mantl
refs/heads/master
plugins/inventory/terraform.py
7
#!/usr/bin/env python # # Copyright 2015 Cisco Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """\ Dynamic inventory for Terraform - finds all `.tfstate` files below the working directory and generates an inventory based on them. """ from __future__ import unicode_literals, print_function import argparse from collections import defaultdict from functools import wraps import json import os import re VERSION = '0.3.0pre' def tfstates(root=None): root = root or os.getcwd() for dirpath, _, filenames in os.walk(root): for name in filenames: if os.path.splitext(name)[-1] == '.tfstate': yield os.path.join(dirpath, name) def iterresources(filenames): for filename in filenames: with open(filename, 'r') as json_file: state = json.load(json_file) for module in state['modules']: name = module['path'][-1] for key, resource in module['resources'].items(): yield name, key, resource ## READ RESOURCES PARSERS = {} def _clean_dc(dcname): # Consul DCs are strictly alphanumeric with underscores and hyphens - # ensure that the consul_dc attribute meets these requirements. return re.sub('[^\w_\-]', '-', dcname) def iterhosts(resources): '''yield host tuples of (name, attributes, groups)''' for module_name, key, resource in resources: resource_type, name = key.split('.', 1) try: parser = PARSERS[resource_type] except KeyError: continue yield parser(resource, module_name) def parses(prefix): def inner(func): PARSERS[prefix] = func return func return inner def calculate_mantl_vars(func): """calculate Mantl vars""" @wraps(func) def inner(*args, **kwargs): name, attrs, groups = func(*args, **kwargs) # attrs if attrs.get('role', '') == 'control': attrs['consul_is_server'] = True else: attrs['consul_is_server'] = False # groups if attrs.get('publicly_routable', False): groups.append('publicly_routable') return name, attrs, groups return inner def _parse_prefix(source, prefix, sep='.'): for compkey, value in source.items(): try: curprefix, rest = compkey.split(sep, 1) except ValueError: continue if curprefix != prefix or rest == '#': continue yield rest, value def parse_attr_list(source, prefix, sep='.'): attrs = defaultdict(dict) for compkey, value in _parse_prefix(source, prefix, sep): idx, key = compkey.split(sep, 1) attrs[idx][key] = value return attrs.values() def parse_dict(source, prefix, sep='.'): return dict(_parse_prefix(source, prefix, sep)) def parse_list(source, prefix, sep='.'): return [value for _, value in _parse_prefix(source, prefix, sep)] def parse_bool(string_form): token = string_form.lower()[0] if token == 't': return True elif token == 'f': return False else: raise ValueError('could not convert %r to a bool' % string_form) @parses('triton_machine') @calculate_mantl_vars def triton_machine(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs.get('name') groups = [] attrs = { 'id': raw_attrs['id'], 'dataset': raw_attrs['dataset'], 'disk': raw_attrs['disk'], 'firewall_enabled': parse_bool(raw_attrs['firewall_enabled']), 'image': raw_attrs['image'], 'ips': parse_list(raw_attrs, 'ips'), 'memory': raw_attrs['memory'], 'name': raw_attrs['name'], 'networks': parse_list(raw_attrs, 'networks'), 'package': raw_attrs['package'], 'primary_ip': raw_attrs['primaryip'], 'root_authorized_keys': raw_attrs['root_authorized_keys'], 'state': raw_attrs['state'], 'tags': parse_dict(raw_attrs, 'tags'), 'type': raw_attrs['type'], 'user_data': raw_attrs['user_data'], 'user_script': raw_attrs['user_script'], # ansible 'ansible_ssh_host': raw_attrs['primaryip'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', # it's "root" on Triton by default # generic 'public_ipv4': raw_attrs['primaryip'], 'provider': 'triton', } # private IPv4 for ip in attrs['ips']: if ip.startswith('10') or ip.startswith('192.168'): # private IPs attrs['private_ipv4'] = ip break if 'private_ipv4' not in attrs: attrs['private_ipv4'] = attrs['public_ipv4'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['tags'].get('dc', 'none')), 'role': attrs['tags'].get('role', 'none'), 'ansible_python_interpreter': attrs['tags'].get('python_bin', 'python') }) # add groups based on attrs groups.append('triton_image=' + attrs['image']) groups.append('triton_package=' + attrs['package']) groups.append('triton_state=' + attrs['state']) groups.append('triton_firewall_enabled=%s' % attrs['firewall_enabled']) groups.extend('triton_tags_%s=%s' % item for item in attrs['tags'].items()) groups.extend('triton_network=' + network for network in attrs['networks']) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('digitalocean_droplet') @calculate_mantl_vars def digitalocean_host(resource, tfvars=None): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ipv4_address': raw_attrs['ipv4_address'], 'locked': parse_bool(raw_attrs['locked']), 'metadata': json.loads(raw_attrs.get('user_data', '{}')), 'region': raw_attrs['region'], 'size': raw_attrs['size'], 'ssh_keys': parse_list(raw_attrs, 'ssh_keys'), 'status': raw_attrs['status'], # ansible 'ansible_ssh_host': raw_attrs['ipv4_address'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', # it's always "root" on DO # generic 'public_ipv4': raw_attrs['ipv4_address'], 'private_ipv4': raw_attrs.get('ipv4_address_private', raw_attrs['ipv4_address']), 'provider': 'digitalocean', } # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', attrs['region'])), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # add groups based on attrs groups.append('do_image=' + attrs['image']) groups.append('do_locked=%s' % attrs['locked']) groups.append('do_region=' + attrs['region']) groups.append('do_size=' + attrs['size']) groups.append('do_status=' + attrs['status']) groups.extend('do_metadata_%s=%s' % item for item in attrs['metadata'].items()) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('softlayer_virtualserver') @calculate_mantl_vars def softlayer_host(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ipv4_address': raw_attrs['ipv4_address'], 'metadata': json.loads(raw_attrs.get('user_data', '{}')), 'region': raw_attrs['region'], 'ram': raw_attrs['ram'], 'cpu': raw_attrs['cpu'], 'ssh_keys': parse_list(raw_attrs, 'ssh_keys'), 'public_ipv4': raw_attrs['ipv4_address'], 'private_ipv4': raw_attrs['ipv4_address_private'], 'ansible_ssh_host': raw_attrs['ipv4_address'], 'ansible_ssh_port': 22, 'ansible_ssh_user': 'root', 'provider': 'softlayer', } # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', attrs['region'])), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('openstack_compute_instance_v2') @calculate_mantl_vars def openstack_host(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs['name'] groups = [] attrs = { 'access_ip_v4': raw_attrs['access_ip_v4'], 'access_ip_v6': raw_attrs['access_ip_v6'], 'flavor': parse_dict(raw_attrs, 'flavor', sep='_'), 'id': raw_attrs['id'], 'image': parse_dict(raw_attrs, 'image', sep='_'), 'key_pair': raw_attrs['key_pair'], 'metadata': parse_dict(raw_attrs, 'metadata'), 'network': parse_attr_list(raw_attrs, 'network'), 'region': raw_attrs.get('region', ''), 'security_groups': parse_list(raw_attrs, 'security_groups'), # ansible 'ansible_ssh_port': 22, # workaround for an OpenStack bug where hosts have a different domain # after they're restarted 'host_domain': 'novalocal', 'use_host_domain': True, # generic 'public_ipv4': raw_attrs['access_ip_v4'], 'private_ipv4': raw_attrs['access_ip_v4'], 'provider': 'openstack', } if 'floating_ip' in raw_attrs: attrs['private_ipv4'] = raw_attrs['network.0.fixed_ip_v4'] try: attrs.update({ 'ansible_ssh_host': raw_attrs['access_ip_v4'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', 'publicly_routable': False}) # attrs specific to Ansible if 'metadata.ssh_user' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['metadata.ssh_user'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # add groups based on attrs groups.append('os_image=' + attrs['image']['name']) groups.append('os_flavor=' + attrs['flavor']['name']) groups.extend('os_metadata_%s=%s' % item for item in attrs['metadata'].items()) groups.append('os_region=' + attrs['region']) # groups specific to Mantl groups.append('role=' + attrs['metadata'].get('role', 'none')) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('aws_instance') @calculate_mantl_vars def aws_host(resource, module_name): name = resource['primary']['attributes']['tags.Name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'ami': raw_attrs['ami'], 'availability_zone': raw_attrs['availability_zone'], 'ebs_block_device': parse_attr_list(raw_attrs, 'ebs_block_device'), 'ebs_optimized': parse_bool(raw_attrs['ebs_optimized']), 'ephemeral_block_device': parse_attr_list(raw_attrs, 'ephemeral_block_device'), 'id': raw_attrs['id'], 'key_name': raw_attrs['key_name'], 'private': parse_dict(raw_attrs, 'private', sep='_'), 'public': parse_dict(raw_attrs, 'public', sep='_'), 'root_block_device': parse_attr_list(raw_attrs, 'root_block_device'), 'security_groups': parse_list(raw_attrs, 'security_groups'), 'subnet': parse_dict(raw_attrs, 'subnet', sep='_'), 'tags': parse_dict(raw_attrs, 'tags'), 'tenancy': raw_attrs['tenancy'], 'vpc_security_group_ids': parse_list(raw_attrs, 'vpc_security_group_ids'), # ansible-specific 'ansible_ssh_port': 22, 'ansible_ssh_host': raw_attrs['public_ip'], # generic 'public_ipv4': raw_attrs['public_ip'], 'private_ipv4': raw_attrs['private_ip'], 'provider': 'aws', } # attrs specific to Ansible if 'tags.sshUser' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['tags.sshUser'] if 'tags.sshPrivateIp' in raw_attrs: attrs['ansible_ssh_host'] = raw_attrs['private_ip'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['tags'].get('dc', module_name)), 'role': attrs['tags'].get('role', 'none'), 'ansible_python_interpreter': attrs['tags'].get('python_bin','python') }) # groups specific to Mantl groups.extend(['aws_ami=' + attrs['ami'], 'aws_az=' + attrs['availability_zone'], 'aws_key_name=' + attrs['key_name'], 'aws_tenancy=' + attrs['tenancy']]) groups.extend('aws_tag_%s=%s' % item for item in attrs['tags'].items()) groups.extend('aws_vpc_security_group=' + group for group in attrs['vpc_security_group_ids']) groups.extend('aws_subnet_%s=%s' % subnet for subnet in attrs['subnet'].items()) # groups specific to Mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('google_compute_instance') @calculate_mantl_vars def gce_host(resource, module_name): name = resource['primary']['id'] raw_attrs = resource['primary']['attributes'] groups = [] # network interfaces interfaces = parse_attr_list(raw_attrs, 'network_interface') for interface in interfaces: interface['access_config'] = parse_attr_list(interface, 'access_config') for key in interface.keys(): if '.' in key: del interface[key] # general attrs attrs = { 'can_ip_forward': raw_attrs['can_ip_forward'] == 'true', 'disks': parse_attr_list(raw_attrs, 'disk'), 'machine_type': raw_attrs['machine_type'], 'metadata': parse_dict(raw_attrs, 'metadata'), 'network': parse_attr_list(raw_attrs, 'network'), 'network_interface': interfaces, 'self_link': raw_attrs['self_link'], 'service_account': parse_attr_list(raw_attrs, 'service_account'), 'tags': parse_list(raw_attrs, 'tags'), 'zone': raw_attrs['zone'], # ansible 'ansible_ssh_port': 22, 'provider': 'gce', } # attrs specific to Ansible if 'metadata.ssh_user' in raw_attrs: attrs['ansible_ssh_user'] = raw_attrs['metadata.ssh_user'] # attrs specific to Mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) try: attrs.update({ 'ansible_ssh_host': interfaces[0]['access_config'][0]['nat_ip'] or interfaces[0]['access_config'][0]['assigned_nat_ip'], 'public_ipv4': interfaces[0]['access_config'][0]['nat_ip'] or interfaces[0]['access_config'][0]['assigned_nat_ip'], 'private_ipv4': interfaces[0]['address'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', 'publicly_routable': False}) # add groups based on attrs groups.extend('gce_image=' + disk['image'] for disk in attrs['disks']) groups.append('gce_machine_type=' + attrs['machine_type']) groups.extend('gce_metadata_%s=%s' % (key, value) for (key, value) in attrs['metadata'].items() if key not in set(['sshKeys'])) groups.extend('gce_tag=' + tag for tag in attrs['tags']) groups.append('gce_zone=' + attrs['zone']) if attrs['can_ip_forward']: groups.append('gce_ip_forward') if attrs['publicly_routable']: groups.append('gce_publicly_routable') # groups specific to Mantl groups.append('role=' + attrs['metadata'].get('role', 'none')) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('vsphere_virtual_machine') @calculate_mantl_vars def vsphere_host(resource, module_name): raw_attrs = resource['primary']['attributes'] network_attrs = parse_dict(raw_attrs, 'network_interface') network = parse_dict(network_attrs, '0') ip_address = network.get('ipv4_address', network['ip_address']) name = raw_attrs['name'] groups = [] attrs = { 'id': raw_attrs['id'], 'ip_address': ip_address, 'private_ipv4': ip_address, 'public_ipv4': ip_address, 'metadata': parse_dict(raw_attrs, 'custom_configuration_parameters'), 'ansible_ssh_port': 22, 'provider': 'vsphere', } try: attrs.update({ 'ansible_ssh_host': ip_address, }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', }) attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('consul_dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), 'ansible_python_interpreter': attrs['metadata'].get('python_bin','python') }) # attrs specific to Ansible if 'ssh_user' in attrs['metadata']: attrs['ansible_ssh_user'] = attrs['metadata']['ssh_user'] groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('azure_instance') @calculate_mantl_vars def azure_host(resource, module_name): name = resource['primary']['attributes']['name'] raw_attrs = resource['primary']['attributes'] groups = [] attrs = { 'automatic_updates': raw_attrs['automatic_updates'], 'description': raw_attrs['description'], 'hosted_service_name': raw_attrs['hosted_service_name'], 'id': raw_attrs['id'], 'image': raw_attrs['image'], 'ip_address': raw_attrs['ip_address'], 'location': raw_attrs['location'], 'name': raw_attrs['name'], 'reverse_dns': raw_attrs['reverse_dns'], 'security_group': raw_attrs['security_group'], 'size': raw_attrs['size'], 'ssh_key_thumbprint': raw_attrs['ssh_key_thumbprint'], 'subnet': raw_attrs['subnet'], 'username': raw_attrs['username'], 'vip_address': raw_attrs['vip_address'], 'virtual_network': raw_attrs['virtual_network'], 'endpoint': parse_attr_list(raw_attrs, 'endpoint'), # ansible 'ansible_ssh_port': 22, 'ansible_ssh_user': raw_attrs['username'], 'ansible_ssh_host': raw_attrs['vip_address'], } # attrs specific to mantl attrs.update({ 'consul_dc': attrs['location'].lower().replace(" ", "-"), 'role': attrs['description'] }) # groups specific to mantl groups.extend(['azure_image=' + attrs['image'], 'azure_location=' + attrs['location'].lower().replace(" ", "-"), 'azure_username=' + attrs['username'], 'azure_security_group=' + attrs['security_group']]) # groups specific to mantl groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('clc_server') @calculate_mantl_vars def clc_server(resource, module_name): raw_attrs = resource['primary']['attributes'] name = raw_attrs.get('id') groups = [] md = parse_dict(raw_attrs, 'metadata') attrs = { 'metadata': md, 'ansible_ssh_port': md.get('ssh_port', 22), 'ansible_ssh_user': md.get('ssh_user', 'root'), 'provider': 'clc', 'publicly_routable': False, } try: attrs.update({ 'public_ipv4': raw_attrs['public_ip_address'], 'private_ipv4': raw_attrs['private_ip_address'], 'ansible_ssh_host': raw_attrs['public_ip_address'], 'publicly_routable': True, }) except (KeyError, ValueError): attrs.update({ 'ansible_ssh_host': raw_attrs['private_ip_address'], 'private_ipv4': raw_attrs['private_ip_address'], }) attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), }) groups.append('role=' + attrs['role']) groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups @parses('ucs_service_profile') @calculate_mantl_vars def ucs_host(resource, module_name): name = resource['primary']['id'] raw_attrs = resource['primary']['attributes'] groups = [] # general attrs attrs = { 'metadata': parse_dict(raw_attrs, 'metadata'), 'provider': 'ucs', } # attrs specific to mantl attrs.update({ 'consul_dc': _clean_dc(attrs['metadata'].get('dc', module_name)), 'role': attrs['metadata'].get('role', 'none'), }) try: attrs.update({ 'ansible_ssh_host': raw_attrs['vNIC.0.ip'], 'public_ipv4': raw_attrs['vNIC.0.ip'], 'private_ipv4': raw_attrs['vNIC.0.ip'] }) except (KeyError, ValueError): attrs.update({'ansible_ssh_host': '', 'publicly_routable': False}) # add groups based on attrs groups.append('role=' + attrs['role']) #.get('role', 'none')) # groups.append('all:children') groups.append('dc=' + attrs['consul_dc']) return name, attrs, groups ## QUERY TYPES def query_host(hosts, target): for name, attrs, _ in hosts: if name == target: return attrs return {} def query_list(hosts): groups = defaultdict(dict) meta = {} for name, attrs, hostgroups in hosts: for group in set(hostgroups): groups[group].setdefault('hosts', []) groups[group]['hosts'].append(name) meta[name] = attrs groups['_meta'] = {'hostvars': meta} return groups def query_hostfile(hosts): out = ['## begin hosts generated by terraform.py ##'] out.extend( '{}\t{}'.format(attrs['ansible_ssh_host'].ljust(16), name) for name, attrs, _ in hosts ) out.append('## end hosts generated by terraform.py ##') return '\n'.join(out) def main(): parser = argparse.ArgumentParser( __file__, __doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) modes = parser.add_mutually_exclusive_group(required=True) modes.add_argument('--list', action='store_true', help='list all variables') modes.add_argument('--host', help='list variables for a single host') modes.add_argument('--version', action='store_true', help='print version and exit') modes.add_argument('--hostfile', action='store_true', help='print hosts as a /etc/hosts snippet') parser.add_argument('--pretty', action='store_true', help='pretty-print output JSON') parser.add_argument('--nometa', action='store_true', help='with --list, exclude hostvars') default_root = os.environ.get('TERRAFORM_STATE_ROOT', os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', ))) parser.add_argument('--root', default=default_root, help='custom root to search for `.tfstate`s in') args = parser.parse_args() if args.version: print('%s %s' % (__file__, VERSION)) parser.exit() hosts = iterhosts(iterresources(tfstates(args.root))) if args.list: output = query_list(hosts) if args.nometa: del output['_meta'] print(json.dumps(output, indent=4 if args.pretty else None)) elif args.host: output = query_host(hosts, args.host) print(json.dumps(output, indent=4 if args.pretty else None)) elif args.hostfile: output = query_hostfile(hosts) print(output) parser.exit() if __name__ == '__main__': main()
YuiYukihira/traffic-sim
refs/heads/master
utils/controllers.py
1
""" traffic-sim - Simulates basic traffic conditions, allows users to compare different driving methods. Copyright (C) 2017 Yui Yukihira This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ class PIDController: def __init__(self, Kp, Ki, Kd): ## I think i implemented this right... self.Kp = Kp self.Ki = Ki self.Kd = Kd self.target = 0 self.reset = 0 self.clock = 0 self.last_error = 0 def set_target(self, val): self.target = val def calculate_output(self, dt, curr_val): last_clock = self.clock self.clock += dt error = self.target - curr_val self.reset + (self.clock - last_clock)*error deriv = (error - self.last_error)/(self.clock - last_clock) output = (self.Kp*error)+(self.Ki*self.reset)+(self.Kp*deriv) return output
android-ia/platform_external_chromium_org_third_party_WebKit
refs/heads/master
Tools/Scripts/webkitpy/tool/servers/reflectionhandler.py
53
# Copyright (c) 2011 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import BaseHTTPServer import cgi import codecs import datetime import fnmatch import json import mimetypes import os.path import shutil import threading import time import urlparse import wsgiref.handlers import BaseHTTPServer class ReflectionHandler(BaseHTTPServer.BaseHTTPRequestHandler): STATIC_FILE_EXTENSIONS = ['.js', '.css', '.html'] # Subclasses should override. STATIC_FILE_DIRECTORY = None # Setting this flag to True causes the server to send # Access-Control-Allow-Origin: * # with every response. allow_cross_origin_requests = False def do_GET(self): self._handle_request() def do_POST(self): self._handle_request() def do_HEAD(self): self._handle_request() def read_entity_body(self): length = int(self.headers.getheader('content-length')) return self.rfile.read(length) def _read_entity_body_as_json(self): return json.loads(self.read_entity_body()) def _handle_request(self): if "?" in self.path: path, query_string = self.path.split("?", 1) self.query = cgi.parse_qs(query_string) else: path = self.path self.query = {} function_or_file_name = path[1:] or "index.html" _, extension = os.path.splitext(function_or_file_name) if extension in self.STATIC_FILE_EXTENSIONS: self._serve_static_file(function_or_file_name) return function_name = function_or_file_name.replace(".", "_") if not hasattr(self, function_name): self.send_error(404, "Unknown function %s" % function_name) return if function_name[0] == "_": self.send_error(401, "Not allowed to invoke private or protected methods") return function = getattr(self, function_name) function() def _serve_static_file(self, static_path): self._serve_file(os.path.join(self.STATIC_FILE_DIRECTORY, static_path)) def quitquitquit(self): self._serve_text("Server quit.\n") # Shutdown has to happen on another thread from the server's thread, # otherwise there's a deadlock threading.Thread(target=lambda: self.server.shutdown()).start() def _send_access_control_header(self): if self.allow_cross_origin_requests: self.send_header('Access-Control-Allow-Origin', '*') def _serve_text(self, text): self.send_response(200) self._send_access_control_header() self.send_header("Content-type", "text/plain") self.end_headers() self.wfile.write(text) def _serve_json(self, json_object): self.send_response(200) self._send_access_control_header() self.send_header('Content-type', 'application/json') self.end_headers() json.dump(json_object, self.wfile) def _serve_file(self, file_path, cacheable_seconds=0, headers_only=False): if not os.path.exists(file_path): self.send_error(404, "File not found") return with codecs.open(file_path, "rb") as static_file: self.send_response(200) self._send_access_control_header() self.send_header("Content-Length", os.path.getsize(file_path)) mime_type, encoding = mimetypes.guess_type(file_path) if mime_type: self.send_header("Content-type", mime_type) if cacheable_seconds: expires_time = (datetime.datetime.now() + datetime.timedelta(0, cacheable_seconds)) expires_formatted = wsgiref.handlers.format_date_time( time.mktime(expires_time.timetuple())) self.send_header("Expires", expires_formatted) self.end_headers() if not headers_only: shutil.copyfileobj(static_file, self.wfile) def _serve_xml(self, xml): self.send_response(200) self._send_access_control_header() self.send_header("Content-type", "text/xml") self.end_headers() xml = xml.encode('utf-8') self.wfile.write(xml)
technologiescollege/s2a_fr
refs/heads/portable
s2a/Python/Lib/distutils/bcppcompiler.py
250
"""distutils.bcppcompiler Contains BorlandCCompiler, an implementation of the abstract CCompiler class for the Borland C++ compiler. """ # This implementation by Lyle Johnson, based on the original msvccompiler.py # module and using the directions originally published by Gordon Williams. # XXX looks like there's a LOT of overlap between these two classes: # someone should sit down and factor out the common code as # WindowsCCompiler! --GPW __revision__ = "$Id$" import os from distutils.errors import (DistutilsExecError, CompileError, LibError, LinkError, UnknownFileError) from distutils.ccompiler import CCompiler, gen_preprocess_options from distutils.file_util import write_file from distutils.dep_util import newer from distutils import log class BCPPCompiler(CCompiler) : """Concrete class that implements an interface to the Borland C/C++ compiler, as defined by the CCompiler abstract class. """ compiler_type = 'bcpp' # Just set this so CCompiler's constructor doesn't barf. We currently # don't use the 'set_executables()' bureaucracy provided by CCompiler, # as it really isn't necessary for this sort of single-compiler class. # Would be nice to have a consistent interface with UnixCCompiler, # though, so it's worth thinking about. executables = {} # Private class data (need to distinguish C from C++ source for compiler) _c_extensions = ['.c'] _cpp_extensions = ['.cc', '.cpp', '.cxx'] # Needed for the filename generation methods provided by the # base class, CCompiler. src_extensions = _c_extensions + _cpp_extensions obj_extension = '.obj' static_lib_extension = '.lib' shared_lib_extension = '.dll' static_lib_format = shared_lib_format = '%s%s' exe_extension = '.exe' def __init__ (self, verbose=0, dry_run=0, force=0): CCompiler.__init__ (self, verbose, dry_run, force) # These executables are assumed to all be in the path. # Borland doesn't seem to use any special registry settings to # indicate their installation locations. self.cc = "bcc32.exe" self.linker = "ilink32.exe" self.lib = "tlib.exe" self.preprocess_options = None self.compile_options = ['/tWM', '/O2', '/q', '/g0'] self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0'] self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x'] self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x'] self.ldflags_static = [] self.ldflags_exe = ['/Gn', '/q', '/x'] self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r'] # -- Worker methods ------------------------------------------------ def compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None): macros, objects, extra_postargs, pp_opts, build = \ self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs) compile_opts = extra_preargs or [] compile_opts.append ('-c') if debug: compile_opts.extend (self.compile_options_debug) else: compile_opts.extend (self.compile_options) for obj in objects: try: src, ext = build[obj] except KeyError: continue # XXX why do the normpath here? src = os.path.normpath(src) obj = os.path.normpath(obj) # XXX _setup_compile() did a mkpath() too but before the normpath. # Is it possible to skip the normpath? self.mkpath(os.path.dirname(obj)) if ext == '.res': # This is already a binary file -- skip it. continue # the 'for' loop if ext == '.rc': # This needs to be compiled to a .res file -- do it now. try: self.spawn (["brcc32", "-fo", obj, src]) except DistutilsExecError, msg: raise CompileError, msg continue # the 'for' loop # The next two are both for the real compiler. if ext in self._c_extensions: input_opt = "" elif ext in self._cpp_extensions: input_opt = "-P" else: # Unknown file type -- no extra options. The compiler # will probably fail, but let it just in case this is a # file the compiler recognizes even if we don't. input_opt = "" output_opt = "-o" + obj # Compiler command line syntax is: "bcc32 [options] file(s)". # Note that the source file names must appear at the end of # the command line. try: self.spawn ([self.cc] + compile_opts + pp_opts + [input_opt, output_opt] + extra_postargs + [src]) except DistutilsExecError, msg: raise CompileError, msg return objects # compile () def create_static_lib (self, objects, output_libname, output_dir=None, debug=0, target_lang=None): (objects, output_dir) = self._fix_object_args (objects, output_dir) output_filename = \ self.library_filename (output_libname, output_dir=output_dir) if self._need_link (objects, output_filename): lib_args = [output_filename, '/u'] + objects if debug: pass # XXX what goes here? try: self.spawn ([self.lib] + lib_args) except DistutilsExecError, msg: raise LibError, msg else: log.debug("skipping %s (up-to-date)", output_filename) # create_static_lib () def link (self, target_desc, objects, output_filename, output_dir=None, libraries=None, library_dirs=None, runtime_library_dirs=None, export_symbols=None, debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, target_lang=None): # XXX this ignores 'build_temp'! should follow the lead of # msvccompiler.py (objects, output_dir) = self._fix_object_args (objects, output_dir) (libraries, library_dirs, runtime_library_dirs) = \ self._fix_lib_args (libraries, library_dirs, runtime_library_dirs) if runtime_library_dirs: log.warn("I don't know what to do with 'runtime_library_dirs': %s", str(runtime_library_dirs)) if output_dir is not None: output_filename = os.path.join (output_dir, output_filename) if self._need_link (objects, output_filename): # Figure out linker args based on type of target. if target_desc == CCompiler.EXECUTABLE: startup_obj = 'c0w32' if debug: ld_args = self.ldflags_exe_debug[:] else: ld_args = self.ldflags_exe[:] else: startup_obj = 'c0d32' if debug: ld_args = self.ldflags_shared_debug[:] else: ld_args = self.ldflags_shared[:] # Create a temporary exports file for use by the linker if export_symbols is None: def_file = '' else: head, tail = os.path.split (output_filename) modname, ext = os.path.splitext (tail) temp_dir = os.path.dirname(objects[0]) # preserve tree structure def_file = os.path.join (temp_dir, '%s.def' % modname) contents = ['EXPORTS'] for sym in (export_symbols or []): contents.append(' %s=_%s' % (sym, sym)) self.execute(write_file, (def_file, contents), "writing %s" % def_file) # Borland C++ has problems with '/' in paths objects2 = map(os.path.normpath, objects) # split objects in .obj and .res files # Borland C++ needs them at different positions in the command line objects = [startup_obj] resources = [] for file in objects2: (base, ext) = os.path.splitext(os.path.normcase(file)) if ext == '.res': resources.append(file) else: objects.append(file) for l in library_dirs: ld_args.append("/L%s" % os.path.normpath(l)) ld_args.append("/L.") # we sometimes use relative paths # list of object files ld_args.extend(objects) # XXX the command-line syntax for Borland C++ is a bit wonky; # certain filenames are jammed together in one big string, but # comma-delimited. This doesn't mesh too well with the # Unix-centric attitude (with a DOS/Windows quoting hack) of # 'spawn()', so constructing the argument list is a bit # awkward. Note that doing the obvious thing and jamming all # the filenames and commas into one argument would be wrong, # because 'spawn()' would quote any filenames with spaces in # them. Arghghh!. Apparently it works fine as coded... # name of dll/exe file ld_args.extend([',',output_filename]) # no map file and start libraries ld_args.append(',,') for lib in libraries: # see if we find it and if there is a bcpp specific lib # (xxx_bcpp.lib) libfile = self.find_library_file(library_dirs, lib, debug) if libfile is None: ld_args.append(lib) # probably a BCPP internal library -- don't warn else: # full name which prefers bcpp_xxx.lib over xxx.lib ld_args.append(libfile) # some default libraries ld_args.append ('import32') ld_args.append ('cw32mt') # def file for export symbols ld_args.extend([',',def_file]) # add resource files ld_args.append(',') ld_args.extend(resources) if extra_preargs: ld_args[:0] = extra_preargs if extra_postargs: ld_args.extend(extra_postargs) self.mkpath (os.path.dirname (output_filename)) try: self.spawn ([self.linker] + ld_args) except DistutilsExecError, msg: raise LinkError, msg else: log.debug("skipping %s (up-to-date)", output_filename) # link () # -- Miscellaneous methods ----------------------------------------- def find_library_file (self, dirs, lib, debug=0): # List of effective library names to try, in order of preference: # xxx_bcpp.lib is better than xxx.lib # and xxx_d.lib is better than xxx.lib if debug is set # # The "_bcpp" suffix is to handle a Python installation for people # with multiple compilers (primarily Distutils hackers, I suspect # ;-). The idea is they'd have one static library for each # compiler they care about, since (almost?) every Windows compiler # seems to have a different format for static libraries. if debug: dlib = (lib + "_d") try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib) else: try_names = (lib + "_bcpp", lib) for dir in dirs: for name in try_names: libfile = os.path.join(dir, self.library_filename(name)) if os.path.exists(libfile): return libfile else: # Oops, didn't find it in *any* of 'dirs' return None # overwrite the one from CCompiler to support rc and res-files def object_filenames (self, source_filenames, strip_dir=0, output_dir=''): if output_dir is None: output_dir = '' obj_names = [] for src_name in source_filenames: # use normcase to make sure '.rc' is really '.rc' and not '.RC' (base, ext) = os.path.splitext (os.path.normcase(src_name)) if ext not in (self.src_extensions + ['.rc','.res']): raise UnknownFileError, \ "unknown file type '%s' (from '%s')" % \ (ext, src_name) if strip_dir: base = os.path.basename (base) if ext == '.res': # these can go unchanged obj_names.append (os.path.join (output_dir, base + ext)) elif ext == '.rc': # these need to be compiled to .res-files obj_names.append (os.path.join (output_dir, base + '.res')) else: obj_names.append (os.path.join (output_dir, base + self.obj_extension)) return obj_names # object_filenames () def preprocess (self, source, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None): (_, macros, include_dirs) = \ self._fix_compile_args(None, macros, include_dirs) pp_opts = gen_preprocess_options(macros, include_dirs) pp_args = ['cpp32.exe'] + pp_opts if output_file is not None: pp_args.append('-o' + output_file) if extra_preargs: pp_args[:0] = extra_preargs if extra_postargs: pp_args.extend(extra_postargs) pp_args.append(source) # We need to preprocess: either we're being forced to, or the # source file is newer than the target (or the target doesn't # exist). if self.force or output_file is None or newer(source, output_file): if output_file: self.mkpath(os.path.dirname(output_file)) try: self.spawn(pp_args) except DistutilsExecError, msg: print msg raise CompileError, msg # preprocess()
bregman-arie/ansible
refs/heads/devel
lib/ansible/utils/module_docs_fragments/vyos.py
58
# # (c) 2015, Peter Sprygada <[email protected]> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. class ModuleDocFragment(object): # Standard files documentation fragment DOCUMENTATION = """ options: provider: description: - B(Deprecated) - "Starting with Ansible 2.5 we recommend using C(connection: network_cli)." - For more information please see the L(Network Guide, ../network/getting_started/network_differences.html#multiple-communication-protocols). - HORIZONTALLINE - A dict object containing connection details. suboptions: host: description: - Specifies the DNS host name or address for connecting to the remote device over the specified transport. The value of host is used as the destination address for the transport. required: true port: description: - Specifies the port to use when building the connection to the remote device. default: 22 username: description: - Configures the username to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead. password: description: - Specifies the password to use to authenticate the connection to the remote device. This value is used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead. timeout: description: - Specifies the timeout in seconds for communicating with the network device for either connecting or sending commands. If the timeout is exceeded before the operation is completed, the module will error. default: 10 ssh_keyfile: description: - Specifies the SSH key to use to authenticate the connection to the remote device. This value is the path to the key used to authenticate the SSH session. If the value is not specified in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE) will be used instead. notes: - For more information on using Ansible to manage network devices see the :ref:`Ansible Network Guide <network_guide>` """
mickele77/FreeCAD
refs/heads/master
src/Mod/Fem/_FemAnalysis.py
1
#*************************************************************************** #* * #* Copyright (c) 2013-2015 - Juergen Riegel <[email protected]> * #* * #* This program is free software; you can redistribute it and/or modify * #* it under the terms of the GNU Lesser General Public License (LGPL) * #* as published by the Free Software Foundation; either version 2 of * #* the License, or (at your option) any later version. * #* for detail see the LICENCE text file. * #* * #* This program is distributed in the hope that it will be useful, * #* but WITHOUT ANY WARRANTY; without even the implied warranty of * #* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * #* GNU Library General Public License for more details. * #* * #* You should have received a copy of the GNU Library General Public * #* License along with this program; if not, write to the Free Software * #* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * #* USA * #* * #*************************************************************************** __title__ = "Fem Analysis" __author__ = "Juergen Riegel" __url__ = "http://www.freecadweb.org" import FreeCAD from FemTools import FemTools class _FemAnalysis: "The FemAnalysis container object" def __init__(self, obj): self.Type = "FemAnalysis" obj.Proxy = self fem_prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem") obj.addProperty("App::PropertyEnumeration", "AnalysisType", "Fem", "Type of the analysis") obj.AnalysisType = FemTools.known_analysis_types analysis_type = fem_prefs.GetInt("AnalysisType", 0) obj.AnalysisType = FemTools.known_analysis_types[analysis_type] obj.addProperty("App::PropertyPath", "WorkingDir", "Fem", "Working directory for calculations") obj.WorkingDir = fem_prefs.GetString("WorkingDir", "") obj.addProperty("App::PropertyIntegerConstraint", "NumberOfEigenmodes", "Fem", "Number of modes for frequency calculations") noe = fem_prefs.GetInt("NumberOfEigenmodes", 10) obj.NumberOfEigenmodes = (noe, 1, 100, 1) obj.addProperty("App::PropertyFloatConstraint", "EigenmodeLowLimit", "Fem", "Low frequency limit for eigenmode calculations") #Not yet in prefs, so it will always default to 0.0 ell = fem_prefs.GetFloat("EigenmodeLowLimit", 0.0) obj.EigenmodeLowLimit = (ell, 0.0, 1000000.0, 10000.0) obj.addProperty("App::PropertyFloatConstraint", "EigenmodeHighLimit", "Fem", "High frequency limit for eigenmode calculations") ehl = fem_prefs.GetFloat("EigenmodeHighLimit", 1000000.0) obj.EigenmodeHighLimit = (ehl, 0.0, 1000000.0, 10000.0) def execute(self, obj): return def onChanged(self, obj, prop): if prop in ["MaterialName"]: return def __getstate__(self): return self.Type def __setstate__(self, state): if state: self.Type = state
Seagate/hadoop-on-lustre
refs/heads/master
contrib/hod/hodlib/ServiceProxy/serviceProxy.py
182
#Licensed to the Apache Software Foundation (ASF) under one #or more contributor license agreements. See the NOTICE file #distributed with this work for additional information #regarding copyright ownership. The ASF licenses this file #to you under the Apache License, Version 2.0 (the #"License"); you may not use this file except in compliance #with the License. You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. """HOD Service Proxy Implementation""" # -*- python -*- import sys, time, signal, httplib, socket, threading import sha, base64, hmac import xml.dom.minidom from hodlib.Common.socketServers import hodHTTPServer from hodlib.Common.hodsvc import hodBaseService from hodlib.Common.threads import loop from hodlib.Common.tcp import tcpSocket from hodlib.Common.util import get_exception_string from hodlib.Common.AllocationManagerUtil import * class svcpxy(hodBaseService): def __init__(self, config): hodBaseService.__init__(self, 'serviceProxy', config['service_proxy'], xrtype='twisted') self.amcfg=config['allocation_manager'] def _xr_method_isProjectUserValid(self, userid, project, ignoreErrors = False, timeOut = 15): return self.isProjectUserValid(userid, project, ignoreErrors, timeOut) def isProjectUserValid(self, userid, project, ignoreErrors, timeOut): """Method thats called upon by the hodshell to verify if the specified (user, project) combination is valid""" self.logs['main'].info("Begin isProjectUserValid()") am = AllocationManagerUtil.getAllocationManager(self.amcfg['id'], self.amcfg, self.logs['main']) self.logs['main'].info("End isProjectUserValid()") return am.getQuote(userid, project)
anand-c-goog/tensorflow
refs/heads/master
tensorflow/python/user_ops/user_ops.py
33
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """All user ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.ops import gen_user_ops from tensorflow.python.ops.gen_user_ops import * def my_fact(): """Example of overriding the generated code for an Op.""" return gen_user_ops._fact()
pombredanne/parakeet
refs/heads/master
test/numpy_lib/test_linspace.py
2
import numpy as np from parakeet import testing_helpers def test_simple_linspace(): testing_helpers.expect(np.linspace, [0.2, 0.9], np.linspace(0.2, 0.9)) def test_linspace_with_count(): testing_helpers.expect(np.linspace, [-0.2, 0.9, 30], np.linspace(-0.2, 0.9, 30)) if __name__ == "__main__": testing_helpers.run_local_tests()
ARTbio/GalaxyKickStart
refs/heads/master
extra-files/get_tool_list_from_galaxy.py
3
#!/usr/bin/env python import json import requests import yaml from argparse import ArgumentParser from argparse import ArgumentDefaultsHelpFormatter class GiToToolYaml: def __init__(self, url, output_file, include_tool_panel_section_id=False, skip_tool_panel_section_name=True): self.url = url self.output_file = output_file self.include_tool_panel_section_id = include_tool_panel_section_id self.skip_tool_panel_section_name = skip_tool_panel_section_name self.repository_list = self.get_repositories() self.merge_tool_changeset_revisions() self.filter_section_name_or_id() self.write_to_yaml() @property def toolbox(self): """ Gets the toolbox elements from <galaxy_url>/api/tools """ r = requests.get("{url}/api/tools".format(url=self.url)) return json.loads(r.text) def get_repositories(self): """ Toolbox elements returned by api/tools may be of class ToolSection or Tool. Parse these accordingly to get a list of repositories. """ repositories = [] for elem in self.toolbox: if elem['model_class'] == 'Tool': repositories.append(self.get_repo_from_tool(elem)) elif elem['model_class'] == 'ToolSection': repositories.extend(self.get_repos_from_section(elem)) return repositories def get_repo_from_tool(self, tool): """ Get the minimum items required for re-installing a (list of) tools """ if not tool.get('tool_shed_repository', None): return {} tsr = tool['tool_shed_repository'] repo = {'name': tsr['name'], 'owner': tsr['owner'], 'tool_shed_url': tsr['tool_shed'], 'revisions': [tsr['changeset_revision']], 'tool_panel_section_id': tool['panel_section_id'], 'tool_panel_section_name': tool['panel_section_name']} return repo def get_repos_from_section(self, section): repos = [] for elem in section['elems']: if elem['model_class'] == 'Tool': repos.append(self.get_repo_from_tool(elem)) elif elem['model_class'] == 'ToolSection': repos.extend(self.get_repos_from_section(elem)) return [repo for repo in repos if repo] def merge_tool_changeset_revisions(self): """ Each installed changeset revision of a tool is listed individually. Merge revisions of the same tool into a list. """ tool_list = self.repository_list for current_tool in tool_list: for tool in tool_list: if current_tool is tool: continue if (tool["name"] == current_tool['name'] and tool['owner'] == current_tool['owner'] and tool['tool_panel_section_id'] == current_tool['tool_panel_section_id'] and tool['tool_shed_url'] == current_tool['tool_shed_url']): current_tool["revisions"].extend(tool["revisions"]) tool_list.remove(tool) current_tool['revisions'] = list(set(current_tool['revisions'])) def filter_section_name_or_id(self): repo_list = [] for repo in self.repository_list: if self.skip_tool_panel_section_name: del repo['tool_panel_section_name'] if not self.include_tool_panel_section_id: del repo['tool_panel_section_id'] repo_list.append(repo) self.repository_list = repo_list def write_to_yaml(self): tool_dict = {"tools": self.repository_list} with open(self.output_file, "w") as output: output.write(yaml.safe_dump(tool_dict, default_flow_style=False)) def _parse_cli_options(): """ Parse command line options, returning `parse_args` from `ArgumentParser`. """ parser = ArgumentParser(usage="usage: python %(prog)s <options>", epilog='Example usage: python get_tool_yml_from_gi.py ' '-g https://usegalaxy.org/ -o tool_list.yml', formatter_class=ArgumentDefaultsHelpFormatter) parser.add_argument("-g", "--galaxy", dest="galaxy_url", required=True, help="Target Galaxy instance URL/IP address (required)") parser.add_argument("-o", "--output-file", required=True, dest="output", help="tool_list.yml output file") parser.add_argument("-include_id", "--include_tool_panel_id", action="store_true", default=False, help="Include tool_panel_id in tool_list.yml ? " "Use this only if the tool panel id already exists. See " "https://github.com/galaxyproject/ansible-galaxy-tools/blob/master/files/tool_list.yaml.sample") parser.add_argument("-skip_name", "--skip_tool_panel_name", action="store_true", default=False, help="Do not include tool_panel_name in tool_list.yml ?") return parser.parse_args() if __name__ == "__main__": options = _parse_cli_options() GiToToolYaml(url=options.galaxy_url, output_file=options.output, include_tool_panel_section_id=options.include_tool_panel_id, skip_tool_panel_section_name=options.skip_tool_panel_name)
tarzasai/Flexget
refs/heads/develop
flexget/utils/archive.py
3
""" Utilities for handling RAR and ZIP archives Provides wrapper archive and exception classes to simplify archive extraction """ from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin import zipfile import os import shutil import logging try: import rarfile except ImportError: rarfile = None log = logging.getLogger('archive') class ArchiveError(Exception): """Base exception for archive""" pass class NeedRarFile(ArchiveError): """Exception to be raised when rarfile module is missing""" pass class BadArchive(ArchiveError): """Wrapper exception for BadZipFile and BadRarFile""" pass class NeedFirstVolume(ArchiveError): """Wrapper exception for rarfile.NeedFirstVolume""" pass class PathError(ArchiveError): """Exception to be raised when an archive file doesn't exist""" pass class FSError(ArchiveError): """Exception to be raised on OS/IO exceptions""" pass def rarfile_set_tool_path(config): """ Manually set the path of unrar executable if it can't be resolved from the PATH environment variable """ unrar_tool = config['unrar_tool'] if unrar_tool: if not rarfile: log.error('rar_tool specified with no rarfile module installed.') else: rarfile.UNRAR_TOOL = unrar_tool log.debug('Set RarFile.unrar_tool to: %s', unrar_tool) def rarfile_set_path_sep(separator): """ Set the path separator on rarfile module """ if rarfile: rarfile.PATH_SEP = separator class Archive(object): """ Base archive class. Assumes an interface similar to zipfile.ZipFile or rarfile.RarFile """ def __init__(self, archive_object, path): self.path = path self.archive = archive_object(self.path) def close(self): """Release open resources.""" self.archive.close() def delete(self): """Delete the volumes that make up this archive""" volumes = self.volumes() self.close() try: for volume in volumes: os.remove(volume) log.verbose('Deleted archive: %s', volume) except (IOError, os.error) as error: raise FSError(error) def volumes(self): """Returns the list of volumes that comprise this archive""" return [self.path] def infolist(self): """Returns a list of info objects describing the contents of this archive""" return self.archive.infolist() def open(self, member): """Returns file-like object from where the data of a member file can be read.""" return self.archive.open(member) def extract_file(self, member, destination): """Extract a member file to the specified destination""" try: with self.open(member) as source: with open(destination, 'wb') as target: shutil.copyfileobj(source, target) except (IOError, os.error) as error: raise FSError(error) log.verbose('Extracted: %s', member) class RarArchive(Archive): """ Wrapper class for rarfile.RarFile """ def __init__(self, path): if not rarfile: raise NeedRarFile('Python module rarfile needed to handle RAR archives') try: super(RarArchive, self).__init__(rarfile.RarFile, path) except rarfile.BadRarFile as error: raise BadArchive(error) except rarfile.NeedFirstVolume as error: raise NeedFirstVolume(error) except rarfile.Error as error: raise ArchiveError(error) def volumes(self): """Returns the list of volumes that comprise this archive""" return self.archive.volumelist() def open(self, member): """Returns file-like object from where the data of a member file can be read.""" try: return super(RarArchive, self).open(member) except rarfile.Error as error: raise ArchiveError(error) class ZipArchive(Archive): """ Wrapper class for zipfile.ZipFile """ def __init__(self, path): try: super(ZipArchive, self).__init__(zipfile.ZipFile, path) except zipfile.BadZipfile as error: raise BadArchive(error) def open(self, member): """Returns file-like object from where the data of a member file can be read.""" try: return super(ZipArchive, self).open(member) except zipfile.BadZipfile as error: raise ArchiveError(error) def open_archive(archive_path): """ Returns the appropriate archive object """ archive = None if not os.path.exists(archive_path): raise PathError('Path doesn\'t exist') if zipfile.is_zipfile(archive_path): archive = ZipArchive(archive_path) log.debug('Successfully opened ZIP: %s', archive_path) elif rarfile and rarfile.is_rarfile(archive_path): archive = RarArchive(archive_path) log.debug('Successfully opened RAR: %s', archive_path) else: if not rarfile: log.warning('Rarfile module not installed; unable to handle RAR archives.') return archive def is_archive(path): """ Attempts to open an entry as an archive; returns True on success, False on failure. """ archive = None try: archive = open_archive(path) if archive: archive.close() return True except ArchiveError as error: error_message = 'Failed to open file as archive: %s (%s)' % (path, error) log.debug(error_message) return False
jlspyaozhongkai/Uter
refs/heads/master
third_party_backup/Python-2.7.9/Lib/plat-mac/lib-scriptpackages/Explorer/Microsoft_Internet_Explorer.py
82
"""Suite Microsoft Internet Explorer Suite: Events defined by Internet Explorer Level 1, version 1 Generated from /Applications/Internet Explorer.app AETE/AEUT resource version 1/0, language 0, script 0 """ import aetools import MacOS _code = 'MSIE' class Microsoft_Internet_Explorer_Events: def GetSource(self, _object=None, _attributes={}, **_arguments): """GetSource: Get the HTML source of a browser window Required argument: Window Identifier of window from which to get the source. No value means get the source from the frontmost window. Keyword argument _attributes: AppleEvent attribute dictionary Returns: undocumented, typecode 'TEXT' """ _code = 'MSIE' _subcode = 'SORC' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def PrintBrowserWindow(self, _object=None, _attributes={}, **_arguments): """PrintBrowserWindow: Print contents of browser window (HTML) Required argument: Window Identifier of the window to print. No value means print the frontmost browser window. Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'misc' _subcode = 'pWND' if _arguments: raise TypeError, 'No optional args expected' _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] _argmap_do_script = { 'window' : 'WIND', } def do_script(self, _object, _attributes={}, **_arguments): """do script: Execute script commands Required argument: JavaScript text to execute Keyword argument window: optional Window Identifier (as supplied by the ListWindows event) specifying context in which to execute the script Keyword argument _attributes: AppleEvent attribute dictionary Returns: Return value """ _code = 'misc' _subcode = 'dosc' aetools.keysubst(_arguments, self._argmap_do_script) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] # # Indices of types declared in this module # _classdeclarations = { } _propdeclarations = { } _compdeclarations = { } _enumdeclarations = { }
westinedu/wrgroups
refs/heads/master
django/conf/locale/bn/formats.py
433
# -*- encoding: utf-8 -*- # This file is distributed under the same license as the Django package. # # The *_FORMAT strings use the Django date format syntax, # see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date DATE_FORMAT = 'j F, Y' TIME_FORMAT = 'g:i:s A' # DATETIME_FORMAT = YEAR_MONTH_FORMAT = 'F Y' MONTH_DAY_FORMAT = 'j F' SHORT_DATE_FORMAT = 'j M, Y' # SHORT_DATETIME_FORMAT = # FIRST_DAY_OF_WEEK = # The *_INPUT_FORMATS strings use the Python strftime format syntax, # see http://docs.python.org/library/datetime.html#strftime-strptime-behavior # DATE_INPUT_FORMATS = # TIME_INPUT_FORMATS = # DATETIME_INPUT_FORMATS = DECIMAL_SEPARATOR = '.' THOUSAND_SEPARATOR = ',' # NUMBER_GROUPING =
Pluto-tv/chromium-crosswalk
refs/heads/master
tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/pass_through.py
31
# Copyright 2015 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page_module from telemetry import story class PassThroughPage(page_module.Page): """ A test page for the chrome proxy pass-through tests. """ def __init__(self, url, page_set): super(PassThroughPage, self).__init__(url=url, page_set=page_set) def RunNavigateSteps(self, action_runner): super(PassThroughPage, self).RunNavigateSteps(action_runner) action_runner.ExecuteJavaScript(''' (function() { var request = new XMLHttpRequest(); request.open("GET", "%s"); request.setRequestHeader("Chrome-Proxy", "pass-through"); request.send(null); })();''' % (self.url)) action_runner.Wait(1) class PassThroughStorySet(story.StorySet): """ Chrome proxy test sites """ def __init__(self): super(PassThroughStorySet, self).__init__() urls_list = [ 'http://check.googlezip.net/image.png', ] for url in urls_list: self.AddStory(PassThroughPage(url, self))
1mgOfficial/vyked
refs/heads/master
vyked/config.py
1
from .utils.common_utils import json_file_to_dict, valid_timeout class CONFIG: config = json_file_to_dict('./config.json') HTTP_TIMEOUT = config['HTTP_TIMEOUT'] if isinstance(config, dict) and 'HTTP_TIMEOUT' in config and valid_timeout(config['HTTP_TIMEOUT']) else 60 TCP_TIMEOUT = config['TCP_TIMEOUT'] if isinstance(config, dict) and 'TCP_TIMEOUT' in config and valid_timeout(config['TCP_TIMEOUT']) else 60 HTTP_KEEP_ALIVE_TIMEOUT = config['HTTP_KEEP_ALIVE_TIMEOUT'] if isinstance(config, dict) and 'HTTP_KEEP_ALIVE_TIMEOUT' in config and valid_timeout(config['HTTP_KEEP_ALIVE_TIMEOUT']) else 15 INTERNAL_HTTP_PREFIX = '/__onemg-internal__' SLOW_API_THRESHOLD = config['SLOW_API_THRESHOLD'] if isinstance(config, dict) and 'SLOW_API_THRESHOLD' in config and valid_timeout(config['SLOW_API_THRESHOLD']) else 1
wainersm/buildbot
refs/heads/master
worker/buildbot_worker/compat.py
9
# coding=utf-8 # This file is part of Buildbot. Buildbot is free software: you can # redistribute it and/or modify it under the terms of the GNU General Public # License as published by the Free Software Foundation, version 2. # # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more # details. # # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51 # Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # Copyright Buildbot Team Members """ Helpers for handling compatibility differences between Python 2 and Python 3. """ from __future__ import absolute_import from __future__ import print_function from future.utils import text_type if str != bytes: # On Python 3 and higher, str and bytes # are not equivalent. We must use StringIO for # doing io on native strings. from io import StringIO as NativeStringIO else: # On Python 2 and older, str and bytes # are equivalent. We must use BytesIO for # doing io on native strings. from io import BytesIO as NativeStringIO def bytes2NativeString(x, encoding='utf-8'): """ Convert C{bytes} to a native C{str}. On Python 3 and higher, str and bytes are not equivalent. In this case, decode the bytes, and return a native string. On Python 2 and lower, str and bytes are equivalent. In this case, just just return the native string. @param x: a string of type C{bytes} @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{str} """ if isinstance(x, bytes) and str != bytes: return x.decode(encoding) return x def unicode2bytes(x, encoding='utf-8'): """ Convert a unicode string to C{bytes}. @param x: a unicode string, of type C{unicode} on Python 2, or C{str} on Python 3. @param encoding: an optional codec, default: 'utf-8' @return: a string of type C{bytes} """ if isinstance(x, text_type): x = x.encode(encoding) return x __all__ = [ "NativeStringIO", "bytes2NativeString", "unicode2bytes" ]
BaladiDogGames/baladidoggames.github.io
refs/heads/master
mingw/bin/lib/lib2to3/fixes/fix_filter.py
326
# Copyright 2007 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Fixer that changes filter(F, X) into list(filter(F, X)). We avoid the transformation if the filter() call is directly contained in iter(<>), list(<>), tuple(<>), sorted(<>), ...join(<>), or for V in <>:. NOTE: This is still not correct if the original code was depending on filter(F, X) to return a string if X is a string and a tuple if X is a tuple. That would require type inference, which we don't do. Let Python 2.6 figure it out. """ # Local imports from ..pgen2 import token from .. import fixer_base from ..fixer_util import Name, Call, ListComp, in_special_context class FixFilter(fixer_base.ConditionalFix): BM_compatible = True PATTERN = """ filter_lambda=power< 'filter' trailer< '(' arglist< lambdef< 'lambda' (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any > ',' it=any > ')' > > | power< 'filter' trailer< '(' arglist< none='None' ',' seq=any > ')' > > | power< 'filter' args=trailer< '(' [any] ')' > > """ skip_on = "future_builtins.filter" def transform(self, node, results): if self.should_skip(node): return if "filter_lambda" in results: new = ListComp(results.get("fp").clone(), results.get("fp").clone(), results.get("it").clone(), results.get("xp").clone()) elif "none" in results: new = ListComp(Name(u"_f"), Name(u"_f"), results["seq"].clone(), Name(u"_f")) else: if in_special_context(node): return None new = node.clone() new.prefix = u"" new = Call(Name(u"list"), [new]) new.prefix = node.prefix return new
endlessm/systemd
refs/heads/master
hwdb.d/parse_hwdb.py
1
#!/usr/bin/env python3 # SPDX-License-Identifier: MIT # # This file is distributed under the MIT license, see below. # # The MIT License (MIT) # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import glob import string import sys import os try: from pyparsing import (Word, White, Literal, ParserElement, Regex, LineEnd, OneOrMore, Combine, Or, Optional, Suppress, Group, nums, alphanums, printables, stringEnd, pythonStyleComment, ParseBaseException) except ImportError: print('pyparsing is not available') sys.exit(77) try: from evdev.ecodes import ecodes except ImportError: ecodes = None print('WARNING: evdev is not available') try: from functools import lru_cache except ImportError: # don't do caching on old python lru_cache = lambda: (lambda f: f) EOL = LineEnd().suppress() EMPTYLINE = LineEnd() COMMENTLINE = pythonStyleComment + EOL INTEGER = Word(nums) REAL = Combine((INTEGER + Optional('.' + Optional(INTEGER))) ^ ('.' + INTEGER)) SIGNED_REAL = Combine(Optional(Word('-+')) + REAL) UDEV_TAG = Word(string.ascii_uppercase, alphanums + '_') # Those patterns are used in type-specific matches TYPES = {'mouse': ('usb', 'bluetooth', 'ps2', '*'), 'evdev': ('name', 'atkbd', 'input'), 'id-input': ('modalias'), 'touchpad': ('i8042', 'rmi', 'bluetooth', 'usb'), 'joystick': ('i8042', 'rmi', 'bluetooth', 'usb'), 'keyboard': ('name', ), 'sensor': ('modalias', ), } # Patterns that are used to set general properties on a device GENERAL_MATCHES = {'acpi', 'bluetooth', 'usb', 'pci', 'sdio', 'vmbus', 'OUI', } def upperhex_word(length): return Word(nums + 'ABCDEF', exact=length) @lru_cache() def hwdb_grammar(): ParserElement.setDefaultWhitespaceChars('') prefix = Or(category + ':' + Or(conn) + ':' for category, conn in TYPES.items()) matchline_typed = Combine(prefix + Word(printables + ' ' + '®')) matchline_general = Combine(Or(GENERAL_MATCHES) + ':' + Word(printables + ' ' + '®')) matchline = (matchline_typed | matchline_general) + EOL propertyline = (White(' ', exact=1).suppress() + Combine(UDEV_TAG - '=' - Optional(Word(alphanums + '_=:@*.!-;, "')) - Optional(pythonStyleComment)) + EOL) propertycomment = White(' ', exact=1) + pythonStyleComment + EOL group = (OneOrMore(matchline('MATCHES*') ^ COMMENTLINE.suppress()) - OneOrMore(propertyline('PROPERTIES*') ^ propertycomment.suppress()) - (EMPTYLINE ^ stringEnd()).suppress()) commentgroup = OneOrMore(COMMENTLINE).suppress() - EMPTYLINE.suppress() grammar = OneOrMore(Group(group)('GROUPS*') ^ commentgroup) + stringEnd() return grammar @lru_cache() def property_grammar(): ParserElement.setDefaultWhitespaceChars(' ') dpi_setting = (Optional('*')('DEFAULT') + INTEGER('DPI') + Suppress('@') + INTEGER('HZ'))('SETTINGS*') mount_matrix_row = SIGNED_REAL + ',' + SIGNED_REAL + ',' + SIGNED_REAL mount_matrix = (mount_matrix_row + ';' + mount_matrix_row + ';' + mount_matrix_row)('MOUNT_MATRIX') xkb_setting = Optional(Word(alphanums + '+-/@._')) props = (('MOUSE_DPI', Group(OneOrMore(dpi_setting))), ('MOUSE_WHEEL_CLICK_ANGLE', INTEGER), ('MOUSE_WHEEL_CLICK_ANGLE_HORIZONTAL', INTEGER), ('MOUSE_WHEEL_CLICK_COUNT', INTEGER), ('MOUSE_WHEEL_CLICK_COUNT_HORIZONTAL', INTEGER), ('ID_AUTOSUSPEND', Literal('1')), ('ID_INPUT', Literal('1')), ('ID_INPUT_ACCELEROMETER', Literal('1')), ('ID_INPUT_JOYSTICK', Literal('1')), ('ID_INPUT_KEY', Literal('1')), ('ID_INPUT_KEYBOARD', Literal('1')), ('ID_INPUT_MOUSE', Literal('1')), ('ID_INPUT_POINTINGSTICK', Literal('1')), ('ID_INPUT_SWITCH', Literal('1')), ('ID_INPUT_TABLET', Literal('1')), ('ID_INPUT_TABLET_PAD', Literal('1')), ('ID_INPUT_TOUCHPAD', Literal('1')), ('ID_INPUT_TOUCHSCREEN', Literal('1')), ('ID_INPUT_TRACKBALL', Literal('1')), ('POINTINGSTICK_SENSITIVITY', INTEGER), ('POINTINGSTICK_CONST_ACCEL', REAL), ('ID_INPUT_JOYSTICK_INTEGRATION', Or(('internal', 'external'))), ('ID_INPUT_TOUCHPAD_INTEGRATION', Or(('internal', 'external'))), ('XKB_FIXED_LAYOUT', xkb_setting), ('XKB_FIXED_VARIANT', xkb_setting), ('XKB_FIXED_MODEL', xkb_setting), ('KEYBOARD_LED_NUMLOCK', Literal('0')), ('KEYBOARD_LED_CAPSLOCK', Literal('0')), ('ACCEL_MOUNT_MATRIX', mount_matrix), ('ACCEL_LOCATION', Or(('display', 'base'))), ('PROXIMITY_NEAR_LEVEL', INTEGER), ) fixed_props = [Literal(name)('NAME') - Suppress('=') - val('VALUE') for name, val in props] kbd_props = [Regex(r'KEYBOARD_KEY_[0-9a-f]+')('NAME') - Suppress('=') - ('!' ^ (Optional('!') - Word(alphanums + '_')))('VALUE') ] abs_props = [Regex(r'EVDEV_ABS_[0-9a-f]{2}')('NAME') - Suppress('=') - Word(nums + ':')('VALUE') ] grammar = Or(fixed_props + kbd_props + abs_props) + EOL return grammar ERROR = False def error(fmt, *args, **kwargs): global ERROR ERROR = True print(fmt.format(*args, **kwargs)) def convert_properties(group): matches = [m[0] for m in group.MATCHES] props = [p[0] for p in group.PROPERTIES] return matches, props def parse(fname): grammar = hwdb_grammar() try: with open(fname, 'r', encoding='UTF-8') as f: parsed = grammar.parseFile(f) except ParseBaseException as e: error('Cannot parse {}: {}', fname, e) return [] return [convert_properties(g) for g in parsed.GROUPS] def check_matches(groups): matches = sum((group[0] for group in groups), []) # This is a partial check. The other cases could be also done, but those # two are most commonly wrong. grammars = { 'usb' : 'v' + upperhex_word(4) + Optional('p' + upperhex_word(4)), 'pci' : 'v' + upperhex_word(8) + Optional('d' + upperhex_word(8)), } for match in matches: prefix, rest = match.split(':', maxsplit=1) gr = grammars.get(prefix) if gr: try: gr.parseString(rest) except ParseBaseException as e: error('Pattern {!r} is invalid: {}', rest, e) continue if rest[-1] not in '*:': error('pattern {} does not end with "*" or ":"', match) matches.sort() prev = None for match in matches: if match == prev: error('Match {!r} is duplicated', match) prev = match def check_one_default(prop, settings): defaults = [s for s in settings if s.DEFAULT] if len(defaults) > 1: error('More than one star entry: {!r}', prop) def check_one_mount_matrix(prop, value): numbers = [s for s in value if s not in {';', ','}] if len(numbers) != 9: error('Wrong accel matrix: {!r}', prop) try: numbers = [abs(float(number)) for number in numbers] except ValueError: error('Wrong accel matrix: {!r}', prop) bad_x, bad_y, bad_z = max(numbers[0:3]) == 0, max(numbers[3:6]) == 0, max(numbers[6:9]) == 0 if bad_x or bad_y or bad_z: error('Mount matrix is all zero in {} row: {!r}', 'x' if bad_x else ('y' if bad_y else 'z'), prop) def check_one_keycode(prop, value): if value != '!' and ecodes is not None: key = 'KEY_' + value.upper() if not (key in ecodes or value.upper() in ecodes or # new keys added in kernel 5.5 'KBD_LCD_MENU' in key): error('Keycode {} unknown', key) def check_properties(groups): grammar = property_grammar() for matches, props in groups: prop_names = set() for prop in props: # print('--', prop) prop = prop.partition('#')[0].rstrip() try: parsed = grammar.parseString(prop) except ParseBaseException as e: error('Failed to parse: {!r}', prop) continue # print('{!r}'.format(parsed)) if parsed.NAME in prop_names: error('Property {} is duplicated', parsed.NAME) prop_names.add(parsed.NAME) if parsed.NAME == 'MOUSE_DPI': check_one_default(prop, parsed.VALUE.SETTINGS) elif parsed.NAME == 'ACCEL_MOUNT_MATRIX': check_one_mount_matrix(prop, parsed.VALUE) elif parsed.NAME.startswith('KEYBOARD_KEY_'): val = parsed.VALUE if isinstance(parsed.VALUE, str) else parsed.VALUE[0] check_one_keycode(prop, val) def print_summary(fname, groups): n_matches = sum(len(matches) for matches, props in groups) n_props = sum(len(props) for matches, props in groups) print('{}: {} match groups, {} matches, {} properties' .format(fname, len(groups), n_matches, n_props)) if n_matches == 0 or n_props == 0: error('{}: no matches or props'.format(fname)) if __name__ == '__main__': args = sys.argv[1:] or sorted(glob.glob(os.path.dirname(sys.argv[0]) + '/[67][0-9]-*.hwdb')) for fname in args: groups = parse(fname) print_summary(fname, groups) check_matches(groups) check_properties(groups) sys.exit(ERROR)
jhaux/tensorflow
refs/heads/master
tensorflow/python/ops/metrics.py
72
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Evaluation-related metrics. @@accuracy @@auc @@false_negatives @@false_positives @@mean @@mean_absolute_error @@mean_cosine_distance @@mean_iou @@mean_per_class_accuracy @@mean_relative_error @@mean_squared_error @@mean_tensor @@percentage_below @@precision @@precision_at_thresholds @@recall @@recall_at_k @@recall_at_thresholds @@root_mean_squared_error @@sensitivity_at_specificity @@sparse_average_precision_at_k @@sparse_precision_at_k @@specificity_at_sensitivity @@true_negatives @@true_positives """ from __future__ import absolute_import from __future__ import division from __future__ import print_function # go/tf-wildcard-import # pylint: disable=wildcard-import from tensorflow.python.ops.metrics_impl import * # pylint: enable=wildcard-import from tensorflow.python.util.all_util import remove_undocumented _allowed_symbols = [] remove_undocumented(__name__, _allowed_symbols)
mfcabrera/luigi
refs/heads/master
luigi/scalding.py
38
# -*- coding: utf-8 -*- # # Copyright 2012-2015 Spotify AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ luigi.scalding has moved to luigi.contrib.scalding """ import warnings from luigi.contrib.scalding import * # NOQA warnings.warn("luigi.scalding has now moved to luigi.contrib.scalding", DeprecationWarning, stacklevel=3)
AlexOugh/horizon
refs/heads/master
openstack_dashboard/dashboards/project/images/snapshots/forms.py
81
# Copyright 2012 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # All Rights Reserved. # # Copyright 2012 Nebula, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from django.core.urlresolvers import reverse from django.utils.translation import ugettext_lazy as _ from horizon import exceptions from horizon import forms from horizon import messages from openstack_dashboard import api class CreateSnapshot(forms.SelfHandlingForm): instance_id = forms.CharField(label=_("Instance ID"), widget=forms.HiddenInput(), required=False) name = forms.CharField(max_length=255, label=_("Snapshot Name")) def handle(self, request, data): try: snapshot = api.nova.snapshot_create(request, data['instance_id'], data['name']) # NOTE(gabriel): This API call is only to display a pretty name. instance = api.nova.server_get(request, data['instance_id']) vals = {"name": data['name'], "inst": instance.name} messages.success(request, _('Snapshot "%(name)s" created for ' 'instance "%(inst)s"') % vals) return snapshot except Exception: redirect = reverse("horizon:project:instances:index") exceptions.handle(request, _('Unable to create snapshot.'), redirect=redirect)
GunnerJnr/_CodeInstitute
refs/heads/master
Stream-3/Full-Stack-Development/2.Hello-Django-Templates/3.Template-Inheritance/hello_django/HelloWorld_prj/HelloWorld_app/tests.py
873
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.test import TestCase # Create your tests here.
stack-of-tasks/rbdlpy
refs/heads/master
tutorial/lib/python2.7/site-packages/OpenGL/raw/GLES1/OES/vertex_array_object.py
8
'''Autogenerated by xml_generate script, do not edit!''' from OpenGL import platform as _p, arrays # Code generation uses this from OpenGL.raw.GLES1 import _types as _cs # End users want this... from OpenGL.raw.GLES1._types import * from OpenGL.raw.GLES1 import _errors from OpenGL.constant import Constant as _C import ctypes _EXTENSION_NAME = 'GLES1_OES_vertex_array_object' def _f( function ): return _p.createFunction( function,_p.PLATFORM.GLES1,'GLES1_OES_vertex_array_object',error_checker=_errors._error_checker) GL_VERTEX_ARRAY_BINDING_OES=_C('GL_VERTEX_ARRAY_BINDING_OES',0x85B5) @_f @_p.types(None,_cs.GLuint) def glBindVertexArrayOES(array):pass @_f @_p.types(None,_cs.GLsizei,arrays.GLuintArray) def glDeleteVertexArraysOES(n,arrays):pass @_f @_p.types(None,_cs.GLsizei,arrays.GLuintArray) def glGenVertexArraysOES(n,arrays):pass @_f @_p.types(_cs.GLboolean,_cs.GLuint) def glIsVertexArrayOES(array):pass
jennyzhang0215/incubator-mxnet
refs/heads/master
python/mxnet/ndarray_doc.py
43
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # coding: utf-8 # pylint: disable=unused-argument, too-many-arguments """Extra symbol documents""" from __future__ import absolute_import as _abs import re as _re from .base import build_param_doc as _build_param_doc class NDArrayDoc(object): """The basic class""" pass class ReshapeDoc(NDArrayDoc): """ Examples -------- Reshapes the input array into a new shape. >>> x = mx.nd.array([1, 2, 3, 4]) >>> y = mx.nd.reshape(x, shape=(2, 2)) >>> x.shape (4L,) >>> y.shape (2L, 2L) >>> y.asnumpy() array([[ 1., 2.], [ 3., 4.]], dtype=float32) You can use ``0`` to copy a particular dimension from the input to the output shape and '-1' to infer the dimensions of the output. >>> x = mx.nd.ones((2, 3, 4)) >>> x.shape (2L, 3L, 4L) >>> y = mx.nd.reshape(x, shape=(4, 0, -1)) >>> y.shape (4L, 3L, 2L) """ class elemwise_addDoc(NDArrayDoc): """ Example ------- >>> x = mx.nd.array([1, 2, 3, 4]) >>> y = mx.nd.array([1.1, 2.1, 3.1, 4.1]) >>> mx.nd.elemwise_add(x, y).asnumpy() array([ 2.0999999 , 4.0999999 , 6.0999999 , 8.10000038], dtype=float32) """ class BroadcastToDoc(NDArrayDoc): """ Examples -------- Broadcasts the input array into a new shape. >>> a = mx.nd.array(np.arange(6).reshape(6,1)) >>> b = a.broadcast_to((6,2)) >>> a.shape (6L, 1L) >>> b.shape (6L, 2L) >>> b.asnumpy() array([[ 0., 0.], [ 1., 1.], [ 2., 2.], [ 3., 3.], [ 4., 4.], [ 5., 5.]], dtype=float32) Broadcasts along axes 1 and 2. >>> c = a.reshape((2,1,1,3)) >>> d = c.broadcast_to((2,2,2,3)) >>> d.asnumpy() array([[[[ 0., 1., 2.], [ 0., 1., 2.]], [[ 0., 1., 2.], [ 0., 1., 2.]]], [[[ 3., 4., 5.], [ 3., 4., 5.]], [[ 3., 4., 5.], [ 3., 4., 5.]]]], dtype=float32) >>> c.shape (2L, 1L, 1L, 3L) >>> d.shape (2L, 2L, 2L, 3L) """ class CustomDoc(NDArrayDoc): """ Example ------- Applies a custom operator named `my_custom_operator` to `input`. >>> output = mx.symbol.Custom(op_type='my_custom_operator', data=input) """ def _build_doc(func_name, desc, arg_names, arg_types, arg_desc, key_var_num_args=None, ret_type=None): """Build docstring for imperative functions.""" param_str = _build_param_doc(arg_names, arg_types, arg_desc) # if key_var_num_args: # desc += '\nThis function support variable length of positional input.' doc_str = ('%s\n\n' + '%s\n' + 'out : NDArray, optional\n' + ' The output NDArray to hold the result.\n\n'+ 'Returns\n' + '-------\n' + 'out : NDArray or list of NDArrays\n' + ' The output of this function.') doc_str = doc_str % (desc, param_str) extra_doc = "\n" + '\n'.join([x.__doc__ for x in type.__subclasses__(NDArrayDoc) if x.__name__ == '%sDoc' % func_name]) doc_str += _re.sub(_re.compile(" "), "", extra_doc) doc_str = _re.sub('NDArray-or-Symbol', 'NDArray', doc_str) return doc_str
minhphung171093/GreenERP_V8
refs/heads/master
openerp/addons/crm_partner_assign/crm_partner_assign.py
89
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import random from openerp.addons.base_geolocalize.models.res_partner import geo_find, geo_query_address from openerp.osv import osv from openerp.osv import fields class res_partner_grade(osv.osv): _order = 'sequence' _name = 'res.partner.grade' _columns = { 'sequence': fields.integer('Sequence'), 'active': fields.boolean('Active'), 'name': fields.char('Grade Name'), 'partner_weight': fields.integer('Grade Weight', help="Gives the probability to assign a lead to this partner. (0 means no assignation.)"), } _defaults = { 'active': lambda *args: 1, 'partner_weight':1 } class res_partner_activation(osv.osv): _name = 'res.partner.activation' _order = 'sequence' _columns = { 'sequence' : fields.integer('Sequence'), 'name' : fields.char('Name', required=True), } class res_partner(osv.osv): _inherit = "res.partner" _columns = { 'partner_weight': fields.integer('Grade Weight', help="Gives the probability to assign a lead to this partner. (0 means no assignation.)"), 'opportunity_assigned_ids': fields.one2many('crm.lead', 'partner_assigned_id',\ 'Assigned Opportunities'), 'grade_id': fields.many2one('res.partner.grade', 'Grade'), 'activation' : fields.many2one('res.partner.activation', 'Activation', select=1), 'date_partnership' : fields.date('Partnership Date'), 'date_review' : fields.date('Latest Partner Review'), 'date_review_next' : fields.date('Next Partner Review'), # customer implementation 'assigned_partner_id': fields.many2one( 'res.partner', 'Implemented by', ), 'implemented_partner_ids': fields.one2many( 'res.partner', 'assigned_partner_id', string='Implementation References', ), } _defaults = { 'partner_weight': lambda *args: 0 } def onchange_grade_id(self, cr, uid, ids, grade_id, context=None): res = {'value' :{'partner_weight':0}} if grade_id: partner_grade = self.pool.get('res.partner.grade').browse(cr, uid, grade_id) res['value']['partner_weight'] = partner_grade.partner_weight return res class crm_lead(osv.osv): _inherit = "crm.lead" _columns = { 'partner_latitude': fields.float('Geo Latitude', digits=(16, 5)), 'partner_longitude': fields.float('Geo Longitude', digits=(16, 5)), 'partner_assigned_id': fields.many2one('res.partner', 'Assigned Partner',track_visibility='onchange' , help="Partner this case has been forwarded/assigned to.", select=True), 'date_assign': fields.date('Assignation Date', help="Last date this case was forwarded/assigned to a partner"), } def _merge_data(self, cr, uid, ids, oldest, fields, context=None): fields += ['partner_latitude', 'partner_longitude', 'partner_assigned_id', 'date_assign'] return super(crm_lead, self)._merge_data(cr, uid, ids, oldest, fields, context=context) def onchange_assign_id(self, cr, uid, ids, partner_assigned_id, context=None): """This function updates the "assignation date" automatically, when manually assign a partner in the geo assign tab """ if not partner_assigned_id: return {'value':{'date_assign': False}} else: partners = self.pool.get('res.partner').browse(cr, uid, [partner_assigned_id], context=context) user_id = partners[0] and partners[0].user_id.id or False return {'value': {'date_assign': fields.date.context_today(self,cr,uid,context=context), 'user_id' : user_id} } def action_assign_partner(self, cr, uid, ids, context=None): return self.assign_partner(cr, uid, ids, partner_id=False, context=context) def assign_partner(self, cr, uid, ids, partner_id=False, context=None): partner_ids = {} res = False res_partner = self.pool.get('res.partner') if not partner_id: partner_ids = self.search_geo_partner(cr, uid, ids, context=context) for lead in self.browse(cr, uid, ids, context=context): if not partner_id: partner_id = partner_ids.get(lead.id, False) if not partner_id: continue self.assign_geo_localize(cr, uid, [lead.id], lead.partner_latitude, lead.partner_longitude, context=context) partner = res_partner.browse(cr, uid, partner_id, context=context) if partner.user_id: salesteam_id = partner.section_id and partner.section_id.id or False for lead_id in ids: self.allocate_salesman(cr, uid, [lead_id], [partner.user_id.id], team_id=salesteam_id, context=context) self.write(cr, uid, [lead.id], {'date_assign': fields.date.context_today(self,cr,uid,context=context), 'partner_assigned_id': partner_id}, context=context) return res def assign_geo_localize(self, cr, uid, ids, latitude=False, longitude=False, context=None): if latitude and longitude: self.write(cr, uid, ids, { 'partner_latitude': latitude, 'partner_longitude': longitude }, context=context) return True # Don't pass context to browse()! We need country name in english below for lead in self.browse(cr, uid, ids): if lead.partner_latitude and lead.partner_longitude: continue if lead.country_id: result = geo_find(geo_query_address(street=lead.street, zip=lead.zip, city=lead.city, state=lead.state_id.name, country=lead.country_id.name)) if result: self.write(cr, uid, [lead.id], { 'partner_latitude': result[0], 'partner_longitude': result[1] }, context=context) return True def search_geo_partner(self, cr, uid, ids, context=None): res_partner = self.pool.get('res.partner') res_partner_ids = {} self.assign_geo_localize(cr, uid, ids, context=context) for lead in self.browse(cr, uid, ids, context=context): partner_ids = [] if not lead.country_id: continue latitude = lead.partner_latitude longitude = lead.partner_longitude if latitude and longitude: # 1. first way: in the same country, small area partner_ids = res_partner.search(cr, uid, [ ('partner_weight', '>', 0), ('partner_latitude', '>', latitude - 2), ('partner_latitude', '<', latitude + 2), ('partner_longitude', '>', longitude - 1.5), ('partner_longitude', '<', longitude + 1.5), ('country_id', '=', lead.country_id.id), ], context=context) # 2. second way: in the same country, big area if not partner_ids: partner_ids = res_partner.search(cr, uid, [ ('partner_weight', '>', 0), ('partner_latitude', '>', latitude - 4), ('partner_latitude', '<', latitude + 4), ('partner_longitude', '>', longitude - 3), ('partner_longitude', '<' , longitude + 3), ('country_id', '=', lead.country_id.id), ], context=context) # 3. third way: in the same country, extra large area if not partner_ids: partner_ids = res_partner.search(cr, uid, [ ('partner_weight','>', 0), ('partner_latitude','>', latitude - 8), ('partner_latitude','<', latitude + 8), ('partner_longitude','>', longitude - 8), ('partner_longitude','<', longitude + 8), ('country_id', '=', lead.country_id.id), ], context=context) # 5. fifth way: anywhere in same country if not partner_ids: # still haven't found any, let's take all partners in the country! partner_ids = res_partner.search(cr, uid, [ ('partner_weight', '>', 0), ('country_id', '=', lead.country_id.id), ], context=context) # 6. sixth way: closest partner whatsoever, just to have at least one result if not partner_ids: # warning: point() type takes (longitude, latitude) as parameters in this order! cr.execute("""SELECT id, distance FROM (select id, (point(partner_longitude, partner_latitude) <-> point(%s,%s)) AS distance FROM res_partner WHERE partner_longitude is not null AND partner_latitude is not null AND partner_weight > 0) AS d ORDER BY distance LIMIT 1""", (longitude, latitude)) res = cr.dictfetchone() if res: partner_ids.append(res['id']) total_weight = 0 toassign = [] for partner in res_partner.browse(cr, uid, partner_ids, context=context): total_weight += partner.partner_weight toassign.append( (partner.id, total_weight) ) random.shuffle(toassign) # avoid always giving the leads to the first ones in db natural order! nearest_weight = random.randint(0, total_weight) for partner_id, weight in toassign: if nearest_weight <= weight: res_partner_ids[lead.id] = partner_id break return res_partner_ids # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
amishb/youtube-dl
refs/heads/master
youtube_dl/extractor/buzzfeed.py
133
# coding: utf-8 from __future__ import unicode_literals import json import re from .common import InfoExtractor class BuzzFeedIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?buzzfeed\.com/[^?#]*?/(?P<id>[^?#]+)' _TESTS = [{ 'url': 'http://www.buzzfeed.com/abagg/this-angry-ram-destroys-a-punching-bag-like-a-boss?utm_term=4ldqpia', 'info_dict': { 'id': 'this-angry-ram-destroys-a-punching-bag-like-a-boss', 'title': 'This Angry Ram Destroys A Punching Bag Like A Boss', 'description': 'Rambro!', }, 'playlist': [{ 'info_dict': { 'id': 'aVCR29aE_OQ', 'ext': 'mp4', 'upload_date': '20141024', 'uploader_id': 'Buddhanz1', 'description': 'He likes to stay in shape with his heavy bag, he wont stop until its on the ground\n\nFollow Angry Ram on Facebook for regular updates -\nhttps://www.facebook.com/pages/Angry-Ram/1436897249899558?ref=hl', 'uploader': 'Buddhanz', 'title': 'Angry Ram destroys a punching bag', } }] }, { 'url': 'http://www.buzzfeed.com/sheridanwatson/look-at-this-cute-dog-omg?utm_term=4ldqpia', 'params': { 'skip_download': True, # Got enough YouTube download tests }, 'info_dict': { 'id': 'look-at-this-cute-dog-omg', 'description': 're:Munchkin the Teddy Bear is back ?!', 'title': 'You Need To Stop What You\'re Doing And Watching This Dog Walk On A Treadmill', }, 'playlist': [{ 'info_dict': { 'id': 'mVmBL8B-In0', 'ext': 'mp4', 'upload_date': '20141124', 'uploader_id': 'CindysMunchkin', 'description': 're:© 2014 Munchkin the', 'uploader': 're:^Munchkin the', 'title': 're:Munchkin the Teddy Bear gets her exercise', }, }] }] def _real_extract(self, url): playlist_id = self._match_id(url) webpage = self._download_webpage(url, playlist_id) all_buckets = re.findall( r'(?s)<div class="video-embed[^"]*"..*?rel:bf_bucket_data=\'([^\']+)\'', webpage) entries = [] for bd_json in all_buckets: bd = json.loads(bd_json) video = bd.get('video') or bd.get('progload_video') if not video: continue entries.append(self.url_result(video['url'])) return { '_type': 'playlist', 'id': playlist_id, 'title': self._og_search_title(webpage), 'description': self._og_search_description(webpage), 'entries': entries, }
antiface/OpenCLNoise
refs/heads/master
raw_to_mc_schematic.py
2
#!/usr/bin/python from nbt import * import numpy from openclnoise.vec import vec # Our own local copy! :) import sys,os if len(sys.argv) < 3: print "Usage: raw_to_mc_schematic.py <input.raw> <output.schematic>" sys.exit(1) inf = sys.argv[1] if not os.path.exists(inf): print "Can't find",inf sys.exit(1) size = os.stat(inf).st_size print "Reading",inf with open(inf) as inp: dims = numpy.empty((3,),dtype=numpy.uint64) dims.data = inp.read(24) w,h,d = dims elmsize = (size - 24) / (w*h*d) datatype = vec.float4 if (elmsize == 4*4) else vec.uchar4 print "Found block {0} x {1} x {2}, size {3} bytes ({4}b / elm)".format(w,h,d,size,elmsize) data = numpy.empty((dims[0],dims[1],dims[2]),dtype=datatype) data.data = inp.read() print data comp = 0.50 if datatype == vec.float4: print "Data is in float4s, comparing to {0}".format(comp) else: comp = 255 * comp # dirtlower = 0.47 # dirtupper = 0.50 print "Data is in byte4s, comparing to {0}".format(comp) #print data w,h,d = dims narr = numpy.empty(w*h*d,dtype=numpy.uint8) for x in xrange(w): for y in xrange(h): for z in xrange(d): mcd = y + z * h + x * d * h point = (data[x,y,z])[0] if point > comp: narr[mcd] = 1 alpha = (data[x,y,z])[3] if abs(alpha - 128) <= 8: narr[mcd] = 3 else: narr[mcd] = 0 # Insert air s = TAG_Compound(name="Schematic") s["Width"] = TAG_Short(w) s["Length"] = TAG_Short(d) s["Height"] = TAG_Short(h) s["Materials"] = TAG_String("Alpha") s["Blocks"] = TAG_Byte_Array() s["Blocks"].value = narr #s["Blocks"].value.shape = (w,d,h) s["Data"] = TAG_Byte_Array() s["Data"].value = numpy.zeros(w*d*h, dtype=numpy.uint8) #s["Data"].value.shape = (w,d,h) s["Entities"] = TAG_List() s["TileEntities"] = TAG_List() # Put wood at the bottom :) #s["Blocks"].value[0, :, :] = 5 outf = sys.argv[2] print "Writing",outf s.save(outf)
mujiansu/arangodb
refs/heads/devel
3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/win32comext/shell/test/testShellFolder.py
37
from win32com.shell import shell from win32com.shell.shellcon import * sf = shell.SHGetDesktopFolder() print "Shell Folder is", sf names = [] for i in sf: # Magically calls EnumObjects name = sf.GetDisplayNameOf(i, SHGDN_NORMAL) names.append(name) # And get the enumerator manually enum = sf.EnumObjects(0, SHCONTF_FOLDERS | SHCONTF_NONFOLDERS | SHCONTF_INCLUDEHIDDEN) num = 0 for i in enum: num += 1 if num != len(names): print "Should have got the same number of names!?" print "Found", len(names), "items on the desktop" for name in names: print name
lsinfo/odoo
refs/heads/8.0
addons/website_mail_group/models/mail_group.py
321
# -*- coding: utf-8 -*- from openerp.osv import osv from openerp import tools from openerp.tools.translate import _ from openerp.tools.safe_eval import safe_eval as eval from openerp.addons.website.models.website import slug class MailGroup(osv.Model): _inherit = 'mail.group' def message_get_email_values(self, cr, uid, id, notif_mail=None, context=None): res = super(MailGroup, self).message_get_email_values(cr, uid, id, notif_mail=notif_mail, context=context) group = self.browse(cr, uid, id, context=context) base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url') headers = {} if res.get('headers'): try: headers = eval(res['headers']) except Exception: pass headers.update({ 'List-Archive': '<%s/groups/%s>' % (base_url, slug(group)), 'List-Subscribe': '<%s/groups>' % (base_url), 'List-Unsubscribe': '<%s/groups?unsubscribe>' % (base_url,), }) res['headers'] = repr(headers) return res class MailMail(osv.Model): _inherit = 'mail.mail' def send_get_mail_body(self, cr, uid, mail, partner=None, context=None): """ Short-circuit parent method for mail groups, replace the default footer with one appropriate for mailing-lists.""" if mail.model == 'mail.group' and mail.res_id: # no super() call on purpose, no private links that could be quoted! group = self.pool['mail.group'].browse(cr, uid, mail.res_id, context=context) base_url = self.pool['ir.config_parameter'].get_param(cr, uid, 'web.base.url') vals = { 'maillist': _('Mailing-List'), 'post_to': _('Post to'), 'unsub': _('Unsubscribe'), 'mailto': 'mailto:%s@%s' % (group.alias_name, group.alias_domain), 'group_url': '%s/groups/%s' % (base_url, slug(group)), 'unsub_url': '%s/groups?unsubscribe' % (base_url,), } footer = """_______________________________________________ %(maillist)s: %(group_url)s %(post_to)s: %(mailto)s %(unsub)s: %(unsub_url)s """ % vals body = tools.append_content_to_html(mail.body, footer, container_tag='div') return body else: return super(MailMail, self).send_get_mail_body(cr, uid, mail, partner=partner, context=context)
alexiskulash/ia-caucus-sentiment
refs/heads/master
src/prediction.py
1
from sklearn.ensemble import RandomForestClassifier from sklearn import model_selection as ms from gensim.models import Word2Vec from vectorizer import vectorize from cleaning import clean import pandas as pd import numpy as np def prediction(vectorized_data): headers = list(vectorized_data.columns.values) vector_headers = headers[5:] target_column = vectorized_data['Number of Votes'] predictor_columns = vectorized_data.drop('Number of Votes', 1) vector_columns = vectorized_data[vector_headers] vectorized_data.reindex(np.random.permutation(vectorized_data.index)) NUM_ROWS = vectorized_data.shape[0] NUM_TEST = int(NUM_ROWS*.15) train_data = vectorized_data[NUM_TEST:] train_target = train_data['Number of Votes'] train_data = train_data[vector_header] test_data = vectorized_data[:NUM_TEST] test_candidates = test_data['Candidate'] test_counties = test_data['County'] test_target = test_data['Number of Votes'] test_data = test_data[vector_header] classifier = RandomForestClassifier(n_estimators=10) classifier = classifier.fit(train_data[vector_headers], train_target) results = classifier.predict(test_data[vector_headers]) output = pd.DataFrame(data={"Candidate":test_candidates, "County":test_counties, "Estimated Votes":results, "Actual Votes":test_target}) return output cleaned_tweets = clean() vectorized_data = vectorize(cleaned_tweets) results = prediction(vectorized_data) print(results)
mattupstate/overholt
refs/heads/master
overholt/users/forms.py
9
# -*- coding: utf-8 -*- """ overholt.users.forms ~~~~~~~~~~~~~~~~~~~~ User forms """
hwmay/pordb3
refs/heads/master
pypordb_neu.py
1
# -*- coding: utf-8 -*- ''' Copyright 2012-2018 HWM This file is part of PorDB3. PorDB3 is free software: you can redistribute it and or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. PorDB3 is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Foobar. If not, see <http: www.gnu.org licenses >. ''' from PyQt5 import QtGui, QtCore, QtWidgets from pordb_neu import Ui_Dialog as pordb_neu from pypordb_dblesen import DBLesen from pypordb_dbupdate import DBUpdate from pypordb_bilddatei_umbenennen import BilddateiUmbenennen from pypordb_bildbeschneiden import Bildbeschneiden from pypordb_neueingabe_darsteller import NeueingabeDarsteller from pypordb_darsteller_korrigieren import DarstellerKorrigieren from pypordb_original import OriginalErfassen import os import datetime size = QtCore.QSize(260, 260) sizeneu = QtCore.QSize(300, 300) size_darsteller = QtCore.QSize(1920, 1080) videodateien = (".asf", ".avi", ".divx", ".f4v", ".m4v", ".mkv", ".mpg", ".mpeg", ".mp4", ".mov", ".wmv") class Neueingabe(QtWidgets.QDialog, pordb_neu): def __init__(self, verzeichnis, verzeichnis_original, verzeichnis_thumbs, verzeichnis_trash, verzeichnis_cover, bilddatei, titel=None, darsteller=None, cd=None, bild=None, gesehen=None, original=None, cs=None, vorhanden=None, remarks=None, stars=None, cover=None, undo=None, cover_anlegen=None, original_weitere=None, original_cover = None, high_definition = None, access_from_iafd = None): QtWidgets.QDialog.__init__(self) self.setupUi(self) self.bilddatei = bilddatei self.titel = titel self.darsteller = darsteller self.cd = cd self.bild = bild self.gesehen = gesehen self.original = original self.cs = cs self.vorhanden = vorhanden self.stars = stars self.remarks = remarks self.undo = undo self.cover = cover self.cover_anlegen = cover_anlegen self.cover_austauschen = 0 self.original_weitere = original_weitere self.verzeichnis = verzeichnis self.verzeichnis_original = verzeichnis_original self.verzeichnis_thumbs = verzeichnis_thumbs self.verzeichnis_trash = verzeichnis_trash self.verzeichnis_cover = verzeichnis_cover self.original_cover = original_cover self.high_definition = high_definition self.access_from_iafd = access_from_iafd self.icon_starred = QtGui.QIcon() self.icon_starred.addPixmap(QtGui.QPixmap("pypordb/starred.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.icon_nonstarred = QtGui.QIcon() self.icon_nonstarred.addPixmap(QtGui.QPixmap("pypordb/non-starred.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.set_stars = stars self.pushButtonNeuOK.clicked.connect(self.accept) self.pushButtonNeuCancel.clicked.connect(self.close) self.pushButtonNeuDelete.clicked.connect(self.onDelete) self.pushButtonOriginal.clicked.connect(self.onOriginal) self.pushButtonOriginalAlt.clicked.connect(self.onOriginalAlt) self.pushButtonRepeat.clicked.connect(self.onRepeat) self.pushButtonAddYear.clicked.connect(self.onAddYear) self.pushButtonStar1.clicked.connect(self.onStar1) self.pushButtonStar2.clicked.connect(self.onStar2) self.pushButtonStar3.clicked.connect(self.onStar3) self.pushButtonStar4.clicked.connect(self.onStar4) self.pushButtonStar5.clicked.connect(self.onStar5) self.pushButtonClearRating.clicked.connect(self.onClearRating) self.listWidgetW.itemDoubleClicked.connect(self.onDarstelleruebernehmen) self.listWidgetM.itemDoubleClicked.connect(self.onDarstelleruebernehmen) self.pushButtonNeuDarstelleruebernehmen.clicked.connect(self.onDarstelleruebernehmen) self.pushButtonBildloeschen.clicked.connect(self.onBildloeschen) self.pushButtonVerz.clicked.connect(self.onVerzeichnisWechseln) self.pushButtonBildbeschneiden.clicked.connect(self.onBildbeschneiden) self.pushButtonNeuOK.setDefault(True) settings = QtCore.QSettings() window_size = settings.value("Neueingabe/Size", QtCore.QSize(600, 500)) self.resize(window_size) window_position = settings.value("Neueingabe/Position", QtCore.QPoint(0, 0)) self.move(window_position) # populate combox for years today = datetime.date.today() self.comboBoxYear.clear() for i in range(today.year + 1, 1899, -1): self.comboBoxYear.addItem(str(i)) self.comboBoxYear.setCurrentIndex(1) # set default position for cropping images self.positionX = 0 self.positionY = 0 zu_lesen = "SELECT * FROM pordb_vid_neu" self.lese_func = DBLesen(self, zu_lesen) self.res_vid_neu = DBLesen.get_data(self.lese_func) if self.res_vid_neu[0][3]: self.labelOriginal.setText(self.res_vid_neu[0][3]) zu_lesen = "SELECT * FROM pordb_darsteller100 ORDER BY darsteller" self.lese_func = DBLesen(self, zu_lesen) res = DBLesen.get_data(self.lese_func) res.sort() res.reverse() darsteller_m = [] darsteller_w = [] for i in res: zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, tuple(i)[1].rstrip()) res2 = DBLesen.get_data(self.lese_func) try: if res2 [0][0] == "w": darsteller_w.append(tuple (i)[1].rstrip()) else: darsteller_m.append(tuple (i)[1].rstrip()) except: pass darsteller_w.sort() darsteller_m.sort() self.listWidgetM.clear() self.listWidgetW.clear() initial = ' ' for i in darsteller_w: newitem = QtWidgets.QListWidgetItem(i) if i[0] != initial: initial = i[0] newitem.setForeground(QtGui.QColor('red')) else: newitem.setForeground(QtGui.QColor('black')) self.listWidgetW.addItem(newitem) initial = ' ' for i in darsteller_m: newitem = QtWidgets.QListWidgetItem(i) if i[0] != initial: initial = i[0] newitem.setForeground(QtGui.QColor('red')) else: newitem.setForeground(QtGui.QColor('black')) self.listWidgetM.addItem(newitem) self.bilddarstellen() if self.titel: self.korrektur = True self.lineEditNeuTitel.setText(self.titel.strip()) self.lineEditNeuDarsteller.setText(self.darsteller.strip()) self.lineEditNeuCD.setText(str(self.cd)) self.cd_alt = str(self.cd) self.lineEditNeuBild.setText(self.bild.strip()) if self.gesehen == "x": self.radioButtonGesehenJa.setChecked(True) else: self.radioButtonGesehenNein.setChecked(True) self.lineEditNeuOriginal.setText(self.original.strip()) for i in cs: anzahl = i[0] if i[1] == "f": self.spinBoxF.setValue(int(anzahl)) elif i[1] == "h": self.spinBoxH.setValue(int(anzahl)) elif i[1] == "t": self.spinBoxT.setValue(int(anzahl)) elif i[1] == "c": self.spinBoxC.setValue(int(anzahl)) elif i[1] == "x": self.spinBoxX.setValue(int(anzahl)) elif i[1] == "o": self.spinBoxO.setValue(int(anzahl)) elif i[1] == "v": self.spinBoxV.setValue(int(anzahl)) elif i[1] == "b": self.spinBoxB.setValue(int(anzahl)) elif i[1] == "a": self.spinBoxA.setValue(int(anzahl)) elif i[1] == "s": self.spinBoxS.setValue(int(anzahl)) elif i[1] == "k": self.spinBoxK.setValue(int(anzahl)) if self.vorhanden == "x": self.radioButtonVorhandenJa.setChecked(True) else: self.radioButtonVorhandenNein.setChecked(True) self.plainTextEditRemarks.setPlainText(self.remarks) if self.stars == 1: self.pushButtonStar1.setIcon(self.icon_starred) elif self.stars == 2: self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) elif self.stars == 3: self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) elif self.stars == 4: self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) self.pushButtonStar4.setIcon(self.icon_starred) elif self.stars == 5: self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) self.pushButtonStar4.setIcon(self.icon_starred) self.pushButtonStar5.setIcon(self.icon_starred) self.pushButtonBildloeschen.setEnabled(False) self.pushButtonBildbeschneiden.setEnabled(False) if self.undo: self.pushButtonNeuDelete.setEnabled(False) self.pushButtonVerz.setEnabled(False) if self.cover or self.original_cover: self.radioButtonCoverJa.setChecked(True) self.radioButtonCoverNein.setChecked(False) else: self.radioButtonCoverNein.setChecked(True) self.radioButtonCoverJa.setChecked(False) if self.high_definition == "0": self.comboBoxDefinition.setCurrentIndex(1) elif self.high_definition == "1": self.comboBoxDefinition.setCurrentIndex(2) elif self.high_definition == "2": self.comboBoxDefinition.setCurrentIndex(3) elif self.high_definition == "3": self.comboBoxDefinition.setCurrentIndex(4) elif self.high_definition == "9": self.comboBoxDefinition.setCurrentIndex(5) else: self.comboBoxDefinition.setCurrentIndex(0) else: self.korrektur = False if self.darsteller: self.lineEditNeuDarsteller.setText(self.darsteller) if self.original: self.lineEditNeuOriginal.setText(self.original.strip()) if self.cover_anlegen: self.radioButtonCoverJa.setChecked(True) self.radioButtonCoverNein.setChecked(False) anfang = os.path.basename(self.bilddatei).rfind('.') if self.original: self.lineEditNeuOriginal.setText(self.original) else: self.lineEditNeuOriginal.setText((os.path.basename(str(self.bilddatei)))[0:anfang]) else: self.radioButtonCoverJa.setChecked(False) self.radioButtonCoverNein.setChecked(True) anfang = os.path.basename(str(self.bilddatei)).rfind('.') self.lineEditNeuTitel.setText((os.path.basename(str(self.bilddatei)))[0:anfang]) dateiliste = os.listdir(self.verzeichnis) videodatei = os.path.splitext(os.path.basename(str(self.bilddatei)))[0] self.lineEditNeuTitel.setFocus() for i in dateiliste: datei = os.path.splitext(i)[0] ext = os.path.splitext(i)[1].lower() if ext in videodateien: if videodatei == datei or videodatei[0:len(videodatei) -1] == datei or videodatei[0:len(videodatei) -2] == datei: self.lineEditNeuTitel.setText(os.path.basename(i)) self.lineEditNeuDarsteller.setFocus() break self.lineEditNeuCD.setText(str(self.res_vid_neu[0][2])) self.lineEditNeuBild.setText(os.path.basename(str(self.bilddatei))) if self.access_from_iafd: self.pushButtonBildloeschen.setEnabled(False) self.pushButtonBildbeschneiden.setEnabled(False) self.pushButtonVerz.setEnabled(False) else: self.pushButtonBildloeschen.setEnabled(True) self.pushButtonBildbeschneiden.setEnabled(True) self.pushButtonVerz.setEnabled(True) self.pushButtonNeuDelete.setEnabled(False) def keyPressEvent(self, event): try: if event.modifiers() & QtCore.Qt.ControlModifier: if event.key() == QtCore.Qt.Key_Y: self.onOriginalAlt() self.update() elif event.key() == QtCore.Qt.Key_B: self.onBildbeschneiden() elif event.key() == QtCore.Qt.Key_D: self.onVerzeichnisWechseln() elif event.modifiers() & QtCore.Qt.ShiftModifier and event.key() == QtCore.Qt.Key_Minus: self.onDelete() elif event.modifiers() & QtCore.Qt.ControlModifier and event.key() == QtCore.Qt.Key_Minus: self.onBildloeschen() elif event.key() == QtCore.Qt.Key_Return or event.key() == QtCore.Qt.Key_Enter: self.accept() elif event.key() == QtCore.Qt.Key_Escape: self.close() else: self.keyPressEvent(self) except: pass def onOriginal(self): originaldialog = OriginalErfassen(self.original_weitere) originaldialog.exec_() try: self.original_weitere = originaldialog.original except: pass self.pushButtonNeuOK.setFocus() def onOriginalAlt(self): if self.res_vid_neu[0][3]: self.lineEditNeuOriginal.setText(self.res_vid_neu[0][3]) self.pushButtonNeuOK.setFocus() def onRepeat(self): if self.res_vid_neu[0][0]: self.lineEditNeuTitel.setText(self.res_vid_neu[0][0]) if self.res_vid_neu[0][1]: self.lineEditNeuDarsteller.setText(self.res_vid_neu[0][1]) if self.res_vid_neu[0][2]: self.lineEditNeuCD.setText(str(self.res_vid_neu[0][2])) if self.res_vid_neu[0][3]: self.lineEditNeuOriginal.setText(self.res_vid_neu[0][3]) self.pushButtonNeuOK.setFocus() def onAddYear(self): year = self.comboBoxYear.currentText() self.lineEditNeuOriginal.setText(self.lineEditNeuOriginal.text().strip() + " (" + str(year) + ")") self.pushButtonNeuOK.setFocus() def onStar1(self): self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_nonstarred) self.pushButtonStar3.setIcon(self.icon_nonstarred) self.pushButtonStar4.setIcon(self.icon_nonstarred) self.pushButtonStar5.setIcon(self.icon_nonstarred) self.pushButtonNeuOK.setFocus() self.set_stars = 1 def onStar2(self): self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_nonstarred) self.pushButtonStar4.setIcon(self.icon_nonstarred) self.pushButtonStar5.setIcon(self.icon_nonstarred) self.pushButtonNeuOK.setFocus() self.set_stars = 2 def onStar3(self): self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) self.pushButtonStar4.setIcon(self.icon_nonstarred) self.pushButtonStar5.setIcon(self.icon_nonstarred) self.pushButtonNeuOK.setFocus() self.set_stars = 3 def onStar4(self): self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) self.pushButtonStar4.setIcon(self.icon_starred) self.pushButtonStar5.setIcon(self.icon_nonstarred) self.pushButtonNeuOK.setFocus() self.set_stars = 4 def onStar5(self): self.pushButtonStar1.setIcon(self.icon_starred) self.pushButtonStar2.setIcon(self.icon_starred) self.pushButtonStar3.setIcon(self.icon_starred) self.pushButtonStar4.setIcon(self.icon_starred) self.pushButtonStar5.setIcon(self.icon_starred) self.pushButtonNeuOK.setFocus() self.set_stars = 5 def onClearRating(self): self.pushButtonStar1.setIcon(self.icon_nonstarred) self.pushButtonStar2.setIcon(self.icon_nonstarred) self.pushButtonStar3.setIcon(self.icon_nonstarred) self.pushButtonStar4.setIcon(self.icon_nonstarred) self.pushButtonStar5.setIcon(self.icon_nonstarred) self.pushButtonNeuOK.setFocus() self.set_stars = 0 def onDarstelleruebernehmen(self): selected = self.listWidgetW.selectedItems() selected.extend(self.listWidgetM.selectedItems()) selected_str = [] for i in selected: selected_str.append(str(i.text())) text = ", ".join(selected_str) self.lineEditNeuDarsteller.setText(text) self.lineEditNeuDarsteller.setFocus() self.lineEditNeuDarsteller.setCursorPosition(len(text)) def onBildloeschen(self): os.remove(self.bilddatei) self.close() def onVerzeichnisWechseln(self): self.file, _ = QtWidgets.QFileDialog.getOpenFileName(self, self.tr("Image files"), os.path.dirname(str(self.bilddatei)), self.tr("Image files (*.jpg *.jpeg *.png);;all files (*.*)")) if not self.file: return self.bilddatei = str(self.file) self.bilddarstellen() anfang = (os.path.basename(str(self.bilddatei))).rfind('.') self.lineEditNeuTitel.setText((os.path.basename(str(self.file)))[0:anfang]) self.lineEditNeuBild.setText(os.path.basename(str(self.file))) self.verzeichnis = os.path.dirname(str(self.file)) def onBildbeschneiden(self): bilddialog = Bildbeschneiden(self.bilddatei, self.positionX, self.positionY) bilddialog.exec_() self.positionX = bilddialog.positionX self.positionY = bilddialog.positionY self.bilddarstellen() def bilddarstellen(self): bild = QtGui.QPixmap(self.bilddatei).scaled(sizeneu, QtCore.Qt.KeepAspectRatio) self.labelNeuBildanzeige.setPixmap(bild) text = str(QtGui.QPixmap(self.bilddatei).width()) +"x" +str(QtGui.QPixmap(self.bilddatei).height()) self.groupBox_2.setTitle(text) def accept(self): fehler = 1 actor_added = False actor_adding_asked = False while fehler: darsteller, fehler, fehler_index = self.darsteller_pruefen(str(self.lineEditNeuDarsteller.text()).title()) if fehler: if fehler == 1: zu_lesen = "SELECT darsteller FROM pordb_pseudo WHERE pseudo = %s" self.lese_func = DBLesen(self, zu_lesen, darsteller[fehler_index].title().strip()) res = DBLesen.get_data(self.lese_func) if res: messageBox = QtWidgets.QMessageBox() messageBox.addButton(self.tr("Yes"), QtWidgets.QMessageBox.AcceptRole) messageBox.addButton(self.tr("No, correct entry"), QtWidgets.QMessageBox.RejectRole) messageBox.addButton(self.tr("No, add new actor"), QtWidgets.QMessageBox.ActionRole) messageBox.setWindowTitle(darsteller[fehler_index] +self.tr(" does not exist") +self.tr(", but I have found ") +res[0][0].strip() +self.tr(" as alias.")) messageBox.setIcon(QtWidgets.QMessageBox.Question) messageBox.setText(self.tr("Do you want to take this actor instead?")) messageBox.setDetailedText(darsteller[fehler_index] +self.tr(" does not exist") +self.tr(", but I have found ") +res[0][0].strip() +self.tr(" as alias. If you want to take this actor, click on yes, else change your entry or add a new actor to the database.")) message = messageBox.exec_() if message == 0: darsteller_alt = str(self.lineEditNeuDarsteller.text()).title().strip() darsteller_neu = darsteller_alt.replace(darsteller[fehler_index].strip(), str(res[0][0]).strip()) try: self.lineEditNeuDarsteller.setText(darsteller_neu) except: pass return elif message == 2: self.darsteller_addieren(darsteller, fehler_index) actor_added = True elif fehler == 2: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("You have entered some actors twice, please correct")) return if actor_adding_asked: return if not actor_added: self.darsteller_addieren(darsteller, fehler_index) actor_adding_asked = True titel = self.lineEditNeuTitel.text() if darsteller: darsteller = self.darsteller_sortieren(darsteller) if self.checkBoxUninteressant.isChecked(): darsteller.append("(Uninteressant)") try: cd = int(self.lineEditNeuCD.text()) except: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("CD is not a number")) return bild = self.lineEditNeuBild.text() if not self.radioButtonVorhandenJa.isChecked() and not self.radioButtonVorhandenNein.isChecked(): message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Please mark whether movie is available")) return if self.radioButtonVorhandenJa.isChecked(): vorhanden = "x" else: vorhanden = "" if not self.radioButtonGesehenNein.isChecked() and not self.radioButtonGesehenJa.isChecked(): message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Please mark whether movie has been watched")) return if self.radioButtonGesehenNein.isChecked(): gesehen = " " else: gesehen = "x" try: original = str(self.lineEditNeuOriginal.text()).title().split() except: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Error: original title has invalid characters")) return # get rid of double spaces original = " ".join(original) if len(original) > 256: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Error, original title is longer than 256 characters.")) return if not self.radioButtonCoverJa.isChecked() and not self.radioButtonCoverNein.isChecked(): message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Please check if image file is a cover")) return if self.radioButtonVorhandenJa.isChecked() and self.comboBoxDefinition.currentIndex() == 0 and not self.cover_austauschen: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Please select a resolution")) return if self.radioButtonVorhandenNein.isChecked() and self.comboBoxDefinition.currentIndex() != 0: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Video is not in stock: resolution deleted")) self.comboBoxDefinition.setCurrentIndex(0) zu_erfassen = [] if self.korrektur and not self.undo: darsteller_liste = self.darsteller.strip().split(", ") if not darsteller_liste[0]: darsteller_liste = [] for i in darsteller_liste: werte = [] werte.append(i) zu_erfassen.append(["UPDATE pordb_darsteller SET anzahl = anzahl - 1 WHERE darsteller = %s", werte]) if not self.radioButtonCoverJa.isChecked(): bilddatei_alt = os.path.join(self.verzeichnis_thumbs, "cd" + str(self.cd_alt), str(bild).rstrip()) if str(cd) != self.cd_alt: bilddatei_neu = os.path.join(self.verzeichnis_thumbs, "cd" + str(cd), str(bild).rstrip()) os.renames(bilddatei_alt, bilddatei_neu) else: if self.bilddatei != bilddatei_alt: bilddatei = QtGui.QImage(self.bilddatei).scaled(size, QtCore.Qt.KeepAspectRatio) if bilddatei.save(bilddatei_alt): os.remove(self.bilddatei) else: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Error saving image file")) return werte = [] werte.append(self.cd_alt) werte.append(bild) zu_erfassen.append(["DELETE FROM pordb_partner WHERE cd = %s AND bild = %s", werte]) werte = [] werte.append(titel) werte.append(", ".join(darsteller)) werte.append(cd) werte.append(bild) werte.append(gesehen) werte.append(original) zu_erfassen_zw = "UPDATE pordb_vid SET titel = %s, darsteller = %s, cd = %s, bild = %s, gesehen = %s, original = %s, csf = %s, csh = %s, cst = %s, csc = %s, csx = %s, cso = %s, csv = %s, csb = %s, csa = %s, css = %s, csk = %s, hd = %s, vorhanden = %s, remarks = %s, stars = %s WHERE cd = %s AND bild = %s" if self.spinBoxF.value() > 0: werte.append(self.spinBoxF.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxH.value() > 0: werte.append(self.spinBoxH.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxT.value() > 0: werte.append(self.spinBoxT.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxC.value() > 0: werte.append(self.spinBoxC.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxX.value() > 0: werte.append(self.spinBoxX.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxO.value() > 0: werte.append(self.spinBoxO.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxV.value() > 0: werte.append(self.spinBoxV.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxB.value() > 0: werte.append(self.spinBoxB.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxA.value() > 0: werte.append(self.spinBoxA.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxS.value() > 0: werte.append(self.spinBoxS.value()) self.spinBoxK.setValue(0) else: werte.append(0) if self.spinBoxK.value() > 0: werte.append(self.spinBoxK.value()) else: werte.append(0) if self.comboBoxDefinition.currentIndex() == 0: werte.append(None) elif self.comboBoxDefinition.currentIndex() == 1: werte.append("0") elif self.comboBoxDefinition.currentIndex() == 2: werte.append("1") elif self.comboBoxDefinition.currentIndex() == 3: werte.append("2") elif self.comboBoxDefinition.currentIndex() == 4: werte.append("3") elif self.comboBoxDefinition.currentIndex() == 5: werte.append("9") werte.append(vorhanden) werte.append(self.plainTextEditRemarks.toPlainText()) werte.append(self.set_stars) werte.append(self.cd_alt) werte.append(bild) if self.radioButtonCoverJa.isChecked() and self.cover_austauschen: if os.path.exists(os.path.join(self.verzeichnis_thumbs, "cd" + str(self.cd_alt), bild.rstrip())): # Bild war Thumbnail im CD Verzeichnis -> dieses löschen und neues im Cover Verzeichnis anlegen os.remove(os.path.join(self.verzeichnis_thumbs, "cd" + str(self.cd_alt), bild.rstrip())) os.rename(self.bilddatei, os.path.join(self.verzeichnis_cover, self.bild.strip())) else: os.rename(self.bilddatei, os.path.join(self.verzeichnis_cover, self.bild.strip())) else: if self.radioButtonCoverJa.isChecked() and not original: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("When adding a cover you must also enter a movie title")) return if self.undo: bilddatei = QtGui.QImage(os.path.join(self.verzeichnis_trash, bild)) else: if self.radioButtonCoverJa.isChecked(): bilddatei = QtGui.QImage(os.path.join(self.verzeichnis, bild)) else: bilddatei = QtGui.QImage(os.path.join(self.verzeichnis, bild)).scaled(size, QtCore.Qt.KeepAspectRatio) if self.radioButtonCoverJa.isChecked(): newfilename = os.path.join(self.verzeichnis_cover, bild) else: newfilename = os.path.join(self.verzeichnis_thumbs, "cd" +str(cd), bild) # hier klappt noch etwas nicht richtig mit den Partnern, wenn len>256 if len(bild) > 256 or os.path.exists(newfilename): neue_bilddatei = BilddateiUmbenennen(newfilename) if neue_bilddatei.exec_(): try: bild_alt = os.path.join(self.verzeichnis, bild) bild_neu = os.path.join(self.verzeichnis, neue_bilddatei.lineEditDateiname.text()) os.rename(bild_alt, bild_neu) newfilename = os.path.join(os.path.dirname(newfilename), neue_bilddatei.lineEditDateiname.text()) bild = neue_bilddatei.lineEditDateiname.text() titel = str(bild.split('.')[0]) except: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Error on renaming image file")) return else: return else: if not os.path.exists(os.path.dirname(newfilename)): os.mkdir(os.path.dirname(newfilename)) if bilddatei.save(newfilename): if not self.undo: os.remove(os.path.join(self.verzeichnis, str(bild))) else: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Error saving image file")) return werte = [] werte.append("pordb_vid_primkey_seq") zu_lesen = "SELECT nextval(%s)" self.lese_func = DBLesen(self, zu_lesen, werte) res = DBLesen.get_data(self.lese_func) werte = [] werte.append(titel) werte.append(", ".join(darsteller)) werte.append(cd) werte.append(bild) werte.append(gesehen) werte.append(original) werte.append("") werte.append(vorhanden) werte.append(res[0][0]) zu_erfassen_zw = "INSERT INTO pordb_vid VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)" if self.spinBoxF.value() > 0: cs = self.spinBoxF.value() else: cs = 0 werte.append(cs) if self.spinBoxH.value() > 0: cs = self.spinBoxH.value() else: cs = 0 werte.append(cs) if self.spinBoxT.value() > 0: cs = self.spinBoxT.value() else: cs = 0 werte.append(cs) if self.spinBoxC.value() > 0: cs = self.spinBoxC.value() else: cs = 0 werte.append(cs) if self.spinBoxX.value() > 0: cs = self.spinBoxX.value() else: cs = 0 werte.append(cs) if self.spinBoxO.value() > 0: cs = self.spinBoxO.value() else: cs = 0 werte.append(cs) if self.spinBoxV.value() > 0: cs = self.spinBoxV.value() else: cs = 0 werte.append(cs) if self.spinBoxB.value() > 0: cs = self.spinBoxB.value() else: cs = 0 werte.append(cs) if self.spinBoxA.value() > 0: cs = self.spinBoxA.value() else: cs = 0 werte.append(cs) if self.spinBoxS.value() > 0: cs = self.spinBoxS.value() else: cs = 0 werte.append(cs) if self.spinBoxK.value() > 0: cs = self.spinBoxK.value() else: cs = 0 werte.append(cs) if self.comboBoxDefinition.currentIndex() == 0: werte.append(None) elif self.comboBoxDefinition.currentIndex() == 1: werte.append("0") elif self.comboBoxDefinition.currentIndex() == 2: werte.append("1") elif self.comboBoxDefinition.currentIndex() == 3: werte.append("2") elif self.comboBoxDefinition.currentIndex() == 4: werte.append("3") elif self.comboBoxDefinition.currentIndex() == 5: werte.append("9") werte.append(self.plainTextEditRemarks.toPlainText()) werte.append(self.set_stars) zu_erfassen.append([zu_erfassen_zw, werte]) for i in darsteller: if i.lstrip() == "" or i.lstrip() == "?": continue werte = [] werte.append(i) zu_erfassen.append(["UPDATE pordb_darsteller SET anzahl = anzahl + 1 WHERE darsteller = %s", werte]) if i == "" or i == "?" or i == "(Uninteressant)" or i == "(Komplett)" or i == "(Schlechte Qualitaet)": continue zu_erfassen.append(["DELETE FROM pordb_darsteller100 WHERE darsteller = %s", werte]) zu_erfassen.append(["INSERT INTO pordb_darsteller100 (darsteller) VALUES (%s)", werte]) partner_zaehler = 0 if i.strip() != "(Uninteressant)" and i.strip() != "Defekt": zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, i) res = DBLesen.get_data(self.lese_func) geschlecht = res[0][0] for j in darsteller: if j.strip() != "(Uninteressant)" and j.strip() != "Defekt" and i != j: zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, j) res2 = DBLesen.get_data(self.lese_func) geschlecht2 = res2[0][0] if geschlecht != geschlecht2: werte = [] werte.append(i) werte.append(j) werte.append(cd) werte.append(bild) zu_erfassen.append(["INSERT INTO pordb_partner VALUES (%s, %s, %s, %s)", werte]) zu_lesen = "SELECT darsteller FROM pordb_partner WHERE darsteller = %s AND partner = %s" self.lese_func = DBLesen(self, zu_lesen, (i, j)) res3 = DBLesen.get_data(self.lese_func) if not res3: partner_zaehler += 1 if partner_zaehler > 0: werte = [] werte.append(partner_zaehler) werte.append(i) zu_erfassen.append(["UPDATE pordb_darsteller SET partner = partner + %s WHERE darsteller = %s", werte]) zu_lesen = "SELECT * FROM pordb_darsteller100" self.lese_func = DBLesen(self, zu_lesen) res1 = DBLesen.get_data(self.lese_func) anzahl_loeschen = len(res1) - 200 if anzahl_loeschen > 0: res1.sort() for zaehler in range(anzahl_loeschen): werte = [] werte.append(str(res1[zaehler][0])) zu_erfassen.append(["DELETE FROM pordb_darsteller100 WHERE nr = %s", werte]) if not self.korrektur: werte = [] werte.append(titel) werte.append(", ".join(darsteller)) werte.append(cd) if original: werte.append(original) zu_erfassen.append(["UPDATE pordb_vid_neu SET titel = %s, darsteller = %s, cd = %s, original = %s", werte]) else: zu_erfassen.append(["UPDATE pordb_vid_neu SET titel = %s, darsteller = %s, cd = %s", werte]) update_func = DBUpdate(self, zu_erfassen) DBUpdate.update_data(update_func) if self.original_weitere: zu_erfassen = [] if self.korrektur: zu_lesen = "SELECT primkey FROM pordb_vid WHERE cd = %s AND bild = %s" self.lese_func = DBLesen(self, zu_lesen, (str(self.cd_alt), str(bild))) curr_key = DBLesen.get_data(self.lese_func) werte = [] werte.append(str(curr_key[0][0])) zu_erfassen.append(["DELETE FROM pordb_original WHERE foreign_key_pordb_vid = %s", werte]) else: zu_lesen = "SELECT primkey FROM pordb_vid WHERE cd = %s AND bild = %s" self.lese_func = DBLesen(self, zu_lesen, (str(cd), bild)) curr_key = DBLesen.get_data(self.lese_func) for i in self.original_weitere: if i: if type(i) == str: werte = [] werte.append(i.title()) werte.append(str(curr_key[0][0])) zu_erfassen.append(["INSERT INTO pordb_original (original, foreign_key_pordb_vid) VALUES (%s, %s)", werte]) else: werte = [] werte.append(i.decode().title()) werte.append(str(curr_key[0][0])) zu_erfassen.append(["INSERT INTO pordb_original (original, foreign_key_pordb_vid) VALUES (%s, %s)", werte]) update_func = DBUpdate(self, zu_erfassen) DBUpdate.update_data(update_func) self.close() QtWidgets.QDialog.accept(self) # end of accept def darsteller_addieren (self, darsteller, fehler_index): messageBox = QtWidgets.QMessageBox() messageBox.addButton(self.tr("Yes, image exists"), QtWidgets.QMessageBox.AcceptRole) messageBox.addButton(self.tr("Yes, no image"), QtWidgets.QMessageBox.YesRole) messageBox.addButton(self.tr("No, correct entry"), QtWidgets.QMessageBox.RejectRole) messageBox.setWindowTitle(darsteller[fehler_index] +self.tr(" does not exist")) messageBox.setIcon(QtWidgets.QMessageBox.Question) messageBox.setText(self.tr("Do you want to add this actor?")) message = messageBox.exec_() if message == 2: korrekt = DarstellerKorrigieren(self.lineEditNeuDarsteller.text()) korrekt.exec_() try: self.lineEditNeuDarsteller.setText(korrekt.darsteller) except: pass return neuer_darsteller = NeueingabeDarsteller(darsteller[fehler_index]) neuer_darsteller.exec_() if message == 0: actor_file = False while not actor_file: self.file, _ = QtWidgets.QFileDialog.getOpenFileName(self, self.tr("Image of the actor ") +darsteller[fehler_index] +": " +self.tr("please select one"), self.verzeichnis, self.tr("Image files (*.jpg *.jpeg *.png);;all files (*.*)")) if self.file: if self.file == self.bilddatei: message = QtWidgets.QMessageBox.critical(self, self.tr("Error "), self.tr("Selected image is the one which should be added to the database. Please select another one.")) continue else: bild = QtGui.QImage(self.file) if bild.width() > size_darsteller.width() or bild.height() > size_darsteller.height(): message = QtWidgets.QMessageBox.warning(self, self.tr("Caution! "), self.tr("Image of the actor is very big")) zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, darsteller[fehler_index].strip()) res = DBLesen.get_data(self.lese_func) extension = os.path.splitext(str(self.file))[-1].lower() if extension == '.jpeg': extension = '.jpg' try: sex = res[0][0] newfilename = os.path.join(self.verzeichnis_thumbs, "darsteller_" + sex, darsteller[fehler_index].strip().replace(" ", "_").replace("'", "_apostroph_").lower() + extension.strip()) os.rename(self.file, newfilename) except: pass actor_file = True # end of darsteller_addieren def darsteller_pruefen(self, darsteller_liste): darstellerliste = darsteller_liste.split(",") darsteller = [] for i in darstellerliste: darsteller.append(i.strip()) fehler = 0 k = -1 for i in darsteller: k += 1 if i and i != "Defekt": zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, i.strip().title()) res = DBLesen.get_data(self.lese_func) if not res: fehler = 1 for j in range(0, k): if i == darsteller[j]: fehler = 2 break if fehler: break return (darsteller, fehler, k) def darsteller_sortieren(self, darsteller): darsteller_m = [] darsteller_w = [] defekt_schalter = False for i in darsteller: if i: if i == "Defekt": defekt_schalter = True zu_lesen = "SELECT sex FROM pordb_darsteller WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, i.strip().replace("''''", "''").title()) # 2nd replace when coming from actor renaming function res = DBLesen.get_data(self.lese_func) try: sex = res[0][0] try: if sex == "w": darsteller_w.append(i.strip().title()) else: darsteller_m.append(i.strip().title()) except: pass except: pass darsteller_w.sort() darsteller_m.sort() darsteller_liste = darsteller_w + darsteller_m if defekt_schalter: darsteller_liste.append("Defekt") return darsteller_liste def onDelete(self): if self.undo: self.close() return darsteller_liste = self.darsteller.strip().split(", ") zu_erfassen = [] for i in darsteller_liste: if i: werte = [] werte.append(i) zu_erfassen.append(["UPDATE pordb_darsteller SET anzahl = anzahl - 1 WHERE darsteller = %s", werte]) # Daten für undo sichern zu_lesen = "SELECT * FROM pordb_vid WHERE cd = %s AND bild = %s" self.lese_func = DBLesen(self, zu_lesen, (str(self.cd), self.bild)) res = DBLesen.get_data(self.lese_func) # Dateien in Trash Verzeichnis löschen dateiliste = os.listdir(self.verzeichnis_trash) for datei in dateiliste: if datei.find("pypordb_bildalt") == -1: os.remove(self.verzeichnis_trash + '/' + datei) # Bild in Trash Verzeichnis verschieben if not os.path.exists(self.verzeichnis_trash): os.mkdir(self.verzeichnis_trash) filename = os.path.join(self.verzeichnis_thumbs, "cd" + str(self.cd), self.bild.strip()) cover = None if not os.path.exists(filename): filename = os.path.join(self.verzeichnis_cover, self.bild.strip()) cover = "x" newfilename = os.path.join(self.verzeichnis_trash, self.bild.strip()) if os.path.exists(filename): os.rename(filename, newfilename) # Textdatei erstellen mit alten Daten textdatei = open(os.path.join(self.verzeichnis_trash, self.bild[-2] + ".txt"), "w") for i in res: for j in i: try: textdatei.write(j.encode("utf-8").rstrip() +"\n") except: textdatei.write(str(j).rstrip() +"\n") if cover: textdatei.write("COVER" +"\n") textdatei.close() werte = [] werte.append(str(self.cd)) werte.append(self.bild.strip()) zu_erfassen.append(["DELETE FROM pordb_vid WHERE cd = %s AND bild = %s", werte]) werte = [] werte.append(str(self.cd)) werte.append(self.bild.strip()) zu_erfassen.append(["DELETE FROM pordb_partner WHERE cd = %s AND bild = %s", werte]) update_func = DBUpdate(self, zu_erfassen) DBUpdate.update_data(update_func) zu_erfassen = [] for i in darsteller_liste: if i: zu_lesen = "SELECT DISTINCT ON (partner) partner FROM pordb_partner WHERE darsteller = %s" self.lese_func = DBLesen(self, zu_lesen, i) res1 = DBLesen.get_data(self.lese_func) werte = [] werte.append(len(res1)) werte.append(i) zu_erfassen.append(["UPDATE pordb_darsteller SET partner = %s WHERE darsteller = %s", werte]) if zu_erfassen: update_func = DBUpdate(self, zu_erfassen) DBUpdate.update_data(update_func) self.close() def closeEvent(self, event): settings = QtCore.QSettings() settings.setValue("Neueingabe/Size", self.size()) settings.setValue("Neueingabe/Position", self.pos())
steventimberman/masterDebater
refs/heads/master
venv/lib/python2.7/site-packages/whoosh/qparser/taggers.py
96
# Copyright 2011 Matt Chaput. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO # EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, # OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, # EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # The views and conclusions contained in the software and documentation are # those of the authors and should not be interpreted as representing official # policies, either expressed or implied, of Matt Chaput. from whoosh.util.text import rcompile # Tagger objects class Tagger(object): """Base class for taggers, objects which match syntax in the query string and translate it into a :class:`whoosh.qparser.syntax.SyntaxNode` object. """ def match(self, parser, text, pos): """This method should see if this tagger matches the query string at the given position. If it matches, it should return :param parser: the :class:`whoosh.qparser.default.QueryParser` object. :param text: the text being parsed. :param pos: the position in the text at which the tagger should try to match. """ raise NotImplementedError class RegexTagger(Tagger): """Tagger class that uses regular expressions to match the query string. Subclasses should override ``create()`` instead of ``match()``. """ def __init__(self, expr): self.expr = rcompile(expr) def match(self, parser, text, pos): match = self.expr.match(text, pos) if match: node = self.create(parser, match) if node is not None: node = node.set_range(match.start(), match.end()) return node def create(self, parser, match): """When the regular expression matches, this method is called to translate the regex match object into a syntax node. :param parser: the :class:`whoosh.qparser.default.QueryParser` object. :param match: the regex match object. """ raise NotImplementedError class FnTagger(RegexTagger): """Tagger that takes a regular expression and a class or function, and for matches calls the class/function with the regex match's named groups as keyword arguments. """ def __init__(self, expr, fn, memo=""): RegexTagger.__init__(self, expr) self.fn = fn self.memo = memo def __repr__(self): return "<%s %r (%s)>" % (self.__class__.__name__, self.expr, self.memo) def create(self, parser, match): return self.fn(**match.groupdict())
sharhar/USB-Thing
refs/heads/master
UpdaterFiles/Lib/python-3.5.1.amd64/Lib/site-packages/pip/_vendor/distlib/database.py
224
# -*- coding: utf-8 -*- # # Copyright (C) 2012-2014 The Python Software Foundation. # See LICENSE.txt and CONTRIBUTORS.txt. # """PEP 376 implementation.""" from __future__ import unicode_literals import base64 import codecs import contextlib import hashlib import logging import os import posixpath import sys import zipimport from . import DistlibException, resources from .compat import StringIO from .version import get_scheme, UnsupportedVersionError from .metadata import Metadata, METADATA_FILENAME from .util import (parse_requirement, cached_property, parse_name_and_version, read_exports, write_exports, CSVReader, CSVWriter) __all__ = ['Distribution', 'BaseInstalledDistribution', 'InstalledDistribution', 'EggInfoDistribution', 'DistributionPath'] logger = logging.getLogger(__name__) EXPORTS_FILENAME = 'pydist-exports.json' COMMANDS_FILENAME = 'pydist-commands.json' DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', 'RESOURCES', EXPORTS_FILENAME, 'SHARED') DISTINFO_EXT = '.dist-info' class _Cache(object): """ A simple cache mapping names and .dist-info paths to distributions """ def __init__(self): """ Initialise an instance. There is normally one for each DistributionPath. """ self.name = {} self.path = {} self.generated = False def clear(self): """ Clear the cache, setting it to its initial state. """ self.name.clear() self.path.clear() self.generated = False def add(self, dist): """ Add a distribution to the cache. :param dist: The distribution to add. """ if dist.path not in self.path: self.path[dist.path] = dist self.name.setdefault(dist.key, []).append(dist) class DistributionPath(object): """ Represents a set of distributions installed on a path (typically sys.path). """ def __init__(self, path=None, include_egg=False): """ Create an instance from a path, optionally including legacy (distutils/ setuptools/distribute) distributions. :param path: The path to use, as a list of directories. If not specified, sys.path is used. :param include_egg: If True, this instance will look for and return legacy distributions as well as those based on PEP 376. """ if path is None: path = sys.path self.path = path self._include_dist = True self._include_egg = include_egg self._cache = _Cache() self._cache_egg = _Cache() self._cache_enabled = True self._scheme = get_scheme('default') def _get_cache_enabled(self): return self._cache_enabled def _set_cache_enabled(self, value): self._cache_enabled = value cache_enabled = property(_get_cache_enabled, _set_cache_enabled) def clear_cache(self): """ Clears the internal cache. """ self._cache.clear() self._cache_egg.clear() def _yield_distributions(self): """ Yield .dist-info and/or .egg(-info) distributions. """ # We need to check if we've seen some resources already, because on # some Linux systems (e.g. some Debian/Ubuntu variants) there are # symlinks which alias other files in the environment. seen = set() for path in self.path: finder = resources.finder_for_path(path) if finder is None: continue r = finder.find('') if not r or not r.is_container: continue rset = sorted(r.resources) for entry in rset: r = finder.find(entry) if not r or r.path in seen: continue if self._include_dist and entry.endswith(DISTINFO_EXT): metadata_path = posixpath.join(entry, METADATA_FILENAME) pydist = finder.find(metadata_path) if not pydist: continue metadata = Metadata(fileobj=pydist.as_stream(), scheme='legacy') logger.debug('Found %s', r.path) seen.add(r.path) yield new_dist_class(r.path, metadata=metadata, env=self) elif self._include_egg and entry.endswith(('.egg-info', '.egg')): logger.debug('Found %s', r.path) seen.add(r.path) yield old_dist_class(r.path, self) def _generate_cache(self): """ Scan the path for distributions and populate the cache with those that are found. """ gen_dist = not self._cache.generated gen_egg = self._include_egg and not self._cache_egg.generated if gen_dist or gen_egg: for dist in self._yield_distributions(): if isinstance(dist, InstalledDistribution): self._cache.add(dist) else: self._cache_egg.add(dist) if gen_dist: self._cache.generated = True if gen_egg: self._cache_egg.generated = True @classmethod def distinfo_dirname(cls, name, version): """ The *name* and *version* parameters are converted into their filename-escaped form, i.e. any ``'-'`` characters are replaced with ``'_'`` other than the one in ``'dist-info'`` and the one separating the name from the version number. :parameter name: is converted to a standard distribution name by replacing any runs of non- alphanumeric characters with a single ``'-'``. :type name: string :parameter version: is converted to a standard version string. Spaces become dots, and all other non-alphanumeric characters (except dots) become dashes, with runs of multiple dashes condensed to a single dash. :type version: string :returns: directory name :rtype: string""" name = name.replace('-', '_') return '-'.join([name, version]) + DISTINFO_EXT def get_distributions(self): """ Provides an iterator that looks for distributions and returns :class:`InstalledDistribution` or :class:`EggInfoDistribution` instances for each one of them. :rtype: iterator of :class:`InstalledDistribution` and :class:`EggInfoDistribution` instances """ if not self._cache_enabled: for dist in self._yield_distributions(): yield dist else: self._generate_cache() for dist in self._cache.path.values(): yield dist if self._include_egg: for dist in self._cache_egg.path.values(): yield dist def get_distribution(self, name): """ Looks for a named distribution on the path. This function only returns the first result found, as no more than one value is expected. If nothing is found, ``None`` is returned. :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` or ``None`` """ result = None name = name.lower() if not self._cache_enabled: for dist in self._yield_distributions(): if dist.key == name: result = dist break else: self._generate_cache() if name in self._cache.name: result = self._cache.name[name][0] elif self._include_egg and name in self._cache_egg.name: result = self._cache_egg.name[name][0] return result def provides_distribution(self, name, version=None): """ Iterates over all distributions to find which distributions provide *name*. If a *version* is provided, it will be used to filter the results. This function only returns the first result found, since no more than one values are expected. If the directory is not found, returns ``None``. :parameter version: a version specifier that indicates the version required, conforming to the format in ``PEP-345`` :type name: string :type version: string """ matcher = None if not version is None: try: matcher = self._scheme.matcher('%s (%s)' % (name, version)) except ValueError: raise DistlibException('invalid name or version: %r, %r' % (name, version)) for dist in self.get_distributions(): provided = dist.provides for p in provided: p_name, p_ver = parse_name_and_version(p) if matcher is None: if p_name == name: yield dist break else: if p_name == name and matcher.match(p_ver): yield dist break def get_file_path(self, name, relative_path): """ Return the path to a resource file. """ dist = self.get_distribution(name) if dist is None: raise LookupError('no distribution named %r found' % name) return dist.get_resource_path(relative_path) def get_exported_entries(self, category, name=None): """ Return all of the exported entries in a particular category. :param category: The category to search for entries. :param name: If specified, only entries with that name are returned. """ for dist in self.get_distributions(): r = dist.exports if category in r: d = r[category] if name is not None: if name in d: yield d[name] else: for v in d.values(): yield v class Distribution(object): """ A base class for distributions, whether installed or from indexes. Either way, it must have some metadata, so that's all that's needed for construction. """ build_time_dependency = False """ Set to True if it's known to be only a build-time dependency (i.e. not needed after installation). """ requested = False """A boolean that indicates whether the ``REQUESTED`` metadata file is present (in other words, whether the package was installed by user request or it was installed as a dependency).""" def __init__(self, metadata): """ Initialise an instance. :param metadata: The instance of :class:`Metadata` describing this distribution. """ self.metadata = metadata self.name = metadata.name self.key = self.name.lower() # for case-insensitive comparisons self.version = metadata.version self.locator = None self.digest = None self.extras = None # additional features requested self.context = None # environment marker overrides self.download_urls = set() self.digests = {} @property def source_url(self): """ The source archive download URL for this distribution. """ return self.metadata.source_url download_url = source_url # Backward compatibility @property def name_and_version(self): """ A utility property which displays the name and version in parentheses. """ return '%s (%s)' % (self.name, self.version) @property def provides(self): """ A set of distribution names and versions provided by this distribution. :return: A set of "name (version)" strings. """ plist = self.metadata.provides s = '%s (%s)' % (self.name, self.version) if s not in plist: plist.append(s) return plist def _get_requirements(self, req_attr): md = self.metadata logger.debug('Getting requirements from metadata %r', md.todict()) reqts = getattr(md, req_attr) return set(md.get_requirements(reqts, extras=self.extras, env=self.context)) @property def run_requires(self): return self._get_requirements('run_requires') @property def meta_requires(self): return self._get_requirements('meta_requires') @property def build_requires(self): return self._get_requirements('build_requires') @property def test_requires(self): return self._get_requirements('test_requires') @property def dev_requires(self): return self._get_requirements('dev_requires') def matches_requirement(self, req): """ Say if this instance matches (fulfills) a requirement. :param req: The requirement to match. :rtype req: str :return: True if it matches, else False. """ # Requirement may contain extras - parse to lose those # from what's passed to the matcher r = parse_requirement(req) scheme = get_scheme(self.metadata.scheme) try: matcher = scheme.matcher(r.requirement) except UnsupportedVersionError: # XXX compat-mode if cannot read the version logger.warning('could not read version %r - using name only', req) name = req.split()[0] matcher = scheme.matcher(name) name = matcher.key # case-insensitive result = False for p in self.provides: p_name, p_ver = parse_name_and_version(p) if p_name != name: continue try: result = matcher.match(p_ver) break except UnsupportedVersionError: pass return result def __repr__(self): """ Return a textual representation of this instance, """ if self.source_url: suffix = ' [%s]' % self.source_url else: suffix = '' return '<Distribution %s (%s)%s>' % (self.name, self.version, suffix) def __eq__(self, other): """ See if this distribution is the same as another. :param other: The distribution to compare with. To be equal to one another. distributions must have the same type, name, version and source_url. :return: True if it is the same, else False. """ if type(other) is not type(self): result = False else: result = (self.name == other.name and self.version == other.version and self.source_url == other.source_url) return result def __hash__(self): """ Compute hash in a way which matches the equality test. """ return hash(self.name) + hash(self.version) + hash(self.source_url) class BaseInstalledDistribution(Distribution): """ This is the base class for installed distributions (whether PEP 376 or legacy). """ hasher = None def __init__(self, metadata, path, env=None): """ Initialise an instance. :param metadata: An instance of :class:`Metadata` which describes the distribution. This will normally have been initialised from a metadata file in the ``path``. :param path: The path of the ``.dist-info`` or ``.egg-info`` directory for the distribution. :param env: This is normally the :class:`DistributionPath` instance where this distribution was found. """ super(BaseInstalledDistribution, self).__init__(metadata) self.path = path self.dist_path = env def get_hash(self, data, hasher=None): """ Get the hash of some data, using a particular hash algorithm, if specified. :param data: The data to be hashed. :type data: bytes :param hasher: The name of a hash implementation, supported by hashlib, or ``None``. Examples of valid values are ``'sha1'``, ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and ``'sha512'``. If no hasher is specified, the ``hasher`` attribute of the :class:`InstalledDistribution` instance is used. If the hasher is determined to be ``None``, MD5 is used as the hashing algorithm. :returns: The hash of the data. If a hasher was explicitly specified, the returned hash will be prefixed with the specified hasher followed by '='. :rtype: str """ if hasher is None: hasher = self.hasher if hasher is None: hasher = hashlib.md5 prefix = '' else: hasher = getattr(hashlib, hasher) prefix = '%s=' % self.hasher digest = hasher(data).digest() digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') return '%s%s' % (prefix, digest) class InstalledDistribution(BaseInstalledDistribution): """ Created with the *path* of the ``.dist-info`` directory provided to the constructor. It reads the metadata contained in ``pydist.json`` when it is instantiated., or uses a passed in Metadata instance (useful for when dry-run mode is being used). """ hasher = 'sha256' def __init__(self, path, metadata=None, env=None): self.finder = finder = resources.finder_for_path(path) if finder is None: import pdb; pdb.set_trace () if env and env._cache_enabled and path in env._cache.path: metadata = env._cache.path[path].metadata elif metadata is None: r = finder.find(METADATA_FILENAME) # Temporary - for legacy support if r is None: r = finder.find('METADATA') if r is None: raise ValueError('no %s found in %s' % (METADATA_FILENAME, path)) with contextlib.closing(r.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') super(InstalledDistribution, self).__init__(metadata, path, env) if env and env._cache_enabled: env._cache.add(self) try: r = finder.find('REQUESTED') except AttributeError: import pdb; pdb.set_trace () self.requested = r is not None def __repr__(self): return '<InstalledDistribution %r %s at %r>' % ( self.name, self.version, self.path) def __str__(self): return "%s %s" % (self.name, self.version) def _get_records(self): """ Get the list of installed files for the distribution :return: A list of tuples of path, hash and size. Note that hash and size might be ``None`` for some entries. The path is exactly as stored in the file (which is as in PEP 376). """ results = [] r = self.get_distinfo_resource('RECORD') with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as record_reader: # Base location is parent dir of .dist-info dir #base_location = os.path.dirname(self.path) #base_location = os.path.abspath(base_location) for row in record_reader: missing = [None for i in range(len(row), 3)] path, checksum, size = row + missing #if not os.path.isabs(path): # path = path.replace('/', os.sep) # path = os.path.join(base_location, path) results.append((path, checksum, size)) return results @cached_property def exports(self): """ Return the information exported by this distribution. :return: A dictionary of exports, mapping an export category to a dict of :class:`ExportEntry` instances describing the individual export entries, and keyed by name. """ result = {} r = self.get_distinfo_resource(EXPORTS_FILENAME) if r: result = self.read_exports() return result def read_exports(self): """ Read exports data from a file in .ini format. :return: A dictionary of exports, mapping an export category to a list of :class:`ExportEntry` instances describing the individual export entries. """ result = {} r = self.get_distinfo_resource(EXPORTS_FILENAME) if r: with contextlib.closing(r.as_stream()) as stream: result = read_exports(stream) return result def write_exports(self, exports): """ Write a dictionary of exports to a file in .ini format. :param exports: A dictionary of exports, mapping an export category to a list of :class:`ExportEntry` instances describing the individual export entries. """ rf = self.get_distinfo_file(EXPORTS_FILENAME) with open(rf, 'w') as f: write_exports(exports, f) def get_resource_path(self, relative_path): """ NOTE: This API may change in the future. Return the absolute path to a resource file with the given relative path. :param relative_path: The path, relative to .dist-info, of the resource of interest. :return: The absolute path where the resource is to be found. """ r = self.get_distinfo_resource('RESOURCES') with contextlib.closing(r.as_stream()) as stream: with CSVReader(stream=stream) as resources_reader: for relative, destination in resources_reader: if relative == relative_path: return destination raise KeyError('no resource file with relative path %r ' 'is installed' % relative_path) def list_installed_files(self): """ Iterates over the ``RECORD`` entries and returns a tuple ``(path, hash, size)`` for each line. :returns: iterator of (path, hash, size) """ for result in self._get_records(): yield result def write_installed_files(self, paths, prefix, dry_run=False): """ Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any existing ``RECORD`` file is silently overwritten. prefix is used to determine when to write absolute paths. """ prefix = os.path.join(prefix, '') base = os.path.dirname(self.path) base_under_prefix = base.startswith(prefix) base = os.path.join(base, '') record_path = self.get_distinfo_file('RECORD') logger.info('creating %s', record_path) if dry_run: return None with CSVWriter(record_path) as writer: for path in paths: if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): # do not put size and hash, as in PEP-376 hash_value = size = '' else: size = '%d' % os.path.getsize(path) with open(path, 'rb') as fp: hash_value = self.get_hash(fp.read()) if path.startswith(base) or (base_under_prefix and path.startswith(prefix)): path = os.path.relpath(path, base) writer.writerow((path, hash_value, size)) # add the RECORD file itself if record_path.startswith(base): record_path = os.path.relpath(record_path, base) writer.writerow((record_path, '', '')) return record_path def check_installed_files(self): """ Checks that the hashes and sizes of the files in ``RECORD`` are matched by the files themselves. Returns a (possibly empty) list of mismatches. Each entry in the mismatch list will be a tuple consisting of the path, 'exists', 'size' or 'hash' according to what didn't match (existence is checked first, then size, then hash), the expected value and the actual value. """ mismatches = [] base = os.path.dirname(self.path) record_path = self.get_distinfo_file('RECORD') for path, hash_value, size in self.list_installed_files(): if not os.path.isabs(path): path = os.path.join(base, path) if path == record_path: continue if not os.path.exists(path): mismatches.append((path, 'exists', True, False)) elif os.path.isfile(path): actual_size = str(os.path.getsize(path)) if size and actual_size != size: mismatches.append((path, 'size', size, actual_size)) elif hash_value: if '=' in hash_value: hasher = hash_value.split('=', 1)[0] else: hasher = None with open(path, 'rb') as f: actual_hash = self.get_hash(f.read(), hasher) if actual_hash != hash_value: mismatches.append((path, 'hash', hash_value, actual_hash)) return mismatches @cached_property def shared_locations(self): """ A dictionary of shared locations whose keys are in the set 'prefix', 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. The corresponding value is the absolute path of that category for this distribution, and takes into account any paths selected by the user at installation time (e.g. via command-line arguments). In the case of the 'namespace' key, this would be a list of absolute paths for the roots of namespace packages in this distribution. The first time this property is accessed, the relevant information is read from the SHARED file in the .dist-info directory. """ result = {} shared_path = os.path.join(self.path, 'SHARED') if os.path.isfile(shared_path): with codecs.open(shared_path, 'r', encoding='utf-8') as f: lines = f.read().splitlines() for line in lines: key, value = line.split('=', 1) if key == 'namespace': result.setdefault(key, []).append(value) else: result[key] = value return result def write_shared_locations(self, paths, dry_run=False): """ Write shared location information to the SHARED file in .dist-info. :param paths: A dictionary as described in the documentation for :meth:`shared_locations`. :param dry_run: If True, the action is logged but no file is actually written. :return: The path of the file written to. """ shared_path = os.path.join(self.path, 'SHARED') logger.info('creating %s', shared_path) if dry_run: return None lines = [] for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): path = paths[key] if os.path.isdir(paths[key]): lines.append('%s=%s' % (key, path)) for ns in paths.get('namespace', ()): lines.append('namespace=%s' % ns) with codecs.open(shared_path, 'w', encoding='utf-8') as f: f.write('\n'.join(lines)) return shared_path def get_distinfo_resource(self, path): if path not in DIST_FILES: raise DistlibException('invalid path for a dist-info file: ' '%r at %r' % (path, self.path)) finder = resources.finder_for_path(self.path) if finder is None: raise DistlibException('Unable to get a finder for %s' % self.path) return finder.find(path) def get_distinfo_file(self, path): """ Returns a path located under the ``.dist-info`` directory. Returns a string representing the path. :parameter path: a ``'/'``-separated path relative to the ``.dist-info`` directory or an absolute path; If *path* is an absolute path and doesn't start with the ``.dist-info`` directory path, a :class:`DistlibException` is raised :type path: str :rtype: str """ # Check if it is an absolute path # XXX use relpath, add tests if path.find(os.sep) >= 0: # it's an absolute path? distinfo_dirname, path = path.split(os.sep)[-2:] if distinfo_dirname != self.path.split(os.sep)[-1]: raise DistlibException( 'dist-info file %r does not belong to the %r %s ' 'distribution' % (path, self.name, self.version)) # The file must be relative if path not in DIST_FILES: raise DistlibException('invalid path for a dist-info file: ' '%r at %r' % (path, self.path)) return os.path.join(self.path, path) def list_distinfo_files(self): """ Iterates over the ``RECORD`` entries and returns paths for each line if the path is pointing to a file located in the ``.dist-info`` directory or one of its subdirectories. :returns: iterator of paths """ base = os.path.dirname(self.path) for path, checksum, size in self._get_records(): # XXX add separator or use real relpath algo if not os.path.isabs(path): path = os.path.join(base, path) if path.startswith(self.path): yield path def __eq__(self, other): return (isinstance(other, InstalledDistribution) and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ class EggInfoDistribution(BaseInstalledDistribution): """Created with the *path* of the ``.egg-info`` directory or file provided to the constructor. It reads the metadata contained in the file itself, or if the given path happens to be a directory, the metadata is read from the file ``PKG-INFO`` under that directory.""" requested = True # as we have no way of knowing, assume it was shared_locations = {} def __init__(self, path, env=None): def set_name_and_version(s, n, v): s.name = n s.key = n.lower() # for case-insensitive comparisons s.version = v self.path = path self.dist_path = env if env and env._cache_enabled and path in env._cache_egg.path: metadata = env._cache_egg.path[path].metadata set_name_and_version(self, metadata.name, metadata.version) else: metadata = self._get_metadata(path) # Need to be set before caching set_name_and_version(self, metadata.name, metadata.version) if env and env._cache_enabled: env._cache_egg.add(self) super(EggInfoDistribution, self).__init__(metadata, path, env) def _get_metadata(self, path): requires = None def parse_requires_data(data): """Create a list of dependencies from a requires.txt file. *data*: the contents of a setuptools-produced requires.txt file. """ reqs = [] lines = data.splitlines() for line in lines: line = line.strip() if line.startswith('['): logger.warning('Unexpected line: quitting requirement scan: %r', line) break r = parse_requirement(line) if not r: logger.warning('Not recognised as a requirement: %r', line) continue if r.extras: logger.warning('extra requirements in requires.txt are ' 'not supported') if not r.constraints: reqs.append(r.name) else: cons = ', '.join('%s%s' % c for c in r.constraints) reqs.append('%s (%s)' % (r.name, cons)) return reqs def parse_requires_path(req_path): """Create a list of dependencies from a requires.txt file. *req_path*: the path to a setuptools-produced requires.txt file. """ reqs = [] try: with codecs.open(req_path, 'r', 'utf-8') as fp: reqs = parse_requires_data(fp.read()) except IOError: pass return reqs if path.endswith('.egg'): if os.path.isdir(path): meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') metadata = Metadata(path=meta_path, scheme='legacy') req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') requires = parse_requires_path(req_path) else: # FIXME handle the case where zipfile is not available zipf = zipimport.zipimporter(path) fileobj = StringIO( zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) metadata = Metadata(fileobj=fileobj, scheme='legacy') try: data = zipf.get_data('EGG-INFO/requires.txt') requires = parse_requires_data(data.decode('utf-8')) except IOError: requires = None elif path.endswith('.egg-info'): if os.path.isdir(path): req_path = os.path.join(path, 'requires.txt') requires = parse_requires_path(req_path) path = os.path.join(path, 'PKG-INFO') metadata = Metadata(path=path, scheme='legacy') else: raise DistlibException('path must end with .egg-info or .egg, ' 'got %r' % path) if requires: metadata.add_requirements(requires) return metadata def __repr__(self): return '<EggInfoDistribution %r %s at %r>' % ( self.name, self.version, self.path) def __str__(self): return "%s %s" % (self.name, self.version) def check_installed_files(self): """ Checks that the hashes and sizes of the files in ``RECORD`` are matched by the files themselves. Returns a (possibly empty) list of mismatches. Each entry in the mismatch list will be a tuple consisting of the path, 'exists', 'size' or 'hash' according to what didn't match (existence is checked first, then size, then hash), the expected value and the actual value. """ mismatches = [] record_path = os.path.join(self.path, 'installed-files.txt') if os.path.exists(record_path): for path, _, _ in self.list_installed_files(): if path == record_path: continue if not os.path.exists(path): mismatches.append((path, 'exists', True, False)) return mismatches def list_installed_files(self): """ Iterates over the ``installed-files.txt`` entries and returns a tuple ``(path, hash, size)`` for each line. :returns: a list of (path, hash, size) """ def _md5(path): f = open(path, 'rb') try: content = f.read() finally: f.close() return hashlib.md5(content).hexdigest() def _size(path): return os.stat(path).st_size record_path = os.path.join(self.path, 'installed-files.txt') result = [] if os.path.exists(record_path): with codecs.open(record_path, 'r', encoding='utf-8') as f: for line in f: line = line.strip() p = os.path.normpath(os.path.join(self.path, line)) # "./" is present as a marker between installed files # and installation metadata files if not os.path.exists(p): logger.warning('Non-existent file: %s', p) if p.endswith(('.pyc', '.pyo')): continue #otherwise fall through and fail if not os.path.isdir(p): result.append((p, _md5(p), _size(p))) result.append((record_path, None, None)) return result def list_distinfo_files(self, absolute=False): """ Iterates over the ``installed-files.txt`` entries and returns paths for each line if the path is pointing to a file located in the ``.egg-info`` directory or one of its subdirectories. :parameter absolute: If *absolute* is ``True``, each returned path is transformed into a local absolute path. Otherwise the raw value from ``installed-files.txt`` is returned. :type absolute: boolean :returns: iterator of paths """ record_path = os.path.join(self.path, 'installed-files.txt') skip = True with codecs.open(record_path, 'r', encoding='utf-8') as f: for line in f: line = line.strip() if line == './': skip = False continue if not skip: p = os.path.normpath(os.path.join(self.path, line)) if p.startswith(self.path): if absolute: yield p else: yield line def __eq__(self, other): return (isinstance(other, EggInfoDistribution) and self.path == other.path) # See http://docs.python.org/reference/datamodel#object.__hash__ __hash__ = object.__hash__ new_dist_class = InstalledDistribution old_dist_class = EggInfoDistribution class DependencyGraph(object): """ Represents a dependency graph between distributions. The dependency relationships are stored in an ``adjacency_list`` that maps distributions to a list of ``(other, label)`` tuples where ``other`` is a distribution and the edge is labeled with ``label`` (i.e. the version specifier, if such was provided). Also, for more efficient traversal, for every distribution ``x``, a list of predecessors is kept in ``reverse_list[x]``. An edge from distribution ``a`` to distribution ``b`` means that ``a`` depends on ``b``. If any missing dependencies are found, they are stored in ``missing``, which is a dictionary that maps distributions to a list of requirements that were not provided by any other distributions. """ def __init__(self): self.adjacency_list = {} self.reverse_list = {} self.missing = {} def add_distribution(self, distribution): """Add the *distribution* to the graph. :type distribution: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` """ self.adjacency_list[distribution] = [] self.reverse_list[distribution] = [] #self.missing[distribution] = [] def add_edge(self, x, y, label=None): """Add an edge from distribution *x* to distribution *y* with the given *label*. :type x: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` :type y: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` :type label: ``str`` or ``None`` """ self.adjacency_list[x].append((y, label)) # multiple edges are allowed, so be careful if x not in self.reverse_list[y]: self.reverse_list[y].append(x) def add_missing(self, distribution, requirement): """ Add a missing *requirement* for the given *distribution*. :type distribution: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` :type requirement: ``str`` """ logger.debug('%s missing %r', distribution, requirement) self.missing.setdefault(distribution, []).append(requirement) def _repr_dist(self, dist): return '%s %s' % (dist.name, dist.version) def repr_node(self, dist, level=1): """Prints only a subgraph""" output = [self._repr_dist(dist)] for other, label in self.adjacency_list[dist]: dist = self._repr_dist(other) if label is not None: dist = '%s [%s]' % (dist, label) output.append(' ' * level + str(dist)) suboutput = self.repr_node(other, level + 1) subs = suboutput.split('\n') output.extend(subs[1:]) return '\n'.join(output) def to_dot(self, f, skip_disconnected=True): """Writes a DOT output for the graph to the provided file *f*. If *skip_disconnected* is set to ``True``, then all distributions that are not dependent on any other distribution are skipped. :type f: has to support ``file``-like operations :type skip_disconnected: ``bool`` """ disconnected = [] f.write("digraph dependencies {\n") for dist, adjs in self.adjacency_list.items(): if len(adjs) == 0 and not skip_disconnected: disconnected.append(dist) for other, label in adjs: if not label is None: f.write('"%s" -> "%s" [label="%s"]\n' % (dist.name, other.name, label)) else: f.write('"%s" -> "%s"\n' % (dist.name, other.name)) if not skip_disconnected and len(disconnected) > 0: f.write('subgraph disconnected {\n') f.write('label = "Disconnected"\n') f.write('bgcolor = red\n') for dist in disconnected: f.write('"%s"' % dist.name) f.write('\n') f.write('}\n') f.write('}\n') def topological_sort(self): """ Perform a topological sort of the graph. :return: A tuple, the first element of which is a topologically sorted list of distributions, and the second element of which is a list of distributions that cannot be sorted because they have circular dependencies and so form a cycle. """ result = [] # Make a shallow copy of the adjacency list alist = {} for k, v in self.adjacency_list.items(): alist[k] = v[:] while True: # See what we can remove in this run to_remove = [] for k, v in list(alist.items())[:]: if not v: to_remove.append(k) del alist[k] if not to_remove: # What's left in alist (if anything) is a cycle. break # Remove from the adjacency list of others for k, v in alist.items(): alist[k] = [(d, r) for d, r in v if d not in to_remove] logger.debug('Moving to result: %s', ['%s (%s)' % (d.name, d.version) for d in to_remove]) result.extend(to_remove) return result, list(alist.keys()) def __repr__(self): """Representation of the graph""" output = [] for dist, adjs in self.adjacency_list.items(): output.append(self.repr_node(dist)) return '\n'.join(output) def make_graph(dists, scheme='default'): """Makes a dependency graph from the given distributions. :parameter dists: a list of distributions :type dists: list of :class:`distutils2.database.InstalledDistribution` and :class:`distutils2.database.EggInfoDistribution` instances :rtype: a :class:`DependencyGraph` instance """ scheme = get_scheme(scheme) graph = DependencyGraph() provided = {} # maps names to lists of (version, dist) tuples # first, build the graph and find out what's provided for dist in dists: graph.add_distribution(dist) for p in dist.provides: name, version = parse_name_and_version(p) logger.debug('Add to provided: %s, %s, %s', name, version, dist) provided.setdefault(name, []).append((version, dist)) # now make the edges for dist in dists: requires = (dist.run_requires | dist.meta_requires | dist.build_requires | dist.dev_requires) for req in requires: try: matcher = scheme.matcher(req) except UnsupportedVersionError: # XXX compat-mode if cannot read the version logger.warning('could not read version %r - using name only', req) name = req.split()[0] matcher = scheme.matcher(name) name = matcher.key # case-insensitive matched = False if name in provided: for version, provider in provided[name]: try: match = matcher.match(version) except UnsupportedVersionError: match = False if match: graph.add_edge(dist, provider, req) matched = True break if not matched: graph.add_missing(dist, req) return graph def get_dependent_dists(dists, dist): """Recursively generate a list of distributions from *dists* that are dependent on *dist*. :param dists: a list of distributions :param dist: a distribution, member of *dists* for which we are interested """ if dist not in dists: raise DistlibException('given distribution %r is not a member ' 'of the list' % dist.name) graph = make_graph(dists) dep = [dist] # dependent distributions todo = graph.reverse_list[dist] # list of nodes we should inspect while todo: d = todo.pop() dep.append(d) for succ in graph.reverse_list[d]: if succ not in dep: todo.append(succ) dep.pop(0) # remove dist from dep, was there to prevent infinite loops return dep def get_required_dists(dists, dist): """Recursively generate a list of distributions from *dists* that are required by *dist*. :param dists: a list of distributions :param dist: a distribution, member of *dists* for which we are interested """ if dist not in dists: raise DistlibException('given distribution %r is not a member ' 'of the list' % dist.name) graph = make_graph(dists) req = [] # required distributions todo = graph.adjacency_list[dist] # list of nodes we should inspect while todo: d = todo.pop()[0] req.append(d) for pred in graph.adjacency_list[d]: if pred not in req: todo.append(pred) return req def make_dist(name, version, **kwargs): """ A convenience method for making a dist given just a name and version. """ summary = kwargs.pop('summary', 'Placeholder for summary') md = Metadata(**kwargs) md.name = name md.version = version md.summary = summary or 'Plaeholder for summary' return Distribution(md)
Changaco/oh-mainline
refs/heads/master
vendor/packages/twisted/twisted/mail/test/test_smtp.py
17
# Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Test cases for twisted.mail.smtp module. """ from zope.interface import implements from twisted.python.util import LineLog from twisted.trial import unittest, util from twisted.protocols import basic, loopback from twisted.mail import smtp from twisted.internet import defer, protocol, reactor, interfaces from twisted.internet import address, error, task from twisted.test.proto_helpers import StringTransport from twisted import cred import twisted.cred.error import twisted.cred.portal import twisted.cred.checkers import twisted.cred.credentials from twisted.cred.portal import IRealm, Portal from twisted.cred.checkers import ICredentialsChecker, AllowAnonymousAccess from twisted.cred.credentials import IAnonymous from twisted.cred.error import UnauthorizedLogin from twisted.mail import imap4 try: from twisted.test.ssl_helpers import ClientTLSContext, ServerTLSContext except ImportError: ClientTLSContext = ServerTLSContext = None import re try: from cStringIO import StringIO except ImportError: from StringIO import StringIO def spameater(*spam, **eggs): return None class BrokenMessage(object): """ L{BrokenMessage} is an L{IMessage} which raises an unexpected exception from its C{eomReceived} method. This is useful for creating a server which can be used to test client retry behavior. """ implements(smtp.IMessage) def __init__(self, user): pass def lineReceived(self, line): pass def eomReceived(self): raise RuntimeError("Some problem, delivery is failing.") def connectionLost(self): pass class DummyMessage(object): """ L{BrokenMessage} is an L{IMessage} which saves the message delivered to it to its domain object. @ivar domain: A L{DummyDomain} which will be used to store the message once it is received. """ def __init__(self, domain, user): self.domain = domain self.user = user self.buffer = [] def lineReceived(self, line): # Throw away the generated Received: header if not re.match('Received: From yyy.com \(\[.*\]\) by localhost;', line): self.buffer.append(line) def eomReceived(self): message = '\n'.join(self.buffer) + '\n' self.domain.messages[self.user.dest.local].append(message) deferred = defer.Deferred() deferred.callback("saved") return deferred class DummyDomain(object): """ L{DummyDomain} is an L{IDomain} which keeps track of messages delivered to it in memory. """ def __init__(self, names): self.messages = {} for name in names: self.messages[name] = [] def exists(self, user): if user.dest.local in self.messages: return defer.succeed(lambda: self.startMessage(user)) return defer.fail(smtp.SMTPBadRcpt(user)) def startMessage(self, user): return DummyMessage(self, user) class SMTPTestCase(unittest.TestCase): messages = [('[email protected]', ['[email protected]', '[email protected]'], '''\ Subject: urgent\015 \015 Someone set up us the bomb!\015 ''')] mbox = {'foo': ['Subject: urgent\n\nSomeone set up us the bomb!\n']} def setUp(self): """ Create an in-memory mail domain to which messages may be delivered by tests and create a factory and transport to do the delivering. """ self.factory = smtp.SMTPFactory() self.factory.domains = {} self.factory.domains['baz.com'] = DummyDomain(['foo']) self.transport = StringTransport() def testMessages(self): from twisted.mail import protocols protocol = protocols.DomainSMTP() protocol.service = self.factory protocol.factory = self.factory protocol.receivedHeader = spameater protocol.makeConnection(self.transport) protocol.lineReceived('HELO yyy.com') for message in self.messages: protocol.lineReceived('MAIL FROM:<%s>' % message[0]) for target in message[1]: protocol.lineReceived('RCPT TO:<%s>' % target) protocol.lineReceived('DATA') protocol.dataReceived(message[2]) protocol.lineReceived('.') protocol.lineReceived('QUIT') if self.mbox != self.factory.domains['baz.com'].messages: raise AssertionError(self.factory.domains['baz.com'].messages) protocol.setTimeout(None) testMessages.suppress = [util.suppress(message='DomainSMTP', category=DeprecationWarning)] mail = '''\ Subject: hello Goodbye ''' class MyClient: def __init__(self, messageInfo=None): if messageInfo is None: messageInfo = ( '[email protected]', ['[email protected]'], StringIO(mail)) self._sender = messageInfo[0] self._recipient = messageInfo[1] self._data = messageInfo[2] def getMailFrom(self): return self._sender def getMailTo(self): return self._recipient def getMailData(self): return self._data def sendError(self, exc): self._error = exc def sentMail(self, code, resp, numOk, addresses, log): # Prevent another mail from being sent. self._sender = None self._recipient = None self._data = None class MySMTPClient(MyClient, smtp.SMTPClient): def __init__(self, messageInfo=None): smtp.SMTPClient.__init__(self, 'foo.baz') MyClient.__init__(self, messageInfo) class MyESMTPClient(MyClient, smtp.ESMTPClient): def __init__(self, secret = '', contextFactory = None): smtp.ESMTPClient.__init__(self, secret, contextFactory, 'foo.baz') MyClient.__init__(self) class LoopbackMixin: def loopback(self, server, client): return loopback.loopbackTCP(server, client) class LoopbackTestCase(LoopbackMixin): def testMessages(self): factory = smtp.SMTPFactory() factory.domains = {} factory.domains['foo.bar'] = DummyDomain(['moshez']) from twisted.mail.protocols import DomainSMTP protocol = DomainSMTP() protocol.service = factory protocol.factory = factory clientProtocol = self.clientClass() return self.loopback(protocol, clientProtocol) testMessages.suppress = [util.suppress(message='DomainSMTP', category=DeprecationWarning)] class LoopbackSMTPTestCase(LoopbackTestCase, unittest.TestCase): clientClass = MySMTPClient class LoopbackESMTPTestCase(LoopbackTestCase, unittest.TestCase): clientClass = MyESMTPClient class FakeSMTPServer(basic.LineReceiver): clientData = [ '220 hello', '250 nice to meet you', '250 great', '250 great', '354 go on, lad' ] def connectionMade(self): self.buffer = [] self.clientData = self.clientData[:] self.clientData.reverse() self.sendLine(self.clientData.pop()) def lineReceived(self, line): self.buffer.append(line) if line == "QUIT": self.transport.write("221 see ya around\r\n") self.transport.loseConnection() elif line == ".": self.transport.write("250 gotcha\r\n") elif line == "RSET": self.transport.loseConnection() if self.clientData: self.sendLine(self.clientData.pop()) class SMTPClientTestCase(unittest.TestCase, LoopbackMixin): """ Tests for L{smtp.SMTPClient}. """ def test_timeoutConnection(self): """ L{smtp.SMTPClient.timeoutConnection} calls the C{sendError} hook with a fatal L{SMTPTimeoutError} with the current line log. """ error = [] client = MySMTPClient() client.sendError = error.append client.makeConnection(StringTransport()) client.lineReceived("220 hello") client.timeoutConnection() self.assertIsInstance(error[0], smtp.SMTPTimeoutError) self.assertTrue(error[0].isFatal) self.assertEqual( str(error[0]), "Timeout waiting for SMTP server response\n" "<<< 220 hello\n" ">>> HELO foo.baz\n") expected_output = [ 'HELO foo.baz', 'MAIL FROM:<[email protected]>', 'RCPT TO:<[email protected]>', 'DATA', 'Subject: hello', '', 'Goodbye', '.', 'RSET' ] def test_messages(self): """ L{smtp.SMTPClient} sends I{HELO}, I{MAIL FROM}, I{RCPT TO}, and I{DATA} commands based on the return values of its C{getMailFrom}, C{getMailTo}, and C{getMailData} methods. """ client = MySMTPClient() server = FakeSMTPServer() d = self.loopback(server, client) d.addCallback(lambda x : self.assertEquals(server.buffer, self.expected_output)) return d def test_transferError(self): """ If there is an error while producing the message body to the connection, the C{sendError} callback is invoked. """ client = MySMTPClient( ('[email protected]', ['[email protected]'], StringIO("foo"))) transport = StringTransport() client.makeConnection(transport) client.dataReceived( '220 Ok\r\n' # Greeting '250 Ok\r\n' # EHLO response '250 Ok\r\n' # MAIL FROM response '250 Ok\r\n' # RCPT TO response '354 Ok\r\n' # DATA response ) # Sanity check - a pull producer should be registered now. self.assertNotIdentical(transport.producer, None) self.assertFalse(transport.streaming) # Now stop the producer prematurely, meaning the message was not sent. transport.producer.stopProducing() # The sendError hook should have been invoked as a result. self.assertIsInstance(client._error, Exception) def test_sendFatalError(self): """ If L{smtp.SMTPClient.sendError} is called with an L{SMTPClientError} which is fatal, it disconnects its transport without writing anything more to it. """ client = smtp.SMTPClient(None) transport = StringTransport() client.makeConnection(transport) client.sendError(smtp.SMTPClientError(123, "foo", isFatal=True)) self.assertEqual(transport.value(), "") self.assertTrue(transport.disconnecting) def test_sendNonFatalError(self): """ If L{smtp.SMTPClient.sendError} is called with an L{SMTPClientError} which is not fatal, it sends C{"QUIT"} and waits for the server to close the connection. """ client = smtp.SMTPClient(None) transport = StringTransport() client.makeConnection(transport) client.sendError(smtp.SMTPClientError(123, "foo", isFatal=False)) self.assertEqual(transport.value(), "QUIT\r\n") self.assertFalse(transport.disconnecting) def test_sendOtherError(self): """ If L{smtp.SMTPClient.sendError} is called with an exception which is not an L{SMTPClientError}, it disconnects its transport without writing anything more to it. """ client = smtp.SMTPClient(None) transport = StringTransport() client.makeConnection(transport) client.sendError(Exception("foo")) self.assertEqual(transport.value(), "") self.assertTrue(transport.disconnecting) class DummySMTPMessage: def __init__(self, protocol, users): self.protocol = protocol self.users = users self.buffer = [] def lineReceived(self, line): self.buffer.append(line) def eomReceived(self): message = '\n'.join(self.buffer) + '\n' helo, origin = self.users[0].helo[0], str(self.users[0].orig) recipients = [] for user in self.users: recipients.append(str(user)) self.protocol.message[tuple(recipients)] = (helo, origin, recipients, message) return defer.succeed("saved") class DummyProto: def connectionMade(self): self.dummyMixinBase.connectionMade(self) self.message = {} def startMessage(self, users): return DummySMTPMessage(self, users) def receivedHeader(*spam): return None def validateTo(self, user): self.delivery = SimpleDelivery(None) return lambda: self.startMessage([user]) def validateFrom(self, helo, origin): return origin class DummySMTP(DummyProto, smtp.SMTP): dummyMixinBase = smtp.SMTP class DummyESMTP(DummyProto, smtp.ESMTP): dummyMixinBase = smtp.ESMTP class AnotherTestCase: serverClass = None clientClass = None messages = [ ('foo.com', '[email protected]', ['[email protected]'], '[email protected]', ['[email protected]'], '''\ From: Moshe To: Moshe Hi, how are you? '''), ('foo.com', '[email protected]', ['uuu@ooo', 'yyy@eee'], '[email protected]', ['uuu@ooo', 'yyy@eee'], '''\ Subject: pass ..rrrr.. '''), ('foo.com', '@this,@is,@ignored:[email protected]', ['@ignore,@this,@too:[email protected]'], '[email protected]', ['[email protected]'], '''\ Subject: apa To: foo 123 . 456 '''), ] data = [ ('', '220.*\r\n$', None, None), ('HELO foo.com\r\n', '250.*\r\n$', None, None), ('RSET\r\n', '250.*\r\n$', None, None), ] for helo_, from_, to_, realfrom, realto, msg in messages: data.append(('MAIL FROM:<%s>\r\n' % from_, '250.*\r\n', None, None)) for rcpt in to_: data.append(('RCPT TO:<%s>\r\n' % rcpt, '250.*\r\n', None, None)) data.append(('DATA\r\n','354.*\r\n', msg, ('250.*\r\n', (helo_, realfrom, realto, msg)))) def test_buffer(self): """ Exercise a lot of the SMTP client code. This is a "shotgun" style unit test. It does a lot of things and hopes that something will go really wrong if it is going to go wrong. This test should be replaced with a suite of nicer tests. """ transport = StringTransport() a = self.serverClass() class fooFactory: domain = 'foo.com' a.factory = fooFactory() a.makeConnection(transport) for (send, expect, msg, msgexpect) in self.data: if send: a.dataReceived(send) data = transport.value() transport.clear() if not re.match(expect, data): raise AssertionError, (send, expect, data) if data[:3] == '354': for line in msg.splitlines(): if line and line[0] == '.': line = '.' + line a.dataReceived(line + '\r\n') a.dataReceived('.\r\n') # Special case for DATA. Now we want a 250, and then # we compare the messages data = transport.value() transport.clear() resp, msgdata = msgexpect if not re.match(resp, data): raise AssertionError, (resp, data) for recip in msgdata[2]: expected = list(msgdata[:]) expected[2] = [recip] self.assertEquals( a.message[(recip,)], tuple(expected) ) a.setTimeout(None) class AnotherESMTPTestCase(AnotherTestCase, unittest.TestCase): serverClass = DummyESMTP clientClass = MyESMTPClient class AnotherSMTPTestCase(AnotherTestCase, unittest.TestCase): serverClass = DummySMTP clientClass = MySMTPClient class DummyChecker: implements(cred.checkers.ICredentialsChecker) users = { 'testuser': 'testpassword' } credentialInterfaces = (cred.credentials.IUsernamePassword, cred.credentials.IUsernameHashedPassword) def requestAvatarId(self, credentials): return defer.maybeDeferred( credentials.checkPassword, self.users[credentials.username] ).addCallback(self._cbCheck, credentials.username) def _cbCheck(self, result, username): if result: return username raise cred.error.UnauthorizedLogin() class SimpleDelivery(object): """ L{SimpleDelivery} is a message delivery factory with no interesting behavior. """ implements(smtp.IMessageDelivery) def __init__(self, messageFactory): self._messageFactory = messageFactory def receivedHeader(self, helo, origin, recipients): return None def validateFrom(self, helo, origin): return origin def validateTo(self, user): return lambda: self._messageFactory(user) class DummyRealm: def requestAvatar(self, avatarId, mind, *interfaces): return smtp.IMessageDelivery, SimpleDelivery(None), lambda: None class AuthTestCase(unittest.TestCase, LoopbackMixin): def test_crammd5Auth(self): """ L{ESMTPClient} can authenticate using the I{CRAM-MD5} SASL mechanism. @see: U{http://tools.ietf.org/html/rfc2195} """ realm = DummyRealm() p = cred.portal.Portal(realm) p.registerChecker(DummyChecker()) server = DummyESMTP({'CRAM-MD5': cred.credentials.CramMD5Credentials}) server.portal = p client = MyESMTPClient('testpassword') cAuth = smtp.CramMD5ClientAuthenticator('testuser') client.registerAuthenticator(cAuth) d = self.loopback(server, client) d.addCallback(lambda x : self.assertEquals(server.authenticated, 1)) return d def test_loginAuth(self): """ L{ESMTPClient} can authenticate using the I{LOGIN} SASL mechanism. @see: U{http://sepp.oetiker.ch/sasl-2.1.19-ds/draft-murchison-sasl-login-00.txt} """ realm = DummyRealm() p = cred.portal.Portal(realm) p.registerChecker(DummyChecker()) server = DummyESMTP({'LOGIN': imap4.LOGINCredentials}) server.portal = p client = MyESMTPClient('testpassword') cAuth = smtp.LOGINAuthenticator('testuser') client.registerAuthenticator(cAuth) d = self.loopback(server, client) d.addCallback(lambda x: self.assertTrue(server.authenticated)) return d def test_loginAgainstWeirdServer(self): """ When communicating with a server which implements the I{LOGIN} SASL mechanism using C{"Username:"} as the challenge (rather than C{"User Name\\0"}), L{ESMTPClient} can still authenticate successfully using the I{LOGIN} mechanism. """ realm = DummyRealm() p = cred.portal.Portal(realm) p.registerChecker(DummyChecker()) class WeirdLOGIN(imap4.LOGINCredentials): def __init__(self): imap4.LOGINCredentials.__init__(self) self.challenges[1] = 'Username:' server = DummyESMTP({'LOGIN': WeirdLOGIN}) server.portal = p client = MyESMTPClient('testpassword') cAuth = smtp.LOGINAuthenticator('testuser') client.registerAuthenticator(cAuth) d = self.loopback(server, client) d.addCallback(lambda x: self.assertTrue(server.authenticated)) return d class SMTPHelperTestCase(unittest.TestCase): def testMessageID(self): d = {} for i in range(1000): m = smtp.messageid('testcase') self.failIf(m in d) d[m] = None def testQuoteAddr(self): cases = [ ['[email protected]', '<[email protected]>'], ['"User Name" <[email protected]>', '<[email protected]>'], [smtp.Address('someguy@someplace'), '<someguy@someplace>'], ['', '<>'], [smtp.Address(''), '<>'], ] for (c, e) in cases: self.assertEquals(smtp.quoteaddr(c), e) def testUser(self): u = smtp.User('user@host', 'helo.host.name', None, None) self.assertEquals(str(u), 'user@host') def testXtextEncoding(self): cases = [ ('Hello world', 'Hello+20world'), ('Hello+world', 'Hello+2Bworld'), ('\0\1\2\3\4\5', '+00+01+02+03+04+05'), ('[email protected]', '[email protected]') ] for (case, expected) in cases: self.assertEqual(smtp.xtext_encode(case), (expected, len(case))) self.assertEquals(case.encode('xtext'), expected) self.assertEqual( smtp.xtext_decode(expected), (case, len(expected))) self.assertEquals(expected.decode('xtext'), case) def test_encodeWithErrors(self): """ Specifying an error policy to C{unicode.encode} with the I{xtext} codec should produce the same result as not specifying the error policy. """ text = u'Hello world' self.assertEqual( smtp.xtext_encode(text, 'strict'), (text.encode('xtext'), len(text))) self.assertEqual( text.encode('xtext', 'strict'), text.encode('xtext')) def test_decodeWithErrors(self): """ Similar to L{test_encodeWithErrors}, but for C{str.decode}. """ bytes = 'Hello world' self.assertEqual( smtp.xtext_decode(bytes, 'strict'), (bytes.decode('xtext'), len(bytes))) self.assertEqual( bytes.decode('xtext', 'strict'), bytes.decode('xtext')) class NoticeTLSClient(MyESMTPClient): tls = False def esmtpState_starttls(self, code, resp): MyESMTPClient.esmtpState_starttls(self, code, resp) self.tls = True class TLSTestCase(unittest.TestCase, LoopbackMixin): def testTLS(self): clientCTX = ClientTLSContext() serverCTX = ServerTLSContext() client = NoticeTLSClient(contextFactory=clientCTX) server = DummyESMTP(contextFactory=serverCTX) def check(ignored): self.assertEquals(client.tls, True) self.assertEquals(server.startedTLS, True) return self.loopback(server, client).addCallback(check) if ClientTLSContext is None: for case in (TLSTestCase,): case.skip = "OpenSSL not present" if not interfaces.IReactorSSL.providedBy(reactor): for case in (TLSTestCase,): case.skip = "Reactor doesn't support SSL" class EmptyLineTestCase(unittest.TestCase): def test_emptyLineSyntaxError(self): """ If L{smtp.SMTP} receives an empty line, it responds with a 500 error response code and a message about a syntax error. """ proto = smtp.SMTP() transport = StringTransport() proto.makeConnection(transport) proto.lineReceived('') proto.setTimeout(None) out = transport.value().splitlines() self.assertEquals(len(out), 2) self.failUnless(out[0].startswith('220')) self.assertEquals(out[1], "500 Error: bad syntax") class TimeoutTestCase(unittest.TestCase, LoopbackMixin): """ Check that SMTP client factories correctly use the timeout. """ def _timeoutTest(self, onDone, clientFactory): """ Connect the clientFactory, and check the timeout on the request. """ clock = task.Clock() client = clientFactory.buildProtocol( address.IPv4Address('TCP', 'example.net', 25)) client.callLater = clock.callLater t = StringTransport() client.makeConnection(t) t.protocol = client def check(ign): self.assertEquals(clock.seconds(), 0.5) d = self.assertFailure(onDone, smtp.SMTPTimeoutError ).addCallback(check) # The first call should not trigger the timeout clock.advance(0.1) # But this one should clock.advance(0.4) return d def test_SMTPClient(self): """ Test timeout for L{smtp.SMTPSenderFactory}: the response L{Deferred} should be errback with a L{smtp.SMTPTimeoutError}. """ onDone = defer.Deferred() clientFactory = smtp.SMTPSenderFactory( 'source@address', 'recipient@address', StringIO("Message body"), onDone, retries=0, timeout=0.5) return self._timeoutTest(onDone, clientFactory) def test_ESMTPClient(self): """ Test timeout for L{smtp.ESMTPSenderFactory}: the response L{Deferred} should be errback with a L{smtp.SMTPTimeoutError}. """ onDone = defer.Deferred() clientFactory = smtp.ESMTPSenderFactory( 'username', 'password', 'source@address', 'recipient@address', StringIO("Message body"), onDone, retries=0, timeout=0.5) return self._timeoutTest(onDone, clientFactory) def test_resetTimeoutWhileSending(self): """ The timeout is not allowed to expire after the server has accepted a DATA command and the client is actively sending data to it. """ class SlowFile: """ A file-like which returns one byte from each read call until the specified number of bytes have been returned. """ def __init__(self, size): self._size = size def read(self, max=None): if self._size: self._size -= 1 return 'x' return '' failed = [] onDone = defer.Deferred() onDone.addErrback(failed.append) clientFactory = smtp.SMTPSenderFactory( 'source@address', 'recipient@address', SlowFile(1), onDone, retries=0, timeout=3) clientFactory.domain = "example.org" clock = task.Clock() client = clientFactory.buildProtocol( address.IPv4Address('TCP', 'example.net', 25)) client.callLater = clock.callLater transport = StringTransport() client.makeConnection(transport) client.dataReceived( "220 Ok\r\n" # Greet the client "250 Ok\r\n" # Respond to HELO "250 Ok\r\n" # Respond to MAIL FROM "250 Ok\r\n" # Respond to RCPT TO "354 Ok\r\n" # Respond to DATA ) # Now the client is producing data to the server. Any time # resumeProducing is called on the producer, the timeout should be # extended. First, a sanity check. This test is only written to # handle pull producers. self.assertNotIdentical(transport.producer, None) self.assertFalse(transport.streaming) # Now, allow 2 seconds (1 less than the timeout of 3 seconds) to # elapse. clock.advance(2) # The timeout has not expired, so the failure should not have happened. self.assertEqual(failed, []) # Let some bytes be produced, extending the timeout. Then advance the # clock some more and verify that the timeout still hasn't happened. transport.producer.resumeProducing() clock.advance(2) self.assertEqual(failed, []) # The file has been completely produced - the next resume producing # finishes the upload, successfully. transport.producer.resumeProducing() client.dataReceived("250 Ok\r\n") self.assertEqual(failed, []) # Verify that the client actually did send the things expected. self.assertEqual( transport.value(), "HELO example.org\r\n" "MAIL FROM:<source@address>\r\n" "RCPT TO:<recipient@address>\r\n" "DATA\r\n" "x\r\n" ".\r\n" # This RSET is just an implementation detail. It's nice, but this # test doesn't really care about it. "RSET\r\n") class MultipleDeliveryFactorySMTPServerFactory(protocol.ServerFactory): """ L{MultipleDeliveryFactorySMTPServerFactory} creates SMTP server protocol instances with message delivery factory objects supplied to it. Each factory is used for one connection and then discarded. Factories are used in the order they are supplied. """ def __init__(self, messageFactories): self._messageFactories = messageFactories def buildProtocol(self, addr): p = protocol.ServerFactory.buildProtocol(self, addr) p.delivery = SimpleDelivery(self._messageFactories.pop(0)) return p class SMTPSenderFactoryRetryTestCase(unittest.TestCase): """ Tests for the retry behavior of L{smtp.SMTPSenderFactory}. """ def test_retryAfterDisconnect(self): """ If the protocol created by L{SMTPSenderFactory} loses its connection before receiving confirmation of message delivery, it reconnects and tries to deliver the message again. """ recipient = 'alice' message = "some message text" domain = DummyDomain([recipient]) class CleanSMTP(smtp.SMTP): """ An SMTP subclass which ensures that its transport will be disconnected before the test ends. """ def makeConnection(innerSelf, transport): self.addCleanup(transport.loseConnection) smtp.SMTP.makeConnection(innerSelf, transport) # Create a server which will fail the first message deliver attempt to # it with a 500 and a disconnect, but which will accept a message # delivered over the 2nd connection to it. serverFactory = MultipleDeliveryFactorySMTPServerFactory([ BrokenMessage, lambda user: DummyMessage(domain, user)]) serverFactory.protocol = CleanSMTP serverPort = reactor.listenTCP(0, serverFactory, interface='127.0.0.1') serverHost = serverPort.getHost() self.addCleanup(serverPort.stopListening) # Set up a client to try to deliver a message to the above created # server. sentDeferred = defer.Deferred() clientFactory = smtp.SMTPSenderFactory( "[email protected]", recipient + "@example.com", StringIO(message), sentDeferred) clientFactory.domain = "example.org" clientConnector = reactor.connectTCP( serverHost.host, serverHost.port, clientFactory) self.addCleanup(clientConnector.disconnect) def cbSent(ignored): """ Verify that the message was successfully delivered and flush the error which caused the first attempt to fail. """ self.assertEquals( domain.messages, {recipient: ["\n%s\n" % (message,)]}) # Flush the RuntimeError that BrokenMessage caused to be logged. self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1) sentDeferred.addCallback(cbSent) return sentDeferred class SingletonRealm(object): """ Trivial realm implementation which is constructed with an interface and an avatar and returns that avatar when asked for that interface. """ implements(IRealm) def __init__(self, interface, avatar): self.interface = interface self.avatar = avatar def requestAvatar(self, avatarId, mind, *interfaces): for iface in interfaces: if iface is self.interface: return iface, self.avatar, lambda: None class NotImplementedDelivery(object): """ Non-implementation of L{smtp.IMessageDelivery} which only has methods which raise L{NotImplementedError}. Subclassed by various tests to provide the particular behavior being tested. """ def validateFrom(self, helo, origin): raise NotImplementedError("This oughtn't be called in the course of this test.") def validateTo(self, user): raise NotImplementedError("This oughtn't be called in the course of this test.") def receivedHeader(self, helo, origin, recipients): raise NotImplementedError("This oughtn't be called in the course of this test.") class SMTPServerTestCase(unittest.TestCase): """ Test various behaviors of L{twisted.mail.smtp.SMTP} and L{twisted.mail.smtp.ESMTP}. """ def testSMTPGreetingHost(self, serverClass=smtp.SMTP): """ Test that the specified hostname shows up in the SMTP server's greeting. """ s = serverClass() s.host = "example.com" t = StringTransport() s.makeConnection(t) s.connectionLost(error.ConnectionDone()) self.assertIn("example.com", t.value()) def testSMTPGreetingNotExtended(self): """ Test that the string "ESMTP" does not appear in the SMTP server's greeting since that string strongly suggests the presence of support for various SMTP extensions which are not supported by L{smtp.SMTP}. """ s = smtp.SMTP() t = StringTransport() s.makeConnection(t) s.connectionLost(error.ConnectionDone()) self.assertNotIn("ESMTP", t.value()) def testESMTPGreetingHost(self): """ Similar to testSMTPGreetingHost, but for the L{smtp.ESMTP} class. """ self.testSMTPGreetingHost(smtp.ESMTP) def testESMTPGreetingExtended(self): """ Test that the string "ESMTP" does appear in the ESMTP server's greeting since L{smtp.ESMTP} does support the SMTP extensions which that advertises to the client. """ s = smtp.ESMTP() t = StringTransport() s.makeConnection(t) s.connectionLost(error.ConnectionDone()) self.assertIn("ESMTP", t.value()) def test_acceptSenderAddress(self): """ Test that a C{MAIL FROM} command with an acceptable address is responded to with the correct success code. """ class AcceptanceDelivery(NotImplementedDelivery): """ Delivery object which accepts all senders as valid. """ def validateFrom(self, helo, origin): return origin realm = SingletonRealm(smtp.IMessageDelivery, AcceptanceDelivery()) portal = Portal(realm, [AllowAnonymousAccess()]) proto = smtp.SMTP() proto.portal = portal trans = StringTransport() proto.makeConnection(trans) # Deal with the necessary preliminaries proto.dataReceived('HELO example.com\r\n') trans.clear() # Try to specify our sender address proto.dataReceived('MAIL FROM:<[email protected]>\r\n') # Clean up the protocol before doing anything that might raise an # exception. proto.connectionLost(error.ConnectionLost()) # Make sure that we received exactly the correct response self.assertEqual( trans.value(), '250 Sender address accepted\r\n') def test_deliveryRejectedSenderAddress(self): """ Test that a C{MAIL FROM} command with an address rejected by a L{smtp.IMessageDelivery} instance is responded to with the correct error code. """ class RejectionDelivery(NotImplementedDelivery): """ Delivery object which rejects all senders as invalid. """ def validateFrom(self, helo, origin): raise smtp.SMTPBadSender(origin) realm = SingletonRealm(smtp.IMessageDelivery, RejectionDelivery()) portal = Portal(realm, [AllowAnonymousAccess()]) proto = smtp.SMTP() proto.portal = portal trans = StringTransport() proto.makeConnection(trans) # Deal with the necessary preliminaries proto.dataReceived('HELO example.com\r\n') trans.clear() # Try to specify our sender address proto.dataReceived('MAIL FROM:<[email protected]>\r\n') # Clean up the protocol before doing anything that might raise an # exception. proto.connectionLost(error.ConnectionLost()) # Make sure that we received exactly the correct response self.assertEqual( trans.value(), '550 Cannot receive from specified address ' '<[email protected]>: Sender not acceptable\r\n') def test_portalRejectedSenderAddress(self): """ Test that a C{MAIL FROM} command with an address rejected by an L{smtp.SMTP} instance's portal is responded to with the correct error code. """ class DisallowAnonymousAccess(object): """ Checker for L{IAnonymous} which rejects authentication attempts. """ implements(ICredentialsChecker) credentialInterfaces = (IAnonymous,) def requestAvatarId(self, credentials): return defer.fail(UnauthorizedLogin()) realm = SingletonRealm(smtp.IMessageDelivery, NotImplementedDelivery()) portal = Portal(realm, [DisallowAnonymousAccess()]) proto = smtp.SMTP() proto.portal = portal trans = StringTransport() proto.makeConnection(trans) # Deal with the necessary preliminaries proto.dataReceived('HELO example.com\r\n') trans.clear() # Try to specify our sender address proto.dataReceived('MAIL FROM:<[email protected]>\r\n') # Clean up the protocol before doing anything that might raise an # exception. proto.connectionLost(error.ConnectionLost()) # Make sure that we received exactly the correct response self.assertEqual( trans.value(), '550 Cannot receive from specified address ' '<[email protected]>: Sender not acceptable\r\n') def test_portalRejectedAnonymousSender(self): """ Test that a C{MAIL FROM} command issued without first authenticating when a portal has been configured to disallow anonymous logins is responded to with the correct error code. """ realm = SingletonRealm(smtp.IMessageDelivery, NotImplementedDelivery()) portal = Portal(realm, []) proto = smtp.SMTP() proto.portal = portal trans = StringTransport() proto.makeConnection(trans) # Deal with the necessary preliminaries proto.dataReceived('HELO example.com\r\n') trans.clear() # Try to specify our sender address proto.dataReceived('MAIL FROM:<[email protected]>\r\n') # Clean up the protocol before doing anything that might raise an # exception. proto.connectionLost(error.ConnectionLost()) # Make sure that we received exactly the correct response self.assertEqual( trans.value(), '550 Cannot receive from specified address ' '<[email protected]>: Unauthenticated senders not allowed\r\n') class ESMTPAuthenticationTestCase(unittest.TestCase): def assertServerResponse(self, bytes, response): """ Assert that when the given bytes are delivered to the ESMTP server instance, it responds with the indicated lines. @type bytes: str @type response: list of str """ self.transport.clear() self.server.dataReceived(bytes) self.assertEqual( response, self.transport.value().splitlines()) def assertServerAuthenticated(self, loginArgs, username="username", password="password"): """ Assert that a login attempt has been made, that the credentials and interfaces passed to it are correct, and that when the login request is satisfied, a successful response is sent by the ESMTP server instance. @param loginArgs: A C{list} previously passed to L{portalFactory}. """ d, credentials, mind, interfaces = loginArgs.pop() self.assertEqual(loginArgs, []) self.failUnless(twisted.cred.credentials.IUsernamePassword.providedBy(credentials)) self.assertEqual(credentials.username, username) self.failUnless(credentials.checkPassword(password)) self.assertIn(smtp.IMessageDeliveryFactory, interfaces) self.assertIn(smtp.IMessageDelivery, interfaces) d.callback((smtp.IMessageDeliveryFactory, None, lambda: None)) self.assertEqual( ["235 Authentication successful."], self.transport.value().splitlines()) def setUp(self): """ Create an ESMTP instance attached to a StringTransport. """ self.server = smtp.ESMTP({ 'LOGIN': imap4.LOGINCredentials}) self.server.host = 'localhost' self.transport = StringTransport( peerAddress=address.IPv4Address('TCP', '127.0.0.1', 12345)) self.server.makeConnection(self.transport) def tearDown(self): """ Disconnect the ESMTP instance to clean up its timeout DelayedCall. """ self.server.connectionLost(error.ConnectionDone()) def portalFactory(self, loginList): class DummyPortal: def login(self, credentials, mind, *interfaces): d = defer.Deferred() loginList.append((d, credentials, mind, interfaces)) return d return DummyPortal() def test_authenticationCapabilityAdvertised(self): """ Test that AUTH is advertised to clients which issue an EHLO command. """ self.transport.clear() self.server.dataReceived('EHLO\r\n') responseLines = self.transport.value().splitlines() self.assertEqual( responseLines[0], "250-localhost Hello 127.0.0.1, nice to meet you") self.assertEqual( responseLines[1], "250 AUTH LOGIN") self.assertEqual(len(responseLines), 2) def test_plainAuthentication(self): """ Test that the LOGIN authentication mechanism can be used """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.transport.clear() self.assertServerResponse( 'AUTH LOGIN\r\n', ["334 " + "User Name\0".encode('base64').strip()]) self.assertServerResponse( 'username'.encode('base64') + '\r\n', ["334 " + "Password\0".encode('base64').strip()]) self.assertServerResponse( 'password'.encode('base64').strip() + '\r\n', []) self.assertServerAuthenticated(loginArgs) def test_plainAuthenticationEmptyPassword(self): """ Test that giving an empty password for plain auth succeeds. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.transport.clear() self.assertServerResponse( 'AUTH LOGIN\r\n', ["334 " + "User Name\0".encode('base64').strip()]) self.assertServerResponse( 'username'.encode('base64') + '\r\n', ["334 " + "Password\0".encode('base64').strip()]) self.assertServerResponse('\r\n', []) self.assertServerAuthenticated(loginArgs, password='') def test_plainAuthenticationInitialResponse(self): """ The response to the first challenge may be included on the AUTH command line. Test that this is also supported. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.transport.clear() self.assertServerResponse( 'AUTH LOGIN ' + "username".encode('base64').strip() + '\r\n', ["334 " + "Password\0".encode('base64').strip()]) self.assertServerResponse( 'password'.encode('base64').strip() + '\r\n', []) self.assertServerAuthenticated(loginArgs) def test_abortAuthentication(self): """ Test that a challenge/response sequence can be aborted by the client. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.server.dataReceived('AUTH LOGIN\r\n') self.assertServerResponse( '*\r\n', ['501 Authentication aborted']) def test_invalidBase64EncodedResponse(self): """ Test that a response which is not properly Base64 encoded results in the appropriate error code. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.server.dataReceived('AUTH LOGIN\r\n') self.assertServerResponse( 'x\r\n', ['501 Syntax error in parameters or arguments']) self.assertEqual(loginArgs, []) def test_invalidBase64EncodedInitialResponse(self): """ Like L{test_invalidBase64EncodedResponse} but for the case of an initial response included with the C{AUTH} command. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.assertServerResponse( 'AUTH LOGIN x\r\n', ['501 Syntax error in parameters or arguments']) self.assertEqual(loginArgs, []) def test_unexpectedLoginFailure(self): """ If the L{Deferred} returned by L{Portal.login} fires with an exception of any type other than L{UnauthorizedLogin}, the exception is logged and the client is informed that the authentication attempt has failed. """ loginArgs = [] self.server.portal = self.portalFactory(loginArgs) self.server.dataReceived('EHLO\r\n') self.transport.clear() self.assertServerResponse( 'AUTH LOGIN ' + 'username'.encode('base64').strip() + '\r\n', ['334 ' + 'Password\0'.encode('base64').strip()]) self.assertServerResponse( 'password'.encode('base64').strip() + '\r\n', []) d, credentials, mind, interfaces = loginArgs.pop() d.errback(RuntimeError("Something wrong with the server")) self.assertEquals( '451 Requested action aborted: local error in processing\r\n', self.transport.value()) self.assertEquals(len(self.flushLoggedErrors(RuntimeError)), 1) class SMTPClientErrorTestCase(unittest.TestCase): """ Tests for L{smtp.SMTPClientError}. """ def test_str(self): """ The string representation of a L{SMTPClientError} instance includes the response code and response string. """ err = smtp.SMTPClientError(123, "some text") self.assertEquals(str(err), "123 some text") def test_strWithNegativeCode(self): """ If the response code supplied to L{SMTPClientError} is negative, it is excluded from the string representation. """ err = smtp.SMTPClientError(-1, "foo bar") self.assertEquals(str(err), "foo bar") def test_strWithLog(self): """ If a line log is supplied to L{SMTPClientError}, its contents are included in the string representation of the exception instance. """ log = LineLog(10) log.append("testlog") log.append("secondline") err = smtp.SMTPClientError(100, "test error", log=log.str()) self.assertEquals( str(err), "100 test error\n" "testlog\n" "secondline\n") class SenderMixinSentMailTests(unittest.TestCase): """ Tests for L{smtp.SenderMixin.sentMail}, used in particular by L{smtp.SMTPSenderFactory} and L{smtp.ESMTPSenderFactory}. """ def test_onlyLogFailedAddresses(self): """ L{smtp.SenderMixin.sentMail} adds only the addresses with failing SMTP response codes to the log passed to the factory's errback. """ onDone = self.assertFailure(defer.Deferred(), smtp.SMTPDeliveryError) onDone.addCallback(lambda e: self.assertEquals( e.log, "[email protected]: 199 Error in sending.\n")) clientFactory = smtp.SMTPSenderFactory( 'source@address', 'recipient@address', StringIO("Message body"), onDone, retries=0, timeout=0.5) client = clientFactory.buildProtocol( address.IPv4Address('TCP', 'example.net', 25)) addresses = [("[email protected]", 200, "No errors here!"), ("[email protected]", 199, "Error in sending.")] client.sentMail(199, "Test response", 1, addresses, client.log) return onDone