repo_name
stringlengths 5
92
| path
stringlengths 4
232
| copies
stringclasses 19
values | size
stringlengths 4
7
| content
stringlengths 721
1.04M
| license
stringclasses 15
values | hash
int64 -9,223,277,421,539,062,000
9,223,102,107B
| line_mean
float64 6.51
99.9
| line_max
int64 15
997
| alpha_frac
float64 0.25
0.97
| autogenerated
bool 1
class |
---|---|---|---|---|---|---|---|---|---|---|
nttks/jenkins-test | common/lib/xmodule/xmodule/tabs.py | 1 | 35344 | """
Implement CourseTab
"""
# pylint: disable=incomplete-protocol
# Note: pylint complains that we do not implement __delitem__ and __len__, although we implement __setitem__
# and __getitem__. However, the former two do not apply to the CourseTab class so we do not implement them.
# The reason we implement the latter two is to enable callers to continue to use the CourseTab object with
# dict-type accessors.
from abc import ABCMeta, abstractmethod
from xblock.fields import List
# We should only scrape strings for i18n in this file, since the target language is known only when
# they are rendered in the template. So ugettext gets called in the template.
_ = lambda text: text
class CourseTab(object):  # pylint: disable=incomplete-protocol
    """
    The Course Tab class is a data abstraction for all tabs (i.e., course navigation links) within a course.
    It is an abstract class - to be inherited by various tab types.
    Derived classes are expected to override methods as needed.
    When a new tab class is created, it should define the type and add it in this class' factory method.
    """
    __metaclass__ = ABCMeta

    # Class property that specifies the type of the tab. It is generally a constant value for a
    # subclass, shared by all instances of the subclass.
    type = ''

    # Class property for the css icon class shown next to the tab in the UI.
    icon = ''

    # Class property that specifies whether the tab can be hidden for a particular course
    is_hideable = False

    # Class property that specifies whether the tab can be moved within a course's list of tabs
    is_movable = True

    # Class property that specifies whether the tab is a collection of other tabs
    is_collection = False

    def __init__(self, name, tab_id, link_func):
        """
        Initializes class members with values passed in by subclasses.

        Args:
            name: The name of the tab

            tab_id: Intended to be a unique id for this tab, although it is currently not enforced
                within this module. It is used by the UI to determine which page is active.

            link_func: A function that computes the link for the tab,
                given the course and a reverse-url function as input parameters
        """
        self.name = name
        self.tab_id = tab_id
        self.link_func = link_func

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):  # pylint: disable=unused-argument
        """
        Determines whether the tab should be displayed in the UI for the given course and a particular user.
        This method is to be overridden by subclasses when applicable. The base class implementation
        always returns True.

        Args:
            course: An xModule CourseDescriptor

            settings: The configuration settings, including values for:
                WIKI_ENABLED
                FEATURES['ENABLE_DISCUSSION_SERVICE']
                FEATURES['ENABLE_EDXNOTES']
                FEATURES['ENABLE_STUDENT_NOTES']
                FEATURES['ENABLE_TEXTBOOK']

            is_user_authenticated: Indicates whether the user is authenticated. If the tab is of
                type AuthenticatedCourseTab and this value is False, then can_display will return False.

            is_user_staff: Indicates whether the user has staff access to the course. If the tab is of
                type StaffTab and this value is False, then can_display will return False.

            is_user_enrolled: Indicates whether the user is enrolled in the course

        Returns:
            A boolean value to indicate whether this instance of the tab should be displayed to a
            given user for the given course.
        """
        return True

    def get(self, key, default=None):
        """
        Akin to the get method on Python dictionary objects, gracefully returns the value associated with the
        given key, or the default if key does not exist.
        """
        try:
            return self[key]
        except KeyError:
            return default

    def __getitem__(self, key):
        """
        This method allows callers to access CourseTab members with the d[key] syntax as is done with
        Python dictionary objects.
        """
        if key == 'name':
            return self.name
        elif key == 'type':
            return self.type
        elif key == 'tab_id':
            return self.tab_id
        else:
            raise KeyError('Key {0} not present in tab {1}'.format(key, self.to_json()))

    def __setitem__(self, key, value):
        """
        This method allows callers to change CourseTab members with the d[key]=value syntax as is done with
        Python dictionary objects. For example: course_tab['name'] = new_name

        Note: the 'type' member can be 'get', but not 'set'.
        """
        if key == 'name':
            self.name = value
        elif key == 'tab_id':
            self.tab_id = value
        else:
            raise KeyError('Key {0} cannot be set in tab {1}'.format(key, self.to_json()))

    def __eq__(self, other):
        """
        Overrides the equal operator to check equality of member variables rather than the object's address.
        Also allows comparison with dict-type tabs (needed to support callers implemented before this class
        was implemented).
        """
        if type(other) is dict and not self.validate(other, raise_error=False):
            # 'other' is a dict-type tab and did not validate
            return False
        # allow tabs without names; if a name is required, its presence was checked in the validator.
        name_is_eq = (other.get('name') is None or self.name == other['name'])
        # only compare the persisted/serialized members: 'type' and 'name'
        return self.type == other.get('type') and name_is_eq

    def __ne__(self, other):
        """
        Overrides the not equal operator as a partner to the equal operator.
        """
        return not (self == other)

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """
        Validates the given dict-type tab object to ensure it contains the expected keys.
        This method should be overridden by subclasses that require certain keys to be persisted in the tab.
        """
        return key_checker(['type'])(tab_dict, raise_error)

    def to_json(self):
        """
        Serializes the necessary members of the CourseTab object to a json-serializable representation.
        This method is overridden by subclasses that have more members to serialize.

        Returns:
            a dictionary with keys for the properties of the CourseTab object.
        """
        return {'type': self.type, 'name': self.name}

    @staticmethod
    def from_json(tab_dict):
        """
        Deserializes a CourseTab from a json-like representation.

        The subclass that is instantiated is determined by the value of the 'type' key in the
        given dict-type tab. The given dict-type tab is validated before instantiating the CourseTab object.

        Args:
            tab: a dictionary with keys for the properties of the tab.

        Raises:
            InvalidTabsException if the given tab doesn't have the right keys.
        """
        # Maps each persisted 'type' string to the subclass that implements it.
        sub_class_types = {
            'courseware': CoursewareTab,
            'course_info': CourseInfoTab,
            'wiki': WikiTab,
            'discussion': DiscussionTab,
            'external_discussion': ExternalDiscussionTab,
            'external_link': ExternalLinkTab,
            'textbooks': TextbookTabs,
            'pdf_textbooks': PDFTextbookTabs,
            'html_textbooks': HtmlTextbookTabs,
            'progress': ProgressTab,
            'static_tab': StaticTab,
            'peer_grading': PeerGradingTab,
            'staff_grading': StaffGradingTab,
            'open_ended': OpenEndedGradingTab,
            'notes': NotesTab,
            'edxnotes': EdxNotesTab,
            'syllabus': SyllabusTab,
            'instructor': InstructorTab,  # not persisted
        }
        tab_type = tab_dict.get('type')
        if tab_type not in sub_class_types:
            raise InvalidTabsException(
                'Unknown tab type {0}. Known types: {1}'.format(tab_type, sub_class_types)
            )
        # validate before instantiating so a malformed dict fails fast
        tab_class = sub_class_types[tab_dict['type']]
        tab_class.validate(tab_dict)
        return tab_class(tab_dict=tab_dict)
class AuthenticatedCourseTab(CourseTab):
    """
    Abstract class for tabs that can be accessed by only authenticated users.
    """
    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        # Visible to any logged-in user, regardless of enrollment or staff status.
        return is_user_authenticated
class StaffTab(AuthenticatedCourseTab):
    """
    Abstract class for tabs that can be accessed by only users with staff access.
    """
    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):  # pylint: disable=unused-argument
        # Staff access implies authentication, so only the staff flag is checked.
        return is_user_staff
class EnrolledOrStaffTab(CourseTab):
    """
    Abstract class for tabs that can be accessed by only users with staff access
    or users enrolled in the course.
    """
    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):  # pylint: disable=unused-argument
        # The user must be authenticated AND either staff or enrolled.
        return is_user_authenticated and (is_user_staff or is_user_enrolled)
class HideableTab(CourseTab):
    """
    Abstract class for tabs that are hideable
    """
    is_hideable = True

    def __init__(self, name, tab_id, link_func, tab_dict):
        """
        Initializes the tab; the 'is_hidden' flag is read from the dict-type
        tab when one is given, and defaults to False (visible) otherwise.
        """
        super(HideableTab, self).__init__(
            name=name,
            tab_id=tab_id,
            link_func=link_func,
        )
        self.is_hidden = tab_dict.get('is_hidden', False) if tab_dict else False

    def __getitem__(self, key):
        # Expose 'is_hidden' through the dict-style accessor as well.
        if key == 'is_hidden':
            return self.is_hidden
        else:
            return super(HideableTab, self).__getitem__(key)

    def __setitem__(self, key, value):
        # Allow 'is_hidden' to be toggled through the dict-style accessor.
        if key == 'is_hidden':
            self.is_hidden = value
        else:
            super(HideableTab, self).__setitem__(key, value)

    def to_json(self):
        # Persist 'is_hidden' only when set, keeping serialized tabs compact.
        to_json_val = super(HideableTab, self).to_json()
        if self.is_hidden:
            to_json_val.update({'is_hidden': True})
        return to_json_val

    def __eq__(self, other):
        if not super(HideableTab, self).__eq__(other):
            return False
        # A dict-type tab that omits the flag is treated as visible.
        return self.is_hidden == other.get('is_hidden', False)
class CoursewareTab(EnrolledOrStaffTab):
    """
    A tab containing the course content.
    """
    type = 'courseware'
    icon = 'fa fa-edit'
    is_movable = False  # the courseware tab always stays first

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: 'Courseware' refers to the tab in the courseware that leads to the content of a course
        fixed_name = _('Courseware')  # support fixed name for the courseware tab
        super(CoursewareTab, self).__init__(
            name=fixed_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )
class CourseInfoTab(CourseTab):
    """
    A tab containing information about the course.
    """
    type = 'course_info'
    icon = 'fa fa-info-circle'
    is_movable = False  # course info always stays second

    def __init__(self, tab_dict=None):
        # Translators: "Course Info" is the name of the course's information and updates page
        tab_name = tab_dict['name'] if tab_dict else _('Course Info')
        super(CourseInfoTab, self).__init__(
            name=tab_name,
            tab_id='info',
            link_func=link_reverse_func('info'),
        )

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """A course-info tab must also persist a 'name' entry."""
        if not super(CourseInfoTab, cls).validate(tab_dict, raise_error):
            return False
        return need_name(tab_dict, raise_error)
class ProgressTab(EnrolledOrStaffTab):
    """
    A tab containing information about the authenticated user's progress.
    """
    type = 'progress'
    icon = 'fa fa-bar-chart'

    def __init__(self, tab_dict=None):
        # Translators: "Progress" is the name of the student's course progress page
        tab_name = tab_dict['name'] if tab_dict else _('Progress')
        super(ProgressTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        """Shown to enrolled/staff users unless the course hides its progress tab."""
        allowed = super(ProgressTab, self).can_display(
            course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
        )
        return allowed and not course.hide_progress_tab

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """A progress tab must also persist a 'name' entry."""
        valid = super(ProgressTab, cls).validate(tab_dict, raise_error)
        return valid and need_name(tab_dict, raise_error)
class WikiTab(HideableTab):
    """
    A tab_dict containing the course wiki.
    """
    type = 'wiki'
    icon = 'fa fa-comment'

    def __init__(self, tab_dict=None):
        # Translators: "Wiki" is the name of the course's wiki page
        tab_name = tab_dict['name'] if tab_dict else _('Wiki')
        super(WikiTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func('course_wiki'),
            tab_dict=tab_dict,
        )

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        """Shown when the wiki is enabled and the user may access it."""
        wiki_enabled = settings.WIKI_ENABLED
        return wiki_enabled and (
            course.allow_public_wiki_access or is_user_enrolled or is_user_staff
        )

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """A wiki tab must also persist a 'name' entry."""
        valid = super(WikiTab, cls).validate(tab_dict, raise_error)
        return valid and need_name(tab_dict, raise_error)
class DiscussionTab(EnrolledOrStaffTab):
    """
    A tab only for the new Berkeley discussion forums.
    """
    type = 'discussion'
    icon = 'fa fa-comments'

    def __init__(self, tab_dict=None):
        # Translators: "Discussion" is the title of the course forum page
        tab_name = tab_dict['name'] if tab_dict else _('Discussion')
        super(DiscussionTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func('django_comment_client.forum.views.forum_form_discussion'),
        )

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        """Shown only when the discussion-service feature flag is on."""
        allowed = super(DiscussionTab, self).can_display(
            course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
        )
        return settings.FEATURES.get('ENABLE_DISCUSSION_SERVICE') and allowed

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """A discussion tab must also persist a 'name' entry."""
        valid = super(DiscussionTab, cls).validate(tab_dict, raise_error)
        return valid and need_name(tab_dict, raise_error)
class LinkTab(CourseTab):
    """
    Abstract class for tabs that contain external links.
    """
    # The target URL of the tab; persisted under the 'link' key.
    link_value = ''

    def __init__(self, name, tab_id, link_value):
        self.link_value = link_value
        super(LinkTab, self).__init__(
            name=name,
            tab_id=tab_id,
            link_func=link_value_func(self.link_value),
        )

    def __getitem__(self, key):
        # Expose 'link' through the dict-style accessor as well.
        if key == 'link':
            return self.link_value
        else:
            return super(LinkTab, self).__getitem__(key)

    def __setitem__(self, key, value):
        # NOTE(review): this updates link_value only; the link_func captured at
        # construction keeps returning the original value — confirm this is intended.
        if key == 'link':
            self.link_value = value
        else:
            super(LinkTab, self).__setitem__(key, value)

    def to_json(self):
        to_json_val = super(LinkTab, self).to_json()
        to_json_val.update({'link': self.link_value})
        return to_json_val

    def __eq__(self, other):
        if not super(LinkTab, self).__eq__(other):
            return False
        # Also compare the persisted 'link' member.
        return self.link_value == other.get('link')

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        # Link tabs must persist a 'link' entry in addition to the base keys.
        return super(LinkTab, cls).validate(tab_dict, raise_error) and key_checker(['link'])(tab_dict, raise_error)
class ExternalDiscussionTab(LinkTab):
    """
    A tab that links to an external discussion service.
    """
    type = 'external_discussion'
    icon = 'fa fa-question-circle'

    def __init__(self, tab_dict=None, link_value=None):
        # Prefer the persisted link when a dict-type tab is supplied.
        effective_link = tab_dict['link'] if tab_dict else link_value
        super(ExternalDiscussionTab, self).__init__(
            # Translators: 'Discussion' refers to the tab in the courseware that leads to the discussion forums
            name=_('Discussion'),
            tab_id='discussion',
            link_value=effective_link,
        )
class ExternalLinkTab(LinkTab):
    """
    A tab containing an external link.
    """
    type = 'external_link'
    icon = 'fa fa-question-circle'

    def __init__(self, tab_dict):
        link_target = tab_dict['link']
        super(ExternalLinkTab, self).__init__(
            name=tab_dict['name'],
            tab_id=None,  # External links are never active.
            link_value=link_target,
        )
class StaticTab(CourseTab):
    """
    A custom tab.
    """
    type = 'static_tab'
    icon = 'fa fa-circle'

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        # Static tabs must persist both a display name and the url_slug they render at.
        return super(StaticTab, cls).validate(tab_dict, raise_error) and key_checker(['name', 'url_slug'])(tab_dict, raise_error)

    def __init__(self, tab_dict=None, name=None, url_slug=None):
        """
        Initializes from either a dict-type tab or explicit name/url_slug values.
        """
        # url_slug must be set before calling super, since tab_id is derived from it.
        self.url_slug = tab_dict['url_slug'] if tab_dict else url_slug
        super(StaticTab, self).__init__(
            name=tab_dict['name'] if tab_dict else name,
            tab_id='static_tab_{0}'.format(self.url_slug),
            # The lambda reads self.url_slug lazily, so later updates via
            # __setitem__('url_slug', ...) are reflected in the generated link.
            link_func=lambda course, reverse_func: reverse_func(self.type, args=[course.id.to_deprecated_string(), self.url_slug]),
        )

    def __getitem__(self, key):
        # Expose 'url_slug' through the dict-style accessor as well.
        if key == 'url_slug':
            return self.url_slug
        else:
            return super(StaticTab, self).__getitem__(key)

    def __setitem__(self, key, value):
        if key == 'url_slug':
            self.url_slug = value
        else:
            super(StaticTab, self).__setitem__(key, value)

    def to_json(self):
        to_json_val = super(StaticTab, self).to_json()
        to_json_val.update({'url_slug': self.url_slug})
        return to_json_val

    def __eq__(self, other):
        if not super(StaticTab, self).__eq__(other):
            return False
        # Also compare the persisted 'url_slug' member.
        return self.url_slug == other.get('url_slug')
class SingleTextbookTab(CourseTab):
    """
    A tab representing a single textbook. It is created temporarily when enumerating all textbooks within a
    Textbook collection tab. It should not be serialized or persisted.
    """
    type = 'single_textbook'
    icon = 'fa fa-book'
    is_movable = False
    # Marks this tab as an item generated by a collection tab.
    is_collection_item = True

    def to_json(self):
        # These tabs are transient; attempting to persist one is a programming error.
        raise NotImplementedError('SingleTextbookTab should not be serialized.')
class TextbookTabsBase(AuthenticatedCourseTab):
    """
    Abstract class for textbook collection tabs classes.
    """
    # Collection tabs are flattened by iterators into their individual items.
    is_collection = True

    def __init__(self, tab_id):
        # Translators: 'Textbooks' refers to the tab in the course that leads to the course' textbooks
        super(TextbookTabsBase, self).__init__(
            name=_("Textbooks"),
            tab_id=tab_id,
            link_func=None,
        )

    @abstractmethod
    def items(self, course):
        """
        A generator for iterating through all the SingleTextbookTab book objects associated with this
        collection of textbooks.
        """
        pass
class TextbookTabs(TextbookTabsBase):
    """
    A tab representing the collection of all textbook tabs.
    """
    type = 'textbooks'
    icon = 'fa fa-book'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        super(TextbookTabs, self).__init__(
            tab_id=self.type,
        )

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        # Textbook tabs are shown only when the feature flag is enabled.
        return settings.FEATURES.get('ENABLE_TEXTBOOK')

    def items(self, course):
        # Yield one transient tab per textbook attached to the course.
        for index, textbook in enumerate(course.textbooks):
            yield SingleTextbookTab(
                name=textbook.title,
                tab_id='textbook/{0}'.format(index),
                # 'index=index' binds the loop value as a default argument,
                # avoiding the late-binding closure pitfall.
                link_func=lambda course, reverse_func, index=index: reverse_func(
                    'book', args=[course.id.to_deprecated_string(), index]
                ),
            )
class PDFTextbookTabs(TextbookTabsBase):
    """
    A tab representing the collection of all PDF textbook tabs.
    """
    type = 'pdf_textbooks'
    icon = 'fa fa-book'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        super(PDFTextbookTabs, self).__init__(
            tab_id=self.type,
        )

    def items(self, course):
        # Yield one transient tab per PDF textbook attached to the course.
        for index, textbook in enumerate(course.pdf_textbooks):
            yield SingleTextbookTab(
                name=textbook['tab_title'],
                tab_id='pdftextbook/{0}'.format(index),
                # 'index=index' binds the loop value as a default argument,
                # avoiding the late-binding closure pitfall.
                link_func=lambda course, reverse_func, index=index: reverse_func(
                    'pdf_book', args=[course.id.to_deprecated_string(), index]
                ),
            )
class HtmlTextbookTabs(TextbookTabsBase):
    """
    A tab representing the collection of all Html textbook tabs.
    """
    type = 'html_textbooks'
    icon = 'fa fa-book'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        super(HtmlTextbookTabs, self).__init__(
            tab_id=self.type,
        )

    def items(self, course):
        # Yield one transient tab per HTML textbook attached to the course.
        for index, textbook in enumerate(course.html_textbooks):
            yield SingleTextbookTab(
                name=textbook['tab_title'],
                tab_id='htmltextbook/{0}'.format(index),
                # 'index=index' binds the loop value as a default argument,
                # avoiding the late-binding closure pitfall.
                link_func=lambda course, reverse_func, index=index: reverse_func(
                    'html_book', args=[course.id.to_deprecated_string(), index]
                ),
            )
class GradingTab(object):
    """
    Abstract class for tabs that involve Grading.
    """
    # Marker mixin only; carries no behavior of its own.
    pass
class StaffGradingTab(StaffTab, GradingTab):
    """
    A tab for staff grading.
    """
    type = 'staff_grading'
    icon = 'fa fa-check-square-o'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: "Staff grading" appears on a tab that allows
        # staff to view open-ended problems that require staff grading
        tab_name = _("Staff grading")
        super(StaffGradingTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )
class PeerGradingTab(AuthenticatedCourseTab, GradingTab):
    """
    A tab for peer grading.
    """
    type = 'peer_grading'
    icon = 'fa fa-check-square-o'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: "Peer grading" appears on a tab that allows
        # students to view open-ended problems that require grading
        tab_name = _("Peer grading")
        super(PeerGradingTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )
class OpenEndedGradingTab(AuthenticatedCourseTab, GradingTab):
    """
    A tab for open ended grading.
    """
    type = 'open_ended'
    icon = 'fa fa-check-square-o'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: "Open Ended Panel" appears on a tab that, when clicked, opens up a panel that
        # displays information about open-ended problems that a user has submitted or needs to grade
        tab_name = _("Open Ended Panel")
        super(OpenEndedGradingTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func('open_ended_notifications'),
        )
class SyllabusTab(CourseTab):
    """
    A tab for the course syllabus.
    """
    type = 'syllabus'
    icon = 'fa fa-list-alt'

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        """Shown only when the course declares that a syllabus is present."""
        return hasattr(course, 'syllabus_present') and course.syllabus_present

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: "Syllabus" appears on a tab that, when clicked, opens the syllabus of the course.
        tab_name = _('Syllabus')
        super(SyllabusTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )
class NotesTab(AuthenticatedCourseTab):
    """
    A tab for the course notes.

    Shown only when the ENABLE_STUDENT_NOTES feature flag is set.
    """
    type = 'notes'
    icon = 'fa fa-file-text'

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        # Display the tab only when the student-notes feature is enabled.
        return settings.FEATURES.get('ENABLE_STUDENT_NOTES')

    def __init__(self, tab_dict=None):
        """
        Initializes from a dict-type tab when given; otherwise uses the default name.

        Bug fix: previously tab_dict['name'] was accessed unconditionally, so
        constructing NotesTab() with the default tab_dict=None raised a
        TypeError. Fall back to a translated default name, mirroring the
        sibling EdxNotesTab implementation.
        """
        super(NotesTab, self).__init__(
            name=tab_dict['name'] if tab_dict else _('Notes'),
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """A notes tab must also persist a 'name' entry."""
        return super(NotesTab, cls).validate(tab_dict, raise_error) and need_name(tab_dict, raise_error)
class EdxNotesTab(AuthenticatedCourseTab):
    """
    A tab for the course student notes.
    """
    type = 'edxnotes'
    icon = 'fa fa-file-text'

    def can_display(self, course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
        """Shown only when the EdxNotes feature flag is enabled."""
        return settings.FEATURES.get('ENABLE_EDXNOTES')

    def __init__(self, tab_dict=None):
        tab_name = tab_dict['name'] if tab_dict else _('Notes')
        super(EdxNotesTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func(self.type),
        )

    @classmethod
    def validate(cls, tab_dict, raise_error=True):
        """An edxnotes tab must also persist a 'name' entry."""
        valid = super(EdxNotesTab, cls).validate(tab_dict, raise_error)
        return valid and need_name(tab_dict, raise_error)
class InstructorTab(StaffTab):
    """
    A tab for the course instructors.
    """
    type = 'instructor'
    icon = 'fa fa-gears'

    def __init__(self, tab_dict=None):  # pylint: disable=unused-argument
        # Translators: 'Instructor' appears on the tab that leads to the instructor dashboard, which is
        # a portal where an instructor can get data and perform various actions on their course
        tab_name = _('Instructor')
        super(InstructorTab, self).__init__(
            name=tab_name,
            tab_id=self.type,
            link_func=link_reverse_func('instructor_dashboard'),
        )
class CourseTabList(List):
    """
    An XBlock field class that encapsulates a collection of Tabs in a course.
    It is automatically created and can be retrieved through a CourseDescriptor object: course.tabs
    """

    @staticmethod
    def initialize_default(course):
        """
        An explicit initialize method is used to set the default values, rather than implementing an
        __init__ method. This is because the default values are dependent on other information from
        within the course.
        """
        course.tabs.extend([
            CoursewareTab(),
            CourseInfoTab(),
        ])
        # Presence of syllabus tab is indicated by a course attribute
        if hasattr(course, 'syllabus_present') and course.syllabus_present:
            course.tabs.append(SyllabusTab())
        # If the course has a discussion link specified, use that even if we feature
        # flag discussions off. Disabling that is mostly a server safety feature
        # at this point, and we don't need to worry about external sites.
        if course.discussion_link:
            discussion_tab = ExternalDiscussionTab(link_value=course.discussion_link)
        else:
            discussion_tab = DiscussionTab()
        course.tabs.extend([
            TextbookTabs(),
            discussion_tab,
            WikiTab(),
            ProgressTab(),
        ])

    @staticmethod
    def get_discussion(course):
        """
        Returns the discussion tab for the given course. It can be either of type DiscussionTab
        or ExternalDiscussionTab. The returned tab object is self-aware of the 'link' that it corresponds to.
        """
        # the discussion_link setting overrides everything else, even if there is a discussion tab in the course tabs
        if course.discussion_link:
            return ExternalDiscussionTab(link_value=course.discussion_link)
        # find one of the discussion tab types in the course tabs
        for tab in course.tabs:
            if isinstance(tab, DiscussionTab) or isinstance(tab, ExternalDiscussionTab):
                return tab
        return None

    @staticmethod
    def get_tab_by_slug(tab_list, url_slug):
        """
        Look for a tab with the specified 'url_slug'. Returns the tab or None if not found.
        """
        return next((tab for tab in tab_list if tab.get('url_slug') == url_slug), None)

    @staticmethod
    def get_tab_by_type(tab_list, tab_type):
        """
        Look for a tab with the specified type. Returns the first matching tab.
        """
        return next((tab for tab in tab_list if tab.type == tab_type), None)

    @staticmethod
    def get_tab_by_id(tab_list, tab_id):
        """
        Look for a tab with the specified tab_id. Returns the first matching tab.
        """
        return next((tab for tab in tab_list if tab.tab_id == tab_id), None)

    @staticmethod
    def iterate_displayable(
            course,
            settings,
            is_user_authenticated=True,
            is_user_staff=True,
            is_user_enrolled=False
    ):
        """
        Generator method for iterating through all tabs that can be displayed for the given course and
        the given user with the provided access settings.
        """
        for tab in course.tabs:
            if tab.can_display(
                    course, settings, is_user_authenticated, is_user_staff, is_user_enrolled
            ) and (not tab.is_hideable or not tab.is_hidden):
                if tab.is_collection:
                    # collection tabs are flattened into their items
                    for item in tab.items(course):
                        yield item
                else:
                    yield tab
        # The instructor tab is not persisted with the course; it is appended
        # dynamically whenever the requesting user may see it.
        instructor_tab = InstructorTab()
        if instructor_tab.can_display(course, settings, is_user_authenticated, is_user_staff, is_user_enrolled):
            yield instructor_tab

    @staticmethod
    def iterate_displayable_cms(
            course,
            settings
    ):
        """
        Generator method for iterating through all tabs that can be displayed for the given course
        with the provided settings.
        """
        for tab in course.tabs:
            if tab.can_display(course, settings, is_user_authenticated=True, is_user_staff=True, is_user_enrolled=True):
                if tab.is_collection and not len(list(tab.items(course))):
                    # do not yield collections that have no items
                    continue
                yield tab

    @classmethod
    def validate_tabs(cls, tabs):
        """
        Check that the tabs set for the specified course is valid. If it
        isn't, raise InvalidTabsException with the complaint.

        Specific rules checked:
        - if no tabs specified, that's fine
        - if tabs specified, first two must have type 'courseware' and 'course_info', in that order.
        """
        if tabs is None or len(tabs) == 0:
            return
        if len(tabs) < 2:
            raise InvalidTabsException("Expected at least two tabs. tabs: '{0}'".format(tabs))
        if tabs[0].get('type') != CoursewareTab.type:
            raise InvalidTabsException(
                "Expected first tab to have type 'courseware'. tabs: '{0}'".format(tabs))
        if tabs[1].get('type') != CourseInfoTab.type:
            raise InvalidTabsException(
                "Expected second tab to have type 'course_info'. tabs: '{0}'".format(tabs))
        # the following tabs should appear only once
        for tab_type in [
                CoursewareTab.type,
                CourseInfoTab.type,
                NotesTab.type,
                TextbookTabs.type,
                PDFTextbookTabs.type,
                HtmlTextbookTabs.type,
                EdxNotesTab.type]:
            cls._validate_num_tabs_of_type(tabs, tab_type, 1)

    @staticmethod
    def _validate_num_tabs_of_type(tabs, tab_type, max_num):
        """
        Check that the number of times that the given 'tab_type' appears in 'tabs' is less than or equal to 'max_num'.
        """
        count = sum(1 for tab in tabs if tab.get('type') == tab_type)
        if count > max_num:
            msg = (
                "Tab of type '{type}' appears {count} time(s). "
                "Expected maximum of {max} time(s)."
            ).format(
                type=tab_type, count=count, max=max_num,
            )
            raise InvalidTabsException(msg)

    def to_json(self, values):
        """
        Overrides the to_json method to serialize all the CourseTab objects to a json-serializable representation.
        """
        json_data = []
        if values:
            for val in values:
                if isinstance(val, CourseTab):
                    json_data.append(val.to_json())
                elif isinstance(val, dict):
                    # already-serialized (legacy dict-type) tabs pass through unchanged
                    json_data.append(val)
                else:
                    # silently drop values of any other type
                    continue
        return json_data

    def from_json(self, values):
        """
        Overrides the from_json method to de-serialize the CourseTab objects from a json-like representation.
        """
        self.validate_tabs(values)
        return [CourseTab.from_json(tab_dict) for tab_dict in values]
#### Link Functions
def link_reverse_func(reverse_name):
    """
    Return a link function that reverses ``reverse_name`` for a given course.

    The returned callable takes (course, reverse_url_func) and invokes
    reverse_url_func with the given reverse_name and the course's ID.
    """
    def link_func(course, reverse_url_func):
        return reverse_url_func(reverse_name, args=[course.id.to_deprecated_string()])
    return link_func
def link_value_func(value):
    """
    Return a link function that ignores its inputs and always yields ``value``.
    """
    def link_func(course, reverse_url_func):  # pylint: disable=unused-argument
        return value
    return link_func
#### Validators
# A validator takes a dict and raises InvalidTabsException if required fields are missing or otherwise wrong.
# (e.g. "is there a 'name' field?). Validators can assume that the type field is valid.
def key_checker(expected_keys):
    """
    Return a callable that verifies that every expected key is present in a dict.
    """
    def check(actual_dict, raise_error=True):
        """
        Return True when all of ``expected_keys`` occur in ``actual_dict``;
        otherwise raise InvalidTabsException, or return False when
        ``raise_error`` is False.
        """
        if all(key in actual_dict for key in expected_keys):
            return True
        if raise_error:
            raise InvalidTabsException(
                "Expected keys '{0}' are not present in the given dict: {1}".format(expected_keys, actual_dict)
            )
        return False
    return check
def need_name(dictionary, raise_error=True):
    """
    Return whether the given dictionary carries the required 'name' key.
    """
    check_name = key_checker(['name'])
    return check_name(dictionary, raise_error)
class InvalidTabsException(Exception):
    """
    A complaint about invalid tabs.
    """
    # Raised by validators when a dict-type tab is missing required keys
    # or when a course's tab list violates ordering/uniqueness rules.
    pass
class UnequalTabsException(Exception):
    """
    A complaint about tab lists being unequal
    """
    # NOTE(review): not raised anywhere in this file — presumably used by callers
    # comparing tab lists; verify before removing.
    pass
| agpl-3.0 | -3,983,259,697,770,877,000 | 33.856016 | 135 | 0.610627 | false |
tellesnobrega/storm_plugin | sahara/utils/openstack/keystone.py | 1 | 2752 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keystoneclient.v2_0 import client as keystone_client
from keystoneclient.v3 import client as keystone_client_v3
from oslo.config import cfg
from sahara import context
from sahara.utils.openstack import base
CONF = cfg.CONF

# Configuration option controlling which Keystone API version Sahara uses.
opts = [
    cfg.BoolOpt('use_identity_api_v3',
                default=True,
                help='Enables Sahara to use Keystone API v3. '
                     'If that flag is disabled, '
                     'per-job clusters will not be terminated automatically.')
]

CONF.register_opts(opts)
def client():
    """
    Build a keystone client authenticated with the current context's token.

    Uses the v3 client (and pins its management_url) when use_identity_api_v3
    is enabled; otherwise falls back to the v2.0 client.
    """
    ctx = context.current()
    auth_url = base.retrieve_auth_url()
    credentials = {
        'username': ctx.username,
        'token': ctx.token,
        'tenant_id': ctx.tenant_id,
        'auth_url': auth_url,
    }
    if CONF.use_identity_api_v3:
        keystone = keystone_client_v3.Client(**credentials)
        keystone.management_url = auth_url
    else:
        keystone = keystone_client.Client(**credentials)
    return keystone
def _admin_client(project_name=None, trust_id=None):
    """
    Build a keystone v3 client authenticated with the admin credentials.

    Args:
        project_name: optional project to scope the client to
        trust_id: optional trust to consume

    Raises:
        Exception: when the v3 identity API is disabled (trusts need v3).
    """
    if not CONF.use_identity_api_v3:
        raise Exception('Trusts aren\'t implemented in keystone api'
                        ' less than v3')
    auth_url = base.retrieve_auth_url()
    admin_user = CONF.keystone_authtoken.admin_user
    admin_password = CONF.keystone_authtoken.admin_password
    keystone = keystone_client_v3.Client(username=admin_user,
                                         password=admin_password,
                                         project_name=project_name,
                                         auth_url=auth_url,
                                         trust_id=trust_id)
    keystone.management_url = auth_url
    return keystone
def client_for_admin():
    """
    Return an admin keystone client scoped to the configured admin tenant.
    """
    return _admin_client(
        project_name=CONF.keystone_authtoken.admin_tenant_name)
def client_for_trusts(trust_id):
    """
    Return an admin keystone client that consumes the given trust.
    """
    return _admin_client(trust_id=trust_id)
| apache-2.0 | 2,405,071,615,818,163,000 | 34.282051 | 78 | 0.597384 | false |
imapp-pl/golem | gnr/customizers/verificationparamshelper.py | 1 | 3687 | import logging
from PyQt4 import QtCore
from gnr.renderingtaskstate import AdvanceRenderingVerificationOptions
logger = logging.getLogger("gnr.gui")
def read_advance_verification_params(gui, definition):
if gui.ui.advanceVerificationCheckBox.isChecked():
definition.verification_options = AdvanceRenderingVerificationOptions()
if gui.ui.verificationForAllRadioButton.isChecked():
definition.verification_options.type = 'forAll'
elif gui.ui.verificationForFirstRadioButton.isChecked():
definition.verification_options.type = 'forFirst'
else:
definition.verification_options.type = 'random'
try:
definition.verification_options.probability = float(gui.ui.probabilityLineEdit.text())
if definition.verification_options.probability < 0:
definition.verification_options.probability = 0.0
gui.ui.probabilityLineEdit.setText("0.0")
if definition.verification_options.probability > 1:
definition.verification_options.probability = 1.0
gui.ui.probabilityLineEdit.setText("1.0")
except ValueError:
logger.warning("Wrong probability values {}".format(gui.ui.probabilityLineEdit.text()))
definition.verification_options.probability = 0.0
gui.ui.probabilityLineEdit.setText("0.0")
definition.verification_options.box_size = (int(gui.ui.verificationSizeXSpinBox.value()), int(gui.ui.verificationSizeYSpinBox.value()))
else:
definition.verification_options = None
return definition
def set_verification_widgets_state(gui, state):
gui.ui.verificationForAllRadioButton.setEnabled(state)
gui.ui.verificationForFirstRadioButton.setEnabled(state)
gui.ui.verificationSizeXSpinBox.setEnabled(state)
gui.ui.verificationSizeYSpinBox.setEnabled(state)
gui.ui.verificationRandomRadioButton.setEnabled(state)
gui.ui.probabilityLabel.setEnabled(state and gui.ui.verificationRandomRadioButton.isChecked())
gui.ui.probabilityLineEdit.setEnabled(state and gui.ui.verificationRandomRadioButton.isChecked())
def load_verification_params(gui, definition):
enabled = definition.verification_options is not None
set_verification_widgets_state(gui, enabled)
if enabled:
gui.ui.advanceVerificationCheckBox.setCheckState(QtCore.Qt.Checked)
gui.ui.verificationSizeXSpinBox.setValue(definition.verification_options.box_size[0])
gui.ui.verificationSizeYSpinBox.setValue(definition.verification_options.box_size[1])
gui.ui.verificationForAllRadioButton.setChecked(definition.verification_options.type == 'forAll')
gui.ui.verificationForFirstRadioButton.setChecked(definition.verification_options.type == 'forFirst')
gui.ui.verificationRandomRadioButton.setChecked(definition.verification_options.type == 'random')
gui.ui.probabilityLabel.setEnabled(definition.verification_options.type == 'random')
gui.ui.probabilityLineEdit.setEnabled(definition.verification_options.type == 'random')
if hasattr(definition.verification_options, 'probability'):
gui.ui.probabilityLineEdit.setText("{}".format(definition.verification_options.probability))
else:
gui.ui.advanceVerificationCheckBox.setCheckState(QtCore.Qt.Unchecked)
def verification_random_changed(gui):
rand_set = gui.ui.verificationRandomRadioButton.isChecked()
gui.ui.probabilityLineEdit.setEnabled(rand_set)
gui.ui.probabilityLabel.setEnabled(rand_set) | gpl-3.0 | 4,630,212,197,850,335,000 | 51.463768 | 143 | 0.721183 | false |
lkundrak/scraperwiki | web/frontend/features/common_steps.py | 1 | 2716 | from lettuce import step,before,world,after
from lettuce.django import django_url
from django.contrib.auth.models import User
from frontend.models import UserProfile, Feature
# Steps used in more than one feature's steps file
# Features
@step(u'(?:Given|And) the "([^"]*)" feature exists')
def and_the_feature_exists(step, feature):
Feature.objects.filter(name=feature).delete()
Feature.objects.create(name=feature, public=True)
@step(u'(?:Given|And) I have the "([^"]*)" feature enabled')
def and_i_have_a_feature_enabled(step, feature):
u = User.objects.filter(username='test')[0]
feature = Feature.objects.filter(name=feature)[0]
profile = u.get_profile();
profile.features.add(feature)
assert profile.has_feature(feature)
@step(u'And I do not have the "([^"]*)" feature enabled$')
def feature_not_enabled(step, feature):
u = User.objects.filter(username='test')[0]
feature = Feature.objects.filter(name=feature)[0]
profile = u.get_profile();
try:
profile.features.remove(feature)
except ValueError:
# Expected when the user already does not have the
# feature in question.
pass
assert not profile.has_feature(feature)
# Payment plan
@step(u'Given I am an? "([^"]*)" user')
def given_i_am_a_plan_user(step, plan):
plan = plan.replace(' ', '').lower()
step.behave_as("""
Given user "test" with password "pass" is logged in
And I have the "Self Service Vaults" feature enabled
And I am on the "%s" plan
""" % plan)
@step(u'And I am on the "([^"]*)" plan')
def and_i_am_on_the_plan(step, plan):
user = User.objects.get(username='test')
profile = user.get_profile()
profile.change_plan(plan)
# Seeing matchers
@step(u'(?:And|Then) I should see "([^"]*)"$')
def and_i_should_see(step, text):
assert world.browser.is_text_present(text)
@step(u'(?:Then|And) I should not see "([^"]*)"')
def and_i_should_not_see_text(step, text):
assert world.browser.is_text_not_present(text)
@step(u'(?:Then|And) I should see (?:the|a|an) "([^"]*)" (?:link|button)$')
def i_should_see_the_button(step, text):
assert world.browser.find_link_by_partial_text(text)
@step(u'(?:Then|And) I should not see (?:the|a|an) "([^"]*)" (?:link|button)$')
def i_should_not_see_the_button(step, text):
assert not world.browser.find_link_by_partial_text(text)
# Clicking
@step(u'(?:And|When) I click "([^"]*)"')
def and_i_click(step, text):
# :todo: Make it not wrong. so wrong.
world.browser.find_by_tag("button").first.click()
@step(u'(?:When|And) I click the "([^"]*)" (?:link|button)$')
def i_click_the_button(step, text):
world.browser.find_link_by_partial_text(text).first.click()
| agpl-3.0 | 3,858,872,102,019,335,000 | 33.820513 | 79 | 0.657585 | false |
andrewyoung1991/django-restframework-stripe | tests/test_coupon.py | 1 | 1629 | from unittest import mock
from django.utils import timezone
from django.core.exceptions import ValidationError
import pytest
from stripe.error import InvalidRequestError
from restframework_stripe import models
from restframework_stripe.test import get_mock_resource
@mock.patch("stripe.Coupon.create")
@pytest.mark.django_db
def test_create_coupon(create_coupon):
kwargs = {
"duration": models.Coupon.ONCE,
"amount_off": 100,
"currency": "usd",
"redeem_by": timezone.now()
}
mock_kwargs = kwargs.copy()
mock_kwargs.update({
"duration": "once",
"redeem_by": int(kwargs["redeem_by"].timestamp())
})
create_coupon.return_value = get_mock_resource("Coupon", **mock_kwargs)
coupon = models.Coupon(**kwargs)
coupon.save()
assert coupon.is_created
@pytest.mark.parametrize("kwargs", [
{"duration": 1},
{"duration": 1, "percent_off": 10, "amount_off": 10},
{"duration": 1, "amount_off": 10},
{"duration": models.Coupon.REPEATING, "amount_off": 10, "currency": "usd"}
])
def test_clean_coupon(kwargs):
coupon = models.Coupon(**kwargs)
with pytest.raises(ValidationError) as err:
coupon.clean()
@mock.patch("stripe.Coupon.create")
@pytest.mark.django_db
def test_create_coupon_error(create_coupon):
kwargs = {
"duration": models.Coupon.ONCE,
"amount_off": 100,
"currency": "usd",
}
create_coupon.side_effect = InvalidRequestError(param="foo", message="bar")
coupon = models.Coupon(**kwargs)
with pytest.raises(ValidationError):
coupon.save()
| bsd-2-clause | -384,012,645,645,948,600 | 26.15 | 79 | 0.651934 | false |
tcpcloud/openvstorage | ovs/extensions/generic/remote.py | 1 | 3482 | # Copyright 2015 Open vStorage NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Remote RPyC wrapper module
"""
from subprocess import check_output
from rpyc.utils.zerodeploy import DeployedServer
from plumbum import SshMachine
class Remote(object):
"""
Remote is a context-manager that allows code within its context to be executed through RPyC
It is supposed to be used like this:
with Remote([<ip1>, <ip2>], [module1, module2, module3]) as (remote1, remote2):
remote1.module1.do_something()
remote2.module3.do_something_else()
Or like this:
with Remote(<ip1>, [module1, module2, module3]) as remote1:
remote1.module1.do_something()
Each module mentioned in the initialization of the remote object will be made available locally (remote1.module1), but will actually be executed remotely on the respective IP (ip1)
"""
def __init__(self, ip_info, modules, username=None, password=None, strict_host_key_checking=True):
"""
Initializes the context
"""
self.ips = []
if isinstance(ip_info, basestring):
self.ips = [ip_info]
elif isinstance(ip_info, list):
self.ips = ip_info
else:
raise ValueError('IP info needs to be a single IP or a list of IPs')
if not isinstance(modules, list) and not isinstance(modules, set) and not isinstance(modules, tuple):
raise ValueError('Modules should be a list, set or tuple')
self.username = username if username is not None else check_output('whoami').strip()
ssh_opts = []
if strict_host_key_checking is False:
ssh_opts.append('-o StrictHostKeyChecking=no')
self.machines = [SshMachine(ip, user=self.username, password=password, ssh_opts=tuple(ssh_opts)) for ip in self.ips]
self.servers = [DeployedServer(machine) for machine in self.machines]
self.modules = modules
def __iter__(self):
replacements = []
for connection in self.connections:
replacements.append(self._build_remote_module(connection))
return iter(replacements)
def __enter__(self):
self.connections = [server.classic_connect() for server in self.servers]
if len(self.connections) == 1:
return self._build_remote_module(self.connections[0])
return self
def __exit__(self, *args):
_ = args
for server in self.servers:
server.close()
def _build_remote_module(self, connection):
connection.modules['sys'].path.append('/opt/OpenvStorage')
remote_modules = {}
for module in self.modules:
if hasattr(module, '__module__'):
remote_modules[module.__name__] = getattr(connection.modules[module.__module__], module.__name__)
else:
remote_modules[module.__name__] = connection.modules[module.__name__]
return type('Remote', (), remote_modules)
| apache-2.0 | -5,086,218,736,596,162,000 | 40.452381 | 184 | 0.657094 | false |
jmathai/elodie | elodie/tests/filesystem_test.py | 1 | 47287 | from __future__ import absolute_import
# Project imports
import mock
import os
import re
import shutil
import sys
import time
from datetime import datetime
from datetime import timedelta
from tempfile import gettempdir
sys.path.insert(0, os.path.abspath(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))))
from . import helper
from elodie.config import load_config
from elodie.filesystem import FileSystem
from elodie.media.text import Text
from elodie.media.media import Media
from elodie.media.photo import Photo
from elodie.media.video import Video
from nose.plugins.skip import SkipTest
from elodie.external.pyexiftool import ExifTool
from elodie.dependencies import get_exiftool
from elodie import constants
os.environ['TZ'] = 'GMT'
def setup_module():
    """Start a shared exiftool process using elodie's custom config."""
    added_args = [u'-config', u'"{}"'.format(constants.exiftool_config)]
    tool = ExifTool(executable_=get_exiftool(), addedargs=added_args)
    tool.start()
def teardown_module():
    """Stop the shared exiftool process started in setup_module.

    Bug fix: the original body was ``ExifTool().terminate`` — a bare
    attribute reference that never invoked the method, so the exiftool
    subprocess was never actually terminated.
    """
    ExifTool().terminate()
def test_create_directory_success():
    """create_directory should create a fresh folder and return True."""
    fs = FileSystem()
    target = os.path.join(helper.temp_dir(), helper.random_string(10))
    created = fs.create_directory(target)

    # Sanity check: the target must be a subdirectory, not temp_dir itself.
    assert helper.temp_dir() != target
    assert created == True
    assert os.path.isdir(target) == True
    assert os.path.exists(target) == True

    # Clean up
    shutil.rmtree(target)
def test_create_directory_recursive_success():
    """create_directory should create nested folders in one call."""
    fs = FileSystem()
    target = os.path.join(
        helper.temp_dir(), helper.random_string(10), helper.random_string(10))
    created = fs.create_directory(target)

    # Sanity check: the target must be a subdirectory, not temp_dir itself.
    assert helper.temp_dir() != target
    assert created == True
    assert os.path.isdir(target) == True
    assert os.path.exists(target) == True

    shutil.rmtree(target)
@mock.patch('elodie.filesystem.os.makedirs')
def test_create_directory_invalid_permissions(mock_makedirs):
    """create_directory should return False when os.makedirs raises OSError."""
    if os.name == 'nt':
        raise SkipTest("It isn't implemented on Windows")

    # Simulate a permission failure bubbling up from the OS layer.
    mock_makedirs.side_effect = OSError()

    fs = FileSystem()
    created = fs.create_directory('/apathwhichdoesnotexist/afolderwhichdoesnotexist')

    assert created == False
def test_delete_directory_if_empty():
    """An empty directory should be removed by delete_directory_if_empty."""
    fs = FileSystem()
    target = os.path.join(helper.temp_dir(), helper.random_string(10))
    os.makedirs(target)

    assert os.path.isdir(target) == True
    assert os.path.exists(target) == True

    fs.delete_directory_if_empty(target)

    assert os.path.isdir(target) == False
    assert os.path.exists(target) == False
def test_delete_directory_if_empty_when_not_empty():
    """A directory that still contains a subfolder must not be deleted."""
    fs = FileSystem()
    child = os.path.join(
        helper.temp_dir(), helper.random_string(10), helper.random_string(10))
    os.makedirs(child)
    parent = os.path.dirname(child)

    for target in (child, parent):
        assert os.path.isdir(target) == True
        assert os.path.exists(target) == True

    fs.delete_directory_if_empty(parent)

    # Both levels must survive because the parent was not empty.
    for target in (child, parent):
        assert os.path.isdir(target) == True
        assert os.path.exists(target) == True

    shutil.rmtree(parent)
def test_get_all_files_success():
    """get_all_files should yield every file in a populated folder."""
    fs = FileSystem()
    folder = helper.populate_folder(5)

    found = set(fs.get_all_files(folder))
    shutil.rmtree(folder)

    assert len(found) == 5, found
def test_get_all_files_by_extension():
    """get_all_files should honor the optional extension filter."""
    fs = FileSystem()
    folder = helper.populate_folder(5)

    # No filter first, then each extension with its expected count.
    found = set(fs.get_all_files(folder))
    assert len(found) == 5, len(found)

    for extension, expected in (('jpg', 3), ('txt', 2), ('gif', 0)):
        found = set(fs.get_all_files(folder, extension))
        assert len(found) == expected, len(found)

    shutil.rmtree(folder)
def test_get_all_files_with_only_invalid_file():
    """A folder holding only an invalid file should yield nothing."""
    fs = FileSystem()
    folder = helper.populate_folder(0, include_invalid=True)

    found = set(fs.get_all_files(folder))
    shutil.rmtree(folder)

    assert len(found) == 0, len(found)
def test_get_all_files_with_invalid_file():
    """Invalid files mixed with valid ones should be skipped silently."""
    fs = FileSystem()
    folder = helper.populate_folder(5, include_invalid=True)

    found = set(fs.get_all_files(folder))
    shutil.rmtree(folder)

    assert len(found) == 5, len(found)
def test_get_all_files_for_loop():
    """get_all_files should be directly iterable with a for loop.

    Cleanup: the original created an unused ``files`` set and called
    ``files.update()`` with no arguments — both were dead code and have
    been removed.
    """
    filesystem = FileSystem()
    folder = helper.populate_folder(5)

    counter = 0
    for _ in filesystem.get_all_files(folder):
        counter += 1
    shutil.rmtree(folder)

    assert counter == 5, counter
def test_get_current_directory():
    """get_current_directory should mirror os.getcwd()."""
    fs = FileSystem()
    assert fs.get_current_directory() == os.getcwd()
def test_get_file_name_definition_default():
    """The default name template uses date, original name and title."""
    fs = FileSystem()
    name_template, definition = fs.get_file_name_definition()

    assert name_template == '%date-%original_name-%title.%extension', name_template
    assert definition == [[('date', '%Y-%m-%d_%H-%M-%S')], [('original_name', '')], [('title', '')], [('extension', '')]], definition #noqa
@mock.patch('elodie.config.config_file', '%s/config.ini-custom-filename' % gettempdir())
def test_get_file_name_definition_custom():
    """A custom [File] section should drive the filename definition."""
    with open('%s/config.ini-custom-filename' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%b
        name=%date-%original_name.%extension
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    name_template, definition = FileSystem().get_file_name_definition()

    try:
        del load_config.config
    except AttributeError:
        pass

    assert name_template == '%date-%original_name.%extension', name_template
    assert definition == [[('date', '%Y-%m-%b')], [('original_name', '')], [('extension', '')]], definition #noqa
def test_get_file_name_plain():
    """Filename should combine the photo's date and slugged base name."""
    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    assert file_name == helper.path_tz_fix('2015-12-05_00-59-26-plain.jpg'), file_name
def test_get_file_name_with_title():
    """An EXIF title should be appended to the generated filename."""
    photo = Photo(helper.get_file('with-title.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    assert file_name == helper.path_tz_fix('2015-12-05_00-59-26-with-title-some-title.jpg'), file_name
def test_get_file_name_with_original_name_exif():
    """The EXIF-stored original filename should win over the disk name."""
    photo = Photo(helper.get_file('with-filename-in-exif.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    assert file_name == helper.path_tz_fix('2015-12-05_00-59-26-foobar.jpg'), file_name
def test_get_file_name_with_original_name_title_exif():
    """Both the EXIF original filename and title should appear in the name."""
    photo = Photo(helper.get_file('with-filename-and-title-in-exif.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    assert file_name == helper.path_tz_fix('2015-12-05_00-59-26-foobar-foobar-title.jpg'), file_name
def test_get_file_name_with_uppercase_and_spaces():
    """Uppercase letters and spaces should be slugged to lowercase dashes."""
    photo = Photo(helper.get_file('Plain With Spaces And Uppercase 123.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    assert file_name == helper.path_tz_fix('2015-12-05_00-59-26-plain-with-spaces-and-uppercase-123.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom' % gettempdir())
def test_get_file_name_custom():
    """A custom name template from config should shape the filename."""
    with open('%s/config.ini-filename-custom' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%b
        name=%date-%original_name.%extension
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-dec-plain.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom-with-title' % gettempdir())
def test_get_file_name_custom_with_title():
    """A custom template including %title should embed the EXIF title."""
    with open('%s/config.ini-filename-custom-with-title' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%d
        name=%date-%original_name-%title.%extension
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('with-title.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-05-with-title-some-title.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom-with-empty-value' % gettempdir())
def test_get_file_name_custom_with_empty_value():
    """A missing %title value should collapse cleanly out of the name."""
    with open('%s/config.ini-filename-custom-with-empty-value' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%d
        name=%date-%original_name-%title.%extension
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-05-plain.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom-with-lowercase' % gettempdir())
def test_get_file_name_custom_with_lower_capitalization():
    """capitalization=lower should keep the generated name lowercase."""
    with open('%s/config.ini-filename-custom-with-lowercase' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%d
        name=%date-%original_name-%title.%extension
        capitalization=lower
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-05-plain.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom-with-invalidcase' % gettempdir())
def test_get_file_name_custom_with_invalid_capitalization():
    """An unrecognized capitalization value should fall back to lowercase."""
    with open('%s/config.ini-filename-custom-with-invalidcase' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%d
        name=%date-%original_name-%title.%extension
        capitalization=garabage
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-05-plain.jpg'), file_name
@mock.patch('elodie.config.config_file', '%s/config.ini-filename-custom-with-uppercase' % gettempdir())
def test_get_file_name_custom_with_upper_capitalization():
    """capitalization=upper should uppercase the generated name."""
    with open('%s/config.ini-filename-custom-with-uppercase' % gettempdir(), 'w') as f:
        f.write("""
        [File]
        date=%Y-%m-%d
        name=%date-%original_name-%title.%extension
        capitalization=upper
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    file_name = FileSystem().get_file_name(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert file_name == helper.path_tz_fix('2015-12-05-PLAIN.JPG'), file_name
def test_get_folder_path_plain():
    """Default folder layout is date folder plus 'Unknown Location'."""
    photo = Photo(helper.get_file('plain.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    assert folder_path == os.path.join('2015-12-Dec','Unknown Location'), folder_path
def test_get_folder_path_with_title():
    """An EXIF title should not influence the folder path."""
    photo = Photo(helper.get_file('with-title.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    assert folder_path == os.path.join('2015-12-Dec','Unknown Location'), folder_path
def test_get_folder_path_with_location():
    """GPS data in EXIF should resolve to a city folder."""
    photo = Photo(helper.get_file('with-location.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    assert folder_path == os.path.join('2015-12-Dec','Sunnyvale'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-original-with-camera-make-and-model' % gettempdir())
def test_get_folder_path_with_camera_make_and_model():
    """%camera_make/%camera_model placeholders should read EXIF data."""
    with open('%s/config.ini-original-with-camera-make-and-model' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        full_path=%camera_make/%camera_model
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('Canon', 'Canon EOS REBEL T2i'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-original-with-camera-make-and-model-fallback' % gettempdir())
def test_get_folder_path_with_camera_make_and_model_fallback():
    """Literal fallbacks should be used when camera EXIF data is absent."""
    with open('%s/config.ini-original-with-camera-make-and-model-fallback' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        full_path=%camera_make|"nomake"/%camera_model|"nomodel"
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('no-exif.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('nomake', 'nomodel'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-int-in-component-path' % gettempdir())
def test_get_folder_path_with_int_in_config_component():
    """Regression test for gh-239: numeric path components should work."""
    with open('%s/config.ini-int-in-component-path' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        date=%Y
        full_path=%date
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('2015'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-combined-date-and-album' % gettempdir())
def test_get_folder_path_with_combined_date_and_album():
    """Regression test for gh-239: a custom component may combine others."""
    with open('%s/config.ini-combined-date-and-album' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        date=%Y-%m-%b
        custom=%date %album
        full_path=%custom
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('with-album.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == '2015-12-Dec Test Album', folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-combined-date-album-location-fallback' % gettempdir())
def test_get_folder_path_with_album_and_location_fallback():
    """Regression test for gh-279: %custom|%city fallback chain."""
    with open('%s/config.ini-combined-date-album-location-fallback' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        date=%Y-%m-%b
        custom=%album
        full_path=%custom|%city
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    # No album and no location: falls through to the default.
    photo = Photo(helper.get_file('plain.jpg'))
    path_plain = FileSystem().get_folder_path(photo.get_metadata())

    # No album but a GPS city available: falls back to the city.
    photo = Photo(helper.get_file('with-location.jpg'))
    path_city = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert path_plain == 'Unknown Location', path_plain
    assert path_city == 'Sunnyvale', path_city
def test_get_folder_path_with_int_in_source_path():
    """Regression test for gh-239: numeric source folder names should work."""
    fs = FileSystem()
    temporary_folder, folder = helper.create_working_folder('int')

    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)

    photo = Photo(origin)
    folder_path = fs.get_folder_path(photo.get_metadata())

    assert folder_path == os.path.join('2015-12-Dec','Unknown Location'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-original-default-unknown-location' % gettempdir())
def test_get_folder_path_with_original_default_unknown_location():
    """An empty config file should yield the default folder layout.

    Bug fix: the file written here previously used the name
    'config.ini-original-default-with-unknown-location', which did not
    match the path patched into elodie.config.config_file, so the test
    exercised a missing config file instead of an empty one. The names
    now match.
    """
    with open('%s/config.ini-original-default-unknown-location' % gettempdir(), 'w') as f:
        f.write('')
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    media = Photo(helper.get_file('plain.jpg'))
    path = filesystem.get_folder_path(media.get_metadata())
    if hasattr(load_config, 'config'):
        del load_config.config

    assert path == os.path.join('2015-12-Dec','Unknown Location'), path
@mock.patch('elodie.config.config_file', '%s/config.ini-custom-path' % gettempdir())
def test_get_folder_path_with_custom_path():
    """A multi-placeholder location mask should expand into the path."""
    with open('%s/config.ini-custom-path' % gettempdir(), 'w') as f:
        f.write("""
        [MapQuest]
        key=czjNKTtFjLydLteUBwdgKAIC8OAbGLUx
        [Directory]
        date=%Y-%m-%d
        location=%country-%state-%city
        full_path=%date/%location
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('with-location.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('2015-12-05','United States of America-California-Sunnyvale'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-fallback' % gettempdir())
def test_get_folder_path_with_fallback_folder():
    """A quoted literal fallback should be used when %album is missing."""
    with open('%s/config.ini-fallback' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        year=%Y
        month=%m
        full_path=%year/%month/%album|%"No Album Fool"/%month
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('2015','12','No Album Fool','12'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-location-date' % gettempdir())
def test_get_folder_path_with_with_more_than_two_levels():
    """A full_path with three components should create three folder levels."""
    with open('%s/config.ini-location-date' % gettempdir(), 'w') as f:
        f.write("""
        [MapQuest]
        key=czjNKTtFjLydLteUBwdgKAIC8OAbGLUx
        [Directory]
        year=%Y
        month=%m
        location=%city, %state
        full_path=%year/%month/%location
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('with-location.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('2015','12','Sunnyvale, California'), folder_path
@mock.patch('elodie.config.config_file', '%s/config.ini-location-date' % gettempdir())
def test_get_folder_path_with_with_only_one_level():
    """A single-component full_path should create one folder level."""
    with open('%s/config.ini-location-date' % gettempdir(), 'w') as f:
        f.write("""
        [Directory]
        year=%Y
        full_path=%year
        """)
    try:
        del load_config.config
    except AttributeError:
        pass

    photo = Photo(helper.get_file('plain.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    try:
        del load_config.config
    except AttributeError:
        pass

    assert folder_path == os.path.join('2015'), folder_path
def test_get_folder_path_with_location_and_title():
    """A title should not change the location-based folder path."""
    photo = Photo(helper.get_file('with-location-and-title.jpg'))
    folder_path = FileSystem().get_folder_path(photo.get_metadata())

    assert folder_path == os.path.join('2015-12-Dec','Sunnyvale'), folder_path
def test_parse_folder_name_default():
    """A single %city placeholder should resolve to the city name."""
    try:
        del load_config.config
    except AttributeError:
        pass
    place_name = {'default': u'California', 'country': u'United States of America', 'state': u'California', 'city': u'Sunnyvale'}
    mask = '%city'
    parts = re.findall('(%[^%]+)', mask)
    result = FileSystem().parse_mask_for_location(mask, parts, place_name)
    try:
        del load_config.config
    except AttributeError:
        pass

    assert result == 'Sunnyvale', result
def test_parse_folder_name_multiple():
    """Several placeholders should all be substituted in order."""
    try:
        del load_config.config
    except AttributeError:
        pass
    place_name = {'default': u'California', 'country': u'United States of America', 'state': u'California', 'city': u'Sunnyvale'}
    mask = '%city-%state-%country'
    parts = re.findall('(%[^%]+)', mask)
    result = FileSystem().parse_mask_for_location(mask, parts, place_name)
    try:
        del load_config.config
    except AttributeError:
        pass

    assert result == 'Sunnyvale-California-United States of America', result
def test_parse_folder_name_static_chars():
    """Literal text around a placeholder should be preserved."""
    try:
        del load_config.config
    except AttributeError:
        pass
    place_name = {'default': u'California', 'country': u'United States of America', 'state': u'California', 'city': u'Sunnyvale'}
    mask = '%city-is-the-city'
    parts = re.findall('(%[^%]+)', mask)
    result = FileSystem().parse_mask_for_location(mask, parts, place_name)
    try:
        del load_config.config
    except AttributeError:
        pass

    assert result == 'Sunnyvale-is-the-city', result
def test_parse_folder_name_key_not_found():
    """A missing placeholder key should fall back to the default value."""
    try:
        del load_config.config
    except AttributeError:
        pass
    place_name = {'default': u'California', 'country': u'United States of America', 'state': u'California'}
    mask = '%city'
    parts = re.findall('(%[^%]+)', mask)
    result = FileSystem().parse_mask_for_location(mask, parts, place_name)
    try:
        del load_config.config
    except AttributeError:
        pass

    assert result == 'California', result
def test_parse_folder_name_key_not_found_with_static_chars():
    """When the key is missing, literal text is dropped with it."""
    try:
        del load_config.config
    except AttributeError:
        pass
    place_name = {'default': u'California', 'country': u'United States of America', 'state': u'California'}
    mask = '%city-is-not-found'
    parts = re.findall('(%[^%]+)', mask)
    result = FileSystem().parse_mask_for_location(mask, parts, place_name)
    try:
        del load_config.config
    except AttributeError:
        pass

    assert result == 'California', result
def test_parse_folder_name_multiple_keys_not_found():
    """With every placeholder key missing, only the default value remains."""
    if hasattr(load_config, 'config'):
        del load_config.config
    fs = FileSystem()
    mask = '%city-%state'
    parts = re.findall('(%[^%]+)', mask)
    # neither 'city' nor 'state' is present
    place = {
        'default': u'United States of America',
        'country': u'United States of America',
    }
    result = fs.parse_mask_for_location(mask, parts, place)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert result == 'United States of America', result
def test_process_file_invalid():
    """process_file returns None when the source media is invalid."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('invalid.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    assert destination is None
def test_process_file_plain():
    """A plain photo is filed under date/'Unknown Location' without modifying the source."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    # the source file must be left byte-identical by processing
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Unknown Location','2015-12-05_00-59-26-photo.jpg')) in destination, destination
def test_process_file_with_title():
    """An EXIF title is appended to the destination file name."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = '%s/photo.jpg' % folder
    shutil.copyfile(helper.get_file('with-title.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Unknown Location','2015-12-05_00-59-26-photo-some-title.jpg')) in destination, destination
def test_process_file_with_location():
    """GPS EXIF data routes the photo into a city-named folder (Sunnyvale)."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('with-location.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Sunnyvale','2015-12-05_00-59-26-photo.jpg')) in destination, destination
def test_process_file_validate_original_checksum():
    """process_file never mutates the original file (checksum unchanged)."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None, origin_checksum_preprocess
    assert origin_checksum is not None, origin_checksum
    assert destination_checksum is not None, destination_checksum
    assert origin_checksum_preprocess == origin_checksum, (origin_checksum_preprocess, origin_checksum)
# See https://github.com/jmathai/elodie/issues/330
def test_process_file_no_exif_date_is_correct_gh_330():
    """Without EXIF dates, the file's mtime (set via os.utime) drives the destination path."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('no-exif.jpg'), origin)
    # force known access/modification times so the expected path is stable
    atime = 1330712100
    utime = 1330712900
    os.utime(origin, (atime, utime))
    media = Photo(origin)
    metadata = media.get_metadata()
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert '/2012-03-Mar/' in destination, destination
    assert '/2012-03-02_18-28-20' in destination, destination
def test_process_file_with_location_and_title():
    """Location folder and title suffix are combined in the destination path."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('with-location-and-title.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Sunnyvale','2015-12-05_00-59-26-photo-some-title.jpg')) in destination, destination
def test_process_file_with_album():
    """An album tag replaces the location folder in the destination path."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('with-album.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Test Album','2015-12-05_00-59-26-photo.jpg')) in destination, destination
def test_process_file_with_album_and_title():
    """Album folder and title suffix are both honored."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('with-album-and-title.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Test Album','2015-12-05_00-59-26-photo-some-title.jpg')) in destination, destination
def test_process_file_with_album_and_title_and_location():
    """When album, title and location are all set, album wins over location."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('with-album-and-title-and-location.jpg'), origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    origin_checksum = helper.checksum(origin)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-12-Dec','Test Album','2015-12-05_00-59-26-photo-some-title.jpg')) in destination, destination
# gh-89 (setting album then title reverts album)
def test_process_video_with_album_then_title():
    """Setting album and then title on a video must keep both (regression gh-89)."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'movie.mov')
    shutil.copyfile(helper.get_file('video.mov'), origin)
    origin_checksum = helper.checksum(origin)
    origin_checksum_preprocess = helper.checksum(origin)
    media = Video(origin)
    media.set_album('test_album')
    media.set_title('test_title')
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    destination_checksum = helper.checksum(destination)
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
    assert origin_checksum_preprocess is not None
    assert origin_checksum is not None
    assert destination_checksum is not None
    assert origin_checksum_preprocess == origin_checksum
    assert helper.path_tz_fix(os.path.join('2015-01-Jan','test_album','2015-01-19_12-45-11-movie-test_title.mov')) in destination, destination
@mock.patch('elodie.config.config_file', '%s/config.ini-fallback-folder' % gettempdir())
def test_process_file_fallback_folder():
    """The %album|"fallback" directory syntax uses the quoted fallback when no album exists."""
    with open('%s/config.ini-fallback-folder' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
date=%Y-%m
full_path=%date/%album|"fallback"
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert helper.path_tz_fix(os.path.join('2015-12', 'fallback', '2015-12-05_00-59-26-plain.jpg')) in destination, destination
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
@mock.patch('elodie.config.config_file', '%s/config.ini-multiple-directories' % gettempdir())
def test_process_twice_more_than_two_levels_of_directories():
    """Re-processing a file already sorted into a 3-level hierarchy only renames it in place."""
    with open('%s/config.ini-multiple-directories' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
year=%Y
month=%m
day=%d
full_path=%year/%month/%day
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert helper.path_tz_fix(os.path.join('2015','12','05', '2015-12-05_00-59-26-plain.jpg')) in destination, destination
    if hasattr(load_config, 'config'):
        del load_config.config
    # second pass: only the title changes, so only the file name should change
    media_second = Photo(destination)
    media_second.set_title('foo')
    destination_second = filesystem.process_file(destination, temporary_folder, media_second, allowDuplicate=True)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert destination.replace('.jpg', '-foo.jpg') == destination_second, destination_second
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
def test_process_existing_file_without_changes():
    """Re-processing an already-sorted, unchanged file is a no-op (returns None)."""
    # gh-210
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    assert helper.path_tz_fix(os.path.join('2015-12-Dec', 'Unknown Location', '2015-12-05_00-59-26-plain.jpg')) in destination, destination
    media_second = Photo(destination)
    destination_second = filesystem.process_file(destination, temporary_folder, media_second, allowDuplicate=True)
    assert destination_second is None, destination_second
    shutil.rmtree(folder)
    shutil.rmtree(os.path.dirname(os.path.dirname(destination)))
@mock.patch('elodie.config.config_file', '%s/config.ini-plugin-throw-error' % gettempdir())
def test_process_file_with_plugin_throw_error():
    """A plugin that raises ElodieError aborts processing (destination is None)."""
    with open('%s/config.ini-plugin-throw-error' % gettempdir(), 'w') as f:
        f.write("""
[Plugins]
plugins=ThrowError
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert destination is None, destination
@mock.patch('elodie.config.config_file', '%s/config.ini-plugin-runtime-error' % gettempdir())
def test_process_file_with_plugin_runtime_error():
    """A plugin raising a generic runtime error does NOT abort processing."""
    with open('%s/config.ini-plugin-runtime-error' % gettempdir(), 'w') as f:
        f.write("""
[Plugins]
plugins=RuntimeError
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'plain.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media = Photo(origin)
    destination = filesystem.process_file(origin, temporary_folder, media, allowDuplicate=True)
    if hasattr(load_config, 'config'):
        del load_config.config
    assert '2015-12-Dec/Unknown Location/2015-12-05_00-59-26-plain.jpg' in destination, destination
def test_set_utime_with_exif_date():
    """set_utime_from_metadata syncs the file mtime to the EXIF date without touching content."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('plain.jpg'), origin)
    media_initial = Photo(origin)
    metadata_initial = media_initial.get_metadata()
    initial_stat = os.stat(origin)
    initial_time = int(min(initial_stat.st_mtime, initial_stat.st_ctime))
    initial_checksum = helper.checksum(origin)
    # precondition: mtime and EXIF date disagree before the call
    assert initial_time != time.mktime(metadata_initial['date_taken'])
    filesystem.set_utime_from_metadata(media_initial.get_metadata(), media_initial.get_file_path())
    final_stat = os.stat(origin)
    final_checksum = helper.checksum(origin)
    media_final = Photo(origin)
    metadata_final = media_final.get_metadata()
    shutil.rmtree(folder)
    assert initial_stat.st_mtime != final_stat.st_mtime
    assert final_stat.st_mtime == time.mktime(metadata_final['date_taken'])
    # content untouched, only timestamps changed
    assert initial_checksum == final_checksum
def test_set_utime_without_exif_date():
    """Without EXIF dates, set_utime_from_metadata leaves the existing mtime in place."""
    filesystem = FileSystem()
    temporary_folder, folder = helper.create_working_folder()
    origin = os.path.join(folder,'photo.jpg')
    shutil.copyfile(helper.get_file('no-exif.jpg'), origin)
    media_initial = Photo(origin)
    metadata_initial = media_initial.get_metadata()
    initial_stat = os.stat(origin)
    initial_time = int(min(initial_stat.st_mtime, initial_stat.st_ctime))
    initial_checksum = helper.checksum(origin)
    # precondition: date_taken is already derived from the file timestamps
    assert initial_time == time.mktime(metadata_initial['date_taken'])
    filesystem.set_utime_from_metadata(media_initial.get_metadata(), media_initial.get_file_path())
    final_stat = os.stat(origin)
    final_checksum = helper.checksum(origin)
    media_final = Photo(origin)
    metadata_final = media_final.get_metadata()
    shutil.rmtree(folder)
    assert initial_time == final_stat.st_mtime
    assert final_stat.st_mtime == time.mktime(metadata_final['date_taken']), (final_stat.st_mtime, time.mktime(metadata_final['date_taken']))
    assert initial_checksum == final_checksum
def test_should_exclude_with_no_exclude_arg():
    """A path is never excluded when no exclusion patterns are given."""
    excluded = FileSystem().should_exclude('/some/path')
    assert excluded == False, excluded
def test_should_exclude_with_non_matching_regex():
    """A pattern that matches nothing in the path does not exclude it."""
    excluded = FileSystem().should_exclude('/some/path', {re.compile('foobar')})
    assert excluded == False, excluded
def test_should_exclude_with_matching_regex():
    """A pattern matching part of the path excludes it."""
    excluded = FileSystem().should_exclude('/some/path', {re.compile('some')})
    assert excluded == True, excluded
def test_should_not_exclude_with_multiple_with_non_matching_regex():
    """Several patterns, none matching: the path is kept."""
    excluded = FileSystem().should_exclude('/some/path', {re.compile('foobar'), re.compile('dne')})
    assert excluded == False, excluded
def test_should_exclude_with_multiple_with_one_matching_regex():
    """A single matching pattern among several is enough to exclude."""
    excluded = FileSystem().should_exclude('/some/path', {re.compile('foobar'), re.compile('some')})
    assert excluded == True, excluded
def test_should_exclude_with_complex_matching_regex():
    """A multi-segment regex matches deep inside a long absolute path."""
    excluded = FileSystem().should_exclude('/var/folders/j9/h192v5v95gd_fhpv63qzyd1400d9ct/T/T497XPQH2R/UATR2GZZTX/2016-04-Apr/London/2016-04-07_11-15-26-valid-sample-title.txt', {re.compile('London.*\.txt$')})
    assert excluded == True, excluded
@mock.patch('elodie.config.config_file', '%s/config.ini-does-not-exist' % gettempdir())
def test_get_folder_path_definition_default():
    """With no config file present, the built-in date/album|location|"Unknown Location" layout applies."""
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == [[('date', '%Y-%m-%b')], [('album', ''), ('location', '%city'), ('"Unknown Location"', '')]], path_definition
@mock.patch('elodie.config.config_file', '%s/config.ini-date-location' % gettempdir())
def test_get_folder_path_definition_date_location():
    """A date/location full_path yields a two-level definition in that order."""
    with open('%s/config.ini-date-location' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
date=%Y-%m-%d
location=%country
full_path=%date/%location
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('date', '%Y-%m-%d')], [('location', '%country')]
    ]
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == expected, path_definition
@mock.patch('elodie.config.config_file', '%s/config.ini-location-date' % gettempdir())
def test_get_folder_path_definition_location_date():
    """A location/date full_path yields the two levels in reversed order."""
    with open('%s/config.ini-location-date' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
date=%Y-%m-%d
location=%country
full_path=%location/%date
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('location', '%country')], [('date', '%Y-%m-%d')]
    ]
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == expected, path_definition
@mock.patch('elodie.config.config_file', '%s/config.ini-cached' % gettempdir())
def test_get_folder_path_definition_cached():
    """The folder path definition is cached after the first read."""
    with open('%s/config.ini-cached' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
date=%Y-%m-%d
location=%country
full_path=%date/%location
        """)
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('date', '%Y-%m-%d')], [('location', '%country')]
    ]
    assert path_definition == expected, path_definition
    # rewrite the config with different values to detect whether the
    # earlier definition is still served from cache
    with open('%s/config.ini-cached' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
date=%uncached
location=%uncached
full_path=%date/%location
        """)
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('date', '%Y-%m-%d')], [('location', '%country')]
    ]
    # NOTE(review): this second path_definition/expected pair is computed but
    # never asserted, so the caching behavior is not actually verified.
    # Confirm intent (cached vs. '%uncached' values) and add the missing
    # assertion.
    if hasattr(load_config, 'config'):
        del load_config.config
@mock.patch('elodie.config.config_file', '%s/config.ini-location-date' % gettempdir())
def test_get_folder_path_definition_with_more_than_two_levels():
    """full_path may define more than two directory levels (year/month/day)."""
    with open('%s/config.ini-location-date' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
year=%Y
month=%m
day=%d
full_path=%year/%month/%day
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('year', '%Y')], [('month', '%m')], [('day', '%d')]
    ]
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == expected, path_definition
@mock.patch('elodie.config.config_file', '%s/config.ini-location-date' % gettempdir())
def test_get_folder_path_definition_with_only_one_level():
    """A single-placeholder full_path yields a one-level definition."""
    with open('%s/config.ini-location-date' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
year=%Y
full_path=%year
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [
        [('year', '%Y')]
    ]
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == expected, path_definition
@mock.patch('elodie.config.config_file', '%s/config.ini-multi-level-custom' % gettempdir())
def test_get_folder_path_definition_multi_level_custom():
    """Fallback chains (%album|%month|%"foo") expand into ordered alternatives per level."""
    with open('%s/config.ini-multi-level-custom' % gettempdir(), 'w') as f:
        f.write("""
[Directory]
year=%Y
month=%M
full_path=%year/%album|%month|%"foo"/%month
        """)
    # drop any cached config so the patched config file above is re-read
    if hasattr(load_config, 'config'):
        del load_config.config
    filesystem = FileSystem()
    path_definition = filesystem.get_folder_path_definition()
    expected = [[('year', '%Y')], [('album', ''), ('month', '%M'), ('"foo"', '')], [('month', '%M')]]
    if hasattr(load_config, 'config'):
        del load_config.config
    assert path_definition == expected, path_definition
| apache-2.0 | -4,849,018,083,781,081,000 | 34.661388 | 206 | 0.683782 | false |
Clemson-DPA/dpa-pipe | dpa/product/action/update.py | 1 | 16515 | # -----------------------------------------------------------------------------
from dpa.action import Action, ActionError, ActionAborted
from dpa.notify import Notification
from dpa.shell.output import Output, Style, Fg
from dpa.product import Product, ProductError
from dpa.product.version import ProductVersion, ProductVersionError
from dpa.ptask.area import PTaskArea
from dpa.ptask.spec import PTaskSpec
from dpa.user import User
# -----------------------------------------------------------------------------
LATEST_VERSION = "-1"
# -----------------------------------------------------------------------------
class ProductUpdateAction(Action):
"""Update a product's state."""
name = "update"
target_type = "product"
# -------------------------------------------------------------------------
@classmethod
def setup_cl_args(cls, parser):
parser.add_argument(
"spec",
nargs="?",
default="",
help="Print info for this product spec.",
)
# ---- publish/unpublish
parser.add_argument(
"-p", "--publish",
nargs='?',
const=LATEST_VERSION,
default=None,
metavar="<v>,<v>,<v>...",
type=str,
help="Publish version(s) of this product. Default is latest.",
)
parser.add_argument(
"-u", "--unpublish",
nargs='?',
const=LATEST_VERSION,
default=None,
metavar="<v>,<v>,<v>...",
type=str,
help="Unpublish version(s) of this product. Default is latest.",
)
# ---- publish/unpublish
parser.add_argument(
"-d", "--deprecate",
nargs='?',
const=LATEST_VERSION,
default=None,
metavar="<v>,<v>,<v>...",
type=str,
help="Deprecate version(s) of this product. Default is latest.",
)
parser.add_argument(
"--undeprecate",
nargs='?',
const=LATEST_VERSION,
default=None,
metavar="<v>,<v>,<v>...",
type=str,
help="Undeprecate version(s) of this product. Default is latest.",
)
# ---- official/no official
official_group = parser.add_mutually_exclusive_group()
official_group.add_argument(
"-o", "--official",
nargs='?',
default=None,
const=LATEST_VERSION,
metavar="<v>",
type=str,
help="Official a version of this product. Default is latest.",
)
official_group.add_argument(
"-n", "--noofficial",
action="store_true",
help="Set this product to have no official version.",
)
# -------------------------------------------------------------------------
def __init__(self, spec, publish=None, unpublish=None, official=None,
noofficial=None, deprecate=None, undeprecate=None):
super(ProductUpdateAction, self).__init__(spec, publish=publish,
unpublish=unpublish, official=official, noofficial=noofficial,
deprecate=None, undeprecate=None,
)
self._spec = spec
self._publish = publish
self._unpublish = unpublish
self._official = official
self._noofficial = noofficial
self._deprecate = deprecate
self._undeprecate = undeprecate
# -------------------------------------------------------------------------
def execute(self):
updates = {}
versions = {}
if self.publish:
for ver in self.publish:
versions[ver.spec] = ver
data = updates.setdefault(ver.spec, {})
data['published'] = True
if self.unpublish:
for ver in self.unpublish:
versions[ver.spec] = ver
data = updates.setdefault(ver.spec, {})
data['published'] = False
if self.deprecate:
for ver in self.deprecate:
versions[ver.spec] = ver
data = updates.setdefault(ver.spec, {})
data['deprecated'] = True
if self.undeprecate:
for ver in self.undeprecate:
versions[ver.spec] = ver
data = updates.setdefault(ver.spec, {})
data['deprecated'] = False
if updates:
for (ver_spec, data) in updates.iteritems():
print "\nUpdating: " + Style.bright + ver_spec + Style.normal
version = versions[ver_spec]
for (key, value) in data.iteritems():
print " " + key + "=" + str(value)
version.update(**data)
if self.official:
self.product.set_official(self.official)
print "\nUpdated: {b}{s}{n}".format(
b=Style.bright,
s=self.official.spec,
n=Style.normal,
)
print " officialed"
elif self.noofficial:
print "\nUpdated: {b}removed official version.{n}".format(
b=Style.bright,
n=Style.normal,
)
self.product.clear_official()
print "\nDone.\n"
# -------------------------------------------------------------------------
def notify(self):
# XXX db intensive. revisit at some point
# for now, only alert on publish/official/deprecate
if not self.publish and not self.official and not self.deprecate:
return
ptasks_to_notify = []
msg = "A product you may be using has been updated:\n\n"
msg += "PRODUCT: " + self.product.spec + "\n\n"
if self.official:
product = self.official.product
ptasks_to_notify.extend(product.dependent_ptasks)
msg += "NOW OFFICIAL: " + self.official.number_padded + " - " + \
self.official.release_note + "\n"
if self.publish:
for ver in self.publish:
product = ver.product
ptasks_to_notify.extend(product.dependent_ptasks)
msg += "NOW PUBLISHED: " + ver.number_padded + " - " + \
ver.release_note + "\n"
if self.deprecate:
for ver in self.deprecate:
product = ver.product
ptasks_to_notify.extend(product.dependent_ptasks)
msg += "NOW DEPRECATED: " + ver.number_padded + " - " + \
ver.release_note + "\n"
msg += "\nYou should update your subscriptions accordingly."
subject = "Product Update: " + self.product.spec
sender = User.current().email
# TODO: the recipients should be creators of versions subscribed
recipients = set([p.creator.email for p in ptasks_to_notify])
# no need to send if there are no ptask creators to notify.
if recipients:
notification = Notification(subject, msg, list(recipients),
sender=sender)
notification.send_email()
# -------------------------------------------------------------------------
def undo(self):
pass
# -------------------------------------------------------------------------
def validate(self):
cur_spec = PTaskArea.current().spec
full_spec = PTaskSpec.get(self.spec, relative_to=cur_spec)
product = None
if full_spec:
try:
product = Product.get(full_spec)
except ProductError as e:
# fall back to input spec
try:
product = Product.get(self.spec)
except ProductError:
raise ActionError(
'Could not determine product from: "{s}"'.format(
s=self.spec
)
)
if product:
self._product = product
else:
raise ActionError(
'Could not determine product from: "{s}"'.format(
s=self.spec
)
)
if self.publish:
vers = self._nums_to_versions(self.publish)
self._publish = [v for v in vers if not v.published]
if self.unpublish:
vers = self._nums_to_versions(self.unpublish)
self._unpublish = [v for v in vers if v.unpublish]
if self.deprecate:
vers = self._nums_to_versions(self.deprecate)
self._deprecate = [v for v in vers if not v.deprecated]
if self.undeprecate:
vers = self._nums_to_versions(self.undeprecate)
self._undeprecate = [v for v in vers if v.deprecated]
if self.official:
vers = self._nums_to_versions(self.official)
if len(vers) > 1:
raise ActionError("Can't official more than one version.")
to_official = vers[0]
if to_official.number == self.product.official_version_number:
raise ActionError(
"Version {v} of '{p}' is already official.".format(
v=to_official.number,
p=self.product.spec,
)
)
if not to_official.published:
if not self.publish:
self._publish = [to_official]
else:
self._publish.append(to_official)
self._official = to_official
if self.publish and self.unpublish:
overlap = set([v.spec for v in self.publish]).intersection(
set([v.spec for v in self.unpublish]))
if len(overlap) > 0:
raise ActionError(
"Can't publish and unpublish the same versions.")
if self.deprecate and self.undeprecate:
overlap = set([v.spec for v in self.deprecate]).intersection(
set([v.spec for v in self.undeprecate]))
if len(overlap) > 0:
raise ActionError(
"Can't deprecate and undeprecate the same versions.")
# XXX publish if not already when officialing
# XXX can't official a deprecated version
# XXX can't deprecate the official version
# XXX can't unpublish something that has subscribers
# XXX add active to subscription model
if (self.publish is None and
self.unpublish is None and
self.deprecate is None and
self.undeprecate is None and
self.official is None and
self.noofficial is False):
raise ActionError("No actions to perform.")
# -------------------------------------------------------------------------
def verify(self):
if (not self.publish and not self.unpublish and not self.deprecate and
not self.undeprecate and not self.official and not self.noofficial):
raise ActionAborted("No updates to perform.")
print "\nProduct: {b}{s}{n}\n".format(
b=Style.bright,
s=self.product.spec,
n=Style.normal,
)
if self.publish:
self._version_table(self.publish, title="Publish")
if self.unpublish:
self._version_table(self.unpublish, title="Un-publish")
if self.deprecate:
self._version_table(self.deprecate, title="Deprecate")
if self.undeprecate:
self._version_table(self.undeprecate, title="Un-deprecate")
if self.official:
self._version_table([self.official], title="Official")
if self.noofficial:
print "{o}: {b}{m}{n}\n".format(
o="No official",
b=Style.bright,
m="This product will have no official versions.",
n=Style.normal,
)
if not Output.prompt_yes_no(Style.bright + "Update" + Style.reset):
raise ActionAborted("User chose not to proceed.")
# -------------------------------------------------------------------------
@property
def spec(self):
return self._spec
# -------------------------------------------------------------------------
@property
def product(self):
return self._product
# -------------------------------------------------------------------------
@property
def publish(self):
return self._publish
# -------------------------------------------------------------------------
@property
def unpublish(self):
return self._unpublish
# -------------------------------------------------------------------------
@property
def deprecate(self):
return self._deprecate
# -------------------------------------------------------------------------
@property
def undeprecate(self):
return self._undeprecate
# -------------------------------------------------------------------------
@property
def official(self):
return self._official
# -------------------------------------------------------------------------
@property
def noofficial(self):
return self._noofficial
# -------------------------------------------------------------------------
def _nums_to_versions(self, nums):
product_vers = None
versions = []
for num in nums.split(","):
if num is LATEST_VERSION:
if not product_vers:
product_vers = self.product.versions
product_vers.sort(key=lambda v: v.number)
versions.append(product_vers[-1])
elif isinstance(num, ProductVersion) and num.product == self.product:
versions.append(num)
else:
try:
matches = ProductVersion.list(
product=self.product.spec,
number=num
)
except ProductVersionError:
raise ActionError(
"Could not find a version {n} for '{s}'".format(
n=num, s=self.product.spec
)
)
else:
if len(matches) != 1:
raise ActionError(
"Could not find a version {n} for '{s}'".format(
n=num, s=self.product.spec
)
)
versions.append(matches[0])
return versions
# -------------------------------------------------------------------------
def _version_table(self, versions, title='Versions'):
number = title
note = "Release note"
reps = "Reps"
creator = "Creator"
created = "Created"
output = Output()
output.vertical_padding = 0
output.vertical_separator = None
output.table_header_separator="-"
output.header_names = [
number,
note,
reps,
creator,
created,
]
output.set_header_alignment({
number: "right",
})
output.set_header_colors({
number: Style.bright,
})
for version in sorted(versions, key=lambda v: v.number):
output.add_item(
{
number: version.number_padded,
note: version.release_note,
reps: _representations(version),
creator: version.creator_username,
created: _datetime_format(version.created),
},
)
output.dump(output_format='table')
print ""
# -----------------------------------------------------------------------------
def _datetime_format(datetime):
return datetime.strftime("%Y/%m/%d %H:%M:%S")
# -----------------------------------------------------------------------------
def _representations(version):
reps = []
for rep in version.representations:
rep_str = rep.spec.replace(version.spec, "")
reps.append(rep_str.lstrip("=").rstrip("=none"))
return ",".join(reps)
| mit | 5,395,749,681,344,600,000 | 32.842213 | 81 | 0.459885 | false |
Differlong/MyScripts | 美女图片展示.py | 1 | 2409 | """
import glob
import os
import time
picPathes = (path for path in glob.glob(r"F:/美图录/*/*.jpg"))
for pic in picPathes:
os.popen(pic)
time.sleep(3)
#体验不好的第一点是背景音乐太正了,应该去找AV里面的背景音乐;第二点是看到不想看的人可以跳过,这样就很好。这样怎么做呢?
"""
import time
import glob
import random
import os
from selenium import webdriver
chromedriver = "C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe"
os.environ["webdriver.chrome.driver"] = chromedriver
driver = webdriver.Chrome(chromedriver)
picPath = glob.glob(r"F:/美图录精华版/*")
while True:
picFold = random.choice(picPath)
isFirst = True
for pic in glob.glob(picFold+"/*.jpg"):
driver.get(pic)
if isFirst:
if(input("Do you want to continue"+pic)==""):
break
else:
isFirst = False
time.sleep(3)
driver.close()
driver.quit()
#这样看会产生
"""
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
chromedriver = "C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe"
os.environ["webdriver.chrome.driver"] = chromedriver
driver = webdriver.Chrome(chromedriver)
driver.get("http://www.python.org")
assert "Python" in driver.title
elem = driver.find_element_by_name("q")
elem.send_keys("selenium")
elem.send_keys(Keys.RETURN)
assert "Google" in driver.title
driver.close()
driver.quit()
"""
"""
import glob
import os
from selenium import webdriver
chromedriver = "C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe"
os.environ["webdriver.chrome.driver"] = chromedriver
driver = webdriver.Chrome(chromedriver)
picPath = glob.glob(r"F:/美图录/*")
for picFold in picPath:
isFirst = True
for pic in glob.glob(picFold+"/*.jpg"):
driver.get(pic)
if isFirst:
if(input("Do you want to like it? input '' mean ignore it!" +picFold)==""):
break
else:
basename = os.path.basename(picFold)
source = "F:\美图录\\"+basename
destination = "F:\美图录精华版\\"+basename
os.popen("echo D | xcopy %s %s"%(source,destination))
break
#time.sleep(3)
driver.close()
driver.quit()
"""
| mit | 5,832,949,411,809,063,000 | 17.974576 | 87 | 0.641804 | false |
PermutaTriangle/Permuta | permuta/permutils/insertion_encodable.py | 1 | 3371 | from itertools import islice
from typing import ClassVar, Dict, Iterable, Tuple
from permuta.patterns.perm import Perm
class InsertionEncodablePerms:
"""A static container of functions fortesting
if a basis has a regular insertion encoding.
"""
_ALL_PROPERTIES: ClassVar[int] = 15
_CACHE: ClassVar[Dict[Tuple, int]] = dict()
@staticmethod
def _is_incr_next_incr(perm: Perm) -> bool:
n = len(perm)
return not any(
curr < prev and any(perm[j + 1] < perm[j] for j in range(i + 1, n - 1))
for i, (prev, curr) in enumerate(zip(perm, islice(perm, 1, None)))
)
@staticmethod
def _is_incr_next_decr(perm: Perm) -> bool:
n = len(perm)
return not any(
curr < prev and any(perm[j + 1] > perm[j] for j in range(i + 1, n - 1))
for i, (prev, curr) in enumerate(zip(perm, islice(perm, 1, None)))
)
@staticmethod
def _is_decr_next_incr(perm: Perm) -> bool:
n = len(perm)
return not any(
curr > prev and any(perm[j + 1] < perm[j] for j in range(i + 1, n - 1))
for i, (prev, curr) in enumerate(zip(perm, islice(perm, 1, None)))
)
@staticmethod
def _is_decr_next_decr(perm: Perm) -> bool:
n = len(perm)
return not any(
curr > prev and any(perm[j + 1] > perm[j] for j in range(i + 1, n - 1))
for i, (prev, curr) in enumerate(zip(perm, islice(perm, 1, None)))
)
@staticmethod
def _insertion_encodable_properties(perm: Perm) -> int:
properties = InsertionEncodablePerms._CACHE.get(perm, -1)
if properties < 0:
properties = sum(
val << shift
for shift, val in enumerate(
(
InsertionEncodablePerms._is_incr_next_decr(perm),
InsertionEncodablePerms._is_incr_next_incr(perm),
InsertionEncodablePerms._is_decr_next_decr(perm),
InsertionEncodablePerms._is_decr_next_incr(perm),
)
)
)
InsertionEncodablePerms._CACHE[perm] = properties
return properties
@staticmethod
def is_insertion_encodable_rightmost(basis: Iterable[Perm]) -> bool:
"""Check if basis is insertion encodable by rightmost."""
curr = 0
for perm in basis:
curr = curr | InsertionEncodablePerms._insertion_encodable_properties(perm)
if curr == InsertionEncodablePerms._ALL_PROPERTIES:
return True
return False
@staticmethod
def is_insertion_encodable_maximum(basis: Iterable[Perm]) -> bool:
"""Check if basis is insertion encodable by maximum."""
curr = 0
for perm in basis:
curr = curr | InsertionEncodablePerms._insertion_encodable_properties(
perm.rotate()
)
if curr == InsertionEncodablePerms._ALL_PROPERTIES:
return True
return False
@staticmethod
def is_insertion_encodable(basis: Iterable[Perm]) -> bool:
"""Check if basis is insertion encodable."""
return InsertionEncodablePerms.is_insertion_encodable_rightmost(
basis
) or InsertionEncodablePerms.is_insertion_encodable_maximum(basis)
| bsd-3-clause | -6,580,096,283,389,824,000 | 35.641304 | 87 | 0.57342 | false |
shekhargulati/localjobs-python | app/views.py | 1 | 3398 | from app import app , db , login_manager
from flask import render_template , request , flash , redirect , url_for ,g , jsonify
from models import Users
from flask.ext.login import login_user , logout_user , current_user , login_required
from bson.objectid import ObjectId
import json
from bson import json_util
@app.route('/')
def index():
return render_template('index.html',title="Welcome to LocalJobs -- Location Aware Job Search Application")
@app.route('/about')
def about():
return render_template('about.html',title="About LocalJobs")
@app.route('/contact')
def contact():
return render_template('contact.html' , title="Contact Us")
@app.route('/signin' , methods=['GET','POST'])
def signin():
if request.method == 'GET':
return render_template('signin.html' , title="Signin to LocalJobs")
email = request.form['email']
password = request.form['password']
remember_me = False
if 'rememberme' in request.form:
remember_me = True
users_dict = db.users.find_one({'email':email , 'password':password})
if users_dict is None:
flash('Email or password is invalid' , 'error')
return redirect(url_for('signin'))
registered_user = Users(users_dict.get('email'),users_dict.get('password'),users_dict.get('linkedin_profile_url'),users_dict.get('skills'))
registered_user.id = users_dict.get('_id')
login_user(registered_user, remember = remember_me)
flash('Logged in successfully')
return redirect(request.args.get('next') or url_for('search'))
@app.route('/register' , methods=['GET','POST'])
def register():
if request.method == 'GET':
return render_template('register.html' , title="Register for LocalJobs Account")
email = request.form['email']
if db.users.find_one({'email':email}):
flash('User exist with email id %s' % email,'error')
return redirect(url_for('register'))
skills = [skill.strip().lower() for skill in request.form['skills'].split(',')]
user = Users(request.form['email'],request.form['password'],request.form['linkedinUrl'],skills)
user_id = db.users.insert(user.__dict__ , w=1)
flash(u'User created with id %s' % user_id)
return redirect(url_for('signin'))
@app.route('/logout')
def logout():
logout_user()
return redirect(url_for('index'))
@login_manager.user_loader
def load_user(id):
users_dict = db.users.find_one({"_id": ObjectId(str(id))})
registered_user = Users(users_dict.get('email'),users_dict.get('password'),users_dict.get('linkedin_profile_url'),users_dict.get('skills'))
registered_user.id = users_dict.get('_id')
return registered_user
@app.before_request
def before_request():
g.user = current_user
@app.route('/search')
@login_required
def search():
return render_template('search.html' , title="Search Jobs")
@app.route('/api/jobs')
@login_required
def jobs():
jobs = db.jobs.find().limit(25)
return json.dumps({'jobs':list(jobs)},default=json_util.default)
@app.route('/api/jobs/id/<job_id>')
@login_required
def job(job_id):
job = db.jobs.find_one({"_id":ObjectId(str(job_id))})
return json.dumps({'job':job},default=json_util.default)
@app.route('/api/jobs/<skills>')
@login_required
def jobs_near_with_skills(skills):
lat = float(request.args.get('latitude'))
lon = float(request.args.get('longitude'))
jobs = db.jobs.find({"skills" : {"$in" : skills.split(',')} , "location" : { "$near" : [lon,lat]}}).limit(10)
return json.dumps({'jobs':list(jobs)},default=json_util.default) | mit | 5,328,970,205,927,970,000 | 34.041237 | 140 | 0.699823 | false |
zed-ee/grader-simulator | 1GraderSimulator.py | 1 | 64887 | # -*- coding: utf-8 -*-
from pandac.PandaModules import loadPrcFileData
JOYSTICK_WHEEL = 0
JOYSTICK_STICKS = 1
BUTTON_FORWARD = 1
BUTTON_BACKWARD = 10
BUTTON_START = 3
BUTTON_HORN = 2
BUTTON_WINTER = 10
BUTTON_CAM_LEFT = 0
BUTTON_CAM_RIGHT = 7
BUTTON_RESET = 7
AXIS_STEERINGWHEEL = 0
AXIS_ACCELERATE = 2
AXIS_BRAKE = 1
AXIS_MIDDLE_BLADE_UPDOWN = 0
AXIS_MIDDLE_BLADE_ROTATE = 1
BUTTON_FRONT_BLADE_UP = 3
BUTTON_FRONT_BLADE_DOWN = 2
BUTTON_FRONT_BLADE_LEFT = 1
BUTTON_FRONT_BLADE_RIGHT = 0
TIMEOUT = 45
RESTART_MODE_RESTART = 0
RESTART_MODE_CONTINUE = 1
DEBUG_EVENTS = True
DEBUG_EVENTS = True
MODE = 1
if MODE == 1:
if 1 == 0:
loadPrcFileData("", """win-origin 0 0
win-size 1680 785
show-frame-rate-meter #t
client-cpu-affinity #t
client-cpu-affinity-mask 15
sync-video #f
assert-abort #t
text-encoding utf8
cursor-hidden #t
framebuffer-multisample 1
multisamples 8
undecorated 1""")
else:
loadPrcFileData("", """win-origin -900 0
win-size 3080 1440
show-frame-rate-meter #f
client-cpu-affinity #t
client-cpu-affinity-mask 15
assert-abort #t
text-encoding utf8
framebuffer-multisample 1
multisamples 2
cursor-hidden #t
undecorated 1""")
elif MODE == 2:
loadPrcFileData("", """win-origin -900 0
win-size 3080 1440
undecorated 1""")
else:
loadPrcFileData("", """win-origin 0 300
win-size 1680 350
undecorated 1""")
import direct.directbase.DirectStart
from direct.showbase.DirectObject import DirectObject
from direct.task.Task import Task
from pandac.PandaModules import AmbientLight,DirectionalLight
from pandac.PandaModules import TextNode,NodePath,LightAttrib
from pandac.PandaModules import Vec2,Vec3,Vec4,BitMask32, Point3
from pandac.PandaModules import PandaNode,Camera, Plane
from pandac.PandaModules import GraphicsWindow
from pandac.PandaModules import FrameBufferProperties
from pandac.PandaModules import WindowProperties
from pandac.PandaModules import GraphicsPipe
from pandac.PandaModules import CollisionTraverser,CollisionNode, CollisionHandlerEvent
from pandac.PandaModules import CollisionHandlerQueue,CollisionRay, CollisionPlane
from pandac.PandaModules import GeoMipTerrain, PNMImage, Filename
from pandac.PandaModules import Texture, TextureStage, Shader, Quat
from direct.interval.IntervalGlobal import *
from direct.fsm import FSM
from pandac.PandaModules import OdeWorld, OdeSimpleSpace,OdeJointGroup, AntialiasAttrib
from direct.particles.Particles import Particles
from direct.particles.ParticleEffect import ParticleEffect
import sys
import odebase
from Grader.Grader import *
from Grader.Config import *
from environment.Environment import *
from ObjectMover import *
from SnowMachine.SnowGrid import SnowGrid
from GravelMachine.GravelGrid import GravelGrid
from Screens.Screens import *
from direct.directutil import Mopath
from direct.interval.MopathInterval import *
from direct.interval.IntervalGlobal import *
import CarAnimation
render.setAntialias(AntialiasAttrib.MAuto)
# Joystick support is optional: fall back to keyboard-only input when the
# pygame package is missing or no devices are attached.
try:
    import pygame
    USE_JOYSTICK = True
except Exception, e:
    print `e`
    USE_JOYSTICK = False
#taskMgr.setupTaskChain('paintChain', numThreads = 1, timeslicePriority=False, frameSync=True)
taskMgr.setupTaskChain('paintChain', numThreads = 0)
if USE_JOYSTICK == True:
    pygame.init()
    print "Number of joysticks: "+str(pygame.joystick.get_count())
    if pygame.joystick.get_count() > 0:
        # Initialise every attached joystick and report its capabilities.
        for i in range(0, pygame.joystick.get_count()):
            print "Joystick("+str(i)+") ",
            pygame.joystick.Joystick(i).init()
            print "axes: " + str(pygame.joystick.Joystick(i).get_numaxes()),
            print "buttons: " + str(pygame.joystick.Joystick(i).get_numbuttons())
    else:
        # pygame is present but no devices attached; disable joystick input.
        USE_JOYSTICK = False
def debug(text):
    """Print a debug message to stdout.

    Uses the parenthesized single-argument form of print, which behaves
    identically under Python 2 (parentheses around one expression) and is
    valid Python 3 syntax, unlike the bare ``print text`` statement.
    """
    print(text)
# Selector values for which blade the controls act on.
FRONT_BLADE = 1
MIDDLE_BLADE = 2
# Scenery modes selectable on the scenery screen.
MODE_WINTER = 1
MODE_SUMMER = 0
class GraderSimulator(FSM.FSM):
keyboardMapping = {
"arrow_left": "left", "arrow_right": "right", "arrow_up": "forward", "arrow_down": "reverse",
".": "frontblade-up", ",": "frontblade-down",
"page_up": "middleblade-up", "page_down": "middleblade-down",
"z": "middleblade-rot-left", "x": "middleblade-rot-right",
"home": "middleblade-left", "end": "middleblade-right",
"k": "frontblade-left", "l": "frontblade-right",
"a": "object-inc", "s": "object-dec", "q": "object-set",
"enter": "next",
"escape": "prev",
}
keyMap = {}
nextState = {
('Off', 'next') : 'Startup',
('Startup', 'next') : 'Scenery',
('Scenery', 'next') : 'Instructions',
('Scenery', 'restart') : 'Startup',
('Instructions', 'next') : 'LevelStart',
('Instructions', 'restart') : 'Startup',
('LevelStart', 'next') : 'Game',
('LevelStart', 'restart') : 'Startup',
('Game', 'finish') : 'LevelCompleted',
('Game', 'restart') : 'Startup',
('LevelCompleted', 'next') : 'NextLevelStart',
('LevelCompleted', 'restart') : 'Startup',
# ('LevelCompleted', 'prev') : 'Startup',
('NextLevelStart', 'next') : 'LevelStart',
('LevelStart', 'gamecompleted') : 'GameCompleted',
('GameCompleted', 'next') : 'Startup',
('GameCompleted', 'restart') : 'Startup',
}
def defaultFilter(self, request, args):
print `(self.state, request)`
key = (self.state, request)
next = self.nextState.get(key)
print next
return next
    def __init__(self):
        """Build the whole simulator: cameras/screens, ODE physics world,
        grader vehicle, deformable snow/gravel surfaces, border and
        level-end collision geometry, traffic cars and the idle-timeout
        watchdog, then kick the FSM into the Startup state."""
        # UI / game-progress state.
        self.cam1 = None
        self.cam2 = None
        self.cam3 = None
        self.level = 0
        self.mode = MODE_SUMMER
        self.restartMode = -1
        self.result = 0
        self.resultCount = 0
        self.levelEnd = {}
        self.message = ""
        self.levelStarted = False
        self.firstPaint = True
        # Rising-edge detector for the accelerator pedal ("rev" gesture).
        self.enableRev = True
        FSM.FSM.__init__(self, 'dummy')
        #self.setupScreenSingle(2)
        self.setupScreen()
        #render.setShaderAuto()
        # Physics: ODE world, environment and the grader itself.
        self.odeworld = odebase.ODEWorld_Simple()
        self.LoadModels()
        self.grader.setSyncCamera(False)
        self.grader.brake()
        base.disableMouse()
        self.odeworld.EnableODETask(3)
        ##################
        self.screens = Screens()
        self.selectedBlade = MIDDLE_BLADE
        #base.disableMouse();
        self.lastUpdateTime = 0
        self.cTrav = CollisionTraverser()
#        self.grader.bodyBox.setPos(GRADER_START_POS)
#        self.grader.body.setHpr(GRADER_START_HPR)
        #self.grader.moveForward()
        #self.cam1.setPos(self.grader.body, CAMERA_LEFT_POS)
        #self.cam2.setPos(self.grader.body, CAMERA_MIDDLE_POS)
        #self.cam3.setPos(self.grader.body, CAMERA_RIGHT_POS)
        self.setupLights()
        self.setupKeys(self.keyboardMapping)
        self.accept("l", self.printGraderPos)
        #x = loader.loadModel("models/grader/body-box-full")
        #self.cam1.setPos(self.grader.carbody_view, CAMERA_LEFT_POS)
        #self.objectMover = ObjectMover(self.cam2,3, self.grader.body)
        #self.objectMover = ObjectMover(self.grader.carbody,0.1, render)
        #self.objectMover = ObjectMover(self.env.models['golf']['model'],0.1, render)
        # Deformable surface grids for winter (snow) and summer (gravel).
        self.snow = SnowGrid(render)
        self.gravel = GravelGrid(render)
        ## BORDER
        # Invisible border walls around the playable area.
        self.maze = loader.loadModel("models/environment/border")
        self.maze.setScale(4)
        self.maze.reparentTo(render)
        self.walls = self.maze.find("**/wall_collide")
        self.walls.node().setIntoCollideMask(BitMask32.bit(0))
        #self.walls.show()
        # Trigger geometry marking the end of a level.
        self.maze2 = loader.loadModel("models/environment/levelends")
        self.maze2.setScale(4)
        self.maze2.reparentTo(render)
        self.walls2 = self.maze2.find("**/level2_end")
        self.walls2.node().setIntoCollideMask(BitMask32.bit(0))
        #self.walls2.show()
        # Panda collision traversal: grader sphere vs walls/cars/triggers,
        # hits queued for collisionTask to inspect.
        self.cTrav = CollisionTraverser()
        self.cHandler = CollisionHandlerQueue()
        self.cTrav.addCollider(self.grader.ballSphere, self.cHandler)
        #self.cTrav.showCollisions(render)
        base.cTrav = self.cTrav
        ## BORDER
        #base.cam.setPos(0,0,500)
        #base.cam.lookAt(0,0,0)
        #taskMgr.add(self.handleGameInputs,"handleGameInputsTask")
        #taskMgr.add(self.updateTerrainTask,"update")
       # base.toggleWireframe( )
        # Traffic cars: two models attached to boxes that CarAnimation moves
        # along the pre-recorded motion path loaded below.
        self.myMotionPathName = Mopath.Mopath()
        self.myMotionPathName.loadFile("models/line")
        #self.myMotionPathName.ls()
        #self.box = NodePath(PandaNode("box"))
        self.box = loader.loadModel("models/car1/car_box")
        self.box.reparentTo(render)
        car = loader.loadModel('models/vehicles/police')
        car.reparentTo(self.box)
        #box.setScale(1.25)
        car.setHpr(180,0,0)
        car.setZ(2)
        self.car1coll = self.box.find("**/ball")
        self.car1coll.node().setIntoCollideMask(BitMask32.bit(0))
        #self.car1coll.show()
#        self.box2 = NodePath(PandaNode("box2"))
        self.box2 = loader.loadModel("models/car1/car_box")
        self.box2.reparentTo(render)
        car2 = loader.loadModel('models/vehicles/vw_golf')
        car2.reparentTo(self.box2)
        #box2.setScale(1.25)
        car2.setHpr(180,0,0)
        car2.setZ(2)
        #self.car2coll = car2.find("**/car_collide")
        self.car2coll = self.box2.find("**/ball")
        self.car2coll.node().setIntoCollideMask(BitMask32.bit(0))
        #self.car2coll.show()
        #self.env.models['police']['model'].place()
        self.carAnim1 = CarAnimation.CarAnimation(self.box)
        self.carAnim2 = CarAnimation.CarAnimation(self.box2)
        # Normal flow kicks the FSM from 'dummy' into Startup; the dead else
        # branch is a developer shortcut straight into a given level.
        if 1==1:
            self.request('next')
        else:
            self.enterStartup()
            self.exitStartup()
            #self.mode=MODE_WINTER
            self.exitScenery()
            self.level = 3
            self.request('LevelStart')
            self.request('next')
        #x = self.env.models['police']['model'].getPos(render)
        #print 'police:', `x`, `self.env.models['bmw']['pos']`
        #self.cam2.setPos(self.env.models['police']['pos'])
        #self.cam2.setPos(CAMERA_MIDDLE_POS*2)
        #self.cam2.setHpr(CAMERA_MIDDLE_HPR)
        #print `box`
#        myInterval = MopathInterval(self.myMotionPathName, box, duration= 10, name = "MotionInterval")
#        myInterval.loop()
        #print `self.myMotionPathName`
        #print `myInterval`
        # Idle watchdog: timerTask exits the app after TIMEOUT quiet seconds.
        taskMgr.add(self.timerTask, 'timerTask')
        self.timer = -1
        """
        self.rain = render.attachNewNode('parent')
        self.rain.reparentTo(self.grader.carbody_view) # use parent.reparentTo(base.cam) for real apps
        # enable particles
        base.enableParticles()
        rain = ParticleEffect()
        rain.loadConfig(Filename('rain.ptf'))
        #Sets particles to birth relative to the parent
        rain.start(self.rain)
        print "Rain effect " + `rain`
        """
def timerTask(self, task):
if self.state != 'Startup':
seconds = int(task.time)
#print self.timer,self.timer+TIMEOUT, seconds
if self.timer < 0:
self.timer = seconds
elif self.timer+TIMEOUT < seconds:
#print "Timeout!"
#self.request('restart')
sys.exit()
return Task.cont
    def setupKeyboard(self):
        """Bind the full debug keyboard scheme: driving, braking, steering
        and direct blade jogging (used when no joystick hardware is present).

        For the *BladeMove calls the first argument selects the movement
        plane (1 = up/down, 0 = sideways, matching the joystick button
        handlers) and the second the direction (-1/0/+1).
        """
        self.accept("v", self.grader.toggleCameraMode)
        # Driving: arrows drive; releasing returns to neutral.
        self.accept("arrow_up", self.grader.forward)
        self.accept("arrow_up-up", self.grader.normal)
        self.accept("arrow_down", self.grader.backward)
        self.accept("arrow_down-up", self.grader.normal)
        # Braking: space = hard (200), shift = gentle (70).
        self.accept("space", self.grader.brake, [200.0])
        self.accept("space-up", self.grader.releasebrake)
        self.accept("shift", self.grader.brake, [70.0])
        self.accept("shift-up", self.grader.releasebrake)
        # Steering: Turn(active, step).
        self.accept("arrow_left", self.grader.Turn, [True,-0.01])
        self.accept("arrow_left-up", self.grader.Turn, [False,-0.01])
        self.accept("arrow_right", self.grader.Turn, [True,0.01])
        self.accept("arrow_right-up", self.grader.Turn, [False,0.01])
        # Front blade: q/w vertical, e/r sideways.
        self.accept("q", self.grader.frontBladeMove, [1, 1])
        self.accept("q-up", self.grader.frontBladeMove, [1, 0])
        self.accept("w", self.grader.frontBladeMove, [1, -1])
        self.accept("w-up", self.grader.frontBladeMove, [1, 0])
        self.accept("e", self.grader.frontBladeMove, [0, 1])
        self.accept("e-up", self.grader.frontBladeMove, [0, 0])
        self.accept("r", self.grader.frontBladeMove, [0, -1])
        self.accept("r-up", self.grader.frontBladeMove, [0, 0])
        # Middle blade: y/u vertical, i/o sideways.
        self.accept("y", self.grader.middleBladeMove, [1, 1])
        self.accept("y-up", self.grader.middleBladeMove, [1, 0])
        self.accept("u", self.grader.middleBladeMove, [1, -1])
        self.accept("u-up", self.grader.middleBladeMove, [1, 0])
        self.accept("i", self.grader.middleBladeMove, [0, -1])
        self.accept("i-up", self.grader.middleBladeMove, [0, 0])
        self.accept("o", self.grader.middleBladeMove, [0, 1])
        self.accept("o-up", self.grader.middleBladeMove, [0, 0])
        self.accept("v-up", self.printTasks)
def printTasks(self):
print `taskMgr`
def printGraderPos(self):
print 'pos: '+ `self.grader.carbody_ode.geom.getPosition()` + ', ('+ `self.grader.carbody_ode.geom.getQuaternion()`
for i in range(6):
print 'pos wheel '+ `i` + ": "+`self.grader.wheels_ode[i].geom.getPosition()` + ', ('+ `self.grader.wheels_ode[i].geom.getQuaternion()`
    def LoadModels(self):
        """Create the ODE physics setup and the main actors.

        Configures gravity and the contact-surface table (friction, bounce,
        ERP/CFM per surface pair), builds the static Environment and the
        Grader, and wires grader collisions to odeCollisionEvent.
        """
        base.setBackgroundColor(0,0,0)
        world = self.odeworld.world
        space = self.odeworld.space
        world.setGravity(0, 0, -10)
        world.initSurfaceTable(5)
        # surface 1, 2 is the wheels
        # surface 3 is the wall
        # (surfaceId1, surfaceId2, mu, bounce, bounce_vel, soft_erp, soft_cfm, slip, dampen)
        #world.setSurfaceEntry(0, 0, 0.8, 0.0, 0, 188, 0.00001, 0.0, 0.002)
        world.setSurfaceEntry(0, 0, 0.8, 0.0, 10, 0.9, 0.00001, 100, 0.002)
        world.setSurfaceEntry(0, 1, 0.8, 0.1, 10, 0.8, 0.00005, 0, 1)
        world.setSurfaceEntry(0, 2, 0.9, 0.1, 10, 0.8, 0.00005, 0, 1)
        world.setSurfaceEntry(3, 1, 0, 0, 100, 0, 1, 100, 0.002)
        world.setSurfaceEntry(3, 2, 0, 0, 100, 0, 1, 100, 0.002)
        self.env = Environment(self.odeworld, 4)
        # Grader collisions are reported back through odeCollisionEvent.
        notifier = self.odeCollisionEvent
        #notifier = None
        self.grader = Grader(self.odeworld, Vec3(320, 400, 45)*4, Vec3(GRADER_START_HPR), False,
            notifier)
        self.grader.setCams(self.cam1, self.cam2, self.cam3)
        # Gate so one physical crash triggers only one game-over sequence.
        self.enableCollisions = True
    def odeCollisionEvent(self, odeobject, geomcollided, entry):
        """ODE collision callback for the grader.

        Ground contacts are ignored.  Any other contact stops the grader,
        plays the crash sound, picks an Estonian player-facing message for
        the object that was hit and schedules handleCollision half a second
        later.  self.enableCollisions gates this so it fires only once.
        """
        if self.enableCollisions:
            if geomcollided != self.env.models['ground']['ode_geom'].geom:
                self.grader.stop()
                self.grader.audio.crash()
                print "collision:", `(odeobject, geomcollided, entry)`
                # Default message; refined below per collided object.
                self.message = "Tegid avarii"
                if self.env.models.has_key('houses') and geomcollided == self.env.models['houses']['ode_geom'].geom:
                    self.message = "Sõitsid vastu maja"
                if self.env.models.has_key('bridge') and geomcollided == self.env.models['bridge']['ode_geom'].geom:
                    self.message = "Sõitsid vastu silda"
                elif self.env.models.has_key('bigtree') and geomcollided == self.env.models['bigtree']['ode_geom'].geom:
                    self.message = "Sõitsid vastu puud"
                elif self.env.models.has_key('signs1') and geomcollided == self.env.models['signs1']['ode_geom'].geom:
                    self.message = "Sõitsid liiklusmärgile otsa"
                taskMgr.doMethodLater(0.5, self.handleCollision, "collision", extraArgs = [self.message])
                #self.grader.setSyncCamera(False)
                self.enableCollisions = False
    def handleCollision(self, message):
        """Deferred crash handler: mark the level as failed (result = -1)
        and move the FSM to LevelCompleted.

        *message* is accepted for taskMgr extraArgs compatibility but not
        used here; the text shown to the player is read from self.message.
        """
        self.result = -1
        self.request('finish')
        self.grader.stop()
    def handleFinish(self, message):
        """Deferred success handler: move the FSM to LevelCompleted without
        marking a failure.  *message* is unused (see handleCollision)."""
        self.request('finish')
        self.grader.stop()
    def collisionTask(self, task):
        """Per-frame inspection of queued Panda collisions from the grader's
        collision sphere.

        wall_collide  -> drove off the road (fail);
        car_collide   -> hit a traffic car (fail, stop car animations);
        level2_end    -> level completed.
        The fail/finish handlers are deferred ~0.5 s so the grader visibly
        comes to rest before the screen changes.
        """
        #return Task.cont #Continue the task indefinitely
        for i in range(self.cHandler.getNumEntries()):
            entry = self.cHandler.getEntry(i)
            name = entry.getIntoNode().getName()
            print 'collision: ' + `(entry, name)`
            if name == "wall_collide":
                self.message = 'Sõitsid teelt välja.'
                taskMgr.doMethodLater(0.5, self.handleCollision, "collision", extraArgs = [self.message])
                return
#            self.grader.setSyncCamera(False)
            elif name == "car_collide":
                self.message = 'Tegid avarii.'
                self.carAnim1.seq.clearIntervals()
                self.carAnim2.seq.clearIntervals()
                print `self.carAnim1.seq`
                taskMgr.doMethodLater(0.5, self.handleCollision, "collision", extraArgs = [self.message])
                return
            elif name == "level2_end":
                print "Level Completed"
                self.grader.stop()
                #self.message = 'Tase edukalt läbitud. Väga tubli.'
                taskMgr.doMethodLater(0.5, self.handleFinish, "collision", extraArgs = [self.message])
                # NOTE(review): 'finish' is requested here immediately AND
                # again 0.5s later by handleFinish, when the FSM is already
                # in LevelCompleted (which has no 'finish' transition).
                # Looks redundant -- confirm before changing.
                self.request('finish')
                return
            elif name == "loseTrigger":
                pass
        return Task.cont #Continue the task indefinitely
#Sets up some default lighting
def setupLights(self):
ambientLight = AmbientLight( "ambientLight" )
ambientLight.setColor( Vec4(.4, .4, .35, 1) )
#ambientLight.setColor( Vec4(0.1, 0.0, 0.0, 1.0) )
directionalLight = DirectionalLight( "directionalLight" )
directionalLight.setDirection( Vec3( 0, 8, -2.5 ) )
directionalLight.setColor( Vec4( 0.9, 0.8, 0.9, 1 ) )
ambientLightNP = render.attachNewNode(ambientLight)
render.setLight(ambientLightNP)
directionalLightNP = render.attachNewNode(directionalLight)
# This light is facing backwards, towards the camera.
directionalLightNP.setHpr(180, -20, 0)
render.setLight(directionalLightNP)
    def doNothing(self):
        """No-op event handler used to mask out inputs while disabled."""
        pass
def disableInputs(self):
print "disableInputs"
self.accept("escape", self.doNothing)
self.accept("enter", self.doNothing)
self.accept("arrow_left", self.doNothing)
self.accept("arrow_right", self.doNothing)
self.setupKeys()
def setupKeys(self, mapping = {} ):
self.keyMap = {}
for key in mapping.keys():
self.accept(key, self.setKey, [mapping[key],1])
self.accept(key+'-up', self.setKey, [mapping[key],0])
self.keyMap[mapping[key]] = 0
    #Records the state of the arrow keys
    def setKey(self, key, value):
        """Record pressed (1) / released (0) state of logical action *key*."""
        self.keyMap[key] = value
    def nextScreen(self):
        """Advance the FSM to the next screen."""
        self.request('next')
    def prevScreen(self):
        """Return the FSM to the previous screen."""
        self.request('prev')
def handleStartupInputs(self, task):
if USE_JOYSTICK == True:
for e in pygame.event.get():
self.timer = -1
if DEBUG_EVENTS: print `e`
if e.joy == JOYSTICK_WHEEL:
if e.type == pygame.JOYBUTTONDOWN:
if e.button == BUTTON_START:
self.request('next')
if (self.keyMap["next"]!=0):
self.request('next')
return task.cont
def handleGameCompletedInputs(self, task):
if USE_JOYSTICK == True:
for e in pygame.event.get():
self.timer = -1
if e.joy == 0:
if e.type == pygame.JOYAXISMOTION:
if e.axis == AXIS_ACCELERATE:
if self.enableRev == True:
if e.value > 0.6:
self.request('next')
self.enableRev = False
else:
if e.value < 0:
self.enableRev = True
# if e.type == pygame.JOYBUTTONDOWN:
# if e.button == BUTTON_FORWARD:
# self.request('next')
if (self.keyMap["next"]!=0):
self.request('next')
return task.cont
    def handleSceneryInputs(self, task):
        """Poll input on the scenery-selection screen.

        Steering wheel left/right switches winter/summer scenery, a rising
        edge on the accelerator pedal (past 0.6) confirms the choice,
        BUTTON_RESET opens the restart dialog and BUTTON_HORN sounds the
        horn.  Keyboard fallbacks mirror the same actions.
        """
        if USE_JOYSTICK == True:
            for e in pygame.event.get():
                self.timer = -1
                if DEBUG_EVENTS: print `e`
                if e.joy == 0:
                    if e.type == pygame.JOYAXISMOTION:
                        if e.axis == AXIS_STEERINGWHEEL:
                            if e.value < -0.2:
                                if self.mode != MODE_WINTER:
                                    self.selectSceneryWinter()
                            elif e.value > 0.2:
                                if self.mode != MODE_SUMMER:
                                    self.selectScenerySummer()
                        elif e.axis == AXIS_ACCELERATE:
                            # Rising-edge detection: fire once per pedal press,
                            # re-armed when the pedal drops below 0.
                            if self.enableRev == True:
                                if e.value > 0.6:
                                    #self.grader.rev()
                                    self.request('next')
                                    self.enableRev = False
                            else:
                                if e.value < 0:
                                    self.enableRev = True
                    elif e.type == pygame.JOYBUTTONDOWN:
                        if e.button == BUTTON_RESET:
                            self.showRestartDialog()
#                    elif e.button == BUTTON_FORWARD:
#                        self.request('next')
                        elif e.button == BUTTON_HORN:
                            self.grader.audio.horn()
                    elif e.type == pygame.JOYBUTTONUP:
                        if e.button == BUTTON_HORN:
                            self.grader.audio.horn(False)
        if (self.keyMap["next"]!=0):
            self.request('next')
        if (self.keyMap["prev"]!=0):
            self.request('prev')
        if (self.keyMap["left"]!=0):
            self.selectScenerySummer()
        if (self.keyMap["right"]!=0):
            self.selectSceneryWinter()
        return task.cont
    def handleRestartInputs(self, task):
        """Poll input on the restart-confirmation dialog.

        Steering wheel left/right toggles restart/continue; a rising edge
        on the accelerator pedal executes restartGame().
        """
        if USE_JOYSTICK == True:
            for e in pygame.event.get():
                self.timer = -1
                if DEBUG_EVENTS: print `e`
                if e.joy == 0:
                    if e.type == pygame.JOYAXISMOTION:
                        if e.axis == AXIS_STEERINGWHEEL:
                            if e.value < -0.2:
                                if self.restartMode != RESTART_MODE_RESTART:
                                    self.selectRestart()
                            elif e.value > 0.2:
                                if self.restartMode != RESTART_MODE_CONTINUE:
                                    self.selectContinue()
                        elif e.axis == AXIS_ACCELERATE:
                            # Rising-edge detection on the pedal.
                            if self.enableRev == True:
                                if e.value > 0.6:
                                    self.restartGame()
                                    self.enableRev = False
                            else:
                                if e.value < 0:
                                    self.enableRev = True
#                    elif e.type == pygame.JOYBUTTONDOWN:
#                        if e.button == BUTTON_FORWARD:
#                            self.restartGame()
        if (self.keyMap["next"]!=0):
            self.request('next')
        if (self.keyMap["prev"]!=0):
            self.request('prev')
        # NOTE(review): the keyboard left/right fallbacks call the
        # scenery-selection helpers instead of selectRestart/selectContinue;
        # looks like a copy-paste from handleSceneryInputs -- confirm intent.
        if (self.keyMap["left"]!=0):
            self.selectScenerySummer()
        if (self.keyMap["right"]!=0):
            self.selectSceneryWinter()
        return task.cont
    def handleInstructionsInputs(self, task):
        """Poll input during the interactive tutorial screen.

        Wheel device (joy 0): steering, pedal (rising edge revs the engine,
        or confirms once tutorial step 3 is reached), BUTTON_RESET opens the
        restart dialog, BUTTON_HORN honks.  Sticks device (joy 1): buttons
        jog the front blade, axes the middle blade, with blade audio
        following movement.  Steps 1 and 2 auto-advance once the front
        resp. middle blade has actually been moved.
        """
        if USE_JOYSTICK == True:
            for e in pygame.event.get():
                self.timer = -1
                if DEBUG_EVENTS: print `e`
                if e.joy == 0:
                    if e.type == pygame.JOYAXISMOTION:
                        if e.axis == 0:
                            self.grader.steer(e.value)
                        elif e.axis == AXIS_ACCELERATE:
                            # Rising-edge detection on the pedal.
                            if self.enableRev == True:
                                if e.value > 0.6:
                                    if self.screens.instructions.step == 3:
                                        self.request('next')
                                    else:
                                        self.grader.rev()
                                    self.enableRev = False
                            else:
                                if e.value < 0:
                                    self.enableRev = True
                    elif e.type == pygame.JOYBUTTONDOWN:
                        if e.button == BUTTON_RESET:
                            self.showRestartDialog()
                        elif e.button == BUTTON_FORWARD:
                            if self.screens.instructions.step == 3:
                                self.request('next')
                        elif e.button == BUTTON_HORN:
                            self.grader.audio.horn()
                    elif e.type == pygame.JOYBUTTONUP:
                        if e.button == BUTTON_HORN:
                            self.grader.audio.horn(False)
                elif e.joy == JOYSTICK_STICKS:
                    # Front blade jogging: (plane, direction) where plane 1 is
                    # up/down and plane 0 sideways; button release stops it.
                    if e.type == pygame.JOYBUTTONDOWN:
                        if e.button == BUTTON_FRONT_BLADE_UP:
                            self.grader.frontBladeMove(1, 1)
                            self.grader.audio.blade()
                        elif e.button == BUTTON_FRONT_BLADE_DOWN:
                            self.grader.frontBladeMove(1, -1)
                            self.grader.audio.blade()
                        elif e.button == BUTTON_FRONT_BLADE_LEFT:
                            self.grader.frontBladeMove(0, 1)
                            self.grader.audio.blade()
                        elif e.button == BUTTON_FRONT_BLADE_RIGHT:
                            self.grader.frontBladeMove(0, -1)
                            self.grader.audio.blade()
                    elif e.type == pygame.JOYBUTTONUP:
                        if e.button == BUTTON_FRONT_BLADE_UP:
                            self.grader.frontBladeMove(1, 0)
                            self.grader.audio.blade(False)
                        elif e.button == BUTTON_FRONT_BLADE_DOWN:
                            self.grader.frontBladeMove(1, 0)
                            self.grader.audio.blade(False)
                        elif e.button == BUTTON_FRONT_BLADE_LEFT:
                            self.grader.frontBladeMove(0, 0)
                            self.grader.audio.blade(False)
                        elif e.button == BUTTON_FRONT_BLADE_RIGHT:
                            self.grader.frontBladeMove(0, 0)
                            self.grader.audio.blade(False)
                    elif e.type == pygame.JOYAXISMOTION:
                        # Middle blade follows the stick axes; audio plays
                        # while the stick is deflected beyond the deadzone.
                        if abs(e.value) > 0.05:
                            self.grader.audio.blade()
                        else:
                            self.grader.audio.blade(False)
                        #print `
                        if (e.axis == 0):
                            self.grader.middleBladeMove(1-e.axis, -e.value)
                        else:
                            self.grader.middleBladeMove(1-e.axis, e.value)
        # Auto-advance tutorial steps once the relevant blade has moved.
        if self.screens.instructions.step == 1:
            if self.grader.frontBlade.bladeInGround() or not self.grader.frontBlade.bladeTooStraight():
                self.screens.instructions.next()
        elif self.screens.instructions.step == 2:
            if self.grader.middleBlade.bladeInGround() or not self.grader.middleBlade.bladeTooStraight():
                self.screens.instructions.next()
        if (self.keyMap["next"]!=0):
            self.request('next')
        if (self.keyMap["prev"]!=0):
            self.request('prev')
        """
        if (self.keyMap["left"]!=0):
            self.grader.Turn(True,-0.01)
        else:
            self.grader.Turn(False,-0.01)
        if (self.keyMap["right"]!=0):
            self.grader.Turn(True,0.01)
        else:
            self.grader.Turn(False,0.01)
        """
        return task.cont
def handleLevelStartInputs(self, task):
if USE_JOYSTICK == True:
for e in pygame.event.get():
self.timer = -1
if e.joy == 0:
if e.type == pygame.JOYAXISMOTION:
if e.axis == AXIS_ACCELERATE:
if self.enableRev == True:
if e.value > 0.6:
self.request('next')
self.enableRev = False
else:
if e.value < 0:
self.enableRev = True
if (self.keyMap["next"]!=0):
self.request('next')
if (self.keyMap["prev"]!=0):
self.request('prev')
return task.cont
def handleLevelCompletedInputs(self, task):
if USE_JOYSTICK == True:
for e in pygame.event.get():
self.timer = -1
if e.joy == 0:
if e.type == pygame.JOYAXISMOTION:
if e.axis == AXIS_ACCELERATE:
if self.enableRev == True:
if e.value > 0.6:
self.request('next')
self.enableRev = False
else:
if e.value < 0:
self.enableRev = True
elif e.type == pygame.JOYBUTTONDOWN:
if e.button == BUTTON_RESET:
self.showRestartDialog()
if (self.keyMap["next"]!=0):
self.request('next')
return task.cont
# Accepts arrow keys to move either the player or the menu cursor,
# Also deals with grid checking and collision detection
def handleGameInputs(self, task):
#debug("handleGameInputs")
# Get the time elapsed since last frame. We need this
# for framerate-independent movement.
elapsed = globalClock.getDt()
# startpos = self.grader.body.getPos()
# Consume PyGame events.
# This seems superfluous, but it is necessary.
# Otherwise get_axis and get_button don't work.
if USE_JOYSTICK == True:
for e in pygame.event.get():
if DEBUG_EVENTS: print "handleGameInputs", `e`
self.timer = -1
if e.joy == 0:
if e.type == pygame.JOYAXISMOTION:
if e.axis == AXIS_STEERINGWHEEL:
self.grader.steer(e.value)
elif e.axis == AXIS_ACCELERATE:
if e.value > -0.8:
self.grader.accelerate(1+e.value)
else:
self.grader.brake2(0.2)
if self.enableRev == True:
if e.value > 0.6:
self.grader.rev()
self.enableRev = False
else:
if e.value < 0:
self.enableRev = True
elif e.axis == AXIS_BRAKE:
if e.value > 0:
self.grader.brake2(e.value + 0.4)
# if(e.value < -0.99):
# self.grader.rev()
elif e.type == pygame.JOYBUTTONDOWN:
if e.button == BUTTON_RESET:
self.showRestartDialog()
elif e.button == BUTTON_CAM_LEFT:
self.dr1.setActive(not self.dr1.isActive())
# elif e.button == BUTTON_CAM_RIGHT:
self.dr3.setActive(not self.dr3.isActive())
elif e.button == BUTTON_FORWARD:
self.grader.setGear(GEAR_FORWARD)
elif e.button == BUTTON_BACKWARD:
self.grader.setGear(GEAR_REVERSE)
elif e.button == BUTTON_HORN:
self.grader.audio.horn()
elif e.type == pygame.JOYBUTTONUP:
if e.button == BUTTON_HORN:
self.grader.audio.horn(False)
elif e.joy == JOYSTICK_STICKS:
if e.type == pygame.JOYAXISMOTION:
if abs(e.value) > 0.05:
self.grader.audio.blade()
else:
self.grader.audio.blade(False)
#print `
if (e.axis == 0):
self.grader.middleBladeMove(1-e.axis, -e.value)
else:
self.grader.middleBladeMove(1-e.axis, e.value)
elif e.type == pygame.JOYBUTTONDOWN:
if e.button == BUTTON_FRONT_BLADE_UP:
self.grader.frontBladeMove(1, 1)
self.grader.audio.blade()
elif e.button == BUTTON_FRONT_BLADE_DOWN:
self.grader.frontBladeMove(1, -1)
self.grader.audio.blade()
elif e.button == BUTTON_FRONT_BLADE_LEFT:
self.grader.frontBladeMove(0, -1)
self.grader.audio.blade()
elif e.button == BUTTON_FRONT_BLADE_RIGHT:
self.grader.frontBladeMove(0, 1)
self.grader.audio.blade()
elif e.type == pygame.JOYBUTTONUP:
if e.button == BUTTON_FRONT_BLADE_UP:
self.grader.frontBladeMove(1, 0)
self.grader.audio.blade(False)
elif e.button == BUTTON_FRONT_BLADE_DOWN:
self.grader.frontBladeMove(1, 0)
self.grader.audio.blade(False)
elif e.button == BUTTON_FRONT_BLADE_LEFT:
self.grader.frontBladeMove(0, 0)
self.grader.audio.blade(False)
elif e.button == BUTTON_FRONT_BLADE_RIGHT:
self.grader.frontBladeMove(0, 0)
self.grader.audio.blade(False)
return task.cont
def paintSnowTask(self, task):
if (self.levelStarted and self.grader.hasMoved()):
if self.firstPaint:
self.grader.resetTrack()
self.firstPaint = False
else:
if self.mode == MODE_WINTER:
self.grader.paintGround(self.snow)
self.snow.redraw()
else:
self.grader.paintGround(self.gravel)
self.gravel.redraw()
return task.cont
def progressTask(self, task):
if (self.levelStarted and self.grader.hasMoved()):
p = 0
if self.screens.game.items.has_key('progress'):
if not self.firstPaint:
if self.mode == MODE_WINTER:
self.grader.calcProgress(self.snow)
p = self.snow.getProgress()
else:
self.grader.calcProgress(self.gravel)
p = self.gravel.getProgress()
#print `p`
self.screens.game.items['progress'].setTexOffset(self.screens.game.stage, 1-p, 0)
cnt = self.resultCount+1
self.result = (self.result * self.resultCount + p) / cnt
self.resultCount = cnt
#print (cnt, self.result)
# self.screens.game.items['progress_mask'].setPos(p*1.8, 0, 0)
if self.level == 0:
if (self.grader.middleBlade.bladeInGround()):
self.screens.tooltip.show("Surusid keskmise saha liiga\nvastu maad!")
self.grader.brake2(0.2)
elif (self.grader.frontBlade.bladeInGround()):
self.screens.tooltip.show("Surusid esimese saha liiga\nvastu maad!")
self.grader.brake2(0.2)
if self.grader.carbody_ode.body.getLinearVel().length() < 2:
self.screens.tooltip.show("Hakka sõitma\nAnna gaasi!")
elif not self.dr1.isActive():
self.screens.tooltip.show("Lülita sisse külgvaade.")
elif self.grader.frontBlade.bladeTooUp():
self.screens.tooltip.show("Lase esibuldooser alla.")
elif self.grader.middleBlade.bladeTooUp():
self.screens.tooltip.show("Lase hõlm alla.")
elif self.grader.frontBlade.bladeTooStraight():
if self.mode == MODE_WINTER:
self.screens.tooltip.show("Hoia esibuldooserit viltuselt,\n et lumi ei koguneks saha ette!")
elif self.grader.middleBlade.bladeTooStraight():
self.screens.tooltip.show("Suuna hõlm väja,\nnii saad puhastada suuremat ala!")
elif p < 0.9:
self.screens.tooltip.show("Jälgi sõidutee äärejoont!")
else:
if (self.grader.bladeInGround()):
self.screens.tooltip.show("Ära suru sahku vastu maad\n üks sahkadest on liiga madalal!")
self.grader.brake2(0.2)
elif (self.grader.bladeTooUp()):
self.screens.tooltip.show("Vii sahad tööasendisse!")
elif self.grader.carbody_ode.body.getLinearVel().length() > 15:
self.screens.tooltip.show("Jälgi kiirust!")
elif p < 0.6:
self.screens.tooltip.show("Proovi püsida oma sõidusuunas.")
elif self.grader.frontBlade.bladeTooStraight():
self.screens.tooltip.show("Hoia esibuldooserit viltuselt,\n et lumi ei koguneks sahka ette!")
#self.grader.brake2(0.1)
elif self.grader.middleBlade.bladeTooStraight():
self.screens.tooltip.show("Suuna hõlm väja, \nnii saad puhastada suuremat ala!")
elif p < 0.9:
if self.level == 3:
self.screens.tooltip.show("Püsi sõidutee keskel!\n.")
else:
self.screens.tooltip.show("Jälgi sõidutee äärejoont!\n")
return task.cont
def createCamera(self, dispRegion, aspect):
camera=base.makeCamera(base.win,displayRegion=dispRegion, aspectRatio=aspect)
#camera.node().getLens().setViewHpr(x, y, z)
#camera.node().getLens().setFov(120)
camera.node().getLens().setNear(0.1)
#camera.node().getLens().setAspectRatio(aspect)
return camera
def setupScreenSingle(self, cam):
dr = base.camNode.getDisplayRegion(0)
if cam ==1:
self.cam1 = self.createCamera((0, 1, 0, 1), 45.0, 52.5, 0)
self.cam2 = NodePath(PandaNode("cam2"))
self.cam3 = NodePath(PandaNode("cam3"))
dr.setCamera(self.cam1)
elif cam == 2:
self.cam1 = NodePath(PandaNode("cam1"))
self.cam2 = self.createCamera((0, 1, 0, 1), 2.1389)
self.cam3 = NodePath(PandaNode("cam3"))
dr.setCamera(self.cam2)
self.cam2.node().getLens().setFov(120)
elif cam == 3:
self.cam1 = NodePath(PandaNode("cam1"))
self.cam2 = NodePath(PandaNode("cam2"))
self.cam3 = self.createCamera((0, 1, 0, 1), 45.0, 52.5, 0)
dr.setCamera(self.cam3)
def setupScreen(self):
# set the default display region to inactive so we can remake it
dr = base.camNode.getDisplayRegion(0)
dr.setActive(0)
#settings for main cam, which we will not really be displaying. Actually, this code might be
# unnecessary!
#base.camLens.setViewHpr(45.0, 52.5, 0)
#base.camLens.setFov(112)
# set up my dome-friendly display regions to reflect the dome geometry
window = dr.getWindow()
self.dr1 = window.makeDisplayRegion(0, 0.292, 0, 1)
self.dr1.setSort(1)
self.dr2 = window.makeDisplayRegion(0, 1, 0, 1)
self.dr2.setSort(0)
self.dr3 = window.makeDisplayRegion(0.708, 1, 0, 1)
self.dr3.setSort(1)
self.dr1.setClearColorActive(True)
self.dr2.setClearColorActive(True)
self.dr3.setClearColorActive(True)
self.dr1.setClearDepthActive(True)
self.dr2.setClearDepthActive(True)
self.dr3.setClearDepthActive(True)
self.dr1.setActive(False)
self.dr3.setActive(False)
camNode1 = Camera('cam1')
self.cam1 = NodePath(camNode1)
self.dr1.setCamera(self.cam1)
self.cam1.node().getLens().setAspectRatio(float(self.dr1.getPixelWidth()) / float(self.dr1.getPixelHeight()))
self.cam1.node().getLens().setNear(0.1)
camNode2 = Camera('cam2')
self.cam2 = NodePath(camNode2)
self.dr2.setCamera(self.cam2)
self.cam2.node().getLens().setAspectRatio(float(self.dr2.getPixelWidth()) / float(self.dr2.getPixelHeight()))
self.cam2.node().getLens().setNear(0.1)
camNode3 = Camera('cam3')
self.cam3 = NodePath(camNode3)
self.dr3.setCamera(self.cam3)
self.cam3.node().getLens().setAspectRatio(float(self.dr3.getPixelWidth()) / float(self.dr3.getPixelHeight()))
self.cam3.node().getLens().setNear(0.1)
print self.cam1.node().getLens().getFov()
print self.cam2.node().getLens().getFov()
print self.cam3.node().getLens().getFov()
self.cam1.node().getLens().setFov(55)
self.cam2.node().getLens().setFov(123)
self.cam3.node().getLens().setFov(55)
#self.cam1.reparentTo(base.camera)
# create four cameras, one per region, with the dome geometry. Note that we're not using the
# base cam. I tried this at first, pointing the base cam at region 1. It worked, but it threw the
# geometry off for some reason. The fix was to create four cameras, parent them to the base
# cam, and off we go.
#self.cam1 = self.createCamera((0, 0.292, 0, 1), float(dr1.getPixelWidth()) / float(dr1.getPixelHeight()))
#dr1.setCamera(self.cam1)
#self.cam2 = self.createCamera((0.292, 0.708, 0.2889, 1), float(dr1.getPixelWidth()) / float(dr1.getPixelHeight()))
#dr2.setCamera(self.cam2)
#self.cam3 = self.createCamera((0.708, 1, 0, 1), float(dr1.getPixelWidth()) / float(dr1.getPixelHeight()))
#dr3.setCamera(self.cam3)
# loading some baked-in model
self.cam1.reparentTo(base.cam)
self.cam2.reparentTo(base.cam)
self.cam3.reparentTo(base.cam)
def enterStartup(self):
print "enterStartup"
self.grader.stopEngine()
self.dr1.setActive(False)
self.dr3.setActive(False)
taskMgr.remove("paintSnowTask")
taskMgr.remove("progressTask")
self.env.models['road']['model'].reparentTo(render)
self.env.models['gravel3']['model'].detachNode()
self.env.models['gravel']['model'].reparentTo(render)
if self.mode == MODE_WINTER:
self.snow.clear()
else:
self.gravel.clear()
self.mode = MODE_SUMMER
self.env.selectSummer()
self.level = 0
self.grader.setPosQuat( Vec3(1329.5, 1628.75, 178.855), (0.971424, 0.0114102, 0.00194733, 0.237069), [
(Vec3(1320.86, 1637.8, 178.367), (-0.358772, 0.705538, -0.037394, 0.610001)),
(Vec3(1327.31, 1625.38, 178.054), (0.263939, 0.460471, 0.535744, 0.65672)),
(Vec3(1327.08, 1641.03, 178.379), (0.356115, 0.378157, 0.596817, 0.611547)),
(Vec3(1333.52, 1628.6, 178.065), (0.534824, -0.656632, 0.260793, -0.463448)),
(Vec3(1329.06, 1622.01, 177.984), (0.387032, 0.347022, 0.615535, 0.592367)),
(Vec3(1335.27, 1625.23, 177.995), (0.413413, -0.699113, 0.102229, -0.574352))
])
self.grader.reset2()
self.grader.HideSpeedMeter()
taskMgr.add(self.handleStartupInputs,"handleStartupInputsTask")
self.screens.startup.show()
self.grader.setSyncCamera(False)
self.cam2.setPos(CAMERA_MIDDLE_POS_STARTUP)
self.cam2.setHpr(CAMERA_MIDDLE_HPR_STARTUP)
print `base.camera.getQuat(render)`
### TEST
# self.gravel.load('level0_snow', 0.5)
### TEST
def exitStartup(self):
print "exitStartup"
taskMgr.remove("handleStartupInputsTask")
self.grader.startEngine()
Parallel(
Func(self.screens.startup.toggleButton),
Sequence(
Wait(2),
Func(self.screens.startup.hide)
)
).start()
def enterScenery(self):
#self.mode = MODE_WINTER
taskMgr.remove("paintSnowTask")
taskMgr.remove("progressTask")
Sequence(
Wait(2),
LerpPosHprInterval(self.cam2, pos=CAMERA_MIDDLE_POS_SCENERY, hpr=CAMERA_MIDDLE_HPR_SCENERY, duration=3),
# Func(self.env.selectWinter),
Func(taskMgr.add, self.handleSceneryInputs, "handleSceneryInputsTask"),
Func(self.screens.scenery.show)
).start()
def exitScenery(self):
self.screens.scenery.hide()
taskMgr.remove("handleSceneryInputsTask")
taskMgr.add(self.paintSnowTask, "paintSnowTask", taskChain = 'paintChain')
# taskMgr.add(self.paintSnowTask, "paintSnowTask")
def enterInstructions(self):
self.grader.HideSpeedMeter()
self.grader.SyncSideCameras()
Parallel(
Sequence(
LerpPosHprInterval(self.cam2, pos=CAMERA_MIDDLE_POS_INSTRUCTIONS, hpr=CAMERA_MIDDLE_HPR_INSTRUCTIONS, duration=4),
Func(self.screens.instructions.show),
Wait(7),
Func(taskMgr.add, self.handleInstructionsInputs, "handleInstructionsInputsTask"),
),
Sequence(
Wait(2),
Func(self.grader.reset)
)
).start()
def exitInstructions(self):
self.screens.instructions.hide()
taskMgr.remove("handleInstructionsInputsTask")
def showRestartDialog(self):
self.screens.restart.show()
taskMgr.remove("handleGameInputsTask")
taskMgr.remove("handleInstructionsInputsTask")
taskMgr.remove("handleSceneryInputsTask")
taskMgr.remove("handleStartupInputsTask")
taskMgr.remove("handleLevelCompletedInputsTask")
self.odeworld.EnableODETask(0)
taskMgr.add(self.handleRestartInputs,"handleRestartInputsTask")
def selectRestart(self):
self.restartMode = RESTART_MODE_RESTART
self.screens.restart.selectRestart()
def selectContinue(self):
self.restartMode = RESTART_MODE_CONTINUE
self.screens.restart.selectContinue()
def restartGame(self):
taskMgr.remove("handleRestartInputsTask")
self.odeworld.EnableODETask(3)
self.screens.restart.hide()
if self.restartMode == RESTART_MODE_RESTART:
#self.request('restart')
sys.exit()
elif self.state == 'Scenery':
taskMgr.add(self.handleSceneryInputs, "handleSceneryInputsTask")
elif self.state == 'Instructions':
taskMgr.add(self.handleInstructionsInputs, "handleInstructionsInputsTask")
elif self.state == 'Game':
taskMgr.add(self.handleGameInputs, "handleGameInputsTask")
elif self.state == 'LevelCompleted':
taskMgr.add(self.handleLevelCompletedInputs, "handleLevelCompletedInputsTask")
else:
print `self.state`
def enterGame(self):
#self.grader.setSyncCamera(True)
self.result = 0
self.resultCount = 0
self.levelStarted = True
self.firstPaint = True
#self.grader.stopEngine()
self.grader.stop()
self.grader.steer(0)
self.grader.brake2(1.0)
taskMgr.add(self.handleGameInputs,"handleGameInputsTask")
taskMgr.add(self.collisionTask, "collisionTask")
taskMgr.add(self.progressTask, "progressTask", taskChain = 'paintChain')
self.enableCollisions = True
self.grader.reset()
self.tasks = Sequence(
Func(self.grader.setSyncCamera, True),
# Func(self.grader.brake),
Func(self.grader.ShowSpeedMeter),
Func(self.grader.releasebrake),
Func(self.setupKeyboard),
)
self.tasks.start()
self.screens.game.show()
self.dr1.setActive(True)
self.dr3.setActive(True)
if self.level == 3:
Sequence(
Func(self.carAnim1.play),
Wait(60),
Func(self.carAnim2.play),
).start()
def exitGame(self):
self.dr1.setActive(False)
self.dr3.setActive(False)
self.tasks = Sequence(
Wait(0.5),
Func(self.grader.setSyncCamera, False)
).start()
self.screens.game.hide()
#self.disableInputs()
taskMgr.remove("handleGameInputsTask")
taskMgr.remove("collisionTask")
taskMgr.remove("progressTask")
if self.level == 3:
self.carAnim1.seq.clearIntervals()
self.carAnim2.seq.clearIntervals()
def defaultEnter(self):
print "defaultEnter"
def defaultExit(self):
print "defaultExit"
def selectScenerySummer(self):
self.mode = MODE_SUMMER
self.screens.scenery.selectSummer()
self.env.selectSummer()
def selectSceneryWinter(self):
self.mode = MODE_WINTER
self.screens.scenery.selectWinter()
self.env.selectWinter()
def enterLevelStart(self):
self.message=""
self.levelStarted = False
self.box.detachNode()
self.box2.detachNode()
if self.mode == MODE_WINTER:
self.snow.clear()
else:
self.gravel.clear()
self.env.models['road']['model'].detachNode()
self.env.models['gravel3']['model'].reparentTo(render)
self.env.models['gravel']['model'].detachNode()
if self.level < 4:
if self.level == 0:
self.enterLevel0Intro()
elif self.level == 1:
self.enterLevel1Intro()
elif self.level == 2:
self.enterLevel2Intro()
elif self.level == 3:
self.enterLevel3Intro()
Sequence(
Wait(1),
Func(taskMgr.add, self.handleLevelStartInputs, "handleLevelStartInputsTask"),
).start()
else:
self.level = 0
self.demand('gamecompleted')
def exitLevelStart(self):
if self.level == 0:
self.exitLevel0Intro()
elif self.level == 1:
self.exitLevel1Intro()
elif self.level == 2:
self.exitLevel2Intro()
elif self.level == 3:
self.exitLevel3Intro()
taskMgr.remove("handleLevelStartInputsTask")
def enterNextLevelStart(self):
# if self.result > 0.65 or ((self.level == 0 or self.mode == MODE_SUMMER) and self.result >= 0):
if self.result > 0.65 or ((self.level == 0) and self.result >= 0):
self.level = self.level + 1
self.demand('next')
def exitNextLevelStart(self):
self.exitLevelStart()
def enterLevelCompleted(self):
self.screens.game.show()
# if self.result > 0.65 or (self.result >= 0 and self.mode == MODE_SUMMER):
if self.result > 0.65:
if self.level != 0:
if self.result > 0.85:
self.message = "Töötulemus on väga hea."
elif self.result > 0.75:
self.message = "Töötulemus on hea."
elif self.result > 0.5:
self.message = "Töötulemus on rahuldav."
else:
self.message = "Töötulemus on kehv."
result = 0
elif self.result > 0:
if self.level != 0:
self.message = "Kahjuks ei saa töötulemusega raule jääda."
result = 1
else:
result = 0
else:
result = 1
print 'result: ' + `(self.screens.game.items.has_key('progress'), self.result)`
if self.screens.game.items.has_key('progress'):
self.screens.game.items['progress'].setTexOffset(self.screens.game.stage, 1-self.result, 0)
if self.level == 0:
self.screens.level0end.show(result, self.message)
if result == 0:
Sequence(
Wait(0.5),
LerpPosHprInterval(self.cam2, pos=Vec3(1461.36, 1640.03, 204.903), hpr=Vec3(11.5892, -35.0444, -5.33993), duration=4),
).start()
elif self.level == 1:
self.screens.level1end.show(result, self.message)
if result == 0:
Sequence(
Wait(0.5),
LerpPosHprInterval(self.cam2, pos=Vec3(1928.22, 918.771, 191.255), hpr=Vec3(16.1904, -35.3418, 2.035), duration=4),
).start()
elif self.level == 2:
self.screens.level2end.show(result, self.message)
if result == 0:
Sequence(
Wait(0.5),
LerpPosHprInterval(self.cam2, pos=Vec3(3117.02, 736.265, 138.927), hpr=Vec3(61.9806, -34.8236, -6.60664), duration=4),
).start()
elif self.level == 3:
self.screens.level3end.show(result, self.message)
if result == 0:
Sequence(
Wait(0.5),
LerpPosHprInterval(self.cam2, pos=Vec3(3946.69, 177.991, 155.82), hpr=Vec3(30.5327, -40.8385, 3.76417), duration=4),
).start()
Sequence(
Wait(2),
Func(taskMgr.add, self.handleLevelCompletedInputs, "handleLevelCompletedInputsTask"),
).start()
def exitLevelCompleted(self):
if self.level == 0:
self.screens.level0end.hide()
elif self.level == 1:
self.screens.level1end.hide()
elif self.level == 2:
self.screens.level2end.hide()
elif self.level == 3:
self.screens.level3end.hide()
taskMgr.remove("handleLevelCompletedInputsTask")
self.screens.game.hide()
def enterGameCompleted(self):
self.screens.gamecompleted.show(self.mode)
taskMgr.add(self.handleGameCompletedInputs, "handleGameCompletedInputsTask")
def exitGameCompleted(self):
self.screens.gamecompleted.hide()
taskMgr.remove("handleGameCompletedInputsTask")
def enterLevel0Intro(self):
self.grader.setPosQuat( Vec3(1237.39, 2098.63, 172.243), (-0.243187, -0.0112304, 0.0216488, 0.969673), [
(Vec3(1246.15, 2089.69, 172.09), (-0.698979, -0.4016, 0.586818, 0.0760983)),
(Vec3(1239.56, 2102.02, 171.447), (0.151114, -0.540025, -0.462597, 0.686688)),
(Vec3(1239.98, 2086.38, 172.015), (-0.120347, -0.675035, -0.223494, 0.692744)),
(Vec3(1233.39, 2098.71, 171.367), (-0.581909, 0.0740049, 0.707203, -0.394675)),
(Vec3(1237.77, 2105.37, 171.273), (-0.334631, -0.711114, 0.00308959, 0.618328)),
(Vec3(1231.6, 2102.06, 171.192), (0.559281, -0.112885, -0.702012, 0.426194))
])
if self.mode == MODE_WINTER:
self.snow.load('level0_snow', 0.4)
else:
self.gravel.load('level0_snow', 0.5)
"""
self.grader.setPosQuat( Vec3(1157.7, 2245.08, 162.98), (-0.214638, -0.0112082, 0.0249852, 0.97631), [
(Vec3(1165.93, 2235.63, 162.901), (0.657691, 0.507436, -0.49805, -0.248792)),
(Vec3(1160.07, 2248.33, 162.175), (-0.680531, -0.126562, 0.699714, -0.176805)),
(Vec3(1159.57, 2232.7, 162.824), (0.553144, -0.163703, -0.691947, 0.4341)),
(Vec3(1153.71, 2245.39, 162.095), (0.42493, 0.694539, -0.152244, -0.560242)),
(Vec3(1158.48, 2251.77, 161.97), (0.361738, -0.39991, -0.587945, 0.602941)),
(Vec3(1152.12, 2248.83, 161.895), (-0.155291, -0.695343, -0.148518, 0.685802))
])
"""
self.grader.setSyncCamera(True)
self.screens.level0start.show(self.mode, self.message)
def exitLevel0Intro(self):
self.screens.level0start.hide()
def enterLevel1Intro(self):
if self.mode == MODE_WINTER:
self.snow.load('level1_snow',0.7)
else:
self.gravel.load('level1_snow', 0.7)
self.grader.setPosQuat( Vec3(1488.8, 1656.08, 176.329), (0.273734, -0.0191399, 0.0101874, -0.961561), [
(Vec3(1498.06, 1647.73, 175.112), (0.525507, -0.134574, -0.682844, 0.489343)),
(Vec3(1490.69, 1659.63, 175.538), (0.432702, -0.250937, -0.649227, 0.572977)),
(Vec3(1492.11, 1644.05, 175.306), (-0.6919, -0.511637, 0.471857, 0.191974)),
(Vec3(1484.75, 1655.95, 175.759), (0.231923, -0.441254, -0.538183, 0.679607)),
(Vec3(1488.69, 1662.86, 175.649), (0.60394, 0.628471, -0.299015, -0.38842)),
(Vec3(1482.75, 1659.17, 175.88), (0.621054, 0.614165, -0.327753, -0.360098))
])
self.grader.setSyncCamera(True)
self.screens.level1start.show(self.mode)
def exitLevel1Intro(self):
self.screens.level1start.hide()
def enterLevel2Intro(self):
if self.mode == MODE_WINTER:
self.snow.load('level2_snow', 1)
else:
self.gravel.load('level2_snow', 0.7)
self.grader.setPosQuat( Vec3(1911.25, 1011.45, 137.853), (0.328843, -0.0337757, 0.0238795, -0.943478), [
(Vec3(1921.35, 1004.25, 136.098), (-0.6916, -0.569954, 0.388763, 0.213787)),
(Vec3(1912.69, 1015.21, 137.085), (-0.584949, -0.029296, 0.68931, -0.426413)),
(Vec3(1915.88, 999.894, 136.485), (0.0932334, 0.590587, 0.356586, -0.717887)),
(Vec3(1907.22, 1010.85, 137.401), (0.640559, 0.631666, -0.277436, -0.337214)),
(Vec3(1910.34, 1018.18, 137.364), (0.322465, 0.674626, 0.144311, -0.648128)),
(Vec3(1904.86, 1013.82, 137.659), (-0.722169, -0.391482, 0.568087, -0.0499004)),
])
self.grader.setSyncCamera(True)
self.screens.level2start.show(self.mode)
def exitLevel2Intro(self):
self.screens.level2start.hide()
def enterLevel3Intro(self):
if self.mode == MODE_WINTER:
self.snow.load('level3_snow', 1.5)
#self.env.models['ekskavaator']['model'].detachNode()
self.env.models['ekskavaator']['ode_geom'].geom.setPosition(Vec3(0,0,0))
self.env.models['golf']['model'].reparentTo(render)
self.env.models['golf']['ode_geom'].geom.setPosition(Vec3(3698.60, 901.72, 65.26))
self.env.models['golf']['model'].setPos(Vec3(3698.60, 901.72, 65.26))
else:
self.gravel.load('level3_snow', 0.7)
self.env.models['ekskavaator']['model'].reparentTo(render)
self.env.models['ekskavaator']['ode_geom'].geom.setPosition(Vec3(3711.89, 826.64, 69.95))
self.env.models['ekskavaator']['model'].setPos(Vec3(3711.89, 826.64, 69.95))
self.env.models['golf']['model'].detachNode()
self.env.models['golf']['ode_geom'].geom.setPosition(Vec3(0,0,0))
self.box.reparentTo(render)
self.box2.reparentTo(render)
self.grader.setPosQuat( Vec3(3037.76, 844.969, 49.4659), (0.845311, -0.0145352, -0.00145039, -0.534075), [
(Vec3(3047.09, 853.259, 48.3665), (0.415554, 0.613273, -0.338986, 0.579913)),
(Vec3(3034.45, 847.252, 48.7086), (-0.242749, 0.067758, -0.69745, 0.670854)),
(Vec3(3050.09, 846.939, 48.4952), (-0.266277, 0.0431386, -0.699373, 0.661901)),
(Vec3(3037.45, 840.932, 48.8311), (0.699634, 0.679449, 0.171428, 0.139548)),
(Vec3(3031.01, 845.621, 48.8018), (0.350414, 0.0485753, 0.699058, -0.621424)),
(Vec3(3034.02, 839.301, 48.9227), (0.563349, 0.314958, 0.625976, -0.437715))
])
self.grader.setSyncCamera(True)
self.screens.level3start.show(self.mode)
def exitLevel3Intro(self):
self.screens.level3start.hide()
try:
w = GraderSimulator()
run()
joystick.quit()
pygame.quit()
except:
pass | mit | -415,815,451,109,733,500 | 42.000663 | 147 | 0.518733 | false |
scharch/SOAnAR | mGSSP/5.4-compare_profiles.py | 1 | 3518 | #!/usr/bin/env python3
'''
5.4-compare_profiles.py
This script reads in GSSPs constructed by 5.2-make_profiles.py and calculates
the matrix of Jensen-Shannon divergences between all GSSPs, the rarity
of each substitution observed in each GSSP, and the Shannon entropy of
the subsitutions observed at each position of each gene.
NOTE: GSSP files are assumed to have names of the form "sourceData-info_profile.txt".
This program therefore uses everything up to the first underscore as the name
of the profile in the output. Thus:
`5.3-compare_profiles.py trial Condition_1_profile.txt Condition_2_profile.txt`
will produce output that compares "Condition" to "Condition", obfuscating which
one came from which original source.
Usage: 5.4-compare_profiles.py <outHead> GSSP...
Options:
<outHead> Stem of file name for output. Three files will be generated:
<outHead>_jsdMatrix.txt, for all v. all JSD;
<outHead>_rarity.txt, with average and stddev of rarities; and
<outHead>_entropy.txt,weighted average shannon entropy for each gene/dataset
GSSP One or more text files containing GSSPs generated by 5.2-make_profiles.py
Added to SONAR as part of mGSSP on 2017-02-24.
Edited to use Py3 and DocOpt by CAS 2018-08-29.
Renamed as 5.4 by CAS 2018-09-05.
Copyright (c) 2011-2018 Columbia University and Vaccine Research Center, National
Institutes of Health, USA. All rights reserved.
'''
import sys, csv
from docopt import docopt
import pandas
try:
from SONAR.mGSSP import *
except ImportError:
find_SONAR = sys.argv[0].split("SONAR/mGSSP")
sys.path.append(find_SONAR[0])
from SONAR.mGGSP import *
def main():
#open outputs
rHandle = open( "%s_rarity.txt"%arguments['<outHead>'], "w" )
rWriter = csv.writer(rHandle, delimiter = "\t")
rWriter.writerow( ['dataset', 'Vgene', 'position', 'germline', 'mutation', 'rarity', 'stdv_r'] )
eHandle = open( "%s_entropy.txt"%arguments['<outHead>'], "w" )
eWriter = csv.writer(eHandle, delimiter = "\t")
eWriter.writerow( ['dataset', 'Vgene', 'entropy'] )
#load all the data (hopefully this shouldn't kill memory)
data = []
for dataset in arguments['GSSP']:
data.append( GSSP( dataset, name=dataset.split("/")[-1].split("_")[0] ) )
print( "Data loaded!" )
#iterate through the list and run calculations
bigMatrix = [ [] for dummy in range(len(data)) ]
for x, spectrum1 in enumerate(data):
print( "Running calculations on %s..." % spectrum1.name )
#single dataset calculations
spectrum1.computeRarity()
for row in spectrum1.rarity:
rWriter.writerow( row )
spectrum1.profileEntropy(use_all=False)
for row in spectrum1.entropy:
eWriter.writerow( row )
#now do the big distance matrix
bigMatrix[x].append( spectrum1.betweenV() )
for y, spectrum2 in enumerate(data[x+1:]):
offDiagonal = spectrum1.compare(spectrum2)
bigMatrix[x].append( offDiagonal )
bigMatrix[x+y+1].append( offDiagonal.transpose() )
#now tell pandas to make it all into one big matrix and write it
combinedRows = []
for row in bigMatrix:
combinedRows.append(pandas.concat(row))
full = pandas.concat(combinedRows, axis=1)
full.to_csv("%s_jsdMatrix.txt"%sys.argv[1], sep="\t", float_format="%.3f", na_rep="NA")#, index=False)
#clean up
rHandle.close()
eHandle.close()
if __name__ == "__main__":
arguments = docopt(__doc__)
#log command line
logCmdLine(sys.argv)
main()
| gpl-3.0 | 3,715,301,078,919,269,000 | 31.878505 | 103 | 0.691586 | false |
rustychris/stomel | src/live_dt.py | 1 | 81184 | # Maintain a live constrained delaunay triangulation of the grid.
# designed as a mixin
import traceback
import sys
import pdb
import orthomaker,trigrid
from collections import Iterable
import numpy as np
class MissingConstraint(Exception):
    """Raised when a constrained edge that should be present in the CGAL
    triangulation cannot be found - e.g. safe_insert_constraint() inserted
    a constraint but could not locate it among the incident constraints
    afterwards."""
    pass
def distance_left_of_line(pnt, qp1, qp2):
    """Signed measure of how far pnt lies to the left of the directed
    line qp1->qp2.

    Positive when pnt is left of the line, negative when right, zero when
    collinear.  The magnitude is not the true perpendicular distance (the
    direction vector is not normalized), but the sign is correct and the
    value is monotone in the true distance, which is all callers need.
    """
    seg = qp2 - qp1
    # dot product of (pnt - qp1) with the left-hand normal (-seg_y, seg_x)
    return (pnt[0] - qp1[0]) * (-seg[1]) + (pnt[1] - qp1[1]) * seg[0]
try:
# import CGAL
from CGAL.CGAL_Triangulation_2 import Constrained_Delaunay_triangulation_2
from CGAL.CGAL_Kernel import Point_2
cgal_bindings = 'new'
from collections import defaultdict
import safe_pylab as pylab
from matplotlib import collections
from array_append import array_append
from numpy.linalg import norm,solve
import field
class Edge(object):
def __init__(self,**kwargs):
self.__dict__.update(kwargs)
def vertices(self):
return self.f.vertex( (self.v+1)%3 ),self.f.vertex( (self.v+2)%3 )
    class LiveDtGrid(orthomaker.OrthoMaker):
        """Grid mixin that keeps a CGAL constrained Delaunay triangulation
        (self.DT) in sync with the grid's nodes and edges as they change."""
        # presumably queried by client code to detect DT support - not
        # referenced within this chunk; TODO confirm against callers
        has_dt = 1
        # if true, skips graph API handling
        freeze=0
        # if true, stores up modified nodes and edges, and
        # updates all at once upon release
        holding = 0
        # queue of conflicting edges that have been un-constrained to allow for
        # an add_edge() to proceed
        # NOTE(review): this is a mutable class-level list, shared by all
        # instances until an instance rebinds it - verify that is intended.
        pending_conflicts = []
        # edge ids queued during release() for re-insertion via dt_add_edge()
        edges_to_release = None
        # triangles in the "medial axis" with a radius r < density/scale_ratio_for_cutoff
        # will be removed.
        # the meaning of this has changed slightly - 1/9/2013
        # now it is roughly the number of cells across a channel to make room for.
        # so at 1.0 it will retain channels which are one cell wide (actually there's a
        # bit of slop - room for 1.3 cells or so).
        # at 2.0, you should get 2-3 cells across.
        scale_ratio_for_cutoff = 1.0
        def __init__(self,*args,**kwargs):
            """Construct the underlying grid, then build the companion
            triangulation from its current nodes and edges."""
            super(LiveDtGrid,self).__init__(*args,**kwargs)
            self.populate_dt()
def check(self):
return
print " --checkplot %05i--"%self.check_i
pylab.figure(10)
pylab.clf()
self.plot_dt()
if self.default_clip is not None:
self.plot_nodes()
pylab.axis(self.default_clip)
pylab.title("--checkplot %05i--"%self.check_i)
pylab.savefig('tmp/dtframe%05i.png'%self.check_i)
self.check_i += 1
pylab.close(10)
        def refresh_metadata(self):
            """ Should be called when all internal state is changed outside
            the mechanisms of add_X, delete_X, move_X, etc.
            """
            super(LiveDtGrid,self).refresh_metadata()
            # incremental updates were bypassed, so rebuild the DT from scratch
            self.populate_dt()
def populate_dt(self):
""" Initialize a triangulation with all current edges and nodes.
"""
print "populate_dt: top"
self.DT = Constrained_Delaunay_triangulation_2()
self.vh = np.zeros( (self.Npoints(),) ,'object')
# sometimes CGAL creates vertices automatically, which are detected by
# having info == None
self.vh_info = defaultdict(lambda:None)
print "populate_dt: adding points"
for n in range(self.Npoints()):
if n % 50000==0:
print "populate_dt: %d/%d"%(n,self.Npoints())
# skip over deleted points:
if np.isfinite(self.points[n,0]):
pnt = Point_2( self.points[n,0], self.points[n,1] )
self.vh[n] = self.DT.insert( pnt )
self.vh_info[self.vh[n]] = n
print "populate_dt: add constraints"
for e in range(self.Nedges()):
if e % 50000==0:
print "populate_dt: %d/%d"%(e,self.Nedges())
a,b = self.edges[e,:2]
if a>=0 and b>=0: # make sure we don't insert deleted edges
self.safe_insert_constraint(a,b)
print "populate_dt: end"
def safe_insert_constraint(self,a,b):
""" adds a constraint to the DT, but does a few simple checks first
if it's not safe, raise an Exception
"""
if a < 0 or b < 0 or a==b:
raise Exception,"invalid node indices: %d %d"%(a,b)
if all(self.points[a] == self.points[b]):
raise Exception,"invalid constraint: points[%d]=%s and points[%d]=%s are identical"%(a,self.points[a],
b,self.points[b])
if self.verbose > 2:
print " Inserting constraint (populate_dt): %d %d %s %s"%(a,b,self.vh[a],self.vh[b])
print " node A=%s node B=%s"%(self.points[a],self.points[b])
print " A.point=%s B.point=%s"%(self.vh[a].point(), self.vh[b].point())
self.DT.insert_constraint( self.vh[a], self.vh[b] )
# double check to make sure that it's actually in there...
found_it=0
for edge in self.incident_constraints(self.vh[a]):
v1,v2 = edge.vertices()
if v1==self.vh[b] or v2==self.vh[b]:
found_it = 1
break
if not found_it:
# we have a conflict - search from a to b
raise MissingConstraint,"Just tried to insert a constraint %d-%d (%s - %s), but it's not there!"%(a,b,
self.points[a],
self.points[b])
## Hold/release
def hold(self):
if self.holding == 0:
self.holding_nodes = {}
self.holding += 1
    def release(self):
        """ End one level of hold().  When the outermost hold is released,
        replay the queued work: rebuild DT vertices for every held node and
        re-insert the constraints of their current edges.
        Returns the new hold depth.
        """
        if self.holding == 0:
            raise Exception,"Tried to release, but holding is already 0"
        self.holding -= 1
        if self.holding == 0:
            # First, make sure that we have enough room for new nodes:
            while len(self.vh) < self.Npoints():
                self.vh = array_append(self.vh,0)
            held_nodes = self.holding_nodes.keys()
            # Remove all of the nodes that were alive when we started
            # the hold:
            for n in held_nodes:
                # NOTE(review): identity test against the int literal 0 -
                # the inline comment suggests it was a deliberate change
                # from !=, presumably because vertex-handle != misbehaves.
                # Confirm before "fixing".
                if self.vh[n] is not 0: # used to != 0
                    self.DT.remove_incident_constraints( self.vh[n] )
                    self.dt_remove(n)
            # Add back the ones that are currently valid
            # (deleted nodes have nan coordinates)
            for n in held_nodes:
                if np.isfinite(self.points[n,0]):
                    self.dt_insert(n)
            # Add back edges for each one
            held_edges = {}
            for n in held_nodes:
                for e in self.pnt2edges(n):
                    held_edges[e] = 1
            self.edges_to_release = list(held_edges.keys())
            while len(self.edges_to_release) > 0:
                e = self.edges_to_release.pop()
                # call dt_add_edge to get all of the conflicting-edge-detecting
                # functionality.
                self.dt_add_edge(e)
            self.edges_to_release = None
            self.holding_nodes=0
        return self.holding
def dt_insert(self,n):
""" Given a point that is correctly in self.points, and vh that
is large enough, do the work of inserting the node and updating
the vertex handle.
"""
pnt = Point_2( self.points[n,0], self.points[n,1] )
self.vh[n] = self.DT.insert(pnt)
self.vh_info[self.vh[n]] = n
if self.verbose > 2:
print " dt_insert node %d"%n
self.check()
def dt_remove(self,n):
self.DT.remove( self.vh[n] )
del self.vh_info[self.vh[n]]
self.vh[n] = 0
if self.verbose > 2:
print " dt_remove node %d"%n
self.check()
    def dt_update(self,n):
        """ Move node n's DT vertex to the node's current coordinates.
        Removes constraints incident to n, removes/re-inserts the vertex,
        then restores exactly the constraints that were removed (not the
        edges from pnt2edges(), which can disagree mid-release).
        """
        if self.verbose > 2:
            print "    dt_update TOP: %d"%n
            self.check()
        # have to remove any old constraints first:
        n_removed = 0
        to_remove = []
        # probably unnecessary, but queue the deletions to avoid any possibility
        # of confusing the iterator
        for edge in self.incident_constraints(self.vh[n]):
            n_removed += 1
            v1,v2 = edge.vertices()
            vi1 = self.vh_info[v1]
            vi2 = self.vh_info[v2]
            to_remove.append( (edge, vi1, vi2) )
            if self.verbose > 2:
                # weird stuff is happening in here, so print out some extra
                # info
                print "    dt_update: found old constraint %s-%s"%(vi1,vi2)
        # sanity: the DT constraints should match the grid's edges for n,
        # except for edges still queued in edges_to_release
        if n_removed != len(self.pnt2edges(n)):
            print "  WARNING: was going to remove them, but n_removed=%d, but pnt2edges shows"%n_removed
            # How many of this point's edges are in the queue to be added?
            count_unreleased = 0
            if self.edges_to_release:
                for e in self.pnt2edges(n):
                    if e in self.edges_to_release:
                        count_unreleased += 1
            if n_removed + count_unreleased != len(self.pnt2edges(n)):
                print self.edges[self.pnt2edges(n),:2]
                print "Even after counting edges that are queued for release, still fails."
                raise Exception,"Something terrible happened trying to update a node"
        for edge,a,b in to_remove:
            if cgal_bindings == 'new':
                self.DT.remove_constrained_edge(edge.f,edge.v)
            else:
                self.DT.remove_constraint(edge.f,edge.v)
        self.dt_remove(n)
        self.dt_insert(n)
        # add back any of the constraints that we removed.
        # This used to add all constraints involving n, but if we are in the middle
        # of a release, pnt2edges() will not necessarily give the same edges as
        # constraints
        all_pairs = []
        for edge,a,b in to_remove:
            all_pairs.append( (a,b) )
            self.safe_insert_constraint(a,b)
            n_removed -= 1
        # every removed constraint must have been re-inserted exactly once
        if n_removed != 0:
            print " WARNING: in updating node %d, removed-added=%d"%(n,n_removed)
            print "  Inserted edges were ",all_pairs
            raise Exception,"why does this happen?"
        if self.verbose > 2:
            print "    dt_update END: %d"%n
            self.check()
    def dt_add_edge(self,e):
        """ Insert grid edge e as a constraint in the DT.  Before inserting,
        check whether the segment would cross existing constrained edges,
        and if so try a few times to nudge the offending nodes out of the
        way via prepare_conflicting_edges().
        """
        a,b = self.edges[e,:2]
        ### Try to anticipate unsafe connections -
        for i in range(3): # try a few times to adjust the conflicting nodes
            constr_edges = self.check_line_is_clear(a,b)
            if len(constr_edges)>0:
                print "--=-=-=-=-=-= Inserting this edge %d-%d will cause an intersection -=-=-=-=-=-=-=--"%(a,b)
                for v1,v2 in constr_edges:
                    print "  intersects constrained edge: %d - %d"%(self.vh_info[v1],self.vh_info[v2])
                if self.verbose > 1:
                    if i==0:
                        self.plot(plot_title="About to prepare_conflicting_edges")
                        pylab.plot(self.points[[a,b],0],
                                   self.points[[a,b],1],'m')
                # Debugging:
                # raise Exception,"Stopping before trying to fix conflicting edges"
                self.prepare_conflicting_edges(e,constr_edges)
            else:
                break
        ###
        self.safe_insert_constraint(a,b)
        # normalize ordering only for the log message below
        if a>b:
            a,b=b,a
        if self.verbose > 2:
            print "    dt_add_edge: adding constraint %d->%d"%(a,b)
            self.check()
    def prepare_conflicting_edges(self,e,constr_edges):
        """ Make room for inserting edge e: for each conflicting constrained
        edge, move the endpoint that lies on the closed side of e a small
        distance onto the open side, so the new constraint can go in.

        e: grid edge index about to be inserted
        constr_edges: pairs of CGAL vertex handles for conflicting edges
        """
        # First figure out which side is "open"
        # We should only be called when the data in self.edges has already
        # been taken care of, so it should be safe to just consult our cell ids.
        a,b = self.edges[e,:2]
        # arrange for a -> b to have the open side to its right
        # (cells -1/-2 in columns 3,4 mark unmeshed sides - TODO confirm
        # the exact meaning of -1 vs -2 against the rest of the file)
        if self.edges[e,3] >= 0 and self.edges[e,4] >= 0:
            print "prepare_conflicting_edges: both sides are closed!"
            return
        if self.edges[e,3] == -1 and self.edges[e,4] != -1:
            a,b = b,a
        elif self.edges[e,4] == -1:
            pass
        elif self.edges[e,3] == -2 and self.edges[e,4] != -2:
            a,b = b,a
        # otherwise it's already in the correct orientation
        print "prepare_conflicting_edges: proceeding for edge %d-%d"%(a,b)
        AB = self.points[b] - self.points[a]
        # unit normal pointing to the open (right) side of a->b
        open_dir = np.array( [AB[1],-AB[0]] )
        mag = np.sqrt(AB[0]**2+AB[1]**2)
        AB /= mag
        open_dir /= mag
        to_move = [] # node index for nodes that need to be moved.
        for cgal_edge in constr_edges:
            vh_c,vh_d = cgal_edge
            c = self.vh_info[vh_c]
            d = self.vh_info[vh_d]
            if c is None:
                print "No node data for conflicting vertex %s"%vh_c
                continue
            if d is None:
                print "No node data for conflicting vertex %s"%vh_d
                continue
            # 2. which one is on the closed side?
            # beta is the signed distance from the a-b line, positive on the
            # open side
            c_beta = np.dot( self.points[c] - self.points[a],
                             open_dir)
            d_beta = np.dot( self.points[d] - self.points[a],
                             open_dir)
            if c_beta < 0 and d_beta >= 0:
                to_move.append(c)
            elif d_beta < 0 and c_beta >= 0:
                to_move.append(d)
            else:
                print "Neither node in conflicting edge appears to be on the closed side"
        to_move = np.unique(to_move)
        eps = mag / 50.0
        for n in to_move:
            beta = np.dot( self.points[n] - self.points[a], open_dir)
            if beta >= 0:
                raise Exception,"Only nodes with beta<0 should be in this list!"
            # shift the node to eps beyond the a-b line on the open side
            new_point = self.points[n] - (beta-eps)*open_dir
            print "prepare_conflicting_edges: Moving node %d to %s"%(n,new_point)
            self.move_node(n,new_point)
    def dt_remove_edge(self,e,nodes=None):
        """ Remove the given edge from the triangulation.  In cases
        where the edge e has already been updated with different nodes,
        pass in nodes as [a,b] to remove the edge as it was.

        Raises MissingConstraint when no a-b constraint exists in the DT.
        """
        if nodes is not None:
            a,b = nodes
        else:
            a,b = self.edges[e,:2]
        ## DBG
        if a>b:
            a,b=b,a
        if self.verbose > 2:
            print "    remove constraint %d->%d"%(a,b)
            self.check()
        ## /DBG
        # have to figure out the face,index for this edge
        found_edge = 0
        for edge in self.incident_constraints(self.vh[a]):
            v1,v2 = edge.vertices()
            if self.vh[b] == v1 or self.vh[b] == v2:
                if cgal_bindings == 'new':
                    self.DT.remove_constrained_edge(edge.f,edge.v)
                else:
                    self.DT.remove_constraint( edge.f, edge.v )
                return
        raise MissingConstraint,"Tried to remove edge %i, but it wasn't in the constrained DT"%e
# wrappers to ease access to the DT
def incident_constraints(self,vh):
if cgal_bindings=='new':
constraints = []
self.DT.incident_constraints(vh,constraints)
return [Edge(f=e.first,v=e.second) for e in constraints]
else:
return [Edge(f=e.face(),v=e.vertex()) for e in self.DT.incident_constraints( vh )]
### API for adding/moving/deleting
## NODES
    def add_node(self,P):
        """ Add a node at point P, mirroring the insertion in the DT.
        If the new vertex lands exactly on an existing constraint, CGAL
        splits that constraint; the interrupted endpoints are recorded via
        push_op so an undo can restore it with uninterrupt_constraint().
        Returns the new node index.
        """
        n = super(LiveDtGrid,self).add_node(P)
        if self.freeze:
            pass
        elif self.holding:
            self.holding_nodes[n] = 'add_node'
        else:
            self.vh = array_append(self.vh,0)
            self.dt_insert(n)
            # tricky - a new node may interrupt some existing
            # constraint, but when the node is removed the
            # constraint is not remembered - so check for that
            # explicitly -
            interrupted_edge = []
            for edge in self.incident_constraints(self.vh[n]):
                a,b = edge.vertices()
                # collect the endpoint that isn't the new node itself
                if self.vh_info[a] != n:
                    interrupted_edge.append(self.vh_info[a])
                else:
                    interrupted_edge.append(self.vh_info[b])
            if len(interrupted_edge):
                self.push_op(self.uninterrupt_constraint,interrupted_edge)
        return n
def uninterrupt_constraint(self,ab):
print "Uninterrupting a constraint. Yes!"
self.safe_insert_constraint(ab[0],ab[1])
def unmodify_edge(self, e, old_data):
""" a bit unsure of this... I don't know exactly where this
gets done the first time
"""
a,b = self.edges[e,:2]
n = super(LiveDtGrid,self).unmodify_edge(e,old_data)
if a!=old_data[0] or b!=old_data[1]:
print "unmodifying live_dt edge"
self.safe_insert_constraint(old_data[0],old_data[1])
def unadd_node(self,old_length):
if self.freeze:
pass
elif self.holding:
for i in range(old_length,len(self.points)):
self.holding_nodes[i] = 'unadd'
else:
for i in range(old_length,len(self.points)):
self.dt_remove(i)
self.vh = self.vh[:old_length]
super(LiveDtGrid,self).unadd_node(old_length)
if not (self.freeze or self.holding):
print "HEY - this would be a good time to refresh the neighboring constraints"
def delete_node(self,i,*args,**kwargs):
# there is a keyword argument, remove_edges
# does that need to be interpreted here?
if self.freeze:
pass
elif self.holding:
self.holding_nodes[i] = 'delete_node'
super(LiveDtGrid,self).delete_node(i,*args,**kwargs)
if not self.freeze and not self.holding:
self.dt_remove( i )
def undelete_node(self,i,p):
super(LiveDtGrid,self).undelete_node(i,p)
if self.freeze:
pass
elif self.holding:
self.holding_nodes[i] = 'undelete'
else:
self.dt_insert(i)
def unmove_node(self,i,orig_val):
super(LiveDtGrid,self).unmove_node(i,orig_val)
if self.freeze:
pass
elif self.holding:
self.holding_nodes[i] = 'unmove'
else:
self.dt_update(i)
    def move_node(self,i,new_pnt,avoid_conflicts=True):
        """ avoid_conflicts: if the new location would cause a
        self-intersection, don't move it so far...

        if the location is modified, return the actual location, otherwise
        return None

        NOTE(review): avoid_conflicts is never consulted in this body -
        the adjustment is always applied when not frozen/holding; confirm
        against callers before relying on it.
        """
        if not self.freeze and not self.holding:
            # pre-emptively remove constraints and the vertex
            # so that there aren't conflicts between the current
            # edges and the probe point.
            # See if the location will be okay -
            to_remove = []
            nbrs = [] # neighbor nodes, based only on constrained edges
            for edge in self.incident_constraints(self.vh[i]):
                v1,v2 = edge.vertices()
                vi1 = self.vh_info[v1]
                vi2 = self.vh_info[v2]
                to_remove.append( (edge, vi1, vi2) )
                if vi1 == i:
                    nbrs.append(vi2)
                else:
                    nbrs.append(vi1)
            if len(to_remove) != len(self.pnt2edges(i)):
                # why is this a warning here, but for unmove_node we bail out?
                # I'm not really sure how this happens in the first place...
                # this was a warning, but investigating...
                raise Exception,"WARNING: move_node len(DT constraints) != len(pnt2edges(i))"
            for edge,a,b in to_remove:
                if cgal_bindings=='old':
                    self.DT.remove_constraint(edge.f,edge.v)
                else:
                    self.DT.remove_constrained_edge(edge.f,edge.v)
            self.dt_remove(i)
            # With the old edges and vertex out of the way, make sure the new location
            # is safe, and adjust necessary
            new_pnt = self.adjust_move_node(i,new_pnt,nbrs)
        super(LiveDtGrid,self).move_node(i,new_pnt)
        if self.freeze:
            pass
        elif self.holding:
            self.holding_nodes[i] = 'move'
        else:
            # put the node back in, and add back any edges that we removed.
            # NB: safer to add only the constraints that were there before, since it
            # could be that the DT is not perfectly in sync with self.edges[]
            self.dt_insert(i)
            for edge,a,b in to_remove:
                self.safe_insert_constraint(a,b)
        return new_pnt
def adjust_move_node(self,i,new_pnt,nbrs):
""" Check if it's okay to move the node i to the given point, and
if needed, return a different new_pnt location that won't make an
intersection
i: node index
new_pnt: the requested new location of the node
nbrs: list of neighbor node indices for checking edges
"""
# find existing constrained edges
# for each constrained edge:
# will the updated edge still be valid?
# if not, update new_pnt to be halfway between the old and the new,
# and loop again.
for shorten in range(15): # maximum number of shortenings allowed
all_good = True
# Create a probe vertex so we can call check_line_is_clear()
# sort of winging here for a measure of close things are.
if abs(self.points[i] - new_pnt).sum() / (1.0+abs(new_pnt).max()) < 1e-8:
print "In danger of floating point roundoff issues"
all_good = False
break
pnt = Point_2( new_pnt[0], new_pnt[1] )
probe = self.DT.insert(pnt)
self.vh_info[probe] = 'PROBE!'
for nbr in nbrs:
valid=True
crossings = self.check_line_is_clear( n1=nbr, v2=probe )
if len(crossings) > 0:
all_good = False
new_pnt = 0.5*(self.points[i]+new_pnt)
break
del self.vh_info[probe]
self.DT.remove(probe)
if all_good:
break
else:
if self.verbose>0:
sys.stdout.write("$") ; sys.stdout.flush()
if all_good:
return new_pnt
else:
return self.points[i]
## EDGES
def add_edge(self,nodeA,nodeB,*args,**kwargs):
e = super(LiveDtGrid,self).add_edge(nodeA,nodeB,*args,**kwargs)
if self.freeze:
pass
elif self.holding:
self.holding_nodes[ self.edges[e,0] ] ='add_edge'
self.holding_nodes[ self.edges[e,1] ] ='add_edge'
else:
self.dt_add_edge(e)
return e
def unadd_edge(self,old_length):
if self.freeze:
pass
elif self.holding:
for e in range(old_length,len(self.edges)):
self.holding_nodes[ self.edges[e,0] ] ='unadd_edge'
self.holding_nodes[ self.edges[e,1] ] ='unadd_edge'
else:
for e in range(old_length,len(self.edges)):
self.dt_remove_edge(e)
super(LiveDtGrid,self).unadd_edge(old_length)
def delete_edge(self,e,*args,**kwargs):
if self.freeze:
pass
elif self.holding:
self.holding_nodes[ self.edges[e,0] ] = 'delete_edge'
self.holding_nodes[ self.edges[e,1] ] = 'delete_edge'
else:
self.dt_remove_edge(e)
super(LiveDtGrid,self).delete_edge(e,*args,**kwargs)
def undelete_edge(self,e,*args,**kwargs):
super(LiveDtGrid,self).undelete_edge(e,*args,**kwargs)
if self.freeze:
pass
elif self.holding:
self.holding_nodes[ self.edges[e,0] ] = 'undelete_edge'
self.holding_nodes[ self.edges[e,1] ] = 'undelete_edge'
else:
self.dt_add_edge(e)
    def merge_edges(self,e1,e2):
        """ Merge two adjacent grid edges, keeping the DT in sync.  trigrid
        decides which edge index survives, so both edges' endpoints are
        saved beforehand to know which old constraint to remove.
        Returns the surviving edge index.
        """
        if self.verbose > 1:
            print "  live_dt: merge edges %d %d"%(e1,e2)
        # the tricky thing here is that we don't know which of
        # these edges will be removed by merge_edges - one
        # of them will get deleted, and then deleted by our
        # delete handler.
        # the other one will get modified, so by the time we get
        # control again after trigrid, we won't know what to update
        # so - save the nodes...
        saved_nodes = self.edges[ [e1,e2],:2]
        remaining = super(LiveDtGrid,self).merge_edges(e1,e2)
        if self.freeze:
            pass
        elif self.holding:
            for n in saved_nodes.ravel():
                self.holding_nodes[n] = 'merge_edges'
        else:
            if remaining == e1:
                ab = saved_nodes[0]
            else:
                ab = saved_nodes[1]
            # the one that is *not* remaining has already been deleted
            # just update the other one.
            try:
                self.dt_remove_edge(remaining,nodes=ab)
            except MissingConstraint:
                print " on merge_edges, may have an intervener"
                raise
            self.dt_add_edge(remaining)
        return remaining
def unmerge_edges(self,e1,e2,*args,**kwargs):
#print " top of live_dt unmerge_edges"
check_dt_after = False
if self.freeze:
pass
elif self.holding:
pass
else:
# this can be problematic if the middle node is exactly on
# the line between them, because re-inserting that node
# will pre-emptively segment the constrained edge.
try:
self.dt_remove_edge(e1)
except MissingConstraint:
print " got a missing constraint on merge edges - will verify that it's okay"
check_dt_after = True
#print " after pre-emptive remove_edge"
super(LiveDtGrid,self).unmerge_edges(e1,e2,*args,**kwargs)
#print " after call to super()"
if self.freeze:
pass
elif self.holding:
n1,n2 = self.edges[e1,:2]
n3,n4 = self.edges[e2,:2]
for n in [n1,n2,n3,n4]:
self.holding_nodes[ n ] = 'unmerge_edges'
else:
if check_dt_after:
AB = self.edges[e1,:2]
BC = self.edges[e2,:2]
B = np.intersect1d(AB,BC)[0]
A = np.setdiff1d(AB,B)[0]
C = np.setdiff1d(BC,B)[0]
print "while unmerging edges, a constraint was pre-emptively created, but will verify that now %d-%d-%d."%(A,B,C)
for edge in self.incident_constraints(self.vh[B]):
v1,v2 = edge.vertices()
if self.vh_info[v1] == A or self.vh_info[v2] == A:
A = None
elif self.vh_info[v1] == B or self.vh_info[v2] == B:
B = None
else:
print "while unmerging edge, the middle point has another constrained DT neighbor - surprising..."
if A is not None or B is not None:
print "Failed to verify that implicit constraint was there"
raise MissingConstraint
else:
#print " adding reverted edge e1 and e2"
self.dt_add_edge(e1)
# even though trigrid.merge_edges() calls delete_edge()
# on e2, it doesn't register an undelete_edge() b/c
# rollback=0.
self.dt_add_edge(e2)
# def unsplit_edge(...): # not supported by trigrid
    def split_edge(self,nodeA,nodeB,nodeC):
        """ per trigrid updates, nodeB may be a node index or a tuple (coords, **add_node_opts)

        Remove the A-C constraint (which CGAL may already have split if
        nodeB fell exactly on it), let trigrid split the edge, then insert
        constraints for both halves.  Returns the new edge index.
        """
        if self.freeze:
            pass
        elif self.holding:
            self.holding_nodes[nodeA] = 'split_edge'
            # nodeB may be coordinates rather than an existing node index
            if not isinstance(nodeB,Iterable):
                self.holding_nodes[nodeB] = 'split_edge'
            self.holding_nodes[nodeC] = 'split_edge'
        else:
            if self.verbose > 2:
                print "    split_edge: %d %d %d"%(nodeA,nodeB,nodeC)
            e1 = self.find_edge([nodeA,nodeC])
            try:
                self.dt_remove_edge(e1)
            except MissingConstraint:
                if isinstance(nodeB,Iterable):
                    print " got a missing constraint on split edge, and node has not been created!"
                    raise
                else:
                    print " got a missing constraint on split edge, but maybe the edge has already been split"
                    self.dt_remove_edge(e1,[nodeA,nodeB])
                    self.dt_remove_edge(e1,[nodeB,nodeC])
                    print " Excellent - the middle node had become part of the constraint"
        e2 = super(LiveDtGrid,self).split_edge(nodeA,nodeB,nodeC)
        if self.freeze:
            pass
        elif self.holding:
            pass
        else:
            self.dt_add_edge(e1)
            self.dt_add_edge(e2)
        return e2
    def delete_node_and_merge(self,n):
        """ Remove node n and fuse its two edges into one, keeping the DT
        in sync.  trigrid's merge would re-add the constraint while the
        middle node still exists (which fails when collinear), so DT updates
        are frozen around the super() call and replayed afterwards.
        Returns the merged edge index.
        """
        if self.freeze:
            return super(LiveDtGrid,self).delete_node_and_merge(n)
        if self.holding:
            self.holding_nodes[n] = 'delete_node_and_merge'
        else:
            # remove any constraints going to n -
            self.DT.remove_incident_constraints(self.vh[n])
            self.dt_remove( n )
        # note that this is going to call merge_edges, before it
        # calls delete_node() - and merge_edges will try to add the new
        # constraint, which will fail if the middle node is collinear with
        # the outside nodes. so freeze LiveDT updates, then here we clean up
        self.freeze = 1
        new_edge = super(LiveDtGrid,self).delete_node_and_merge(n)
        if self.verbose > 2:
            print "    Got new_edge=%s from trigrid.delete_node_and_merge"%new_edge
        self.freeze=0
        if self.holding:
            for n in self.edges[new_edge,:2]:
                self.holding_nodes[n] = 'delete_node_and_merge'
        else:
            # while frozen we missed a merge_edges and a delete node.
            # we just want to do them in the opposite order of what trigrid does.
            self.dt_add_edge(new_edge)
        return new_edge
### Useful ways for subclasses to get DT information:
def delaunay_face(self,pnt):
""" Returns node indices making up the face of the DT in which pnt lies.
"""
f = self.DT.locate( Point_2(pnt[0],pnt[1]) )
n = [self.vh_info[f.vertex(i)] for i in [0,1,2]]
return n
    def delaunay_neighbors(self,n):
        """ returns an array of node ids that the DT connects the given node
        to.  Includes existing edges
        """
        nbrs = []
        # how do we stop on a circulator?
        first_v = None
        # somehow it fails HERE, with self.vh[n] being an int, rather
        # than a vertex handle.
        for v in self.DT.incident_vertices(self.vh[n]):
            # the circulator loops forever - stop when the first vertex
            # comes around again
            if first_v is None:
                first_v = v
            elif first_v == v:
                break
            if self.DT.is_infinite(v):
                continue
            # print "Looking for vertex at ",v.point()
            # This is going to need something faster, or maybe the store info
            # bits of cgal.
            nbr_i = self.vh_info[v] # np.where( self.vh == v )[0]
            if nbr_i is None:
                print "    While looking for vertex at ",v.point()
                raise Exception,"expected vertex handle->node, but instead got %s"%nbr_i
            nbrs.append( nbr_i )
        return np.array(nbrs)
def renumber(self):
mappings = super(LiveDtGrid,self).renumber()
self.vh = self.vh[ mappings['valid_nodes'] ]
for i in range(len(self.vh)):
self.vh_info[self.vh[i]] = i
return mappings
####
    def plot_dt(self,clip=None):
        """ Plot the triangulation edges with matplotlib: constrained edges
        in magenta, unconstrained in gray.
        clip: (xmin,xmax,ymin,ymax) bounding box; defaults to
        self.default_clip.
        """
        edges = []
        colors = []
        gray = (0.7,0.7,0.7,1.0)
        magenta = (1.0,0.0,1.0,1.0)
        if cgal_bindings == 'new':
            e_iter = self.DT.finite_edges()
        else:
            e_iter = self.DT.edges
        for e in e_iter:
            # old bindings include infinite edges; skip them manually
            if cgal_bindings=='old' and self.DT.is_infinite(e):
                continue
            if cgal_bindings=='new':
                face,vertex = e
            else:
                face,vertex = e.face(),e.vertex()
            # the edge is opposite `vertex` - its endpoints are the other two
            v1 = face.vertex( (vertex + 1)%3 )
            v2 = face.vertex( (vertex + 2)%3 )
            edges.append( [ [v1.point().x(),v1.point().y()],
                            [v2.point().x(),v2.point().y()] ] )
            if self.DT.is_constrained(e):
                colors.append(magenta)
            else:
                colors.append(gray)
        segments = np.array(edges)
        colors = np.array(colors)
        if clip is None:
            clip = self.default_clip
        if clip is not None:
            points_visible = (segments[...,0] >= clip[0]) & (segments[...,0]<=clip[1]) \
                             & (segments[...,1] >= clip[2]) & (segments[...,1]<=clip[3])
            # so now clip is a bool array of length Nedges
            clip = np.any( points_visible, axis=1)
            segments = segments[clip,...]
            colors = colors[clip,...]
        coll = collections.LineCollection(segments,colors=colors)
        ax = pylab.gca()
        ax.add_collection(coll)
## Detecting self-intersections
    def face_in_direction(self,vh,vec):
        """ Starting at the vertex handle vh, look in the direction
        of vec to choose a face adjacent to vh
        """
        # vh: vertex handle
        # vec: search direction as array
        theta = np.arctan2(vec[1],vec[0])
        # choose a starting face
        best_f = None
        f_circ = self.DT.incident_faces(vh)
        first_f = f_circ.next()
        f = first_f
        while 1:
            # get the vertices of this face:
            vlist=[f.vertex(i) for i in range(3)]
            # rotate to make v1 first:
            vh_index = vlist.index(vh)
            vlist = vlist[vh_index:] + vlist[:vh_index]
            # then check the relative angles of the other two - they are in CCW order
            pnts = np.array( [ [v.point().x(),v.point().y()] for v in vlist] )
            delta01 = pnts[1] - pnts[0]
            delta02 = pnts[2] - pnts[0]
            theta01 = np.arctan2( delta01[1], delta01[0] )
            theta02 = np.arctan2( delta02[1], delta02[0] )
            #
            d01 = (theta - theta01)%(2*np.pi)
            d02 = (theta02 - theta)%(2*np.pi)
            #print "starting point:",pnts[0]
            #print "Theta01=%f Theta=%f Theta02=%f"%(theta01,theta,theta02)
            # accept the face whose two far edges angularly bracket theta
            if (d01 < np.pi) and (d02 < np.pi):
                best_f = f
                break
            f = f_circ.next()
            # f_circ is a circulator; coming back to the first face means
            # no face matched
            if f == first_f:
                raise Exception,"Went all the way around..."
        return best_f
    def next_face(self,f,p1,vec):
        """ find the next face from f, along the line through v in the direction vec,
        return the face and the edge that was crossed, where the edge is a face,i tuple
        """
        # First get the vertices that make up this face:
        # look over the edges:
        vlist=[f.vertex(i) for i in range(3)]
        pnts = np.array( [ [v.point().x(),v.point().y()] for v in vlist] )
        # check which side of the line each vertex is on:
        # left_vec is vec rotated 90 degrees CCW, so the dot product below is
        # a signed "distance to the left of the query line"
        left_vec = np.array( [-vec[1],vec[0]] )
        left_distance = [ (pnts[i,0] - p1[0])*left_vec[0] + (pnts[i,1]-p1[1])*left_vec[1] for i in range(3)]
        # And we want the edge that goes from a negative to positive left_distance.
        # should end with i being the index of the start of the edge that we want
        for i in range(3):
            # This doesn't quite follow the same definitions as in CGAL -
            # because we want to ensure that we get a consecutive list of edges
            # The easy case - the query line exits through an edge that straddles
            # the query line, that's the <
            # the == part comes in where the query line exits through a vertex.
            # in that case, we choose the edge to the left (arbitrary).
            if left_distance[i] <= 0 and left_distance[(i+1)%3] > 0:
                break
        # so now the new edge is between vertex i,(i+1)%3, so in CGAL parlance
        # that's the edge opposite vertex (i-1)%3 (== (i+2)%3)
        edge = (f,(i-1)%3)
        new_face = f.neighbor( (i-1)%3 )
        return edge,new_face
def line_walk_edges_new(self,n1=None,n2=None,v1=None,v2=None,
include_tangent=False,
include_coincident=True):
# Use the CGAL primitives to implement this in a hopefully more
# robust way.
# unfortunately we can't use the line_walk() circulator directly
# because the bindings enumerate the whole list, making it potentially
# very expensive.
# ultimately we want to know edges which straddle the query line
# as well as nodes that fall exactly on the line.
# is it sufficient to then return a mixed list of edges and vertices
# that fall on the query line?
# and any edge that is coincident with the query line will be included
# in the output.
# but what is the appropriate traversal cursor?
# when no vertices fall exactly on the query line, tracking a face
# is fine.
# but when the query line goes through a vertex, it's probably better
# to just record the vertex.
# so for a first cut - make sure that we aren't just directly connected:
if (n2 is not None) and (n1 is not None) and (n2 in self.delaunay_neighbors(n1)):
return []
if v1 is None:
v1 = self.vh[n1]
if v2 is None:
v2 = self.vh[n2]
# Get the points from the vertices, not self.points, because in some cases
# (adjust_move_node) we may be probing
p1 = np.array([ v1.point().x(), v1.point().y()] )
p2 = np.array([ v2.point().x(), v2.point().y()] )
if self.verbose > 1:
print "Walking the line: ",p1,p2
hits = [ ['v',v1] ]
# do the search:
# Note that we really need a better equality test here
# hits[-1][1] != v2 doesn't work beac
def obj_eq(a,b):
return type(a)==type(b) and a==b
while not obj_eq(hits[-1][1], v2):
# if we just came from a vertex, choose a new face in the given direction
if hits[-1][0] == 'v':
if self.verbose > 1:
print "Last hit was the vertex at %s"%(hits[-1][1].point())
# like face_in_direction, but also check for possibility that
# an edge is coincident with the query line.
next_item = self.next_from_vertex( hits[-1][1],(p1,p2) )
if self.verbose > 1:
print "Moved from vertex to ",next_item
if next_item[0] == 'v':
# Find the edge connecting these two:
for e in self.DT.incident_edges( next_item[1] ):
if cgal_bindings == 'new':
f,v_opp = e
else:
f = e.face()
v_opp = e.vertex()
if f.vertex( (v_opp+1)%3 ) == hits[-1][1] or \
f.vertex( (v_opp+2)%3 ) == hits[-1][1]:
hits.append( ['e', (f,v_opp)] )
break
elif hits[-1][0] == 'f':
# either we cross over an edge into another face, or we hit
# one of the vertices.
next_item = self.next_from_face( hits[-1][1], (p1,p2) )
# in case the next item is also a face, go ahead and insert
# the intervening edge
if next_item[0]=='f':
middle_edge = None
for v_opp in range(3):
if self.verbose > 1:
print "Comparing %s to %s looking for the intervening edge"%(hits[-1][1].neighbor(v_opp),
next_item[1])
if hits[-1][1].neighbor(v_opp) == next_item[1]:
middle_edge = ['e', (hits[-1][1],v_opp)]
break
if middle_edge is not None:
hits.append( middle_edge )
else:
raise Exception,"Two faces in a row, but couldn't find the edge between them"
elif hits[-1][0] == 'e':
# This one is easy - just have to check which end of the edge is in the
# desired direction
next_item = self.next_from_edge( hits[-1][1], (p1,p2) )
hits.append( next_item )
if self.verbose > 1:
print "Got hits: ",hits
# but ignore the first and last, since they are the starting/ending points
hits = hits[1:-1]
# and since some of those CGAL elements are going to disappear, translate everything
# into node references
for i in range(len(hits)):
if hits[i][0] == 'v':
hits[i][1] = [ self.vh_info[ hits[i][1] ] ]
elif hits[i][0] == 'e':
f,v_opp = hits[i][1]
hits[i][1] = [ self.vh_info[ f.vertex( (v_opp+1)%3 ) ], self.vh_info[ f.vertex( (v_opp+2)%3 ) ] ]
elif hits[i][0] == 'f':
f = hits[i][1]
hits[i][1] = [ self.vh_info[ f.vertex(0) ],
self.vh_info[ f.vertex(1) ],
f.vertex(2) ]
# have to go back through, and where successive items are faces, we must
# have crossed cleanly through an edge, and that should be inserted, too
return hits
## steppers for line_walk_edges_new
    def next_from_vertex(self, vert, vec):
        """ Stepping primitive for line_walk_edges_new(): from vertex vert,
        advance along the query line vec=(qp1,qp2).  Returns ['v',vertex]
        when a neighboring vertex lies exactly on the line ahead of us,
        or ['f',face] for the face whose interior the line enters.
        """
        # from a vertex, we either go into one of the faces, or along an edge
        qp1,qp2 = vec
        last_left_distance=None
        last_nbr = None
        start = None
        for nbr in self.DT.incident_vertices(vert):
            pnt = np.array( [nbr.point().x(),nbr.point().y()] )
            left_distance = distance_left_of_line(pnt, qp1,qp2 )
            # This used to be inside the last_left_distance < 0 block, but it seems to me
            # that if we find a vertex for which left_distance is 0, that's our man.
            # NOPE - having it inside the block caused the code to discard a colinear vertex
            # that was behind us.
            # in the corner case of three colinear points, and we start from the center, both
            # end points will have left_distance==0, and both will be preceeded by the infinite
            # vertex. So to distinguish colinear points it is necessary to check distance in the
            # desired direction.
            if left_distance==0.0:
                dx = pnt[0] - vert.point().x()
                dy = pnt[1] - vert.point().y()
                # progress > 0 means the colinear neighbor is ahead of us
                progress = dx * (qp2[0] - qp1[0]) + dy * (qp2[1] - qp1[1])
                if progress > 0:
                    return ['v',nbr]
            # Note that it's also possible for the infinite vertex to come up.
            # this should be okay when the left_distance==0.0 check is outside the
            # block below. If it were inside the block, then we would miss the
            # case where we see the infinite vertex (which makes last_left_distance
            # undefined), and then see the exact match.
            if last_left_distance is not None and last_left_distance < 0:
                # left_distance == 0.0 used to be here.
                if left_distance > 0:
                    # what is the face between the last one and this one??
                    # it's vertices are vert, nbr, last_nbr
                    for face in self.DT.incident_faces(vert):
                        for j in range(3):
                            if face.vertex(j) == nbr:
                                for k in range(3):
                                    if face.vertex(k) == last_nbr:
                                        return ['f',face]
                    raise Exception,"Found a good pair of incident vertices, but failed to find the common face."
            # Sanity check - if we've gone all the way around
            if start is None:
                start = nbr
            else: # must not be the first time through the loop:
                if nbr == start:
                    raise Exception,"This is bad - we checked all vertices and didn't find a good neighbor"
            last_left_distance = left_distance
            last_nbr = nbr
            # the infinite vertex has no meaningful side - invalidate the
            # sign-change tracking so it isn't used as a bracket
            if self.DT.is_infinite(nbr):
                last_left_distance = None
        raise Exception,"Fell through!"
def next_from_edge(self, edge, vec):
# vec is the tuple of points defining the query line
qp1,qp2 = vec
# edge is a tuple of face and vertex index
v1 = edge[0].vertex( (edge[1]+1)%3 )
v2 = edge[0].vertex( (edge[1]+2)%3 )
# this means the edge was coincident with the query line
p1 = v1.point()
p2 = v2.point()
p1 = np.array( [p1.x(),p1.y()] )
p2 = np.array( [p2.x(),p2.y()] )
line12 = p2 - p1
if np.dot( line12, qp2-qp1 ) > 0:
return ['v',v2]
else:
return ['v',v1]
    def next_from_face(self, f, vec):
        """ Stepping primitive for line_walk_edges_new(): traverse face f
        along the query line vec=(qp1,qp2).  Returns ['v',vertex] when the
        line exits exactly through a vertex, otherwise ['f',next_face].
        """
        qp1,qp2 = vec
        # stepping through a face, along the query line qp1 -> qp2
        # we exit the face either via an edge, or possibly exactly through a
        # vertex.
        # A lot like next_face(), but hopefully more robust handling of
        # exiting the face by way of a vertex.
        # First get the vertices that make up this face:
        # look over the edges:
        vlist=[f.vertex(i) for i in range(3)]
        pnts = np.array( [ [v.point().x(),v.point().y()] for v in vlist] )
        # check which side of the line each vertex is on:
        # HERE is where the numerical issues come up.
        # could possibly do this in terms of the end points of the query line, in order to
        # at least robustly handle the starting and ending points.
        left_distance = [ distance_left_of_line(pnts[i], qp1,qp2 ) for i in range(3)]
        # And we want the edge that goes from a negative to positive left_distance.
        # should end with i being the index of the start of the edge that we want
        for i in range(3):
            # This doesn't quite follow the same definitions as in CGAL -
            # because we want to ensure that we get a consecutive list of edges
            # The easy case - the query line exits through an edge that straddles
            # the query line, that's the <
            # the == part comes in where the query line exits through a vertex.
            # in that case, we choose the edge to the left (arbitrary).
            if left_distance[i] <= 0 and left_distance[(i+1)%3] > 0:
                break
        # sanity check
        # NOTE(review): i==2 is also the legitimate break value when the exit
        # edge starts at vertex 2, so this raise can fire on a valid
        # configuration - confirm intent.
        if i==2:
            raise Exception,"Trying to figure out how to get out of a face, and nothing looks good"
        # Two cases - leaving via vertex, or crossing an edge internally.
        if left_distance[i]==0:
            return ['v',vlist[i]]
        else:
            # so now the new edge is between vertex i,(i+1)%3, so in CGAL parlance
            # that's
            new_face = f.neighbor( (i-1)%3 )
            return ['f',new_face]
##
    def line_walk_edges(self,n1=None,n2=None,v1=None,v2=None,
                        include_tangent=False,
                        include_coincident=True):
        """ for a line starting at node n1 or vertex handle v1 and
        ending at node n2 or vertex handle v2, return all the edges
        that intersect.

        NOTE(review): include_tangent and include_coincident are accepted
        but never referenced in this body - confirm whether they were meant
        to filter the returned edges.
        """
        # this is a bit dicey in terms of numerical robustness -
        # face_in_direction is liable to give bad results when multiple faces are
        # indistinguishable (like a colinear set of points with many degenerate faces
        # basically on top of each other).
        # How can this be made more robust?
        # When the query line exactly goes through one or more vertex stuff starts
        # going nuts.
        # So is it possible to handle this more intelligently?
        # there are 3 possibilities for intersecting edges:
        # (1) intersect only at an end point, i.e. endpoint lies on query line
        # (2) intersect in interior of edge - one end point on one side, other endpoint
        # on the other side of the query line
        # (3) edge is coincident with query line
        # so for a first cut - make sure that we aren't just directly connected:
        if (n2 is not None) and (n1 is not None) and (n2 in self.delaunay_neighbors(n1)):
            return []
        if v1 is None:
            v1 = self.vh[n1]
        if v2 is None:
            v2 = self.vh[n2]
        # Get the points from the vertices, not self.points, because in some cases
        # (adjust_move_node) we may be probing
        p1 = np.array([ v1.point().x(), v1.point().y()] )
        p2 = np.array([ v2.point().x(), v2.point().y()] )
        # print "Walking the line: ",p1,p2
        vec = p2 - p1
        unit_vec = vec / norm(vec)
        pnt = p1
        f1 = self.face_in_direction(v1,vec)
        # NOTE(review): f2 is computed but never used below - confirm whether
        # it was intended as a termination check from the v2 side.
        f2 = self.face_in_direction(v2,-vec)
        # do the search:
        f_trav = f1
        edges = []
        while 1:
            # print "line_walk_edges: traversing face:"
            # print [f_trav.vertex(i).point() for i in [0,1,2]]
            # Stop condition: we're in a face containing the final vertex
            # check the vertices directly, rather than the face
            # still_close: set to 1 if at least one vertex of this face is no
            # farther along the ray than the destination point p2.
            still_close = 0
            for i in range(3):
                if f_trav.vertex(i) == v2:
                    return edges
                if not still_close:
                    # Check to see if this vertex is beyond the vertex of interest
                    # (projection of the vertex onto the unit direction vs. |vec|)
                    vertex_i_pnt = np.array( [f_trav.vertex(i).point().x(),f_trav.vertex(i).point().y()] )
                    if norm(vec) > np.dot( vertex_i_pnt - p1, unit_vec):
                        still_close = 1
            if not still_close:
                # We didn't find any vertices of this face that were as close to where we started
                # as the destination was, so we must have passed it.
                print "BAILING: n1=%s n2=%s v1=%s v2=%s"%(n1,n2,v1,v2)
                raise Exception,"Yikes - line_walk_edges exposed its numerical issues. We traversed too far."
                # NOTE(review): unreachable after the raise above.
                return edges
            edge,new_face = self.next_face(f_trav,pnt,vec)
            edges.append(edge)
            f_trav = new_face
        return edges
def shoot_ray(self,n1,vec,max_dist=None):
""" Shoot a ray from self.points[n] in the given direction vec
returns (e_index,pnt), the first edge that it encounters and the location
of the intersection
max_dist: stop checking beyond this distance -- currently doesn't make it faster
but will return None,None if the point that it finds is too far away
"""
v1 = self.vh[n1]
vec = vec / norm(vec) # make sure it's a unit vector
pnt = self.points[n1]
f1 = self.face_in_direction(v1,vec)
# do the search:
f_trav = f1
while 1:
edge,new_face = self.next_face(f_trav,pnt,vec)
# make that into a cgal edge:
if cgal_bindings == 'new':
e = edge
else:
e = CGAL.Triangulations_2.Edge(*edge)
if max_dist is not None:
# Test the distance as we go...
face,i = edge
va = face.vertex((i+1)%3)
vb = face.vertex((i-1)%3)
pa = va.point()
pb = vb.point()
d1a = np.array([pa.x()-pnt[0],pa.y() - pnt[1]])
# alpha * vec + beta * ab = d1a
# | vec[0] ab[0] | | alpha | = | d1a[0] |
# | vec[1] ab[1] | | beta | = | d1a[1] |
A = np.array( [[vec[0], pb.x() - pa.x()],
[vec[1], pb.y() - pa.y()]] )
alpha_beta = solve(A,d1a)
dist = alpha_beta[0]
if dist > max_dist:
return None,None
if self.DT.is_constrained(e):
# print "Found a constrained edge"
break
f_trav = new_face
na = self.vh_info[va]
nb = self.vh_info[vb]
if (na is None) or (nb is None):
raise Exception,"Constrained edge is missing at least one node index"
if max_dist is None:
# Compute the point at which they intersect:
ab = self.points[nb] - self.points[na]
d1a = self.points[na] - pnt
# alpha * vec + beta * ab = d1a
# | vec[0] ab[0] | | alpha | = | d1a[0] |
# | vec[1] ab[1] | | beta | = | d1a[1] |
A = np.array( [[vec[0],ab[0]],[vec[1],ab[1]]] )
alpha_beta = solve(A,d1a)
else:
pass # already calculated alpha_beta
p_int = pnt + alpha_beta[0]*vec
edge_id = self.find_edge((na,nb))
return edge_id,p_int
def check_line_is_clear(self,n1=None,n2=None,v1=None,v2=None,p1=None,p2=None):
""" returns a list of vertex tuple for constrained segments that intersect
the given line
"""
# if points were given, create some temporary vertices
if p1 is not None:
cp1 = Point_2( p1[0], p1[1] )
v1 = self.DT.insert(cp1) ; self.vh_info[v1] = 'tmp'
if p2 is not None:
cp2 = Point_2( p2[0], p2[1] )
v2 = self.DT.insert(cp2) ; self.vh_info[v2] = 'tmp'
edges = self.line_walk_edges(n1=n1,n2=n2,v1=v1,v2=v2)
constrained = []
for f,i in edges:
if cgal_bindings=='new':
e = (f,i)
else:
e = CGAL.Triangulations_2.Edge(f,i)
if self.DT.is_constrained(e):
vA = f.vertex( (i+1)%3 )
vB = f.vertex( (i+2)%3 )
print "Conflict info: ",self.vh_info[vA],self.vh_info[vB]
constrained.append( (vA,vB) )
if p1 is not None:
del self.vh_info[v1]
self.DT.remove( v1 )
if p2 is not None:
del self.vh_info[v2]
self.DT.remove( v2 )
return constrained
def check_line_is_clear_new(self,n1=None,n2=None,v1=None,v2=None,p1=None,p2=None):
""" returns a list of vertex tuple for constrained segments that intersect
the given line.
in the case of vertices that are intersected, just a tuple of length 1
(and assumes that all vertices qualify as constrained)
"""
# if points were given, create some temporary vertices
if p1 is not None:
cp1 = Point_2( p1[0], p1[1] )
v1 = self.DT.insert(cp1) ; self.vh_info[v1] = 'tmp'
if p2 is not None:
cp2 = Point_2( p2[0], p2[1] )
v2 = self.DT.insert(cp2) ; self.vh_info[v2] = 'tmp'
crossings = self.line_walk_edges_new(n1=n1,n2=n2,v1=v1,v2=v2)
constrained = []
for crossing_type,crossing in crossings:
if crossing_type == 'f':
continue
if crossing_type == 'v':
constrained.append( (crossing_type,crossing) )
continue
if crossing_type == 'e':
n1,n2 = crossing
if self.verbose > 1:
print "Got potential conflict with edge",n1,n2
try:
self.find_edge( (n1,n2) )
constrained.append( ('e',(n1,n2)) )
except trigrid.NoSuchEdgeError:
pass
if p1 is not None:
del self.vh_info[v1]
self.DT.remove( v1 )
if p2 is not None:
del self.vh_info[v2]
self.DT.remove( v2 )
return constrained
## DT-based "smoothing"
# First, make sure the boundary is sufficiently sampled
    def subdivide(self,min_edge_length=1.0,edge_ids=None):
        """ Like medial_axis::subdivide_iterate -
        Add nodes along the boundary as needed to ensure that the boundary
        is well represented in channels
        [ from medial_axis ]
        Find edges that need to be sampled with smaller
        steps and divide them into two edges.
        returns the number of new edges / nodes
        method: calculate voronoi radii
        iterate over edges in boundary
        for each edge, find the voronoi point that they have
        in common. So this edge should be part of a triangle,
        and we are getting the center of that triangle.
        the voronoi radius with the distance between the voronoi
        point and the edge. If the edge is too long and needs to
        be subdivided, it will be long (and the voronoi radius large)
        compared to the distance between the edge and the vor. center.

        min_edge_length: edges at or below this length (or whose split pieces
          would be) are left alone.
        edge_ids: optional subset of edge indices to consider; None means all.

        NOTE(review): despite the inherited note above, this actually returns
        an array of edge ids that were affected by / neighbor a subdivision
        (candidates for the next pass), not a count.
        """
        if edge_ids is None:
            print "Considering all edges for subdividing"
            edge_ids = range(self.Nedges())
        else:
            print "Considering only %d supplied edges for subdividing"%len(edge_ids)
        to_subdivide = []
        # Also keep a list of constrained edges of DT cells for which another edge
        # has been selected for subdivision.
        neighbors_of_subdivide = {}
        print "Choosing edges to subdivide"
        for ni,i in enumerate(edge_ids): # range(self.Nedges()):
            if ni%500==0:
                # progress dots, one per 500 edges
                sys.stdout.write('.') ;sys.stdout.flush()
            if self.edges[i,0] == -37:
                continue # edge has been deleted
            # this only works when one side is unpaved and the other boundary -
            if self.edges[i,3] != trigrid.UNMESHED or self.edges[i,4] != trigrid.BOUNDARY:
                print "Skipping edge %d because it has weird cell ids"%i
                continue
            a,b = self.edges[i,:2]
            # consult the DT to find who the third node is:
            a_nbrs = self.delaunay_neighbors(a)
            b_nbrs = self.delaunay_neighbors(b)
            abc = np.array([self.points[a],self.points[b],[0,0]])
            # c: the common DT neighbor of a and b lying to the left of a->b
            c = None
            for nbr in a_nbrs:
                if nbr in b_nbrs:
                    # does it lie to the left of the edge?
                    abc[2,:] = self.points[nbr]
                    if trigrid.is_ccw(abc):
                        c = nbr
                        break
            if c is None:
                print "While looking at edge %d, %s - %s"%(i,self.points[a],self.points[b])
                raise Exception,"Failed to find the third node that makes up an interior triangle"
            pntV = trigrid.circumcenter(abc[0],abc[1],abc[2])
            # compute the point-line distance between
            # this edge and the v center, then compare to
            # the distance from the endpoint to that
            # vcenter
            pntA = self.points[a]
            pntB = self.points[b]
            v_radius = np.sqrt( ((pntA-pntV)**2).sum() )
            # This calculates unsigned distance - with Triangle, that's fine because
            # it takes care of the Steiner points, but with CGAL we do it ourselves.
            # line_clearance = np.sqrt( (( 0.5*(pntA+pntB) - pntV)**2).sum() )
            # signed clearance: project the circumcenter onto the left-pointing
            # normal of edge a->b
            ab = (pntB - pntA)
            ab = ab / np.sqrt( np.sum(ab**2) )
            pos_clearance_dir = np.array( [-ab[1],ab[0]] )
            av = pntV - pntA
            line_clearance = av[0]*pos_clearance_dir[0] + av[1]*pos_clearance_dir[1]
            # Why do I get some bizarrely short edges?
            # (ab is reused here as the scalar edge length)
            ab = np.sqrt( np.sum( (pntA - pntB)**2 ) )
            if v_radius > 1.2*line_clearance and v_radius > min_edge_length and ab>min_edge_length:
                to_subdivide.append(i)
                # Also make note of the other edges of this same DT triangle
                for maybe_nbr in [ [a,c], [b,c] ]:
                    # could be an internal DT edge, or a real edge
                    try:
                        nbr_edge = self.find_edge(maybe_nbr)
                        neighbors_of_subdivide[nbr_edge] = 1
                    except trigrid.NoSuchEdgeError:
                        pass
        print
        print "Will subdivide %d edges"%(len(to_subdivide))
        for ni,i in enumerate(to_subdivide):
            if ni%500==0:
                sys.stdout.write('.') ; sys.stdout.flush()
            # once an edge is actually split it is no longer just a "neighbor"
            if neighbors_of_subdivide.has_key(i):
                del neighbors_of_subdivide[i]
            a,b = self.edges[i,:2]
            elts = self.all_iters_for_node(a)
            if len(elts) != 1:
                raise Exception,"How is there not exactly one iter for this node!?"
            # split at the midpoint: half the original edge length
            scale = 0.5*np.sqrt( np.sum( (self.points[a]-self.points[b])**2 ) )
            # print "Subdividing edge %d with scale %f"%(i,scale)
            new_elt = self.resample_boundary(elts[0],'forward',
                                             local_scale=scale,
                                             new_node_stat=self.node_data[a,0])
            # keep track of any edges that change:
            e1,e2 = self.pnt2edges(new_elt.data)
            neighbors_of_subdivide[e1] = 1
            neighbors_of_subdivide[e2] = 1
        print "done"
        subdivided = np.array( neighbors_of_subdivide.keys() )
        return subdivided
def subdivide_iterate(self,min_edge_length=1.0):
modified_edges = None
while 1:
# It wasn't enough to just test for no modified edges - rather than
# trying to be clever about checking exactly edges that may have
# been affected by a split, have nested iterations, and stop only
# when globally there are no modified edges
new_modified_edges = self.subdivide(min_edge_length=min_edge_length,
edge_ids = modified_edges)
print "Subdivide made %d new nodes"%(len(new_modified_edges)/2)
if len(new_modified_edges) == 0:
if modified_edges is None:
# this means we considered all edges, and still found nobody
# to split
break
else:
# this means we were only considering likely offenders -
# step back and consider everyone
print "Will reconsider all edges..."
modified_edges = None
else:
modified_edges = new_modified_edges
    def smoothed_poly(self,density,min_edge_length=1.0):
        """ Returns a polygon for the boundary that has all 'small' concave features
        removed. Modifies the boundary points, but only by adding new samples evenly
        between originals.

        density: callable mapping point locations to a local length scale
          (evaluated at DT circumcenters below).
        min_edge_length: passed through to subdivide_iterate().
        """
        # Make sure that all edges are sufficiently sampled:
        self.subdivide_iterate(min_edge_length=min_edge_length)
        # The process (same as in smoother.py):
        # For all _interior_ DT cells
        # calculate circum-radius
        # mark for deletion any cell with radius < scale/2,
        # with scale calculated at circumcenter
        # For all non-deleted cells, create an array of all edges
        # The notes in smoother say that if an edge appears exactly once
        # then it should be kept.
        # Edges that appear twice are internal to the domain.
        # If/when degenerate edges take part in this, they will have to be
        # handled specially, since they *should* have two adjacent, valid, cells.
        # What is faster?
        # (a) iterate over known boundary edges, grabbing cells to the left,
        # and checking against a hash to see that the cell hasn't been included
        # already
        # (b) iterate over DT faces, checking to see if it's an internal face or not
        # by checking ab,bc,ca against the edge hash?
        # probably (b), since we already have this hash built.
        # Actually, (b) isn't sufficient - there are triangles that are internal, but
        # have no boundary edges.
        # And (a) isn't good either - it would skip over any triangles that are entirely
        # internal _or_ entirely external (i.e. share no edges with the boundary).
        # Is there a way to do this by tracing edges? Start at some vertex on a clist.
        # check the next edge forward - is the radius of the DT face to its left big enough?
        # If so, we move forward.
        # If not, detour?
        # That's not quite enough, though. Really need to be checking every triangle incident
        # to the vertex, not just the ones incident to the edges.
        # So for simplicity, why not use the traversal of the edges to enumerate internal cells,
        # then proceed as before.
        cells = self.dt_interior_cells()
        print "Marking for deletion DT faces that are too small"
        points = self.points[cells]
        vcenters = trigrid.circumcenter(points[:,0],
                                        points[:,1],
                                        points[:,2])
        # Threshold on the radius, squared -
        # NOTE(review): assumes self.scale_ratio_for_cutoff is set elsewhere
        # on the instance - confirm its default.
        r2_min = (density(vcenters)/2.0 * self.scale_ratio_for_cutoff)**2
        # r^2 for each internal DT face
        r2 = np.sum( (vcenters - points[:,0,:])**2,axis=1)
        valid = r2 >= r2_min
        # From here on out it follows smoother.py very closely...
        print "Compiling valid edges"
        # expands cells into edges
        good_cells = cells[valid]
        all_edges = good_cells[:,np.array([[0,1],[1,2],[2,0]])]
        # cells is Nfaces x 3
        # all_edges is then Nfaces x 3 x 2
        # combine the first two dimensions, so we have a regular edges array
        all_edges = all_edges.reshape( (-1,2) )
        print "building hash of edges"
        # edge_hash maps a canonically-ordered node pair to its occurrence count
        edge_hash = {}
        for i in range(len(all_edges)):
            k = all_edges[i,:]
            if k[0] > k[1]:
                k=k[::-1]
            k = tuple(k)
            if not edge_hash.has_key(k):
                edge_hash[k] = 0
            edge_hash[k] += 1
        print "Selecting boundary edges"
        # good edges are then edges that appear in exactly one face
        good_edges = []
        for k in edge_hash:
            if edge_hash[k] == 1:
                good_edges.append(k)
        good_edges = np.array(good_edges)
        print "Finding polygons from edges"
        tgrid = trigrid.TriGrid(points=self.points,
                                edges =good_edges)
        tgrid.verbose = 2
        polygons = tgrid.edges_to_polygons(None) # none=> use all edges
        self.smooth_all_polygons = polygons # for debugging..
        print "done with smoothing"
        # NOTE(review): only the first polygon is returned - presumably the
        # outer boundary; verify ordering from edges_to_polygons.
        return polygons[0]
    def dt_interior_cells(self):
        """ Return an array [N,3] of node-index triples, one per finite DT
        face whose triangle lies inside the domain (judged by the angular
        test below at the face's first vertex). """
        print "Finding interior cells from full Delaunay Triangulation"
        interior_cells = []
        # the two supported CGAL binding generations expose the face iterator
        # differently:
        if cgal_bindings == 'new':
            face_it = self.DT.finite_faces()
        else:
            face_it = self.DT.faces
        for f in face_it:
            a,b,c = [self.vh_info[f.vertex(i)] for i in [0,1,2]]
            # going to be slow...
            # How to test whether this face is internal:
            # Arbitrarily choose a vertex: a
            #
            # Find an iter for which the face abc lies to the left of the boundary
            internal = 0
            for elt in self.all_iters_for_node(a):
                # angles (from node a) to the next boundary node, to b, and to
                # the previous boundary node:
                d = self.points[elt.nxt.data] - self.points[a]
                theta_afwd = np.arctan2(d[1],d[0])
                d = self.points[b] - self.points[a]
                theta_ab = np.arctan2(d[1],d[0])
                d = self.points[elt.prv.data] - self.points[a]
                theta_aprv = np.arctan2(d[1],d[0])
                # normalize both angles relative to the forward direction;
                # b must fall within the interior wedge [afwd, aprv)
                dtheta_b = (theta_ab - theta_afwd) % (2*np.pi)
                dtheta_elt = (theta_aprv - theta_afwd) % (2*np.pi)
                # if b==elt.nxt.data, then dtheta_b==0.0 - all good
                if dtheta_b >= 0 and dtheta_b < dtheta_elt:
                    internal = 1
                    break
            if internal:
                interior_cells.append( [a,b,c] )
        cells = np.array(interior_cells)
        return cells
    def apollonius_scale(self,r,min_edge_length=1.0,process_islands=True):
        """ Return an apollonius based field giving the scale subject to
        the local feature size of geo and the telescoping rate r

        r: telescoping rate.
          NOTE(review): r is accepted but not referenced in this body -
          presumably consumed by field.ApolloniusField; confirm.
        min_edge_length: passed to subdivide_iterate().
        process_islands: when True, add one extra scale point per interior
          ring so small islands are not lost.
        """
        self.subdivide_iterate(min_edge_length=min_edge_length)
        dt_cells = self.dt_interior_cells()
        points = self.points[dt_cells]
        vcenters = trigrid.circumcenter(points[:,0],
                                        points[:,1],
                                        points[:,2])
        # local feature size ~ circumcircle diameter of each interior face
        radii = np.sqrt( np.sum( (vcenters - points[:,0,:])**2,axis=1) )
        diam = 2*radii
        if process_islands:
            print "Hang on. Adding scale points for islands"
            island_centers = []
            island_scales = []
            for int_ring in self.poly.interiors:
                p = int_ring.convex_hull
                points = np.array(p.exterior.coords)
                center = points.mean(axis=0)
                # brute force - find the maximal distance between
                # any two points. probably a smart way to do this,
                # but no worries...
                # (O(n^2) over hull vertices; hulls are small so acceptable)
                max_dsqr = 0
                for i in range(len(points)):
                    pa = points[i]
                    for j in range(i,len(points)):
                        d = ((pa - points[j])**2).sum()
                        max_dsqr = max(d,max_dsqr)
                feature_scale = np.sqrt( max_dsqr )
                print "Ring has scale of ",feature_scale
                island_centers.append( center )
                # this very roughly says that we want at least 4 edges
                # for representing this thing.
                # island_scales.append( feature_scale / 2.0)
                # actually it's not too hard to have a skinny island
                # 2 units long that gets reduced to a degenerate pair
                # of edges, so go conservative here:
                island_scales.append( feature_scale / 3.0 )
            island_centers = np.array(island_centers)
            island_scales = np.array(island_scales)
            if len(island_centers) > 0:
                vcenters = np.concatenate( (vcenters,island_centers) )
                diam = np.concatenate( (diam,island_scales) )
            print "Done with islands"
        scale = field.ApolloniusField(vcenters,diam)
        return scale
def dt_clearance(self,n):
"""POORLY TESTED
Returns the diameter of the smallest circumcircle (?) of a face
incident to the node n. Currently this doesn't work terribly well
because sliver triangles will create arbitrarily small clearances
at obtuse angles.
"""
diams = []
f_circ = self.DT.incident_faces( self.vh[n] )
first_f = f_circ.next()
f = first_f
for f in f_circ:
if f == first_f:
break
diams.append( self.face_diameter(f) )
return min(diams)
def face_nodes(self,face):
return np.array( [self.vh_info[face.vertex(j)] for j in range(3)] )
def face_center(self,face):
points = self.points[self.face_nodes(face)]
return trigrid.circumcenter(points[0],points[1],points[2])
def face_diameter(self,face):
points = self.points[self.face_nodes(face)]
ccenter = trigrid.circumcenter(points[0],points[1],points[2])
return 2*norm(points[0] - ccenter)
except ImportError,exc:
print "CGAL unavailable."
print exc
import orthomaker
    class LiveDtGrid(orthomaker.OrthoMaker):
        """ placeholder for live delaunay triangulation code """
        # Fallback used when CGAL is unavailable: callers can test has_dt
        # to see whether live-DT features are active.
        has_dt = 0
        pending_conflicts = []
        def hold(self):
            # no-op in the fallback; presumably the CGAL-backed class defers
            # DT maintenance here - confirm against the real implementation.
            pass
        def release(self):
            # no-op counterpart to hold()
            pass
        def delaunay_neighbors(self,n):
            # without a triangulation there are no known DT neighbors
            return []
| gpl-2.0 | 7,978,895,844,134,759,000 | 41.239334 | 133 | 0.48055 | false |
janezkranjc/clowdflows | gdpr/migrations/0001_initial.py | 2 | 4770 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration: creates the GDPRProfile model
    with a one-to-one link to auth.User plus consent flags."""
    def forwards(self, orm):
        """Apply: create the gdpr_gdprprofile table."""
        # Adding model 'GDPRProfile'
        db.create_table('gdpr_gdprprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user', self.gf('django.db.models.fields.related.OneToOneField')(related_name='gdprprofile', unique=True, to=orm['auth.User'])),
            ('accepted_terms', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('allow_correspondence', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('updated', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal('gdpr', ['GDPRProfile'])
    def backwards(self, orm):
        """Reverse: drop the gdpr_gdprprofile table."""
        # Deleting model 'GDPRProfile'
        db.delete_table('gdpr_gdprprofile')
    # Frozen ORM snapshot generated by South; do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'gdpr.gdprprofile': {
            'Meta': {'object_name': 'GDPRProfile'},
            'accepted_terms': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'allow_correspondence': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'gdprprofile'", 'unique': 'True', 'to': "orm['auth.User']"})
        }
    }
complete_apps = ['gdpr'] | gpl-3.0 | -6,109,719,187,834,653,000 | 63.472973 | 182 | 0.567296 | false |
kgblll/libresoft-gymkhana | apps/semanticSearch/urls.py | 2 | 1388 | #
# Copyright (C) 2009 GSyC/LibreSoft
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author : Jose Gato Luis <jgato __at__ libresoft __dot__ es>
#
from django.conf.urls.defaults import *
urlpatterns = patterns('',
url(r'^semantic/similarity/words/$', 'apps.semanticSearch.views.similarityWords', name="similarSemantic"),
url(r'^social/search/semantic/$', 'apps.semanticSearch.views.socialSearchSemantic', name="socialSemanticSearch"),
url(r'^social/node/distances/$', 'apps.semanticSearch.views.compareSemanticNodes', name="compareNodesSemantic"),
url(r'^social/node/(?P<node_id1>[1-9][0-9]*)/distance/(?P<node_id2>[1-9][0-9]*)/$', 'apps.semanticSearch.views.compareTwoSemanticNodes',name="compareTwoNodesSemantic")
)
| gpl-2.0 | -3,967,316,095,379,997,000 | 43.774194 | 171 | 0.724063 | false |
shuvozula/crazyflie-clients-python | lib/cflib/crazyflie/mem.py | 1 | 24546 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2014 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
"""
Enables flash access to the Crazyflie.
"""
__author__ = 'Bitcraze AB'
__all__ = ['Memory', 'MemoryElement']
import struct
import errno
from cflib.crtp.crtpstack import CRTPPacket, CRTPPort
from cflib.utils.callbacks import Caller
from binascii import crc32
import binascii
# Channels used on the memory port (CRTPPort.MEM)
CHAN_INFO = 0
CHAN_READ = 1
CHAN_WRITE = 2
# Commands used when accessing the Settings port
CMD_INFO_VER = 0
CMD_INFO_NBR = 1
CMD_INFO_DETAILS = 2
# The max size of a CRTP packet payload
MAX_LOG_DATA_PACKET_SIZE = 30
import logging
logger = logging.getLogger(__name__)
class MemoryElement(object):
    """Base class for one memory element on the Crazyflie.

    Carries the common identification fields (id, type, size) and the
    handler used for read/write access; subclasses add type-specific
    parsing and serialization.
    """
    TYPE_I2C = 0
    TYPE_1W = 1

    def __init__(self, id, type, size, mem_handler):
        """Record the memory's identity and the handler that accesses it."""
        self.id = id
        self.type = type
        self.size = size
        self.mem_handler = mem_handler

    @staticmethod
    def type_to_string(t):
        """Get string representation of memory type"""
        type_names = {MemoryElement.TYPE_I2C: "I2C",
                      MemoryElement.TYPE_1W: "1-wire"}
        return type_names.get(t, "Unknown")

    def new_data(self, mem, addr, data):
        # Base class does nothing useful with incoming data; subclasses
        # override this callback.
        logger.info("New data, but not OW mem")

    def __str__(self):
        """Generate debug string for memory"""
        type_name = MemoryElement.type_to_string(self.type)
        return "Memory: id={}, type={}, size={}".format(self.id, type_name,
                                                        self.size)
class I2CElement(MemoryElement):
    """Memory element for the I2C EEPROM holding configuration values
    (radio channel/speed and pitch/roll trim)."""
    def __init__(self, id, type, size, mem_handler):
        super(I2CElement, self).__init__(id=id, type=type, size=size, mem_handler=mem_handler)
        self._update_finished_cb = None
        self._write_finished_cb = None
        # Parsed configuration values, filled in by new_data()
        self.elements = {}
        # True once a block with valid magic string and checksum has been read
        self.valid = False
    def new_data(self, mem, addr, data):
        """Callback for when new memory data has been fetched"""
        if mem.id == self.id:
            if addr == 0:
                # Check for header: the literal four-character magic string
                # "0xBC" (not a byte value) at the start of the EEPROM.
                if data[0:4] == "0xBC":
                    logger.info("Got new data: {}".format(data))
                    # data[4] is skipped here - presumably a version byte
                    # (write_data() emits 0x00 there); confirm against firmware.
                    [self.elements["radio_channel"],
                     self.elements["radio_speed"],
                     self.elements["pitch_trim"],
                     self.elements["roll_trim"]] = struct.unpack("<BBff", data[5:15])
                    logger.info(self.elements)
                    # byte 15 holds a simple mod-256 sum over bytes 0..14
                    if self._checksum256(data[:15]) == ord(data[15]):
                        self.valid = True
                    if self._update_finished_cb:
                        self._update_finished_cb(self)
                        self._update_finished_cb = None
    def _checksum256(self, st):
        # Simple additive checksum over a (py2) byte string, modulo 256
        return reduce(lambda x, y: x + y, map(ord, st)) % 256
    def write_data(self, write_finished_cb):
        """Serialize the current elements and write them back to the EEPROM;
        write_finished_cb is invoked from write_done()."""
        # leading 0x00 is the version byte, then channel/speed/trims
        data = (0x00, self.elements["radio_channel"], self.elements["radio_speed"],
                self.elements["pitch_trim"], self.elements["roll_trim"])
        image = struct.pack("<BBBff", *data)
        # Adding some magic:
        image = "0xBC" + image
        image += struct.pack("B", self._checksum256(image))
        self._write_finished_cb = write_finished_cb
        self.mem_handler.write(self, 0x00, struct.unpack("B"*len(image), image))
    def update(self, update_finished_cb):
        """Request an update of the memory content"""
        if not self._update_finished_cb:
            self._update_finished_cb = update_finished_cb
            self.valid = False
            logger.info("Updating content of memory {}".format(self.id))
            # Start reading the header
            self.mem_handler.read(self, 0, 16)
    def write_done(self, mem, addr):
        # Called by the memory handler when the write completes
        if self._write_finished_cb and mem.id == self.id:
            self._write_finished_cb(self, addr)
            self._write_finished_cb = None
    def disconnect(self):
        # Drop pending callbacks when the Crazyflie link goes away
        self._update_finished_cb = None
        self._write_finished_cb = None
class OWElement(MemoryElement):
    """Memory class with extra functionality for 1-wire memories"""
    # Maps element-type ids stored in the 1-wire memory to readable names
    element_mapping = {
        1: "Board name",
        2: "Board revision",
        3: "Custom"
    }
    def __init__(self, id, type, size, addr, mem_handler):
        """Initialize the memory with good defaults"""
        super(OWElement, self).__init__(id=id, type=type, size=size, mem_handler=mem_handler)
        self.addr = addr
        # True once header and elements have been read and CRC-checked
        self.valid = False
        self.vid = None
        self.pid = None
        self.name = None
        self.pins = None
        # Parsed key/value elements (keys from element_mapping values)
        self.elements = {}
        self._update_finished_cb = None
        self._write_finished_cb = None
        # Reverse map: element name -> numeric id, used when serializing
        self._rev_element_mapping = {}
        for key in OWElement.element_mapping.keys():
            self._rev_element_mapping[OWElement.element_mapping[key]] = key
    def new_data(self, mem, addr, data):
        """Callback for when new memory data has been fetched"""
        if mem.id == self.id:
            if addr == 0:
                # First chunk: 8-byte header (+ possibly the element block)
                if self._parse_and_check_header(data[0:8]):
                    logger.info("--> HEADER OK")
                    if self._parse_and_check_elements(data[9:11]):
                        self.valid = True
                        self._update_finished_cb(self)
                        self._update_finished_cb = None
                    else:
                        # We need to fetch the elements, find out the length
                        (elem_ver, elem_len) = struct.unpack("BB", data[8:10])
                        # +3 covers version, length and CRC bytes
                        self.mem_handler.read(self, 8, elem_len + 3)
                else:
                    logger.info("--> HEADER NOT OK")
                    # Call the update if the CRC check of the header fails, we're done here
                    if self._update_finished_cb:
                        self._update_finished_cb(self)
                        self._update_finished_cb = None
            elif addr == 0x08:
                # Second chunk: the element block requested above
                if self._parse_and_check_elements(data):
                    logger.info("--> ELEMENT OK")
                    self.valid = True
                else:
                    logger.info("--> ELEMENT NOT OK")
                if self._update_finished_cb:
                    self._update_finished_cb(self)
                    self._update_finished_cb = None
    def _parse_and_check_elements(self, data):
        """Parse and check the CRC and length of the elements part of the memory"""
        # layout: version byte, length byte, TLV entries..., CRC byte
        (elem_ver, elem_len, crc) = struct.unpack("<BBB", data[0] + data[1] + data[-1])
        # only the low byte of the CRC32 is stored
        test_crc = crc32(data[:-1]) & 0x0ff
        elem_data = data[2:-1]
        if test_crc == crc:
            # walk the TLV entries: 1 byte id, 1 byte length, then payload
            while len(elem_data) > 0:
                (eid, elen) = struct.unpack("BB", elem_data[:2])
                self.elements[self.element_mapping[eid]] = elem_data[2:2+elen]
                elem_data = elem_data[2+elen:]
            return True
        return False
    def write_done(self, mem, addr):
        # Called by the memory handler when the write completes
        if self._write_finished_cb:
            self._write_finished_cb(self, addr)
            self._write_finished_cb = None
    def write_data(self, write_finished_cb):
        """Serialize header + elements and write the image to the memory;
        write_finished_cb is invoked from write_done()."""
        # First generate the header part
        header_data = struct.pack("<BIBB", 0xEB, self.pins, self.vid, self.pid)
        header_crc = crc32(header_data) & 0x0ff
        header_data += struct.pack("B", header_crc)
        # Now generate the elements part
        elem = ""
        logger.info(self.elements.keys())
        for element in reversed(self.elements.keys()):
            elem_string = self.elements[element]
            #logger.info(">>>> {}".format(elem_string))
            key_encoding = self._rev_element_mapping[element]
            elem += struct.pack("BB", key_encoding, len(elem_string))
            elem += elem_string
        # element block: version 0x00, length, TLV entries, CRC low byte
        elem_data = struct.pack("BB", 0x00, len(elem))
        elem_data += elem
        elem_crc = crc32(elem_data) & 0x0ff
        elem_data += struct.pack("B", elem_crc)
        data = header_data + elem_data
        # Write data
        # (hex dump of the outgoing image for debugging)
        p = ""
        for s in data:
            p += "0x{:02X} ".format(ord(s))
        logger.info(p)
        self.mem_handler.write(self, 0x00, struct.unpack("B"*len(data), data))
        self._write_finished_cb = write_finished_cb
    def update(self, update_finished_cb):
        """Request an update of the memory content"""
        if not self._update_finished_cb:
            self._update_finished_cb = update_finished_cb
            self.valid = False
            logger.info("Updating content of memory {}".format(self.id))
            # Start reading the header
            self.mem_handler.read(self, 0, 11)
        #else:
        # logger.warning("Already in progress of updating memory {}".format(self.id))
    def _parse_and_check_header(self, data):
        """Parse and check the CRC of the header part of the memory"""
        #logger.info("Should parse header: {}".format(data))
        # layout: magic 0xEB, 4-byte pins field, vid, pid, CRC low byte
        (start, self.pins, self.vid, self.pid, crc) = struct.unpack("<BIBBB", data)
        test_crc = crc32(data[:-1]) & 0x0ff
        if start == 0xEB and crc == test_crc:
            return True
        return False
    def __str__(self):
        """Generate debug string for memory"""
        return ("OW {} ({:02X}:{:02X}): {}".format(
            self.addr, self.vid, self.pid, self.elements))
    def disconnect(self):
        # Drop pending callbacks when the Crazyflie link goes away
        self._update_finished_cb = None
        self._write_finished_cb = None
class _ReadRequest:
"""Class used to handle memory reads that will split up the read in multiple packets in necessary"""
MAX_DATA_LENGTH = 20
def __init__(self, mem, addr, length, cf):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = length
self.data = ""
self.cf = cf
self._current_addr = addr
def start(self):
"""Start the fetching of the data"""
self._request_new_chunk()
def resend(self):
logger.info("Sending write again...")
self._request_new_chunk()
def _request_new_chunk(self):
"""Called to request a new chunk of data to be read from the Crazyflie"""
# Figure out the length of the next request
new_len = self._bytes_left
if new_len > _ReadRequest.MAX_DATA_LENGTH:
new_len = _ReadRequest.MAX_DATA_LENGTH
logger.info("Requesting new chunk of {}bytes at 0x{:X}".format(new_len, self._current_addr))
# Request the data for the next address
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_READ)
pk.data = struct.pack("<BIB", self.mem.id, self._current_addr, new_len)
reply = struct.unpack("<BBBBB", pk.data[:-1])
self.cf.send_packet(pk, expected_reply=reply, timeout=1)
def add_data(self, addr, data):
"""Callback when data is received from the Crazyflie"""
data_len = len(data)
if not addr == self._current_addr:
logger.warning("Address did not match when adding data to read request!")
return
# Add the data and calculate the next address to fetch
self.data += data
self._bytes_left -= data_len
self._current_addr += data_len
if self._bytes_left > 0:
self._request_new_chunk()
return False
else:
return True
class _WriteRequest:
"""Class used to handle memory reads that will split up the read in multiple packets in necessary"""
MAX_DATA_LENGTH = 20
def __init__(self, mem, addr, data, cf):
"""Initialize the object with good defaults"""
self.mem = mem
self.addr = addr
self._bytes_left = len(data)
self._data = data
self.data = ""
self.cf = cf
self._current_addr = addr
self._sent_packet = None
self._sent_reply = None
self._addr_add = 0
def start(self):
"""Start the fetching of the data"""
self._write_new_chunk()
def resend(self):
logger.info("Sending write again...")
self.cf.send_packet(self._sent_packet, expected_reply=self._sent_reply, timeout=3)
def _write_new_chunk(self):
"""Called to request a new chunk of data to be read from the Crazyflie"""
# Figure out the length of the next request
new_len = len(self._data)
if new_len > _WriteRequest.MAX_DATA_LENGTH:
new_len = _WriteRequest.MAX_DATA_LENGTH
logger.info("Writing new chunk of {}bytes at 0x{:X}".format(new_len, self._current_addr))
data = self._data[:new_len]
self._data = self._data[new_len:]
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_WRITE)
pk.data = struct.pack("<BI", self.mem.id, self._current_addr)
# Create a tuple used for matching the reply using id and address
reply = struct.unpack("<BBBBB", pk.data)
self._sent_reply = reply
# Add the data
pk.data += struct.pack("B"*len(data), *data)
self._sent_packet = pk
self.cf.send_packet(pk, expected_reply=reply, timeout=3)
self._addr_add = len(data)
def write_done(self, addr):
"""Callback when data is received from the Crazyflie"""
if not addr == self._current_addr:
logger.warning("Address did not match when adding data to read request!")
return
if len(self._data) > 0:
self._current_addr += self._addr_add
self._write_new_chunk()
return False
else:
logger.info("This write request is done")
return True
class Memory():
"""Access memories on the Crazyflie"""
# These codes can be decoded using os.stderror, but
# some of the text messages will look very stange
# in the UI, so they are redefined here
_err_codes = {
errno.ENOMEM: "No more memory available",
errno.ENOEXEC: "Command not found",
errno.ENOENT: "No such block id",
errno.E2BIG: "Block too large",
errno.EEXIST: "Block already exists"
}
def __init__(self, crazyflie=None):
"""Instantiate class and connect callbacks"""
self.mems = []
# Called when new memories have been added
self.mem_added_cb = Caller()
# Called when new data has been read
self.mem_read_cb = Caller()
self.mem_write_cb = Caller()
self.cf = crazyflie
self.cf.add_port_callback(CRTPPort.MEM, self._new_packet_cb)
self._refresh_callback = None
self._fetch_id = 0
self.nbr_of_mems = 0
self._ow_mem_fetch_index = 0
self._elem_data = ()
self._read_requests = {}
self._write_requests = {}
self._ow_mems_left_to_update = []
self._getting_count = False
def _mem_update_done(self, mem):
"""Callback from each individual memory (only 1-wire) when reading of header/elements are done"""
if mem.id in self._ow_mems_left_to_update:
self._ow_mems_left_to_update.remove(mem.id)
logger.info(mem)
if len(self._ow_mems_left_to_update) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
def get_mem(self, id):
"""Fetch the memory with the supplied id"""
for m in self.mems:
if m.id == id:
return m
return None
def get_mems(self, type):
"""Fetch all the memories of the supplied type"""
ret = ()
for m in self.mems:
if m.type == type:
ret += (m, )
return ret
def ow_search(self, vid=0xBC, pid=None, name=None):
"""Search for specific memory id/name and return it"""
for m in self.get_mems(MemoryElement.TYPE_1W):
if pid and m.pid == pid or name and m.name == name:
return m
return None
def write(self, memory, addr, data):
"""Write the specified data to the given memory at the given address"""
if memory.id in self._write_requests:
logger.warning("There is already a write operation ongoing for memory id {}".format(memory.id))
return False
wreq = _WriteRequest(memory, addr, data, self.cf)
self._write_requests[memory.id] = wreq
wreq.start()
return True
def read(self, memory, addr, length):
"""Read the specified amount of bytes from the given memory at the given address"""
if memory.id in self._read_requests:
logger.warning("There is already a read operation ongoing for memory id {}".format(memory.id))
return False
rreq = _ReadRequest(memory, addr, length, self.cf)
self._read_requests[memory.id] = rreq
rreq.start()
return True
def refresh(self, refresh_done_callback):
"""Start fetching all the detected memories"""
self._refresh_callback = refresh_done_callback
self._fetch_id = 0
for m in self.mems:
try:
self.mem_read_cb.remove_callback(m.new_data)
m.disconnect()
except Exception as e:
logger.info("Error when removing memory after update: {}".format(e))
self.mems = []
self.nbr_of_mems = 0
self._getting_count = False
logger.info("Requesting number of memories")
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_NBR, )
self.cf.send_packet(pk, expected_reply=(CMD_INFO_NBR,))
def _new_packet_cb(self, packet):
"""Callback for newly arrived packets for the memory port"""
chan = packet.channel
cmd = packet.datal[0]
payload = struct.pack("B" * (len(packet.datal) - 1), *packet.datal[1:])
#logger.info("--------------->CHAN:{}=>{}".format(chan, struct.unpack("B"*len(payload), payload)))
if chan == CHAN_INFO:
if cmd == CMD_INFO_NBR:
self.nbr_of_mems = ord(payload[0])
logger.info("{} memories found".format(self.nbr_of_mems))
# Start requesting information about the memories, if there are any...
if self.nbr_of_mems > 0:
if not self._getting_count:
self._getting_count = True
logger.info("Requesting first id")
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, 0)
self.cf.send_packet(pk, expected_reply=(CMD_INFO_DETAILS, 0))
else:
self._refresh_callback()
if cmd == CMD_INFO_DETAILS:
# Did we get a good reply, otherwise try again:
if len(payload) < 5:
# Workaround for 1-wire bug when memory is detected
# but updating the info crashes the communication with
# the 1-wire. Fail by saying we only found 1 memory (the I2C).
logger.error("-------->Got good count, but no info on mem!")
self.nbr_of_mems = 1
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
return
# Create information about a new memory
# Id - 1 byte
mem_id = ord(payload[0])
# Type - 1 byte
mem_type = ord(payload[1])
# Size 4 bytes (as addr)
mem_size = struct.unpack("I", payload[2:6])[0]
# Addr (only valid for 1-wire?)
mem_addr_raw = struct.unpack("B"*8, payload[6:14])
mem_addr = ""
for m in mem_addr_raw:
mem_addr += "{:02X}".format(m)
if (not self.get_mem(mem_id)):
if mem_type == MemoryElement.TYPE_1W:
mem = OWElement(id=mem_id, type=mem_type, size=mem_size, addr=mem_addr, mem_handler=self)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
self._ow_mems_left_to_update.append(mem.id)
elif mem_type == MemoryElement.TYPE_I2C:
mem = I2CElement(id=mem_id, type=mem_type, size=mem_size, mem_handler=self)
logger.info(mem)
self.mem_read_cb.add_callback(mem.new_data)
self.mem_write_cb.add_callback(mem.write_done)
else:
mem = MemoryElement(id=mem_id, type=mem_type, size=mem_size, mem_handler=self)
logger.info(mem)
self.mems.append(mem)
self.mem_added_cb.call(mem)
#logger.info(mem)
self._fetch_id = mem_id + 1
if self.nbr_of_mems - 1 >= self._fetch_id:
logger.info("Requesting information about memory {}".format(self._fetch_id))
pk = CRTPPacket()
pk.set_header(CRTPPort.MEM, CHAN_INFO)
pk.data = (CMD_INFO_DETAILS, self._fetch_id)
self.cf.send_packet(pk, expected_reply=(CMD_INFO_DETAILS, self._fetch_id))
else:
logger.info("Done getting all the memories, start reading the OWs")
ows = self.get_mems(MemoryElement.TYPE_1W)
# If there are any OW mems start reading them, otherwise we are done
for ow_mem in self.get_mems(MemoryElement.TYPE_1W):
ow_mem.update(self._mem_update_done)
if len (self.get_mems(MemoryElement.TYPE_1W)) == 0:
if self._refresh_callback:
self._refresh_callback()
self._refresh_callback = None
if chan == CHAN_WRITE:
id = cmd
(addr, status) = struct.unpack("<IB", payload[0:5])
logger.info("WRITE: Mem={}, addr=0x{:X}, status=0x{}".format(id, addr, status))
# Find the read request
if id in self._write_requests:
wreq = self._write_requests[id]
if status == 0:
if wreq.write_done(addr):
self._write_requests.pop(id, None)
self.mem_write_cb.call(wreq.mem, wreq.addr)
else:
wreq.resend()
if chan == CHAN_READ:
id = cmd
(addr, status) = struct.unpack("<IB", payload[0:5])
data = struct.unpack("B"*len(payload[5:]), payload[5:])
logger.info("READ: Mem={}, addr=0x{:X}, status=0x{}, data={}".format(id, addr, status, data))
# Find the read request
if id in self._read_requests:
logger.info("READING: We are still interested in request for mem {}".format(id))
rreq = self._read_requests[id]
if status == 0:
if rreq.add_data(addr, payload[5:]):
self._read_requests.pop(id, None)
self.mem_read_cb.call(rreq.mem, rreq.addr, rreq.data)
else:
rreq.resend()
| gpl-2.0 | -7,623,506,746,200,751,000 | 36.417683 | 113 | 0.543673 | false |
nwiizo/workspace_2017 | environmental/mininet/examples/bind.py | 1 | 2310 | #!/usr/bin/python
"""
bind.py: Bind mount example
This creates hosts with private directories that the user specifies.
These hosts may have persistent directories that will be available
across multiple mininet session, or temporary directories that will
only last for one mininet session. To specify a persistent
directory, add a tuple to a list of private directories:
[ ( 'directory to be mounted on', 'directory to be mounted' ) ]
String expansion may be used to create a directory template for
each host. To do this, add a %(name)s in place of the host name
when creating your list of directories:
[ ( '/var/run', '/tmp/%(name)s/var/run' ) ]
If no persistent directory is specified, the directories will default
to temporary private directories. To do this, simply create a list of
directories to be made private. A tmpfs will then be mounted on them.
You may use both temporary and persistent directories at the same
time. In the following privateDirs string, each host will have a
persistent directory in the root filesystem at
"/tmp/(hostname)/var/run" mounted on "/var/run". Each host will also
have a temporary private directory mounted on "/var/log".
[ ( '/var/run', '/tmp/%(name)s/var/run' ), '/var/log' ]
This example has both persistent directories mounted on '/var/log'
and '/var/run'. It also has a temporary private directory mounted
on '/var/mn'
"""
from mininet.net import Mininet
from mininet.node import Host
from mininet.cli import CLI
from mininet.topo import SingleSwitchTopo
from mininet.log import setLogLevel, info
from functools import partial
# Sample usage
def testHostWithPrivateDirs():
"Test bind mounts"
topo = SingleSwitchTopo( 10 )
privateDirs = [ ( '/var/log', '/tmp/%(name)s/var/log' ),
( '/var/run', '/tmp/%(name)s/var/run' ),
'/var/mn' ]
host = partial( Host,
privateDirs=privateDirs )
net = Mininet( topo=topo, host=host )
net.start()
directories = [ directory[ 0 ] if isinstance( directory, tuple )
else directory for directory in privateDirs ]
info( 'Private Directories:', directories, '\n' )
CLI( net )
net.stop()
if __name__ == '__main__':
setLogLevel( 'info' )
testHostWithPrivateDirs()
info( 'Done.\n')
| mit | 4,247,840,918,116,562,400 | 33.477612 | 69 | 0.693939 | false |
christianheinrichs/learning-abop | chapter-13/backup_ver4.py | 1 | 1724 | #!/usr/bin/env python2
import os
import time
# 1. The files and directories to be backed up are specified in a list.
# Example on Windows:
# source = ['"C:\\My Documents"', 'C:\\Code']
# Example on Mac OS X and Linux:
source = ['/Users/swa/notes']
# Notice we had to use double quotes inside the string
# for names with spaces in it.
# 2. The backup must be stored in a main backup directory
# Example on Windows:
# target_dir = 'E:\\Backup'
# Example on Mac OS X and Linux:
target_dir = '/Users/swa/backup'
# Remember to change this to which folder you will be using
# Create target directory if it is not present
if not os.path.exists(target_dir):
os.mkdir(target_dir) # Make directory
# 3. The files are backed up into a zip file.
# 4. The current day is the name of the subdirectory in the main directory.
today = target_dir + os.sep + time.strftime('%Y%m%d')
# The current time is the name of the zip archive.
now = time.strftime('%H%M%S')
# Take a comment from the user to create the name of the zip file
comment = raw_input('Enter a comment --> ')
# Check if a comment was entered
if len(comment) == 0:
target = today + os.sep + now + '.zip'
else:
target = today + os.sep + now + '_' + \
comment.replace(' ', '_') + '.zip'
# Create the subdirectory if it isn't already there
if not os.path.exists(today):
os.mkdir(today)
print 'Successfully created directory', today
# 5. We use the zip command to put the files in a zip archive
zip_command = "zip -r {0} {1}".format(target, ' '.join(source))
# Run the backup
print "Zip command is:"
print zip_command
print "Running:"
if os.system(zip_command) == 0:
print 'Successful backup to', target
else:
print 'Backup FAILED'
| gpl-3.0 | 2,480,055,453,355,750,000 | 29.785714 | 75 | 0.682135 | false |
matthewshim-ms/Recognizers-Text | Python/libraries/recognizers-date-time/recognizers_date_time/date_time/chinese/set_extractor.py | 1 | 1616 | from typing import List, Pattern
from datetime import datetime
import regex
from recognizers_text import ExtractResult
from ..extractors import DateTimeExtractor
from ..utilities import Token, merge_all_tokens
from ..base_set import BaseSetExtractor
from .set_extractor_config import ChineseSetExtractorConfiguration
class ChineseSetExtractor(BaseSetExtractor):
def __init__(self):
super().__init__(ChineseSetExtractorConfiguration())
def extract(self, source: str, reference: datetime = None) -> List[ExtractResult]:
if reference is None:
reference = datetime.now()
tokens: List[Token] = list()
tokens.extend(self.match_each_unit(source))
tokens.extend(self.match_each_duration(source, reference))
tokens.extend(self.match_each_specific(self.config.time_extractor, self.config.each_day_regex, source, reference))
tokens.extend(self.match_each_specific(self.config.date_extractor, self.config.each_prefix_regex, source, reference))
tokens.extend(self.match_each_specific(self.config.date_time_extractor, self.config.each_prefix_regex, source, reference))
result = merge_all_tokens(tokens, source, self.extractor_type_name)
return result
def match_each_specific(self, extractor: DateTimeExtractor, pattern: Pattern, source: str, reference: datetime) -> List[Token]:
for er in extractor.extract(source, reference):
before_str = source[0:er.start]
match = regex.search(pattern, before_str)
if match:
yield Token(match.start(), er.start + er.length)
| mit | 8,538,081,089,762,458,000 | 46.529412 | 131 | 0.711634 | false |
b3ngmann/gratia-sensei | lci/settings.py | 1 | 4337 | """
Django settings for lci project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from os import environ
from urlparse import urlparse
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'l=k8(5fwnb&)ovdcos6#ie5m*qt&qvpf+iceq1y(^xh+7cv7k$'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = ['*']
ADMINS = (
('b3ngmann', '[email protected]'),
)
MANAGERS = ADMINS
# Application definition
DEFAULT_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
)
THIRD_PARTY_APPS = (
'userena',
'guardian',
'easy_thumbnails',
)
LOCAL_APPS = (
'accounts',
'church',
)
INSTALLED_APPS = DEFAULT_APPS + THIRD_PARTY_APPS + LOCAL_APPS
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
#userena language add-on
'userena.middleware.UserenaLocaleMiddleware',
)
ROOT_URLCONF = 'lci.urls'
WSGI_APPLICATION = 'lci.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {'default' : dj_database_url.config(default=os.environ.get('LCI_DATABASE_URL'))}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
TEMPLATE_DIRS = (
'%s/templates' % BASE_DIR,
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
"django.core.context_processors.i18n",
"django.core.context_processors.media",
'django.core.context_processors.request',
)
AUTHENTICATION_BACKENDS = (
'userena.backends.UserenaAuthenticationBackend',
'guardian.backends.ObjectPermissionBackend',
'django.contrib.auth.backends.ModelBackend',
)
EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = os.environ.get('PERSONAL_EMAIL')
EMAIL_HOST_PASSWORD = os.environ.get('PERSONAL_PASSWORD')
ANONYMOUS_USER_ID = -1
AUTH_PROFILE_MODULE = 'accounts.Profile'
SITE_ID = 1
LOGIN_REDIRECT_URL = '/accounts/%(username)s/'
LOGIN_URL = '/accounts/signin/'
LOGOUT_URL = '/accounts/signout/'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
} | mit | -5,906,864,589,107,685,000 | 22.966851 | 92 | 0.688033 | false |
dhongu/l10n-romania | l10n_ro_account_report/report/activity_statement.py | 1 | 12059 | # Copyright 2018 Eficent Business and IT Consulting Services S.L.
# (http://www.eficent.com)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models, _
from collections import defaultdict
from datetime import datetime, timedelta
from odoo.tools.misc import DEFAULT_SERVER_DATE_FORMAT
class ActivityStatement(models.AbstractModel):
"""Model of Activity Statement"""
_name = 'report.l10n_ro_account_report.activity_statement'
def _get_invoice_address(self, part):
inv_addr_id = part.address_get(['invoice']).get('invoice', part.id)
return self.env["res.partner"].browse(inv_addr_id)
def _format_date_to_partner_lang(self, date, date_format=DEFAULT_SERVER_DATE_FORMAT):
if isinstance(date, str):
date = datetime.strptime(date, DEFAULT_SERVER_DATE_FORMAT)
return date.strftime(date_format) if date else ''
def _get_line_currency_defaults(self, currency_id, currencies, balance_forward):
if currency_id not in currencies:
# This will only happen if currency is inactive
currencies[currency_id] = (self.env['res.currency'].browse(currency_id))
return (
{
'lines': [],
'balance_forward': balance_forward,
'amount_due': balance_forward,
},
currencies
)
@api.multi
def get_report_values(self, docids, data):
"""
@return: returns a dict of parameters to pass to qweb report.
the most important pair is {'data': res} which contains all
the data for each partner. It is structured like:
{partner_id: {
'start': date string,
'end': date_string,
'today': date_string
'currencies': {
currency_id: {
'lines': [{'date': date string, ...}, ...],
'balance_forward': float,
'amount_due': float,
'buckets': {
'p1': float, 'p2': ...
}
}
}
}
"""
if not data:
data = {}
if 'company_id' not in data:
wiz = self.env["activity.statement.wizard"].with_context(active_ids=docids, model="res.partner")
data.update(wiz.create({})._prepare_statement())
data['amount_field'] = 'amount'
company_id = data['company_id']
partner_ids = data['partner_ids']
date_start = data.get('date_start')
if date_start and isinstance(date_start, str):
date_start = fields.Datetime.from_string(date_start)
date_end = data['date_end']
if isinstance(date_end, str):
date_end = fields.Datetime.from_string(date_end)
account_type = data['account_type']
today = fields.Date.today()
amount_field = data.get('amount_field', 'amount')
# There should be relatively few of these, so to speed performance
# we cache them - default needed if partner lang not set
self._cr.execute("""
SELECT p.id, l.date_format
FROM res_partner p LEFT JOIN res_lang l ON p.lang=l.code
WHERE p.id IN %(partner_ids)s
""", {"partner_ids": tuple(partner_ids)})
date_formats = {r[0]: r[1] for r in self._cr.fetchall()}
default_fmt = self.env["res.lang"]._lang_get(self.env.user.lang).date_format
currencies = {x.id: x for x in self.env['res.currency'].search([])}
res = {}
# get base data
lines = self._get_account_display_lines(company_id, partner_ids, date_start, date_end, account_type)
balances_forward = self._get_account_initial_balance(company_id, partner_ids, date_start, account_type)
# organise and format for report
format_date = self._format_date_to_partner_lang
partners_to_remove = set()
for partner_id in partner_ids:
res[partner_id] = {
'today': format_date(today, date_formats.get(partner_id, default_fmt)),
'start': format_date(date_start, date_formats.get(partner_id, default_fmt)),
'end': format_date(date_end, date_formats.get(partner_id, default_fmt)),
'currencies': {},
}
currency_dict = res[partner_id]['currencies']
for line in balances_forward.get(partner_id, []):
currency_dict[line['currency_id']], currencies = (
self._get_line_currency_defaults(line['currency_id'], currencies, line['balance'])
)
for line in lines[partner_id]:
if line['currency_id'] not in currency_dict:
currency_dict[line['currency_id']], currencies = (
self._get_line_currency_defaults(line['currency_id'], currencies, 0.0))
line_currency = currency_dict[line['currency_id']]
if not line['blocked']:
line_currency['amount_due'] += line[amount_field]
line['balance'] = line_currency['amount_due']
line['date'] = format_date(line['date'], date_formats.get(partner_id, default_fmt))
line['date_maturity'] = format_date(line['date_maturity'], date_formats.get(partner_id, default_fmt))
line_currency['lines'].append(line)
if len(partner_ids) > 1:
values = currency_dict.values()
if not any([v['lines'] or v['balance_forward'] for v in values]):
if data["filter_non_due_partners"]:
partners_to_remove.add(partner_id)
continue
else:
res[partner_id]['no_entries'] = True
if data["filter_negative_balances"]:
if not all([v['amount_due'] >= 0.0 for v in values]):
partners_to_remove.add(partner_id)
for partner in partners_to_remove:
del res[partner]
partner_ids.remove(partner)
return {
'doc_ids': partner_ids,
'doc_model': 'res.partner',
'docs': self.env['res.partner'].browse(partner_ids),
'data': res,
'company': self.env['res.company'].browse(company_id),
'Currencies': currencies,
'account_type': account_type,
'get_inv_addr': self._get_invoice_address,
}
def _initial_balance_sql_q1(self, partners, date_start, account_type):
return str(self._cr.mogrify("""
SELECT l.partner_id, l.currency_id, l.company_id,
CASE WHEN l.currency_id is not null AND l.amount_currency > 0.0
THEN sum(l.amount_currency)
ELSE sum(l.debit)
END as debit,
CASE WHEN l.currency_id is not null AND l.amount_currency < 0.0
THEN sum(l.amount_currency * (-1))
ELSE sum(l.credit)
END as credit
FROM account_move_line l
JOIN account_account_type at ON (at.id = l.user_type_id)
JOIN account_move m ON (l.move_id = m.id)
WHERE l.partner_id IN %(partners)s AND at.type = %(account_type)s
AND l.date < %(date_start)s AND not l.blocked
GROUP BY l.partner_id, l.currency_id, l.amount_currency,
l.company_id
""", locals()), "utf-8")
def _initial_balance_sql_q2(self, company_id):
return str(self._cr.mogrify("""
SELECT Q1.partner_id, debit-credit AS balance,
COALESCE(Q1.currency_id, c.currency_id) AS currency_id
FROM Q1
JOIN res_company c ON (c.id = Q1.company_id)
WHERE c.id = %(company_id)s
""", locals()), "utf-8")
def _get_account_initial_balance(self, company_id, partner_ids, date_start, account_type):
balance_start = defaultdict(list)
partners = tuple(partner_ids)
# pylint: disable=E8103
self.env.cr.execute("""WITH Q1 AS (%s), Q2 AS (%s)
SELECT partner_id, currency_id, balance
FROM Q2""" % (self._initial_balance_sql_q1(partners, date_start, account_type),
self._initial_balance_sql_q2(company_id)))
for row in self.env.cr.dictfetchall():
balance_start[row.pop('partner_id')].append(row)
return balance_start
def _display_lines_sql_q1(self, partners, date_start, date_end, account_type):
return str(self._cr.mogrify("""
SELECT m.name AS move_id, l.partner_id, l.date,
CASE WHEN (aj.type IN ('sale', 'purchase'))
THEN l.name
ELSE '/'
END as name,
CASE WHEN (aj.type IN ('sale', 'purchase'))
THEN l.ref
WHEN (aj.type in ('bank', 'cash'))
THEN 'Payment'
ELSE ''
END as ref,
l.blocked, l.currency_id, l.company_id,
CASE WHEN (l.currency_id is not null AND l.amount_currency > 0.0)
THEN sum(l.amount_currency)
ELSE sum(l.debit)
END as debit,
CASE WHEN (l.currency_id is not null AND l.amount_currency < 0.0)
THEN sum(l.amount_currency * (-1))
ELSE sum(l.credit)
END as credit,
CASE WHEN l.date_maturity is null
THEN l.date
ELSE l.date_maturity
END as date_maturity
FROM account_move_line l
JOIN account_account_type at ON (at.id = l.user_type_id)
JOIN account_move m ON (l.move_id = m.id)
JOIN account_journal aj ON (l.journal_id = aj.id)
WHERE l.partner_id IN %(partners)s
AND at.type = %(account_type)s
AND %(date_start)s <= l.date
AND l.date <= %(date_end)s
GROUP BY l.partner_id, m.name, l.date, l.date_maturity,
CASE WHEN (aj.type IN ('sale', 'purchase'))
THEN l.name
ELSE '/'
END,
CASE WHEN (aj.type IN ('sale', 'purchase'))
THEN l.ref
WHEN (aj.type in ('bank', 'cash'))
THEN 'Payment'
ELSE ''
END,
l.blocked, l.currency_id, l.amount_currency, l.company_id
""", locals()), "utf-8")
def _display_lines_sql_q2(self, company_id):
return str(self._cr.mogrify("""
SELECT Q1.partner_id, Q1.move_id, Q1.date, Q1.date_maturity,
Q1.name, Q1.ref, Q1.debit, Q1.credit,
Q1.debit-Q1.credit as amount, Q1.blocked,
COALESCE(Q1.currency_id, c.currency_id) AS currency_id
FROM Q1
JOIN res_company c ON (c.id = Q1.company_id)
WHERE c.id = %(company_id)s
""", locals()), "utf-8")
def _get_account_display_lines(self, company_id, partner_ids, date_start, date_end, account_type):
res = dict(map(lambda x: (x, []), partner_ids))
partners = tuple(partner_ids)
# pylint: disable=E8103
lines_sql_q1 = self._display_lines_sql_q1(partners, date_start, date_end, account_type)
lines_sql_q2 = self._display_lines_sql_q2(company_id)
self.env.cr.execute("""
WITH Q1 AS (%s),
Q2 AS (%s)
SELECT partner_id, move_id, date, date_maturity, name, ref, debit, credit, amount, blocked, currency_id
FROM Q2
ORDER BY date, date_maturity, move_id
""" % (lines_sql_q1, lines_sql_q2))
for row in self.env.cr.dictfetchall():
res[row.pop('partner_id')].append(row)
return res
| agpl-3.0 | 2,818,139,441,633,559,600 | 43.172161 | 117 | 0.533212 | false |
mirusresearch/staticdhcpd | staticDHCPd/staticdhcpdlib/web/__init__.py | 1 | 11143 | # -*- encoding: utf-8 -*-
"""
staticdhcpdlib.web
==================
Defines web-registration methods and structures.
Legal
-----
This file is part of staticDHCPd.
staticDHCPd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
(C) Neil Tallim, 2014 <[email protected]>
"""
import collections
import logging
import threading
import functions
_logger = logging.getLogger('web')
_web_lock = threading.Lock()
_web_headers = []
_web_dashboard = []
_web_methods = {}
_WebDashboardElement = collections.namedtuple("WebDashboardElement", ('ordering', 'module', 'name', 'callback'))
"""
A component of the dashboard.
.. py:attribute:: ordering
An integer used to sort this element against others
.. py:attribute:: module
The name of the module to which this element belongs
.. py:attribute:: name
The name under which to display the element
.. py:attribute:: callback
The method to invoke when rendering this element
"""
_WebMethod = collections.namedtuple("WebMethod", (
'module', 'name', 'hidden', 'secure', 'confirm', 'display_mode', 'cacheable', 'callback'
))
"""
An invokable method.
.. py:attribute:: module
The name of the module to which this method belongs
.. py:attribute:: name
The name under which to display the method
.. py:attribute:: hidden
Whether the method should be advertised on the dashboard
.. py:attribute:: secure
Whether the method requires authorization
.. py:attribute:: confirm
Whether the method, when invoked from the dashboard, requires confirmation
.. py:attribute:: display_mode
Whether the method's callback presents information to display as part of
the dashboard, on its own, or as raw bytes
.. py:attribute:: cacheable
Whether the method's response can be cached
.. py:attribute:: callback
The method to invoke when rendering this element
"""
#Method-rendering constants
WEB_METHOD_DASHBOARD = 1 #: The content is rendered before the dashboard.
WEB_METHOD_TEMPLATE = 2 #: The content is rendered in the same container that would normally show the dashboard, but no dashboard elements are present.
WEB_METHOD_RAW = 3 #: The content is presented exactly as returned, identified by the given MIME-type.
def registerHeaderCallback(callback):
    """
    Installs an element in the headers; at most one instance of any given
    ``callback`` will be accepted.

    :param callable callback: Must accept the parameters `path`, `queryargs`,
                              `mimetype`, `data`, and `headers`, where
                              `mimetype` and `data` may be None; `queryargs`
                              is a dictionary of parsed query-string items,
                              with values expressed as lists of strings;
                              `headers` is a dictionary-like object. It must
                              return data as a string, formatted as XHTML, to
                              be embedded inside of <head/>, or None to
                              suppress inclusion.
    """
    with _web_lock:
        if callback in _web_headers:
            _logger.error("%(callback)r is already registered" % {'callback': callback,})
            return
        _web_headers.append(callback)
        _logger.debug("Registered header %(callback)r" % {'callback': callback,})
def unregisterHeaderCallback(callback):
    """
    Removes a header element.

    :param callable callback: The element to be removed.
    :return bool: True if an element was removed.
    """
    with _web_lock:
        try:
            _web_headers.remove(callback)
        except ValueError:
            _logger.error("header %(callback)r is not registered" % {'callback': callback,})
            return False
        else:
            # Bugfix: successful removal was previously logged at ERROR
            # level; DEBUG matches the convention used by the other
            # register/unregister functions in this module.
            _logger.debug("header %(callback)r unregistered" % {'callback': callback,})
            return True
def retrieveHeaderCallbacks():
    """
    Enumerates header callback elements.

    :return tuple: All header callbacks, in registration order.
    """
    with _web_lock:
        snapshot = tuple(_web_headers)
    return snapshot
def registerDashboardCallback(module, name, callback, ordering=None):
    """
    Installs an element in the dashboard; at most one instance of any given
    ``callback`` will be accepted.

    :param basestring module: The name of the module to which this element
                              belongs.
    :param basestring name: The name under which to display the element.
    :param callable callback: Must accept the parameters `path`, `queryargs`,
                              `mimetype`, `data`, and `headers`, where
                              `mimetype` and `data` may be None; `queryargs`
                              is a dictionary of parsed query-string items,
                              with values expressed as lists of strings;
                              `headers` is a dictionary-like object. It must
                              return data as a string, formatted as XHTML, to
                              be embedded inside of a <div/>, or None to
                              suppress inclusion.
    :param int ordering: A number that controls where this element will appear
                         in relation to others. If not specified, the value
                         will be that of the highest number plus one, placing
                         it at the end; negatives are valid.
    """
    with _web_lock:
        for element in _web_dashboard:
            if element.callback is callback:
                _logger.error("%(element)r is already registered" % {'element': element,})
                break
        else:
            if ordering is None:
                # Default to the end of the (already sorted) list
                ordering = _web_dashboard[-1].ordering + 1 if _web_dashboard else 0
            element = _WebDashboardElement(ordering, functions.sanitise(module), functions.sanitise(name), callback)
            _web_dashboard.append(element)
            # Bugfix: sort only on `ordering`; the previous bare tuple-sort
            # fell through to comparing `callback` objects (and possibly
            # None module/name values) on ties, which raises TypeError
            # under Python 3. The sort is stable, so tied elements keep
            # their registration order.
            _web_dashboard.sort(key=lambda entry: entry.ordering)
            _logger.debug("Registered dashboard element %(element)r" % {'element': element,})
def unregisterDashboardCallback(callback):
    """
    Removes a dashboard element.

    :param callable callback: The element to be removed.
    :return bool: True if an element was removed.
    """
    with _web_lock:
        for (position, element) in enumerate(_web_dashboard):
            if element.callback is not callback:
                continue
            del _web_dashboard[position]
            _logger.debug("Unregistered dashboard element %(element)r" % {'element': element,})
            return True
        _logger.error("Dashboard callback %(callback)r is not registered" % {'callback': callback,})
        return False
def retrieveDashboardCallbacks():
    """
    Enumerates dashboard callback elements.

    :return tuple: All dashboard callbacks, in display order.
    """
    with _web_lock:
        snapshot = tuple(_web_dashboard)
    return snapshot
def registerMethodCallback(path, callback, cacheable=False, hidden=True, secure=False, module=None, name=None, confirm=False, display_mode=WEB_METHOD_RAW):
    """
    Installs a webservice method; at most one instance of ``path`` will be
    accepted.

    :param basestring path: The location at which the service may be called,
        like "/ca/uguu/puukusoft/staticDHCPd/extension/stats/histograph.csv".
    :param callable callback: Must accept the parameters `path`, `queryargs`,
                              `mimetype`, `data`, and `headers`, where
                              `mimetype` and `data` may be None; `queryargs`
                              is a dictionary of parsed query-string items,
                              with values expressed as lists of strings;
                              `headers` is a dictionary-like object. It must
                              return a tuple of (mimetype, data, headers),
                              with data being a string or bytes-like object.
    :param bool cacheable: Whether the client is allowed to cache the method's
                           content.
    :param bool hidden: Whether to render a link in the side-bar.
    :param bool secure: Whether authentication will be required before this
                        method can be called.
    :param basestring module: The name of the module to which this element
                              belongs.
    :param basestring name: The name under which to display the element.
    :param bool confirm: Adds JavaScript validation to ask the user if they're
                         sure they know what they're doing before the method
                         will be invoked, if not `hidden`.
    :param display_mode: One of the WEB_METHOD_* constants.
    """
    with _web_lock:
        if path in _web_methods:
            _logger.error("'%(path)s' is already registered" % {'path': path,})
            return
        method = _WebMethod(
            functions.sanitise(module), functions.sanitise(name),
            hidden, secure, confirm, display_mode, cacheable, callback
        )
        _web_methods[path] = method
        _logger.debug("Registered method %(method)r at %(path)s" % {'method': method, 'path': path,})
def unregisterMethodCallback(path):
    """
    Removes a method element.

    :param basestring path: The element to be removed.
    :return bool: True if an element was removed.
    """
    with _web_lock:
        # Membership test and delete are atomic here because the lock is held
        if path not in _web_methods:
            _logger.error("'%(path)s' is not registered" % {'path': path,})
            return False
        del _web_methods[path]
        _logger.debug("Unregistered method %(path)s" % {'path': path,})
        return True
def retrieveMethodCallback(path):
    """
    Retrieves a method callback element.

    :return callable: The requested method, or None if unbound.
    """
    with _web_lock:
        method = _web_methods.get(path)
    return method
def retrieveVisibleMethodCallbacks():
    """
    Enumerates method callback elements.

    :return tuple: All non-hidden method callbacks, as (`element`, `path`)
                   tuples, in lexically sorted order.
    """
    with _web_lock:
        visible = [
            (element, path)
            for (path, element) in _web_methods.items()
            if not element.hidden
        ]
        return tuple(sorted(visible))
| gpl-3.0 | -6,151,364,820,245,189,000 | 37.164384 | 155 | 0.60612 | false |
caioariede/openimob | dashboard/migrations/0001_initial.py | 1 | 3192 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import taggit.managers
from django.conf import settings
import wagtail.wagtailadmin.taggable
import wagtail.wagtailimages.models
class Migration(migrations.Migration):
    """Initial migration: creates the CustomImage and CustomRendition models
    (custom replacements for Wagtail's Image/Rendition)."""
    # Must run after taggit, the swappable user model, and wagtailimages.
    dependencies = [
        ('taggit', '0002_auto_20150616_2121'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('wagtailimages', '0008_image_created_at_index'),
    ]
    operations = [
        migrations.CreateModel(
            name='CustomImage',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('title', models.CharField(max_length=255, null=True, blank=True, verbose_name='Title')),
                ('file', models.ImageField(height_field='height', upload_to=wagtail.wagtailimages.models.get_upload_to, verbose_name='File', width_field='width')),
                ('width', models.IntegerField(verbose_name='Width', editable=False)),
                ('height', models.IntegerField(verbose_name='Height', editable=False)),
                ('created_at', models.DateTimeField(db_index=True, verbose_name='Created at', auto_now_add=True)),
                # Optional focal-point rectangle used for cropping
                ('focal_point_x', models.PositiveIntegerField(null=True, blank=True)),
                ('focal_point_y', models.PositiveIntegerField(null=True, blank=True)),
                ('focal_point_width', models.PositiveIntegerField(null=True, blank=True)),
                ('focal_point_height', models.PositiveIntegerField(null=True, blank=True)),
                ('file_size', models.PositiveIntegerField(null=True, editable=False)),
                # Custom field added on top of the stock Wagtail image fields
                ('brightness', models.IntegerField(default=0)),
                ('tags', taggit.managers.TaggableManager(blank=True, to='taggit.Tag', through='taggit.TaggedItem', help_text=None, verbose_name='Tags')),
                ('uploaded_by_user', models.ForeignKey(null=True, blank=True, to=settings.AUTH_USER_MODEL, editable=False, verbose_name='Uploaded by user')),
            ],
            options={
                'abstract': False,
            },
            bases=(models.Model, wagtail.wagtailadmin.taggable.TagSearchable),
        ),
        migrations.CreateModel(
            name='CustomRendition',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False, auto_created=True, verbose_name='ID')),
                ('file', models.ImageField(height_field='height', upload_to='images', width_field='width')),
                ('width', models.IntegerField(editable=False)),
                ('height', models.IntegerField(editable=False)),
                ('focal_point_key', models.CharField(max_length=255, blank=True, default='', editable=False)),
                ('filter', models.ForeignKey(to='wagtailimages.Filter', related_name='+')),
                ('image', models.ForeignKey(to='dashboard.CustomImage', related_name='renditions')),
            ],
        ),
        # One rendition per (image, filter, focal point) combination
        migrations.AlterUniqueTogether(
            name='customrendition',
            unique_together=set([('image', 'filter', 'focal_point_key')]),
        ),
    ]
]
| bsd-3-clause | -2,819,622,076,555,435,000 | 53.101695 | 163 | 0.614975 | false |
lubosz/periphilia | src/application.py | 1 | 2294 | #!/usr/bin/python3
from IPython import embed
from gi.repository import Gst
from gi.repository import Gtk
import subprocess
from gi.repository.Gtk import Stack, StackTransitionType
from gi.repository import Gtk, Gio, GLib, Gdk, Notify
from .window import Window
class Application(Gtk.Application):
    """GTK application entry point for Gamepads: sets up CSS styling,
    the app menu, notifications, and the single main Window."""
    def __init__(self):
        Gtk.Application.__init__(self,
                                 application_id='org.gnome.Gamepads',
                                 flags=Gio.ApplicationFlags.FLAGS_NONE)
        GLib.set_application_name("Gamepads")
        GLib.set_prgname('gnome-gamepads')
        # Load the bundled CSS from the GResource and apply it screen-wide
        cssProviderFile = Gio.File.new_for_uri(
            'resource:///org.gnome.Gamepads/application.css')
        cssProvider = Gtk.CssProvider()
        cssProvider.load_from_file(cssProviderFile)
        screen = Gdk.Screen.get_default()
        styleContext = Gtk.StyleContext()
        styleContext.add_provider_for_screen(screen, cssProvider,
                                             Gtk.STYLE_PROVIDER_PRIORITY_USER)
        # Main window is created lazily in do_activate()
        self._window = None
    def build_app_menu(self):
        """Load the app menu from resources and wire up its actions."""
        builder = Gtk.Builder()
        builder.add_from_resource(
            '/org.gnome.Gamepads/app-menu.ui')
        menu = builder.get_object('app-menu')
        self.set_app_menu(menu)
        aboutAction = Gio.SimpleAction.new('about', None)
        aboutAction.connect('activate', self.about)
        self.add_action(aboutAction)
        quitAction = Gio.SimpleAction.new('quit', None)
        quitAction.connect('activate', self.quit)
        self.add_action(quitAction)
    def about(self, action, param):
        """Show the About dialog (built from the bundled UI file)."""
        builder = Gtk.Builder()
        builder.add_from_resource('/org.gnome.Gamepads/AboutDialog.ui')
        about = builder.get_object('about_dialog')
        about.set_transient_for(self._window)
        about.connect("response", self.about_response)
        about.show()
    def about_response(self, dialog, response):
        # Close the About dialog regardless of which response was chosen
        dialog.destroy()
    def do_startup(self):
        print("Starting up")
        Gtk.Application.do_startup(self)
        Notify.init("Gamepads")
        self.build_app_menu()
    def quit(self, action=None, param=None):
        # Destroying the window ends the application's last-window lifecycle
        self._window.destroy()
    def do_activate(self):
        if not self._window:
            self._window = Window(self)
        self._window.present()
| gpl-3.0 | -2,480,795,425,693,209,600 | 31.309859 | 71 | 0.62816 | false |
foxmask/django-th | django_th/tests/test_publishing_limit.py | 1 | 1300 | # coding: utf-8
from django.test import TestCase
from django.conf import settings
from django_th.publishing_limit import PublishingLimit
class PublishingLimitTestCase(TestCase):
    """Exercises PublishingLimit.get_data() against the project settings."""

    def test_settings(self):
        """The publishing limit must be declared in settings.DJANGO_TH."""
        self.assertTrue('publishing_limit' in settings.DJANGO_TH)

    def test_get_data(self):
        """An empty cache produces no services."""
        services = PublishingLimit.get_data("th_rss", {}, 1)
        self.assertTrue(len(services) == 0)

    def test_get_data2(self):
        """A populated cache dict produces services."""
        services = PublishingLimit.get_data("th_rss", {'th_rss_1': 'foobar'}, 1)
        self.assertTrue(len(services) > 0)

    def test_get_data3(self):
        """A cache given as a list of dicts also produces services."""
        cache_data = [{'th_rss_%d' % index: 'foobar'} for index in range(1, 7)]
        services = PublishingLimit.get_data("th_rss", cache_data, 1)
        self.assertTrue(len(services) > 0)
| bsd-3-clause | -8,438,425,074,195,135,000 | 29.952381 | 80 | 0.615385 | false |
ales-erjavec/orange-canvas | orangecanvas/gui/tests/test_toolgrid.py | 1 | 2937 | from AnyQt.QtWidgets import QAction, QToolButton
from .. import test
from ..toolgrid import ToolGrid
class TestToolGrid(test.QAppTestCase):
    """Tests ToolGrid's action insertion/removal against both its logical
    ordering (actions()) and the actual on-screen button layout."""
    def test_tool_grid(self):
        w = ToolGrid()
        w.show()
        self.app.processEvents()
        def buttonsOrderedVisual():
            # Process layout events so the buttons have right positions
            self.app.processEvents()
            buttons = w.findChildren(QToolButton)
            # Sort by on-screen position: top-to-bottom, then left-to-right
            return list(sorted(buttons, key=lambda b: (b.y(), b.x())))
        def buttonsOrderedLogical():
            return list(map(w.buttonForAction, w.actions()))
        def assertOrdered():
            # The widget's logical action order must match the visual layout
            self.assertSequenceEqual(buttonsOrderedLogical(),
                                     buttonsOrderedVisual())
        action_a = QAction("A", w)
        action_b = QAction("B", w)
        action_c = QAction("C", w)
        action_d = QAction("D", w)
        # Mixed addAction/insertAction (by index and by 'before' action)
        w.addAction(action_b)
        w.insertAction(0, action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b])
        assertOrdered()
        w.addAction(action_d)
        w.insertAction(action_d, action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()
        w.removeAction(action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()
        w.removeAction(action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_b, action_d])
        assertOrdered()
        w.insertAction(0, action_a)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()
        # Changing the column count must preserve the action ordering
        w.setColumnCount(2)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_d])
        assertOrdered()
        w.insertAction(2, action_c)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()
        w.clear()
        # test no 'before' action edge case
        w.insertAction(0, action_a)
        self.assertIs(action_a, w.actions()[0])
        w.insertAction(1, action_b)
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b])
        w.clear()
        # Bulk assignment via setActions
        w.setActions([action_a, action_b, action_c, action_d])
        self.assertSequenceEqual(w.actions(),
                                 [action_a, action_b, action_c, action_d])
        assertOrdered()
        triggered_actions = []
        def p(action):
            print(action.text())
        w.actionTriggered.connect(p)
        w.actionTriggered.connect(triggered_actions.append)
        action_a.trigger()
        w.show()
        # NOTE(review): enters the Qt event loop; the test ends when the
        # window is closed interactively.
        self.app.exec_()
cowhi/HFO | experiments/agents/adhocvisit_backup.py | 1 | 1171 | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 9 16:36:47 2016
@author: Felipe Leno
Loads everything from adhoc.py, this class only defines parameters for the visit-based
ad hoc advising
"""
from adhoc import AdHoc
import math
class AdHocVisit(AdHoc):
    """Visit-based variant of the AdHoc advising agent.

    Inherits all behaviour from AdHoc and only defines the parameters for
    visit-based importance midpoints.
    """
    # Enum for importance metrics
    VISIT_IMPORTANCE, Q_IMPORTANCE = range(2)

    def __init__(self, budgetAsk=1000, budgetAdvise=1000, stateImportanceMetric=VISIT_IMPORTANCE, seed=12345, port=12345, epsilon=0.1, alpha=0.1, gamma=0.9, decayRate=0.9, serverPath="/home/leno/HFO/bin/"):
        super(AdHocVisit, self).__init__(budgetAsk, budgetAdvise, stateImportanceMetric, port=port, seed=seed, serverPath=serverPath)

    def midpoint(self, typeMid):
        """Calculates the visit-based midpoint for the given budget type
        (self.ADVISE or self.ASK); returns None for any other type."""
        visit_thresholds = {self.ADVISE: 25, self.ASK: 20}
        if typeMid not in visit_thresholds:
            # Error
            return None
        numVisits = visit_thresholds[typeMid]
        return numVisits / (numVisits + math.log(self.scalingVisits + numVisits))
walterbender/fractionbounce | aplay.py | 1 | 2286 | # Copyright (C) 2009, Aleksey Lim
# Copyright (C) 2018, James Cameron <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from queue import Queue
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst
Gst.init(None)
class Aplay:
    """Minimal sequential audio player backed by a GStreamer ``playbin``.

    File names queued via play() are played one after another; the next
    item is started when the current one ends (end-of-stream) or errors.
    """
    def __init__(self):
        pipeline = Gst.ElementFactory.make('playbin', 'playbin')
        # Discard any video stream; this player is audio-only
        pipeline.set_property(
            "video-sink",
            Gst.ElementFactory.make('fakesink', 'fakesink'))
        bus = pipeline.get_bus()
        bus.add_signal_watch()
        bus.connect('message::eos', self._on_message_eos)
        bus.connect('message::error', self._on_message_error)
        self._pipeline = pipeline
        self._queue = Queue()
    def _dequeue(self):
        # Start playback of the next queued file, if any
        if self._queue.empty():
            return
        name = self._queue.get()
        self._pipeline.props.uri = 'file://' + name
        self._pipeline.set_state(Gst.State.PLAYING)
    def _on_message_eos(self, bus, message):
        # End of stream: stop the pipeline and move on to the next file
        if self._pipeline:
            self._pipeline.set_state(Gst.State.NULL)
            self._dequeue()
    def _on_message_error(self, bus, message):
        # Log the error, then skip to the next queued file
        err, debug = message.parse_error()
        logging.error('%s %s', err, debug)
        self._pipeline.set_state(Gst.State.NULL)
        self._dequeue()
    def play(self, name):
        """Queue *name* (a local file path) and start playing if idle."""
        self._queue.put(name)
        if self._pipeline:
            # Only kick off playback when the pipeline is currently idle;
            # otherwise the EOS handler will pick the item up later
            if self._pipeline.get_state(Gst.CLOCK_TIME_NONE)[1] == Gst.State.NULL:
                self._dequeue()
    def close(self):
        """Stop playback and release the pipeline; the player is unusable
        afterwards."""
        self._pipeline.set_state(Gst.State.NULL)
        self._pipeline = None
aplay = Aplay()
| gpl-3.0 | 2,392,143,500,170,973,700 | 30.75 | 82 | 0.644357 | false |
longjie/ps4eye | script/camera_info_publisher.py | 1 | 3113 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import yaml
import roslib
roslib.load_manifest("ps4eye");
from sensor_msgs.msg import CameraInfo
import rospy
import rospkg
class CameraInfoPublisher:
    """Republishes calibrated CameraInfo for the left/right cameras,
    copying the header from the corresponding /null/*/camera_info topic
    so timestamps line up with the incoming stream."""
    # Callback of the ROS subscriber.
    def leftCallback(self, data):
        left_cam_info_org = data
        self.left_cam_info.header = left_cam_info_org.header
        self.leftPublish()
    def rightCallback(self, data):
        right_cam_info_org = data
        self.right_cam_info.header = right_cam_info_org.header
        self.rightPublish()
    def __init__(self, camera_name):
        self.left_cam_info_org = 0
        self.right_cam_info_org = 0
        # Read the yaml calibration files and build CameraInfo messages.
        # NOTE(review): `rospack` is a module-level name created in the
        # __main__ block below, so this class only works when the file is
        # run as a script -- confirm before reusing it as a library.
        left_file_name = rospy.get_param('~left_file_name', rospack.get_path('ps4eye')+'/camera_info/left.yaml')
        right_file_name = rospy.get_param('~right_file_name', rospack.get_path('ps4eye')+'/camera_info/right.yaml')
        self.left_cam_info = parse_yaml(left_file_name)
        self.right_cam_info = parse_yaml(right_file_name)
        left_topic = "/" + camera_name + "/left/camera_info"
        right_topic = "/" + camera_name + "/right/camera_info"
        # Subscribing is required so the published info carries the same
        # timestamp (header) as the source topics.
        rospy.Subscriber("/null/left/camera_info", CameraInfo, self.leftCallback)
        rospy.Subscriber("/null/right/camera_info", CameraInfo, self.rightCallback)
        self.left_pub = rospy.Publisher(left_topic,CameraInfo)
        self.right_pub = rospy.Publisher(right_topic,CameraInfo)
    def leftPublish(self):
        '''
        now = rospy.Time.now()
        self.left_cam_info.header.stamp = now
        '''
        self.left_pub.publish(self.left_cam_info)
    def rightPublish(self):
        '''
        now = rospy.Time.now()
        self.right_cam_info.header.stamp = now
        '''
        self.right_pub.publish(self.right_cam_info)
def parse_yaml(filename):
    """Build a sensor_msgs/CameraInfo message from a calibration YAML file.

    :param filename: path to a calibration file (camera_calibration format).
    :return: a populated CameraInfo message.
    """
    # Use a context manager so the file handle is always closed (the
    # original used the Python-2-only ``file()`` builtin and never closed
    # the stream), and safe_load so arbitrary YAML tags in the input
    # cannot instantiate arbitrary Python objects.
    with open(filename, 'r') as stream:
        calib_data = yaml.safe_load(stream)
    cam_info = CameraInfo()
    cam_info.width = calib_data['image_width']
    cam_info.height = calib_data['image_height']
    cam_info.K = calib_data['camera_matrix']['data']
    cam_info.D = calib_data['distortion_coefficients']['data']
    cam_info.R = calib_data['rectification_matrix']['data']
    cam_info.P = calib_data['projection_matrix']['data']
    cam_info.distortion_model = calib_data['distortion_model']
    cam_info.binning_x = calib_data['binning_x']
    cam_info.binning_y = calib_data['binning_y']
    cam_info.roi.x_offset = calib_data['roi']['x_offset']
    cam_info.roi.y_offset = calib_data['roi']['y_offset']
    cam_info.roi.height = calib_data['roi']['height']
    cam_info.roi.width = calib_data['roi']['width']
    cam_info.roi.do_rectify = calib_data['roi']['do_rectify']
    return cam_info
if __name__ == '__main__':
    # argv is filtered through rospy.myargv but otherwise unused here
    argv = rospy.myargv(sys.argv)
    rospy.init_node("camera_info_publisher")
    # Deliberately module-level: CameraInfoPublisher.__init__ reads `rospack`
    rospack = rospkg.RosPack()
    publisher = CameraInfoPublisher('stereo')
    # Idle loop; all publishing happens in the subscriber callbacks
    while not rospy.is_shutdown():
        rospy.sleep(rospy.Duration(.1))
wiredrive/wtframework | wtframework/wtf/constants.py | 1 | 1235 | ##########################################################################
# This file is part of WTFramework.
#
# WTFramework is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# WTFramework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with WTFramework. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
###
# Constants that are used by both WTFramework and by the utility script
# files. We're putting the constants here to avoid adding an unnecessary
# dependency on WTFramework classes inside our utility scripts.
###
# Constants specifying the config structure.
WTF_CONFIG_LOCATION = 'configs/'  # Directory where config files live.
WTF_DEFAULT_CONFIG_FILE = 'default'  # Base name of the default config file.
WTF_CONFIG_EXT = '.yaml'  # File extension used for config files.
WTF_ENV_VARS = "WTF_ENV"  # Name of the environment variable consulted.
PhilHudson/XWingDice | DiceRoller.py | 1 | 3888 | import random
from collections import Counter
# >>> dict((x,l.count(x)) for x in set(l))
class OctaDie(object):
    """One eight-sided die, configured as either an attack or a defence die.

    Attack faces (8): 3x Hit (37.5%), 1x Critical Hit (12.5%),
    2x Focus (25%), 2x Blank (25%).
    Defence faces (8): 3x Evade (37.5%), 2x Focus (25%), 3x Blank (37.5%).

    Attributes:
        attacking: True for an attack die, False for a defence die.
        faces: the list of eight face names this die can show.
        faceShowing: the face showing after a roll.
    """

    attackFaces = [
        'Hit', 'Hit', 'Hit', 'Critical Hit', 'Focus', 'Focus', 'Blank', 'Blank'
    ]
    defenceFaces = [
        'Evade', 'Evade', 'Evade', 'Focus', 'Focus', 'Blank', 'Blank', 'Blank'
    ]

    def __init__(self, attacking, faceShowing='Blank'):
        """Create a die of the requested kind with *faceShowing* up
        (a blank, by default)."""
        self.attacking = attacking
        self.faces = OctaDie.attackFaces if attacking else OctaDie.defenceFaces
        self.faceShowing = faceShowing

    def roll(self):
        """Return a new, freshly rolled OctaDie of the same kind."""
        return OctaDie(self.attacking, random.choice(self.faces))

    def __str__(self):
        return self.faceShowing

    def __repr__(self):
        return "OctaDie(%s, '%s')" % (str(self.attacking), self.faceShowing)
class Throw(object):
    """A simultaneous throw of one or more OctaDie objects.

    Attributes:
        attacking: True if this throw is an attack, False for defence.
        diceCount: number of dice in the throw.
        dice: the OctaDie instances making up the throw.
    """

    def __init__(self, attacking, diceCount=1, preRolledDice=None):
        """Create a throw of *diceCount* dice.

        When *preRolledDice* is supplied (e.g. when copying a Throw), it
        must contain exactly *diceCount* OctaDie instances.
        """
        self.attacking = attacking
        self.diceCount = diceCount
        if preRolledDice is None:
            self.dice = [OctaDie(attacking) for _ in range(diceCount)]
        else:
            assert len(preRolledDice) == diceCount
            for die in preRolledDice:
                assert isinstance(die, OctaDie)
            self.dice = preRolledDice

    def throw(self):
        """Return a new Throw with every die freshly rolled."""
        rolled = [die.roll() for die in self.dice]
        return Throw(self.attacking, self.diceCount, rolled)

    def tally(self):
        """Return a Counter mapping each showing face to its count."""
        return Counter([die.faceShowing for die in self.dice])

    def __str__(self):
        return str([str(die) for die in self.dice])

    def __repr__(self):
        return "Throw(%s, %s, [%s])" % \
            (str(self.attacking), str(self.diceCount),
             ', '.join([repr(die) for die in self.dice]))
if __name__ == '__main__':
    # NOTE(review): Python 2 script -- numeric entry relies on the old
    # eval-ing input() and print statements; it will not run under Python 3.
    attackCount = input("Number of attack dice: ")
    defendCount = input("Number of defence dice: ")
    print
    print "Attacker:"
    print Throw(True, attackCount).throw().tally()
    print
    print "Defender:"
    print Throw(False, defendCount).throw().tally()
gpotter2/scapy | scapy/compat.py | 1 | 9597 | # This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Gabriel Potter <[email protected]>
# This program is published under a GPLv2 license
"""
Python 2 and 3 link classes.
"""
from __future__ import absolute_import
import base64
import binascii
import collections
import gzip
import socket
import struct
import sys
import scapy.modules.six as six
# Very important: will issue typing errors otherwise
__all__ = [
# typing
'Any',
'AnyStr',
'Callable',
'DefaultDict',
'Dict',
'Generic',
'Iterable',
'IO',
'Iterable',
'Iterator',
'List',
'Literal',
'NamedTuple',
'NewType',
'NoReturn',
'Optional',
'Pattern',
'Sequence',
'Set',
'Sized',
'Tuple',
'Type',
'TypeVar',
'Union',
'cast',
'overload',
'FAKE_TYPING',
'TYPE_CHECKING',
# compat
'AddressFamily',
'base64_bytes',
'bytes_base64',
'bytes_encode',
'bytes_hex',
'chb',
'gzip_compress',
'gzip_decompress',
'hex_bytes',
'lambda_tuple_converter',
'orb',
'plain_str',
'raw',
]
# Typing compatibility
# Note:
# supporting typing on multiple python versions is a nightmare.
# Since Python 3.7, Generic is a type instead of a metaclass,
# therefore we can't support both at the same time. Our strategy
# is to only use the typing module if the Python version is >= 3.7
# and use totally fake replacements otherwise.
# HOWEVER, when using the fake ones, to emulate stub Generic
# fields (e.g. _PacketField[str]) we need to add a fake
# __getitem__ to Field_metaclass
try:
import typing # noqa: F401
from typing import TYPE_CHECKING
if sys.version_info[0:2] <= (3, 6):
# Generic is messed up before Python 3.7
# https://github.com/python/typing/issues/449
raise ImportError
FAKE_TYPING = False
except ImportError:
FAKE_TYPING = True
TYPE_CHECKING = False
# Import or create fake types
def _FakeType(name, cls=object):
# type: (str, Optional[type]) -> Any
class _FT(object):
def __init__(self, name):
# type: (str) -> None
self.name = name
# make the objects subscriptable indefinetly
def __getitem__(self, item): # type: ignore
return cls
def __call__(self, *args, **kargs):
# type: (*Any, **Any) -> Any
if isinstance(args[0], str):
self.name = args[0]
return self
def __repr__(self):
# type: () -> str
return "<Fake typing.%s>" % self.name
return _FT(name)
if not FAKE_TYPING:
# Only required if using mypy-lang for static typing
from typing import (
Any,
AnyStr,
Callable,
DefaultDict,
Dict,
Generic,
Iterable,
Iterator,
IO,
List,
NewType,
NoReturn,
Optional,
Pattern,
Sequence,
Set,
Sized,
Tuple,
Type,
TypeVar,
Union,
cast,
overload,
)
else:
# Let's be creative and make some fake ones.
def cast(_type, obj): # type: ignore
return obj
Any = _FakeType("Any")
AnyStr = _FakeType("AnyStr") # type: ignore
Callable = _FakeType("Callable")
DefaultDict = _FakeType("DefaultDict", # type: ignore
collections.defaultdict)
Dict = _FakeType("Dict", dict) # type: ignore
Generic = _FakeType("Generic")
Iterable = _FakeType("Iterable") # type: ignore
Iterator = _FakeType("Iterator") # type: ignore
IO = _FakeType("IO") # type: ignore
List = _FakeType("List", list) # type: ignore
NewType = _FakeType("NewType")
NoReturn = _FakeType("NoReturn") # type: ignore
Optional = _FakeType("Optional")
Pattern = _FakeType("Pattern") # type: ignore
Sequence = _FakeType("Sequence") # type: ignore
Set = _FakeType("Set", set) # type: ignore
Sequence = _FakeType("Sequence", list) # type: ignore
Tuple = _FakeType("Tuple")
Type = _FakeType("Type", type)
TypeVar = _FakeType("TypeVar") # type: ignore
Union = _FakeType("Union")
class Sized(object): # type: ignore
pass
overload = lambda x: x
# Broken < Python 3.7
if sys.version_info >= (3, 7):
    from typing import NamedTuple
else:
    # Hack for Python < 3.7 - Implement NamedTuple pickling
    # Re-creates the namedtuple class at unpickle time: the first
    # len_params args are the field names, the rest are the values.
    def _unpickleNamedTuple(name, len_params, *args):
        return collections.namedtuple(
            name,
            args[:len_params]
        )(*args[len_params:])
    def NamedTuple(name, params):
        # params is a list of (field_name, field_type) pairs; only the
        # names matter at runtime.
        tup_params = tuple(x[0] for x in params)
        cls = collections.namedtuple(name, tup_params)
        class _NT(cls):
            def __reduce__(self):
                """Used by pickling methods"""
                return (_unpickleNamedTuple,
                        (name, len(tup_params)) + tup_params + tuple(self))
        _NT.__name__ = cls.__name__
        return _NT
# Python 3.8 Only
if sys.version_info >= (3, 8):
from typing import Literal
else:
Literal = _FakeType("Literal")
# Python 3.4
if sys.version_info >= (3, 4):
from socket import AddressFamily
else:
class AddressFamily:
AF_INET = socket.AF_INET
AF_INET6 = socket.AF_INET6
class _Generic_metaclass(type):
    # When the real typing module is unavailable (FAKE_TYPING), make
    # classes using this metaclass subscriptable (e.g. Cls[int]) by
    # returning the class itself, emulating stub Generic fields.
    if FAKE_TYPING:
        def __getitem__(self, typ):
            # type: (Any) -> Any
            return self
###########
# Python3 #
###########
# https://mypy.readthedocs.io/en/stable/generics.html#declaring-decorators
DecoratorCallable = TypeVar("DecoratorCallable", bound=Callable[..., Any])
def lambda_tuple_converter(func):
    # type: (DecoratorCallable) -> DecoratorCallable
    """
    Converts a Python 2 function as
      lambda (x,y): x + y
    In the Python 3 format:
      lambda x,y : x + y
    """
    # Only single-argument callables need adapting; anything else (including
    # None) is passed through untouched.
    if func is None or func.__code__.co_argcount != 1:
        return func
    # A lone positional argument is forwarded as-is; multiple positional
    # arguments are packed into one tuple, mimicking py2 tuple unpacking.
    return lambda *args: func(  # type: ignore
        args[0] if len(args) == 1 else args
    )
# This is ugly, but we don't want to move raw() out of compat.py
# and it makes it much clearer
if TYPE_CHECKING:
    from scapy.packet import Packet
if six.PY2:
    # On Python 2, str already *is* bytes, so these are cheap aliases.
    bytes_encode = plain_str = str  # type: Callable[[Any], bytes]
    orb = ord  # type: Callable[[bytes], int]

    def chb(x):
        # type: (int) -> bytes
        # Accept either an int or an already-encoded one-char str.
        if isinstance(x, str):
            return x
        return chr(x)

    def raw(x):
        # type: (Union[Packet]) -> bytes
        """
        Builds a packet and returns its bytes representation.
        This function is and will always be cross-version compatible
        """
        # Prefer the object's own __bytes__ (Packet defines one on py2).
        if hasattr(x, "__bytes__"):
            return x.__bytes__()
        return bytes(x)
else:
    def raw(x):
        # type: (Union[Packet]) -> bytes
        """
        Builds a packet and returns its bytes representation.
        This function is and will always be cross-version compatible
        """
        # Python 3's bytes() protocol already dispatches to __bytes__.
        return bytes(x)

    def bytes_encode(x):
        # type: (Any) -> bytes
        """Ensure that the given object is bytes.
        If the parameter is a packet, raw() should be preferred.
        """
        # str is UTF-8 encoded; anything else goes through bytes().
        if isinstance(x, str):
            return x.encode()
        return bytes(x)
if sys.version_info[0:2] <= (3, 4):
    def plain_str(x):
        # type: (AnyStr) -> str
        """Convert basic byte objects to str"""
        # 3.4 and older lack the "backslashreplace" decode handler, so
        # undecodable bytes are silently dropped instead.
        return x.decode(errors="ignore") if isinstance(x, bytes) else str(x)
else:
    # Python 3.5+
    def plain_str(x):
        # type: (Any) -> str
        """Convert basic byte objects to str"""
        # Keep undecodable bytes visible as \xNN escapes.
        return x.decode(errors="backslashreplace") if isinstance(x, bytes) \
            else str(x)
def chb(x):
    # type: (int) -> bytes
    """Same than chr() but encode as bytes."""
    # Network-order unsigned byte: valid for 0 <= x <= 255.
    return struct.pack("!B", x)

def orb(x):
    # type: (Union[int, str, bytes]) -> int
    """Return ord(x) when not already an int."""
    return x if isinstance(x, int) else ord(x)
def bytes_hex(x):
    # type: (AnyStr) -> bytes
    """Hexify a str or a bytes object"""
    # hexlify is the documented alias of binascii.b2a_hex.
    return binascii.hexlify(bytes_encode(x))

def hex_bytes(x):
    # type: (AnyStr) -> bytes
    """De-hexify a str or a byte object"""
    # unhexlify is the documented alias of binascii.a2b_hex.
    return binascii.unhexlify(bytes_encode(x))
def base64_bytes(x):
    # type: (AnyStr) -> bytes
    """Turn base64 into bytes"""
    if six.PY2:
        # decodestring is the (long-deprecated) Python 2 spelling.
        return base64.decodestring(x)  # type: ignore
    return base64.decodebytes(bytes_encode(x))

def bytes_base64(x):
    # type: (AnyStr) -> bytes
    """Turn bytes into base64"""
    if six.PY2:
        # Python 2 operates on str; strip the newlines encodestring inserts
        # every 76 chars so the result is a single continuous token.
        return base64.encodestring(x).replace('\n', '')  # type: ignore
    return base64.encodebytes(bytes_encode(x)).replace(b'\n', b'')
if six.PY2:
    import cgi
    # NOTE(review): cgi.escape does NOT escape quote characters by default,
    # while html.escape defaults to quote=True -- the two branches are
    # presumably not byte-for-byte equivalent; confirm no caller relies on
    # quote escaping under Python 2.
    html_escape = cgi.escape
else:
    import html
    html_escape = html.escape
if six.PY2:
    from StringIO import StringIO

    def gzip_decompress(x):
        # type: (AnyStr) -> bytes
        """Decompress using gzip"""
        # Python 2's gzip module has no one-shot decompress(); wrap the
        # payload in a file-like object instead.
        with gzip.GzipFile(fileobj=StringIO(x), mode='rb') as fdesc:
            return fdesc.read()

    def gzip_compress(x):
        # type: (AnyStr) -> bytes
        """Compress using gzip"""
        buf = StringIO()
        with gzip.GzipFile(fileobj=buf, mode='wb') as fdesc:
            fdesc.write(x)
        return buf.getvalue()
else:
    # Python 3 provides the one-shot helpers directly.
    gzip_decompress = gzip.decompress
    gzip_compress = gzip.compress
| gpl-2.0 | 7,207,311,381,274,182,000 | 24.660428 | 75 | 0.571012 | false |
endlessm/chromium-browser | third_party/chromite/lib/process_util_unittest.py | 1 | 3668 | # -*- coding: utf-8 -*-
# Copyright 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for the process_util.py module."""
from __future__ import print_function
import os
import signal
from chromite.lib import cros_test_lib
from chromite.lib import process_util
def _SpawnChild(exit_code=None, kill_signal=None):
"""Create a child, have it exit/killed, and return its status."""
assert exit_code is not None or kill_signal is not None
pid = os.fork()
if pid == 0:
# Make sure this child never returns.
while True:
if exit_code is not None:
# pylint: disable=protected-access
os._exit(exit_code)
else:
os.kill(os.getpid(), kill_signal)
return os.waitpid(pid, 0)[1]
class GetExitStatusTests(cros_test_lib.TestCase):
  """Tests for GetExitStatus()"""
  # Convention under test: plain exits map to their exit code; signal
  # deaths map to 128 + signal number (shell-style).

  def testExitNormal(self):
    """Verify normal exits get decoded."""
    status = _SpawnChild(exit_code=0)
    ret = process_util.GetExitStatus(status)
    self.assertEqual(ret, 0)

  def testExitError(self):
    """Verify error exits (>0 && <128) get decoded."""
    status = _SpawnChild(exit_code=10)
    ret = process_util.GetExitStatus(status)
    self.assertEqual(ret, 10)

  def testExitWeird(self):
    """Verify weird exits (>=128) get decoded."""
    status = _SpawnChild(exit_code=150)
    ret = process_util.GetExitStatus(status)
    self.assertEqual(ret, 150)

  def testSIGUSR1(self):
    """Verify normal kill signals get decoded."""
    status = _SpawnChild(kill_signal=signal.SIGUSR1)
    ret = process_util.GetExitStatus(status)
    self.assertEqual(ret, 128 + signal.SIGUSR1)

  def testSIGKILL(self):
    """Verify harsh signals get decoded."""
    status = _SpawnChild(kill_signal=signal.SIGKILL)
    ret = process_util.GetExitStatus(status)
    self.assertEqual(ret, 128 + signal.SIGKILL)
class ExitAsStatusTests(cros_test_lib.TestCase):
  """Tests for ExitAsStatus()"""

  def _Tester(self, exit_code=None, kill_signal=None):
    """Helper func for testing ExitAsStatus()

    Create a child to mimic the grandchild.
    Create a grandchild and have it exit/killed.
    Assert behavior based on exit/signal behavior.
    """
    pid = os.fork()
    if pid == 0:
      # Let the grandchild exit/kill itself.
      # The child should mimic the grandchild.
      status = _SpawnChild(exit_code=exit_code, kill_signal=kill_signal)
      try:
        process_util.ExitAsStatus(status)
      except SystemExit as e:
        # ExitAsStatus signals plain exits via SystemExit; forward the
        # code without running interpreter cleanup.
        # pylint: disable=protected-access
        os._exit(e.code)
      # ExitAsStatus must never return normally.
      raise AssertionError('ERROR: should have exited!')

    # The parent returns the child's status.
    status = os.waitpid(pid, 0)[1]

    # The child's wait status must mirror the grandchild's fate exactly:
    # same exit code for normal exits, same terminating signal otherwise.
    if exit_code is not None:
      self.assertFalse(os.WIFSIGNALED(status))
      self.assertTrue(os.WIFEXITED(status))
      self.assertEqual(os.WEXITSTATUS(status), exit_code)
    else:
      self.assertFalse(os.WIFEXITED(status))
      self.assertTrue(os.WIFSIGNALED(status))
      self.assertEqual(os.WTERMSIG(status), kill_signal)

  def testExitNormal(self):
    """Verify normal exits get decoded."""
    self._Tester(exit_code=0)

  def testExitError(self):
    """Verify error exits (>0 && <128) get decoded."""
    self._Tester(exit_code=10)

  def testExitWeird(self):
    """Verify weird exits (>=128) get decoded."""
    self._Tester(exit_code=150)

  def testSIGUSR1(self):
    """Verify normal kill signals get decoded."""
    self._Tester(kill_signal=signal.SIGUSR1)

  def testSIGKILL(self):
    """Verify harsh signals get decoded."""
    self._Tester(kill_signal=signal.SIGKILL)
| bsd-3-clause | 2,300,556,246,486,656,500 | 29.823529 | 72 | 0.677754 | false |
choeminjun/SE-points-severce-platfrom | __main__.py | 1 | 9892 | import time
import datetime
import csv
import os
import json
import sys
import logging
import send2trash
# All events are appended to a local log file next to the script.
logging.basicConfig(filename='program_Log.txt', level=logging.INFO, format=' %(asctime)s - %(levelname)s - %(message)s')
# NOTE(review): credentials are hard-coded in source -- anyone able to read
# this file can log in as master. Consider loading them from a config file
# or environment variables instead.
MASTER_PASSWORD = 'minjun5627'
MASTERUSERNAME = 'minjun'
MASTERUSERPASSWORD = '2005'
#os.chdir('/SE_point_severce/')
def setup():
    """Print the start-up banner, pausing twice to mimic loading."""
    logging.info('function info:"setup"function in activate state.')
    print('---SE---')
    for message in ('••••loading modules...', '••••starting program...'):
        print(message)
        time.sleep(1)
    print('---------------------------------------------------------------------')
    logging.info('function info:"setup"function termited.')
def master():
    """Interactive administrator console: charge points or delete a user.

    User records are CSV-ish files named '<username>.csv' in the current
    directory: first line password, second line point balance.
    """
    logging.warning('master status: master function starting...')
    print('---SE master mode---')
    USERIN = input('master_kor>>:')
    if USERIN == 'charge points' or USERIN == 'charge':
        logging.info('master status: master request charge points.')
        usernameIn = input('please write the user name you want to charge points:')
        try:
            for file in os.listdir('./'):
                # NOTE(review): str.strip('.csv') strips *characters*, not
                # the '.csv' suffix -- names containing c/s/v/. at either
                # end are mangled; presumably removesuffix() was intended.
                if file.strip('.csv') == usernameIn:
                    userFile = open(usernameIn + '.csv', 'r')
                    userName = usernameIn.strip('.csv')
                    userFileRead = userFile.readlines()
                    # Record layout: line 0 = password, line 1 = points.
                    user_password = str(userFileRead[0].strip('\n'))
                    user_points = int(userFileRead[1])
                    userFile.close()
                    USERpoints = input('please write the points you want to charge:')
                    # Rewrite the whole record with the increased balance.
                    user_FileE = open(str(usernameIn) + '.csv', 'w')
                    user_FileE.write(user_password + '\n')
                    user_pluse_points = user_points + int(USERpoints)
                    user_points = str(user_pluse_points)
                    user_FileE.write(user_points)
                    user_FileE.close()
        except:
            # NOTE(review): bare except also hides genuine I/O or parse
            # errors, not only the "user not found" case.
            print('No user %s. please try again.' % (usernameIn))
    if USERIN == 'del user':
        logging.info('master status: del user.')
        USERIN = input('please write the user name you want to del:')
        for file in os.listdir('./'):
            if file.strip('.csv') == USERIN:
                delfile = file
        try:
            # Moves the record to the OS trash (third-party send2trash).
            # If no file matched, delfile is unbound and the resulting
            # NameError is swallowed by the bare except below.
            send2trash.send2trash(os.path.join(os.getcwd(), delfile))
        except:
            print('No user %s. please try again.' % (USERIN))
def setup_login():
    """Interactive pre-login shell: login, create user, help, master mode.

    Loops until a successful login, then returns a 5-tuple:
    (open file handle, user name, raw file lines, password, points).
    NOTE(review): the returned file handle is left open and passwords are
    stored/compared in plain text -- worth revisiting.
    """
    logging.info('function info:"setup_login"function in activate state.')
    while True:
        USERINPUT = input('>user공용_kor:')
        if USERINPUT == 'login':
            logging.info('function info:user logining...')
            Id = input('ID:')
            for file in os.listdir('./'):
                # NOTE(review): strip('.csv') removes characters, not the
                # suffix -- see master() for the same issue.
                if file.strip('.csv') == Id:
                    userFile = open(str(file), 'r')
                    userName = file.strip('.csv')
                    userFileRead = userFile.readlines()
                    # Record layout: line 0 = password, line 1 = points.
                    user_password = str(userFileRead[0].strip('\n'))
                    user_points = int(userFileRead[1])
                    try:
                        while True:
                            logging.info('user state: writing password.')
                            userINPassword = input('Password:')
                            if str(userINPassword) == user_password:
                                logging.warning('security state: user %s logined' % (userName))
                                print('Access OK.')
                                print('Changing mode to private mode...')
                                logging.info('user state:Changing mode to private mode.')
                                return userFile, userName, userFileRead, user_password, user_points
                            else:
                                print('Your password or user ID is worng.')
                                time.sleep(0.2)
                                print('Please try again.')
                                time.sleep(0.2)
                                logging.info('user state:user login failed.')
                                break
                    except:
                        # Any unexpected error is treated as a failed login.
                        print('Your password or user ID is worng.')
                        time.sleep(0.2)
                        print('Please try again.')
                        logging.info('user state:user login failed.')
                        time.sleep(0.2)
        elif USERINPUT == 'make user' or USERINPUT == 'makeUser':
            logging.warning('user state: makeing user.')
            NewuserName = input('New ID:')
            NewuserPassword = input('New password:')
            # Require the password to be typed twice before creating.
            while True:
                INPUT = input('comfrim password:')
                if INPUT == NewuserPassword:
                    break
                else:
                    print('The password you wrote is not same.')
                    time.sleep(0.2)
                    print('please try again.')
                    time.sleep(0.2)
            print('makeing new user...')
            # New accounts start with a balance of 200 points.
            NewuserFile = open(str(NewuserName) + '.csv', 'w')
            NewuserFile.write(str(NewuserPassword) + '\n')
            NewuserFile.write('200')
            NewuserFile.close()
            print('User makeing complete!')
            logging.info('user state:user makeing complete!')
            time.sleep(0.2)
            print('Please Relogin.')
        elif USERINPUT == 'terminat' or USERINPUT == 'OFF':
            logging.info('system status:system closeing.')
            print('system closeing...')
            time.sleep(1)
            print('Goodby~!')
            logging.info('system closed.')
            sys.exit(10)
        elif USERINPUT == 'help' or USERINPUT == 'Help':
            logging.info('user status: Requested help.')
            print('commands:')
            print('••login:To login to the program.')
            print('••make user:To make a New user.')
            print('••terminat:To close system.')
        elif USERINPUT == 'login to master mode':
            logging.warning('user status: logining to master mode...')
            USERINPUT = input('Please write the master password:')
            if USERINPUT == MASTER_PASSWORD:
                # Master mode needs the shared password plus the master
                # account's own ID/password.
                print('Please login to master user.')
                name = input('ID:')
                password = input('password:')
                if name == MASTERUSERNAME and password == MASTERUSERPASSWORD:
                    logging.warning('user status: logined to master mode.')
                    print('Changing mode to MASTER mode...')
                    master()
        else:
            print('The command you wrote do not exist.')
            time.sleep(0.3)
            print('Please try again. Or write "Help".')
def load_data():
    """Load the item catalogue from 'itemsForPro.json' in the current dir.

    Returns the parsed JSON object (a mapping of item name -> price).
    Raises OSError if the file is missing and json.JSONDecodeError on
    malformed content.
    """
    # Fix: the original opened the file without ever closing it (leaked
    # handle) and carried a block of commented-out legacy code; use a
    # context manager and read() instead of joining readlines().
    with open('itemsForPro.json') as handle:
        raw_items = handle.read()
    logging.info('val info:ITEMSFORPRO_RAW = ' + str(raw_items))
    json_items = json.loads(raw_items)
    logging.info('val info:ITEMSFORPRO = ' + str(json_items))
    return json_items
def print_items(items):
    """Print the catalogue as '| name=priceP' rows between two rules."""
    rule = '------------------------------------------------'
    print(rule)
    for name, price in items.items():
        # %d requires integer prices.
        print('| %s=%dP' % (name, price))
    print(rule)
def main():
    """Program entry point: banner, catalogue load, login, then shop loop.

    Supported commands in the loop: 'buy'/'buy item' and 'logout'.
    """
    setup()
    items = load_data()
    user_File, user_Name, user_File_header, user_password, user_points = setup_login()
    logging.info('program status:changed to privted mode.')
    while True:
        USERIN = input('>local_kor:')
        if USERIN == 'buy' or USERIN == 'buy item':
            logging.info('user status:buying item.')
            print_items(items)
            # Inner prompt loop: accept a known item or 'ter' to back out.
            while True:
                USERIN = input('please write the item you want to buy:')
                if USERIN in items:
                    break
                elif USERIN == 'ter':
                    print('going out...')
                    logging.warning('user status: user going out buy room...')
                    break
                else:
                    print('The item you wrote not exist.')
                    time.sleep(0.3)
                    print('Please try to write a different item.')
            if USERIN in items:
                logging.info('user status:buying item:' + str(USERIN))
                if user_points < items[str(USERIN)]:
                    print('You have not enogh points to buy "%s" item' % (str(USERIN)))
                    logging.info('user status: failed to buy item:%s.(not enogh points)' % (user_points))
                else:
                    logging.info('program status: Processing...')
                    print('구매중..')
                    # Persist the new balance by rewriting the user record
                    # (password line, then remaining points).
                    user_File.close()
                    user_FileE = open(user_Name + '.csv', 'w')
                    user_FileE.write(user_password + '\n')
                    user_points = user_points - items[str(USERIN)]
                    user_FileE.write(str(user_points))
                    print('구매완료!')
                    logging.info('program status: Processing complete!')
        elif USERIN == 'logout':
            logging.warning('user status: logouting...')
            print('logouting...')
            # Drop the session state, then go back to the login shell.
            del user_File, user_Name, user_File_header, user_password, user_points
            print('logout complete!')
            logging.info('user status: logouted.')
            user_File, user_Name, user_File_header, user_password, user_points = setup_login()
    # Unfinished "return item" feature kept for reference:
    # elif USERIN == 'return item' or USERIN == 'Return item' or USERIN == 'Return':
    #     RETURNITEM = input('What item do you want to return?')
    #     if RETURNITEM in items:
    #         userstatus = input('Do you really want to return %s?' % (RETURNITEM))
    #         if userstatus == 'y' or userstatus == 'yes':
    #             print('Returning item...')
    #             time.sleep(0.3)
    #             print('Returning points...')
if __name__ == '__main__':
main() | apache-2.0 | 1,022,653,048,925,842,600 | 37.315175 | 122 | 0.510766 | false |
svalqui/sysad | examples/ldap-rep-comp-disa.py | 1 | 2248 | # Copyright 2020 by Sergio Valqui. All rights reserved.
#
# Report Computers in an AD branch that are not disabled
#
# Run this in the command line while on the palazo directory so python can find serv
# export PYTHONPATH=`pwd`
from serv.ldaps import ldap_connect, find_computers_disabled, ldap_disconnect
# from pathlib import Path
# import datetime
import getpass
import configparser
import pathlib
# Locate the ldapq.ini configuration two directories above this script.
file_conf_dir = pathlib.Path(__file__).absolute().parents[2]
print('file_conf_dir', file_conf_dir)
file_conf_name = pathlib.Path(file_conf_dir) / 'ldapq.ini'
print('file_conf_name', file_conf_name)
# Reading configuration
config = configparser.ConfigParser()
user_name = ''
URI = ''
try:
    config.read(str(file_conf_name))
    user_name = config['Settings']['default_user']
    URI = config['Settings']['uri']
    BASE = config['Settings']['default_base']
    show_fields = config['Filters']['show_attributes'].split(',')
    proceed = True
except BaseException as e:
    # Missing file or missing keys: report and skip the LDAP query below.
    print('--FileError: ', e)
    print('--Exception Name :', type(e))
    proceed = False

if proceed:
    # Prompt for the bind password and the AD branch, then query for
    # non-disabled computer objects under that branch.
    user_password = getpass.getpass()
    my_branch = input("Which branch :")
    connection = ldap_connect(URI, user_name, user_password)
    my_list = find_computers_disabled(my_branch, connection,
                                      ["name", "operatingSystem", "operatingSystemVersion",
                                       "lastLogonTimestamp", "distinguishedName", "description",
                                       "userAccountControl"])
    # UserAccountControl:1.2.840.113556.1.4.803:=2  ## Computer disabled
    print(" ------ search concluded... printing ", len(my_list))
    for i in my_list:
        if isinstance(i, list):
            # A result row: each entry exposes .header/.content; collapse
            # multi-valued attributes to a marker string.
            my_row = []
            for j in i:
                # print(j.header, j.content)
                if len(j.content) == 1:
                    value = j.content[0]
                    # print(j.content, " ", value)
                else:
                    value = "Multiple Values"
                    # print(j.content)
                my_row.append(value)
            print("\t".join(my_row))
        else:
            # Non-list items (presumably status/header objects) are dumped
            # verbatim -- TODO confirm their shape against serv.ldaps.
            print(i)
            print(i.header, i.content)
    ldap_disconnect(connection)
| mit | 8,384,420,325,427,028,000 | 30.661972 | 96 | 0.588968 | false |
mich013/iotdb-smartthings | testdata.py | 1 | 13022 | testData = {u'name': u'Michels Office',
u'roomDeviceLabels': [u'Aeon SmartStrip 03',
u'Aeon Ss 01',
u'Aeon Ss 02',
u'SmartSense Motion/Temp Sensor 01'],
u'roomDevices': [{u'children': {u'device': {u'id': u'db91bd2b-b03e-40e5-9889-7fa74107c262',
u'label': u'Aeon SmartStrip 03',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 0.009999999776482582,
u'energy1State': u'0E+1',
u'energy2State': u'0.01',
u'energy3State': u'0E+1',
u'energy4State': u'0E+1',
u'energyState': u'0.05',
u'nonmonitoredEnergy': u'0.05'},
u'power': {u'childPower': 0,
u'nonmonitorPower': u'',
u'power1State': u'0',
u'power2State': u'0',
u'power3State': u'0',
u'power4State': u'0',
u'powerState': u'0'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'db91bd2b-b03e-40e5-9889-7fa74107c262',
u'label': u'Aeon SmartStrip 03',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon SmartStrip 03'},
{u'children': {u'device': {u'id': u'db91bd2b-b03e-40e5-9889-7fa74107c262',
u'label': u'Aeon SmartStrip 03',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 0.009999999776482582,
u'energy1State': u'0E+1',
u'energy2State': u'0.01',
u'energy3State': u'0E+1',
u'energy4State': u'0E+1',
u'energyState': u'0.05',
u'nonmonitoredEnergy': u'0.05'},
u'power': {u'childPower': 0,
u'nonmonitorPower': u'',
u'power1State': u'0',
u'power2State': u'0',
u'power3State': u'0',
u'power4State': u'0',
u'powerState': u'0'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'db91bd2b-b03e-40e5-9889-7fa74107c262',
u'label': u'Aeon SmartStrip 03',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon SmartStrip 03'},
{u'children': {u'device': {u'id': u'abe801ad-b07e-4bcc-9b8c-b26bae2cb099',
u'label': u'Aeon Ss 01',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 149.73000144958496,
u'energy1State': u'51.31',
u'energy2State': u'17.49',
u'energy3State': u'27.7',
u'energy4State': u'53.23',
u'energyState': u'236.32',
u'nonmonitoredEnergy': u'236.32'},
u'power': {u'childPower': 258,
u'nonmonitorPower': u'256',
u'power1State': u'119',
u'power2State': u'17',
u'power3State': u'40',
u'power4State': u'82',
u'powerState': u'256'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'abe801ad-b07e-4bcc-9b8c-b26bae2cb099',
u'label': u'Aeon Ss 01',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon Ss 01'},
{u'children': {u'device': {u'id': u'abe801ad-b07e-4bcc-9b8c-b26bae2cb099',
u'label': u'Aeon Ss 01',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 149.73000144958496,
u'energy1State': u'51.31',
u'energy2State': u'17.49',
u'energy3State': u'27.7',
u'energy4State': u'53.23',
u'energyState': u'236.32',
u'nonmonitoredEnergy': u'236.32'},
u'power': {u'childPower': 258,
u'nonmonitorPower': u'256',
u'power1State': u'119',
u'power2State': u'17',
u'power3State': u'40',
u'power4State': u'82',
u'powerState': u'256'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'abe801ad-b07e-4bcc-9b8c-b26bae2cb099',
u'label': u'Aeon Ss 01',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon Ss 01'},
{u'children': {u'device': {u'id': u'a7243303-371a-499c-b0b6-d35e1813fb27',
u'label': u'Aeon Ss 02',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 1577.209995150566,
u'energy1State': u'2.74',
u'energy2State': u'0.79',
u'energy3State': u'0.56',
u'energy4State': u'1573.12',
u'energyState': u'1612.11',
u'nonmonitoredEnergy': u'1612.11'},
u'power': {u'childPower': 4,
u'nonmonitorPower': u'3',
u'power1State': u'0',
u'power2State': u'4',
u'power3State': u'0',
u'power4State': u'0',
u'powerState': u'43'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'a7243303-371a-499c-b0b6-d35e1813fb27',
u'label': u'Aeon Ss 02',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon Ss 02'},
{u'children': {u'device': {u'id': u'a7243303-371a-499c-b0b6-d35e1813fb27',
u'label': u'Aeon Ss 02',
u'name': u'Aeon SmartStrip'},
u'energy': {u'childenergy': 1577.209995150566,
u'energy1State': u'2.74',
u'energy2State': u'0.79',
u'energy3State': u'0.56',
u'energy4State': u'1573.12',
u'energyState': u'1612.11',
u'nonmonitoredEnergy': u'1612.11'},
u'power': {u'childPower': 4,
u'nonmonitorPower': u'3',
u'power1State': u'0',
u'power2State': u'4',
u'power3State': u'0',
u'power4State': u'0',
u'powerState': u'43'},
u'switches': {u'switch1State': u'on',
u'switch2State': u'on',
u'switch3State': u'on',
u'switch4State': u'on',
u'switchState': u'on'}},
u'device': {u'id': u'a7243303-371a-499c-b0b6-d35e1813fb27',
u'label': u'Aeon Ss 02',
u'name': u'Aeon SmartStrip'},
u'label': u'Aeon Ss 02'},
{u'children': {u'battery': {u'date': u'2015-07-30T15:22:55.535Z',
u'name': u'battery',
u'type': None,
u'unit': None,
u'unixTime': 1438269775535,
u'value': u'77'},
u'device': {u'id': u'a256e80e-f415-437b-bf25-e381ea3ff7a9',
u'label': u'SmartSense Motion/Temp Sensor 01',
u'name': u'SmartSense Motion/Temp Sensor'},
u'motion': {u'date': u'2015-09-17T17:28:24.034Z',
u'name': u'motion',
u'type': None,
u'unit': None,
u'unixTime': 1442510904034,
u'value': u'inactive'},
u'temperature': {u'date': u'2015-09-17T14:39:39.101Z',
u'name': u'temperature',
u'type': None,
u'unit': None,
u'unixTime': 1442500779101,
u'value': u'71'}},
u'device': {u'id': u'a256e80e-f415-437b-bf25-e381ea3ff7a9',
u'label': u'SmartSense Motion/Temp Sensor 01',
u'name': u'SmartSense Motion/Temp Sensor'},
u'label': u'SmartSense Motion/Temp Sensor 01'}]}
| apache-2.0 | -9,113,373,430,056,118,000 | 69.010753 | 92 | 0.298034 | false |
brint/fastfood | fastfood/templating.py | 1 | 2315 | # Copyright 2015 Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Jinja templating for Fastfood."""
import os
import re
import jinja2
# create a jinja env, overriding delimiters
# |{ ... }| replaces the default {{ ... }} -- presumably to avoid clashing
# with braces in the generated Ruby/Chef output; confirm against templates.
JINJA_ENV = jinja2.Environment(variable_start_string='|{',
                               variable_end_string='}|',
                               trim_blocks=True)
# Patterns recognizing Chef node references that must be emitted verbatim
# (unquoted) in generated Ruby: attribute access like node['foo']['bar']
# and dotted constants like node.run_state.
# Fix: use raw strings -- the previous plain strings relied on invalid
# escape sequences such as '\[', which raise DeprecationWarning today and
# become a SyntaxError in future Python versions.
NODE_ATTR_RE = r"^node((\['([\w_-]+)'\])+)"
CHEF_CONST_RE = r"^node\.([\w_-]+)"


def qstring(option):
    """Return *option* single-quoted unless it is a Chef node reference.

    Node attribute accesses and node constants are returned unchanged so
    the generated Ruby evaluates them instead of treating them as strings.
    """
    if (re.match(NODE_ATTR_RE, option) is None and
            re.match(CHEF_CONST_RE, option) is None):
        return "'%s'" % option
    else:
        return option
JINJA_ENV.globals['qstring'] = qstring
def render_templates(*files, **template_map):
"""Render jinja templates according to template_map.
Return a list of [(path, result), (...)]
"""
return list(render_templates_generator(*files, **template_map))
def render_templates_generator(*files, **template_map):
"""Render jinja templates according to template_map.
Yields (path, result)
"""
for path in files:
if not os.path.isfile(path):
raise ValueError("Template file %s not found"
% os.path.relpath(path))
else:
try:
template = JINJA_ENV.from_string(open(path).read())
except jinja2.TemplateSyntaxError as err:
msg = ("Error rendering jinja2 template for file %s "
"on line %s. Error: %s"
% (path, err.lineno, err.message))
raise type(err)(
msg, err.lineno, filename=os.path.basename(path))
result = template.render(**template_map)
if not result.endswith('\n'):
result += '\n'
yield path, result
| apache-2.0 | -2,063,054,368,301,933,300 | 31.605634 | 74 | 0.601728 | false |
quodlibet/quodlibet | quodlibet/_init.py | 1 | 12725 | # Copyright 2012 Christoph Reiter
# 2020 Nick Boultbee
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import os
import sys
import warnings
import logging
from senf import environ, argv, fsn2text
from quodlibet.const import MinVersions
from quodlibet import config
from quodlibet.util import is_osx, is_windows, i18n
from quodlibet.util.dprint import print_e, PrintHandler, print_d
from quodlibet.util.urllib import install_urllib2_ca_file
from ._main import get_base_dir, is_release, get_image_dir, get_cache_dir
# Module-level one-shot guards for init_cli() / init().
_cli_initialized = False
_initialized = False


def _init_gtk_debug(no_excepthook):
    """Install the error-report hook (unless excepthook use is disabled)."""
    from quodlibet.errorreport import enable_errorhook

    enable_errorhook(not no_excepthook)


def is_init():
    """Returns if init() was called"""
    return _initialized
def init(no_translations=False, no_excepthook=False, config_file=None):
    """This needs to be called before any API can be used.
    Might raise in case of an error.

    Pass no_translations=True to disable translations (used by tests)
    """

    global _initialized

    # Idempotent: later calls are no-ops.
    if _initialized:
        return

    # Order matters: CLI-level setup (config, gettext, formats, GLib)
    # before the GTK/GStreamer/DBus pieces that depend on it.
    init_cli(no_translations=no_translations, config_file=config_file)
    _init_gtk()
    _init_gtk_debug(no_excepthook=no_excepthook)
    _init_gst()
    _init_dbus()

    _initialized = True
def _init_gettext(no_translations=False):
    """Call before using gettext helpers"""

    if no_translations:
        # "C" forces the untranslated locale.
        language = u"C"
    else:
        language = config.gettext("settings", "language")
        if language:
            print_d(f"Using language in QL settings: {language!r}")
        else:
            # None lets i18n pick the system locale.
            language = None

    i18n.init(language)

    # Use the locale dir in ../build/share/locale if there is one
    localedir = os.path.join(
        os.path.dirname(get_base_dir()), "build", "share", "locale")
    if os.path.isdir(localedir):
        print_d(f"Using local locale dir {localedir}")
    else:
        localedir = None

    i18n.register_translation("quodlibet", localedir)
    # Test hook: replace all translated strings with a fixed debug text.
    debug_text = environ.get("QUODLIBET_TEST_TRANS")
    if debug_text is not None:
        i18n.set_debug_text(fsn2text(debug_text))
def _init_python():
    """Validate the Python runtime and install base logging/SSL tweaks."""

    MinVersions.PYTHON3.check(sys.version_info)

    if is_osx():
        # We build our own openssl on OSX and need to make sure that
        # our own ca file is used in all cases as the non-system openssl
        # doesn't use the system certs
        install_urllib2_ca_file()

    if is_windows():
        # Not really needed on Windows as pygi-aio seems to work fine, but
        # wine doesn't have certs which we use for testing.
        install_urllib2_ca_file()

    if is_windows() and os.sep != "\\":
        # In the MSYS2 console MSYSTEM is set, which breaks os.sep/os.path.sep
        # If you hit this do a "setup.py clean -all" to get rid of the
        # bytecode cache then start things with "MSYSTEM= ..."
        raise AssertionError("MSYSTEM is set (%r)" % environ.get("MSYSTEM"))

    # Route Python logging through Quod Libet's print handler.
    logging.getLogger().addHandler(PrintHandler())
def _init_formats():
    """Initialize audio format support (quodlibet.formats)."""
    from quodlibet.formats import init
    init()
def init_cli(no_translations=False, config_file=None):
    """This needs to be called before any API can be used.
    Might raise in case of an error.

    Like init() but for code not using Gtk etc.
    """

    global _cli_initialized

    # Idempotent: later calls are no-ops.
    if _cli_initialized:
        return

    _init_python()
    # Defaults first so an explicit config file can override them.
    config.init_defaults()
    if config_file is not None:
        config.init(config_file)
    _init_gettext(no_translations)
    _init_formats()
    _init_g()

    _cli_initialized = True
def _init_dbus():
    """Setup dbus mainloop integration. Call before using dbus"""

    # To make GDBus fail early and we don't have to wait for a timeout
    if is_osx() or is_windows():
        os.environ["DBUS_SYSTEM_BUS_ADDRESS"] = "something-invalid"
        os.environ["DBUS_SESSION_BUS_ADDRESS"] = "something-invalid"

    try:
        from dbus.mainloop.glib import DBusGMainLoop, threads_init
    except ImportError:
        # Fall back to the legacy dbus.glib integration; if that's also
        # missing, silently run without dbus support.
        try:
            import dbus.glib
            dbus.glib
        except ImportError:
            return
    else:
        threads_init()
        DBusGMainLoop(set_as_default=True)
def _fix_gst_leaks():
    """gst_element_add_pad and gst_bin_add are wrongly annotated and lead
    to PyGObject refing the passed element.

    Work around by adding a wrapper that unrefs afterwards.
    Can be called multiple times.

    https://bugzilla.gnome.org/show_bug.cgi?id=741390
    https://bugzilla.gnome.org/show_bug.cgi?id=702960
    """

    from gi.repository import Gst

    assert Gst.is_initialized()

    def do_wrap(func):
        # Wrap the method so the extra reference taken by the broken
        # annotation is dropped right after the call.
        def wrap(self, obj):
            result = func(self, obj)
            obj.unref()
            return result
        return wrap

    # Probe whether the bug is present: after add(), a leaky binding leaves
    # the element with refcount 3 instead of 2. Only patch in that case,
    # which also keeps repeated calls safe.
    parent = Gst.Bin()
    elm = Gst.Bin()
    parent.add(elm)
    if elm.__grefcount__ == 3:
        elm.unref()
        Gst.Bin.add = do_wrap(Gst.Bin.add)

    # Same probe for Element.add_pad.
    pad = Gst.Pad.new("foo", Gst.PadDirection.SRC)
    parent.add_pad(pad)
    if pad.__grefcount__ == 3:
        pad.unref()
        Gst.Element.add_pad = do_wrap(Gst.Element.add_pad)
def _init_g():
    """Call before using GdkPixbuf/GLib/Gio/GObject"""

    import gi

    # Pin the GI namespace versions we were developed against.
    gi.require_version("GLib", "2.0")
    gi.require_version("Gio", "2.0")
    gi.require_version("GObject", "2.0")
    gi.require_version("GdkPixbuf", "2.0")

    # Newer glib is noisy regarding deprecated signals/properties
    # even with stable releases.
    if is_release():
        warnings.filterwarnings(
            'ignore', '.* It will be removed in a future version.',
            Warning)

    # blacklist some modules, simply loading can cause segfaults
    sys.modules["glib"] = None
    sys.modules["gobject"] = None
def _init_gtk():
    """Call before using Gtk/Gdk"""

    import gi

    # Optional Pango/Cairo backend override from user settings (only if the
    # user hasn't set it in the environment already).
    if config.getboolean("settings", "pangocairo_force_fontconfig") and \
            "PANGOCAIRO_BACKEND" not in environ:
        environ["PANGOCAIRO_BACKEND"] = "fontconfig"

    # disable for consistency and trigger events seem a bit flaky here
    if config.getboolean("settings", "scrollbar_always_visible"):
        environ["GTK_OVERLAY_SCROLLING"] = "0"

    try:
        # not sure if this is available under Windows
        gi.require_version("GdkX11", "3.0")
        from gi.repository import GdkX11
        GdkX11
    except (ValueError, ImportError):
        pass

    gi.require_version("Gtk", "3.0")
    gi.require_version("Gdk", "3.0")
    gi.require_version("Pango", "1.0")
    gi.require_version('Soup', '2.4')
    gi.require_version('PangoCairo', "1.0")

    from gi.repository import Gtk
    from quodlibet.qltk import ThemeOverrider, gtk_version

    # PyGObject doesn't fail anymore when init fails, so do it ourself
    initialized, argv[:] = Gtk.init_check(argv)
    if not initialized:
        raise SystemExit("Gtk.init failed")

    # include our own icon theme directory
    theme = Gtk.IconTheme.get_default()
    theme_search_path = get_image_dir()
    assert os.path.exists(theme_search_path)
    theme.append_search_path(theme_search_path)

    # Force menu/button image related settings. We might show too many atm
    # but this makes sure we don't miss cases where we forgot to force them
    # per widget.
    # https://bugzilla.gnome.org/show_bug.cgi?id=708676
    warnings.filterwarnings('ignore', '.*g_value_get_int.*', Warning)

    # some day... but not now
    warnings.filterwarnings(
        'ignore', '.*Stock items are deprecated.*', Warning)
    warnings.filterwarnings(
        'ignore', '.*:use-stock.*', Warning)
    warnings.filterwarnings(
        'ignore', r'.*The property GtkAlignment:[^\s]+ is deprecated.*',
        Warning)

    settings = Gtk.Settings.get_default()
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        settings.set_property("gtk-button-images", True)
        settings.set_property("gtk-menu-images", True)
    if hasattr(settings.props, "gtk_primary_button_warps_slider"):
        # https://bugzilla.gnome.org/show_bug.cgi?id=737843
        settings.set_property("gtk-primary-button-warps-slider", True)

    # Make sure PyGObject includes support for foreign cairo structs
    try:
        gi.require_foreign("cairo")
    except ImportError:
        print_e("PyGObject is missing cairo support")
        sys.exit(1)

    css_override = ThemeOverrider()

    if sys.platform == "darwin":
        # fix duplicated shadows for popups with Gtk+3.14
        style_provider = Gtk.CssProvider()
        style_provider.load_from_data(b"""
            GtkWindow {
                box-shadow: none;
            }
            .tooltip {
                border-radius: 0;
                padding: 0;
            }
            .tooltip.background {
                background-clip: border-box;
            }
            """)
        css_override.register_provider("", style_provider)

    if gtk_version[:2] >= (3, 20):
        # https://bugzilla.gnome.org/show_bug.cgi?id=761435
        style_provider = Gtk.CssProvider()
        style_provider.load_from_data(b"""
            spinbutton, button {
                min-height: 22px;
            }

            .view button {
                min-height: 24px;
            }

            entry {
                min-height: 28px;
            }

            entry.cell {
                min-height: 0;
            }
        """)
        css_override.register_provider("Adwaita", style_provider)
        css_override.register_provider("HighContrast", style_provider)

        # https://github.com/quodlibet/quodlibet/issues/2541
        style_provider = Gtk.CssProvider()
        style_provider.load_from_data(b"""
            treeview.view.separator {
                min-height: 2px;
                color: @borders;
            }
        """)
        css_override.register_provider("Ambiance", style_provider)
        css_override.register_provider("Radiance", style_provider)
        # https://github.com/quodlibet/quodlibet/issues/2677
        css_override.register_provider("Clearlooks-Phenix", style_provider)
        # https://github.com/quodlibet/quodlibet/issues/2997
        css_override.register_provider("Breeze", style_provider)

    if gtk_version[:2] >= (3, 18):
        # Hack to get some grab handle like thing for panes
        style_provider = Gtk.CssProvider()
        style_provider.load_from_data(b"""
            GtkPaned.vertical, paned.vertical >separator {
                -gtk-icon-source: -gtk-icontheme("view-more-symbolic");
                -gtk-icon-transform: rotate(90deg) scaleX(0.1) scaleY(3);
            }

            GtkPaned.horizontal, paned.horizontal >separator {
                -gtk-icon-source: -gtk-icontheme("view-more-symbolic");
                -gtk-icon-transform: rotate(0deg) scaleX(0.1) scaleY(3);
            }
        """)
        css_override.register_provider("", style_provider)

    # https://bugzilla.gnome.org/show_bug.cgi?id=708676
    warnings.filterwarnings('ignore', '.*g_value_get_int.*', Warning)

    # blacklist some modules, simply loading can cause segfaults
    sys.modules["gtk"] = None
    sys.modules["gpod"] = None
    sys.modules["gnome"] = None

    from quodlibet.qltk import pygobject_version, gtk_version

    MinVersions.GTK.check(gtk_version)
    MinVersions.PYGOBJECT.check(pygobject_version)
def _init_gst():
    """Call once before importing GStreamer.

    Points GStreamer at a per-platform / per-architecture registry cache
    file, blacklists modules known to segfault on import, and initializes
    Gst defensively: if init fails, ``gi.repository.Gst`` is blacklisted so
    later imports fail loudly instead of crashing.

    NOTE(review): ``environ``, ``get_cache_dir``, ``argv``, ``print_e`` and
    ``_fix_gst_leaks`` come from module-level imports/definitions not shown
    in this chunk.
    """

    # Separate registry files per platform and pointer size so 32- and
    # 64-bit interpreters do not clobber each other's plugin cache.
    arch_key = "64" if sys.maxsize > 2**32 else "32"
    registry_name = "gst-registry-%s-%s.bin" % (sys.platform, arch_key)
    environ["GST_REGISTRY"] = os.path.join(get_cache_dir(), registry_name)

    # Must run before anything pulls in Gst, or the env var above is moot.
    assert "gi.repository.Gst" not in sys.modules

    import gi

    # We don't want python-gst, it changes API..
    assert "gi.overrides.Gst" not in sys.modules
    sys.modules["gi.overrides.Gst"] = None

    # blacklist some modules, simply loading can cause segfaults
    sys.modules["gst"] = None

    # We don't depend on Gst overrides, so make sure it's initialized.
    try:
        gi.require_version("Gst", "1.0")
        from gi.repository import Gst
    except (ValueError, ImportError):
        # GStreamer simply not available; callers must cope.
        return

    if Gst.is_initialized():
        return

    from gi.repository import GLib

    try:
        # Gst.init_check may rewrite argv (it strips Gst-specific options).
        ok, argv[:] = Gst.init_check(argv)
    except GLib.GError:
        print_e("Failed to initialize GStreamer")
        # Uninited Gst segfaults: make sure no one can use it
        sys.modules["gi.repository.Gst"] = None
    else:
        # monkey patching ahead
        _fix_gst_leaks()
| gpl-2.0 | -4,163,567,065,139,508,700 | 29.369928 | 78 | 0.628919 | false |
EUDAT-B2SHARE/b2share | b2share/modules/records/ext.py | 1 | 2716 | # -*- coding: utf-8 -*-
#
# This file is part of EUDAT B2Share.
# Copyright (C) 2016 University of Tuebingen, CERN.
# Copyright (C) 2015 University of Tuebingen.
#
# B2Share is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# B2Share is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with B2Share; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""B2share records extension"""
from __future__ import absolute_import, print_function
from invenio_records_rest.utils import PIDConverter
from invenio_indexer.signals import before_record_index
from invenio_records_rest import utils
from .triggers import register_triggers
from .errors import register_error_handlers
from .views import create_blueprint
from .indexer import indexer_receiver
from .cli import records as records_cmd
class B2ShareRecords(object):
    """B2Share Records extension.

    Standard Flask extension object: wires CLI commands, signal handlers,
    blueprints and URL converters for B2Share records into the application.
    """

    def __init__(self, app=None):
        """Extension initialization.

        :param app: optional Flask application; when given, ``init_app``
            is invoked immediately (app-factory pattern otherwise).
        """
        if app:
            self.init_app(app)

    def init_app(self, app):
        """Flask application initialization.

        Registers the CLI command group, DB triggers, error handlers,
        REST blueprints, the indexer signal receiver and the ``pid``
        URL converter on *app*.
        """
        self.init_config(app)
        app.cli.add_command(records_cmd)
        app.extensions['b2share-records'] = self
        register_triggers(app)
        register_error_handlers(app)

        # Register records API blueprints
        endpoints = app.config['B2SHARE_RECORDS_REST_ENDPOINTS']
        app.register_blueprint(create_blueprint(endpoints))

        @app.before_first_request
        def extend_default_endpoint_prefixes():
            """Extend invenio-records-rest's default endpoint prefixes
            with B2Share's record endpoints (runs once, before the first
            request, so invenio-records-rest is fully set up by then)."""
            endpoint_prefixes = utils.build_default_endpoint_prefixes(endpoints)
            current_records_rest = app.extensions['invenio-records-rest']
            current_records_rest.default_endpoint_prefixes.update(
                endpoint_prefixes
            )

        # Index records through our custom receiver before they are sent
        # to the search engine.
        before_record_index.connect(indexer_receiver, sender=app)
        app.url_map.converters['pid'] = PIDConverter

    def init_config(self, app):
        """Initialize configuration."""
        # No configuration defaults to install at the moment.
        pass
| gpl-2.0 | 5,970,646,417,336,346,000 | 35.213333 | 80 | 0.705817 | false |
romain-dartigues/cf-python-client | setup.py | 1 | 2416 | import os
import shutil
import subprocess
import sys
from setuptools import setup, find_packages, Command
# Layout constants: sources live under src/python/cloudfoundry_client.
src_dir = 'src/python'
package_directory = 'cloudfoundry_client'
package_name = 'cloudfoundry-client'
loggregator_dir = 'loggregator'

sys.path.insert(0, os.path.realpath(src_dir))

# Read ``__version__`` out of the package's __init__.py without importing
# the package itself (its runtime dependencies may be absent at build time).
version_file = '%s/%s/__init__.py' % (src_dir, package_directory)
with open(version_file, 'r') as f:
    for line in f:
        if '__version__' in line:
            # exec() call form is valid on both Python 2 and Python 3;
            # the original bare ``exec line`` statement is Py2-only syntax.
            exec(line)
            break
    else:
        raise AssertionError('Failed to load version from %s' % version_file)
def purge_sub_dir(path):
    """Recursively delete *path*, resolved relative to this file's directory."""
    base_dir = os.path.dirname(__file__)
    target = os.path.join(base_dir, path)
    shutil.rmtree(target)
class GenerateCommand(Command):
    """setuptools command that compiles the bundled .proto files with protoc."""

    description = "generate protobuf class generation"
    user_options = []

    def initialize_options(self):
        # This command takes no options.
        pass

    def finalize_options(self):
        # Nothing to validate.
        pass

    def run(self):
        # Directory containing the loggregator protobuf definitions.
        proto_dir = os.path.join(
            os.path.dirname(__file__), src_dir, package_directory, loggregator_dir)
        for entry in os.listdir(proto_dir):
            if not entry.endswith('.proto'):
                continue
            print('Generating %s' % entry)
            command = ['protoc', '-I', proto_dir,
                       '--python_out=%s' % proto_dir,
                       os.path.join(proto_dir, entry)]
            subprocess.call(command)
# Package metadata and build configuration.
# NOTE(review): the open() handles below are never closed — harmless in a
# short-lived setup script, but worth tidying if this file grows.
setup(name=package_name,
      version=__version__,  # loaded above from cloudfoundry_client/__init__.py
      zip_safe=True,
      packages=find_packages(where=src_dir),
      author='Benjamin Einaudi',
      author_email='[email protected]',
      description='A client library for CloudFoundry',
      long_description=open('README.rst').read(),
      url='http://github.com/antechrestos/cf-python-client',
      classifiers=[
          "Programming Language :: Python",
          "Natural Language :: English",
          "Operating System :: OS Independent",
          "Programming Language :: Python :: 2.7",
          "Topic :: Communications",
      ],
      # Installs the ``cloudfoundry-client`` console entry point.
      entry_points={
          'console_scripts': [
              'cloudfoundry-client = %s.main:main' % package_directory,
          ]
      },
      # ``python setup.py generate`` compiles the bundled .proto files.
      cmdclass=dict(generate=GenerateCommand),
      package_dir={package_directory: '%s/%s' % (src_dir, package_directory)},
      install_requires=[requirement.rstrip(' \r\n') for requirement in open('requirements.txt')],
      tests_require=[
          'mock==2.0.0',
      ],
      test_suite='test',
      )
| apache-2.0 | 6,585,751,665,811,986,000 | 31.213333 | 106 | 0.611755 | false |
nuagenetworks/vspk-python | vspk/v6/nuethernetsegmentgroup.py | 1 | 7771 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUVLANsFetcher
from .fetchers import NUAlarmsFetcher
from .fetchers import NUEnterprisePermissionsFetcher
from bambou import NURESTObject
class NUEthernetSegmentGroup(NURESTObject):
    """ Represents a EthernetSegmentGroup in the VSD

        Notes:
            Group of Ethernet Segments with same ID.
    """

    # REST resource naming used by the bambou framework.
    __rest_name__ = "ethernetsegmentgroup"
    __resource_name__ = "ethernetsegmentgroups"

    ## Constants

    # Allowed value for ``port_type``.
    CONST_PORT_TYPE_ACCESS = "ACCESS"

    def __init__(self, **kwargs):
        """ Initializes a EthernetSegmentGroup instance

            Notes:
                You can specify all parameters while calling this methods.
                A special argument named `data` will enable you to load the
                object from a Python dictionary

            Examples:
                >>> ethernetsegmentgroup = NUEthernetSegmentGroup(id=u'xxxx-xxx-xxx-xxx', name=u'EthernetSegmentGroup')
                >>> ethernetsegmentgroup = NUEthernetSegmentGroup(data=my_dict)
        """

        super(NUEthernetSegmentGroup, self).__init__()

        # Read/Write Attributes

        self._vlan_range = None
        self._name = None
        self._description = None
        self._virtual = None
        self._port_type = None
        self._ethernet_segment_id = None

        # Map each local attribute to its remote (VSD API) name and type.
        self.expose_attribute(local_name="vlan_range", remote_name="VLANRange", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False)
        self.expose_attribute(local_name="virtual", remote_name="virtual", attribute_type=bool, is_required=False, is_unique=False)
        self.expose_attribute(local_name="port_type", remote_name="portType", attribute_type=str, is_required=True, is_unique=False, choices=[u'ACCESS'])
        self.expose_attribute(local_name="ethernet_segment_id", remote_name="ethernetSegmentID", attribute_type=str, is_required=False, is_unique=False)

        # Fetchers

        # Lazy fetchers for child resources of this object.
        self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.vlans = NUVLANsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.alarms = NUAlarmsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self.enterprise_permissions = NUEnterprisePermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")

        self._compute_args(**kwargs)

    # Properties

    @property
    def vlan_range(self):
        """ Get vlan_range value.

            Notes:
                VLAN Range of the EthernetSegment. Format must conform to a-b,c,d-f where a,b,c,d,f are integers from range 0 to 4094.

                This attribute is named `VLANRange` in VSD API.
        """
        return self._vlan_range

    @vlan_range.setter
    def vlan_range(self, value):
        """ Set vlan_range value.

            Notes:
                VLAN Range of the EthernetSegment. Format must conform to a-b,c,d-f where a,b,c,d,f are integers from range 0 to 4094.

                This attribute is named `VLANRange` in VSD API.
        """
        self._vlan_range = value

    @property
    def name(self):
        """ Get name value.

            Notes:
                Name of the Ethernet Segment Group
        """
        return self._name

    @name.setter
    def name(self, value):
        """ Set name value.

            Notes:
                Name of the Ethernet Segment Group
        """
        self._name = value

    @property
    def description(self):
        """ Get description value.

            Notes:
                Description of the Ethernet Segment Group
        """
        return self._description

    @description.setter
    def description(self, value):
        """ Set description value.

            Notes:
                Description of the Ethernet Segment Group
        """
        self._description = value

    @property
    def virtual(self):
        """ Get virtual value.

            Notes:
                Indicates if Ethernet Segment is Virtual.
        """
        return self._virtual

    @virtual.setter
    def virtual(self, value):
        """ Set virtual value.

            Notes:
                Indicates if Ethernet Segment is Virtual.
        """
        self._virtual = value

    @property
    def port_type(self):
        """ Get port_type value.

            Notes:
                Type of the Port.

                This attribute is named `portType` in VSD API.
        """
        return self._port_type

    @port_type.setter
    def port_type(self, value):
        """ Set port_type value.

            Notes:
                Type of the Port.

                This attribute is named `portType` in VSD API.
        """
        self._port_type = value

    @property
    def ethernet_segment_id(self):
        """ Get ethernet_segment_id value.

            Notes:
                Unique Identifier of the Ethernet Segment.

                This attribute is named `ethernetSegmentID` in VSD API.
        """
        return self._ethernet_segment_id

    @ethernet_segment_id.setter
    def ethernet_segment_id(self, value):
        """ Set ethernet_segment_id value.

            Notes:
                Unique Identifier of the Ethernet Segment.

                This attribute is named `ethernetSegmentID` in VSD API.
        """
        self._ethernet_segment_id = value
| bsd-3-clause | -7,263,087,607,523,101,000 | 28.439394 | 153 | 0.601982 | false |
ucla/PushHubCore | pushhub/models/listener.py | 1 | 2600 | """
Copyright (c) 2013, Regents of the University of California
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the University of California nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
"""
Listeners are subscribers that wish to be notified any time
a new topic is added to the hub.
"""
from persistent import Persistent
from repoze.folder import Folder
import requests
from zope.interface import Interface, implements
from .topic import Topics
from ..utils import is_valid_url
import logging
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
class Listeners(Folder):
    """Folder to hold listeners (persistent repoze.folder container)."""
    title = "Listeners"
class IListener(Interface):
    """Marker interface for listeners; carries no methods or attributes."""
    pass
class Listener(Persistent):
    """A subscriber that wishes to be notified whenever a new topic is
    added to the hub.

    Attributes:
        callback_url: URL that receives notification requests.
        topics: ``Topics`` folder owned by this listener.
    """
    implements(IListener)

    def __init__(self, callback_url):
        """Validate and store the callback URL.

        Raises:
            ValueError: if ``callback_url`` is not a well-formed URL.
        """
        if not is_valid_url(callback_url):
            # Bug fix: the URL was never interpolated, so users saw the
            # literal string 'Malformed URL: %s' with no URL in it.
            raise ValueError(
                'Malformed URL: %s' % callback_url
            )
        self.callback_url = callback_url
        self.topics = Topics()

    def notify(self, topic):
        """Send *topic*'s current payload to this listener's callback URL.

        Returns:
            The ``requests`` response object from the callback request.
        """
        headers, data = topic.get_request_data()
        logger.debug('Notify listener: %s' % self.callback_url)
        # NOTE(review): this issues a GET carrying a request body; confirm
        # the receiving end expects GET rather than POST here.
        response = requests.get(self.callback_url, data=data, headers=headers)
        return response
| bsd-3-clause | 8,600,699,612,325,148,000 | 34.135135 | 79 | 0.746154 | false |
bluedynamics/activities.runtime | src/activities/runtime/runtime.py | 1 | 10151 | # -*- coding: utf-8 -*-
#
# Copyright 2009: Johannes Raggam, BlueDynamics Alliance
# http://bluedynamics.com
# GNU Lesser General Public License Version 2 or later
__author__ = """Johannes Raggam <[email protected]>"""
__docformat__ = 'plaintext'
from zope.interface import implements
from zope.component import getMultiAdapter
from zope.component import getUtility
import activities.runtime.tokens # registers multiadapter
from activities.metamodel.interfaces import ActivitiesException
from activities.metamodel.interfaces import IAction
from activities.metamodel.interfaces import IActivity
from activities.metamodel.interfaces import IActivityEdge
from activities.metamodel.interfaces import IActivityFinalNode
from activities.metamodel.interfaces import IDecisionNode
from activities.metamodel.interfaces import IFinalNode
from activities.metamodel.interfaces import IForkNode
from activities.metamodel.interfaces import IInitialNode
from activities.metamodel.interfaces import IMergeNode
from activities.metamodel.interfaces import IStereotype
from activities.metamodel.interfaces import ITaggedValue
from activities.runtime.interfaces import IActionInfo
from activities.runtime.interfaces import IActivityRuntime
from activities.runtime.interfaces import IExecution
from activities.runtime.interfaces import ITaggedValueDict
from activities.runtime.interfaces import IToken
from activities.runtime.interfaces import ITokenFilter
from activities.runtime.tokens import Token
from activities.runtime.tokens import TokenPool
import logging
# Package-wide logger for the activities runtime.
log = logging.getLogger('activities.runtime')
class ActivityRuntimeError(ActivitiesException):
    """Raised on runtime misuse (e.g. restarting a running activity) or
    when an activity/node pre- or postcondition is violated."""
    pass
class ActivityRuntime(object):
    """Token-based interpreter for a UML-style activity diagram.

    Tokens live in ``self.token_pool``; each ``next()`` call advances the
    activity by one step, consuming tokens on incoming edges and producing
    tokens on outgoing edges.
    """
    implements(IActivityRuntime)

    def __init__(self, activity):
        """Bind the runtime to an ``IActivity`` and create an empty pool.

        Raises ActivityRuntimeError if *activity* is not an IActivity.
        """
        try:
            assert(IActivity.providedBy(activity))
        except AssertionError:
            raise ActivityRuntimeError,\
                " ActivityRuntime must be initialized with an Activity instance"
        self.activity = activity
        self.token_pool = TokenPool()

    def start(self, data=None):
        """Check preconditions, load profiles and seed initial tokens.

        A non-empty token pool means the activity is already running.
        """
        try:
            assert(len(self.token_pool) == 0)
        except AssertionError:
            raise ActivityRuntimeError,\
                "A active activity cannot be re-started."
        self._eval_constraints(self.activity.preconditions, data)
        for profile in self.activity.package.profiles:
            # Importing executions associated with model
            # Imported modules are not available because they are not bound
            # to a variable. but we just want to execute modules and register
            # utilities here.
            __import__(profile.__name__)
        # TODO: check guard conditions for outgoing_nodes here?
        # Seed one token on every edge leaving every initial node.
        for node in self.activity.filtereditems(IInitialNode):
            for edge in node.outgoing_edges:
                self._create_token(edge, data)
        self._unlock_token()

    def stop(self):
        """Terminate the activity by discarding every token in the pool."""
        log.info('stopping activity')
        del self.token_pool[:]

    def next(self):
        """Advance the activity by one step.

        Visits every node; a node fires when its incoming edges carry the
        required (unlocked) tokens. Fired nodes consume their tokens, may
        execute an action, and emit new (locked) tokens on outgoing edges.
        Returns merged output data when a final node was reached with data.
        """
        data_output = {}
        do_stop = False
        for node in self.activity.nodes:
            # TODO: if node is still executing (and may not be a reentrant or
            # so), don't let it execute again. only needed for asyn behavior

            # InitialNode only considered at runtime-start
            if IInitialNode.providedBy(node):
                continue

            ### Is node executable?
            is_merge_node = IMergeNode.providedBy(node)
            can_execute = not is_merge_node # TODO: i don't like this construct
            for edge in node.incoming_edges:
                tokens = getMultiAdapter((self.token_pool, edge), ITokenFilter)
                # Only unlocked tokens count; locked ones were created in
                # this very step and must wait for the next one.
                tok = [tk for tk in tokens if not tk.lock]
                if is_merge_node:
                    # merge behavior: any token on any edge fires node
                    can_execute = can_execute or tok
                else:
                    # implicit and explicit synchronisation (join, et. al):
                    # only when all tokens are present, node is fired.
                    can_execute = can_execute and tok
            if not can_execute:
                continue

            ### Getting and destroying tokens and merging data
            data = {}
            for edge in node.incoming_edges:
                tokens = getMultiAdapter((self.token_pool, edge), ITokenFilter)
                for token in tokens:
                    if token.lock:
                        # don't manipulate locked tokens (in case of IMergeNode)
                        continue
                    # merge the token's data
                    data = self._merge_data(data, token.data)
                    # when nodes execute, tokens are deleted
                    self.token_pool.remove(token)

            ### Executing actions
            do_set_tokens = True
            if IAction.providedBy(node):
                self._eval_constraints(node.preconditions, data)
                data = self._execute(node, data)
                self._eval_constraints(node.postconditions, data)
                # contract: if data is none, there is async behavior and action
                # is still executing.
                # if data is not none, processing on node can continue
                # TODO: check this contract, formalise and document it.
                do_set_tokens = data is not None
            if not do_set_tokens:
                continue

            # TODO: UML2's MergeNode behavior does not reduce concurrency
            # here the concurrency is reduced if 2 tokens come into the node
            # at a time. THIS SHOULD BE CHANGED...

            ### Setting tokens
            # else_branch: None = no "else" edge seen yet; an edge = the
            # pending "else" edge; False = some regular edge already fired.
            else_branch = None
            for edge in node.outgoing_edges:
                if edge.guard \
                   and not edge.guard == "else" \
                   and not eval(edge.guard, None, data):
                    continue
                elif edge.guard == "else" and else_branch is None:
                    else_branch = edge
                else:
                    else_branch = False
                    # create tokens for outgoing edges
                    self._create_token(edge, data)
                    if IDecisionNode.providedBy(node):
                        # XOR semantic for DecisionNode: only one outgoing edge
                        # can traverse.
                        break
            # Fire the pending "else" edge only if no regular edge fired.
            if IActivityEdge.providedBy(else_branch):
                self._create_token(else_branch, data)

            ### Prepare for FinalNode if so
            # Collecting data from tokens running into IFinalNode
            # TODO: only when reaching IFinalNode data is returned?
            if IFinalNode.providedBy(node):
                data_output = self._merge_data(data_output, data)

            # The activity must be stopped when ActivityFinalNode is reached
            # But let other tokens be processed before. The order in which a
            # Token reaches ActivityFinalNode is not deterministic anyways.
            if IActivityFinalNode.providedBy(node):
                do_stop = True

        # after all nodes processed, unlock the tokens created in this run
        self._unlock_token()
        if do_stop:
            self.stop()

        # TODO: does this really mean that activity is reached it's end or is
        # it just an implicit stop? len(self.token_pool) == 0
        # maybe do_stop should apply here
        # TODO: should token be erased before postconstraints are evaluated?
        # maybe better before, so that tokens are preserved and activity is
        # hindered to stop
        if len(self.token_pool) == 0:
            self._eval_constraints(self.activity.postconditions, data_output)
        if data_output:
            return data_output

    def print_token_state(self):
        """Debug helper: print every token with its edge name and data."""
        for token in self.token_pool:
            print(self.activity.node(token.edge_uuid).__name__
                  + ': '
                  + str(token)
                  + ', data: ' + str(token.data)
                  )

    # convinience
    ts = print_token_state

    def _eval_constraints(self, constraints, data=None):
        """Evaluate each constraint's specification against *data*;
        raise ActivityRuntimeError on the first unfulfilled one."""
        for constr in constraints:
            try:
                # NameError covers specs referring to keys absent in data.
                assert(eval(constr.specification, None, data))
            except (NameError, AssertionError):
                raise ActivityRuntimeError,\
                    constr.__class__.__name__ + ' not fulfilled: "' +\
                    constr.specification + '"'

    def _merge_data(self, data_dict, new_data_dict):
        """Merge *new_data_dict* into *data_dict* in place and return it.

        Raises ActivityRuntimeError if the same key maps to different
        objects (identity comparison) in both dicts.
        """
        for key in data_dict.keys():
            if key in new_data_dict.keys():
                if data_dict[key] is not new_data_dict[key]:
                    raise ActivityRuntimeError,\
                        """Failed to merge token data:
                        Same key, different values"""
        data_dict.update(new_data_dict)
        return data_dict

    def _unlock_token(self):
        # TOKEN LOCK:
        # locked token's should not be considered for node execution in current
        # step. wait for next step.
        # token lock exist because we want tokens only be passed by one node
        # per step.
        # TOKEN MUST BE UNLOCKED HERE
        for token in self.token_pool:
            token.lock = False

    def _create_token(self, edge, data=None):
        """Helper method to ease token creation.
        """
        # token should not be modified more than once per iteration
        # therefore lock=True
        self.token_pool.append(
            Token(edge_uuid=edge.uuid, data=data, lock=True)
        )

    def _execute(self, action, data):
        """Run every stereotype's registered IExecution utility on *action*,
        threading *data* through each one; return the resulting data."""
        action_info = IActionInfo(action)
        for stereotype in action.stereotypes:
            tgv_dict = ITaggedValueDict(stereotype)
            execution = getUtility(IExecution, name=stereotype.__name__)
            data = execution(action_info, tgv_dict, data)
        log.info('executing: "' + action.__name__ + '"')
        return data

    def _receive(self, action_uuid, data=None):
        # create tokens with data for action with uuid
        # Not implemented yet (placeholder for async action completion).
        pass
| lgpl-3.0 | -6,893,728,240,807,559,000 | 38.964567 | 82 | 0.604768 | false |
eirmag/weboob | weboob/applications/comparoob/comparoob.py | 1 | 5114 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import sys
from weboob.capabilities.pricecomparison import ICapPriceComparison
from weboob.tools.misc import html2text
from weboob.tools.application.repl import ReplApplication
from weboob.tools.application.formatters.iformatter import IFormatter, PrettyFormatter
# Public API of this module.
__all__ = ['Comparoob']
class PriceFormatter(IFormatter):
    """Detailed one-object formatter for a price (used by the 'info' command)."""

    MANDATORY_FIELDS = ('id', 'cost', 'currency', 'shop', 'product')

    def format_obj(self, obj, alias):
        """Render *obj* as a multi-line detail view (title, ID, product,
        cost, optional date, then a shop section)."""
        # Prefer the backend-supplied message; fall back to "shop (location)".
        if hasattr(obj, 'message') and obj.message:
            message = obj.message
        else:
            message = u'%s (%s)' % (obj.shop.name, obj.shop.location)

        result = u'%s%s%s\n' % (self.BOLD, message, self.NC)
        result += u'ID: %s\n' % obj.fullid
        result += u'Product: %s\n' % obj.product.name
        result += u'Cost: %s%s\n' % (obj.cost, obj.currency)
        if hasattr(obj, 'date') and obj.date:
            result += u'Date: %s\n' % obj.date.strftime('%Y-%m-%d')

        result += u'\n%sShop:%s\n' % (self.BOLD, self.NC)
        result += u'\tName: %s\n' % obj.shop.name
        if obj.shop.location:
            result += u'\tLocation: %s\n' % obj.shop.location
        if obj.shop.info:
            # Convert the shop's HTML blurb to text, indented one tab.
            result += u'\n\t' + html2text(obj.shop.info).replace('\n', '\n\t').strip()

        return result
class PricesFormatter(PrettyFormatter):
    """Compact list formatter for prices (used by the 'prices' command)."""

    MANDATORY_FIELDS = ('id', 'cost', 'currency')

    def get_title(self, obj):
        """Return "cost+currency - message", where message is the backend
        message or "shop (location)"; just "cost+currency" if neither."""
        if hasattr(obj, 'message') and obj.message:
            message = obj.message
        elif hasattr(obj, 'shop') and obj.shop:
            message = '%s (%s)' % (obj.shop.name, obj.shop.location)
        else:
            return u'%s%s' % (obj.cost, obj.currency)

        return u'%s%s - %s' % (obj.cost, obj.currency, message)

    def get_description(self, obj):
        """Return the price date as YYYY-MM-DD, or implicitly None if unset."""
        if obj.date:
            return obj.date.strftime('%Y-%m-%d')
class Comparoob(ReplApplication):
    """Weboob console application that compares product prices across
    backends implementing ICapPriceComparison."""

    APPNAME = 'comparoob'
    VERSION = '0.e'
    COPYRIGHT = 'Copyright(C) 2012 Romain Bignon'
    DESCRIPTION = 'Console application to compare products.'
    DEFAULT_FORMATTER = 'table'
    # Command-specific formatters defined above in this module.
    EXTRA_FORMATTERS = {'prices': PricesFormatter,
                        'price': PriceFormatter,
                       }
    COMMANDS_FORMATTERS = {'prices': 'prices',
                           'info': 'price',
                          }
    CAPS = ICapPriceComparison

    def do_prices(self, pattern):
        """Search products matching *pattern*, let the user pick one if
        several match, then list its prices sorted from cheapest."""
        # Collect products from all backends, de-duplicated by name.
        products = []
        for backend, product in self.do('search_products', pattern):
            double = False
            for prod in products:
                if product.name == prod.name:
                    double = True
                    break
            if not double:
                products.append(product)

        product = None
        if len(products) == 0:
            print >>sys.stderr, 'Error: no product found with this pattern'
            return 1
        elif len(products) == 1:
            product = products[0]
        else:
            print 'What product do you want to compare?'
            for i, p in enumerate(products):
                print ' %s%2d)%s %s' % (self.BOLD, i+1, self.NC, p.name)
            r = int(self.ask(' Select a product', regexp='\d+'))
            # NOTE(review): if the first answer is out of range, ``r`` is
            # never re-asked inside this loop — looks like it would spin
            # forever; confirm against the original upstream source.
            while product is None:
                if r <= 0 or r > len(products):
                    print 'Error: Please enter a valid ID'
                    continue
                product = products[r-1]

        self.change_path([u'prices'])
        self.start_format()
        # Gather prices from all backends, then print cheapest first.
        products = []
        for backend, price in self.do('iter_prices', product):
            products.append(price)
        for price in sorted(products, key=self._get_price):
            self.cached_format(price)
        self.flush()

    def _get_price(self, price):
        """Sort key for prices: the raw cost."""
        return price.cost

    def complete_info(self, text, line, *ignored):
        """Tab-completion for the 'info' command: complete object IDs."""
        args = line.split(' ')
        if len(args) == 2:
            return self._complete_object()

    def do_info(self, _id):
        """Show the detailed view of one price, looked up by its ID."""
        if not _id:
            print >>sys.stderr, 'This command takes an argument: %s' % self.get_command_help('info', short=True)
            return 2

        price = self.get_object(_id, 'get_price')
        if not price:
            print >>sys.stderr, 'Price not found: %s' % _id
            return 3

        self.start_format()
        self.format(price)
        self.flush()
| agpl-3.0 | 3,408,489,574,096,930,000 | 33.322148 | 112 | 0.574697 | false |
johnmee/plugin.video.catchuptv.au.ten | resources/lib/networktenaddon/showlist.py | 1 | 5314 | #
# Network Ten CatchUp TV Video Addon
#
# Copyright (c) 2013 Adam Malcontenti-Wilson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import xbmcswift2
from xbmcswift2 import ListItem, SortMethod
import config
import time
import urllib
import urlparse
from networktenvideo.api import NetworkTenVideo
class Module(xbmcswift2.Module):
    """xbmcswift2 sub-module that renders the list of shows/news/sport/live
    categories for the Network Ten catch-up TV addon."""

    def __init__(self):
        super(Module, self).__init__('plugin.video.catchuptv.au.ten.showlist')
        # decorators
        # Applied manually (instead of @self.route) because the decorator
        # needs the instance's route() method.
        self.showlist = self.route('/shows/<type>')(self.showlist)

    def showlist(self, type):
        """Build the listing for the given category *type* ('news',
        'sport', 'live', or anything else for TV shows) and hand the
        resulting items to the plugin."""
        api = NetworkTenVideo(self.plugin.cached(TTL=config.CACHE_TTL))
        shows = []
        if 'news' == type:
            for news in api.get_news():
                fanart_url = api.get_fanart(news)
                item = ListItem.from_dict(
                    label=news['Title'],
                    path=self.url_for('videolist.videolist', explicit=True, query=news['BCQueryForVideoListing'], page='0', fanart=fanart_url),
                )
                if fanart_url:
                    item.set_property('fanart_image', fanart_url)
                if 'Thumbnail' in news:
                    url = news['Thumbnail']
                    # Protocol-relative URLs need an explicit scheme for XBMC.
                    if url.startswith('//'):
                        url = 'http:' + url
                    item.set_thumbnail(url)
                shows.append(item)
        elif 'sport' == type:
            for sport in api.get_sports():
                item = ListItem.from_dict(
                    label=sport['Title'],
                    path=self.url_for('videolist.videolist', explicit=True, query=sport['BCQueryForVideoListing'], page='0'),
                )
                shows.append(item)
        elif 'live' == type:
            for category in api.get_live_categories():
                fanart_url = None
                if 'fanart' in category:
                    fanart_url = category['fanart']
                item = ListItem.from_dict(
                    label=category['title'],
                    path=self.url_for('videolist.videolist', explicit=True, query=category['query'], page='0', fanart=fanart_url),
                )
                if fanart_url:
                    item.set_property('fanart_image', fanart_url)
                if 'thumbnail' in category:
                    item.set_thumbnail(category['thumbnail'])
                shows.append(item)
        else: #tvshows
            for show in api.get_shows():
                info_dict = {}
                if show['IsLongFormAvailable'] is not True: #todo: make this a setting
                    continue
                if 'Genres' in show and len(show['Genres']):
                    info_dict['genre'] = show['Genres'][0]['Name']
                if 'Description' in show:
                    info_dict['plot'] = show['Description']
                if 'CurrentSeasonFirstEpisodeAirDateTime' in show:
                    try:
                        date = time.strptime(show['CurrentSeasonFirstEpisodeAirDateTime'],'%d-%m-%Y %H:%M:%S %p')
                        info_dict['aired'] = time.strftime('%Y-%m-%d', date)
                        info_dict['premiered'] = time.strftime('%Y-%m-%d', date)
                        info_dict['year'] = time.strftime('%Y', date)
                    except Exception, e:
                        # Unparsable air date: just omit the date fields.
                        pass
                if 'Channel' in show:
                    info_dict['studio'] = show['Channel']
                if 'NumberOfVideosFromBCQuery' in show:
                    # not technically correct as this also returns the number of short form as well but close enough
                    info_dict['episode'] = show['NumberOfVideosFromBCQuery']
                if 'BCQueryForVideoListing' in show and len(show['BCQueryForVideoListing']):
                    # Restrict the Brightcove query to full episodes only.
                    query = urlparse.parse_qs(show['BCQueryForVideoListing'], True)
                    if 'all' not in query:
                        query['all'] = []
                    elif not isinstance(query['all'], list):
                        query['all'] = [ query['all'] ]
                    query['all'].append('video_type_long_form:Full Episode')
                else:
                    continue
                fanart_url = api.get_fanart(show)
                item = ListItem.from_dict(
                    label=show['Title'],
                    path=self.url_for('videolist.videolist', explicit=True, query=urllib.urlencode(query, True), fanart=fanart_url), #ShowPageItemId=show['ShowPageItemId']
                    info=info_dict
                )
                if fanart_url:
                    item.set_property('fanart_image', fanart_url)
                if 'Thumbnail' in show:
                    url = show['Thumbnail']
                    if url.startswith('//'):
                        url = 'http:' + url
                    item.set_thumbnail(url)
                shows.append(item)
        self.set_content('tvshows')
        self.plugin.finish(items=shows, sort_methods=[SortMethod.LABEL_IGNORE_THE])
| mit | -7,655,345,065,162,287,000 | 37.230216 | 161 | 0.629469 | false |
plewis/phycas | src/python/probdist/_SquareMatrix.py | 1 | 6596 | from _ProbDistExt import *
class SquareMatrix(SquareMatrixBase):
    """Thin Python wrapper around the C++ square-matrix extension type.

    The underlying implementation stores elements as doubles; this class
    only adds list-based accessors and per-method documentation on top of
    the base class.
    """
    def __init__(self, dimension, value):
        """Create a dimension x dimension matrix with every cell set to value."""
        SquareMatrixBase.__init__(self, dimension, value)
    def duplicate(self):
        """Return an independent copy of this matrix."""
        return SquareMatrixBase.duplicate(self)
    def identity(self):
        """Turn this matrix into an identity matrix in place.

        The dimension of the matrix is unchanged; the diagonal becomes 1
        and every off-diagonal cell becomes 0.
        """
        SquareMatrixBase.identity(self)
    def trace(self):
        """Return the sum of the elements on the main diagonal."""
        return SquareMatrixBase.trace(self)
    def inverse(self):
        """Return a new SquareMatrix that is the inverse of this matrix."""
        return SquareMatrixBase.inverse(self)
    def pow(self, p):
        """Return a new SquareMatrix equal to this matrix raised to the (positive) power p."""
        return SquareMatrixBase.pow(self, p)
    def getDimension(self):
        """Return the number of rows (equal to the number of columns)."""
        return SquareMatrixBase.getDimension(self)
    def getElement(self, i, j):
        """Return the element at row i, column j."""
        return SquareMatrixBase.getElement(self, i, j)
    def setElement(self, i, j, v):
        """Assign value v to the element at row i, column j."""
        SquareMatrixBase.setElement(self, i, j, v)
    def getMatrix(self):
        """Return the matrix contents as a list of row lists."""
        n = self.getDimension()
        flat = SquareMatrixBase.getMatrix(self)
        # The base class hands back a flat row-major sequence; re-chunk it
        # into one Python list per row.
        return [list(flat[row * n:(row + 1) * n]) for row in range(n)]
    def setMatrixFromFlattenedList(self, dim, v):
        """Fill the matrix from a flat row-major sequence.

        `v` must have length dim*dim, where dim is the number of elements
        in a single row or column of the matrix.
        """
        SquareMatrixBase.setMatrix(self, dim, v)
    def setMatrix(self, m):
        """Fill the matrix from a two-dimensional (row-major) list or tuple m."""
        flat = [cell for row in m for cell in row]
        SquareMatrixBase.setMatrix(self, len(m[0]), flat)
    def __repr__(self):
        """Return the string form produced by the underlying implementation."""
        return SquareMatrixBase.__repr__(self)
    def leftMultiplyMatrix(self, matrixOnLeft):
        """Return matrixOnLeft * self as a new SquareMatrix."""
        return SquareMatrixBase.leftMultiplyMatrix(self, matrixOnLeft)
    def rightMultiplyMatrix(self, matrixOnRight):
        """Return self * matrixOnRight as a new SquareMatrix."""
        return SquareMatrixBase.rightMultiplyMatrix(self, matrixOnRight)
    def leftMultiplyVector(self, vectorOnLeft):
        """Return the product of the (transposed) vectorOnLeft with this matrix."""
        return SquareMatrixBase.leftMultiplyVector(self, vectorOnLeft)
    def rightMultiplyVector(self, vectorOnRight):
        """Return the product of this matrix with vectorOnRight."""
        return SquareMatrixBase.rightMultiplyVector(self, vectorOnRight)
    def logAbsDet(self):
        """Return the natural log of the absolute value of the determinant."""
        return SquareMatrixBase.logAbsDet(self)
    def CholeskyDecomposition(self):
        """Return the Cholesky decomposition as a lower triangular matrix.

        Result is None when this matrix is not symmetric and positive
        definite.
        """
        return SquareMatrixBase.CholeskyDecomposition(self)
| mit | -2,730,760,165,720,875,500 | 33.715789 | 78 | 0.431928 | false |
bachvtuan/Backup-to-dropbox | restore.py | 1 | 1879 | # Include the Dropbox SDK
import dropbox, sys, os, json, operator
from dropbox import rest
from datetime import datetime
def end_wrong_syntax():
print "wrong argv python restore.py config_file_path backup_file_name dest_folder"
sys.exit()
try:
	# Guard: this file is meant to be executed directly, not imported.
	if __name__ != '__main__':
		print "not allow"
		sys.exit()
	# Work relative to the directory containing this script so relative
	# paths behave predictably regardless of the caller's cwd.
	current_file = os.path.realpath(__file__)
	current_path = os.path.dirname(current_file)
	os.chdir(current_path)
	print sys.argv
	# Expect exactly three user arguments: config path, backup file name,
	# destination folder (argv[0] is the script itself).
	if len(sys.argv) != 4:
		end_wrong_syntax()
	config_file_path = sys.argv[1]
	backup_file_name = sys.argv[2]
	dest_folder = sys.argv[3]
	if os.path.isdir(dest_folder) is False:
		print "Destination folder doesn't found , please check again: "+ dest_folder
		sys.exit()
	if os.path.isfile(config_file_path) is False:
		print "not found config file: " + config_file_path
		sys.exit()
	# Load JSON configuration with the Dropbox credentials and paths.
	config_file = open(config_file_path, 'r')
	config = json.loads(config_file.read())
	# Get your app key and secret from the Dropbox developer website
	backup_folder = config['backup_folder']
	access_token = config['access_token']
	# NOTE(review): max_file_in_folder is read but never used below -
	# presumably relevant to the companion backup script.
	max_file_in_folder = config['max_file_in_folder']
	restore_path = os.path.join( backup_folder, backup_file_name )
	print "restore path is " + restore_path
	restore_path_computer = os.path.join( dest_folder, backup_file_name )
	print "This process will retore file {0} from dropbox to {1} on your computer".format( restore_path, restore_path_computer )
	# Connect with the stored OAuth token and download the remote file.
	client = dropbox.client.DropboxClient(access_token)
	print 'linked account: ', client.account_info()
	f, metadata = client.get_file_and_metadata( restore_path )
	# print f
	# print metadata
	print "Restoring, please wait"
	# Write the downloaded content to the destination folder in one shot.
	out = open( restore_path_computer , 'wb')
	out.write(f.read())
	out.close()
	print metadata
except Exception, e:
	# Best-effort error reporting; the script exits normally afterwards.
	print "error"
	print e
finally:
	pass
| mit | 1,344,065,037,891,964,400 | 23.723684 | 129 | 0.679617 | false |
wq/wq.db | rest/model_tools.py | 1 | 1160 | def get_ct(model, for_concrete_model=False):
    # Resolve the wq.db ContentType for `model`, which may be given either
    # as a string identifier or as a model class.
    from .models import ContentType
    if isinstance(model, str):
        # String identifiers go through the custom identifier lookup.
        ctype = ContentType.objects.get_by_identifier(model)
    else:
        ctype = ContentType.objects.get_for_model(
            model, for_concrete_model=for_concrete_model
        )
    # get_for_model sometimes returns a DjangoContentType - caching issue?
    if not isinstance(ctype, ContentType):
        # Re-fetch as the project's ContentType proxy and prime the cache so
        # later lookups return the proxy class directly.
        ctype = ContentType.objects.get(pk=ctype.pk)
        ContentType.objects._add_to_cache(ContentType.objects.db, ctype)
    return ctype
def get_object_id(instance):
    """Return the public identifier for *instance*.

    Uses the content type's configured ``lookup`` attribute when one is
    present in the config; otherwise falls back to the primary key.
    """
    ctype = get_ct(instance)
    config = ctype.get_config()
    if not config or 'lookup' not in config:
        return instance.pk
    return getattr(instance, config['lookup'])
def get_by_identifier(queryset, ident):
    """Fetch a single object from *queryset* by its public identifier.

    Prefers the queryset's own ``get_by_identifier`` when available;
    otherwise uses the content type's configured ``lookup`` field,
    defaulting to the primary key.
    """
    if not hasattr(queryset, 'get_by_identifier'):
        config = get_ct(queryset.model).get_config()
        lookup = config['lookup'] if config and 'lookup' in config else 'pk'
        return queryset.get(**{lookup: ident})
    return queryset.get_by_identifier(ident)
| mit | 7,703,832,144,751,686,000 | 33.117647 | 78 | 0.633621 | false |
1st1/uvloop | tests/test_sockets.py | 1 | 25872 | import asyncio
import pickle
import select
import socket
import sys
import time
import unittest
from uvloop import _testbase as tb
_SIZE = 1024 * 1024  # 1 MiB payload multiplier used by the bulk send/recv tests below
class _TestSockets:
    """Shared tests for the event loop's low-level socket API.

    This mixin exercises ``loop.sock_connect/sock_recv/sock_sendall/
    sock_accept`` and the reader/writer callbacks; it is combined with a
    uvloop- and an asyncio-backed test case at the bottom of the file.
    """
    async def recv_all(self, sock, nbytes):
        # Helper: keep reading until exactly `nbytes` bytes have arrived.
        buf = b''
        while len(buf) < nbytes:
            buf += await self.loop.sock_recv(sock, nbytes - len(buf))
        return buf
    def test_socket_connect_recv_send(self):
        if self.is_asyncio_loop() and sys.version_info[:3] == (3, 5, 2):
            # See https://github.com/python/asyncio/pull/366 for details.
            raise unittest.SkipTest()
        def srv_gen(sock):
            sock.send(b'helo')
            data = sock.recv_all(4 * _SIZE)
            self.assertEqual(data, b'ehlo' * _SIZE)
            sock.send(b'O')
            sock.send(b'K')
        # We use @asyncio.coroutine & `yield from` to test
        # the compatibility of Cython's 'async def' coroutines.
        @asyncio.coroutine
        def client(sock, addr):
            yield from self.loop.sock_connect(sock, addr)
            data = yield from self.recv_all(sock, 4)
            self.assertEqual(data, b'helo')
            yield from self.loop.sock_sendall(sock, b'ehlo' * _SIZE)
            data = yield from self.recv_all(sock, 2)
            self.assertEqual(data, b'OK')
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                self.loop.run_until_complete(client(sock, srv.addr))
    def test_socket_accept_recv_send(self):
        # Server accepts via loop.sock_accept; client runs blocking in an
        # executor thread to drive the connection.
        async def server():
            sock = socket.socket()
            sock.setblocking(False)
            with sock:
                sock.bind(('127.0.0.1', 0))
                sock.listen()
                fut = self.loop.run_in_executor(None, client,
                                                sock.getsockname())
                client_sock, _ = await self.loop.sock_accept(sock)
                with client_sock:
                    data = await self.recv_all(client_sock, _SIZE)
                    self.assertEqual(data, b'a' * _SIZE)
                await fut
        def client(addr):
            sock = socket.socket()
            with sock:
                sock.connect(addr)
                sock.sendall(b'a' * _SIZE)
        self.loop.run_until_complete(server())
    def test_socket_failed_connect(self):
        # Bind-then-close yields an address nobody is listening on.
        sock = socket.socket()
        with sock:
            sock.bind(('127.0.0.1', 0))
            addr = sock.getsockname()
        async def run():
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                with self.assertRaises(ConnectionRefusedError):
                    await self.loop.sock_connect(sock, addr)
        self.loop.run_until_complete(run())
    @unittest.skipUnless(tb.has_IPv6, 'no IPv6')
    def test_socket_ipv6_addr(self):
        server_sock = socket.socket(socket.AF_INET6)
        with server_sock:
            server_sock.bind(('::1', 0))
            addr = server_sock.getsockname()  # tuple of 4 elements for IPv6
        async def run():
            sock = socket.socket(socket.AF_INET6)
            with sock:
                sock.setblocking(False)
                # Check that sock_connect accepts 4-element address tuple
                # for IPv6 sockets.
                f = self.loop.sock_connect(sock, addr)
                try:
                    await asyncio.wait_for(f, timeout=0.1, loop=self.loop)
                except (asyncio.TimeoutError, ConnectionRefusedError):
                    # TimeoutError is expected.
                    pass
        self.loop.run_until_complete(run())
    def test_socket_ipv4_nameaddr(self):
        async def run():
            sock = socket.socket(socket.AF_INET)
            with sock:
                sock.setblocking(False)
                await self.loop.sock_connect(sock, ('localhost', 0))
        with self.assertRaises(OSError):
            # Regression test: sock_connect(sock) wasn't calling
            # getaddrinfo() with `family=sock.family`, which resulted
            # in `socket.connect()` being called with an IPv6 address
            # for IPv4 sockets, which used to cause a TypeError.
            # Here we expect that that is fixed so we should get an
            # OSError instead.
            self.loop.run_until_complete(run())
    def test_socket_blocking_error(self):
        # In debug mode, passing a blocking socket to sock_* must raise.
        self.loop.set_debug(True)
        sock = socket.socket()
        with sock:
            with self.assertRaisesRegex(ValueError, 'must be non-blocking'):
                self.loop.run_until_complete(
                    self.loop.sock_recv(sock, 0))
            with self.assertRaisesRegex(ValueError, 'must be non-blocking'):
                self.loop.run_until_complete(
                    self.loop.sock_sendall(sock, b''))
            with self.assertRaisesRegex(ValueError, 'must be non-blocking'):
                self.loop.run_until_complete(
                    self.loop.sock_accept(sock))
            with self.assertRaisesRegex(ValueError, 'must be non-blocking'):
                self.loop.run_until_complete(
                    self.loop.sock_connect(sock, (b'', 0)))
    def test_socket_fileno(self):
        # add_reader/add_writer must accept socket objects directly.
        rsock, wsock = socket.socketpair()
        f = asyncio.Future(loop=self.loop)
        def reader():
            rsock.recv(100)
            # We are done: unregister the file descriptor
            self.loop.remove_reader(rsock)
            f.set_result(None)
        def writer():
            wsock.send(b'abc')
            self.loop.remove_writer(wsock)
        with rsock, wsock:
            self.loop.add_reader(rsock, reader)
            self.loop.add_writer(wsock, writer)
            self.loop.run_until_complete(f)
    def test_socket_sync_remove_and_immediately_close(self):
        # Test that it's OK to close the socket right after calling
        # `remove_reader`.
        sock = socket.socket()
        with sock:
            cb = lambda: None
            sock.bind(('127.0.0.1', 0))
            sock.listen(0)
            fd = sock.fileno()
            self.loop.add_reader(fd, cb)
            self.loop.run_until_complete(asyncio.sleep(0.01, loop=self.loop))
            self.loop.remove_reader(fd)
            sock.close()
            self.assertEqual(sock.fileno(), -1)
            self.loop.run_until_complete(asyncio.sleep(0.01, loop=self.loop))
    def test_sock_cancel_add_reader_race(self):
        # Cancelling a sock_recv and immediately issuing a new one races
        # the remove-reader against the add-reader; the second recv must
        # still deliver the byte the server sends.
        srv_sock_conn = None
        async def server():
            nonlocal srv_sock_conn
            sock_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock_server.setblocking(False)
            with sock_server:
                sock_server.bind(('127.0.0.1', 0))
                sock_server.listen()
                fut = asyncio.ensure_future(
                    client(sock_server.getsockname()), loop=self.loop)
                srv_sock_conn, _ = await self.loop.sock_accept(sock_server)
                srv_sock_conn.setsockopt(
                    socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
                with srv_sock_conn:
                    await fut
        async def client(addr):
            sock_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock_client.setblocking(False)
            with sock_client:
                await self.loop.sock_connect(sock_client, addr)
                _, pending_read_futs = await asyncio.wait(
                    [self.loop.sock_recv(sock_client, 1)],
                    timeout=1, loop=self.loop)
                async def send_server_data():
                    # Wait a little bit to let reader future cancel and
                    # schedule the removal of the reader callback. Right after
                    # "rfut.cancel()" we will call "loop.sock_recv()", which
                    # will add a reader. This will make a race between
                    # remove- and add-reader.
                    await asyncio.sleep(0.1, loop=self.loop)
                    await self.loop.sock_sendall(srv_sock_conn, b'1')
                self.loop.create_task(send_server_data())
                for rfut in pending_read_futs:
                    rfut.cancel()
                data = await self.loop.sock_recv(sock_client, 1)
                self.assertEqual(data, b'1')
        self.loop.run_until_complete(server())
    def test_sock_send_before_cancel(self):
        srv_sock_conn = None
        async def server():
            nonlocal srv_sock_conn
            sock_server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock_server.setblocking(False)
            with sock_server:
                sock_server.bind(('127.0.0.1', 0))
                sock_server.listen()
                fut = asyncio.ensure_future(
                    client(sock_server.getsockname()), loop=self.loop)
                srv_sock_conn, _ = await self.loop.sock_accept(sock_server)
                with srv_sock_conn:
                    await fut
        async def client(addr):
            await asyncio.sleep(0.01, loop=self.loop)
            sock_client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock_client.setblocking(False)
            with sock_client:
                await self.loop.sock_connect(sock_client, addr)
                _, pending_read_futs = await asyncio.wait(
                    [self.loop.sock_recv(sock_client, 1)],
                    timeout=1, loop=self.loop)
                # server can send the data in a random time, even before
                # the previous result future has cancelled.
                await self.loop.sock_sendall(srv_sock_conn, b'1')
                for rfut in pending_read_futs:
                    rfut.cancel()
                data = await self.loop.sock_recv(sock_client, 1)
                self.assertEqual(data, b'1')
        self.loop.run_until_complete(server())
class TestUVSockets(_TestSockets, tb.UVTestCase):
    """uvloop-specific socket tests (pseudo-sockets, fd ownership, cancellation)."""
    @unittest.skipUnless(hasattr(select, 'epoll'), 'Linux only test')
    def test_socket_sync_remove(self):
        # See https://github.com/MagicStack/uvloop/issues/61 for details
        sock = socket.socket()
        epoll = select.epoll.fromfd(self.loop._get_backend_id())
        try:
            cb = lambda: None
            sock.bind(('127.0.0.1', 0))
            sock.listen(0)
            fd = sock.fileno()
            self.loop.add_reader(fd, cb)
            self.loop.run_until_complete(asyncio.sleep(0.01, loop=self.loop))
            self.loop.remove_reader(fd)
            # After remove_reader the fd must truly be gone from the
            # backend's epoll set, hence FileNotFoundError on modify().
            with self.assertRaises(FileNotFoundError):
                epoll.modify(fd, 0)
        finally:
            sock.close()
            self.loop.close()
            epoll.close()
    def test_add_reader_or_writer_transport_fd(self):
        # An fd owned by a transport must be rejected by add/remove
        # reader/writer, whether passed as a socket or a raw fd.
        def assert_raises():
            return self.assertRaisesRegex(
                RuntimeError,
                r'File descriptor .* is used by transport')
        async def runner():
            tr, pr = await self.loop.create_connection(
                lambda: asyncio.Protocol(), sock=rsock)
            try:
                cb = lambda: None
                sock = tr.get_extra_info('socket')
                with assert_raises():
                    self.loop.add_reader(sock, cb)
                with assert_raises():
                    self.loop.add_reader(sock.fileno(), cb)
                with assert_raises():
                    self.loop.remove_reader(sock)
                with assert_raises():
                    self.loop.remove_reader(sock.fileno())
                with assert_raises():
                    self.loop.add_writer(sock, cb)
                with assert_raises():
                    self.loop.add_writer(sock.fileno(), cb)
                with assert_raises():
                    self.loop.remove_writer(sock)
                with assert_raises():
                    self.loop.remove_writer(sock.fileno())
            finally:
                tr.close()
        rsock, wsock = socket.socketpair()
        try:
            self.loop.run_until_complete(runner())
        finally:
            rsock.close()
            wsock.close()
    def test_pseudosocket(self):
        # The object returned by tr.get_extra_info('socket') is a read-only
        # pseudo-socket: metadata accessors work, I/O methods raise.
        def assert_raises():
            return self.assertRaisesRegex(
                RuntimeError,
                r'File descriptor .* is used by transport')
        def test_pseudo(real_sock, pseudo_sock, *, is_dup=False):
            self.assertIn('AF_UNIX', repr(pseudo_sock))
            self.assertEqual(pseudo_sock.family, real_sock.family)
            self.assertEqual(pseudo_sock.proto, real_sock.proto)
            # Guard against SOCK_NONBLOCK bit in socket.type on Linux.
            self.assertEqual(pseudo_sock.type & 0xf, real_sock.type & 0xf)
            with self.assertRaises(TypeError):
                pickle.dumps(pseudo_sock)
            na_meths = {
                'accept', 'connect', 'connect_ex', 'bind', 'listen',
                'makefile', 'sendfile', 'close', 'detach', 'shutdown',
                'sendmsg_afalg', 'sendmsg', 'sendto', 'send', 'sendall',
                'recv_into', 'recvfrom_into', 'recvmsg_into', 'recvmsg',
                'recvfrom', 'recv'
            }
            for methname in na_meths:
                meth = getattr(pseudo_sock, methname)
                with self.assertRaisesRegex(
                        TypeError,
                        r'.*not support ' + methname + r'\(\) method'):
                    meth()
            eq_meths = {
                'getsockname', 'getpeername', 'get_inheritable', 'gettimeout'
            }
            for methname in eq_meths:
                pmeth = getattr(pseudo_sock, methname)
                rmeth = getattr(real_sock, methname)
                # Call 2x to check caching paths
                self.assertEqual(pmeth(), rmeth())
                self.assertEqual(pmeth(), rmeth())
            self.assertEqual(
                pseudo_sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR),
                0)
            if not is_dup:
                self.assertEqual(pseudo_sock.fileno(), real_sock.fileno())
                duped = pseudo_sock.dup()
                with duped:
                    test_pseudo(duped, pseudo_sock, is_dup=True)
            with self.assertRaises(TypeError):
                with pseudo_sock:
                    pass
        async def runner():
            tr, pr = await self.loop.create_connection(
                lambda: asyncio.Protocol(), sock=rsock)
            try:
                sock = tr.get_extra_info('socket')
                test_pseudo(rsock, sock)
            finally:
                tr.close()
        rsock, wsock = socket.socketpair()
        try:
            self.loop.run_until_complete(runner())
        finally:
            rsock.close()
            wsock.close()
    def test_socket_connect_and_close(self):
        def srv_gen(sock):
            sock.send(b'helo')
        async def client(sock, addr):
            # Close is scheduled while the connect is in flight.
            f = asyncio.ensure_future(self.loop.sock_connect(sock, addr),
                                      loop=self.loop)
            self.loop.call_soon(sock.close)
            await f
            return 'ok'
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                r = self.loop.run_until_complete(client(sock, srv.addr))
                self.assertEqual(r, 'ok')
    def test_socket_recv_and_close(self):
        # Closing the socket while a sock_recv is pending: the recv still
        # completes once the (delayed) server data arrives.
        def srv_gen(sock):
            time.sleep(1.2)
            sock.send(b'helo')
        async def kill(sock):
            await asyncio.sleep(0.2, loop=self.loop)
            sock.close()
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            f = asyncio.ensure_future(self.loop.sock_recv(sock, 10),
                                      loop=self.loop)
            self.loop.create_task(kill(sock))
            res = await f
            self.assertEqual(sock.fileno(), -1)
            return res
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                r = self.loop.run_until_complete(w)
                self.assertEqual(r, b'helo')
    def test_socket_recv_into_and_close(self):
        def srv_gen(sock):
            time.sleep(1.2)
            sock.send(b'helo')
        async def kill(sock):
            await asyncio.sleep(0.2, loop=self.loop)
            sock.close()
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            data = bytearray(10)
            with memoryview(data) as buf:
                f = asyncio.ensure_future(self.loop.sock_recv_into(sock, buf),
                                          loop=self.loop)
                self.loop.create_task(kill(sock))
                rcvd = await f
                data = data[:rcvd]
            self.assertEqual(sock.fileno(), -1)
            return bytes(data)
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                r = self.loop.run_until_complete(w)
                self.assertEqual(r, b'helo')
    def test_socket_send_and_close(self):
        ok = False
        def srv_gen(sock):
            nonlocal ok
            b = sock.recv_all(2)
            if b == b'hi':
                ok = True
            sock.send(b'ii')
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            s2 = sock.dup()  # Don't let it drop connection until `f` is done
            with s2:
                f = asyncio.ensure_future(self.loop.sock_sendall(sock, b'hi'),
                                          loop=self.loop)
                self.loop.call_soon(sock.close)
                await f
                return await self.loop.sock_recv(s2, 2)
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                r = self.loop.run_until_complete(client(sock, srv.addr))
                self.assertEqual(r, b'ii')
        self.assertTrue(ok)
    def test_socket_close_loop_and_close(self):
        class Abort(Exception):
            pass
        def srv_gen(sock):
            time.sleep(1.2)
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            asyncio.ensure_future(self.loop.sock_recv(sock, 10),
                                  loop=self.loop)
            await asyncio.sleep(0.2, loop=self.loop)
            raise Abort
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                try:
                    sock = self.loop.run_until_complete(w)
                except Abort:
                    pass
                # `loop` still owns `sock`, so closing `sock` shouldn't
                # do anything.
                sock.close()
                self.assertNotEqual(sock.fileno(), -1)
                # `loop.close()` should io-decref all sockets that the
                # loop owns, including our `sock`.
                self.loop.close()
                self.assertEqual(sock.fileno(), -1)
    def test_socket_close_remove_reader(self):
        # Mixing socket-object and raw-fd forms of add/remove_reader and
        # then closing must behave consistently.
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_reader(s, lambda: None)
            self.loop.remove_reader(s.fileno())
            s.close()
            self.assertEqual(s.fileno(), -1)
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_reader(s.fileno(), lambda: None)
            self.loop.remove_reader(s)
            self.assertNotEqual(s.fileno(), -1)
            s.close()
            self.assertEqual(s.fileno(), -1)
    def test_socket_close_remove_writer(self):
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_writer(s, lambda: None)
            self.loop.remove_writer(s.fileno())
            s.close()
            self.assertEqual(s.fileno(), -1)
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_writer(s.fileno(), lambda: None)
            self.loop.remove_writer(s)
            self.assertNotEqual(s.fileno(), -1)
            s.close()
            self.assertEqual(s.fileno(), -1)
    def test_socket_cancel_sock_recv_1(self):
        def srv_gen(sock):
            time.sleep(1.2)
            sock.send(b'helo')
        async def kill(fut):
            await asyncio.sleep(0.2, loop=self.loop)
            fut.cancel()
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            f = asyncio.ensure_future(self.loop.sock_recv(sock, 10),
                                      loop=self.loop)
            self.loop.create_task(kill(f))
            with self.assertRaises(asyncio.CancelledError):
                await f
            sock.close()
            self.assertEqual(sock.fileno(), -1)
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                self.loop.run_until_complete(w)
    def test_socket_cancel_sock_recv_2(self):
        def srv_gen(sock):
            time.sleep(1.2)
            sock.send(b'helo')
        async def kill(fut):
            await asyncio.sleep(0.5, loop=self.loop)
            fut.cancel()
        async def recv(sock):
            # Remove the reader and close mid-recv, then await the doomed
            # future; double close exercises idempotent cleanup.
            fut = self.loop.create_task(self.loop.sock_recv(sock, 10))
            await asyncio.sleep(0.1, loop=self.loop)
            self.loop.remove_reader(sock)
            sock.close()
            try:
                await fut
            except asyncio.CancelledError:
                raise
            finally:
                sock.close()
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            f = asyncio.ensure_future(recv(sock), loop=self.loop)
            self.loop.create_task(kill(f))
            with self.assertRaises(asyncio.CancelledError):
                await f
            sock.close()
            self.assertEqual(sock.fileno(), -1)
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                self.loop.run_until_complete(w)
    def test_socket_cancel_sock_sendall(self):
        def srv_gen(sock):
            time.sleep(1.2)
            sock.recv_all(4)
        async def kill(fut):
            await asyncio.sleep(0.2, loop=self.loop)
            fut.cancel()
        async def client(sock, addr):
            await self.loop.sock_connect(sock, addr)
            # Send enough data (200 MiB) that the sendall cannot finish
            # before the cancellation lands.
            f = asyncio.ensure_future(
                self.loop.sock_sendall(sock, b'helo' * (1024 * 1024 * 50)),
                loop=self.loop)
            self.loop.create_task(kill(f))
            with self.assertRaises(asyncio.CancelledError):
                await f
            sock.close()
            self.assertEqual(sock.fileno(), -1)
        # disable slow callback reporting for this test
        self.loop.slow_callback_duration = 1000.0
        with self.tcp_server(srv_gen) as srv:
            sock = socket.socket()
            with sock:
                sock.setblocking(False)
                c = client(sock, srv.addr)
                w = asyncio.wait_for(c, timeout=5.0, loop=self.loop)
                self.loop.run_until_complete(w)
    def test_socket_close_many_add_readers(self):
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_reader(s, lambda: None)
            self.loop.add_reader(s, lambda: None)
            self.loop.add_reader(s, lambda: None)
            self.loop.remove_reader(s.fileno())
            s.close()
            self.assertEqual(s.fileno(), -1)
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_reader(s, lambda: None)
            self.loop.add_reader(s, lambda: None)
            self.loop.add_reader(s, lambda: None)
            self.loop.remove_reader(s)
            s.close()
            self.assertEqual(s.fileno(), -1)
    def test_socket_close_many_remove_writers(self):
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_writer(s, lambda: None)
            self.loop.add_writer(s, lambda: None)
            self.loop.add_writer(s, lambda: None)
            self.loop.remove_writer(s.fileno())
            s.close()
            self.assertEqual(s.fileno(), -1)
        s = socket.socket()
        with s:
            s.setblocking(False)
            self.loop.add_writer(s, lambda: None)
            self.loop.add_writer(s, lambda: None)
            self.loop.add_writer(s, lambda: None)
            self.loop.remove_writer(s)
            s.close()
            self.assertEqual(s.fileno(), -1)
class TestAIOSockets(_TestSockets, tb.AIOTestCase):
    # Runs the shared socket test battery against the stock asyncio loop.
    pass
| mit | -1,937,974,264,071,074,600 | 33.26755 | 79 | 0.517509 | false |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/PyQt4/examples/webkit/simpleselector/ui_window.py | 2 | 2807 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\Lib\site-packages\PyQt4\examples\webkit\simpleselector\window.ui'
#
# Created: Wed Jan 18 20:24:02 2017
# by: PyQt4 UI code generator 4.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims emitted by pyuic4: fall back gracefully when the
# running PyQt4 build does not expose QString.fromUtf8 or
# QApplication.UnicodeUTF8 (AttributeError branch).
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # No QString available: strings pass through unchanged.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        # Older/newer API without the encoding argument.
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Window(object):
    """UI scaffold generated by pyuic4 from window.ui.

    Do not edit by hand; regenerate from the .ui file instead (see the
    warning in the file header).
    """
    def setupUi(self, Window):
        # Build the widget tree: a web view on top, and a form row
        # (label + line edit) plus a button underneath.
        Window.setObjectName(_fromUtf8("Window"))
        Window.resize(640, 480)
        self.verticalLayout = QtGui.QVBoxLayout(Window)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.webView = QtWebKit.QWebView(Window)
        self.webView.setUrl(QtCore.QUrl(_fromUtf8("http://webkit.org/")))
        self.webView.setObjectName(_fromUtf8("webView"))
        self.verticalLayout.addWidget(self.webView)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.formLayout = QtGui.QFormLayout()
        self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.ExpandingFieldsGrow)
        self.formLayout.setObjectName(_fromUtf8("formLayout"))
        self.elementLabel = QtGui.QLabel(Window)
        self.elementLabel.setObjectName(_fromUtf8("elementLabel"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.elementLabel)
        self.elementLineEdit = QtGui.QLineEdit(Window)
        self.elementLineEdit.setObjectName(_fromUtf8("elementLineEdit"))
        self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.elementLineEdit)
        self.horizontalLayout.addLayout(self.formLayout)
        self.highlightButton = QtGui.QPushButton(Window)
        self.highlightButton.setObjectName(_fromUtf8("highlightButton"))
        self.horizontalLayout.addWidget(self.highlightButton)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.elementLabel.setBuddy(self.elementLineEdit)
        self.retranslateUi(Window)
        QtCore.QMetaObject.connectSlotsByName(Window)
    def retranslateUi(self, Window):
        # Apply the translatable UI strings.
        Window.setWindowTitle(_translate("Window", "Web Element Selector", None))
        self.elementLabel.setText(_translate("Window", "&Element:", None))
        self.elementLineEdit.setText(_translate("Window", "li a", None))
        self.highlightButton.setText(_translate("Window", "&Highlight", None))
from PyQt4 import QtWebKit
| gpl-3.0 | 2,831,186,600,653,961,700 | 43.555556 | 121 | 0.717848 | false |
vthorsteinsson/tensor2tensor | tensor2tensor/bin/t2t_bleu.py | 1 | 7314 | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Evaluate BLEU score for all checkpoints/translations in a given directory.
This script can be used in two ways.
To evaluate one already translated file:
```
t2t-bleu --translation=my-wmt13.de --reference=wmt13_deen.de
```
To evaluate all translations in a given directory (translated by
`t2t-translate-all`):
```
t2t-bleu
--translations_dir=my-translations
--reference=wmt13_deen.de
--event_dir=events
```
In addition to the above-mentioned required parameters,
there are optional parameters:
* bleu_variant: cased (case-sensitive), uncased, both (default).
* tag_suffix: Default="", so the tags will be BLEU_cased and BLEU_uncased.
tag_suffix can be used e.g. for different beam sizes if these should be
plotted in different graphs.
* min_steps: Don't evaluate checkpoints with less steps.
Default=-1 means check the `last_evaluated_step.txt` file, which contains
the number of steps of the last successfully evaluated checkpoint.
* report_zero: Store BLEU=0 and guess its time based on the oldest file in the
translations_dir. Default=True. This is useful, so TensorBoard reports
correct relative time for the remaining checkpoints. This flag is set to
False if min_steps is > 0.
* wait_minutes: Wait upto N minutes for a new translated file. Default=0.
This is useful for continuous evaluation of a running training, in which case
this should be equal to save_checkpoints_secs/60 plus time needed for
translation plus some reserve.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import time
from tensor2tensor.utils import bleu_hook
import tensorflow as tf
# Command-line flags; see the module docstring for usage examples.
flags = tf.flags
FLAGS = flags.FLAGS
flags.DEFINE_string("source", None,
                    "Path to the source-language file to be translated")
flags.DEFINE_string("reference", None, "Path to the reference translation file")
flags.DEFINE_string("translation", None,
                    "Path to the MT system translation file")
flags.DEFINE_string("translations_dir", None,
                    "Directory with translated files to be evaluated.")
flags.DEFINE_string("event_dir", None, "Where to store the event file.")
flags.DEFINE_string("bleu_variant", "both",
                    "Possible values: cased(case-sensitive), uncased, "
                    "both(default).")
flags.DEFINE_string("tag_suffix", "",
                    "What to add to BLEU_cased and BLEU_uncased tags.")
flags.DEFINE_integer("min_steps", -1,
                     "Don't evaluate checkpoints with less steps.")
flags.DEFINE_integer("wait_minutes", 0,
                     "Wait upto N minutes for a new checkpoint, cf. "
                     "save_checkpoints_secs.")
flags.DEFINE_bool("report_zero", None,
                  "Store BLEU=0 and guess its time based on the oldest file.")
def main(_):
tf.logging.set_verbosity(tf.logging.INFO)
if FLAGS.translation:
if FLAGS.translations_dir:
raise ValueError(
"Cannot specify both --translation and --translations_dir.")
if FLAGS.bleu_variant in ("uncased", "both"):
bleu = 100 * bleu_hook.bleu_wrapper(FLAGS.reference, FLAGS.translation,
case_sensitive=False)
print("BLEU_uncased = %6.2f" % bleu)
if FLAGS.bleu_variant in ("cased", "both"):
bleu = 100 * bleu_hook.bleu_wrapper(FLAGS.reference, FLAGS.translation,
case_sensitive=True)
print("BLEU_cased = %6.2f" % bleu)
return
if not FLAGS.translations_dir:
raise ValueError(
"Either --translation or --translations_dir must be specified.")
transl_dir = os.path.expanduser(FLAGS.translations_dir)
if not os.path.exists(transl_dir):
exit_time = time.time() + FLAGS.wait_minutes * 60
tf.logging.info("Translation dir %s does not exist, waiting till %s.",
transl_dir, time.asctime(time.localtime(exit_time)))
while not os.path.exists(transl_dir):
time.sleep(10)
if time.time() > exit_time:
raise ValueError("Translation dir %s does not exist" % transl_dir)
last_step_file = os.path.join(FLAGS.event_dir, "last_evaluated_step.txt")
if FLAGS.min_steps == -1:
if tf.gfile.Exists(last_step_file):
with open(last_step_file) as ls_file:
FLAGS.min_steps = int(ls_file.read())
else:
FLAGS.min_steps = 0
if FLAGS.report_zero is None:
FLAGS.report_zero = FLAGS.min_steps == 0
writer = tf.summary.FileWriter(FLAGS.event_dir)
for transl_file in bleu_hook.stepfiles_iterator(
transl_dir, FLAGS.wait_minutes, FLAGS.min_steps, path_suffix=""):
# report_zero handling must be inside the for-loop,
# so we are sure the transl_dir is already created.
if FLAGS.report_zero:
all_files = (os.path.join(transl_dir, f) for f in os.listdir(transl_dir))
start_time = min(
os.path.getmtime(f) for f in all_files if os.path.isfile(f))
values = []
if FLAGS.bleu_variant in ("uncased", "both"):
values.append(tf.Summary.Value(
tag="BLEU_uncased" + FLAGS.tag_suffix, simple_value=0))
if FLAGS.bleu_variant in ("cased", "both"):
values.append(tf.Summary.Value(
tag="BLEU_cased" + FLAGS.tag_suffix, simple_value=0))
writer.add_event(tf.summary.Event(summary=tf.Summary(value=values),
wall_time=start_time, step=0))
FLAGS.report_zero = False
filename = transl_file.filename
tf.logging.info("Evaluating " + filename)
values = []
if FLAGS.bleu_variant in ("uncased", "both"):
bleu = 100 * bleu_hook.bleu_wrapper(FLAGS.reference, filename,
case_sensitive=False)
values.append(tf.Summary.Value(tag="BLEU_uncased" + FLAGS.tag_suffix,
simple_value=bleu))
tf.logging.info("%s: BLEU_uncased = %6.2f" % (filename, bleu))
if FLAGS.bleu_variant in ("cased", "both"):
bleu = 100 * bleu_hook.bleu_wrapper(FLAGS.reference, filename,
case_sensitive=True)
values.append(tf.Summary.Value(tag="BLEU_cased" + FLAGS.tag_suffix,
simple_value=bleu))
tf.logging.info("%s: BLEU_cased = %6.2f" % (transl_file.filename, bleu))
writer.add_event(tf.summary.Event(
summary=tf.Summary(value=values),
wall_time=transl_file.mtime, step=transl_file.steps))
writer.flush()
with open(last_step_file, "w") as ls_file:
ls_file.write(str(transl_file.steps) + "\n")
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run()
| apache-2.0 | 3,376,770,000,079,592,400 | 41.034483 | 80 | 0.655865 | false |
nil0x42/phpsploit | src/api/plugin.py | 1 | 1513 | """Provide access to attributes of currently running plugin"""
__all__ = ["plugin"]
import re
from core import plugins
class Plugin:
"""Get access to currently running plugin attributes.
Usage:
>>> from api import plugin
Attributes:
* name (type: str)
# Plugin name.
>>> plugin.name
'foobar'
* help (type: str)
# Plugin docstring (detailed help).
>>> print(plugin.help)
[*] foobar: An imaginary phpsploit plugin
DESCRIPTION:
An imaginary foobar plugin description.
...
* path (type: str)
# Absolute path of plugin's root directory.
>>> plugin.path
'/home/user/phpsploit/plugins/parent_dir/foobar/'
* category (type: str)
# Plugin's category name (parent directory).
>>> plugin.category
'Parent Dir'
"""
def __init__(self):
pass
def __getattr__(self, attr):
errmsg = "type object '%s' has no attribute '%s'"
if attr in dir(self):
return getattr(plugins.current_plugin, attr)
raise AttributeError(errmsg % (self.__class__.__name__, str(attr)))
def __dir__(self):
result = []
for attr in dir(plugins.current_plugin):
obj = getattr(plugins.current_plugin, attr)
if re.match("^[a-z]+$", attr) and not callable(obj):
result.append(attr)
return result
# instanciate plugin object (for use within python API)
plugin = Plugin()
| gpl-3.0 | 7,849,397,474,975,934,000 | 24.644068 | 75 | 0.573695 | false |
YYHugo/Open-2015-vision | auxiliar/capture_video.py | 1 | 1161 | ############### DESCRIPTION ###############
#Save a video record to use without robot #
# #
# BY Hugo #
# #
############### DESCRIPTION ###############
import cv2
import numpy as np
import sys
#Receive and count quantity of arguments
argv = sys.argv[0:];
argc = len(argv);
#camera argument - char '0' is 48 in ASCII
camera = ord(argv[1])-48;
#video filename
videofile = argv[2]
if (argc < 2):
print '\nMissing arguments. Need 3 but has', argc+1
print 'Try "python teste.py <camera> <videofile>"\n'
quit()
cap = cv2.VideoCapture(camera)
# Define the codec and create VideoWriter object
fourcc = cv2.cv.CV_FOURCC('X','V', 'I', 'D')
out = cv2.VideoWriter(videofile,fourcc, 20.0, (640,480))
while(cap.isOpened()):
ret, frame = cap.read()
if ret==True:
# write the flipped frame
out.write(frame)
cv2.imshow('frame',frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
else:
break
# Release everything if job is finished
cap.release()
out.release()
cv2.destroyAllWindows() | gpl-2.0 | -8,218,589,862,494,094,000 | 23.723404 | 56 | 0.548665 | false |
zaxliu/deepnap | experiments/kdd-exps/experiment_DynaQNN_130_Feb15_2001.py | 1 | 5180 | # System built-in modules
import time
from datetime import datetime
import sys
import os
from multiprocessing import Pool
# Project dependency modules
import pandas as pd
pd.set_option('mode.chained_assignment', None) # block warnings due to DataFrame value assignment
import lasagne
# Project modules
sys.path.append('../')
from sleep_control.traffic_emulator import TrafficEmulator
from sleep_control.traffic_server import TrafficServer
from sleep_control.controller import QController, DummyController, NController
from sleep_control.integration import Emulation
from sleep_control.env_models import SJTUModel
from rl.qtable import QAgent
from rl.qnn_theano import QAgentNN
from rl.mixin import PhiMixin, DynaMixin
sys_stdout = sys.stdout
log_prefix = '_'.join(['msg'] + os.path.basename(__file__).replace('.', '_').split('_')[1:5])
log_file_name = "{}_{}.log".format(log_prefix, sys.argv[1])
# Composite classes
class Dyna_QAgentNN(DynaMixin, QAgentNN):
def __init__(self, **kwargs):
super(Dyna_QAgentNN, self).__init__(**kwargs)
# Parameters
# |- Data
location = 'mhC'
# |- Agent
# |- QAgent
actions = [(True, None), (False, 'serve_all')]
gamma, alpha = 0.9, 0.9 # TD backup
explore_strategy, epsilon = 'epsilon', 0.02 # exploration
# |- QAgentNN
# | - Phi
# phi_length = 5
# dim_state = (1, phi_length, 3+2)
# range_state_slice = [(0, 10), (0, 10), (0, 10), (0, 1), (0, 1)]
# range_state = [[range_state_slice]*phi_length]
# | - No Phi
phi_length = 0
dim_state = (1, 1, 3)
range_state = ((((0, 10), (0, 10), (0, 10)),),)
# | - Other params
momentum, learning_rate = 0.9, 0.01 # SGD
num_buffer, memory_size, batch_size, update_period, freeze_period = 2, 200, 100, 4, 16
reward_scaling, reward_scaling_update, rs_period = 1, 'adaptive', 32 # reward scaling
# |- Env model
model_type, traffic_window_size = 'IPP', 50
stride, n_iter, adjust_offset = 2, 3, 1e-22
eval_period, eval_len = 4, 100
n_belief_bins, max_queue_len = 0, 20
Rs, Rw, Rf, Co, Cw = 1.0, -1.0, -10.0, -5.0, -0.5
traffic_params = (model_type, traffic_window_size,
stride, n_iter, adjust_offset,
eval_period, eval_len,
n_belief_bins)
queue_params = (max_queue_len,)
beta = 0.5 # R = (1-beta)*ServiceReward + beta*Cost
reward_params = (Rs, Rw, Rf, Co, Cw, beta)
# |- DynaQ
num_sim = 0
# |- Env
# |- Time
start_time = pd.to_datetime("2014-10-15 09:40:00")
total_time = pd.Timedelta(days=7)
time_step = pd.Timedelta(seconds=2)
backoff_epochs = num_buffer*memory_size+phi_length
head_datetime = start_time - time_step*backoff_epochs
tail_datetime = head_datetime + total_time
TOTAL_EPOCHS = int(total_time/time_step)
# |- Reward
rewarding = {'serve': Rs, 'wait': Rw, 'fail': Rf}
# load from processed data
session_df =pd.read_csv(
filepath_or_buffer='../data/trace_{}.dat'.format(location),
parse_dates=['startTime_datetime', 'endTime_datetime']
)
te = TrafficEmulator(
session_df=session_df, time_step=time_step,
head_datetime=head_datetime, tail_datetime=tail_datetime,
rewarding=rewarding,
verbose=2)
ts = TrafficServer(cost=(Co, Cw), verbose=2)
env_model = SJTUModel(traffic_params, queue_params, reward_params, 2)
agent = Dyna_QAgentNN(
env_model=env_model, num_sim=num_sim,
dim_state=dim_state, range_state=range_state,
f_build_net = None,
batch_size=batch_size, learning_rate=learning_rate, momentum=momentum,
reward_scaling=reward_scaling, reward_scaling_update=reward_scaling_update, rs_period=rs_period,
update_period=update_period, freeze_period=freeze_period,
memory_size=memory_size, num_buffer=num_buffer,
# Below is QAgent params
actions=actions, alpha=alpha, gamma=gamma,
explore_strategy=explore_strategy, epsilon=epsilon,
verbose=2)
c = QController(agent=agent)
emu = Emulation(te=te, ts=ts, c=c, beta=beta)
# Heavyliftings
t = time.time()
sys.stdout = sys_stdout
log_path = './log/'
if os.path.isfile(log_path+log_file_name):
print "Log file {} already exist. Experiment cancelled.".format(log_file_name)
else:
log_file = open(log_path+log_file_name,"w")
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
while emu.epoch is not None and emu.epoch<TOTAL_EPOCHS:
# log time
print "Epoch {},".format(emu.epoch),
left = emu.te.head_datetime + emu.te.epoch*emu.te.time_step
right = left + emu.te.time_step
print "{} - {}".format(left.strftime("%Y-%m-%d %H:%M:%S"), right.strftime("%Y-%m-%d %H:%M:%S"))
emu.step()
print
if emu.epoch%(0.05*TOTAL_EPOCHS)==0:
sys.stdout = sys_stdout
print datetime.now().strftime('[%Y-%m-%d %H:%M:%S]'),
print '{}%'.format(int(100.0*emu.epoch/TOTAL_EPOCHS)),
print log_file_name
time.sleep(1)
sys.stdout = log_file
sys.stdout = sys_stdout
log_file.close()
print
print log_file_name,
print '{:.3f} sec,'.format(time.time()-t),
print '{:.3f} min'.format((time.time()-t)/60)
| bsd-3-clause | 6,453,434,949,261,437,000 | 33.304636 | 103 | 0.653089 | false |
jacknlliu/ros-docker-images | 3d_model/scripts/install_brd.py | 1 | 1572 | import bpy
import os
import addon_utils
from subprocess import call
from urllib.request import urlretrieve
from zipfile import ZipFile
from tempfile import TemporaryDirectory
from shutil import copytree,rmtree
from os.path import join
python_exec = bpy.app.binary_path_python
path_to_addons = bpy.utils.user_resource('SCRIPTS', "addons")
print('Install Pip')
try:
import pip
except:
rc = call([python_exec,"-m","ensurepip","--default-pip", "--upgrade"])
import pip
print('Download RD')
URL = "https://github.com/HBPNeurorobotics/BlenderRobotDesigner/archive/master.zip"
addon_dir = 'robot_designer_plugin'
zip_dir = "BlenderRobotDesigner-master"
print('Unzip RD')
with TemporaryDirectory() as tmp:
zip_file = join(tmp,"master.zip")
print(zip_file)
urlretrieve(URL,zip_file)
print('Downloaded!')
rc = call([python_exec,"-m","zipfile","-e",zip_file,tmp])
with ZipFile(zip_file, "r") as z:
z.extractall(tmp)
print('Unzip finished')
addon_dir_src = join(tmp,zip_dir,addon_dir)
addon_dir_dst = join(path_to_addons,addon_dir)
print('remove previous addon')
rmtree(addon_dir_dst,True)
print('add latest addon')
copytree(addon_dir_src,addon_dir_dst)
print('enable addon')
addon_utils.enable("robot_designer_plugin", persistent=True)
bpy.ops.wm.save_userpref()
with open(join(addon_dir_src,"requirements.txt")) as f:
for line in f:
rc = call([python_exec,"-m","pip","install",line])
#pip.main(['install', line])
print('RD Installation Done!')
| mit | -5,145,348,280,357,354,000 | 23.952381 | 83 | 0.683206 | false |
spino327/sdr_testbed | DistributedTestbed/transmitter/TxUsrp.py | 1 | 4423 | '''
Copyright (c) 2011, Universidad Industrial de Santander, Colombia
University of Delaware
All rights reserved.
@author: Sergio Pino
@author: Henry Arguello
Website: http://www.eecis.udel.edu/
emails : [email protected] - [email protected]
Date : Apr, 2011
'''
from gnuradio import gr
from gnuradio.gr import firdes
from gnuradio import blks2
from util import USRP2Conf
class TxUSRP(gr.hier_block2):
'''
This class handle the samples rate fixing operation and also the frequency error fixing operation
Resampler a lower signal rate to the requirement rate at the usrp
--->(pfb_resampler)--->(xlatting_filter)--->(usrp_sink)
'''
def __init__(self, *params):
gr.hier_block2.__init__(self, "TxUSPR",
gr.io_signature(1, 1, gr.sizeof_gr_complex),
gr.io_signature(0, 0, 0))
if len(params) == 7:
self.__uhd(params[0], params[1], params[2], params[3], params[4], params[5], params[6])
# elif len(params) == 6:
# self.__raw(params[0], params[1], params[2], params[3], params[4], params[5])
else:
raise Exception()
def __uhd(self, fc, lo_off, inter, gain, addr, inSampRate, sync):
'''
in:
- fc = center frequency
- lo_off = LO off
- inter = interporlation factor
- gain = gain in the tx, only with 2450
- addr = ip address, format = "addr=ip, mimo_mode="
- inSampRate = incoming sample frequency, basically here we determine the re-sampler interpolation factor
- sync = True is we're gonna use an external ref clock
'''
# instance variables
self.type = "UHD"
(self.tx, basebandFreq, dxcFreq) = USRP2Conf.getUhdUSRPSink(fc, lo_off, inter, gain, addr, sync)
sampRate = float(self.tx.get_clock_rate())/inter
self.postProcessing(inSampRate, dxcFreq, sampRate)
# def __raw(self, fc, inter, gain, eth, inSampRate, sync):
# '''
# in:
# - fc = center frequency
# - inter = interporlation factor
# - gain = gain in the tx, only with 2450
# - eth = ethernet interface name(String)
# - inSampRate = incoming sample frequency, basically here we determine the re-sampler interpolation factor
# - sync = True is we're gonna use an external ref clock
#
# '''
#
# # instance variables
# self.type = "RAW"
# (self.tx, basebandFreq, dxcFreq) = USRP2Conf.getUSRP2Sink(fc, inter, gain, eth, sync)
# sampRate = float(self.tx.dac_rate())/inter
# self.postProcessing(inSampRate, dxcFreq, sampRate)
def postProcessing(self, inSampRate, dxcFreq, sampRate):
# xlating
if dxcFreq != 0:
xlateFilterTaps = firdes.low_pass(1, sampRate, sampRate / 2, sampRate / 10, firdes.WIN_HAMMING, 6.76)
self.xlatingFilter = gr.freq_xlating_fir_filter_ccc(1, (xlateFilterTaps),
dxcFreq,
sampRate)
print "i: xlating filter fixed to " + str(dxcFreq)
else:
self.xlatingFilter = gr.multiply_const_vcc((1, ))
print "i: xlating filter not needed"
# pfb resampler
self.resamplerFactor = sampRate / inSampRate
nphases = 32
frac_bw = 0.45
rs_taps = firdes.low_pass(nphases, nphases, frac_bw, 0.5 - frac_bw)
self.resampler = blks2.pfb_arb_resampler_ccf(self.resamplerFactor,
(rs_taps),
nphases)
print "i: re-sampler relation new_freq/old_freq = " + str(self.resamplerFactor)
#EO instance variables
self.isRTEnable = gr.enable_realtime_scheduling()
if self.isRTEnable == gr.RT_OK:
print "i: realtime enable: True"
else:
print "i: realtime enable: False"
# Connections
self.connect((self, 0), (self.resampler, 0), (self.xlatingFilter, 0), (self.tx, 0))
def dac_rate(self):
'''
return the DAC rate in Hz
'''
if self.type == "UHD":
return self.tx.get_clock_rate()
else:
return self.tx.dac_rate()
| apache-2.0 | -164,745,593,080,653,150 | 34.677419 | 123 | 0.558671 | false |
ufjfeng/leetcode-jf-soln | python/402_remove_k_digits.py | 1 | 1551 | """
Given a non-negative integer num represented as a string, remove k digits from
the number so that the new number is the smallest possible.
Note:
The length of num is less than 10002 and will be ≥ k.
The given num does not contain any leading zero.
Example 1:
Input: num = "1432219", k = 3
Output: "1219"
Explanation: Remove the three digits 4, 3, and 2 to form the new number 1219
which is the smallest.
Example 2:
Input: num = "10200", k = 1
Output: "200"
Explanation: Remove the leading 1 and the number is 200. Note that the
output must not contain leading zeroes.
Example 3:
Input: num = "10", k = 2
Output: "0"
Explanation: Remove all the digits from the number and it is left with
nothing which is 0.
"""
class Solution(object):
def removeKdigits(self, num, k):
"""
:type num: str
:type k: int
:rtype: str
"""
if k == 0:
return num
soln = []
l = len(num)
for i in range(l):
while len(soln) > 0 and soln[-1] > num[i] and k > 0:
soln.pop()
k -= 1
soln.append(num[i])
while len(soln) > max(l - k, 0):
soln.pop()
return str(int(''.join(soln))) if len(soln) > 0 else "0"
a = Solution()
print(a.removeKdigits("1432219", 3) == "1219")
print(a.removeKdigits("10200", 1) == "200")
print(a.removeKdigits("10", 2) == "0")
print(a.removeKdigits("123456", 2) == "1234")
print(a.removeKdigits("9", 1) == "0")
| mit | 3,027,180,323,243,118,600 | 27.163636 | 80 | 0.572628 | false |
PeterSchichtel/hepstore | hepstore/core/statistic/parser.py | 1 | 3086 | #!/usr/bin/env python
import os
import argparse
class StatisticParser(argparse.ArgumentParser):
def __init__( self,
description = "perform statistical analysis" ):
argparse.ArgumentParser.__init__( self, description )
# put arguments here
self.add_argument("-f", "--fit",
action = "store_true",
help = "fit distribution from numric pdf's",
)
self.add_argument( "--data",
default = [],
help = "data to be used",
nargs = '+',
)
self.add_argument( "--pdf",
default = [],
help = "list of pdf's used for fitting",
nargs = '+',
)
self.add_argument( "--bins",
default = 100,
type = int,
help = "numeric granularity for histograms",
)
self.add_argument( "--axis",
default = [ 0, 1, 2,],
type = int,
help = "specify axis for projections",
nargs = '+',
)
self.add_argument( "--start",
default = 1.0,
type = float,
help = "init value for fit",
)
self.add_argument( "--limit",
action = "store_true",
help = "compute upper bound on xsec_s",
)
self.add_argument( "--roc",
default = os.path.join(os.getcwd(),'roc.npy'),
help = "path to roc data",
)
self.add_argument( "--xsec_s",
default = 1.0,
type = float,
help = "signal cross section",
)
self.add_argument( "--xsec_b",
default = 1.0,
type = float,
help = "background cross section",
)
self.add_argument( "--luminosity",
default = 1.0,
type = float,
help = "set luminosity",
)
self.add_argument( "--significance",
default = "",
help = "compute significance from classifier output, save at 'ARG'/significance.npy",
)
self.add_argument( "--cls_s",
default = os.path.join(os.getcwd(),'cls_s.npy'),
help = "classifier output distribution for signal",
)
self.add_argument( "--cls_b",
default = os.path.join(os.getcwd(),'cls_b.npy'),
help = "classifier output distribution for background",
)
pass
pass
| gpl-3.0 | -2,844,642,597,303,474,700 | 36.634146 | 115 | 0.377187 | false |
rwl/PyCIM | CIM14/CDPSM/GIS_Connectivity/IEC61968/WiresExt/PhaseImpedanceData.py | 1 | 3366 | # Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.CDPSM.GIS_Connectivity.Element import Element
class PhaseImpedanceData(Element):
"""Triplet of resistance, reactance, and susceptance matrix element values.
"""
def __init__(self, x=0.0, sequenceNumber=0, b=0.0, r=0.0, PhaseImpedance=None, *args, **kw_args):
"""Initialises a new 'PhaseImpedanceData' instance.
@param x: Reactance matrix element value, per length of unit.
@param sequenceNumber: Column-wise element index, assuming a symmetrical matrix. Ranges from 1 to N + N*(N-1)/2.
@param b: Susceptance matrix element value, per length of unit.
@param r: Resistance matrix element value, per length of unit.
@param PhaseImpedance: Conductor phase impedance to which this data belongs.
"""
#: Reactance matrix element value, per length of unit.
self.x = x
#: Column-wise element index, assuming a symmetrical matrix. Ranges from 1 to N + N*(N-1)/2.
self.sequenceNumber = sequenceNumber
#: Susceptance matrix element value, per length of unit.
self.b = b
#: Resistance matrix element value, per length of unit.
self.r = r
self._PhaseImpedance = None
self.PhaseImpedance = PhaseImpedance
super(PhaseImpedanceData, self).__init__(*args, **kw_args)
_attrs = ["x", "sequenceNumber", "b", "r"]
_attr_types = {"x": float, "sequenceNumber": int, "b": float, "r": float}
_defaults = {"x": 0.0, "sequenceNumber": 0, "b": 0.0, "r": 0.0}
_enums = {}
_refs = ["PhaseImpedance"]
_many_refs = []
def getPhaseImpedance(self):
"""Conductor phase impedance to which this data belongs.
"""
return self._PhaseImpedance
def setPhaseImpedance(self, value):
if self._PhaseImpedance is not None:
filtered = [x for x in self.PhaseImpedance.PhaseImpedanceData if x != self]
self._PhaseImpedance._PhaseImpedanceData = filtered
self._PhaseImpedance = value
if self._PhaseImpedance is not None:
if self not in self._PhaseImpedance._PhaseImpedanceData:
self._PhaseImpedance._PhaseImpedanceData.append(self)
PhaseImpedance = property(getPhaseImpedance, setPhaseImpedance)
| mit | 1,970,894,435,229,611,300 | 43.289474 | 121 | 0.689542 | false |
cindy820219/milsss | UI/new_for_sim_dual.py | 1 | 34162 | ### import parsing
from xml.dom.minidom import parse
import xml.dom.minidom
### import ElementTree
from xml.etree.ElementTree import ElementTree, Element, parse
def simple_dual(DOMTree, collection, level):
    """Dispatch the dual-note (chord) simplification pass.

    Re-parses the intermediate file ``change_parse.xml`` and, for
    simplification level 2, runs the high-level chord-thinning pass
    (``high_dual_fun``), which annotates and rewrites the score.

    Parameters
    ----------
    DOMTree, collection : unused
        Accepted only for interface compatibility with the other passes;
        this function re-parses the intermediate file itself.
    level : int
        Simplification level; only ``2`` triggers any work here.
    """
    # ElementTree parse (the later `parse` import shadows minidom's).
    tree = parse('change_parse.xml')
    root = tree.getroot()
    print(' is in the simple_dual function !')
    print('innnnnnnnnnnnnnnnnnnnnnnn level', level)
    if level == 2:
        high_dual_fun(root, tree)
    # level == 1 (low_dual_func) was sketched in the original source but is
    # not implemented in this module.
def high_dual_fun(root, tree):
    """High-level dual-note pass: tag chord notes for later deletion.

    Scans every <note> of *root* in document order and annotates the tree
    in place with marker children:

      * ``chord_delete``      -- chord note that is a deletion candidate,
      * ``must_chord_delete`` -- note that has to be removed (part of a
                                 3-note chord),
      * ``is_three``          -- flags the last note of a 3-note chord.

    The annotated tree is written to ``change_temp.xml`` and
    ``delete_high_dual.xml``; a keep budget [chord_min, chord_max] is then
    derived from the chord statistics and handed to ``case``.
    """
    # Previously visited note (ElementTree element); '' until the first note.
    daul_pre_note = ''
    # Total number of notes carrying a <chord> child.
    chord_num = 0
    # chord_pre/chord_now record whether the previous/current note carried a
    # <chord> child; two in a row means a 3-note chord.
    chord_pre = 0
    chord_now = 0
    chord_three = 0
    is_three_chord = 0
    three_chord_quene = []  # NOTE(review): never used below.
    # Read the time signature via minidom.
    # NOTE(review): `beats` is computed but never used afterwards.
    DOMTree = xml.dom.minidom.parse('change_parse.xml')
    collection = DOMTree.documentElement
    attrs = collection.getElementsByTagName('attributes')
    for attr in attrs:
        times = collection.getElementsByTagName('time')
        for time in times:
            beats = time.getElementsByTagName('beats')[0]
            beats = beats.childNodes[0].data
            # print(beats)
    for measure in root.iter('measure'):
        for note in measure.iter('note'):
            # print('here note: ',note)
            is_three_chord = 0
            # Staff of the current note: '1'/'2' (upper/lower staff --
            # presumably right/left hand; confirm against the score layout).
            for staff in note.iter('staff'):
                daul_staff_data = staff.text
                # print('daul_staff_data: ',daul_staff_data)
            chord_now = 0
            chord = note.find('chord')
            # Count every chord note.
            if (chord != None):
                chord_num = chord_num + 1
                chord_now = 1
            # Two consecutive chord notes => part of a 3-note chord.
            if(chord_pre == 1 and chord_now == 1):
                chord_three = chord_three + 1
                is_three_chord = 1
                # print('chord_three: ', chord_three)
            # 3-note chord: force-delete the previous note and undo any
            # provisional chord_delete tag two notes back.
            if(is_three_chord == 1):
                # print(' IN is_three_chord !!!')
                if(chord != None):
                    if(daul_staff_data == '1'):
                        xml.etree.ElementTree.SubElement(daul_pre_note, 'must_chord_delete')
                        daul_pre_note.find('must_chord_delete').text = 'yes'
                        #### delete pre_pre_note chord_delete
                        # for measure in root.iter('measure'):
                        #     for note in measure.iter('note'):
                        #         if(note.find('chord_delete') != None):
                        #             measure.remove(note)
                        # NOTE(review): daul_pre_pre_note is only assigned once
                        # two notes have been seen -- a 3-note chord at the very
                        # start of the score would raise NameError here.
                        if(daul_pre_pre_note.find('chord_delete') != None):
                            chord_delete = daul_pre_pre_note.find('chord_delete')
                            daul_pre_pre_note.remove(chord_delete)
                            # three_chord_quene.append(chord_delete)
                        # # daul_pre_pre_note.remove(chord_delete)
                    if(daul_staff_data == '2'):
                        xml.etree.ElementTree.SubElement(daul_pre_note, 'must_chord_delete')
                        daul_pre_note.find('must_chord_delete').text = 'yes'
                is_three_chord = 0
                xml.etree.ElementTree.SubElement(note, 'is_three')
                note.find('is_three').text = 'yes'
            # Plain two-note chord on staff 2: mark this note as a candidate.
            if(chord != None and daul_staff_data == '2' and is_three_chord == 0):
                # xml.etree.ElementTree.SubElement(note, 'rest')
                xml.etree.ElementTree.SubElement(note, 'chord_delete')
                note.find('chord_delete').text = 'yes'
            # Plain two-note chord on staff 1: mark the *previous* note instead.
            if(chord != None and daul_staff_data == '1' and is_three_chord == 0):
                xml.etree.ElementTree.SubElement(daul_pre_note, 'chord_delete')
                daul_pre_note.find('chord_delete').text = 'yes'
            ### is important
            # if(note.find('chord') != None):
            #     chord = note.find('chord')
            #     note.remove(chord)
            # Shift the one-note / two-note history windows.
            if(daul_pre_note != ''):
                daul_pre_pre_note = daul_pre_note
            daul_pre_note = note
            chord_pre = chord_now
    # ### here is delete the notes !!!
    # for measure in root.iter('measure'):
    #     for note in measure.iter('note'):
    #         if(note.find('must_chord_delete') != None):
    #             measure.remove(note)
    # for measure in root.iter('measure'):
    #     for note in measure.iter('note'):
    #         if(note.find('must_chord_delete') != None):
    #             measure.remove(note)
    print('delete_high_dual.xml')
    # Persist the annotated tree for the following passes.
    tree.write('change_temp.xml')
    tree.write('delete_high_dual.xml')
    # Budget: chords that remain negotiable, excluding forced 3-note deletions.
    chord_num = chord_num - chord_three
    print(chord_num)
    print(chord_three)
    # Keep roughly between 2/5 and 3/5 of the remaining chords.
    chord_min = chord_num *2 // 5 - chord_three
    chord_max = chord_num *3 // 5 - chord_three
    # chord_min = (chord_num - chord_three) *2 // 5
    # chord_max = (chord_num - chord_three) *3 // 5
    if(chord_min < 0):
        chord_min=0
    if(chord_max < 0):
        chord_max=0
    print(chord_min,chord_max)
    case(chord_max, chord_min)
def case(chord_max, chord_min):
    """Pick a deletion strategy (case id 1-9) from the keep budget.

    Tallies, per TotalPI value, the notes in ``delete_high_dual.xml`` that
    carry a ``chord_delete`` tag (skipping any measure tail after a
    ``must_chord_delete`` note), then chooses the first cumulative tier --
    in the order PI 1.0, +3.0, +2.0, +4.0 -- that reaches ``chord_min``.
    Within a tier, staying at or under ``chord_max`` gives the odd case id,
    overshooting gives the even one; reaching no tier gives case 9.  The
    resulting id is handed to ``case_delete_function``.
    """
    tree = parse('delete_high_dual.xml')
    root = tree.getroot()

    # Candidate-note counts keyed by exact TotalPI value.
    tally = {1.0: 0, 2.0: 0, 3.0: 0, 4.0: 0}
    for measure in root.iter('measure'):
        for note in measure.iter('note'):
            if note.find('chord_delete') is None:
                continue
            if note.find('must_chord_delete') is not None:
                # A mandatory deletion ends the scan of this measure.
                break
            # pi_value deliberately carries over from the previous candidate
            # when a note has no TotalPI child (matches the original
            # behaviour); a first candidate without one raises NameError.
            for pi_node in note.iter('TotalPI'):
                pi_value = float(pi_node.text)
            if pi_value in tally:
                tally[pi_value] += 1

    print(tally[1.0])
    print(tally[2.0])
    print(tally[3.0])
    print(tally[4.0])

    # Cumulative counts in priority order: PI 1, then +PI 3, +PI 2, +PI 4.
    cumulative = [
        tally[1.0],
        tally[1.0] + tally[3.0],
        tally[1.0] + tally[3.0] + tally[2.0],
        tally[1.0] + tally[3.0] + tally[2.0] + tally[4.0],
    ]
    case = 0
    for tier, total in enumerate(cumulative):
        if chord_min <= total:
            case = 2 * tier + 1 if total <= chord_max else 2 * tier + 2
            break
    else:
        case = 9
    print('case: ', case)
    case_delete_function(case)
def case_delete_function(case):
tree = parse('delete_high_dual.xml')
root = tree.getroot()
must_delete = 0
quene_delete = []
if(case == 1 or case == 2):
# if(case == 1):
print('case 1 in')
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(must_delete == 1):
# print('^^^^^^^')
# for MIDI in note.iter('MIDI'):
# print('must_delete', MIDI.text)
# for TotalPI in note.iter('TotalPI'):
# print('must_delete',TotalPI.text)
# print('^^^^^^^')
# for MIDI in pre_note.iter('MIDI'):
# print('aaa', MIDI.text)
# for TotalPI in pre_note.iter('TotalPI'):
# print('aaa',TotalPI.text)
# print('========')
# if(note.find('must_chord_delete') != None):
if(note.find('chord') != None):
if(pre_note.find('must_chord_delete') != None):
print('here 1')
else:
chord = note.find('chord')
note.remove(chord)
# measure.remove(pre_note)
quene_delete.append(pre_note)
must_delete = 0
# if(note.find('chord_delete') != None):
if(note.find('is_three') != None):
print('hehhehehehe')
else:
if(note.find('chord_delete') != None):
for staff in note.iter('staff'):
staff_text = staff.text
for TotalPI in note.iter('TotalPI'):
TotalPI_text = TotalPI.text
TotalPI_text = float(TotalPI_text)
if(TotalPI_text == 4.0 or TotalPI_text == 2.0 or TotalPI_text == 3.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
if(1.0 < TotalPI_text < 2.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(2.0 < TotalPI_text < 3.0):
if(staff_text == '1'):
must_delete = 1
print('here !!!!')
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(3.0 < TotalPI_text < 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(4.0 < TotalPI_text):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
pre_note = note
for i in quene_delete:
measure.remove(i)
quene_delete = []
# for measure in root.iter('measure'):
# for note in measure.iter('note'):
# if(must_delete == 1):
# if(note.find('chord') != None):
# if(pre_note.find('must_chord_delete') != None):
# print('here')
# else:
# chord = note.find('chord')
# note.remove(chord)
# measure.remove(pre_note)
# must_delete = 0
# if(note.find('chord_delete') != None):
# for staff in note.iter('staff'):
# staff_text = staff.text
# for TotalPI in note.iter('TotalPI'):
# TotalPI_text = TotalPI.text
# TotalPI_text = float(TotalPI_text)
# if(TotalPI_text == 4.0 or TotalPI_text == 2.0 or TotalPI_text == 3.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# measure.remove(note)
# ### ### ###
# if(1.0 < TotalPI_text < 2.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# measure.remove(note)
# if(2.0 < TotalPI_text < 3.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# measure.remove(note)
# if(3.0 < TotalPI_text < 4.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# measure.remove(note)
# if(4.0 < TotalPI_text):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# measure.remove(note)
# ### ### ###
# pre_note = note
if(case == 2):
print('case 2 in')
if(case == 3 or case == 4):
# if(case == 2):
print('case 3 in')
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(must_delete == 1):
# print('^^^^^^^')
# for MIDI in note.iter('MIDI'):
# print('must_delete', MIDI.text)
# for TotalPI in note.iter('TotalPI'):
# print('must_delete',TotalPI.text)
# print('^^^^^^^')
# for MIDI in pre_note.iter('MIDI'):
# print('aaa', MIDI.text)
# for TotalPI in pre_note.iter('TotalPI'):
# print('aaa',TotalPI.text)
# print('========')
# if(note.find('must_chord_delete') != None):
if(note.find('chord') != None):
if(pre_note.find('must_chord_delete') != None):
print('here')
else:
chord = note.find('chord')
note.remove(chord)
# measure.remove(pre_note)
quene_delete.append(pre_note)
must_delete = 0
if(note.find('chord_delete') != None):
for staff in note.iter('staff'):
staff_text = staff.text
for TotalPI in note.iter('TotalPI'):
TotalPI_text = TotalPI.text
TotalPI_text = float(TotalPI_text)
if(TotalPI_text == 4.0 or TotalPI_text == 2.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
if(1.0 < TotalPI_text < 2.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(2.0 < TotalPI_text < 3.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(3.0 < TotalPI_text < 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(4.0 < TotalPI_text):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
pre_note = note
for i in quene_delete:
measure.remove(i)
quene_delete = []
if(case == 4):
print('case 4')
if(case == 5 or case == 6):
# if(case == 3):
print('case 5')
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(must_delete == 1):
# print('^^^^^^^')
# for MIDI in note.iter('MIDI'):
# print('must_delete', MIDI.text)
# for TotalPI in note.iter('TotalPI'):
# print('must_delete',TotalPI.text)
# print('^^^^^^^')
# if(note.find('chord') != None):
# chord = note.find('chord')
# note.remove(chord)
# for MIDI in pre_note.iter('MIDI'):
# print('aaa', MIDI.text)
# for TotalPI in pre_note.iter('TotalPI'):
# print('aaa',TotalPI.text)
# print('========')
# measure.remove(pre_note)
if(note.find('chord') != None):
if(pre_note.find('must_chord_delete') != None):
print('here')
else:
chord = note.find('chord')
note.remove(chord)
# measure.remove(pre_note)
quene_delete.append(pre_note)
must_delete = 0
if(note.find('chord_delete') != None):
for staff in note.iter('staff'):
staff_text = staff.text
for TotalPI in note.iter('TotalPI'):
TotalPI_text = TotalPI.text
TotalPI_text = float(TotalPI_text)
if(TotalPI_text == 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
# if(staff_text == '1'):
### next note must delete the 'chord' !!!
# must_delete = 1
# print('must_delete == 1', must_delete)
# if(note.find('chord') != None):
# chord = note.find('chord')
# note.remove(chord)
# print('------')
# for MIDI in note.iter('MIDI'):
# print(MIDI.text)
# for TotalPI in note.iter('TotalPI'):
# print(TotalPI.text)
# print('------')
# else:
# # print('must_delete == 0', must_delete)
### ### ###
if(1.0 < TotalPI_text < 2.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(2.0 < TotalPI_text < 3.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(3.0 < TotalPI_text < 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(4.0 < TotalPI_text):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
pre_note = note
for i in quene_delete:
measure.remove(i)
quene_delete = []
if(case == 6):
print('case 6')
if(case == 7 or case == 8 or case == 9):
print('case 7')
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(must_delete == 1):
if(note.find('chord') != None):
if(pre_note.find('must_chord_delete') != None):
print('here')
else:
chord = note.find('chord')
note.remove(chord)
# measure.remove(pre_note)
quene_delete.append(pre_note)
must_delete = 0
if(note.find('chord_delete') != None):
for staff in note.iter('staff'):
staff_text = staff.text
for TotalPI in note.iter('TotalPI'):
TotalPI_text = TotalPI.text
TotalPI_text = float(TotalPI_text)
if(TotalPI_text == 5.0):
# if(TotalPI_text == 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
if(1.0 < TotalPI_text < 2.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(2.0 < TotalPI_text < 3.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(3.0 < TotalPI_text < 4.0):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
if(4.0 < TotalPI_text):
if(staff_text == '1'):
must_delete = 1
elif(staff_text == '2'):
# measure.remove(note)
quene_delete.append(note)
### ### ###
pre_note = note
for i in quene_delete:
measure.remove(i)
quene_delete = []
if(case == 8):
print('case 8')
# if(case == 9):
# print('case 9')
# for measure in root.iter('measure'):
# for note in measure.iter('note'):
# if(must_delete == 1):
# if(note.find('chord') != None):
# if(pre_note.find('must_chord_delete') != None):
# print('here')
# else:
# chord = note.find('chord')
# note.remove(chord)
# # measure.remove(pre_note)
# quene_delete.append(pre_note)
# must_delete = 0
# if(note.find('chord_delete') != None):
# for staff in note.iter('staff'):
# staff_text = staff.text
# for TotalPI in note.iter('TotalPI'):
# TotalPI_text = TotalPI.text
# TotalPI_text = float(TotalPI_text)
# if(TotalPI_text == 4.0 or TotalPI_text == 2.0 or TotalPI_text == 3.0 or TotalPI_text == 1.0):
# # if(TotalPI_text == 4.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# # measure.remove(note)
# quene_delete.append(note)
# ### ### ###
# if(1.0 < TotalPI_text < 2.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# # measure.remove(note)
# quene_delete.append(note)
# if(2.0 < TotalPI_text < 3.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# # measure.remove(note)
# quene_delete.append(note)
# if(3.0 < TotalPI_text < 4.0):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# # measure.remove(note)
# quene_delete.append(note)
# if(4.0 < TotalPI_text):
# if(staff_text == '1'):
# must_delete = 1
# elif(staff_text == '2'):
# # measure.remove(note)
# quene_delete.append(note)
# ### ### ###
# pre_note = note
# for i in quene_delete:
# measure.remove(i)
# quene_delete = []
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(note.find('must_chord_delete') != None):
measure.remove(note)
# pre_note = ''
# for measure in root.iter('measure'):
# for note in measure.iter('note'):
# if(note.find('chord_delete') != None):
# # measure.remove(note)
# for TotalPI in note.iter('TotalPI'):
# TotalPI_text = TotalPI.text
# TotalPI_text = float(TotalPI_text)
# if(TotalPI_text != 4.0 or TotalPI_text == 2.0 or TotalPI_text != 1.0 or TotalPI_text == 3.0):
# if(staff_text == '1'):
# measure.remove(pre_note)
# elif(staff_text == '2'):
# measure.remove(note)
# pre_note = note
tree.write('delete_high_dual.xml')
tree.write('change_temp.xml')
# print('delete_high_dual.xml')
print(' ----------> have change high dual')
def low_dual_func(root, tree):
### low !!!
# tree = parse('sonatina2.xml')
# root = tree.getroot()
### count all the chord notes
chord_num = 0
### to count the number of three dual notes
chord_pre = 0
chord_now = 0
chord_three = 0
is_three_chord = 0
for measure in root.iter('measure'):
for note in measure.iter('note'):
is_three_chord = 0
# print('here note: ',note)
for staff in note.iter('staff'):
daul_staff_data = staff.text
# print('daul_staff_data: ',daul_staff_data)
chord_now = 0
### must delete notes
chord = note.find('chord')
### count all the chord
if (chord != None):
chord_num = chord_num + 1
chord_now = 1
### three notes
if(chord_pre == 1 and chord_now == 1):
chord_three = chord_three + 1
is_three_chord = 1
### left hand delete 'chord'
if(is_three_chord == 1):
if(chord != None):
if(daul_staff_data == '1'):
xml.etree.ElementTree.SubElement(daul_pre_note, 'chord_delete')
daul_pre_note.find('chord_delete').text = 'yes'
if(note.find('chord') != None):
chord = note.find('chord')
note.remove(chord)
if(chord != None and daul_staff_data == '2'):
xml.etree.ElementTree.SubElement(note, 'rest')
xml.etree.ElementTree.SubElement(note, 'chord_delete')
note.find('chord_delete').text = 'yes'
### right hand delete 'chord'
if(chord != None and daul_staff_data == '1'):
xml.etree.ElementTree.SubElement(daul_pre_note, 'chord_delete')
daul_pre_note.find('chord_delete').text = 'yes'
if(note.find('chord') != None):
chord = note.find('chord')
note.remove(chord)
daul_pre_note = note
chord_pre = chord_now
### here is delete the notes !!!
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(note.find('chord_delete') != None):
measure.remove(note)
for measure in root.iter('measure'):
for note in measure.iter('note'):
if(note.find('chord_delete') != None):
measure.remove(note)
tree.write('delete_low_dual.xml')
# print(' the file "delete_low_dual.xml" is save.')
tree.write('change_temp.xml')
print(' ----------> have change low dual')
# DOMTree = xml.dom.minidom.parse('change-parse.xml')
# collection = DOMTree.documentElement
# level = 1
# hands = 0
# simple_dual(DOMTree, collection, level)
| agpl-3.0 | -3,115,448,628,422,695,000 | 36.623348 | 123 | 0.369446 | false |
drewcsillag/skunkweb | util/make_distr.py | 1 | 4166 | #! /usr/bin/env python
#
# Copyright (C) 2001 Andrew T. Csillag <[email protected]>
#
# You may distribute under the terms of either the GNU General
# Public License or the SkunkWeb License, as specified in the
# README file.
#
#
# This is a script which facilitates creation of versioned releases
#
import commands
import re
import sys
import os
EXCLUDE='pylibs/string8859.py pylibs/pargen pylibs/skunkdoc'
try:
from prompt import *
except ImportError:
sys.path.append('../pylibs')
from prompt import *
CVSROOT=open('CVS/Root').read().strip()
# Ask the questions
vers_q = StringQuestion('Please enter the version for this release')
src_q = StringQuestion('Please enter the directory where source code is checked out in', os.getcwd())
dist_dir = StringQuestion('Where do you wish the distribution to be created',
'/tmp')
vers_tag = StringQuestion('Please enter the tag for this release')
print 'Welcome to Skunk distribution creation'
print 'Please answer the following questions'
vers = vers_q.ask()
def_tag = 'SKUNK_RELEASE_' + re.sub('\.', '_', vers)
vers_tag.setDefault(def_tag)
tag = vers_tag.ask()
_dir = dist_dir.ask()
src_dir = src_q.ask()
conf_q = BoolQuestion('Are you sure you want to tag current code %s (version %s), and create a distribution in %s' % (tag, vers, _dir), 1)
if not conf_q.ask():
sys.exit(0)
#
# Update the version
#
for d, f, var, real_f in(('.', 'configure.in', 'SW_VERSION', 'configure'),
('SkunkWeb', 'SkunkWeb/configure.in', 'SW_VERSION', 'configure'),):
full_f = os.path.join(src_dir, f)
lines = open(full_f).read()
pat = re.compile('^%s=.*' % var, re.MULTILINE)
print 'Changing version in %s' % full_f
new_lines = pat.sub('%s=%s' % (var, vers), lines)
try:
f = open(full_f, 'w')
f.write(new_lines)
f.close()
except IOError, val:
raise 'Cannot write %s : %s' % (full_f, val)
# Run autoconf
os.chdir(os.path.join(src_dir, d))
ret, out = commands.getstatusoutput('autoconf')
if ret:
print 'Autoconf failed: returned %d: %s' % (ret, out)
sys.exit(1)
# Check the file back in
print 'Checking in %s, %s' % (full_f, real_f)
_d, _f = os.path.split(full_f)
os.chdir(_d)
cmd =("cvs ci -m 'upped version to %s' %s %s" % (vers, _f, real_f))
ret, out = commands.getstatusoutput(cmd)
if ret:
print 'Checkin failed: returned %d: %s' % (ret, out)
sys.exit(1)
# All done
#
# Ok, do the work
#
#for d, local in ( ('.', 1), ('AED', 0), ('SDS', 0), ('pylibs', 0), ('misc', 0),
# ('schemas', 0)):
for d, local in(('.', 1), ('SkunkWeb', 0), ('pylibs', 0), ('docs', 0)):
# Tag the stuff
if local:
opt = '-l -F '
else:
opt = '-F '
print 'Tagging in %s' % (os.path.join(src_dir, d))
cmd = 'cvs tag %s %s .' % (opt, tag)
os.chdir(os.path.join(src_dir, d))
ret, out = commands.getstatusoutput(cmd)
if ret:
print 'Tag failed in %s: returned %d: %s' % (d, ret, out)
sys.exit(1)
# untag the excluded files
os.chdir(src_dir)
cmd="cvs tag -d %s %s" % (tag, EXCLUDE)
ret, out=commands.getstatusoutput(cmd)
if ret:
print "untagging excludes failed: returned %d: %s" % (ret, out)
sys.exit(1)
#
# Ok, all tagged - create the distribution
#
os.chdir(_dir)
d_file = os.path.join(_dir, 'skunkweb-%s.tar.gz' % vers)
doc_cmds=[]
for i in ['stmlrefer', 'PyDO', 'devel', 'opman']:
doc_cmds.append('cd skunkweb/docs/html; make %s/%s.html' % (i, i))
doc_cmds.append('cd skunkweb/docs/paper-letter; make %s.ps %s.pdf %s.dvi' % (i,i,i))
cmds = (('cvs -d %s export -r %s -d skunkweb skunkweb' % (CVSROOT, tag,),)
+ tuple(doc_cmds) +
('mv skunkweb skunkweb-%s' % (vers,),
'tar czf %s skunkweb-%s' % (d_file, vers),
'rm -rf skunkweb-%s' % vers))
print 'Creating distribution'
for c in cmds:
ret, out = commands.getstatusoutput(c)
if ret:
print '"%s" failed: returned %d: %s' % (c, ret, out)
sys.exit(1)
print 'The new skunk distribution is now in %s' % d_file
sys.exit(0)
| gpl-2.0 | 2,828,195,229,895,905,000 | 26.228758 | 138 | 0.598896 | false |
eviljeff/olympia | src/olympia/applications/views.py | 1 | 5242 | from django.core.cache import cache
from django.db.transaction import non_atomic_requests
from django.utils.translation import ugettext
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.status import HTTP_201_CREATED, HTTP_202_ACCEPTED
from olympia import amo
from olympia.api.authentication import (
JWTKeyAuthentication,
)
from olympia.amo.feeds import BaseFeed
from olympia.amo.templatetags.jinja_helpers import absolutify, url
from olympia.amo.utils import render
from olympia.api.permissions import GroupPermission
from olympia.versions.compare import version_dict, version_re
from .models import AppVersion
def get_versions(order=('application', 'version_int')):
def fetch_versions():
apps = amo.APP_USAGE
versions = {app.id: [] for app in apps}
qs = list(AppVersion.objects.order_by(*order)
.filter(application__in=versions)
.values_list('application', 'version'))
for app, version in qs:
versions[app].append(version)
return apps, versions
return cache.get_or_set('getv' + ':'.join(order), fetch_versions)
@non_atomic_requests
def appversions(request):
apps, versions = get_versions()
return render(request, 'applications/appversions.html',
{'apps': apps, 'versions': versions})
class AppversionsFeed(BaseFeed):
# appversions aren't getting a created date so the sorting is kind of
# wanky. I blame fligtar.
def title(self):
return ugettext(u'Application Versions')
def link(self):
return absolutify(url('apps.appversions'))
def description(self):
return ugettext(u'Acceptable versions for all applications on AMO.')
def items(self):
apps, versions = get_versions(order=('application', '-version_int'))
return [(app, version) for app in apps
for version in versions[app.id][:3]]
return [(app, versions[app.id][:3]) for app in apps]
def item_title(self, item):
app, version = item
return u'%s %s' % (app.pretty, version)
item_description = ''
def item_link(self):
return self.link()
def item_guid(self, item):
return self.item_link() + '%s:%s' % item
class AppVersionView(APIView):
authentication_classes = [JWTKeyAuthentication]
permission_classes = [GroupPermission(amo.permissions.APPVERSIONS_CREATE)]
def put(self, request, *args, **kwargs):
# For each request, we'll try to create up to 3 versions for each app,
# one for the parameter in the URL, one for the corresponding "release"
# version if it's different (if 79.0a1 is passed, the base would be
# 79.0. If 79.0 is passed, then we'd skip that one as they are the
# same) and a last one for the corresponding max version with a star
# (if 79.0 or 79.0a1 is passed, then this would be 79.*)
# We validate the app parameter, but always try to create the versions
# for both Firefox and Firefox for Android anyway, because at the
# extension manifest level there is no difference so for validation
# purposes we want to keep both in sync.
application = amo.APPS.get(kwargs.get('application'))
if not application:
raise ParseError('Invalid application parameter')
requested_version = kwargs.get('version')
if not requested_version or not version_re.match(requested_version):
raise ParseError('Invalid version parameter')
version_data = version_dict(requested_version)
release_version = '%d.%d' % (
version_data['major'], version_data['minor1'] or 0)
star_version = '%d.*' % version_data['major']
created_firefox = self.create_versions_for_app(
application=amo.FIREFOX,
requested_version=requested_version,
release_version=release_version,
star_version=star_version)
created_android = self.create_versions_for_app(
application=amo.ANDROID,
requested_version=requested_version,
release_version=release_version,
star_version=star_version)
created = created_firefox or created_android
status_code = HTTP_201_CREATED if created else HTTP_202_ACCEPTED
return Response(status=status_code)
def create_versions_for_app(
self, *, application, requested_version, release_version,
star_version):
_, created_requested = AppVersion.objects.get_or_create(
application=application.id, version=requested_version)
if requested_version != release_version:
_, created_release = AppVersion.objects.get_or_create(
application=application.id, version=release_version)
else:
created_release = False
if requested_version != star_version:
_, created_star = AppVersion.objects.get_or_create(
application=application.id, version=star_version)
else:
created_star = False
return created_requested or created_release or created_star
| bsd-3-clause | -6,394,211,211,390,946,000 | 39.635659 | 79 | 0.662724 | false |
maoy/zknova | nova/virt/vmwareapi/volumeops.py | 1 | 8336 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for Storage-related functions (attach, detach, etc).
"""
from nova import exception
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova.virt.vmwareapi import vim_util
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import volume_util
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
class VMwareVolumeOps(object):
"""
Management class for Volume-related tasks
"""
def __init__(self, session):
self._session = session
def attach_disk_to_vm(self, vm_ref, instance_name,
adapter_type, disk_type, vmdk_path=None,
disk_size=None, linked_clone=False,
controller_key=None, unit_number=None,
device_name=None):
"""
Attach disk to VM by reconfiguration.
"""
client_factory = self._session._get_vim().client.factory
vmdk_attach_config_spec = vm_util.get_vmdk_attach_config_spec(
client_factory, adapter_type, disk_type,
vmdk_path, disk_size, linked_clone,
controller_key, unit_number, device_name)
LOG.debug(_("Reconfiguring VM instance %(instance_name)s to attach "
"disk %(vmdk_path)s or device %(device_name)s with type "
"%(disk_type)s") % locals())
reconfig_task = self._session._call_method(
self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=vmdk_attach_config_spec)
self._session._wait_for_task(instance_name, reconfig_task)
LOG.debug(_("Reconfigured VM instance %(instance_name)s to attach "
"disk %(vmdk_path)s or device %(device_name)s with type "
"%(disk_type)s") % locals())
def detach_disk_from_vm(self, vm_ref, instance_name, device):
"""
Detach disk from VM by reconfiguration.
"""
client_factory = self._session._get_vim().client.factory
vmdk_detach_config_spec = vm_util.get_vmdk_detach_config_spec(
client_factory, device)
disk_key = device.key
LOG.debug(_("Reconfiguring VM instance %(instance_name)s to detach "
"disk %(disk_key)s") % locals())
reconfig_task = self._session._call_method(
self._session._get_vim(),
"ReconfigVM_Task", vm_ref,
spec=vmdk_detach_config_spec)
self._session._wait_for_task(instance_name, reconfig_task)
LOG.debug(_("Reconfigured VM instance %(instance_name)s to detach "
"disk %(disk_key)s") % locals())
def discover_st(self, data):
"""Discover iSCSI targets."""
target_portal = data['target_portal']
target_iqn = data['target_iqn']
LOG.debug(_("Discovering iSCSI target %(target_iqn)s from "
"%(target_portal)s.") % locals())
device_name, uuid = volume_util.find_st(self._session, data)
if device_name:
LOG.debug(_("Storage target found. No need to discover"))
return (device_name, uuid)
# Rescan iSCSI HBA
volume_util.rescan_iscsi_hba(self._session)
# Find iSCSI Target again
device_name, uuid = volume_util.find_st(self._session, data)
if device_name:
LOG.debug(_("Discovered iSCSI target %(target_iqn)s from "
"%(target_portal)s.") % locals())
else:
LOG.debug(_("Unable to discovered iSCSI target %(target_iqn)s "
"from %(target_portal)s.") % locals())
return (device_name, uuid)
def get_volume_connector(self, instance):
"""Return volume connector information."""
iqn = volume_util.get_host_iqn(self._session)
return {
'ip': CONF.vmwareapi_host_ip,
'initiator': iqn,
'host': CONF.vmwareapi_host_ip
}
def attach_volume(self, connection_info, instance, mountpoint):
"""Attach volume storage to VM instance."""
instance_name = instance['name']
vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance_name)
# Attach Volume to VM
LOG.debug(_("Attach_volume: %(connection_info)s, %(instance_name)s, "
"%(mountpoint)s") % locals())
driver_type = connection_info['driver_volume_type']
if driver_type not in ['iscsi']:
raise exception.VolumeDriverNotFound(driver_type=driver_type)
data = connection_info['data']
mount_unit = volume_util.mountpoint_to_number(mountpoint)
# Discover iSCSI Target
device_name, uuid = self.discover_st(data)
if device_name is None:
raise volume_util.StorageError(_("Unable to find iSCSI Target"))
# Get the vmdk file name that the VM is pointing to
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
vmdk_file_path, controller_key, adapter_type, disk_type, unit_number \
= vm_util.get_vmdk_path_and_adapter_type(hardware_devices)
# Figure out the correct unit number
if unit_number < mount_unit:
unit_number = mount_unit
else:
unit_number = unit_number + 1
self.attach_disk_to_vm(vm_ref, instance_name,
adapter_type, disk_type="rdmp",
controller_key=controller_key,
unit_number=unit_number,
device_name=device_name)
LOG.info(_("Mountpoint %(mountpoint)s attached to "
"instance %(instance_name)s") % locals())
def detach_volume(self, connection_info, instance, mountpoint):
"""Detach volume storage to VM instance."""
instance_name = instance['name']
vm_ref = vm_util.get_vm_ref_from_name(self._session, instance_name)
if vm_ref is None:
raise exception.InstanceNotFound(instance_id=instance_name)
# Detach Volume from VM
LOG.debug(_("Detach_volume: %(instance_name)s, %(mountpoint)s")
% locals())
driver_type = connection_info['driver_volume_type']
if driver_type not in ['iscsi']:
raise exception.VolumeDriverNotFound(driver_type=driver_type)
data = connection_info['data']
# Discover iSCSI Target
device_name, uuid = volume_util.find_st(self._session, data)
if device_name is None:
raise volume_util.StorageError(_("Unable to find iSCSI Target"))
# Get the vmdk file name that the VM is pointing to
hardware_devices = self._session._call_method(vim_util,
"get_dynamic_property", vm_ref,
"VirtualMachine", "config.hardware.device")
device = vm_util.get_rdm_disk(hardware_devices, uuid)
if device is None:
raise volume_util.StorageError(_("Unable to find volume"))
self.detach_disk_from_vm(vm_ref, instance_name, device)
LOG.info(_("Mountpoint %(mountpoint)s detached from "
"instance %(instance_name)s") % locals())
| apache-2.0 | -1,843,336,539,798,740,500 | 44.551913 | 78 | 0.576536 | false |
xuru/pyvisdk | pyvisdk/do/customization_custom_ip_generator.py | 1 | 1043 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def CustomizationCustomIpGenerator(vim, *args, **kwargs):
'''Use a command-line program configured with the VirtualCenter server.'''
obj = vim.client.factory.create('ns0:CustomizationCustomIpGenerator')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'argument', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | -8,892,122,323,126,696,000 | 30.636364 | 124 | 0.605944 | false |
rlugojr/rekall | rekall-core/rekall/plugins/windows/cache.py | 1 | 14971 | # Rekall Memory Forensics
# Copyright 2014 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# pylint: disable=protected-access
"""This module adds plugins to inspect the Windows cache manager.

The Windows cache manager is responsible for maintaining the file cache for
files read from disk. The manager maintains a large arena of 256 KiB cached
blocks. These blocks are controlled via the VACB (Virtual Address Control
Block) arrays.
References:
http://www.codemachine.com/article_kernelstruct.html
"""
__author__ = "Michael Cohen <[email protected]>"
from rekall import obj
from rekall import utils
from rekall import testlib
from rekall.plugins import core
from rekall.plugins.windows import common
class EnumerateVacbs(common.WindowsCommandPlugin):
    """Enumerate every VACB managed by the Windows cache manager."""
    name = "vacbs"

    def GetVACBs_Win7(self):
        """Generate all system VACBs on Win7+ kernels.

        Win7 and later kernels keep a dynamically sized set of
        _VACB_ARRAY_HEADER tables.  Walking every table yields the complete
        contents of the cache manager.
        """
        # CcVacbArraysAllocated holds the number of tables in use.
        array_count = self.profile.get_constant_object(
            'CcVacbArraysAllocated', 'unsigned int')

        # CcVacbArrays points at an array of pointers, one per
        # _VACB_ARRAY_HEADER table.
        headers = self.profile.get_constant_object(
            'CcVacbArrays',
            target="Pointer",
            target_args=dict(
                target='Array',
                target_args=dict(
                    target="Pointer",
                    target_args=dict(
                        target='_VACB_ARRAY_HEADER'
                    ),
                    count=int(array_count),
                )
            )
        )

        for header in headers:
            self.session.report_progress(
                "Scanning VACB table %s", header.VacbArrayIndex)

            for vacb in header.VACBs:
                # Only emit entries that actually belong to this table.
                if vacb.ArrayHead == header:
                    yield vacb

    def GetVACBs_WinXP(self):
        """Generate all system VACBs on XP-era kernels.

        Older kernels use a single flat array of _VACB structs (CcVacbs)
        rather than the newer per-table layout.
        """
        # CcNumberVacbs holds the size of the flat array.
        vacb_count = self.profile.get_constant_object(
            'CcNumberVacbs', 'unsigned int')

        vacbs = self.profile.get_constant_object(
            'CcVacbs',
            target="Pointer",
            target_args=dict(
                target='Array',
                target_args=dict(
                    target="_VACB",
                    count=int(vacb_count),
                )
            )
        )

        for vacb in vacbs:
            yield vacb

    def GetVACBs(self):
        """Dispatch to the enumerator matching this kernel's layout."""
        # The presence of the CcVacbs constant identifies the old XP layout.
        if self.session.profile.get_constant("CcVacbs"):
            return self.GetVACBs_WinXP()

        return self.GetVACBs_Win7()

    table_header = [
        dict(name="_VACB", style="address"),
        dict(name='valid', width=7),
        dict(name="base", style="address"),
        dict(name="offset", style="address"),
        dict(name="filename"),
    ]

    def column_types(self):
        return dict(
            _VACB=self.session.profile._VACB(),
            valid=True,
            base=0,
            offset=0,
            filename="")

    def collect(self):
        for vacb in self.GetVACBs():
            filename = vacb.SharedCacheMap.FileObject.file_name_with_drive()
            if not filename:
                continue

            # A cached block is considered valid when its base address
            # translates in the kernel address space.
            resident = bool(
                self.kernel_address_space.vtop(vacb.BaseAddress.v()))

            yield (vacb,
                   resident,
                   vacb.BaseAddress.v(),
                   vacb.Overlay.FileOffset.QuadPart,
                   filename)
class DumpFiles(core.DirectoryDumperMixin, common.WinProcessFilter):
    """Dump files from memory.

    The interface is loosely based on the Volatility plugin of the same name,
    although the implementation is quite different.
    """

    name = "dumpfiles"

    __args = [
        dict(name="file_objects", type="ArrayIntParser",
             help="Kernel addresses of _FILE_OBJECT structs.")
    ]

    def CollectFileObject(self):
        """Collect all known file objects.

        Populates self.file_objects (set of _FILE_OBJECT) and
        self.vacb_by_cache_map (shared cache map address -> list of VACBs).
        """
        self.file_objects = set()
        self.vacb_by_cache_map = {}

        # Collect known file objects for selected processes.
        for task in self.filter_processes():
            # First scan the vads - mapped files show up as a FilePointer
            # on the VAD's control area.
            self.session.report_progress("Inspecting VAD for %s", task.name)
            for vad in task.RealVadRoot.traverse():
                file_object = vad.m("Subsection").ControlArea.FilePointer
                if file_object:
                    self.file_objects.add(file_object)

            # Now check handles.
            self.session.report_progress(
                "Inspecting Handles for %s", task.name)
            for handle in task.ObjectTable.handles():
                if handle.get_object_type() == "File":
                    self.file_objects.add(handle.Object)

        # Now scan all the objects in the cache manager.
        for vacb in self.session.plugins.vacbs().GetVACBs():
            shared_cache_map = vacb.SharedCacheMap.v()
            if shared_cache_map:
                # Keep a tally of all VACBs for each file_object.
                self.vacb_by_cache_map.setdefault(
                    shared_cache_map, []).append(vacb)

    def _dump_ca(self, ca, out_fd, section_type, filename, renderer):
        """Write the pages of one _CONTROL_AREA into *out_fd* (sparse file).

        section_type is the row label ("ImageSectionObject" or
        "DataSectionObject"); renamed from "type" to avoid shadowing the
        builtin.
        """
        # 512 byte sectors per 4k page. Floor division keeps this an int on
        # both Python 2 and Python 3.
        sectors_per_page = 0x1000 // 512

        for subsection in ca.FirstSubsection.walk_list("NextSubsection"):
            for i, pte in enumerate(subsection.SubsectionBase):
                pte_value = pte.u.Long.v()
                try:
                    phys_address = self.kernel_address_space.ResolveProtoPTE(
                        pte_value, 0)
                except AttributeError:
                    # For address spaces which do not support prototype
                    # (currently non PAE 32 bits) just support the absolute
                    # basic - valid PTE only.
                    # NOTE(review): this tests the PTE struct, not pte_value.
                    # rekall structs overload "&" so this works, but confirm
                    # it was not meant to be "pte_value & 1".
                    if pte & 1:
                        phys_address = pte_value & 0xffffffffff000
                    else:
                        continue

                # Use identity comparison with None (PEP 8); the old
                # "== None" relied on the object's __eq__.
                if phys_address is None:
                    continue

                # The file offset of this page.
                file_sector_offset = (
                    subsection.StartingSector + i * sectors_per_page)

                # Sometimes not the entire page is mapped in.
                file_sectors_mapped_in_page = min(
                    sectors_per_page,
                    subsection.NumberOfFullSectors - i * sectors_per_page)

                if file_sectors_mapped_in_page < 0:
                    continue

                # This should not happen but it does if the data is corrupt.
                if phys_address > self.physical_address_space.end():
                    continue

                renderer.table_row(
                    section_type, phys_address, file_sector_offset * 512,
                    file_sectors_mapped_in_page * 512, filename)

                # This writes a sparse file.
                out_fd.seek(file_sector_offset * 512)
                out_fd.write(self.physical_address_space.read(
                    phys_address, file_sectors_mapped_in_page * 512))

    table_header = [
        dict(name="type", width=20),
        dict(name="p_offset", style="address"),
        dict(name="f_offset", style="address"),
        dict(name="f_length", style="address"),
        dict(name="filename")
    ]

    def column_types(self):
        return dict(type="VACB", p_offset=0, f_offset=0,
                    f_length=0x1000, filename="")

    def collect(self):
        """Dump every selected file object; yields one row per VACB page."""
        renderer = self.session.GetRenderer()
        if not self.plugin_args.file_objects:
            self.CollectFileObject()
        else:
            self.file_objects = set(
                [self.session.profile._FILE_OBJECT(int(x))
                 for x in self.plugin_args.file_objects])

        seen_filenames = set()
        for file_object in self.file_objects:
            # NOTE: Python 2 codebase - "unicode" builtin. Backslashes are
            # flattened so the device path becomes a single file name.
            filename = unicode(
                file_object.file_name_with_device()).replace("\\", "_")

            # Each file is only dumped once even if referenced repeatedly.
            if filename in seen_filenames:
                continue

            seen_filenames.add(filename)

            self.session.report_progress(" Dumping %s", filename)
            with renderer.open(directory=self.dump_dir,
                               filename=filename, mode="w") as out_fd:
                filename = out_fd.name

                # Sometimes we get both subsections.
                ca = file_object.SectionObjectPointer.ImageSectionObject
                if ca:
                    self._dump_ca(ca, out_fd, "ImageSectionObject",
                                  filename, renderer)

                ca = file_object.SectionObjectPointer.DataSectionObject
                if ca:
                    self._dump_ca(ca, out_fd, "DataSectionObject",
                                  filename, renderer)

                scm = file_object.SectionObjectPointer.SharedCacheMap.v()

                # Augment the data with the cache manager.
                for vacb in self.vacb_by_cache_map.get(scm, []):
                    base_address = vacb.BaseAddress.v()
                    file_offset = vacb.Overlay.FileOffset.QuadPart.v()

                    # Each VACB controls a 256k buffer.
                    for offset in utils.xrange(0, 0x40000, 0x1000):
                        phys_address = self.kernel_address_space.vtop(
                            base_address + offset)
                        if phys_address:
                            yield dict(type="VACB",
                                       p_offset=phys_address,
                                       f_offset=file_offset+offset,
                                       f_length=0x1000,
                                       filename=filename)

                            # This writes a sparse file.
                            out_fd.seek(file_offset + offset)
                            out_fd.write(self.physical_address_space.read(
                                phys_address, 0x1000))
class TestDumpFiles(testlib.HashChecker):
    """Regression test: runs dumpfiles into a temp dir and hash-checks output."""
    PARAMETERS = dict(
        commandline="dumpfiles --dump_dir %(tempdir)s"
    )
class SparseArray(dict):
    """A dict that returns obj.NoneObject() for missing keys instead of
    raising KeyError - convenient for sparse MFT id -> entry lookups."""

    def __getitem__(self, key):
        return dict.get(self, key, obj.NoneObject())
class MftDump(common.WindowsCommandPlugin):
    """Enumerate MFT entries from the cache manager."""

    name = "mftdump"

    def __init__(self, *args, **kwargs):
        super(MftDump, self).__init__(*args, **kwargs)
        # NTFS struct definitions used to overlay MFT entries on cached data.
        self.ntfs_profile = self.session.LoadProfile("ntfs")
        # One MFT record is 1k; one VACB-controlled cache buffer is 256k.
        self.mft_size = 0x400
        self.vacb_size = 0x40000

        # A sparse MFT table - basically a map between mft id and MFT entry.
        self.mfts = SparseArray()

        # A directory tree. For each MFT id, the set of its direct children.
        # Pre-seeded with id 2 as root - TODO(review) confirm the intended
        # root id (the NTFS root directory is conventionally entry 5).
        self.dir_tree = {2: {}}

    def extract_mft_entries_from_vacb(self, vacb):
        # Overlay MFT_ENTRY structs over one cached $Mft buffer, one record
        # every self.mft_size bytes.
        base = vacb.BaseAddress.v()
        for offset in utils.xrange(base, base + self.vacb_size, self.mft_size):
            # Fixups are not applied in memory.
            mft = self.ntfs_profile.MFT_ENTRY(
                offset, context=dict(mft=self.mfts, ApplyFixup=False))
            # Valid records carry the "FILE" magic.
            if mft.magic != "FILE":
                continue

            mft_id = mft.mft_entry
            self.mfts[mft_id] = mft
            # Lazy lambda: the name is only decoded if progress is rendered.
            self.session.report_progress(
                "Added: %s", lambda mft=mft: mft.filename.name)

            # Link this record under its parent directory.
            parent_id = mft.filename.mftReference.v()
            if parent_id not in self.dir_tree:
                self.dir_tree[parent_id] = set()

            self.dir_tree[parent_id].add(mft_id)

    def collect_tree(self, root, seen, depth=0):
        # Depth-first emission of one row per known MFT entry; *seen* guards
        # against loops and duplicate emission.
        if root not in self.mfts or root in seen:
            return

        mft = self.mfts[root]
        standard_info = mft.get_attribute(
            "$STANDARD_INFORMATION").DecodeAttribute()

        yield dict(MFT=root,
                   mft_entry=mft,
                   file_modified=standard_info.file_altered_time,
                   mft_modified=standard_info.mft_altered_time,
                   access=standard_info.file_accessed_time,
                   create_time=standard_info.create_time,
                   Name=self.mfts[root].filename.name,
                   depth=depth)

        seen.add(root)
        for child in sorted(self.dir_tree.get(root, [])):
            if child not in seen:
                for x in self.collect_tree(child, seen, depth=depth+1):
                    yield x

    table_header = [
        dict(name="MFT", width=5, align="r"),
        dict(name="mft_entry", hidden=True),
        dict(name="file_modified", width=25),
        dict(name="mft_modified", width=25),
        dict(name="access", width=25),
        dict(name="create_time", width=25),
        dict(name="Name", type="TreeNode", max_depth=15, width=100),
    ]

    def column_types(self):
        # Prototype values for the renderer; timestamps are WinFileTime.
        wft = self.session.profile.WinFileTime()
        return dict(MFT=int,
                    mft_entry=self.ntfs_profile.MFT_ENTRY(),
                    file_modified=wft,
                    mft_modified=wft,
                    access=wft,
                    create_time=wft,
                    Name=self.session.profile.UnicodeString())

    def collect(self):
        # First harvest all cached $Mft buffers into self.mfts/dir_tree.
        for vacb in self.session.plugins.vacbs().GetVACBs():
            filename = vacb.SharedCacheMap.FileObject.FileName
            if filename == r"\$Mft":
                self.extract_mft_entries_from_vacb(vacb)

        # Avoid loops.
        seen = set()
        for mft_id in self.dir_tree:
            for x in self.collect_tree(mft_id, seen, depth=0):
                yield x
class TestMftDump(testlib.SortedComparison):
    """The output order is somewhat non-deterministic, so compare sorted."""
| gpl-2.0 | 8,356,214,992,809,683,000 | 35.603912 | 80 | 0.554272 | false |
saullocastro/pyNastran | pyNastran/bdf/dev_vectorized/cards/elements/solid/ctetra10.py | 1 | 9950 | from six.moves import zip
from numpy import zeros, arange, dot, cross, searchsorted, array
#from numpy.linalg import norm
from pyNastran.bdf.field_writer_8 import print_card_8
from pyNastran.bdf.bdf_interface.assign_type import integer, integer_or_blank
from pyNastran.bdf.dev_vectorized.cards.elements.solid.solid_element import SolidElement
def volume4(n1, n2, n3, n4):
    r"""
    Gets the volume, :math:`V`, of the tetrahedron.

    .. math:: V = \frac{(a-d) \cdot \left( (b-d) \times (c-d) \right) }{6}
    """
    # Express the three edge vectors relative to the fourth vertex and
    # evaluate the scalar triple product.
    a = n1 - n4
    b = n2 - n4
    c = n3 - n4
    return -dot(a, cross(b, c)) / 6.
class CTETRA10(SolidElement):
    # 10-node (quadratic) tetrahedral solid element, vectorized storage.
    type = 'CTETRA10'
    nnodes = 10

    def __init__(self, model):
        """
        Defines the CTETRA10 object.

        Parameters
        ----------
        model : BDF
            the BDF object
        """
        SolidElement.__init__(self, model)

    def add_card(self, card, comment=''):
        """Parse one CTETRA10 card into slot *i* of the vectorized arrays."""
        i = self.i

        eid = integer(card, 1, 'element_id')
        if comment:
            self.set_comment(eid, comment)

        #: Element ID
        #comment = self._comments[i]
        # NOTE(review): field 1 is read twice (eid above, element_id here)
        # and the comment is registered twice - presumably refactor
        # leftovers; harmless but redundant.
        element_id = integer(card, 1, 'element_id')
        if comment:
            self.set_comment(eid, comment)

        #: Element ID
        self.element_id[i] = element_id
        #: Property ID
        self.property_id[i] = integer(card, 2, 'property_id')
        #: Node IDs
        # Mid-side nodes (fields 7-12) are optional; 0 marks "absent".
        nids = array([
            integer(card, 3, 'node_id_1'),
            integer(card, 4, 'node_id_2'),
            integer(card, 5, 'node_id_3'),
            integer(card, 6, 'node_id_4'),
            integer_or_blank(card, 7, 'node_id_5', 0),
            integer_or_blank(card, 8, 'node_id_6', 0),
            integer_or_blank(card, 9, 'node_id_7', 0),
            integer_or_blank(card, 10, 'node_id_8', 0),
            integer_or_blank(card, 11, 'node_id_9', 0),
            integer_or_blank(card, 12, 'node_id_10', 0),
        ], dtype='int32')
        self.node_ids[i, :] = nids
        assert len(card) <= 13, 'len(CTETRA10 card) = %i\ncard=%s' % (len(card), card)
        self.i += 1

    def update(self, maps):
        """
        maps = {
            'node_id' : nid_map,
            'property' : pid_map,
        }
        """
        if self.n:
            eid_map = maps['element']
            nid_map = maps['node']
            pid_map = maps['property']
            for i, (eid, pid, nids) in enumerate(zip(self.element_id, self.property_id, self.node_ids)):
                # NOTE(review): debug print left in - consider removing.
                print(self.print_card(i))
                self.element_id[i] = eid_map[eid]
                self.property_id[i] = pid_map[pid]
                self.node_ids[i, 0] = nid_map[nids[0]]
                self.node_ids[i, 1] = nid_map[nids[1]]
                self.node_ids[i, 2] = nid_map[nids[2]]
                self.node_ids[i, 3] = nid_map[nids[3]]
                self.node_ids[i, 4] = nid_map[nids[4]]
                self.node_ids[i, 5] = nid_map[nids[5]]
                self.node_ids[i, 6] = nid_map[nids[6]]
                self.node_ids[i, 7] = nid_map[nids[7]]
                self.node_ids[i, 8] = nid_map[nids[8]]
                self.node_ids[i, 9] = nid_map[nids[9]]

    def _verify(self, xref=True):
        # Sanity checks on one element.
        # NOTE(review): self.eid / self.Pid() / self.centroid() / self.volume()
        # are not defined on this vectorized class in the visible code -
        # presumably copied from the unvectorized element; verify before use.
        eid = self.eid
        pid = self.Pid()
        nids = self.node_ids
        assert isinstance(eid, int)
        assert isinstance(pid, int)
        for i, nid in enumerate(nids):
            assert isinstance(nid, int), 'nid%i is not an integer; nid=%s' % (i, nid)
        if xref:
            c = self.centroid()
            v = self.volume()
            assert isinstance(v, float)
            for i in range(3):
                assert isinstance(c[i], float)

    def _node_locations(self, xyz_cid0, n):
        # Return the xyz coordinates (in CID=0) of the first *n* nodes of
        # every element; n is 4 (corners only) or 10 (all nodes).
        if xyz_cid0 is None:
            xyz_cid0 = self.model.grid.get_position_by_node_index()
        n1 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 0]), :]
        n2 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 1]), :]
        n3 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 2]), :]
        n4 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 3]), :]
        if n == 4:
            return n1, n2, n3, n4
        assert n == 10, n
        n5 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 4]), :]
        n6 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 5]), :]
        n7 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 6]), :]
        n8 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 7]), :]
        n9 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 8]), :]
        n10 = xyz_cid0[self.model.grid.get_node_index_by_node_id(self.node_ids[:, 9]), :]
        return n1, n2, n3, n4, n5, n6, n7, n8, n9, n10

    def get_volume_by_element_id(self, element_id=None, xyz_cid0=None, total=False):
        """
        Gets the volume for one or more CTETRA10 elements.

        Parameters
        ----------
        element_id : (N, ) int ndarray; (default=None -> all)
            the elements to consider
        xyz_cid0 : dict[int node_id] : (3, ) float ndarray xyz (default=None -> auto)
            the positions of the GRIDs in CID=0
        total : bool; default=False
            should the volume be summed
        """
        if element_id is None:
            element_id = self.element_id
        # The volume of a quadratic tet is approximated with its 4 corners.
        n1, n2, n3, n4 = self._node_locations(xyz_cid0, 4)
        n = len(element_id)
        V = zeros(n, self.model.float_fmt)

        i = 0
        for n1i, n2i, n3i, n4i in zip(n1, n2, n3, n4):
            V[i] = volume4(n1i, n2i, n3i, n4i)
            i += 1
        if total:
            V = V.sum()
        return V

    def get_centroid_volume(self, element_id=None, xyz_cid0=None, total=False):
        """
        Gets the centroid and volume for one or more CTETRA10 elements.

        Parameters
        ----------
        element_id : (N, ) int ndarray; (default=None -> all)
            the elements to consider
        xyz_cid0 : dict[int node_id] : (3, ) float ndarray xyz (default=None -> auto)
            the positions of the GRIDs in CID=0
        total : bool; default=False
            should the volume be summed

        ..see:: CTETRA10.volume() and CTETRA10.centroid() for more information.
        """
        if element_id is None:
            element_id = self.element_id

        n1, n2, n3, n4 = self._node_locations(xyz_cid0, 4)
        n = len(element_id)
        volume = zeros(n, self.model.float_fmt)

        i = 0
        for n1i, n2i, n3i, n4i in zip(n1, n2, n3, n4):
            volume[i] = volume4(n1i, n2i, n3i, n4i)
            i += 1

        centroid = (n1 + n2 + n3 + n4) / 4.0
        if total:
            # NOTE(review): mean() without axis collapses all coordinates to
            # a scalar; mean(axis=0) looks intended for an (n, 3) array.
            centroid = centroid.mean()
        assert volume.min() > 0.0, 'volume.min() = %f' % volume.min()
        return centroid, volume

    def get_centroid_by_element_id(self, element_id=None, xyz_cid0=None, total=False):
        """
        Gets the centroid for one or more CTETRA elements.

        Parameters
        ----------
        element_id : (N, ) int ndarray; (default=None -> all)
            the elements to consider
        xyz_cid0 : dict[int node_id] : (3, ) float ndarray xyz (default=None -> auto)
            the positions of the GRIDs in CID=0
        total : bool; default=False
            should the centroid be summed
        """
        if element_id is None:
            element_id = self.element_id
        n1, n2, n3, n4 = self._node_locations(xyz_cid0, 4)
        centroid = (n1 + n2 + n3 + n4) / 4.0
        if total:
            # NOTE(review): see get_centroid_volume - axis=0 likely intended.
            centroid = centroid.mean()
        return centroid

    def get_mass_by_element_id(self, element_id=None, xyz_cid0=None, total=False):
        """
        Gets the mass for one or more CTETRA10 elements.

        Parameters
        ----------
        element_id : (N, ) int ndarray; (default=None -> all)
            the elements to consider
        xyz_cid0 : dict[int node_id] : (3, ) float ndarray xyz (default=None -> auto)
            the positions of the GRIDs in CID=0
        total : bool; default=False
            should the centroid be summed
        """
        if element_id is None:
            element_id = self.element_id
        if xyz_cid0 is None:
            xyz_cid0 = self.model.grid.get_position_by_node_index()

        # mass = volume * density of the referenced solid material.
        V = self.get_volume_by_element_id(element_id, xyz_cid0)

        mid = self.model.properties_solid.get_material_id_by_property_id(self.property_id)
        rho = self.model.materials.get_density_by_material_id(mid)

        mass = V * rho
        if total:
            mass = mass.sum()
        return mass

    def get_face_nodes(self, nid, nid_opposite):
        # Not implemented for the vectorized element yet.
        raise NotImplementedError()
        #nids = self.node_ids[:4]
        #indx = nids.index(nid_opposite)
        #nids.pop(indx)
        #return nids

    def write_card(self, bdf_file, size=8, element_id=None):
        """Write the selected elements as CTETRA cards in small-field format."""
        if self.n:
            if element_id is None:
                i = arange(self.n)
            else:
                i = searchsorted(self.element_id, element_id)
            for (eid, pid, n) in zip(self.element_id[i], self.property_id[i], self.node_ids[i, :]):
                if eid in self._comments:
                    bdf_file.write(self._comments[eid])
                # 0 placeholders become blanks on the card.
                n = [ni if ni != 0 else None for ni in n]
                # The NASTRAN keyword is CTETRA for both the 4- and the
                # 10-node variant; the field count disambiguates.
                card = ['CTETRA', eid, pid, n[0], n[1], n[2], n[3], n[4], n[5], n[6], n[7], n[8], n[9]]
                bdf_file.write(print_card_8(card))

    #def slice_by_index(self, i):
        #i = self._validate_slice(i)
        #obj = CTETRA10(self.model)
        #obj.n = len(i)
        ##obj._cards = self._cards[i]
        ##obj._comments = obj._comments[i]
        ##obj.comments = obj.comments[i]
        #obj.element_id = self.element_id[i]
        #obj.property_id = self.property_id[i]
        #obj.node_ids = self.node_ids[i, :]
        #return obj
| lgpl-3.0 | -1,049,081,684,335,408,600 | 35.988848 | 104 | 0.530754 | false |
LumPenPacK/NetworkExtractionFromImages | win_build/nefi2_win_amd64_msvc_2015/bin/nefi2/view/main_controller.py | 1 | 72114 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This is nefi's main view. Currently we deployed all controls of the
GUI in the MainView.ui. Static changes to the GUI should always been
done by the Qt designer since this reduces the amount of code dramatically.
To draw the complete UI the controllers are invoked and the draw_ui function is
called
"""
from nefi2.model.pipeline import *
import copy
import time
import os
import traceback
import sys
import zope.event.classhandler
import PyQt5
import webbrowser
from PyQt5 import QtWidgets, uic, QtCore, QtGui
from PyQt5.QtPrintSupport import QPrintDialog, QPrinter
from PyQt5.QtGui import QIcon, QPixmap, QPainter, QWheelEvent
from PyQt5.QtCore import Qt, pyqtSignal, pyqtSlot, QObject, QEvent, QTimer, QSize, QRect, QFile, QIODevice
from PyQt5.QtWidgets import QBoxLayout, QGroupBox, QSpinBox, QDoubleSpinBox, QSlider, QLabel, QWidget, QHBoxLayout, \
QVBoxLayout, QStackedWidget, QComboBox, QSizePolicy, QToolButton, QMenu, QAction, QMessageBox, QApplication, \
QScrollArea, QAbstractScrollArea, QFrame, QGridLayout, QSplitter, QCheckBox, QSpacerItem
__authors__ = {"Dennis Groß": "[email protected]",
"Philipp Reichert": "[email protected]"}
# Load the Qt Designer layout for the main window; uic returns the generated
# (base, form) class pair that MainView inherits from.
try:
    mainview_path = os.path.join('nefi2', 'view', 'MainView.ui')
    base, form = uic.loadUiType(mainview_path)
except (FileNotFoundError):
    # NOTE(review): raising NameError carrying a directory listing looks
    # like a debugging aid to show where the .ui file was searched for -
    # consider a clearer, dedicated error.
    raise NameError(os.listdir(os.curdir))
# class CustomMainView(QWidget):
#
# def __init__(self):
# super(MainView, self).__init__()
class MainView(base, form):
scrollsignal = pyqtSignal()
    def __init__(self, pipeline, parent=None):
        """Build the main window for *pipeline* and wire up the UI.

        Args:
            pipeline: the pipeline model instance that drives processing
            parent: optional Qt parent widget
        """
        super(base, self).__init__(parent)
        self.setupUi(self)
        # Counter of pipeline runs (used to group result images per run).
        self.pip_run = 0
        self.pipeline = pipeline
        self.pip_widgets = []
        # Favorite pipelines for the combo box, as (name, url) pairs.
        self.default_pips = []
        self.active_pip_label = ""
        self.active_immediate_results_group_layout = None
        # Cache pipeline entries to use them for settings history.
        self.pipeline_cache = []
        # View option flags, toggled from the checkboxes in the UI.
        self.autofit = True
        self.autoclear = False
        self.autoscroll = True
        self.resultsonly = False
        self.comparemode = False
        self.MidCustomWidget = MidCustomWidget(self.mid_panel, self.autofit)
        # Icon handles reused when toggling buttons; filled by
        # load_dark_theme() later.
        self.q_icon_up = QtGui.QIcon()
        self.q_icon_down = QtGui.QIcon()
        self.q_icon_plus = QtGui.QIcon()
        self.q_icon_plus_grey = QtGui.QIcon()
        self.q_icon_delete = QtGui.QIcon()
        # Worker thread executing the pipeline off the UI thread.
        self.thread = ProcessWorker(self.pipeline)
        self.printer = QPrinter()
        # Order matters: actions before menus, widgets before signal wiring.
        self.createActions()
        self.createMenus()
        self.draw_ui()
        self.connect_ui()
def createMenus(self):
self.fileMenu = QMenu("&File", self)
self.fileMenu.addAction(self.openAct)
self.fileMenu.addAction(self.saveAct)
self.fileMenu.addAction(self.printAct)
self.fileMenu.addSeparator()
self.fileMenu.addAction(self.exitAct)
self.viewMenu = QMenu("&View", self)
self.viewMenu.addAction(self.zoomInAct)
self.viewMenu.addAction(self.zoomOutAct)
self.viewMenu.addAction(self.normalSizeAct)
self.viewMenu.addSeparator()
self.viewMenu.addAction(self.fitToWindowAct)
self.helpMenu = QMenu("&Help", self)
self.helpMenu.addAction(self.aboutAct)
self.helpMenu.addAction(self.docsAct)
self.helpMenu.addAction(self.aboutQtAct)
self.menuBar().addMenu(self.fileMenu)
self.menuBar().addMenu(self.viewMenu)
self.menuBar().addMenu(self.helpMenu)
def about(self):
QMessageBox.about(self, "About NEFI2",
"<p><b>NEFI 2.0</b> is a Python tool created "
"to extract networks from images. "
"Given a suitable 2D image of a network as input, "
"NEFI outputs a mathematical representation "
"as a weighted undirected planar graph. "
"Representing the structure of the network as a graph "
"enables subsequent studies of its properties "
"using tools and concepts from graph theory.<br><br>"
"<img src='nefi2/icons/logo_mpi.png'><br><br>"
"<b>TODO - AUTHORS <br>"
"TODO - VERSION <br>"
"TODO - REFERENCES </b> <br></p>")
def open_docs(self):
index = os.path.join(os.getcwd(), 'nefi2', 'doc', 'documentation',
'_build', 'html', 'index.html')
webbrowser.open('file://' + index)
    def print_(self):
        """Print the image currently shown in the middle panel.

        Opens a QPrintDialog and, if accepted, draws the pixmap scaled to
        the printer's viewport while keeping its aspect ratio.
        """
        # Nothing to print when no image has been loaded/produced yet.
        if (self.MidCustomWidget.getCurrentImage() is None):
            return
        dialog = QPrintDialog(self.printer, self)
        if dialog.exec_():
            painter = QPainter(self.printer)
            rect = painter.viewport()
            size = self.MidCustomWidget.getCurrentImage().size()
            # Fit the image into the printable area without distortion.
            size.scale(rect.size(), Qt.KeepAspectRatio)
            painter.setViewport(rect.x(), rect.y(), size.width(), size.height())
            painter.setWindow(self.MidCustomWidget.getCurrentImage().rect())
            painter.drawPixmap(0, 0, self.MidCustomWidget.getCurrentImage())
    def createActions(self):
        """Create the QActions used by the menus, with shortcuts and slots."""
        # File actions.
        self.openAct = QAction("&Open Image", self, shortcut="Ctrl+O",
                               triggered=self.set_input_url)
        self.saveAct = QAction("&Save Image", self, shortcut="Ctrl+S",
                               triggered=self.save_output_picture)
        self.printAct = QAction("&Print Image", self, shortcut="Ctrl+P",
                                enabled=True, triggered=self.print_)
        self.exitAct = QAction("E&xit", self, shortcut="Ctrl+Q",
                               triggered=self.close)
        # View actions: zoom/fit are delegated to the mid panel widget.
        self.zoomInAct = QAction("Zoom &In (25%)", self, shortcut="Ctrl++",
                                 enabled=True, triggered=self.MidCustomWidget.zoom_in_)
        self.zoomOutAct = QAction("Zoom &Out (25%)", self, shortcut="Ctrl+-",
                                  enabled=True, triggered=self.MidCustomWidget.zoom_out_)
        self.normalSizeAct = QAction("&Normal Size", self, shortcut="Ctrl+D",
                                     enabled=True, triggered=self.MidCustomWidget.resize_original)
        # Checkable "fit to window" action, on by default.
        self.fitToWindowAct = QAction("&Fit to Window", self, enabled=True,
                                      checkable=True, checked=True, shortcut="Ctrl+F",
                                      triggered=self.MidCustomWidget.toggleAutofit)
        self.fitToWindowAct.setChecked(True)
        # Help actions.
        self.aboutAct = QAction("&About", self, triggered=self.about)
        self.docsAct = QAction("&Documentation", self, triggered=self.open_docs)
        self.aboutQtAct = QAction("About &Qt", self,
                                  triggered=QApplication.instance().aboutQt)
def load_dark_theme(self, application):
"""
This function is called to load the white theme with
all its icons for the buttons and the css file.
Args:
application: the cureent app instance
"""
# load buttons
iconpath = os.path.join('nefi2', 'icons', 'close.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.clear_immediate_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'folder_white.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.open_pip_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'man.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.run_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'trash_white.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.delete_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'diskette_white.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.save_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'folder_white.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.input_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'folder_white.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.output_btn.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'plus.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.zoom_in.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'minus.png')
pixmap_icon = QtGui.QPixmap(iconpath)
q_icon = QtGui.QIcon(pixmap_icon)
self.zoom_out.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'resize.png')
q_icon = QtGui.QIcon(iconpath)
self.resize.setIcon(q_icon)
iconpath = os.path.join('nefi2', 'icons', 'up.png')
pixmap_up = QtGui.QPixmap(iconpath)
iconpath = os.path.join('nefi2', 'icons', 'down.png')
pixmap_down = QtGui.QPixmap(iconpath)
self.q_icon_up = QtGui.QIcon(pixmap_up)
self.q_icon_down = QtGui.QIcon(pixmap_down)
iconpath = os.path.join('nefi2', 'icons', 'plus.png')
pixmap_plus = QtGui.QPixmap(iconpath)
self.q_icon_plus = QtGui.QIcon(pixmap_plus)
self.enable_plus()
iconpath = os.path.join('nefi2', 'icons', 'plus_grey.png')
pixmap_plus_grey = QtGui.QPixmap(iconpath)
self.q_icon_plus_grey = QtGui.QIcon(pixmap_plus_grey)
iconpath = os.path.join('nefi2', 'icons', 'delete_x_white.png')
pixmap_icon_delete = QtGui.QPixmap(iconpath)
self.q_icon_delete = QtGui.QIcon(pixmap_icon_delete)
    def draw_ui(self):
        """
        This function draws all additional UI elements. If you want the
        application to display any additional things like a button you can
        either add it in the QtDesigner or declare it here.
        """
        self.setWindowTitle("NEFI 2.0")
        icon = QIcon(os.path.join('nefi2', 'icons', 'nefi2.png'))
        self.setWindowIcon(icon)
        # self.setWindowFlags(Qt.FramelessWindowHint)
        # Category/algorithm pickers live in a stacked widget so only the
        # dropdowns of the selected pipeline entry are visible at a time.
        self.ComboxCategories = QComboBox()
        self.stackedWidgetComboxesAlgorithms = QStackedWidget()
        self.select_cat_alg_vbox_layout.addWidget(self.ComboxCategories)
        self.select_cat_alg_vbox_layout.addWidget(self.stackedWidgetComboxesAlgorithms)
        self.ComboxCategories.hide()
        self.pip_widget_vbox_layout.setAlignment(Qt.AlignTop)
        self.select_cat_alg_vbox_layout.setAlignment(Qt.AlignTop)
        self.left_scroll_results_vbox_layout.setAlignment(Qt.AlignTop)
        # Progress overlay widgets, centered manually (see resizeEvent).
        self.progress_label = QLabel(self)
        self.progress_label.setGeometry(self.width() / 2 - 200, self.height() / 2 - 20, 400, 20)
        self.progress_label.hide()
        self.progressbar = QtWidgets.QProgressBar(self)
        self.progressbar.setGeometry(self.width() / 2 - 200, self.height() / 2, 400, 30)
        self.progressbar.hide()
        self.mid_panel_layout.addWidget(self.MidCustomWidget)
        self.mid_panel_layout.setContentsMargins(0, 0, 0, 0)
        # Vertical splitter dividing the pipeline list from the settings box.
        self.splitterWidget = QWidget()
        self.splitterWidgetLayout = QGridLayout()
        self.splitterWidgetLayout.setContentsMargins(7, 0, 0, 0)
        self.splitterWidget.setLayout(self.splitterWidgetLayout)
        self.splitter = QSplitter()
        self.splitterLayout = QHBoxLayout()
        self.splitterLayout.setSpacing(0)
        self.splitterLayout.setContentsMargins(0, 0, 0, 0)
        self.splitter.setLayout(self.splitterLayout)
        # Thin sunken line used as the visual splitter handle.
        self.splitterFrame = QFrame()
        self.splitterFrame.setFixedHeight(2)
        self.splitterFrame.setFrameShape(QFrame.HLine)
        self.splitterFrame.setFrameShadow(QFrame.Sunken)
        self.splitter.setHandleWidth(0)
        self.splitter.handleWidth()
        self.splitter.setOrientation(Qt.Vertical)
        self.splitter.setChildrenCollapsible(False)
        #self.pip_collapsable.setStyleSheet("border:0;")
        self.settings_collapsable.setStyleSheet("border:0;")
        self.splitter.addWidget(self.pip_collapsable)
        self.splitterLayout.addWidget(self.splitterFrame)
        self.splitter.addWidget(self.settings_collapsable)
        self.splitterWidgetLayout.addWidget(self.splitter)
        self.verticalLayout_9.addWidget(self.splitterWidget, Qt.AlignHCenter)
        #self.setStyleSheet("QScrollBar:horizontal {max-height: 15px;}" "QScrollBar:vertical {max-width: 15px;}")
        #self.mid_panel.setStyleSheet("border:0;")
        #self.right_panel.setStyleSheet("border:0;")
    def connect_ui(self):
        """
        This function connects the ui using signals from the
        ui elements and its method counterparts.
        """
        # connect pyqt slots with signals
        self.input_btn.clicked.connect(self.set_input_url)
        self.output_btn.clicked.connect(self.set_output_url)
        self.load_favorite_pipelines()
        self.fav_pips_combo_box.activated.connect(self.select_default_pip)
        self.run_btn.clicked.connect(self.run)
        self.delete_btn.clicked.connect(self.trash_pipeline)
        self.add_btn.clicked.connect(lambda: self.add_pipe_entry())
        self.resize.clicked.connect(lambda: self.MidCustomWidget.resize_default(True))
        self.zoom_in.clicked.connect(self.MidCustomWidget.zoom_in_)
        self.zoom_out.clicked.connect(self.MidCustomWidget.zoom_out_)
        # Keep the pipeline panel scrolled to the bottom as entries grow.
        self.pip_scroll.verticalScrollBar().rangeChanged.connect(self.scroll_down_pip)
        self.clear_immediate_btn.clicked.connect(self.clear_immediate_results)
        self.thread.progess_changed.connect(self.update_progress)
        self.thread.immediate_results_changed[object, QCheckBox].connect(lambda x=object,y=QCheckBox: self.update_add_immediate_result(x,y))
        self.thread.finished.connect(self.process_finish)
        self.open_pip_btn.clicked.connect(self.open_pip_json)
        self.save_btn.clicked.connect(self.save_pip_json)
        self.auto_clear.toggled.connect(self.set_autoclear)
        self.auto_scroll.toggled.connect(self.set_autoscroll)
        # delay() emits scrollsignal a moment after a run finishes so the
        # new result widgets are laid out before auto-scrolling.
        self.thread.finished.connect(self.delay)
        self.scrollsignal.connect(self.scroll_down_left)
        self.results_only.toggled.connect(self.set_resultsonly)
        self.compare_mode.toggled.connect(self.set_comparemode)
        # not implemented yet
        self.compare_mode.hide()
        # connect zope.events
        zope.event.classhandler.handler(ProgressEvent, self.thread.update_progress)
        zope.event.classhandler.handler(CacheAddEvent, self.thread.update_add_immediate_result)
        zope.event.classhandler.handler(CacheRemoveEvent, self.update_remove_immediate_result)
        zope.event.classhandler.handler(CacheInputEvent, self.update_input)
    def back_connect_settings(self, cat, pixmap):
        """Rewire a pipeline entry's labels so clicking them re-shows *pixmap*.

        Args:
            cat: the pipeline category whose entry should be wired
            pixmap: the result image to display when the entry is clicked
        """
        try:
            pip_entry = self.get_pip_entry(cat)
        except (ValueError):
            print("Pipeline entry has already been deleted.")
            return

        # Show image while settings is selected
        # *TODO* Use pip_entry.findChild(PyQt5.QtWidgets.QLabel, name) instead
        labels = pip_entry.findChildren(PyQt5.QtWidgets.QLabel)
        pixmap_label = labels[0]
        string_label = labels[1]

        def set_image():
            # Closure over cat/pixmap: refresh the mid panel and its title.
            self.MidCustomWidget.setCurrentImage(pixmap)
            self.MidCustomWidget.resetImageSize()
            self.MidCustomWidget.setPixmap(pixmap, self.mid_panel)
            self.mid_panel.setTitle(
                str(cat.get_name() + " " + cat.active_algorithm.name) + " - Pipeline Position " + str(
                    self.pipeline.get_index(cat) + 1))

        pixmap_label.trigger.connect(set_image)
        string_label.trigger.connect(set_image)
@pyqtSlot()
def get_current_image(self, image, cat=None):
self.MidCustomWidget.setCurrentImage(image)
self.MidCustomWidget.resize_default()
try:
pip_entry = self.get_pip_entry(cat)
settings_widget = self.get_settings_widget(cat)
except (ValueError):
self.reset_pip_backgroundcolor()
self.reset_pip_backgroundcolor()
self.stackedWidget_Settings.hide()
self.remove_cat_alg_dropdown()
self.settings_collapsable.setTitle("Settings")
return
# Set background color while widget is selected.
pip_entry.setStyleSheet("background-color:DarkSlateGrey;")
# Reset background color for all other pipeline entries
self.reset_pip_backgroundcolor(pip_entry)
self.stackedWidget_Settings.show()
self.stackedWidget_Settings.setCurrentIndex(self.pipeline.get_index(cat))
self.settings_collapsable.setTitle(cat.active_algorithm.get_name() + " Settings")
# Create drop down for cats and algs
self.remove_cat_alg_dropdown()
self.create_cat_alg_dropdown(self.pipeline.get_index(cat), pip_entry, settings_widget)
self.set_cat_alg_dropdown(cat, cat.active_algorithm)
    def resizeEvent(self, event=None):
        """Qt resize hook: keep the progress overlay centered and refit image."""
        if self.MidCustomWidget.auto_fit:
            # Re-center the manually positioned progress widgets.
            self.progressbar.setGeometry(self.width() / 2 - 200, self.height() / 2, 400, 30)
            self.progress_label.setGeometry(self.width() / 2 - 200, self.height() / 2 - 20, 400, 20)
            self.MidCustomWidget.resize_default()
    def set_autoclear(self):
        """Toggle the auto-clear flag (slot for the auto_clear checkbox)."""
        self.autoclear = not self.autoclear
    def set_autoscroll(self):
        """Toggle the auto-scroll flag (slot for the auto_scroll checkbox)."""
        self.autoscroll = not self.autoscroll
    def set_resultsonly(self):
        """Toggle the results-only flag (slot for the results_only checkbox)."""
        self.resultsonly = not self.resultsonly
    def set_comparemode(self):
        """Toggle the compare-mode flag (feature currently hidden in the UI)."""
        self.comparemode = not self.comparemode
"""
def keyPressEvent(self, key):
if key.modifiers() & Qt.ControlModifier:
self.left_scroll.verticalScrollBar().blockSignals(True)
def keyReleaseEvent(self, key):
if Qt.ControlModifier:
self.left_scroll.verticalScrollBar().blockSignals(False)
def mousePressEvent(self, key):
self.left_scroll.verticalScrollBar().blockSignals(True)
def mouseReleaseEvent(self, key):
self.left_scroll.verticalScrollBar().blockSignals(False)
"""
def delay(self):
    """Emit *scrollsignal* after a tiny delay (lets the event loop settle)."""
    from threading import Timer
    Timer(0.01, self.scrollsignal.emit).start()
def scroll_down_left(self):
    """Scroll the left results panel to the bottom when autoscroll is on."""
    if not self.autoscroll:
        return
    bar = self.left_scroll.verticalScrollBar()
    bar.setSliderPosition(bar.maximum() + 100)
def scroll_down_pip(self):
    """Scroll the pipeline panel all the way down."""
    bar = self.pip_scroll.verticalScrollBar()
    bar.setSliderPosition(bar.maximum() + 100)
def disable_plus(self):
    """Grey out the 'add step' button while a blank step is pending."""
    self.add_btn.setIcon(self.q_icon_plus_grey)
    self.add_btn.setEnabled(False)
def enable_plus(self):
    """Re-enable the 'add step' button and restore its normal icon."""
    self.add_btn.setIcon(self.q_icon_plus)
    self.add_btn.setEnabled(True)
def disable_pip(self):
    """Placeholder — disabling the pipeline panel is not implemented."""
    pass
def enable_pip(self):
    """Placeholder — enabling the pipeline panel is not implemented."""
    pass
def set_pip_title(self, title):
    """
    Sets the title of the currently selected pipeline in the UI.

    Args:
        | *title*: the title of the pipeline
    """
    self.active_pip_label = title
    self.current_pip_label.setText(title)
@pyqtSlot()
def clear_immediate_results(self):
    """
    This method removes all images from the immediate results when
    the user clicked the clear button
    """
    # Delegates to the "new run" cleanup which keeps the input image.
    self.clear_left_side_new_run()
@pyqtSlot(int)
def select_default_pip(self, index):
    """
    Slot for the favourite-pipelines combobox: load the selected default
    pipeline (or clear the pipeline for the placeholder item at index 0).

    Args:
        index: index of the option currently selected
    """
    if index < 1:
        # "Please Select" placeholder: just clear the current pipeline
        self.trash_pipeline()
        return
    # delete current pipeline
    self.trash_pipeline()
    # get url and name (index 0 is the placeholder, hence index - 1)
    name, url = self.default_pips[index - 1]
    # parse the json in the model
    try:
        self.pipeline.load_pipeline_json(url)
    except Exception as e:
        print("Failed to load default pip: " + name + " received parser error")
        traceback.print_exc()
        return
    # set the title
    self.set_pip_title(name)
    # Create an entry in the pipeline widget for every step in the pipeline
    for i in range(0, len(self.pipeline.executed_cats)):
        self.add_pipe_entry(i)
@pyqtSlot()
def save_pip_json(self):
    """
    Ask the user for a target file and save the current pipeline as JSON.

    Fix: the old code indexed ``url[0]`` on the *string* returned by the
    dialog, which raised IndexError (and printed a bogus failure
    traceback) whenever the user cancelled the dialog. An explicit
    emptiness check now handles cancellation cleanly.
    """
    url = str(QtWidgets.QFileDialog.getSaveFileName(self, "Save Pipeline", '', 'JSON file (*.json)')[0])
    if not url:
        # user cancelled the file dialog — nothing to do
        return
    name = os.path.basename(url)
    try:
        self.pipeline.save_pipeline_json(name, url)
    except Exception:
        print("Failed to save pip json on file system")
        traceback.print_exc()
        return
    self.set_pip_title(name)
@pyqtSlot()
def open_pip_json(self):
    """
    This method provides the logic for the open_pip_btn which lets the user load a
    pip json from an arbitrary location of the file system.
    """
    url = QtWidgets.QFileDialog.getOpenFileNames(self, 'Open Pipeline', '',
                                                 'JSON file (*.json)')
    # url is (list_of_paths, selected_filter); empty list means "cancelled"
    if url[0]:
        # delete current pipeline
        self.trash_pipeline()
        # parse the json in the model
        try:
            self.pipeline.load_pipeline_json(url[0][0])
            # reset pipelines run counter
            self.pip_run = 0
        except Exception as e:
            print("Failed to load the json at the location: " + url[0][0])
            traceback.print_exc()
            return
        # set the title
        self.set_pip_title(os.path.basename(url[0][0]))
        # Create an entry in the pipeline widget for every step in the pipeline
        for i in range(0, len(self.pipeline.executed_cats)):
            self.add_pipe_entry(i)
@pyqtSlot(object)
def update_progress(self, event):
    """
    Fired by the pipeline's progress event; reflects the current
    progress in the UI.

    Args:
        event: carries *value* (percentage 0-100) and *report*
               (name of the category being processed next)
    """
    self.progressbar.setValue(event.value)
    self.progress_label.setText("Calculating " + event.report)
@pyqtSlot(object)
def update_remove_immediate_result(self, event):
    """
    Triggered when the pipeline removes an entry from the model-side
    cache; removes the corresponding result widget from the UI as well.

    Fix: the old code iterated the QVBoxLayout object directly (layouts
    are not iterable) and only ``del``-eted the loop variable, so no
    widget was ever actually removed. It now walks the layout via
    count()/itemAt() and detaches + deletes the matching widgets.

    Args:
        event: the event from the model (carries the affected category)
    """
    layout = self.left_scroll_results_vbox_layout
    # iterate in reverse so removals do not shift pending indices
    for i in reversed(range(layout.count())):
        widget = layout.itemAt(i).widget()
        if widget is not None and getattr(widget, 'cat', None) == event.cat:
            layout.removeWidget(widget)
            widget.deleteLater()
@pyqtSlot(object)
def update_input(self, event):
    """
    This event tells us that the model loaded a new input image into the cache.
    We also display the new image in the immediate results.

    Args:
        event: the event from the model (carries *path* of the new image)
    """
    path = event.path
    self.lineEdit.setText(path)
    self.clear_left_side_new_image()
    pixmap = QPixmap(event.path)
    self.MidCustomWidget.setCurrentImage(pixmap)
    self.MidCustomWidget.resetImageSize()
    self.MidCustomWidget.setPixmap(pixmap, self.mid_panel)
    # the input image has no per-step settings widget
    settings_widget = None
    widget = LeftCustomWidget(event.path, self.MidCustomWidget, self.mid_panel,
                              self.left_scroll_results, self.MidCustomWidget.getCurrentImage(),
                              self.get_current_image, self.pipeline, settings_widget, self.left_scroll.verticalScrollBar())
    self.left_scroll_results_vbox_layout.addWidget(widget, Qt.AlignTop)
@pyqtSlot(object)
def update_add_immediate_result(self, event, checkbox):
    """
    This method gets fired when the pipeline computed a fresh
    immediate result; it adds a result widget to the current run group.

    Fixes: integer positions are now compared with ``!=`` instead of
    ``is not`` (identity on ints is a CPython implementation detail),
    and the bare ``except:`` was narrowed to ``except Exception``.

    Args:
        event: the event from the model (carries *path* and *cat*)
        checkbox: the per-run "Results" checkbox controlling visibility
    """
    path = event.path
    pixmap = QPixmap(path)
    self.MidCustomWidget.setCurrentImage(pixmap)
    self.MidCustomWidget.resetImageSize()
    self.MidCustomWidget.setPixmap(pixmap, self.mid_panel)
    settings_widget = self.load_settings_widgets_from_pipeline_groupbox(event.cat)
    widget = LeftCustomWidget(path, self.MidCustomWidget, self.mid_panel,
                              self.left_scroll_results, self.MidCustomWidget.getCurrentImage(),
                              self.get_current_image,
                              self.pipeline, settings_widget, self.left_scroll.verticalScrollBar(), event.cat)
    self.active_immediate_results_group_layout.addWidget(widget)
    # the final step of the pipeline is always visible; intermediate
    # steps are hidden in results-only mode and toggled by the checkbox
    last_index = len(self.pipeline.executed_cats) - 1
    if self.resultsonly and self.pipeline.get_index(event.cat) != last_index:
        widget.hide()
    if self.pipeline.get_index(event.cat) != last_index:
        checkbox.toggled.connect(widget.setVisible, Qt.UniqueConnection)
    try:
        self.back_connect_settings(event.cat, pixmap)
    except Exception:
        e = sys.exc_info()[0]
        print("<p>Error: %s</p>" % e)
@pyqtSlot()
def run(self):
    """
    Start a pipeline run: validate the configuration, build the per-run
    header (timestamp + "Results" checkbox) in the results panel and
    kick off the worker thread.

    Fix: removed the unused local ``signal = pyqtSignal()`` — an unbound
    signal created inside a function has no effect.
    """
    # Check if we have a legal pipeline configuration
    msg, cat = self.pipeline.sanity_check()
    if cat:
        # mark the offending entry red and explain via tooltip
        widget = self.get_pip_entry(cat)
        widget.setStyleSheet("background-color:red;")
        widget.setToolTip(msg)
        return
    # Clear left side pictures for auto delete option
    if self.autoclear:
        self.clear_immediate_results()
    # set a timestamp for the current run
    # so the user can distinct between them
    if len(self.pipeline.executed_cats) != 0:
        title = QWidget()
        title.setFixedWidth(315)
        title.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Maximum)
        titelLayout = QVBoxLayout()
        titelLayout.setAlignment(Qt.AlignLeft)
        titelLayout.setContentsMargins(0, 0, 0, 0)
        titelLayout.setSpacing(7)
        title.setLayout(titelLayout)
        # subsequent immediate results of this run attach to this layout
        self.active_immediate_results_group_layout = titelLayout
        timestamp = QLabel()
        self.pip_run += 1
        timestamp.setText(self.active_pip_label + " " + str(time.strftime("%H:%M:%S")) + ", Run: " + str(self.pip_run))
        timestamp.setStyleSheet("font:Candara; font-size: 11pt;")
        timestamp.setContentsMargins(0, 7, 0, 7)
        show_pipeline = QCheckBox()
        if self.resultsonly:
            show_pipeline.setChecked(False)
        else:
            show_pipeline.setChecked(True)
        show_pipeline.setText("Results")
        show_pipeline.setContentsMargins(0, 0, 0, 0)
        # horizontal separator line above the run header
        line = QFrame()
        line.setFrameShape(QFrame.HLine)
        line.setFrameShadow(QFrame.Sunken)
        line.setFixedWidth(295)
        titelLayout.addWidget(line)
        titelLayout.addWidget(timestamp, Qt.AlignLeft)
        titelLayout.addWidget(show_pipeline, Qt.AlignLeft)
        self.left_scroll_results_vbox_layout.addWidget(title)
        # lock the UI and show progress while the worker thread runs
        self.right_panel.setEnabled(False)
        self.progress_label.show()
        self.progressbar.show()
        self.thread.setCheckbox(show_pipeline)
        try:
            if not self.thread.isRunning():
                self.thread.start()
        except Exception:
            print("Process thread crashed")
            traceback.print_exc()
@pyqtSlot()
def process_finish(self):
    """Re-enable the UI and hide the progress overlay after a run ends."""
    self.right_panel.setEnabled(True)
    self.progress_label.hide()
    self.progressbar.hide()
@pyqtSlot()
def set_input_url(self):
    """
    Let the user pick an input image file and hand its path to the
    pipeline model.
    """
    url = QtWidgets.QFileDialog.getOpenFileNames(self, 'Open Image', '',
                                                 'Images (*.jpg *.jpeg *.png *.tif *.tiff)')
    # url is (list_of_paths, selected_filter); empty list means "cancelled"
    if url[0]:
        self.clear_left_side_new_image()
        self.pipeline.set_input(url[0][0])
        self.mid_panel.setTitle("Input - Image")
        # reset pipelines run
        self.pip_run = 0
@pyqtSlot()
def save_output_picture(self):
    """
    Ask the user for a target path and save the currently displayed
    image as PNG.

    Fixes: cancelling the dialog no longer raises IndexError on
    ``url[0]`` (an empty string) and no longer prints a spurious
    failure traceback; the unused ``name`` local and a leftover debug
    print were removed.
    """
    image = self.MidCustomWidget.getCurrentImage()
    if image is None:
        # nothing is displayed yet
        return
    url = str(QtWidgets.QFileDialog.getSaveFileName(self, "Save Image", '', 'Image file (*.png)')[0])
    if not url:
        # user cancelled the file dialog
        return
    try:
        image.save(url)
    except Exception:
        print("Failed to save image file on file system")
        traceback.print_exc()
@pyqtSlot()
def set_output_url(self):
    """
    This method sets the url for the output folder in the pipeline.
    The folder is chosen by the user via a directory dialog.
    """
    url = QtWidgets.QFileDialog.getExistingDirectory()
    if url:
        self.custom_line_edit.setText(url)
        self.pipeline.set_output_dir(url)
def cache_pipeline_entries(self, pipeline):
    """Deep-copy every executed category of *pipeline* into the cache."""
    for entry in pipeline.executed_cats:
        self.pipeline_cache.append(copy.deepcopy(entry))
def cache_remove_entry(self, entry):
    """Drop the first occurrence of *entry* from the pipeline cache."""
    self.pipeline_cache.remove(entry)
def cache_clear(self):
    """Empty the pipeline cache in place."""
    del self.pipeline_cache[:]
def load_favorite_pipelines(self):
    """
    Scans the default-pipelines directory and adds every *.json pipeline
    found there to the favourites combobox (after a placeholder item).
    """
    self.fav_pips_combo_box.addItem("Please Select")
    # scan the directory for default pipelines
    default_pip = os.path.join('nefi2', 'default_pipelines')
    for pip in os.listdir(default_pip):
        if pip.endswith(".json"):
            # display name is the filename without its extension
            name = pip.split(".")[0]
            url = os.path.join('nefi2', 'default_pipelines', pip)
            self.default_pips.append([name, url])
            self.fav_pips_combo_box.addItem(name)
def trash_pipeline(self):
    """
    Clear the whole pipeline (UI widgets and model state) after the
    user pressed the trash button.

    Fix: the model's ``executed_cats`` list was cleared twice in the
    old code; once is enough.
    """
    # remove all entries in the pipeline list
    while self.pip_widget_vbox_layout.count():
        child = self.pip_widget_vbox_layout.takeAt(0)
        child.widget().deleteLater()
    # drop every settings page
    while self.stackedWidget_Settings.currentWidget() is not None:
        self.stackedWidget_Settings.removeWidget(self.stackedWidget_Settings.currentWidget())
    self.settings_collapsable.setTitle("")
    # remove the pipeline name
    self.set_pip_title("")
    # reset pipeline run
    self.pip_run = 0
    # remove all entries from the model pipeline
    del self.pipeline.executed_cats[:]
    # remove all widgets
    del self.pip_widgets[:]
    # remove category algorithm dropdown
    self.remove_cat_alg_dropdown()
    self.enable_plus()
def clear_left_side_new_image(self):
    """Remove every result widget from the left panel (new input loaded)."""
    layout = self.left_scroll_results_vbox_layout
    while layout.count():
        layout.takeAt(0).widget().deleteLater()
def clear_left_side_new_run(self):
    """Remove all result widgets except the input image at index 0."""
    layout = self.left_scroll_results_vbox_layout
    while layout.count() > 1:
        layout.takeAt(1).widget().deleteLater()
@pyqtSlot(int)
def remove_pip_entry(self, pipe_entry_widget, settings_widget, cat=None):
    """
    Remove one pipeline entry from the UI and (optionally) the model.

    Args:
        pipe_entry_widget: the entry widget to remove from the list
        settings_widget: its settings page, or None
        cat: the model category to delete; None = UI-only removal
    """
    # remove pipeline entry widget from ui
    self.pip_widget_vbox_layout.removeWidget(pipe_entry_widget)
    pipe_entry_widget.deleteLater()
    # remove it settings widgets from ui
    if settings_widget is not None:
        if self.stackedWidget_Settings.currentWidget() == settings_widget:
            # the removed entry was the selected one: hide settings area
            self.stackedWidget_Settings.hide()
            self.remove_cat_alg_dropdown()
            self.settings_collapsable.setTitle("Settings")
        self.stackedWidget_Settings.removeWidget(settings_widget)
    # remove in model
    if cat is not None:
        if cat.get_name() == "blank":
            # a pending blank step is gone, so adding steps is allowed again
            self.enable_plus()
        self.pipeline.delete_category(self.pipeline.get_index(cat))
def change_pip_entry_alg(self, position, new_category, new_algorithm, pipe_entry_widget, settings_widget):
    """
    Changes the selected algorithm of the pipeline entry at the position.
    Afterwards create all widgets for this algorithm instance.

    Args:
        position: the position of the pipeline entry
        new_category: name of the newly selected category
        new_algorithm: name of the newly selected algorithm
        pipe_entry_widget: the UI entry widget being replaced
        settings_widget: the settings widget being replaced
    """
    # print("Position to be changed:" + str(position))
    # print("Pipeline length: " + str(len(self.pipeline.executed_cats)))
    # NOTE(review): old_cat/old_alg are only read by the commented-out
    # debug prints below — confirm they can be dropped.
    old_cat = self.pipeline.executed_cats[position]
    old_alg = old_cat.active_algorithm
    # print("Old Cat found in pipeline: " + str(old_cat))
    # print("Old Alg: found in pipeline:" + str(old_alg))
    # print("New Category given:" + str(new_category))
    # print("New Algorithm given:" + str(new_algorithm))
    # set in model
    self.pipeline.change_category(new_category, position)
    self.pipeline.change_algorithm(new_algorithm, position)
    new_cat = self.pipeline.executed_cats[position]
    new_alg = new_cat.active_algorithm
    # change settings widgets: rebuild entry + settings, then re-select it
    self.remove_pip_entry(pipe_entry_widget, settings_widget)
    (new_pipe_entry_widget, new_settings_widget) = self.add_pipe_entry(position)
    new_pipe_entry_widget.setStyleSheet("background-color:DarkSlateGrey;")
    self.stackedWidget_Settings.show()
    self.stackedWidget_Settings.setCurrentIndex(position)
    self.settings_collapsable.setTitle(new_alg.get_name() + " Settings")
    self.remove_cat_alg_dropdown()
    self.create_cat_alg_dropdown(position, new_pipe_entry_widget, new_settings_widget)
    self.set_cat_alg_dropdown(new_cat, new_alg)
    # print("New Cat found in pipeline: " + str(new_cat))
    # print("New Alg found in pipeline: " + str(new_alg))
def load_settings_widgets_from_pipeline_groupbox(self, cat):
    """
    Build the settings panel for the active algorithm of *cat*: one
    widget per slider / checkbox / dropdown the algorithm declares.

    Args:
        cat: the category whose active algorithm provides the settings

    Returns: a QWidget containing all setting widgets for this particular alg.
    """
    alg = cat.active_algorithm
    empty_flag = True  # stays True when the algorithm exposes no settings
    groupOfSliders = QWidget()
    # NOTE(review): *sp* is configured but never applied to a widget —
    # confirm whether it can be removed.
    sp = QSizePolicy()
    sp.setVerticalPolicy(QSizePolicy.Preferred)
    groupOfSliderssLayout = QBoxLayout(QBoxLayout.TopToBottom)
    groupOfSliderssLayout.setContentsMargins(0, 0, 0, 0)
    groupOfSliderssLayout.setAlignment(Qt.AlignTop)
    groupOfSliderssLayout.setSpacing(0)
    # create integer sliders
    for slider in alg.integer_sliders:
        empty_flag = False
        groupOfSliderssLayout.addWidget(
            SliderWidget(slider.name, slider.lower, slider.upper, slider.step_size, slider.value,
                         slider.set_value, False, alg), 0, Qt.AlignTop)
    # create float sliders
    for slider in alg.float_sliders:
        empty_flag = False
        groupOfSliderssLayout.addWidget(
            SliderWidget(slider.name, slider.lower, slider.upper, slider.step_size, slider.value,
                         slider.set_value, True, alg), 0, Qt.AlignTop)
    # create checkboxes
    for checkbox in alg.checkboxes:
        empty_flag = False
        groupOfSliderssLayout.addWidget(CheckBoxWidget(checkbox.name, checkbox.value, checkbox.set_value, alg), 0,
                                        Qt.AlignTop)
    # create dropdowns
    for combobox in alg.drop_downs:
        empty_flag = False
        groupOfSliderssLayout.addWidget(
            ComboBoxWidget(combobox.name, combobox.options, alg, combobox.set_value, combobox.value), 0,
            Qt.AlignTop)
    if empty_flag:
        # no settings at all: show an informational label instead
        label = QLabel()
        label.setFixedHeight(30)
        label.setText("This algorithm has no Settings.")
        label.setFixedHeight(50)
        groupOfSliderssLayout.addWidget(label, 0, Qt.AlignHCenter)
    groupOfSliders.setLayout(groupOfSliderssLayout)
    return groupOfSliders
def create_cat_alg_dropdown(self, cat_position, pipe_entry_widget, settings_widget):
    """
    Build the linked category/algorithm dropdowns for the pipeline entry
    at *cat_position*: one category combobox plus a stack with one
    algorithm combobox per category.

    Args:
        cat_position: index of the entry in the pipeline
        pipe_entry_widget: the UI entry the dropdowns belong to
        settings_widget: the entry's settings widget
    """
    layout = self.select_cat_alg_vbox_layout
    cat = self.pipeline.executed_cats[cat_position]
    last_cat = None
    last_cat_name = None
    # Show only allowed categories in dropdown
    if len(self.pipeline.executed_cats) > 1:
        last_cat = self.pipeline.executed_cats[cat_position]
        last_cat_name = last_cat.get_name()
    # Combobox for selecting Category
    self.ComboxCategories.show()
    self.ComboxCategories.setFixedHeight(30)
    self.ComboxCategories.addItem("<Please Select Category>")
    self.stackedWidgetComboxesAlgorithms = QStackedWidget()
    self.stackedWidgetComboxesAlgorithms.setFixedHeight(30)
    self.stackedWidgetComboxesAlgorithms.hide()

    def setCurrentIndexCat(index):
        # index 0 is the "<Please Select Category>" placeholder
        if self.ComboxCategories.currentIndex() == 0:
            self.stackedWidgetComboxesAlgorithms.hide()
        else:
            self.stackedWidgetComboxesAlgorithms.show()
            self.stackedWidgetComboxesAlgorithms.setCurrentIndex(index - 1)

    for category_name in [cat.name for cat in self.pipeline.get_available_cats()]:
        # Add Category to combobox
        self.ComboxCategories.addItem(category_name)
        tmp1 = QComboBox()
        tmp1.addItem("<Please Select Algorithm>")
        tmp1.setFixedHeight(30)
        category = self.pipeline.get_category(category_name)
        # self.current_index = -1

        def setCurrentIndexAlg(index):
            # ignore selections while either placeholder item is active
            if self.ComboxCategories.currentIndex() == 0 or self.stackedWidgetComboxesAlgorithms.currentWidget().currentIndex() == 0:
                pass
            else:
                if cat.get_name() == "blank":
                    self.enable_plus()
                self.change_pip_entry_alg(self.pipeline.get_index(cat), self.ComboxCategories.currentText(),
                                          self.stackedWidgetComboxesAlgorithms.currentWidget().currentText(),
                                          pipe_entry_widget, settings_widget)
                # self.current_index = index

        tmp1.activated.connect(setCurrentIndexAlg, Qt.UniqueConnection)
        for algorithm_name in self.pipeline.get_all_algorithm_list(category):
            tmp1.addItem(algorithm_name)
        self.stackedWidgetComboxesAlgorithms.addWidget(tmp1)
    #layout.setMaximumHeight(200)
    layout.addWidget(self.ComboxCategories)
    layout.addWidget(self.stackedWidgetComboxesAlgorithms)
    self.ComboxCategories.activated.connect(setCurrentIndexCat, Qt.UniqueConnection)
def set_cat_alg_dropdown(self, category, algorithm):
    """
    Pre-select *category* and *algorithm* in the dropdowns built by
    create_cat_alg_dropdown.
    """
    indexC = self.ComboxCategories.findText(category.get_name())
    self.ComboxCategories.setCurrentIndex(indexC)
    self.stackedWidgetComboxesAlgorithms.show()
    # stack index is offset by one because of the placeholder item
    self.stackedWidgetComboxesAlgorithms.setCurrentIndex(indexC - 1)
    indexA = self.stackedWidgetComboxesAlgorithms.currentWidget().findText(algorithm.get_name())
    self.stackedWidgetComboxesAlgorithms.currentWidget().setCurrentIndex(indexA)
def remove_cat_alg_dropdown(self):
    """
    Tear down the category/algorithm dropdowns: replace the category
    combobox with a fresh one, empty the algorithm stack and hide all
    widgets left in the dropdown layout.
    """
    self.ComboxCategories = QComboBox()
    self.select_cat_alg_vbox_layout.addWidget(self.ComboxCategories)
    while self.stackedWidgetComboxesAlgorithms.currentWidget() is not None:
        self.stackedWidgetComboxesAlgorithms.removeWidget(self.stackedWidgetComboxesAlgorithms.currentWidget())
    while self.select_cat_alg_vbox_layout.count():
        child = self.select_cat_alg_vbox_layout.takeAt(0)
        child.widget().hide()
def add_pipe_entry(self, position=None):
    """
    Create an entry in the UI pipeline at *position* (appends a blank
    "new step" entry when position is None) together with its settings
    widget, and wire up select / delete / move-up / move-down actions.

    Returns:
        (pip_main_widget, settings_main_widget) — the entry widget and
        its settings widget (None for a blank entry).
    """
    # create an widget that displays the pip entry in the ui and connect the remove button
    pip_main_widget = QWidget()
    pip_main_widget.setFixedWidth(320)
    pip_main_widget.setFixedHeight(50)
    hbox_layout = QHBoxLayout()
    hbox_layout.setAlignment(Qt.AlignLeft)
    hbox_layout.setAlignment(Qt.AlignVCenter)
    pip_main_widget.setLayout(hbox_layout)
    new_marker = False
    if position is None:
        # append a blank placeholder step at the end of the pipeline
        position = len(self.pipeline.executed_cats)
        cat = self.pipeline.new_category(position)
        label = "<Specify new step to continue>"
        icon = None
        new_marker = True
    else:
        cat = self.pipeline.executed_cats[position]
        alg = cat.active_algorithm
        label = alg.get_name()
        icon = cat.get_icon()
        new_marker = False
    pixmap_label = ClickableQLabel()
    pixmap_label.setFixedHeight(50)
    pixmap_label.setFixedWidth(50)
    pixmap_label.setContentsMargins(0, -20, 0, 0)
    # small column with the move-up / move-down buttons
    pip_up_down = QWidget()
    pip_up_down.setFixedHeight(30)
    pip_up_down.setFixedWidth(30)
    pip_up_down_layout = QVBoxLayout()
    pip_up_down_layout.setAlignment(Qt.AlignLeft)
    pip_up_down.setLayout(pip_up_down_layout)
    pip_up_down.setContentsMargins(-13, -11, 0, 0)
    up_btn = QToolButton()
    dw_btn = QToolButton()
    up_btn.setIcon(self.q_icon_up)
    dw_btn.setIcon(self.q_icon_down)
    up_btn.setFixedHeight(20)
    dw_btn.setFixedHeight(20)
    pip_up_down_layout.addWidget(up_btn)
    pip_up_down_layout.addWidget(dw_btn)
    if not new_marker:
        hbox_layout.addWidget(pip_up_down, Qt.AlignVCenter)
        pixmap_icon = QPixmap(icon)
        pixmap_scaled_keeping_aspec = pixmap_icon.scaled(30, 30, QtCore.Qt.KeepAspectRatio)
        pixmap_label.setPixmap(pixmap_scaled_keeping_aspec)
    # btn_plus = QtWidgets.QPushButton()
    # btn_plus.setFixedSize(20, 20)
    # btn_plus.setIcon(self.q_icon_plus)
    # hbox_layout.addWidget(btn_plus)
    string_label = ClickableQLabel()
    string_label.setText(label)
    if not new_marker:
        # NOTE(review): whether setFixedWidth also belongs under this
        # guard is ambiguous in the original layout — confirm.
        string_label.setFixedHeight(30)
        string_label.setFixedWidth(200)
    # delete button for this entry
    btn = QtWidgets.QPushButton()
    btn.setFixedHeight(30)
    btn.setFixedWidth(30)
    # NOTE(review): iconpath/pixmap_icon/q_icon below are unused — the
    # button uses the pre-built self.q_icon_delete instead.
    iconpath = os.path.join('nefi2', 'icons', 'delete_x_white.png')
    pixmap_icon = QtGui.QPixmap(iconpath)
    q_icon = QtGui.QIcon(pixmap_icon)
    btn.setIcon(self.q_icon_delete)
    hbox_layout.addWidget(string_label, Qt.AlignLeft)
    if not new_marker:
        hbox_layout.addWidget(pixmap_label, Qt.AlignRight)
    hbox_layout.addWidget(btn, Qt.AlignRight)
    self.pip_widget_vbox_layout.insertWidget(position, pip_main_widget, Qt.AlignTop)
    # Create the corresponding settings widget and connect it
    self.settings_collapsable.setTitle("Settings")
    self.stackedWidget_Settings.hide()
    settings_main_widget = None
    if not new_marker:
        settings_main_widget = self.load_settings_widgets_from_pipeline_groupbox(cat)
        self.stackedWidget_Settings.insertWidget(position, settings_main_widget)

    def show_settings():
        # Set background color while widget is selected.
        pip_main_widget.setStyleSheet("background-color:DarkSlateGrey;")
        # Reset background color for all other pipeline entries
        self.reset_pip_backgroundcolor(pip_main_widget)
        if not new_marker:
            self.stackedWidget_Settings.show()
            self.stackedWidget_Settings.setCurrentIndex(self.pipeline.get_index(cat))
            self.settings_collapsable.setTitle(alg.get_name() + " Settings")
        else:
            self.stackedWidget_Settings.hide()
        # Create drop down for cats and algs
        self.remove_cat_alg_dropdown()
        self.create_cat_alg_dropdown(self.pipeline.get_index(cat), pip_main_widget, settings_main_widget)
        if not new_marker:
            self.set_cat_alg_dropdown(cat, alg)

    # Connect Button to remove step from pipeline
    def delete_button_clicked():
        self.remove_cat_alg_dropdown()
        self.remove_pip_entry(pip_main_widget, settings_main_widget, cat)

    def move_up_button_clicked():
        try:
            current_position = self.pipeline.get_index(cat)
        except ValueError:
            print("Pipeline entry has already been removed.")
            return
        if current_position == 0 or new_marker:
            pass
        else:
            current_position = self.pipeline.get_index(cat)
            self.swap_pip_entry(current_position - 1, current_position)
            self.reset_pip_backgroundcolor()
            self.get_pip_entry(cat).setStyleSheet("background-color:DarkSlateGrey;")

    def move_down_button_clicked():
        try:
            current_position = self.pipeline.get_index(cat)
        except ValueError:
            print("Pipeline entry has already been removed.")
            return
        if current_position == len(self.pipeline.executed_cats) - 1 or new_marker:
            pass
        else:
            current_position = self.pipeline.get_index(cat)
            # never move a step below a pending blank entry
            if self.pipeline.executed_cats[current_position + 1].get_name() == "blank":
                pass
            else:
                self.swap_pip_entry(current_position, current_position + 1)
                self.reset_pip_backgroundcolor()
                self.get_pip_entry(cat).setStyleSheet("background-color:DarkSlateGrey;")

    pixmap_label.trigger.connect(show_settings)
    string_label.trigger.connect(show_settings)
    btn.clicked.connect(delete_button_clicked, Qt.UniqueConnection)
    up_btn.clicked.connect(move_up_button_clicked, Qt.UniqueConnection)
    dw_btn.clicked.connect(move_down_button_clicked, Qt.UniqueConnection)
    # show new settings widget for new step
    if new_marker:
        show_settings()
        self.disable_plus()
    return (pip_main_widget, settings_main_widget)
def reset_pip_backgroundcolor(self, current_pip_main_widget=None):
    """Clear the highlight of every pipeline entry except the given one."""
    layout = self.pip_widget_vbox_layout
    for i in range(layout.count()):
        widget = layout.itemAt(i).widget()
        if widget is not current_pip_main_widget:
            widget.setStyleSheet("background-color:None;")
def get_pip_entry(self, cat):
    """Return the UI pipeline-entry widget belonging to *cat*."""
    return self.pip_widget_vbox_layout.itemAt(self.pipeline.get_index(cat)).widget()
def get_settings_widget(self, cat):
    """Return the settings widget belonging to *cat*."""
    return self.stackedWidget_Settings.widget(self.pipeline.get_index(cat))
def swap_pip_entry(self, pos1, pos2):
    """
    Swap two entries in the UI pipeline and the pipeline model.

    Fix: the upper-bound guard used ``>``, so a position equal to
    ``len(executed_cats)`` slipped through and raised IndexError on the
    list access below; the guard now uses ``>=``.
    """
    if pos1 == pos2:
        return
    if pos1 < 0 or pos2 < 0:
        return
    if pos1 >= len(self.pipeline.executed_cats) or pos2 >= len(self.pipeline.executed_cats):
        return
    # Save pipeline model entries
    cat1 = self.pipeline.executed_cats[pos1]
    cat2 = self.pipeline.executed_cats[pos2]
    # Find pipe_entry_widget
    pipe_entry_widget1 = self.pip_widget_vbox_layout.itemAt(pos1).widget()
    pipe_entry_widget2 = self.pip_widget_vbox_layout.itemAt(pos2).widget()
    # Find settings_widget
    settings_widget1 = self.stackedWidget_Settings.widget(pos1)
    settings_widget2 = self.stackedWidget_Settings.widget(pos2)
    # Remove old entries (UI only — cat is not passed, so the model keeps both)
    self.remove_pip_entry(pipe_entry_widget1, settings_widget1)
    self.remove_pip_entry(pipe_entry_widget2, settings_widget2)
    # Swap in the model and rebuild both UI entries
    self.pipeline.executed_cats[pos1] = cat2
    self.pipeline.executed_cats[pos2] = cat1
    self.add_pipe_entry(pos1)
    self.add_pipe_entry(pos2)
class QScrollArea_filtered(QScrollArea):
    """Scroll area that turns Ctrl + mouse-wheel events into zoom signals."""

    def __init__(self):
        super(QScrollArea_filtered, self).__init__()

    # emitted on Ctrl+wheel-up / Ctrl+wheel-down respectively
    zoom_in = pyqtSignal()
    zoom_out = pyqtSignal()

    def eventFilter(self, obj, event):
        """Swallow Ctrl+wheel events and re-emit them as zoom signals."""
        if event.type() == QEvent.Wheel:
            if (event.modifiers() & Qt.ControlModifier):
                # wheel down → zoom out, wheel up → zoom in
                if event.angleDelta().y() < 0:
                    self.zoom_out.emit()
                else:
                    self.zoom_in.emit()
                return True
        # let all other events propagate normally
        return False
class ClickableQLabel(QLabel):
    """QLabel that emits *trigger* when left-clicked."""
    trigger = pyqtSignal()

    def __init__(self):
        super(ClickableQLabel, self).__init__()

    def mousePressEvent(self, event):
        # only the left mouse button fires the signal
        if event.button() == QtCore.Qt.LeftButton:
            self.trigger.emit()
class MidCustomWidget(QWidget):
    """
    Central image viewer: displays the current image inside a scroll
    area, supports Ctrl+wheel zooming, drag panning and an optional
    auto-fit mode that scales the image to the panel size.
    """

    def __init__(self, mid_panel, auto_fit):
        super(MidCustomWidget, self).__init__()
        self.auto_fit = auto_fit              # True: refit image on panel resize
        self.current_image_original = None    # unscaled pixmap currently shown
        self.current_image_size = 1.0         # current zoom factor
        self.mid_panel = mid_panel
        self.offset = 0                       # last mouse-press position (panning)
        self.pixels_x = None                  # scrollbar maxima before last zoom step
        self.pixels_y = None
        self.imageLabel = QLabel()
        self.imageLabel.setAlignment(Qt.AlignCenter)
        self.imageLabel.setSizePolicy(QSizePolicy.Ignored, QSizePolicy.Ignored)
        self.imageLabel.setScaledContents(False)
        self.scrollArea = QScrollArea_filtered()
        self.scrollArea.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.scrollArea.setWidget(self.imageLabel)
        self.scrollArea.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.scrollArea.setFrameShadow(QtWidgets.QFrame.Plain)
        self.scrollArea.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.scrollArea.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.Layout = QVBoxLayout()
        self.Layout.setContentsMargins(11,11,11,11)
        self.setLayout(self.Layout)
        self.Layout.addWidget(self.scrollArea, Qt.AlignTop)
        self.scrollArea.zoom_in.connect(self.zoom_in_)
        self.scrollArea.zoom_out.connect(self.zoom_out_)
        # keep the viewport centered when the scroll range changes after a zoom
        self.scrollArea.horizontalScrollBar().rangeChanged[int, int].connect(lambda min, max : self.handle_zoom_x(min, max,))
        self.scrollArea.verticalScrollBar().rangeChanged[int, int].connect(lambda min, max: self.handle_zoom_y(min, max,))

    def mousePressEvent(self, QMouseEvent):
        # start panning: remember the grab point
        self.setCursor(Qt.ClosedHandCursor)
        self.offset = QMouseEvent.pos()

    def mouseReleaseEvent(self, QMouseEvent):
        self.setCursor(Qt.ArrowCursor)

    def mouseMoveEvent(self, QMouseEvent):
        # pan while the left button is held down
        if (QMouseEvent.buttons() & Qt.LeftButton):
            self.move(QMouseEvent.pos() - self.offset)

    """
    def keyPressEvent(self, key):
        if key.modifiers() & Qt.ControlModifier:
            self.scrollArea.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
            self.scrollArea.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)

    def keyReleaseEvent(self, key):
        if Qt.ControlModifier:
            self.scrollArea.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
            self.scrollArea.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
    """

    def move(self, offset):
        # translate the drag delta into scrollbar movement (damped by /50)
        self.scrollArea.verticalScrollBar().setSliderPosition(
            self.scrollArea.verticalScrollBar().value() - offset.y() / 50)
        self.scrollArea.horizontalScrollBar().setSliderPosition(
            self.scrollArea.horizontalScrollBar().value() - offset.x() / 50)

    def setPixmap(self, pixmap, mid_panel):
        """Set *pixmap* as current image and display it per the fit mode."""
        self.setCurrentImage(pixmap)
        if self.auto_fit:
            self.resize_default()
        else:
            self.resize_original()

    def resetImageSize(self):
        # back to 100 % zoom
        self.current_image_size = 1.0

    def setCurrentImage(self, pixmap):
        self.current_image_original = pixmap

    def getCurrentImage(self):
        return self.current_image_original

    @pyqtSlot()
    def handle_zoom_y(self, min, max):
        """Re-center the vertical scrollbar after its range changed (zoom)."""
        if self.pixels_y == None:
            return
        delta = self.scrollArea.verticalScrollBar().maximum() - self.pixels_y
        #print("y delta " + str(delta))
        value = self.scrollArea.verticalScrollBar().value() + delta/2
        self.scrollArea.verticalScrollBar().setValue(value)
        self.pixels_y = self.scrollArea.verticalScrollBar().maximum()

    @pyqtSlot()
    def handle_zoom_x(self, min, max):
        """Re-center the horizontal scrollbar after its range changed (zoom)."""
        if self.pixels_x == None:
            return
        delta = self.scrollArea.horizontalScrollBar().maximum() - self.pixels_x
        #print("x delta " + str(delta))
        value = self.scrollArea.horizontalScrollBar().value() + delta/2
        self.scrollArea.horizontalScrollBar().setValue(value)
        self.pixels_x = self.scrollArea.horizontalScrollBar().maximum()

    def zoom_out_(self):
        """Shrink the displayed image by 15 % (floor at 10 % zoom)."""
        if not self.current_image_original:
            return
        if self.current_image_size < 0.1:
            return
        self.pixels_x = self.scrollArea.horizontalScrollBar().maximum()
        self.pixels_y = self.scrollArea.verticalScrollBar().maximum()
        self.current_image_size = self.current_image_size * 0.85
        pixmap = self.current_image_original.scaled(self.current_image_original.width() * self.current_image_size,
                                                    self.current_image_original.width() * self.current_image_size,
                                                    QtCore.Qt.KeepAspectRatio, Qt.FastTransformation)
        self.imageLabel.setGeometry(0, 0, pixmap.width() + 22, pixmap.height() + 22)
        self.imageLabel.setPixmap(pixmap)

    def zoom_in_(self):
        """Enlarge the displayed image by 25 % (ceiling at 300 % zoom)."""
        if not self.current_image_original:
            return
        if self.current_image_size > 3:
            return
        self.pixels_x = self.scrollArea.horizontalScrollBar().maximum()
        self.pixels_y = self.scrollArea.verticalScrollBar().maximum()
        self.current_image_size = self.current_image_size * 1.25
        pixmap = self.current_image_original.scaled(self.current_image_original.width() * self.current_image_size,
                                                    self.current_image_original.width() * self.current_image_size,
                                                    QtCore.Qt.KeepAspectRatio, Qt.FastTransformation)
        self.imageLabel.setGeometry(0, 0, pixmap.width() + 22, pixmap.height() + 22)
        self.imageLabel.setPixmap(pixmap)

    def resize_original(self):
        """Show the image at its natural (100 %) size."""
        if not self.current_image_original:
            return
        self.current_image_size = 1.0
        self.imageLabel.setGeometry(0, 0, self.current_image_original.width() + 22,
                                    self.current_image_original.height() + 22)
        self.imageLabel.setPixmap(self.current_image_original)

    def resize_default(self, force=None):
        """Fit the image to the middle panel (only in auto-fit mode unless *force*)."""
        if not self.current_image_original:
            return
        if not self.auto_fit and not force:
            return
        original_width = self.current_image_original.width()
        if original_width != 0:
            self.current_image_size = self.mid_panel.width() / original_width
        new_pixmap = self.current_image_original.scaled(self.mid_panel.width() - 50 , self.mid_panel.height() - 120,
                                                        QtCore.Qt.KeepAspectRatio, Qt.SmoothTransformation)
        self.imageLabel.setGeometry(0, 0, new_pixmap.width() + 22, new_pixmap.height() + 22)
        self.imageLabel.setPixmap(new_pixmap)

    def toggleAutofit(self):
        """Toggle auto-fit mode and immediately apply the new mode."""
        self.auto_fit = not self.auto_fit
        if self.auto_fit:
            self.resize_default()
        else:
            self.resize_original()
class LeftCustomWidget(QWidget):
"""
this widget is used in the left panel of the GUI. All intermediate
result images are packed into a LeftCustomWidget and appended to the
according vbox_layout of the Mainview.ui
"""
select_image = pyqtSignal()
def __init__(self, image_path, MidCustomWidget, mid_panel, left_scroll_results, current_image,
slot, pipeline, settings_widget, left_slider, cat=None):
super(LeftCustomWidget, self).__init__()
self.setStyleSheet("font:Candara; font-size: 8pt;")
#self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
self.MidCustomWidget = MidCustomWidget
self.mid_panel = mid_panel
self.left_scroll_results = left_scroll_results
self.cat = cat
self.pipeline = pipeline
self.settings_widget = settings_widget
self.left_slider = left_slider
self.step = 0
self.image_label = QLabel()
if cat is None:
self.image_name = "Input - Image"
else:
self.setToolTip("Click here while holding 'CTRL' button to see used settings .")
index = self.pipeline.get_index(self.cat)
if index is not (len(self.pipeline.executed_cats) - 1):
self.image_name = str(cat.get_name() + " - " + cat.active_algorithm.name)
else:
self.image_label.setStyleSheet("background-color:DarkSlateGrey; font:Candara; font-size: 8pt;")
self.image_name = "Result image - " + str(cat.get_name() + " - " + cat.active_algorithm.name)
self.step = self.pipeline.get_index(cat) + 1
self.slot = slot
# self.setGeometry(0, 0, 300, 100)
self.LeftCustomWidgetLayout = QVBoxLayout()
self.LeftCustomWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.LeftCustomWidgetLayout.setSpacing(11)
self.setLayout(self.LeftCustomWidgetLayout)
#self.LeftCustomWidgetLayout.setAlignment(Qt.AlignTop)
self.image_label.setText(self.image_name)
self.image_label.setGeometry(0, 0, 150, 30)
self.pixmap = QPixmap(image_path)
self.pixmap_scaled_keeping_aspec = self.pixmap.scaledToWidth(315, Qt.SmoothTransformation)
self.image = QLabel()
#self.image.setAlignment(Qt.AlignLeft)
self.image.setGeometry(0, 0, 330, self.pixmap_scaled_keeping_aspec.height())
self.image.setPixmap(self.pixmap_scaled_keeping_aspec)
self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
self.LeftCustomWidgetLayout.addWidget(self.image)
self.LeftCustomWidgetLayout.addWidget(self.image_label)
self.setGeometry(0, 0, 315, self.pixmap_scaled_keeping_aspec.height() + 50)
if cat:
self.createSettings()
#self.settings_widget.layout().setContentsMargins(0, 0, 0, 0)
#self.settings_widget.layout().setSpacing(1)
self.settings_widget.hide()
self.LeftCustomWidgetLayout.addWidget(self.settings_widget)
self.select_image.connect(lambda: self.slot(self.MidCustomWidget.getCurrentImage(), self.cat))
"""
def sizeHint(self):
return QSize(self.pixmap_scaled_keeping_aspec.width(), self.pixmap_scaled_keeping_aspec.height() + 50)
def minimumSizeHint(self):
return QSize(self.pixmap_scaled_keeping_aspec.width(), self.pixmap_scaled_keeping_aspec.height() + 50)
"""
def mousePressEvent(self, QMouseEvent):
"""
this events sets the self.pixmap from this custom widget
into the middle panel of the GUI. Or more general: by clicking
on this widget the users wants to see this picture in the big display
area of the middle.
Args:
| *event*: the mouse press event
"""
if QMouseEvent.button() == QtCore.Qt.LeftButton:
try:
if self.step == 0 or self.cat is None:
self.mid_panel.setTitle(self.image_name)
else:
index = self.pipeline.get_index(self.cat)
if index is not (len(self.pipeline.executed_cats) - 1):
self.mid_panel.setTitle(self.image_name + " - Pipeline Position " + str(index + 1))
else:
self.setStyleSheet("font:Candara; font-size: 8pt;")
self.mid_panel.setTitle("Result image - " + self.image_name + " - Pipeline Position " + str(index + 1))
except (ValueError):
self.mid_panel.setTitle(self.image_name + " - Already Removed From Pipeline")
self.MidCustomWidget.setCurrentImage(self.pixmap)
# Connect the trigger signal to a slot.
# Emit the signal.
self.select_image.emit()
if (QMouseEvent.modifiers() & Qt.ControlModifier):
if self.settings_widget:
if self.settings_widget.isVisible():
self.settings_widget.hide()
else:
self.settings_widget.show()
def createSettings(self):
self.settings_widget.setDisabled(True)
self.settings_widget.setStyleSheet("color:silver;")
class ProcessWorker(QtCore.QThread):
progess_changed = pyqtSignal(object)
immediate_results_changed = pyqtSignal(object, QCheckBox)
finished = pyqtSignal()
def __init__(self, pipeline):
QtCore.QThread.__init__(self)
self.pipeline = pipeline
self.checkbox = None
def update_progress(self, event):
self.progess_changed.emit(event)
def update_add_immediate_result(self, event):
self.immediate_results_changed.emit(event, self.checkbox)
def setCheckbox(self, checkbox):
self.checkbox = checkbox
def run(self):
try:
self.pipeline.process()
except Exception as e:
print("Failed to process pipeline")
traceback.print_exc()
self.finished.emit()
class PipCustomWidget(QWidget):
"""
This Widget is used for the entry's in the pipeline of thr right
GUI panel.
"""
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.MidCustomWidget = parent
self.pixmap = None
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.MidCustomWidget.setPixmap(QtGui.QPixmap(self.pixmap), self.mid_panel)
class ComboBoxWidget(QWidget):
"""
This is the combobox widget as it is shown in the settings
panel of the GUI. It gets initialized with a name
With self.valueChanged on can connect a pyqt slot with the
combobox pyqtSignal.
"""
def __init__(self, name, options, alg, slot=None, default=None):
super(ComboBoxWidget, self).__init__()
self.activated = pyqtSignal()
# ComboBox itself
self.combobox = QtWidgets.QComboBox()
self.combobox.orientationCombo = PyQt5.QtWidgets.QComboBox()
self.combobox.setFixedWidth(220)
# Label
self.label = QtWidgets.QLabel()
self.label.setText(name + ": ")
self.SingleCheckBoxLayout = QBoxLayout(QBoxLayout.LeftToRight)
self.SingleCheckBoxLayout.addWidget(self.label)
self.SingleCheckBoxLayout.addWidget(self.combobox, Qt.AlignRight)
self.setLayout(self.SingleCheckBoxLayout)
self.setFixedHeight(50)
def set_modified():
alg.set_modified()
# options
for i in options:
self.add_item(i)
if default is not None:
index = self.combobox.findText(default)
if index != -1:
self.combobox.setCurrentIndex(index)
if slot is not None:
self.combobox.currentTextChanged.connect(slot)
self.combobox.currentTextChanged.connect(set_modified)
def add_item(self, option, image=None):
"""
Args:
| *option*: A string option refers to an entry which can be selected in the combobox later.
| *image*: An optional icon that can be shown combobox.
"""
if image is None:
self.combobox.addItem(option)
else:
self.combobox.addItem(QIcon(image), option)
class CheckBoxWidget(QWidget):
"""
Thi sis the checkbox widget as it is shown in the GUI.
The name is the displayed in fron of the checkbox in the GUI and
the default value is of type boolean.
With self.valueChanged on can connect a pyqt slot with the
checkbox pyqtSignal.
"""
def __init__(self, name, default, slot, alg):
super(CheckBoxWidget, self).__init__()
self.stateChanged = pyqtSignal()
# CheckBox itself
self.checkbox = PyQt5.QtWidgets.QCheckBox()
self.checkbox.setChecked(default)
# Label
self.label = PyQt5.QtWidgets.QLabel()
self.label.setText(name + ": ")
self.SingleCheckBoxLayout = PyQt5.QtWidgets.QGridLayout()
self.SingleCheckBoxLayout.setAlignment(Qt.AlignLeft)
self.SingleCheckBoxLayout.addWidget(self.label, 0, 0)
self.SingleCheckBoxLayout.addWidget(self.checkbox, 0, 1)
self.setLayout(self.SingleCheckBoxLayout)
self.setFixedHeight(50)
def set_modified():
alg.set_modified()
self.checkbox.stateChanged.connect(slot)
self.checkbox.stateChanged.connect(set_modified)
class SliderWidget(QWidget):
"""
This is a combined widget for a slider in the GUI. It
contains several input fields and a slider itself. By setting
the constructor value, the complete widget is connected in itself.
The name will be displayed in front of the widget. lower and upper
refer to the sliders range, step_size tells the distance of each step
and default is the preset value in the GUI.
The float_flag determines whether the slider should represent float values or not.
Set float_flag to true if you want to store float values.
With self.valueChanged on can connect a pyqt slot with the
float slider pyqtSignal.
A SliderWidget is built by a Slider, a QLabel and either a DoubleTextfield or an IntegerTextfield.
"""
def __init__(self, name, lower, upper, step_size, default, slot, float_flag, alg):
super(SliderWidget, self).__init__()
self.valueChanged = pyqtSignal()
self.internal_steps = abs(upper - lower) / step_size
def to_internal_coordinate(value):
return (self.internal_steps / (upper - lower)) * (value - lower)
def to_external_coordinate(value):
return lower + (value * (upper - lower)) / self.internal_steps
# Slider itself
self.slider = \
Slider(0, self.internal_steps, 1, to_internal_coordinate(default)).slider
# Textfield
if float_flag:
self.textfield = \
DoubleTextfield(lower, upper, step_size, default).textfield
else:
self.textfield = \
IntegerTextfield(lower, upper, step_size, default).textfield
# Label
self.label = QLabel()
self.label.setText(name + ": ")
# Connect Textfield with Slider
def textfield_value_changed(value):
self.slider.setValue(to_internal_coordinate(value))
def slider_value_changed(value):
self.textfield.setValue(to_external_coordinate(value))
def set_modified():
alg.set_modified()
self.textfield.valueChanged.connect(textfield_value_changed)
self.slider.valueChanged.connect(slider_value_changed)
self.SingleSlidersLayout = QBoxLayout(QBoxLayout.LeftToRight)
self.SingleSlidersLayout.addWidget(self.label)
self.SingleSlidersLayout.addWidget(self.slider)
self.SingleSlidersLayout.addWidget(self.textfield)
self.setLayout(self.SingleSlidersLayout)
self.setFixedHeight(50)
self.textfield.valueChanged.connect(lambda: slot(self.textfield.value()))
self.textfield.valueChanged.connect(set_modified)
# self.textfield.setValue(default)
class IntegerTextfield(QSpinBox):
"""
A customized QSpinBox that is used by the SliderWidget to allow users to enter integer values.
"""
def __init__(self, lower, upper, step_size, default):
super(IntegerTextfield, self).__init__()
# Textfield
self.textfield = QSpinBox()
self.textfield.setRange(lower, upper)
self.textfield.setSingleStep(step_size)
self.textfield.setValue(default)
self.textfield.setFixedWidth(50)
class DoubleTextfield(QDoubleSpinBox):
"""
A customized QDoubleSpinBox that is used by the SliderWidget to allow users to enter float values.
"""
def __init__(self, lower, upper, step_size, default):
super(DoubleTextfield, self).__init__()
# Textfield
self.textfield = QDoubleSpinBox()
self.textfield.setRange(lower, upper)
self.textfield.setSingleStep(step_size)
self.textfield.setValue(default)
self.textfield.setFixedWidth(50)
class Slider(QSlider):
"""
A customized QSlider that is used by the SliderWidget to allow users to
change a certain setting.
"""
def __init__(self, lower, upper, step_size, default):
super(Slider, self).__init__()
self.slider = QSlider(Qt.Horizontal)
self.slider.setFocusPolicy(Qt.StrongFocus)
self.slider.setTickPosition(QSlider.TicksBothSides)
self.slider.setTickInterval(step_size)
self.slider.setRange(lower, upper)
self.slider.setSingleStep(step_size)
self.slider.setValue(default)
self.slider.setPageStep(step_size)
if __name__ == '__main__':
pass
| bsd-2-clause | 5,772,202,558,477,753,000 | 36.558854 | 140 | 0.621677 | false |
tpeek/django-imager | imagersite/imagersite/settings.py | 1 | 5616 | """
Django settings for imagersite project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ['SECRET_KEY']
DEBUG = os.environ.get('DEBUG', False)
ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS')
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'imager_profile',
'imager_images',
'bootstrap3',
'registration',
'django.contrib.sites',
'sorl.thumbnail',
'django.contrib.gis',
'floppyforms',
'leaflet',
'djgeojson',
'rest_framework',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'imagersite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates').replace('\\', '/')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.media',
],
},
},
]
WSGI_APPLICATION = 'imagersite.wsgi.application'
# Default settings
BOOTSTRAP3 = {
# The URL to the jQuery JavaScript file
'jquery_url': '//code.jquery.com/jquery.min.js',
# The Bootstrap base URL
'base_url': '//maxcdn.bootstrapcdn.com/bootstrap/3.3.5/',
# The complete URL to the Bootstrap CSS file (None means derive it from base_url)
'css_url': 'http://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css',
# The complete URL to the Bootstrap CSS file (None means no theme)
'theme_url': 'http://bootswatch.com/cerulean/bootstrap.min.css',
# The complete URL to the Bootstrap JavaScript file (None means derive it from base_url)
'javascript_url': None,
# Put JavaScript in the HEAD section of the HTML document (only relevant if you use bootstrap3.html)
'javascript_in_head': False,
# Include jQuery with Bootstrap JavaScript (affects django-bootstrap3 template tags)
'include_jquery': True,
# Label class to use in horizontal forms
'horizontal_label_class': 'col-md-3',
# Field class to use in horizontal forms
'horizontal_field_class': 'col-md-9',
# Set HTML required attribute on required fields
'set_required': True,
# Set HTML disabled attribute on disabled fields
'set_disabled': False,
# Set placeholder attributes to label if no placeholder is provided
'set_placeholder': True,
# Class to indicate required (better to set this in your Django form)
'required_css_class': '',
# Class to indicate error (better to set this in your Django form)
'error_css_class': 'has-error',
# Class to indicate success, meaning the field has valid input (better to set this in your Django form)
'success_css_class': 'has-success',
# Renderers (only set these if you have studied the source and understand the inner workings)
'formset_renderers': {
'default': 'bootstrap3.renderers.FormsetRenderer',
},
'form_renderers': {
'default': 'bootstrap3.renderers.FormRenderer',
},
'field_renderers': {
'default': 'bootstrap3.renderers.FieldRenderer',
'inline': 'bootstrap3.renderers.InlineFieldRenderer',
},
}
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config()
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': '127.0.0.1:11211',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# Media file handling
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Email
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USER']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_PORT = 587
if os.environ.get('EMAIL_BACKEND', None):
EMAIL_BACKEND = os.environ['EMAIL_BACKEND']
# For django-registration-redux
ACCOUNT_ACTIVATION_DAYS = 3
LOGIN_REDIRECT_URL = '/profile/'
LOGIN_URL = '/login/'
# Added per
# http://stackoverflow.com/questions/11814059/site-matching-query-does-not-exist
SITE_ID = 4
| mit | -573,961,341,429,756,700 | 27.507614 | 107 | 0.683405 | false |
SherifRadwan/Scheduling-Simulator | CPUSchedulingSimulator.py | 1 | 10905 | #!/usr/bin/env python
from scheduler.job import *
from scheduler.fcfs import *
from scheduler.sjf import *
from scheduler.srjf import *
from scheduler.priority import *
from scheduler.preemptive_priority import *
from scheduler.round_robin import *
from PyQt4 import QtCore, QtGui
from random import randint
import copy
import time
class JobWidget(QtGui.QLabel):
# todo add colors
def __init__(self, text=''):
QtGui.QLabel.__init__(self, None)
self.setText(text)
self.setFrameShape(QtGui.QFrame.Box)
self.setWordWrap(True)
self.setIndent(0)
self.setFont(QtGui.QFont("Times", 20))#, QtGui.QFont.Bold))
#self.setScaledContents(True)
#def __copy__(self):
# #http://stackoverflow.com/questions/1500718/what-is-the-right-way-to-override-the-copy-deepcopy-operations-on-an-object-in-p
# newone = type(self)()
# newone.__dict__.update(self.__dict__)
# return newone
# need to be improved or changed
# TODO: reimplement this in QGraphics scene and view
class JobListWidget(QtGui.QScrollArea):
def __init__(self, title='', allowDuplicates = False, spacing = 5):
QtGui.QLabel.__init__(self, None)
self.widget = QtGui.QWidget()
self.titleLbl = QtGui.QLabel(title)
self.hbox = QtGui.QHBoxLayout()
self.hbox.setSpacing(spacing)
self.vbox = QtGui.QVBoxLayout()
self.vbox.addWidget(self.titleLbl)
self.vbox.addLayout(self.hbox)
self.widget.setLayout(self.vbox)
self.allowDuplicates = allowDuplicates
self.widgets = []
self.hbox.addStretch() # last one
self.setWidgetResizable(True)
self.setWidget(self.widget)
def addWidget(self, widget, sub_text=None):
if not self.allowDuplicates and self.hbox.indexOf(widget) != -1:
return
if self.allowDuplicates:
#widget = copy.copy(widget)
#widget = copy.deepcopy(widget)
widget = JobWidget(widget.text())
if sub_text:
widget.setText(widget.text() + '<sub>%s</sub>' % sub_text)
self.widgets.append(widget)
self.hbox.insertWidget(self.hbox.count() - 1, widget)
widget.show()
def removeWidget(self, widget):
#widget.clearLayout()
if self.hbox.indexOf(widget) != -1:
self.hbox.removeWidget(widget)
def clear(self):
# delete error causes some error if called agian
# figure it out
for widget in self.widgets:
try:
widget.deleteLater()
except:
continue
class SchedulerWidget(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self, None)
self.resize(700, 500)
self.setWindowTitle('CPU Scheduling Simulator')
self.sch = None
self.jobs = []
self.init_ui()
def init_ui(self):
self.timer = QtCore.QTimer()
self.timer.setInterval(1000) # one second
self.timer.timeout.connect(self.tick)
self.numLbl = QtGui.QLabel('No. of jobs: ')
self.jobs_no = QtGui.QSpinBox()
self.jobs_no.setValue(5)
self.jobs_no.valueChanged[int].connect(lambda v: self.generateJobs())
self.generateBtn = QtGui.QPushButton('&Generate')
self.startBtn = QtGui.QPushButton('&Start')
self.startBtn.setEnabled(False)
self.stopBtn = QtGui.QPushButton('&Stop')
self.stopBtn.setEnabled(False)
self.pauseBtn = QtGui.QPushButton('&Pause')
self.pauseBtn.clicked.connect(self.pauseSimu)
self.resumeBtn = QtGui.QPushButton('&Resume')
self.resumeBtn.clicked.connect(self.resumeSimu)
self.pauseBtn.setEnabled(False)
self.resumeBtn.setVisible(False)
self.generateBtn.clicked.connect(self.generateJobs)
self.startBtn.clicked.connect(self.startSimu)
self.stopBtn.clicked.connect(self.stopSimu)
self.speedLbl = QtGui.QLabel('Speed: ')
self.speedSlider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.speedSlider.setMinimum(0)
self.speedSlider.setMaximum(10)
self.speedSlider.setValue(1)
self.speedSlider.valueChanged[int].connect(lambda v: self.timer.setInterval(v*1000))
self.timeLbl = QtGui.QLabel('Time: ')
self.controlLayout = QtGui.QHBoxLayout()
self.controlLayout.addWidget(self.numLbl)
self.controlLayout.addWidget(self.jobs_no)
self.controlLayout.addWidget(self.generateBtn)
self.controlLayout.addWidget(self.startBtn)
self.controlLayout.addWidget(self.stopBtn)
self.controlLayout.addWidget(self.pauseBtn)
self.controlLayout.addWidget(self.resumeBtn)
self.controlLayout.addWidget(self.speedLbl)
self.controlLayout.addWidget(self.speedSlider)
self.controlLayout.addWidget(self.timeLbl)
self.algorithms = [FCFS, SJF, SRJF, Priority, PreemptivePriority, RoundRobin]
self.algoLabel = QtGui.QLabel('Algorithm: ')
self.comoboAlgo = QtGui.QComboBox()
self.comoboAlgo.activated[int].connect(self.algoChoosed)
self.timeSliceLabel = QtGui.QLabel('Time Slice: ')
self.timeSliceSpin = QtGui.QSpinBox()
self.timeSliceSpin.setMinimum(1)
self.timeSliceSpin.setValue(1)
self.timeSliceLabel.setVisible(False)
self.timeSliceSpin.setVisible(False)
self.comoboAlgo.setCurrentIndex(0)
self.algoChoosed(0)
self.comoboAlgo.addItems(['FCFS', 'SJF', 'SRJF', 'Priority', 'Preemptive Priority', 'RoundRobin'])
self.algoLayout = QtGui.QHBoxLayout()
self.algoLayout.addWidget(self.algoLabel)
self.algoLayout.addWidget(self.comoboAlgo)
self.algoLayout.addWidget(self.timeSliceLabel)
self.algoLayout.addWidget(self.timeSliceSpin)
self.algoLayout.addStretch()
self.aboutBtn = QtGui.QPushButton('&About')
self.aboutBtn.clicked.connect(self.aboutMsg)
self.algoLayout.addWidget(self.aboutBtn)
# control algo layout
self.calgoLayout = QtGui.QVBoxLayout()
self.calgoLayout.addLayout(self.controlLayout)
self.calgoLayout.addLayout(self.algoLayout)
self.calgoLayout.addStretch()
self.jobsTable = QtGui.QTableWidget(0, 3)
self.generateJobs()
# calgoLayout and table
self.splitter = QtGui.QSplitter(QtCore.Qt.Vertical)
self.calgoWidget = QtGui.QWidget()
self.calgoWidget.setLayout(self.calgoLayout)
self.splitter.addWidget(self.calgoWidget)
self.splitter.addWidget(self.jobsTable)
self.new_jobs = JobListWidget('New')
self.ready_jobs = JobListWidget('Ready')
self.running_jobs = JobListWidget('Running', True, 0)
self.terminated_jobs = JobListWidget('Terminated')
self.avgTurnaround = QtGui.QLabel('Average Turnaround time: ')
self.avgWaiting = QtGui.QLabel('Average Waiting time: ')
self.cpuUtilization = QtGui.QLabel('CPU Utilization: ')
self.mainLayout = QtGui.QVBoxLayout()
self.mainLayout.addWidget(self.splitter)
self.mainLayout.addWidget(self.new_jobs)
self.mainLayout.addWidget(self.ready_jobs)
self.mainLayout.addWidget(self.running_jobs)
self.mainLayout.addWidget(self.terminated_jobs)
self.mainLayout.addStretch()
self.mainLayout.addWidget(self.avgTurnaround)
self.mainLayout.addWidget(self.avgWaiting)
self.mainLayout.addWidget(self.cpuUtilization)
self.setLayout(self.mainLayout)
def tick(self):
if self.sch:
# to get the ready ones
#self.sch.get_ready_jobs(self.sch.current_time)
self.report_scheduler(self.sch)
#time.sleep(0.3)
self.timeLbl.setText('Time: %d' % self.sch.current_time)
if self.sch.isFinished():
self.stopSimu()
self.avgTurnaround.setText('Average Turnaround time: %f' % self.sch.avg_turnaround_time())
self.avgWaiting.setText('Average Waiting time: %f' % self.sch.avg_waiting_time())
self.cpuUtilization.setText('CPU Utilization: %f' % (self.sch.cpu_util() * 100.0) + '%')
else:
self.sch.tick()
def report_scheduler(self, sch):
for job in sch.ready:
self.add_to_ready(job.widget)
if sch.current_job:
self.add_to_running(sch.current_job.widget)
for job in sch.terminated:
self.add_to_terminated(job.widget)
#if self.sch.isIdle():
# self.add_to_running(JobWidget(' '))
def add_to_new(self, widget):
###
self.new_jobs.addWidget(widget)
def add_to_ready(self, widget):
self.new_jobs.removeWidget(widget)
self.ready_jobs.addWidget(widget)
def add_to_running(self, widget):
#widget.setText(widget.text() + )
self.ready_jobs.removeWidget(widget)
self.running_jobs.addWidget(widget, self.sch.current_time)
def add_to_terminated(self, widget):
self.terminated_jobs.addWidget(widget)
def job_status_changed(self, job, new_status):
if new_status == Status.Ready:
self.add_to_ready(job.widget)
elif new_status == Status.Running:
self.add_to_running(job.widget)
elif new_status == Status.Terminated:
self.add_to_terminated(job.widget)
def algoChoosed(self, index):
self.algo = self.algorithms[index]
if self.algo == RoundRobin:
self.timeSliceLabel.setVisible(True)
self.timeSliceSpin.setVisible(True)
else:
self.timeSliceLabel.setVisible(False)
self.timeSliceSpin.setVisible(False)
def generateJobs(self):
self.startBtn.setEnabled(True)
n = self.jobs_no.value()
if n > 0:
self.jobsTable.clear()
self.jobsTable.setRowCount(n)
for r in range(0, self.jobsTable.rowCount()):
self.jobsTable.setItem(r, 0, QtGui.QTableWidgetItem(str(randint(0, n))))
self.jobsTable.setItem(r, 1, QtGui.QTableWidgetItem(str(randint(1, n))))
self.jobsTable.setItem(r, 2, QtGui.QTableWidgetItem(str(randint(0, n))))
self.jobsTable.setVerticalHeaderLabels(['P%d'%p for p in range(1, n+1)])
self.jobsTable.setHorizontalHeaderLabels(['Arrival Time', 'Bursts', 'Priority'])
def startSimu(self):
self.new_jobs.clear()
self.ready_jobs.clear()
self.running_jobs.clear()
self.terminated_jobs.clear()
self.avgTurnaround.setText('Average Turnaround time: ')
self.avgWaiting.setText('Average Waiting time: ')
self.jobs = []
for r in range(0, self.jobsTable.rowCount()):
arrival_time = int(self.jobsTable.item(r, 0).text())
brusts = int(self.jobsTable.item(r, 1).text())
priority = int(self.jobsTable.item(r, 2).text())
job = Job(bursts=brusts, arrival_time=arrival_time, priority=priority, job_id=r+1)
widget = JobWidget('P%d' % job.job_id)
job.widget = widget
self.jobs.append(job)
self.add_to_new(job.widget)
#self.sch = FCFS(self.jobs)
if self.algo == RoundRobin:
self.sch = RoundRobin(self.timeSliceSpin.value(), self.jobs)
else:
self.sch = self.algo(self.jobs)
self.stopBtn.setEnabled(True)
self.pauseBtn.setEnabled(True)
self.startBtn.setEnabled(False)
self.timer.start()
def stopSimu(self):
self.timer.stop()
self.stopBtn.setEnabled(False)
self.pauseBtn.setEnabled(False)
self.startBtn.setEnabled(True)
self.resumeBtn.setVisible(False)
self.pauseBtn.setVisible(True)
def pauseSimu(self):
self.timer.stop()
self.pauseBtn.setVisible(False)
self.resumeBtn.setVisible(True)
def resumeSimu(self):
self.timer.start()
self.pauseBtn.setVisible(True)
self.resumeBtn.setVisible(False)
def aboutMsg(self):
QtGui.QMessageBox.about(self, 'About', 'CPU Scheduling Simulator<br>Operating Systems Project<br>By: Abdelrahman Ghanem')
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
en_eg= QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.Egypt)
QtCore.QLocale.setDefault(en_eg)
mwin = SchedulerWidget()
mwin.show()
sys.exit(app.exec_())
| gpl-2.0 | 7,541,484,197,705,449,000 | 31.552239 | 128 | 0.733517 | false |
saurabh6790/trufil_app | test/doctype/sample_allocation/sample_allocation.py | 1 | 5297 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
# For license information, please see license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.model.doc import addchild, Document
from webnotes import msgprint, _
from webnotes.model.bean import getlist
from webnotes.utils import cint, cstr, flt, now, nowdate, get_first_day, get_last_day, add_to_date, getdate
class DocType:
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def on_submit(self):
self.update_sample_status()
def update_sample_status(self):
samples = {}
for sample in getlist(self.doclist, 'sample_allocation_detail'):
samples[sample.get('sample_no')] = ''
self.test_allocation(sample)
for sample in samples:
webnotes.conn.sql("update tabSample set status = 'Assigned' where name = '%s'"%sample)
webnotes.conn.sql("commit")
def test_allocation(self, sample):
test_id = self.create_test(sample)
self.create_todo(sample, test_id)
def create_test(self, sample):
test = Document(sample.get("test"))
test.sample_no = sample.get("sample_no")
# test.tested_by = sample.get("tester")
test.shift_incharge_approval = sample.get("shift_incharge")
test.lab_incharge_approval = sample.get("lab_incharge")
test.save()
self.update_test_id(sample,test.name)
return test.name
def update_test_id(self,sample,test_name):
webnotes.errprint("update `tabSample Allocation Detail` set test_id='"+test_name+"' where sample_no='"+sample.get("sample_no")+"' and test='"+sample.get("test")+"' and parent='"+self.doc.name+"'")
webnotes.conn.sql("update `tabSample Allocation Detail` set test_id='"+test_name+"' where sample_no='"+sample.get("sample_no")+"' and test='"+sample.get("test")+"' and parent='"+self.doc.name+"'")
webnotes.conn.commit()
def create_todo(self, sample, test_id):
user = webnotes.conn.sql("select user_id from tabEmployee where name = '%s'"%(sample.get("tester")),as_list=1)
if user:
d = Document("ToDo")
d.owner = user[0][0]
d.reference_type = sample.get("test")
d.reference_name = test_id
d.priority = 'Medium'
d.date = nowdate()
d.assigned_by = webnotes.user.name
d.save(1)
def add_samples(self):
if self.doc.sample_type == 'Single Sample':
test_details = self.get_sample_wise_test(self.doc.sample_id)
# self.check_register(self.doc.sample_id)
self.fill_sample_alloacation_detail(test_details, self.doc.sample_id)
if self.doc.sample_type == "Batch":
samples = webnotes.conn.sql("select sample from `tabBatch Detail` bd where bd.parent = '%s' "%self.doc.batch, as_list=1)
for sample in samples:
# self.check_register(sample[0])
self.fill_sample_alloacation_detail(self.get_sample_wise_test(sample[0]), sample[0])
def check_register(self, sample_id):
register_no = webnotes.conn.sql("""select name from tabRegister where sample_no = '%s'"""%(sample_id))
if not register_no:
webnotes.msgprint("Registration not yet done for selected sample.",raise_exception=1)
def get_sample_wise_test(self, sample_id):
tests = self.check_group_or_other(sample_id)
if tests:
return tests
else:
return webnotes.conn.sql("""select test from `tabGroup Test`
where parent in (select test_required from `tabSample Entry`
where name=(select sample_entry from tabSample where name = '%s'))"""%(sample_id),as_list=1)
def check_group_or_other(self, sample_id):
test_required = webnotes.conn.sql(""" select test_required, name from `tabSample Entry`
where name = (select sample_entry from tabSample where name = '%s')"""%(sample_id))[0][0]
if test_required == 'Others':
return webnotes.conn.sql(""" select test from `tabRegister Test Details` where parent = '%s' """%(test_required[0][1]), as_list=1)
def fill_sample_alloacation_detail(self, sample_details, sample_id):
if self.doc.sample_id:
self.doclist=self.doc.clear_table(self.doclist,'sample_allocation_detail')
for sample in sample_details:
nl = addchild(self.doc, 'sample_allocation_detail', 'Sample Allocation Detail', self.doclist)
nl.sample_no = sample_id
nl.test = sample[0]
def get_samples(doctype, txt, searchfield, start, page_len, filters):
return webnotes.conn.sql(""" select s.name, se.priority from `tabSample` s,`tabSample Entry` se
where ifnull(s.status,'') not in ('Assigned') and s.sample_entry = se.name """)
def get_employee(doctype, txt, searchfield, start, page_len, filters):
conditions = make_condition(filters)
return webnotes.conn.sql(""" select parent from `tabEmployee Training Details` where
%(key)s like "%(txt)s" and %(cond)s
limit %(start)s, %(page_len)s """%{'key': searchfield, 'txt': "%%%s%%" % txt,
'cond': conditions, 'start': start, 'page_len': page_len})
def make_condition(filters):
cond = ''
if filters.get("level"):
cond += " level = '%(level)s'"%{'level': filters.get("level")}
if filters.get("test"):
cond += " and test = '%(test)s'"%{'test': filters.get("test")}
if filters.get("parent"):
if isinstance(filters.get("parent"), list):
parent = "('%s', '%s')"%(filters.get("parent")[0], filters.get("parent")[1])
cond += " and parent not in %(parent)s"%{'parent':parent}
else:
cond += " and parent != '%s'"%filters.get('parent')
return cond | agpl-3.0 | -2,092,437,030,698,776,600 | 40.390625 | 198 | 0.687937 | false |
jakobrunge/tigramite | tigramite/independence_tests/oracle_conditional_independence.py | 1 | 66715 | """Tigramite causal discovery for time series."""
# Author: Jakob Runge <[email protected]>
#
# License: GNU General Public License v3.0
from __future__ import print_function
import numpy as np
from collections import defaultdict, OrderedDict
from itertools import combinations, permutations
class OracleCI:
r"""Oracle of conditional independence test X _|_ Y | Z given a graph.
Class around link_coeff causal ground truth. X _|_ Y | Z is based on
assessing whether X and Y are d-separated given Z in the graph.
Class can be used just like a Tigramite conditional independence class
(e.g., ParCorr). The main use is for unit testing of PCMCI methods.
Parameters
----------
graph : array of shape [N, N, tau_max+1]
Causal graph.
links : dict
Dictionary of form {0:[(0, -1), ...], 1:[...], ...}.
Alternatively can also digest {0: [((0, -1), coeff, func)], ...}.
observed_vars : None or list, optional (default: None)
        Subset of keys in links defining which variables are
observed. If None, then all variables are observed.
selection_vars : None or list, optional (default: None)
        Subset of keys in links defining which variables are
selected (= always conditioned on at every time lag).
If None, then no variables are selected.
verbosity : int, optional (default: 0)
Level of verbosity.
"""
# documentation
@property
def measure(self):
"""
Concrete property to return the measure of the independence test
"""
return self._measure
    def __init__(self,
                 links=None,
                 observed_vars=None,
                 selection_vars=None,
                 graph=None,
                 graph_is_mag=False,
                 # tau_max=None,
                 verbosity=0):
        """Set up the oracle from either a links dictionary or a graph array.

        Exactly one of ``links`` / ``graph`` must be given; if ``graph`` is
        given it is converted to a canonical links representation (adding
        latent/selection variables for <-> and --- edges). Validates that
        observed_vars and selection_vars are sorted, duplicate-free subsets
        of range(N).
        """
        # self.tau_max = tau_max
        self.graph_is_mag = graph_is_mag

        if links is None:
            if graph is None:
                raise ValueError("Either links or graph must be specified!")
            else:
                # Get canonical DAG from graph, potentially interpreted as MAG
                # self.tau_max = graph.shape[2]
                (links,
                 observed_vars,
                 selection_vars) = self.get_links_from_graph(graph)
                # # TODO make checks and tau_max?
                # self.graph = graph

        self.verbosity = verbosity
        self._measure = 'oracle_ci'
        self.confidence = None
        self.links = links
        self.N = len(links)
        # self.tau_max = self._get_minmax_lag(self.links)

        # Cache of already computed d-separation results, keyed by str((X, Y, Z)).
        self.dsepsets = {}

        # Initialize observed vars; default is all variables observed.
        self.observed_vars = observed_vars
        if self.observed_vars is None:
            self.observed_vars = range(self.N)
        else:
            if not set(self.observed_vars).issubset(set(range(self.N))):
                raise ValueError("observed_vars must be subset of range(N).")
            if self.observed_vars != sorted(self.observed_vars):
                raise ValueError("observed_vars must ordered.")
            if len(self.observed_vars) != len(set(self.observed_vars)):
                raise ValueError("observed_vars must not contain duplicates.")

        self.selection_vars = selection_vars
        if self.selection_vars is not None:
            if not set(self.selection_vars).issubset(set(range(self.N))):
                raise ValueError("selection_vars must be subset of range(N).")
            if self.selection_vars != sorted(self.selection_vars):
                raise ValueError("selection_vars must ordered.")
            if len(self.selection_vars) != len(set(self.selection_vars)):
                raise ValueError("selection_vars must not contain duplicates.")
        else:
            self.selection_vars = []

        # ToDO: maybe allow to use user-tau_max, otherwise deduced from links
        self.graph = self.get_graph_from_links(tau_max=None)
def set_dataframe(self, dataframe):
"""Dummy function."""
pass
def _check_XYZ(self, X, Y, Z):
"""Checks variables X, Y, Z.
Parameters
----------
X, Y, Z : list of tuples
For a dependence measure I(X;Y|Z), Y is of the form [(varY, 0)],
where var specifies the variable index. X typically is of the form
[(varX, -tau)] with tau denoting the time lag and Z can be
multivariate [(var1, -lag), (var2, -lag), ...] .
Returns
-------
X, Y, Z : tuple
Cleaned X, Y, Z.
"""
# Get the length in time and the number of nodes
N = self.N
# Remove duplicates in X, Y, Z
X = list(OrderedDict.fromkeys(X))
Y = list(OrderedDict.fromkeys(Y))
Z = list(OrderedDict.fromkeys(Z))
# If a node in Z occurs already in X or Y, remove it from Z
Z = [node for node in Z if (node not in X) and (node not in Y)]
# Check that all lags are non-positive and indices are in [0,N-1]
XYZ = X + Y + Z
dim = len(XYZ)
# Ensure that XYZ makes sense
if np.array(XYZ).shape != (dim, 2):
raise ValueError("X, Y, Z must be lists of tuples in format"
" [(var, -lag),...], eg., [(2, -2), (1, 0), ...]")
if np.any(np.array(XYZ)[:, 1] > 0):
raise ValueError("nodes are %s, " % str(XYZ) +
"but all lags must be non-positive")
if (np.any(np.array(XYZ)[:, 0] >= N)
or np.any(np.array(XYZ)[:, 0] < 0)):
raise ValueError("var indices %s," % str(np.array(XYZ)[:, 0]) +
" but must be in [0, %d]" % (N - 1))
if np.all(np.array(Y)[:, 1] != 0):
raise ValueError("Y-nodes are %s, " % str(Y) +
"but one of the Y-nodes must have zero lag")
return (X, Y, Z)
def _get_lagged_parents(self, var_lag, exclude_contemp=False,
only_non_causal_paths=False, X=None, causal_children=None):
"""Helper function to yield lagged parents for var_lag from
self.links_coeffs.
Parameters
----------
var_lag : tuple
Tuple of variable and lag which is assumed <= 0.
exclude_contemp : bool
Whether contemporaneous links should be exluded.
Yields
------
Next lagged parent.
"""
var, lag = var_lag
for link_props in self.links[var]:
if len(link_props) == 3:
i, tau = link_props[0]
coeff = link_props[1]
else:
i, tau = link_props
coeff = 1.
if coeff != 0.:
if not (exclude_contemp and lag == 0):
if only_non_causal_paths:
if not ((i, lag + tau) in X and var_lag in causal_children):
yield (i, lag + tau)
else:
yield (i, lag + tau)
def _get_children(self):
"""Helper function to get children from links.
Note that for children the lag is positive.
Returns
-------
children : dict
Dictionary of form {0:[(0, 1), (3, 0), ...], 1:[], ...}.
"""
N = len(self.links)
children = dict([(j, []) for j in range(N)])
for j in range(N):
for link_props in self.links[j]:
if len(link_props) == 3:
i, tau = link_props[0]
coeff = link_props[1]
else:
i, tau = link_props
coeff = 1.
if coeff != 0.:
children[i].append((j, abs(tau)))
return children
def _get_lagged_children(self, var_lag, children, exclude_contemp=False,
only_non_causal_paths=False, X=None, causal_children=None):
"""Helper function to yield lagged children for var_lag from children.
Parameters
----------
var_lag : tuple
Tuple of variable and lag which is assumed <= 0.
children : dict
Dictionary of form {0:[(0, 1), (3, 0), ...], 1:[], ...}.
exclude_contemp : bool
Whether contemporaneous links should be exluded.
Yields
------
Next lagged child.
"""
var, lag = var_lag
# lagged_parents = []
for child in children[var]:
k, tau = child
if not (exclude_contemp and tau == 0):
# lagged_parents.append((i, lag + tau))
if only_non_causal_paths:
if not (var_lag in X and (k, lag + tau) in causal_children):
yield (k, lag + tau)
else:
yield (k, lag + tau)
    def _get_non_blocked_ancestors(self, Y, conds=None, mode='non_repeating',
                                   max_lag=None):
        """Helper function to return the non-blocked ancestors of variables Y.

        Returns a dictionary of ancestors for every y in Y. y is a tuple (
        var, lag) where lag <= 0. All ancestors with directed paths towards y
        that are not blocked by conditions in conds are included. In mode
        'non_repeating' an ancestor X^i_{t-\tau_i} with link X^i_{t-\tau_i}
        --> X^j_{ t-\tau_j} is only included if X^i_{t'-\tau_i} --> X^j_{
        t'-\tau_j} is not already part of the ancestors. The most lagged
        ancestor for every variable X^i defines the maximum ancestral time
        lag, which is also returned. In mode 'max_lag' ancestors are included
        up to the maximum time lag max_lag.

        It's main use is to return the maximum ancestral time lag max_lag of
        y in Y for every variable in self.links_coeffs.

        Parameters
        ----------
        Y : list of tuples
            Of the form [(var, -tau)], where var specifies the variable
            index and tau the time lag.
        conds : list of tuples
            Of the form [(var, -tau)], where var specifies the variable
            index and tau the time lag.
        mode : {'non_repeating', 'max_lag'}
            Whether repeating links should be excluded or ancestors should be
            followed up to max_lag.
        max_lag : int
            Maximum time lag to include ancestors.

        Returns
        -------
        ancestors : dict
            Includes ancestors for every y in Y.
        max_lag : int
            Maximum time lag to include ancestors.
        """
        def _repeating(link, seen_links):
            """Returns True if a link or its time-shifted version is already
            included in seen_links."""
            i, taui = link[0]
            j, tauj = link[1]

            for seen_link in seen_links:
                seen_i, seen_taui = seen_link[0]
                seen_j, seen_tauj = seen_link[1]

                # Same variable pair and same lag difference => same link,
                # only shifted in time.
                if (i == seen_i and j == seen_j
                    and abs(tauj-taui) == abs(seen_tauj-seen_taui)):
                    return True

            return False

        if conds is None:
            conds = []

        conds = [z for z in conds if z not in Y]

        N = len(self.links)

        # Initialize max. ancestral time lag for every N
        if mode == 'non_repeating':
            max_lag = 0
        else:
            if max_lag is None:
                raise ValueError("max_lag must be set in mode = 'max_lag'")

        # Selection variables are implicitly conditioned on at every lag.
        if self.selection_vars is not None:
            for selection_var in self.selection_vars:
                # print (selection_var, conds)
                # print([(selection_var, -tau_sel) for tau_sel in range(0, max_lag + 1)])
                conds += [(selection_var, -tau_sel) for tau_sel in range(0, max_lag + 1)]

        ancestors = dict([(y, []) for y in Y])

        for y in Y:
            j, tau = y   # tau <= 0
            if mode == 'non_repeating':
                max_lag = max(max_lag, abs(tau))
            seen_links = []
            # Breadth-first walk through parents, level by level.
            this_level = [y]
            while len(this_level) > 0:
                next_level = []
                for varlag in this_level:
                    for par in self._get_lagged_parents(varlag):
                        i, tau = par
                        if par not in conds and par not in ancestors[y]:
                            if ((mode == 'non_repeating' and
                                not _repeating((par, varlag), seen_links)) or
                                (mode == 'max_lag' and
                                 abs(tau) <= abs(max_lag))):
                                ancestors[y].append(par)
                                if mode == 'non_repeating':
                                    max_lag = max(max_lag,
                                                  abs(tau))
                                next_level.append(par)
                                seen_links.append((par, varlag))

                this_level = next_level

        return ancestors, max_lag
def _get_descendants(self, W, children, max_lag, ignore_time_bounds=False):
"""Get descendants of nodes in W up to time t.
Includes the nodes themselves.
"""
descendants = set(W)
for w in W:
j, tau = w
this_level = [w]
while len(this_level) > 0:
next_level = []
for varlag in this_level:
for child in self._get_lagged_children(varlag, children):
i, tau = child
if (child not in descendants
and (-max_lag <= tau <= 0 or ignore_time_bounds)):
descendants = descendants.union(set([child]))
next_level.append(child)
this_level = next_level
return list(descendants)
    def _has_any_path(self, X, Y, conds, max_lag=None,
                      starts_with=None, ends_with=None,
                      directed=False,
                      forbidden_nodes=None,
                      only_non_causal_paths=False,
                      check_optimality_cond=False,
                      optimality_cond_des_YM=None,
                      optimality_cond_Y=None,
                      only_collider_paths_with_vancs=False,
                      XYS=None,
                      return_path=False):
        """Returns True if X and Y are d-connected by any open path.

        Does breadth-first search from both X and Y and meets in the middle.
        Paths are walked according to the d-separation rules where paths can
        only traverse motifs <-- v <-- or <-- v --> or --> v --> or
        --> [v] <-- where [.] indicates that v is conditioned on.
        Furthermore, paths nodes (v, t) need to fulfill max_lag <= t <= 0
        and links cannot be traversed backwards.

        Parameters
        ----------
        X, Y : lists of tuples
            Of the form [(var, -tau)], where var specifies the variable
            index and tau the time lag.
        conds : list of tuples
            Of the form [(var, -tau)], where var specifies the variable
            index and tau the time lag.
        max_lag : int
            Maximum time lag.
        starts_with : {None, 'tail', 'arrohead'}
            Whether to only consider paths starting with particular mark at X.
        ends_with : {None, 'tail', 'arrohead'}
            Whether to only consider paths ending with particular mark at Y.

        Returns
        -------
        bool or list
            True/False for connectivity; if return_path=True, the found open
            path (translated to observed-variable indices) or False.
        """
        if max_lag is None:
            max_lag = self._get_max_lag_from_XYZ(X, Y, conds)

        def _walk_to_parents(v, fringe, this_path, other_path):
            """Helper function to update paths when walking to parents."""
            found_connection = False
            for w in self._get_lagged_parents(v,
                only_non_causal_paths=only_non_causal_paths, X=X,
                causal_children=causal_children):
                # Cannot walk into conditioned parents and
                # cannot walk beyond t or max_lag
                i, t = w

                if w == x and starts_with == 'arrowhead':
                    continue

                if w == y and ends_with == 'arrowhead':
                    continue

                if (w not in conds and w not in forbidden_nodes and
                    # (w, v) not in seen_links and
                    t <= 0 and abs(t) <= max_lag):
                    # if ((w, 'tail') not in this_path and
                    #     (w, None) not in this_path):
                    if (w not in this_path or
                        ('tail' not in this_path[w] and None not in this_path[w])):
                        if self.verbosity > 1:
                            print("Walk parent: %s --> %s  " %(v, w))
                        fringe.append((w, 'tail'))
                        if w not in this_path:
                            this_path[w] = {'tail' : (v, 'arrowhead')}
                        else:
                            this_path[w]['tail'] = (v, 'arrowhead')
                        # seen_links.append((v, w))
                    # Determine whether X and Y are connected
                    # (w, None) indicates the start or end node X/Y
                    # if ((w, 'tail') in other_path
                    #     or (w, 'arrowhead') in other_path
                    #     or (w, None) in other_path):
                    if w in other_path:
                        found_connection = (w, 'tail')
                        if self.verbosity > 1:
                            print("Found connection: ", found_connection)
                        break
            return found_connection, fringe, this_path

        def _walk_to_children(v, fringe, this_path, other_path):
            """Helper function to update paths when walking to children."""
            found_connection = False
            for w in self._get_lagged_children(v, children,
                only_non_causal_paths=only_non_causal_paths, X=X,
                causal_children=causal_children):
                # You can also walk into conditioned children,
                # but cannot walk beyond t or max_lag
                i, t = w

                if w == x and starts_with == 'tail':
                    continue

                if w == y and ends_with == 'tail':
                    continue

                if (w not in forbidden_nodes and
                    # (w, v) not in seen_links and
                    t <= 0 and abs(t) <= max_lag):
                    # if ((w, 'arrowhead') not in this_path and
                    #     (w, None) not in this_path):
                    if (w not in this_path or
                        ('arrowhead' not in this_path[w] and None not in this_path[w])):
                        if self.verbosity > 1:
                            print("Walk child: %s --> %s  " %(v, w))
                        fringe.append((w, 'arrowhead'))
                        # this_path[(w, 'arrowhead')] = (v, 'tail')
                        if w not in this_path:
                            this_path[w] = {'arrowhead' : (v, 'tail')}
                        else:
                            this_path[w]['arrowhead'] = (v, 'tail')
                        # seen_links.append((v, w))
                    # Determine whether X and Y are connected
                    # If the other_path contains w with a tail, then w must
                    # NOT be conditioned on. Alternatively, if the other_path
                    # contains w with an arrowhead, then w must be
                    # conditioned on.
                    # if (((w, 'tail') in other_path and w not in conds)
                    #     or ((w, 'arrowhead') in other_path and w in conds)
                    #     or (w, None) in other_path):
                    if w in other_path:
                        if (('tail' in other_path[w] and w not in conds) or
                            ('arrowhead' in other_path[w] and w in conds) or
                            (None in other_path[w])):
                            found_connection = (w, 'arrowhead')
                            if self.verbosity > 1:
                                print("Found connection: ", found_connection)
                            break
            return found_connection, fringe, this_path

        def _walk_fringe(this_level, fringe, this_path, other_path):
            """Helper function to walk each fringe, i.e., the path from X and Y,
            respectively."""
            found_connection = False

            # Starting/ending mark constraints only apply at the start/end
            # nodes themselves, marked by (node, None).
            if starts_with == 'arrowhead':
                if len(this_level) == 1 and this_level[0] == (x, None):
                    (found_connection, fringe,
                     this_path) = _walk_to_parents(x, fringe,
                                                   this_path, other_path)
                    return found_connection, fringe, this_path, other_path

            elif starts_with == 'tail':
                if len(this_level) == 1 and this_level[0] == (x, None):
                    (found_connection, fringe,
                     this_path) = _walk_to_children(x, fringe,
                                                    this_path, other_path)
                    return found_connection, fringe, this_path, other_path

            if ends_with == 'arrowhead':
                if len(this_level) == 1 and this_level[0] == (y, None):
                    (found_connection, fringe,
                     this_path) = _walk_to_parents(y, fringe,
                                                   this_path, other_path)
                    return found_connection, fringe, this_path, other_path

            elif ends_with == 'tail':
                if len(this_level) == 1 and this_level[0] == (y, None):
                    (found_connection, fringe,
                     this_path) = _walk_to_children(y, fringe,
                                                    this_path, other_path)
                    return found_connection, fringe, this_path, other_path

            for v, mark in this_level:
                if v in conds:
                    if (mark == 'arrowhead' or mark == None) and directed is False:
                        # Motif: --> [v] <--
                        # If standing on a condition and coming from an
                        # arrowhead, you can only walk into parents
                        (found_connection, fringe,
                         this_path) = _walk_to_parents(v, fringe,
                                                       this_path, other_path)
                        if found_connection: break
                else:
                    if only_collider_paths_with_vancs:
                        continue

                    if (mark == 'tail' or mark == None):
                        # Motif: <-- v <-- or <-- v -->
                        # If NOT standing on a condition and coming from
                        # a tail mark, you can walk into parents or
                        # children
                        (found_connection, fringe,
                         this_path) = _walk_to_parents(v, fringe,
                                                       this_path, other_path)
                        if found_connection: break

                        if not directed:
                            (found_connection, fringe,
                             this_path) = _walk_to_children(v, fringe,
                                                            this_path, other_path)
                            if found_connection: break

                    elif mark == 'arrowhead':
                        # Motif: --> v -->
                        # If NOT standing on a condition and coming from
                        # an arrowhead mark, you can only walk into
                        # children
                        (found_connection, fringe,
                         this_path) = _walk_to_children(v, fringe,
                                                        this_path, other_path)
                        if found_connection: break

                    if check_optimality_cond and v[0] in self.observed_vars:
                        # if v is not descendant of YM
                        # and v is not connected to Y given X OS\Cu
                        # print("v = ", v)
                        cond4a = v not in optimality_cond_des_YM
                        cond4b = not self._has_any_path(X=[v], Y=optimality_cond_Y,
                                                        conds=conds + X,
                                                        max_lag=None,
                                                        starts_with=None,
                                                        ends_with=None,
                                                        forbidden_nodes=None, #list(prelim_Oset),
                                                        return_path=False)
                        # print(cond4a, cond4b)
                        if cond4a and cond4b:
                            (found_connection, fringe,
                             this_path) = _walk_to_parents(v, fringe,
                                                           this_path, other_path)
                            # print(found_connection)
                            if found_connection: break

            if self.verbosity > 1:
                print("Updated fringe: ", fringe)
            return found_connection, fringe, this_path, other_path

        def backtrace_path():
            """Helper function to get path from start point, end point,
            and connection found."""
            path = [found_connection[0]]
            node, mark = found_connection

            if 'tail' in pred[node]:
                mark = 'tail'
            else:
                mark = 'arrowhead'
            # print(found_connection)
            # Walk backwards from the meeting point to x ...
            while path[-1] != x:
                # print(path, node, mark, pred[node])
                prev_node, prev_mark = pred[node][mark]
                path.append(prev_node)
                # Determine which entry of pred[prev_node] to follow next,
                # consistent with the d-separation motif just traversed.
                if prev_mark == 'arrowhead':
                    if prev_node not in conds:
                        # if pass_through_colliders:
                        #     if 'tail' in pred[prev_node] and pred[prev_node]['tail'] != (node, mark):
                        #         mark = 'tail'
                        #     else:
                        #         mark = 'arrowhead'
                        # else:
                        mark = 'tail'
                    elif prev_node in conds:
                        mark = 'arrowhead'
                elif prev_mark == 'tail':
                    if 'tail' in pred[prev_node] and pred[prev_node]['tail'] != (node, mark):
                        mark = 'tail'
                    else:
                        mark = 'arrowhead'
                node = prev_node

            path.reverse()

            node, mark = found_connection
            if 'tail' in succ[node]:
                mark = 'tail'
            else:
                mark = 'arrowhead'

            # ... then walk forwards from the meeting point to y.
            while path[-1] != y:
                next_node, next_mark = succ[node][mark]
                path.append(next_node)
                if next_mark == 'arrowhead':
                    if next_node not in conds:
                        # if pass_through_colliders:
                        #     if 'tail' in succ[next_node] and succ[next_node]['tail'] != (node, mark):
                        #         mark = 'tail'
                        #     else:
                        #         mark = 'arrowhead'
                        # else:
                        mark = 'tail'
                    elif next_node in conds:
                        mark = 'arrowhead'
                elif next_mark == 'tail':
                    if 'tail' in succ[next_node] and succ[next_node]['tail'] != (node, mark):
                        mark = 'tail'
                    else:
                        mark = 'arrowhead'
                node = next_node

            return path

        if conds is None:
            conds = []

        if forbidden_nodes is None:
            forbidden_nodes = []

        conds = [z for z in conds if z not in Y and z not in X]
        # print(X, Y, conds)

        # Selection variables are implicitly conditioned on at every lag.
        if self.selection_vars is not None:
            for selection_var in self.selection_vars:
                conds += [(selection_var, -tau_sel) for tau_sel in range(0, max_lag + 1)]

        N = len(self.links)
        children = self._get_children()

        if only_non_causal_paths:
            anc_Y_dict = self._get_non_blocked_ancestors(Y=Y, conds=None, mode='max_lag',
                                             max_lag=max_lag)[0]
            # print(anc_Y_dict)
            anc_Y = []
            for y in Y:
                anc_Y += anc_Y_dict[y]
            des_X = self._get_descendants(X, children=children, max_lag=max_lag)
            # Mediators lie on causal paths from X to Y.
            mediators = set(anc_Y).intersection(set(des_X)) - set(Y) - set(X)
            causal_children = list(mediators) + Y
        else:
            causal_children = None

        if only_collider_paths_with_vancs:
            vancs_dict = self._get_non_blocked_ancestors(Y=XYS, conds=None, mode='max_lag',
                                                         max_lag=max_lag)[0]
            vancs = set()
            for xys in XYS:
                vancs = vancs.union(set(vancs_dict[xys]))
            vancs = list(vancs) + XYS
            conds = vancs
        # else:
        #     vancs = None

        # Iterate through nodes in X and Y
        for x in X:
            for y in Y:

                # seen_links = []
                # predecessor and successors in search
                # (x, None) where None indicates start/end nodes, later (v,
                # 'tail') or (w, 'arrowhead') indicate how a link ends at a node
                pred = {x : {None: None}}
                succ = {y : {None: None}}

                # initialize fringes, start with forward from X
                forward_fringe = [(x, None)]
                reverse_fringe = [(y, None)]

                # Bidirectional BFS: always expand the smaller fringe.
                while forward_fringe and reverse_fringe:
                    if len(forward_fringe) <= len(reverse_fringe):
                        if self.verbosity > 1:
                            print("Walk from X since len(X_fringe)=%d "
                                  "<= len(Y_fringe)=%d" % (len(forward_fringe),
                                  len(reverse_fringe)))
                        this_level = forward_fringe
                        forward_fringe = []
                        (found_connection, forward_fringe, pred,
                         succ) = _walk_fringe(this_level, forward_fringe, pred,
                                              succ)
                        # print(pred)
                        if found_connection:
                            if return_path:
                                backtraced_path = backtrace_path()
                                return [(self.observed_vars.index(node[0]), node[1])
                                        for node in backtraced_path
                                        if node[0] in self.observed_vars]
                            else:
                                return True
                    else:
                        if self.verbosity > 1:
                            print("Walk from Y since len(X_fringe)=%d "
                                  "> len(Y_fringe)=%d" % (len(forward_fringe),
                                  len(reverse_fringe)))
                        this_level = reverse_fringe
                        reverse_fringe = []
                        (found_connection, reverse_fringe, succ,
                         pred) = _walk_fringe(this_level, reverse_fringe, succ,
                                              pred)
                        if found_connection:
                            if return_path:
                                backtraced_path = backtrace_path()
                                return [(self.observed_vars.index(node[0]), node[1])
                                        for node in backtraced_path
                                        if node[0] in self.observed_vars]
                            else:
                                return True

                    if self.verbosity > 1:
                        print("X_fringe = %s \n" % str(forward_fringe) +
                              "Y_fringe = %s" % str(reverse_fringe))

        return False
def _get_max_lag_from_XYZ(self, X, Y, Z):
"""Get maximum non-repeated ancestral time lag.
"""
# Get maximum non-repeated ancestral time lag
_, max_lag_X = self._get_non_blocked_ancestors(X, conds=Z,
mode='non_repeating')
_, max_lag_Y = self._get_non_blocked_ancestors(Y, conds=Z,
mode='non_repeating')
_, max_lag_Z = self._get_non_blocked_ancestors(Z, conds=Z,
mode='non_repeating')
# Get max time lag among the ancestors
max_lag = max(max_lag_X, max_lag_Y, max_lag_Z)
if self.verbosity > 0:
print("Max. non-repeated ancestral time lag: ", max_lag)
return max_lag
def _is_dsep(self, X, Y, Z, max_lag=None):
"""Returns whether X and Y are d-separated given Z in the graph.
X, Y, Z are of the form (var, lag) for lag <= 0. D-separation is
based on:
1. Assessing maximum time lag max_lag of last ancestor of any X, Y, Z
with non-blocked (by Z), non-repeating directed path towards X, Y, Z
in the graph. 'non_repeating' means that an ancestor X^i_{ t-\tau_i}
with link X^i_{t-\tau_i} --> X^j_{ t-\tau_j} is only included if
X^i_{t'-\tau_i} --> X^j_{ t'-\tau_j} for t'!=t is not already part of
the ancestors.
2. Using the time series graph truncated at max_lag we then test
d-separation between X and Y conditional on Z using breadth-first
search of non-blocked paths according to d-separation rules.
Parameters
----------
X, Y, Z : list of tuples
List of variables chosen for current independence test.
max_lag : int, optional (default: None)
Used here to constrain the _is_dsep function to the graph
truncated at max_lag instead of identifying the max_lag from
ancestral search.
Returns
-------
dseparated : bool, or path
True if X and Y are d-separated given Z in the graph.
"""
N = len(self.links)
if self.verbosity > 0:
print("Testing X=%s d-sep Y=%s given Z=%s in TSG" %(X, Y, Z))
if max_lag is not None:
# max_lags = dict([(j, max_lag) for j in range(N)])
if self.verbosity > 0:
print("Set max. time lag to: ", max_lag)
else:
max_lag = self._get_max_lag_from_XYZ(X, Y, Z)
# Store overall max. lag
self.max_lag = max_lag
# _has_any_path is the main function that searches open paths
any_path = self._has_any_path(X, Y, conds=Z, max_lag=max_lag)
if any_path:
dseparated = False
else:
dseparated = True
return dseparated
    def check_shortest_path(self, X, Y, Z,
                            max_lag=None,  # compute_ancestors=False,
                            starts_with=None, ends_with=None,
                            forbidden_nodes=None,
                            directed=False,
                            only_non_causal_paths=False,
                            check_optimality_cond=False,
                            optimality_cond_des_YM=None,
                            optimality_cond_Y=None,
                            return_path=False):
        """Returns path between X and Y given Z in the graph.

        X, Y, Z are of the form (var, lag) for lag <= 0. D-separation is
        based on:

        1. Assessing maximum time lag max_lag of last ancestor of any X, Y, Z
        with non-blocked (by Z), non-repeating directed path towards X, Y, Z
        in the graph. 'non_repeating' means that an ancestor X^i_{ t-\tau_i}
        with link X^i_{t-\tau_i} --> X^j_{ t-\tau_j} is only included if
        X^i_{t'-\tau_i} --> X^j_{ t'-\tau_j} for t'!=t is not already part of
        the ancestors.

        2. Using the time series graph truncated at max_lag we then test
        d-separation between X and Y conditional on Z using breadth-first
        search of non-blocked paths according to d-separation rules including
        selection variables.

        Optionally only considers paths starting/ending with specific marks)
        and makes available the ancestors up to max_lag of X, Y, Z. This may take
        a very long time, however.

        Parameters
        ----------
        X, Y, Z : list of tuples
            List of variables chosen for testing paths.
        max_lag : int, optional (default: None)
            Used here to constrain the has_path function to the graph
            truncated at max_lag instead of identifying the max_lag from
            ancestral search.
        starts_with : {None, 'tail', 'arrohead'}
            Whether to only consider paths starting with particular mark at X.
        ends_with : {None, 'tail', 'arrohead'}
            Whether to only consider paths ending with particular mark at Y.

        Returns
        -------
        path : list or False
            Returns path or False if no path exists.
        """
        N = len(self.links)

        # Translate from observed_vars index to full variable set index
        X = [(self.observed_vars[x[0]], x[1]) for x in X]
        Y = [(self.observed_vars[y[0]], y[1]) for y in Y]
        Z = [(self.observed_vars[z[0]], z[1]) for z in Z]
        # print(X)
        # print(Y)
        # print(Z)

        if check_optimality_cond:
            optimality_cond_des_YM = [(self.observed_vars[x[0]], x[1])
                                      for x in optimality_cond_des_YM]
            optimality_cond_Y = [(self.observed_vars[x[0]], x[1])
                                 for x in optimality_cond_Y]

        # Get the array to test on
        X, Y, Z = self._check_XYZ(X, Y, Z)

        if self.verbosity > 0:
            print("Testing X=%s d-sep Y=%s given Z=%s in TSG" %(X, Y, Z))

        if max_lag is not None:
            # max_lags = dict([(j, max_lag) for j in range(N)])
            if self.verbosity > 0:
                print("Set max. time lag to: ", max_lag)
        else:
            max_lag = self._get_max_lag_from_XYZ(X, Y, Z)

        # Store overall max. lag
        self.max_lag = max_lag

        # _has_any_path is the main function that searches open paths
        any_path = self._has_any_path(X, Y, conds=Z, max_lag=max_lag,
                                      starts_with=starts_with, ends_with=ends_with,
                                      return_path=return_path,
                                      directed=directed,
                                      only_non_causal_paths=only_non_causal_paths,
                                      check_optimality_cond=check_optimality_cond,
                                      optimality_cond_des_YM=optimality_cond_des_YM,
                                      optimality_cond_Y=optimality_cond_Y,
                                      forbidden_nodes=forbidden_nodes)

        # Translate the found path back to observed-variable indices.
        if any_path:
            if return_path:
                any_path_observed = [(self.observed_vars.index(node[0]), node[1]) for node in any_path
                                     if node[0] in self.observed_vars]
            else:
                any_path_observed = True
        else:
            any_path_observed = False

        if self.verbosity > 0:
            print("_has_any_path     = ", any_path)
            print("_has_any_path_obs = ", any_path_observed)

        # if compute_ancestors:
        #     if self.verbosity > 0:
        #         print("Compute ancestors.")

        #     # Get ancestors up to maximum ancestral time lag incl. repeated
        #     # links
        #     self.anc_all_x, _ = self._get_non_blocked_ancestors(X, conds=Z,
        #                                 mode='max_lag', max_lag=max_lag)
        #     self.anc_all_y, _ = self._get_non_blocked_ancestors(Y, conds=Z,
        #                                 mode='max_lag', max_lag=max_lag)
        #     self.anc_all_z, _ = self._get_non_blocked_ancestors(Z, conds=Z,
        #                                 mode='max_lag', max_lag=max_lag)

        return any_path_observed
def run_test(self, X, Y, Z=None, tau_max=0, cut_off='2xtau_max',
verbosity=0):
"""Perform oracle conditional independence test.
Calls the d-separation function.
Parameters
----------
X, Y, Z : list of tuples
X,Y,Z are of the form [(var, -tau)], where var specifies the
variable index in the observed_vars and tau the time lag.
tau_max : int, optional (default: 0)
Not used here.
cut_off : {'2xtau_max', 'max_lag', 'max_lag_or_tau_max'}
Not used here.
Returns
-------
val, pval : Tuple of floats
The test statistic value and the p-value.
"""
# Translate from observed_vars index to full variable set index
X = [(self.observed_vars[x[0]], x[1]) for x in X]
Y = [(self.observed_vars[y[0]], y[1]) for y in Y]
Z = [(self.observed_vars[z[0]], z[1]) for z in Z]
# Get the array to test on
X, Y, Z = self._check_XYZ(X, Y, Z)
if not str((X, Y, Z)) in self.dsepsets:
self.dsepsets[str((X, Y, Z))] = self._is_dsep(X, Y, Z)
if self.dsepsets[str((X, Y, Z))]:
val = 0.
pval = 1.
else:
val = 1.
pval = 0.
if verbosity > 1:
self._print_cond_ind_results(val=val, pval=pval, cached=False,
conf=None)
# Return the value and the pvalue
return val, pval
def get_measure(self, X, Y, Z=None, tau_max=0):
"""Returns dependence measure.
Returns 0 if X and Y are d-separated given Z in the graph and 1 else.
Parameters
----------
X, Y [, Z] : list of tuples
X,Y,Z are of the form [(var, -tau)], where var specifies the
variable index in the observed_vars and tau the time lag.
tau_max : int, optional (default: 0)
Maximum time lag. This may be used to make sure that estimates for
different lags in X, Z, all have the same sample size.
Returns
-------
val : float
The test statistic value.
"""
# Translate from observed_vars index to full variable set index
X = [(self.observed_vars[x[0]], x[1]) for x in X]
Y = [(self.observed_vars[y[0]], y[1]) for y in Y]
Z = [(self.observed_vars[z[0]], z[1]) for z in Z]
# Check XYZ
X, Y, Z = _check_XYZ(X, Y, Z)
if not str((X, Y, Z)) in self.dsepsets:
self.dsepsets[str((X, Y, Z))] = self._is_dsep(X, Y, Z)
if self.dsepsets[str((X, Y, Z))]:
return 0.
else:
return 1.
def _print_cond_ind_results(self, val, pval=None, cached=None, conf=None):
"""Print results from conditional independence test.
Parameters
----------
val : float
Test stastistic value.
pval : float, optional (default: None)
p-value
conf : tuple of floats, optional (default: None)
Confidence bounds.
"""
printstr = " val = %.3f" % (val)
if pval is not None:
printstr += " | pval = %.5f" % (pval)
if conf is not None:
printstr += " | conf bounds = (%.3f, %.3f)" % (
conf[0], conf[1])
if cached is not None:
printstr += " %s" % ({0:"", 1:"[cached]"}[cached])
print(printstr)
def get_model_selection_criterion(self, j, parents, tau_max=0):
"""
Base class assumption that this is not implemented. Concrete classes
should override when possible.
"""
raise NotImplementedError("Model selection not"+\
" implemented for %s" % self.measure)
def _reverse_patt(self, patt):
"""Inverts a link pattern"""
if patt == "":
return ""
left_mark, middle_mark, right_mark = patt[0], patt[1], patt[2]
if left_mark == "<":
new_right_mark = ">"
else:
new_right_mark = left_mark
if right_mark == ">":
new_left_mark = "<"
else:
new_left_mark = right_mark
return new_left_mark + middle_mark + new_right_mark
    def get_links_from_graph(self, graph):
        """
        Constructs links_coeffs dictionary, observed_vars,
        and selection_vars from graph array (MAG or DAG).

        In the case of MAGs, for every <-> or --- link further
        latent and selection variables, respectively, are added.
        This corresponds to a canonical DAG (Richardson Spirtes 2002).

        For ADMGs "---" are not supported, but also links of type "+->"
        exist, which corresponds to having both "-->" and "<->".

        Can be used to evaluate d-separation in MAG/DAGs.

        Returns
        -------
        (links, observed_vars, selection_vars) : tuple
            Canonical links dict (auxiliary latent/selection variables get
            indices >= N), the list of observed variable indices, and the
            list of selection variable indices.
        """
        if "U3" not in str(graph.dtype):
            raise ValueError("graph must be of type '<U3'!")

        # Allowed edge types differ between MAG and ADMG interpretation.
        if self.graph_is_mag:
            edge_types = ["-->", "<--", "<->", "---"]
        else:
            edge_types = ["-->", "<--", "<->", "+->", "<-+"] #, "--+", "+--"]

        N, N, tau_maxplusone = graph.shape
        tau_max = tau_maxplusone - 1

        observed_vars = list(range(N))

        selection_vars = []

        links = {j: [] for j in observed_vars }

        # Add further latent variables to accommodate <-> and --- links
        latent_index = N
        for i, j, tau in zip(*np.where(graph)):

            edge_type = graph[i, j, tau]

            if edge_type not in edge_types:
                raise ValueError(
                    "Links can only be in %s " %str(edge_types)
                    )

            if tau == 0:
                if edge_type != self._reverse_patt(graph[j, i, 0]):
                    raise ValueError(
                        "graph needs to have consistent lag-zero patterns (eg"
                        " graph[i,j,0]='-->' requires graph[j,i,0]='<--')"
                        )

                # Consider contemporaneous links only once
                if j > i:
                    continue

            # Restrict lagged links
            else:
                if edge_type not in ["-->", "<->", "---", "+->"]: #, "--+"]:
                    raise ValueError(
                        "Lagged links can only be in ['-->', '<->', '---', '+->']"
                        )

            if edge_type == "-->":
                links[j].append((i, -tau))
            elif edge_type == "<--":
                links[i].append((j, -tau))
            elif edge_type == "<->":
                # Canonical DAG: replace by a common latent parent.
                links[latent_index] = []
                links[i].append((latent_index, 0))
                links[j].append((latent_index, -tau))
                latent_index += 1
            elif edge_type == "---":
                # Canonical DAG: replace by a common selection child.
                links[latent_index] = []
                selection_vars.append(latent_index)
                links[latent_index].append((i, -tau))
                links[latent_index].append((j, 0))
                latent_index += 1
            elif edge_type == "+->":
                # ADMG: both a directed edge and a latent confounder.
                links[j].append((i, -tau))
                links[latent_index] = []
                links[i].append((latent_index, 0))
                links[j].append((latent_index, -tau))
                latent_index += 1
            elif edge_type == "<-+":
                links[i].append((j, -tau))
                links[latent_index] = []
                links[i].append((latent_index, 0))
                links[j].append((latent_index, -tau))
                latent_index += 1
            # elif edge_type == "+--":
            #     links[i].append((j, -tau))
            #     links[latent_index] = []
            #     selection_vars.append(latent_index)
            #     links[latent_index].append((i, -tau))
            #     links[latent_index].append((j, 0))
            #     latent_index += 1
            # elif edge_type == "--+":
            #     links[j].append((i, -tau))
            #     links[latent_index] = []
            #     selection_vars.append(latent_index)
            #     links[latent_index].append((i, -tau))
            #     links[latent_index].append((j, 0))
            #     latent_index += 1

        return links, observed_vars, selection_vars
def _get_minmax_lag(self, links):
"""Helper function to retrieve tau_min and tau_max from links
"""
N = len(links)
# Get maximum time lag
min_lag = np.inf
max_lag = 0
for j in range(N):
for link_props in links[j]:
if len(link_props) == 3:
i, lag = link_props[0]
coeff = link_props[1]
else:
i, lag = link_props
coeff = 1.
# func = link_props[2]
if coeff != 0.:
min_lag = min(min_lag, abs(lag))
max_lag = max(max_lag, abs(lag))
return min_lag, max_lag
    def get_graph_from_links(self, tau_max=None):
        """
        Constructs graph (DAG or MAG or ADMG) from links, observed_vars,
        and selection_vars.
        For ADMGs uses the Latent projection operation (Pearl 2009).

        If ``self.graph_is_mag`` is True, each ordered pair of observed
        variables is tested for m-separation given the observed ancestors
        of both endpoints plus the ancestors of all selection variables;
        connected pairs get an edge whose marks are derived from
        ancestorship. Otherwise an ADMG is built via latent projection
        using directed / bidirected path queries restricted to hidden
        (non-observed) intermediate nodes.

        Parameters
        ----------
        tau_max : int or None
            Maximum time lag of the returned graph. If None, the maximum
            lag found in ``self.links`` is used. Must be at least that
            maximum lag otherwise.

        Returns
        -------
        graph : array of shape (N, N, tau_max + 1), dtype '<U3'
            String-edge graph among the observed variables, e.g. '-->',
            '<->', '---', '+->'.
        """
        # TODO: use MAG from DAG construction procedure (lecture notes)
        # issues with tau_max?
        if self.graph_is_mag is False and len(self.selection_vars) > 0:
            raise ValueError("ADMG do not support selection_vars.")
        # NOTE(review): N_all is assigned but not used below.
        N_all = len(self.links)
        # If tau_max is None, compute from links_coeffs
        _, max_lag_links = self._get_minmax_lag(self.links)
        if tau_max is None:
            tau_max = max_lag_links
        else:
            if max_lag_links > tau_max:
                raise ValueError("tau_max must be >= maximum lag in links_coeffs; choose tau_max=None")
        N = len(self.observed_vars)
        # Init graph
        graph = np.zeros((N, N, tau_max + 1), dtype='<U3')
        graph[:] = ""
        # We will enumerate the observed variables with (i,j) which refers to the index in MAG graph
        # while x, y iterates through the variables in the underlying DAG
        # Loop over the observed variables
        for j, y in enumerate(self.observed_vars):
            for i, x in enumerate(self.observed_vars):
                for tau in range(0, tau_max + 1):
                    if (x, -tau) != (y, 0):
                        if self.graph_is_mag:
                            # MAG case: conditioning set Z = observed ancestors
                            # of (y, 0) and (x, -tau) plus ancestors of the
                            # selection variables (minus the endpoints).
                            dag_anc_y, _ = self._get_non_blocked_ancestors(Y=[(y, 0)], conds=None,
                                                    mode='max_lag',
                                                    max_lag=tau_max)
                            # Only consider observed ancestors
                            mag_anc_y = [anc for anc in dag_anc_y[(y, 0)]
                                         if anc[0] in self.observed_vars]
                            dag_anc_x, _ = self._get_non_blocked_ancestors(Y=[(x, -tau)],
                                                    conds=None, mode='max_lag',
                                                    max_lag=tau_max)
                            # Only consider observed ancestors
                            mag_anc_x = [anc for anc in dag_anc_x[(x, -tau)]
                                         if anc[0] in self.observed_vars]
                            # Add selection variable ancestors
                            dag_anc_s = set()
                            for s in self.selection_vars:
                                dag_anc_s_here, _ = self._get_non_blocked_ancestors(Y=[(s, 0)],
                                                    conds=None, mode='max_lag',
                                                    max_lag=tau_max)
                                dag_anc_s = dag_anc_s.union(set(dag_anc_s_here[(s, 0)]))
                            dag_anc_s = list(dag_anc_s)
                            # Only consider observed ancestors
                            mag_anc_s = [anc for anc in dag_anc_s
                                         if anc[0] in self.observed_vars]
                            Z = set([z for z in mag_anc_y + mag_anc_x + mag_anc_s if z != (y, 0) and z != (x, -tau)])
                            Z = list(Z)
                            separated = self._is_dsep(X=[(x, -tau)], Y=[(y, 0)], Z=Z, max_lag=None)
                            # If X and Y are connected given Z, mark a link
                            if not separated:
                                # (i, -tau) --> j
                                # Edge marks follow ancestorship (selection
                                # ancestors count as ancestors of both):
                                # ancestor of y only -> tail at x; ancestor of
                                # neither -> '<->'; ancestor of both -> '---'.
                                if (x, -tau) in dag_anc_y[(y, 0)] + dag_anc_s and (y, 0) not in dag_anc_x[(x, -tau)] + dag_anc_s:
                                    graph[i, j, tau] = "-->"
                                    if tau == 0:
                                        graph[j, i, 0] = "<--"
                                elif (x, -tau) not in dag_anc_y[(y, 0)] + dag_anc_s and (y, 0) not in dag_anc_x[(x, -tau)] + dag_anc_s:
                                    graph[i, j, tau] = "<->"
                                    if tau == 0:
                                        graph[j, i, 0] = "<->"
                                elif (x, -tau) in dag_anc_y[(y, 0)] + dag_anc_s and (y, 0) in dag_anc_x[(x, -tau)] + dag_anc_s:
                                    graph[i, j, tau] = "---"
                                    if tau == 0:
                                        graph[j, i, 0] = "---"
                        else:
                            # ADMG case: each contemporaneous unordered pair is
                            # handled once (the j < i iteration fills both marks).
                            if tau == 0 and j >= i:
                                continue
                            # edge_types = ["-->", "<->", "+->"]
                            # Latent projection operation:
                            # (i) ADMG contains i --> j iff there is a directed path x --> ... --> y on which
                            # every non-endpoint vertex is in hidden variables (= not in observed_vars)
                            # (ii) ADMG contains i <-> j iff there exists a path of the form x <-- ... --> y on
                            # which every non-endpoint vertex is non-collider AND in L (=not in observed_vars)
                            # Forbidding all other observed variable-lag nodes
                            # restricts path interiors to hidden variables.
                            observed_varslags = set([(v, -lag) for v in self.observed_vars
                                             for lag in range(0, tau_max + 1)]) - set([(x, -tau), (y, 0)])
                            cond_one_xy = self._has_any_path(X=[(x, -tau)], Y=[(y, 0)],
                                                  conds=[],
                                                  max_lag=None,
                                                  starts_with='tail',
                                                  ends_with='arrowhead',
                                                  directed=True,
                                                  forbidden_nodes=list(observed_varslags),
                                                  return_path=False)
                            # A reverse directed path is only possible
                            # contemporaneously (lagged edges point forward).
                            if tau == 0:
                                cond_one_yx = self._has_any_path(X=[(y, 0)], Y=[(x, 0)],
                                                      conds=[],
                                                      max_lag=None,
                                                      starts_with='tail',
                                                      ends_with='arrowhead',
                                                      directed=True,
                                                      forbidden_nodes=list(observed_varslags),
                                                      return_path=False)
                            else:
                                cond_one_yx = False
                            cond_two = self._has_any_path(X=[(x, -tau)], Y=[(y, 0)],
                                                  conds=[],
                                                  max_lag=None,
                                                  starts_with='arrowhead',
                                                  ends_with='arrowhead',
                                                  directed=False,
                                                  forbidden_nodes=list(observed_varslags),
                                                  return_path=False)
                            # Directed paths in both directions imply a cycle.
                            if cond_one_xy and cond_one_yx:
                                raise ValueError("Cyclic graph!")
                            # print((x, -tau), y, cond_one_xy, cond_one_yx, cond_two)
                            # Only (i) holds: i --> j
                            if cond_one_xy and not cond_two:
                                graph[i, j, tau] = "-->"
                                if tau == 0:
                                    graph[j, i, 0] = "<--"
                            elif cond_one_yx and not cond_two:
                                graph[i, j, tau] = "<--"
                                if tau == 0:
                                    graph[j, i, 0] = "-->"
                            # Only (ii) holds: i <-> j
                            elif not cond_one_xy and not cond_one_yx and cond_two:
                                graph[i, j, tau] = "<->"
                                if tau == 0:
                                    graph[j, i, 0] = "<->"
                            # Both (i) and (ii) hold: i +-> j
                            elif cond_one_xy and cond_two:
                                graph[i, j, tau] = "+->"
                                if tau == 0:
                                    graph[j, i, 0] = "<-+"
                            elif cond_one_yx and cond_two:
                                graph[i, j, tau] = "<-+"
                                if tau == 0:
                                    graph[j, i, 0] = "+->"
        return graph
if __name__ == '__main__':
    # Ad-hoc demonstration / debugging script: constructs example
    # link/graph specifications, builds the corresponding graph with
    # OracleCI and plots it. The many commented-out sections below are
    # retained developer experiments; the save path is a developer
    # machine path.
    import tigramite.plotting as tp
    from matplotlib import pyplot as plt
    def lin_f(x): return x
    # N = 20
    # links = tests.a_random_process(
    #  N=N, L=2*N, coupling_coeffs=[0.7, -0.7],
    #  coupling_funcs=[lin_f, lin_f], auto_coeffs=[0., 0.5],
    #  tau_max=5, contemp_fraction=0.3, num_trials=1,
    #  model_seed=3)
    # N = 50
    # links = {0: [((0, -1), 0.5)]}
    # for j in range(1, N):
    #     links[j] = [((j, -1), 0.6), ((j-1, -1), 0.5)]
    # links = {0: [((0, -1), 0.5)],
    #          1: [((0, -1), 0.5), ((2, -1), 0.5)],
    #          2: [((2, -1), 0.)],
    #          3: [((3, -1), 0.), ((2, -1), 0.5), ((4, -1), 0.5)],
    #          4: [((4, -1), 0.5),],
    #          }
    # links = {0: [((0, -1), 0.)],
    #          1: [((1, -1), 0.)],
    #          2: [((2, -1), 0.), ((1, 0), 0.6), ((0, 0), 0.6)],
    #          3: [((3, -1), 0.), ((2, 0), -0.5)],
    #          }
    # links = {0: [((0, -1), 0.9)],
    #          1: [((1, -1), 0.8, lin_f), ((0, -1), 0.8, lin_f)],
    #          2: [((2, -1), 0.7, lin_f), ((1, 0), 0.6, lin_f)],
    #          3: [((3, -1), 0.7, lin_f), ((2, 0), -0.5, lin_f)],
    #          }
    # links = {0: [((0, -1), 0.5)],
    #          1: [((0, -1), 0.5), ((2, -1), 0.5)],
    #          2: [],
    #          3: [((2, -1), 0.4), ((4, -1), -0.5)],
    #          4: [((4, -1), 0.4)],
    #          }
    # def setup_nodes(auto_coeff, N):
    #     links = {}
    #     for j in range(N):
    #         links[j] = [((j, -1), auto_coeff, lin_f)]
    #     return links
    # coeff = 0.5
    # links = setup_nodes(0.7, N=3)
    # for i in [0, 2]:
    #     links[1].append(((i, 0), coeff, lin_f))
    # links = setup_nodes(0., N=3)
    # links[1].append(((1, -1), coeff, lin_f))
    # links[1].append(((0, 0), coeff, lin_f))
    # links[2].append(((1, 0), coeff, lin_f))
    # links[2].append(((0, 0), coeff, lin_f))
    # Example with one selection variable (index 5) and two latent
    # confounder-style nodes; only 0..3 are observed.
    coeff = 0.5
    links ={
            0: [((4, 0), coeff, lin_f), ((2, 0), coeff, lin_f)],
            1: [((4, 0), coeff, lin_f)],
            2: [],
            3: [],
            4: [],
            5: [((2, 0), coeff, lin_f), ((3, 0), coeff, lin_f)]
            }
    observed_vars = [0, 1, 2, 3]
    selection_vars = [5]
    # graph = np.zeros((8, 8, 4), dtype='<U3')
    # # EXample C from paper plus M
    # # X = 0, M = 1, Y = 2, Z1 = 3, etc
    # var_names = ['X-0', 'M-1', 'Y-2', 'Z1-3', 'Z2-4', 'Z3-5', 'Z4-6', 'Z5-7']
    # # Causal paths
    # graph[0, 1, 0] = '-->'
    # graph[1, 0, 0] = '<--'
    # graph[1, 2, 0] = '-->'
    # graph[2, 1, 0] = '<--'
    # graph[0, 2, 0] = '-->'
    # graph[2, 0, 0] = '<--'
    # # Others
    # # Z1 = 3
    # graph[0, 3, 0] = '<->'
    # graph[3, 0, 0] = '<->'
    # graph[3, 2, 0] = '-->'
    # graph[2, 3, 0] = '<--'
    # graph[3, 4, 0] = '<->'
    # graph[4, 3, 0] = '<->'
    # # Z2 = 4
    # graph[2, 4, 0] = '<->'
    # graph[4, 2, 0] = '<->'
    # graph[4, 3, 0] = '<->'
    # graph[4, 5, 0] = '<->'
    # # Z3 = 5
    # graph[5, 4, 0] = '<->'
    # # Z4 = 6
    # graph[6, 5, 0] = '-->'
    # graph[5, 6, 0] = '<--'
    # graph[6, 0, 0] = '-->'
    # graph[0, 6, 0] = '<--'
    # # Z5 = 7
    # graph[7, 2, 0] = '<->'
    # graph[2, 7, 0] = '<->'
    # graph[7, 0, 0] = '-->'
    # graph[0, 7, 0] = '<--'
    # Hand-coded contemporaneous string-edge graph ("Example B");
    # graph[i, j, 0] and graph[j, i, 0] always hold mirrored marks.
    graph = np.zeros((16, 16, 1), dtype='<U3')
    # EXample B from paper
    # X = 0, M = 1, Y = 2, Z1 = 3, etc (S is last)
    var_names = ['X-0', 'M-1', 'Y-2', 'Z1-3', 'Z2-4',
                 'Z3-5', 'Z4-6', 'Z5-7', 'Z6-8', 'Z7-9', 'Z8-10',
                 'Z9-11', 'Z10-12', 'Z11-13', 'Z12-14', 'S-15']
    # Causal paths
    graph[0, 1, 0] = '-->'
    graph[1, 0, 0] = '<--'
    graph[1, 2, 0] = '-->'
    graph[2, 1, 0] = '<--'
    graph[0, 2, 0] = '-->'
    graph[2, 0, 0] = '<--'
    # Others
    # Z1 = 3
    graph[0, 3, 0] = '<->'
    graph[3, 0, 0] = '<->'
    graph[3, 2, 0] = '-->'
    graph[2, 3, 0] = '<--'
    graph[3, 1, 0] = '-->'
    graph[1, 3, 0] = '<--'
    graph[3, 7, 0] = '-->'
    graph[7, 3, 0] = '<--'
    graph[3, 8, 0] = '-->'
    graph[8, 3, 0] = '<--'
    # Z2 = 4
    graph[4, 2, 0] = '-->'
    graph[2, 4, 0] = '<--'
    # Z3 = 5
    graph[5, 1, 0] = '-->'
    graph[1, 5, 0] = '<--'
    # Z4 = 6
    graph[6, 2, 0] = '-->'
    graph[2, 6, 0] = '<--'
    # Z5 = 7
    graph[7, 2, 0] = '<->'
    graph[2, 7, 0] = '<->'
    graph[7, 8, 0] = '<->'
    graph[8, 7, 0] = '<->'
    graph[7, 10, 0] = '<->'
    graph[10, 7, 0] = '<->'
    # Z6 = 8
    graph[8, 12, 0] = '-->'
    graph[12, 8, 0] = '<--'
    # Z7 = 9
    graph[9, 8, 0] = '-->'
    graph[8, 9, 0] = '<--'
    # Z8 = 10
    graph[10, 11, 0] = '<->'
    graph[11, 10, 0] = '<->'
    # Z9 = 11
    graph[2, 11, 0] = '-->'
    graph[11, 2, 0] = '<--'
    # Z10 = 12
    graph[1, 12, 0] = '-->'
    graph[12, 1, 0] = '<--'
    # Z11 = 13
    graph[13, 0, 0] = '-->'
    graph[0, 13, 0] = '<--'
    graph[13, 4, 0] = '-->'
    graph[4, 13, 0] = '<--'
    # Z12 = 14
    # No links
    # S = 15
    graph[15, 0, 0] = '-->'
    graph[0, 15, 0] = '<--'
    graph[15, 13, 0] = '-->'
    graph[13, 15, 0] = '<--'
    # tp.plot_time_series_graph(link_matrix=graph, save_name="/home/rung_ja/Downloads/tsg.pdf")
    # links = {0: [((0, -1), 0.8, lin_f)],
    #          1: [((1, -1), 0.8, lin_f), ((0, -1), 0.5, lin_f)],
    #          2: [((2, -1), 0.8, lin_f), ((1, 0), -0.6, lin_f)]}
    # oracle = OracleCI(links=links, observed_vars=observed_vars,
    #                   selection_vars=selection_vars,
    #                   verbosity=2)
    # print(cond_ind_test.get_graph_from_links()[:,:,0])
    # Example C
    # NOTE(review): these reassignments overwrite the earlier links /
    # observed_vars; only this Example-C setup is actually used below.
    links ={
            0: [((8, 0), coeff, lin_f), ((6, 0), coeff, lin_f), ((7, 0), coeff, lin_f)],
            1: [((0, 0), coeff, lin_f)],
            2: [((0, 0), coeff, lin_f), ((1, 0), coeff, lin_f), ((3, 0), coeff, lin_f), ((9, 0), coeff, lin_f), ((12, 0), coeff, lin_f)],
            3: [((8, 0), coeff, lin_f), ((10, 0), coeff, lin_f)],
            4: [((9, 0), coeff, lin_f), ((10, 0), coeff, lin_f), ((11, 0), coeff, lin_f)],
            5: [((11, 0), coeff, lin_f), ((6, 0), coeff, lin_f)],
            6: [],
            7: [((12, 0), coeff, lin_f)],
            8: [],
            9: [],
            10: [],
            11: [],
            12: []}
    observed_vars = [0, 1, 2, 3, 4, 5, 6, 7]
    # links ={
    #         0: [((2, 0), coeff, lin_f)],
    #         1: [((0, 0), coeff, lin_f), ((3, 0), coeff, lin_f)],
    #         2: [],
    #         3: [((2, 0), coeff, lin_f)], }
    # observed_vars = [0, 1, 2, 3]
    # links ={
    #         0: [((3, 0), coeff, lin_f)],
    #         1: [((2, 0), coeff, lin_f), ((4, 0), coeff, lin_f)],
    #         2: [((3, 0), coeff, lin_f), ((4, 0), coeff, lin_f)],
    #         3: [],
    #         4: []}
    # observed_vars = [0, 1, 2]
    oracle = OracleCI(links=links,
        observed_vars=observed_vars,
        graph_is_mag=True,
        # selection_vars=selection_vars,
        # verbosity=2
        )
    graph = oracle.graph
    print(graph[:,:,0])
    # NOTE(review): var_names still holds the 16 Example-B labels while
    # this graph was built from only 8 observed variables — confirm the
    # intended labels before relying on the plot.
    tp.plot_graph(link_matrix=graph, var_names=var_names, figsize=(5, 5),
        save_name="/home/rung_ja/Downloads/tsg.pdf")
    # X = [(0, 0)]
    # Y = [(2, 0)]
    # node = (3, 0)
    # prelim_Oset = set([(3, 0)])
    # S = set([])
    # collider_path_nodes = set([])
    # path = oracle._has_any_path(X=X, Y=Y,
    #                             conds=list(prelim_Oset),
    #                             max_lag=None,
    #                             starts_with='arrowhead',
    #                             ends_with='arrowhead',
    #                             forbidden_nodes=None,
    #                             return_path=True)
    # print(path)
    # cond_ind_test = OracleCI(graph=graph)
    # links, observed_vars, selection_vars = cond_ind_test.get_links_from_graph(graph)
    # print("{")
    # for j in links.keys():
    #     parents = repr([(p, 'coeff', 'lin_f') for p in links[j]])
    #     print(f"{j: 1d}" ":" f"{parents:s},")
    # print(repr(observed_vars))
    # cond_ind_test = OracleCI(graph=graph, verbosity=2)
    # X = [(0, 0)]
    # Y = [(2, 0)]
    # Z = [(7, 0), (3, 0), (6, 0), (5, 0), (4, 0)] #(1, -3), (1, -2), (0, -2), (0, -1), (0, -3)]
    # #(j, -2) for j in range(N)] + [(j, 0) for j in range(N)]
    # # print(oracle._get_non_blocked_ancestors(Z, Z=None, mode='max_lag',
    # #                                         max_lag=2))
    # # cond_ind_test = OracleCI(links, observed_vars=observed_vars, verbosity=2)
    # print(cond_ind_test.get_shortest_path(X=X, Y=Y, Z=Z,
    #                                       max_lag=None, compute_ancestors=False,
    #                                       backdoor=True))
    # anc_x=None #oracle.anc_all_x[X[0]]
    # anc_y=None #oracle.anc_all_y[Y[0]]
    # anc_xy=None # []
    # # # for z in Z:
    # # #     anc_xy += oracle.anc_all_z[z]
    # fig, ax = tp.plot_tsg(links,
    #                       X=[(observed_vars[x[0]], x[1]) for x in X],
    #                       Y=[(observed_vars[y[0]], y[1]) for y in Y],
    #                       Z=[(observed_vars[z[0]], z[1]) for z in Z],
    #                       anc_x=anc_x, anc_y=anc_y,
    #                       anc_xy=anc_xy)
    # fig.savefig("/home/rung_ja/Downloads/tsg.pdf")
stormi/tsunami | src/primaires/joueur/commandes/chgroupe/__init__.py | 1 | 2994 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'chgroupe' et ses sous-commandes.
Dans ce fichier se trouve la commande même.
"""
from primaires.interpreteur.commande.commande import Commande
from primaires.joueur.commandes.chgroupe.commande import PrmCommande
from primaires.joueur.commandes.chgroupe.joueur import PrmJoueur
class CmdChgroupe(Commande):
"""Commande 'chgroupe'.
"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "chgroupe", "chgroup")
self.groupe = "administrateur"
self.nom_categorie = "groupes"
self.aide_courte = "déplacement d'un groupe à l'autre"
self.aide_longue = \
"Cette commande permet de déplacer une commande ou un joueur " \
"d'un groupe à un autre. Elle permet, par exemple, d'avancer un " \
"joueur au niveau des administrateurs ou réciproquement. Elle " \
"permet également de gérer les commandes accessibles aux " \
"différents groupes. Pour ajouter, supprimer et avoir plus de " \
"détails sur les groupes, référez-vous à l'aide de la commande " \
"%groupe%."
def ajouter_parametres(self):
"""Ajout des paramètres"""
prm_commande = PrmCommande()
prm_joueur = PrmJoueur()
self.ajouter_parametre(prm_commande)
self.ajouter_parametre(prm_joueur)
| bsd-3-clause | 7,193,316,183,022,921,000 | 43.477612 | 79 | 0.715436 | false |
namuan/bin-utils | download_html_to_file.py | 1 | 1102 | from argparse import ArgumentParser
from pathlib import Path
import requests
# arguments
# -d paypal_general_errors.html -p https://developer.paypal.com/docs/classic/api/errors/general/
def download_html(url):
print("Downloading webpage from {0}".format(url))
page = requests.get(url)
return page.text
def write_html_to_disk(file_path, contents):
file_path.write_text(contents)
print("Finished writing file to {0}".format(file_path))
def parse_args():
parser = ArgumentParser(description="Download HTML page and save to disk")
parser.add_argument("-p", "--page", type=str, help="HTML page to download")
parser.add_argument(
"-d", "--downloadfile", type=str, help="Name of the file to save"
)
return parser.parse_args()
def main():
args = parse_args()
url = args.page
filename = args.downloadfile
file_path = Path(filename)
if not file_path.exists():
contents = download_html(url)
write_html_to_disk(file_path, contents)
else:
print("File already exists")
if __name__ == "__main__":
main()
| gpl-3.0 | 3,552,462,800,278,940,700 | 24.045455 | 96 | 0.662432 | false |
BBN-Q/Auspex | src/auspex/instruments/tektronix.py | 1 | 8584 | # Copyright 2016 Raytheon BBN Technologies
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
__all__ = ['DPO72004C','DPO2024','DPO2014','RSA3308A']
from auspex.log import logger
from .instrument import SCPIInstrument, Command, StringCommand, BoolCommand, FloatCommand, IntCommand, is_valid_ipv4
import numpy as np
class _TekDPscope(SCPIInstrument):
"""Tektronix DP Oscilloscope Base Class"""
encoding = StringCommand(get_string="DAT:ENC?;", set_string="DAT:ENC {:s};",
allowed_values=["ASCI","RIB","RPB","FPB","SRI","SRP","SFP"])
byte_depth = IntCommand(get_string="WFMOutpre:BYT_Nr?;",
set_string="WFMOutpre:BYT_Nr {:d};", allowed_values=[1,2,4,8])
data_start = IntCommand(get_string="DAT:STAR?;", set_string="DAT:STAR {:d};")
data_stop = IntCommand(get_string="DAT:STOP?;", set_string="DAT:STOP {:d};")
preamble = StringCommand(get_string="WFMOutpre?;") # Curve preamble
record_length = IntCommand(get_string="HOR:ACQLENGTH?;")
record_rate = FloatCommand(get_string="HOR:SAMPLER?;")
button_press = StringCommand(set_string="FPAnel:PRESS {:s};",
allowed_values=["RUnstop", "SINGleseq"])
def clear(self):
self.interface.write("CLEAR ALL;")
def snap(self):
"""Sets the start and stop points to the the current front panel display.
This doesn't actually seem to work, strangely."""
self.interface.write("DAT SNAp;")
def get_curve(self, channel=1, byte_depth=2):
channel_string = "CH{:d}".format(channel)
self.interface.write("DAT:SOU {:s};".format(channel_string))
#self.source_channel = 1
self.encoding = "SRI" # Signed ints
record_length = self.record_length
self.data_start = 1
self.data_stop = record_length
self.byte_depth = byte_depth
strf_from_depth = {1: 'b', 2: 'h', 4: 'l', 8: 'q'}
curve = self.interface.query_binary_values("CURVe?;", datatype=strf_from_depth[byte_depth])
scale = self.interface.value('WFMO:YMU?;')
offset = self.interface.value('WFMO:YOF?;')
curve = (curve - offset)*scale
return curve
def get_timebase(self):
return np.linspace(0, self.record_length/self.record_rate, self.record_length)
def get_fastaq_curve(self, channel=1):
channel_string = "CH{:d}".format(channel)
self.interface.write("DAT:SOU {:s};".format(channel_string))
self.source_channel = 1
self.encoding = "SRP" # Unsigned ints
self.byte_depth = 8
self.data_start = 1
self.data_stop = self.record_length
curve = self.interface.query_binary_values("CURVe?;", datatype='Q').reshape((1000,252))
return curve
def get_math_curve(self, channel=1):
pass
class DPO72004C(_TekDPscope):
fast_frame = StringCommand(get_string="HORizontal:FASTframe:STATE?;", set_string="HORizontal:FASTframe:STATE {:s};", value_map = {True: '1', False: '0'})
num_fast_frames = IntCommand(get_string="HOR:FAST:COUN?;", set_string="HOR:FAST:COUN {:d};")
def get_curve(self, channel=1, byte_depth=2):
channel_string = "CH{:d}".format(channel)
self.interface.write("DAT:SOU {:s};".format(channel_string))
#self.source_channel = 1
self.encoding = "SRI" # Signed ints
record_length = self.record_length
self.data_start = 1
self.data_stop = record_length
self.byte_depth = byte_depth
strf_from_depth = {1: 'b', 2: 'h', 4: 'l', 8: 'q'}
curve = self.interface.query_binary_values("CURVe?;", datatype=strf_from_depth[byte_depth])
scale = self.interface.value('WFMO:YMU?;')
offset = self.interface.value('WFMO:YOF?;')
curve = (curve - offset)*scale
if self.fast_frame:
curve.resize((self.num_fast_frames, record_length))
return curve
def __init__(self, resource_name, *args, **kwargs):
resource_name += "::4000::SOCKET" #user guide recommends HiSLIP protocol
super(DPO72004C, self).__init__(resource_name, *args, **kwargs)
self.name = "Tektronix DPO72004C Oscilloscope"
class DPO2024(_TekDPscope):
def __init__(self, resource_name, *args, **kwargs):
super(DPO2024, self).__init__(resource_name, *args, **kwargs)
self.name = "Tektronix DPO2024 Oscilloscope"
def connect(self, resource_name=None, interface_type=None):
if resource_name is not None:
self.resource_name = resource_name
super(DPO2024, self).connect(resource_name=self.resource_name, interface_type=interface_type)
self.interface._resource.read_termination = u"\n"
self.interface._resource.write_termination = u"\n"
class DPO2014(_TekDPscope):
def __init__(self, resource_name, *args, **kwargs):
super(DPO2014, self).__init__(resource_name, *args, **kwargs)
self.name = "Tektronix DPO2014 Oscilloscope"
def connect(self, resource_name=None, interface_type=None):
if resource_name is not None:
self.resource_name = resource_name
super(DPO2014, self).connect(resource_name=self.resource_name, interface_type=interface_type)
self.interface._resource.read_termination = u"\n"
self.interface._resource.write_termination = u"\n"
class RSA3308A(SCPIInstrument):
"""Tektronix RSA3308A SA"""
instrument_type = "Spectrum Analyzer"
frequency_center = FloatCommand(scpi_string=":FREQuency:CENTer")
frequency_span = FloatCommand(scpi_string=":FREQuency:SPAN")
frequency_start = FloatCommand(scpi_string=":FREQuency:STARt")
frequency_stop = FloatCommand(scpi_string=":FREQuency:STOP")
num_sweep_points = FloatCommand(scpi_string=":SWEep:POINTs")
resolution_bandwidth = FloatCommand(scpi_string=":BANDwidth")
sweep_time = FloatCommand(scpi_string=":SWEep:TIME")
averaging_count = IntCommand(scpi_string=':AVER:COUN')
marker1_amplitude = FloatCommand(scpi_string=':CALC:MARK1:Y')
marker1_position = FloatCommand(scpi_string=':CALC:MARK1:X')
mode = StringCommand(scpi_string=":INSTrument", allowed_values=["SA", "BASIC", "PULSE", "PNOISE"])
# phase noise application commands
pn_offset_start = FloatCommand(scpi_string=":LPLot:FREQuency:OFFSet:STARt")
pn_offset_stop = FloatCommand(scpi_string=":LPLot:FREQuency:OFFSet:STOP")
pn_carrier_freq = FloatCommand(scpi_string=":FREQuency:CARRier")
def __init__(self, resource_name=None, *args, **kwargs):
super(RSA3308A, self).__init__(resource_name, *args, **kwargs)
def connect(self, resource_name=None, interface_type=None):
if resource_name is not None:
self.resource_name = resource_name
#If we only have an IP address then tack on the raw socket port to the VISA resource string
if is_valid_ipv4(self.resource_name):
self.resource_name += "::5025::SOCKET"
super(RSA3308A, self).connect(resource_name=self.resource_name, interface_type=interface_type)
self.interface._resource.read_termination = u"\n"
self.interface._resource.write_termination = u"\n"
self.interface._resource.timeout = 3000 #seem to have trouble timing out on first query sometimes
def get_axis(self):
return np.linspace(self.frequency_start, self.frequency_stop, self.num_sweep_points)
def get_trace(self, num=1):
self.interface.write(':FORM:DATA REAL,32')
return self.interface.query_binary_values(":TRACE:DATA? TRACE{:d}".format(num),
datatype="f", is_big_endian=True)
def get_pn_trace(self, num=3):
# num = 3 is raw data
# num = 4 is smoothed data
# returns a tuple of (freqs, dBc/Hz)
self.interface.write(":FORM:DATA ASCII")
response = self.interface.query(":FETCH:LPLot{:d}?".format(num))
xypts = np.array([float(x) for x in response.split(',')])
return xypts[::2], xypts[1::2]
def restart_sweep(self):
""" Aborts current sweep and restarts. """
self.interface.write(":INITiate:RESTart")
def peak_search(self, marker=1):
self.interface.write(':CALC:MARK{:d}:MAX'.format(marker))
def marker_to_center(self, marker=1):
self.interface.write(':CALC:MARK{:d}:CENT'.format(marker))
def clear_averaging(self):
self.interface.write(':AVER:CLE')
| apache-2.0 | 4,864,984,427,118,562,000 | 42.353535 | 162 | 0.646668 | false |
stormi/tsunami | src/primaires/perso/__init__.py | 1 | 14160 | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le module primaire perso."""
from collections import namedtuple
from abstraits.module import *
from primaires.format.fonctions import supprimer_accents
from . import commandes
from . import masques
from .editeurs.skedit import EdtSkedit
from .editeurs.raedit import EdtRaedit
from .cfg_stats import cfg_stats
from .cfg_niveaux import cfg_niveaux
from .cfg_talents import cfg_talents
from .race import Race
from .stats import *
from .squelette import Squelette
from .niveaux import Niveaux
from .templates.niveau import Niveau
from .templates.talent import Talent
from .templates.etat import Etat
from .templates.position import Position
from .templates.allonge import Allonge
from .templates.assis import Assis
from .prompt import prompts
from .prompt.defaut import PromptDefaut
class Module(BaseModule):
"""Module gérant la classe Personnage qui sera héritée pour construire
des joueurs et PNJs. Les mécanismes propres au personnage (c'est-à-dire
indépendants de la connexion et liés à l'univers) seront gérés ici.
En revanche, les contextes de connexion ou de création d'un personnage
ne se trouvent pas ici (il s'agit d'informations propres à un joueur, non
à un PNJ).
"""
def __init__(self, importeur):
"""Constructeur du module"""
BaseModule.__init__(self, importeur, "perso", "primaire")
self.cfg_stats = None
self.cfg_niveaux = None
self.cfg_talents = None
self.modele_stats = None
self.commandes = []
self.squelettes = {}
self.races = []
self.gen_niveaux = None
self.niveaux = {}
self.talents = {}
self.etats = {}
self.positions = {}
self.prompts = prompts
def config(self):
"""Méthode de configuration.
On récupère les fichiers de configuration correspondant au module.
"""
self.cfg_stats = conf_stats = type(self.importeur).anaconf.get_config(
"stats", "perso/stats.cfg", "modele stats", cfg_stats)
conf_stats._set_globales({
"I0":I0,
"IE0":IE0,
"SM":SM,
"SEM":SEM,
})
self.modele_stats = Stats()
self.cfg_niveaux = type(self.importeur).anaconf.get_config(
"niveaux", "perso/niveaux.cfg", "modele niveaux", cfg_niveaux)
self.cfg_talents = type(self.importeur).anaconf.get_config(
"talents", "perso/talents.cfg", "modele talents", cfg_talents)
self.ajouter_niveau("art_pisteur", "art du pisteur")
# Ajout des états (assis et allongé)
assis = self.ajouter_etat("assis", Assis)
assis.msg_refus = "Vous êtes assis."
assis.msg_visible = "est assis là"
assis.act_autorisees = ["regarder", "poser", "parler", "ingerer",
"lancersort", "lever", "geste", "allonger", "pecher", "ramer"]
allonge = self.ajouter_etat("allonge", Allonge)
allonge.msg_refus = "Vous êtes allongé."
allonge.msg_visible = "est allongé là"
allonge.act_autorisees = ["regarder", "parler", "ingerer",
"lever", "geste", "asseoir"]
mort = self.ajouter_etat("mort")
mort.msg_refus = "Vous êtes inconscient."
mort.msg_visible = "est inconscient ici"
paralyse = self.ajouter_etat("paralyse")
paralyse.msg_refus = "Vous ne pouvez bouger un muscle."
paralyse.msg_visible = "se tient, rigide, à cet endroit"
paralyse.peut_etre_attaque = False
paralyse.sauvegarder_au_reboot = True
entraine = self.ajouter_etat("entrainer")
entraine.msg_refus = "Vous êtes en train de vous entraîner."
entraine.msg_visible = "s'entraîne ici"
entraine.act_autorisees = ["regarder", "parler"]
recherche = self.ajouter_etat("recherche")
recherche.msg_refus = "Vous êtes un peu occupé."
recherche.msg_visible = "cherche quelque chose ici"
recherche.act_autorisees = ["parler"]
# Ajout des hooks
importeur.hook.ajouter_hook("personnage:peut_deplacer",
"Hook appelé quand un personnage veut se déplacer.")
importeur.hook.ajouter_hook("personnage:calculer_endurance",
"Hook appelé pour calculer l'endurance de déplacement.")
importeur.hook.ajouter_hook("personnage:deplacer",
"Hook appelé quand un personnage se déplace.")
importeur.hook.ajouter_hook("personnage:verbe_deplacer",
"Hook appelé pour retourner le verbe de déplacement.")
importeur.hook.ajouter_hook("personnage:verbe_arriver",
"Hook appelé pour retourner le verbe d'arriver.")
importeur.hook.ajouter_hook("personnage:score",
"Hook appelé quand un personnage consulte son score.")
importeur.hook.ajouter_hook("personnage:points_apprentissage",
"Hook pour ajouter des points d'apprentissage")
BaseModule.config(self)
def init(self):
"""Initialisation du module"""
# Ajout du prompt
self.ajouter_prompt(PromptDefaut)
# On construit le niveau
niveaux = Niveaux
niveaux.nb_niveaux = self.cfg_niveaux.nb_niveaux
niveaux.xp_min = self.cfg_niveaux.xp_min
niveaux.xp_max = self.cfg_niveaux.xp_max
niveaux.points_entrainement_fixes = \
self.cfg_niveaux.points_entrainement_fixes
niveaux.points_entrainement_paliers = \
self.cfg_niveaux.points_entrainement_paliers
niveaux.stats_entrainables = self.cfg_stats.entrainables
niveaux.calculer_grille()
self.gen_niveaux = niveaux
# On récupère les squelettes
squelettes = self.importeur.supenr.charger_groupe(Squelette)
for squelette in squelettes:
self.ajouter_squelette(squelette)
# On récupère les races
races = self.importeur.supenr.charger_groupe(Race)
for race in races:
self.ajouter_race(race)
# Positions
self.ajouter_position("assis", "est assis", "est assise")
self.ajouter_position("allonge", "est allongé", "est allongée")
self.ajouter_talent("escalade", "escalade", "survie", 0.31)
self.ajouter_talent("nage", "nage", "survie", 0.25)
BaseModule.init(self)
def ajouter_commandes(self):
"""Ajout des commandes dans l'interpréteur"""
self.commandes = [
commandes.allonger.CmdAllonger(),
commandes.asseoir.CmdAsseoir(),
commandes.chercher.CmdChercher(),
commandes.commande.CmdCommande(),
commandes.d.CmdD(),
commandes.equipement.CmdEquipement(),
commandes.lever.CmdLever(),
commandes.m.CmdM(),
commandes.niveaux.CmdNiveaux(),
commandes.prompt.CmdPrompt(),
commandes.quete.CmdQuete(),
commandes.qui.CmdQui(),
commandes.raedit.CmdRaedit(),
commandes.score.CmdScore(),
commandes.skedit.CmdSkedit(),
commandes.sklist.CmdSklist(),
commandes.talents.CmdTalents(),
commandes.v.CmdV(),
]
for cmd in self.commandes:
self.importeur.interpreteur.ajouter_commande(cmd)
# Ajout des éditeurs
self.importeur.interpreteur.ajouter_editeur(EdtRaedit)
self.importeur.interpreteur.ajouter_editeur(EdtSkedit)
def preparer(self):
"""Préparation des personnages."""
personnages = list(importeur.joueur.joueurs.values()) + list(
importeur.pnj.PNJ.values())
for personnage in personnages:
personnage.etats.reinitialiser()
if personnage.salle and personnage.salle.nom_terrain == \
"subaquatique" and not personnage.est_immortel():
personnage.plonger()
def get_niveau_par_nom(self, nom):
"""Retourne le niveau dont le nom est donné."""
nom = supprimer_accents(nom).lower()
for niveau in self.niveaux.values():
if supprimer_accents(niveau.nom).lower() == nom:
return niveau
raise ValueError("niveau inconnu {}".format(nom))
def creer_squelette(self, cle):
"""Création d'un squelette"""
squelette = Squelette(cle)
self.ajouter_squelette(squelette)
return squelette
def ajouter_squelette(self, squelette):
"""Ajoute le squelette aux squelettes existants"""
self.squelettes[squelette.cle] = squelette
def supprimer_squelette(self, cle):
"""Supprime le squelette existant"""
squelette = self.squelettes[cle]
del self.squelettes[cle]
squelette.detruire()
def creer_race(self, nom):
"""Crée la race du nom indiqué"""
race = Race(nom)
self.ajouter_race(race)
return race
def ajouter_race(self, race):
"""Ajout de la race au dictionnaire des races existantes"""
self.races.append(race)
def supprimer_race(self, nom):
"""Suppression de la race 'nom'"""
race = None
indice = None
for i, t_race in enumerate(self.races):
if t_race.nom.lower() == nom.lower():
race = t_race
indice = i
if indice is None:
raise KeyError("ce nom de race est introuvable")
del self.races[indice]
race.detruire()
def race_est_utilisee(self, race):
"""Contrôle si la race est déjà utilisée ou non.
La race peut être utilisée :
- par un joueur
- par un prototype de PNJ
"""
a_tester = list(self.importeur.connex.joueurs)
a_tester += list(self.importeur.pnj.prototypes.values())
for test in a_tester:
if test.race is race:
return True
return False
def stats_symboles(self):
"""Retourne un tuple nommé contenant les stats et leur symbole.
Par exemple :
>>> nt = importeur.perso.stats_symboles()
>>> nt.force
'f'
"""
NTStats = namedtuple("NTStats",
[stat.nom for stat in self.modele_stats])
stats_symboles = dict(((stat.nom, "%{}".format(stat.symbole)) \
for stat in self.modele_stats))
ntstats = NTStats(**stats_symboles)
return ntstats
def ajouter_niveau(self, cle, nom):
"""Ajoute un niveau au dictionnaire des niveaux."""
if cle in self.niveaux:
raise ValueError("la clé {} est déjà utilisée comme clé " \
"de niveau".format(repr(cle)))
niveau = Niveau(cle, nom)
self.niveaux[cle] = niveau
def get_points_apprentissage(self, personnage):
"""Retourne le nombre de points d'apprentissage maximum.
Ce calcul ne prend pas en compte les points déjà consommés
par le personnage. On peut étendre le calcul (en ajoutant
des talents "caché") grâce à l'hook
'personnage:points_apprentissage'.
"""
talents = [t for t in self.talents.values() if t.liberer_points]
plus = importeur.hook["personnage:points_apprentissage"].executer(
personnage)
talents = len(talents) * 50 + sum(plus)
return talents
def ajouter_talent(self, cle, nom, niveau, difficulte,
liberer_points=True):
"""Ajoute un talent."""
if cle in self.talents:
raise ValueError("un talent de clé {} existe déjà".format(cle))
talent = Talent(self.niveaux, cle, nom, niveau, difficulte,
liberer_points)
self.talents[cle] = talent
def ajouter_etat(self, cle, classe=None):
"""Ajoute un état dans le dictionnaire."""
if classe is None:
classe = type("Etat{}".format(cle.capitalize()), (Etat, ), {})
classe.cle = cle
if cle in self.etats:
raise ValueError("l'état {} existe déjà".format(cle))
self.etats[cle] = classe
return classe
def ajouter_position(self, cle, etat_m, etat_f):
"""Ajoute une position."""
position = Position(cle, etat_m, etat_f)
self.positions[cle] = position
return position
def ajouter_prompt(self, prompt):
"""Ajoute un prompt.
Cette méthode attend en paramètre une classe héritée de Prompt.
"""
self.prompts[prompt.nom] = prompt
| bsd-3-clause | 2,775,036,464,844,803,000 | 36.238095 | 79 | 0.631145 | false |
juggernate/pymel | pymel/core/uitypes.py | 1 | 42892 | import sys
import re
import pymel.util as _util
import pymel.internal.pmcmds as cmds
import pymel.internal.factories as _factories
import pymel.internal.startup as _startup
import pymel.internal as _internal
import pymel.versions as _versions
import maya.mel as _mm
_logger = _internal.getLogger(__name__)
def _resolveUIFunc(name):
    """Resolve a ui type to its command-wrapper callable.

    :param name: the name of a known ui type (string), a `PyUI` subclass,
        or a callable object
    :return: the callable ui command function
    :raises ValueError: if `name` cannot be resolved to a known ui type
    """
    if isinstance(name, basestring):
        import windows
        try:
            return getattr(windows, name)
        except AttributeError:
            try:
                cls = getattr(dynModule, name)
                return cls.__melcmd__()
            except (KeyError, AttributeError):
                pass
    else:
        import inspect
        if inspect.isfunction(name):
            return name
        elif inspect.isclass(name) and issubclass(name, PyUI):
            # bug fix: the resolved command was previously discarded (no
            # return), causing valid PyUI subclasses to fall through to the
            # ValueError below
            return name.__melcmd__()

    raise ValueError("%r is not a known ui type" % name)
if _versions.current() >= _versions.v2011:
    def toPyQtObject(mayaName):
        """
        Given the name of a Maya UI element of any type, return the corresponding QWidget or QAction.
        If the object does not exist, returns None

        When using this function you don't need to specify whether UI type is a control, layout,
        window, or menuItem, the first match -- in that order -- will be returned. If you have the full path to a UI object
        this should always be correct, however, if you only have the short name of the UI object,
        consider using one of the more specific variants: `toQtControl`, `toQtLayout`, `toQtWindow`, or `toQtMenuItem`.

        .. note:: Requires PyQt
        """
        import maya.OpenMayaUI as mui
        import sip
        import PyQt4.QtCore as qtcore
        import PyQt4.QtGui as qtgui
        # search controls, then layouts, then menu items; the first hit wins.
        # NOTE(review): despite the docstring, windows are not searched here --
        # use toQtWindow for those.
        ptr = mui.MQtUtil.findControl(mayaName)
        if ptr is None:
            ptr = mui.MQtUtil.findLayout(mayaName)
        if ptr is None:
            ptr = mui.MQtUtil.findMenuItem(mayaName)
        if ptr is not None:
            # implicitly returns None when nothing matched
            return sip.wrapinstance(long(ptr), qtcore.QObject)

    def toPyQtControl(mayaName):
        """
        Given the name of a Maya UI control, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PyQt
        """
        import maya.OpenMayaUI as mui
        import sip
        import PyQt4.QtCore as qtcore
        import PyQt4.QtGui as qtgui
        ptr = mui.MQtUtil.findControl(mayaName)
        if ptr is not None:
            return sip.wrapinstance(long(ptr), qtgui.QWidget)

    def toPyQtLayout(mayaName):
        """
        Given the name of a Maya UI layout, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PyQt
        """
        import maya.OpenMayaUI as mui
        import sip
        import PyQt4.QtCore as qtcore
        import PyQt4.QtGui as qtgui
        ptr = mui.MQtUtil.findLayout(mayaName)
        if ptr is not None:
            return sip.wrapinstance(long(ptr), qtgui.QWidget)

    def toPyQtWindow(mayaName):
        """
        Given the name of a Maya UI window, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PyQt
        """
        import maya.OpenMayaUI as mui
        import sip
        import PyQt4.QtCore as qtcore
        import PyQt4.QtGui as qtgui
        ptr = mui.MQtUtil.findWindow(mayaName)
        if ptr is not None:
            return sip.wrapinstance(long(ptr), qtgui.QWidget)

    def toPyQtMenuItem(mayaName):
        """
        Given the name of a Maya UI menuItem, return the corresponding QAction.
        If the object does not exist, returns None

        This only works for menu items. for Menus, use toQtControl or toQtObject

        .. note:: Requires PyQt
        """
        import maya.OpenMayaUI as mui
        import sip
        import PyQt4.QtCore as qtcore
        import PyQt4.QtGui as qtgui
        ptr = mui.MQtUtil.findMenuItem(mayaName)
        if ptr is not None:
            return sip.wrapinstance(long(ptr), qtgui.QAction)
# PYSIDE VERSIONS
    # PYSIDE VERSIONS
    def pysideWrapInstance(ptr, base=None):
        '''Utility to convert a point to a Qt Class and produce the same result
        as sip.wrapinstance using shiboken.wrapInstance.

        Note: This is modeled after nathanhorne.com/?p=486. The base arg isn't
        currently used, and defaults to QObject. The way that base arg was used
        seems like it would give a different result than the sip version. It would
        skip the checking for attribute and just use base as base, however the sip
        version will still return QMainWindow even if QObject is passed in.
        '''
        if ptr is None:
            return
        import shiboken
        import PySide.QtCore as qtcore
        import PySide.QtGui as qtgui
        qObj = shiboken.wrapInstance(long(ptr), qtcore.QObject)
        metaObj = qObj.metaObject()
        cls = metaObj.className()
        superCls = metaObj.superClass().className()
        # re-wrap as the most specific QtGui class available, falling back to
        # the superclass, then to QWidget
        if hasattr(qtgui, cls):
            base = getattr(qtgui, cls)
        elif hasattr(qtgui, superCls):
            base = getattr(qtgui, superCls)
        else:
            base = qtgui.QWidget
        return shiboken.wrapInstance(long(ptr), base)

    def toPySideObject(mayaName):
        """
        Given the name of a Maya UI element of any type, return the corresponding QWidget or QAction.
        If the object does not exist, returns None

        When using this function you don't need to specify whether UI type is a control, layout,
        window, or menuItem, the first match -- in that order -- will be returned. If you have the full path to a UI object
        this should always be correct, however, if you only have the short name of the UI object,
        consider using one of the more specific variants: `toQtControl`, `toQtLayout`, `toQtWindow`, or `toQtMenuItem`.

        .. note:: Requires PySide
        """
        import maya.OpenMayaUI as mui
        import PySide.QtCore as qtcore
        # search controls, then layouts, then menu items; the first hit wins
        ptr = mui.MQtUtil.findControl(mayaName)
        if ptr is None:
            ptr = mui.MQtUtil.findLayout(mayaName)
        if ptr is None:
            ptr = mui.MQtUtil.findMenuItem(mayaName)
        if ptr is not None:
            return pysideWrapInstance(long(ptr), qtcore.QObject)

    def toPySideControl(mayaName):
        """
        Given the name of a Maya UI control, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PySide
        """
        import maya.OpenMayaUI as mui
        import shiboken
        import PySide.QtCore as qtcore
        import PySide.QtGui as qtgui
        ptr = mui.MQtUtil.findControl(mayaName)
        if ptr is not None:
            return pysideWrapInstance(long(ptr), qtgui.QWidget)

    def toPySideLayout(mayaName):
        """
        Given the name of a Maya UI layout, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PySide
        """
        import maya.OpenMayaUI as mui
        import shiboken
        import PySide.QtCore as qtcore
        import PySide.QtGui as qtgui
        ptr = mui.MQtUtil.findLayout(mayaName)
        if ptr is not None:
            return pysideWrapInstance(long(ptr), qtgui.QWidget)

    def toPySideWindow(mayaName):
        """
        Given the name of a Maya UI window, return the corresponding QWidget.
        If the object does not exist, returns None

        .. note:: Requires PySide
        """
        import maya.OpenMayaUI as mui
        import shiboken
        import PySide.QtCore as qtcore
        import PySide.QtGui as qtgui
        ptr = mui.MQtUtil.findWindow(mayaName)
        if ptr is not None:
            return pysideWrapInstance(long(ptr), qtgui.QWidget)

    def toPySideMenuItem(mayaName):
        """
        Given the name of a Maya UI menuItem, return the corresponding QAction.
        If the object does not exist, returns None

        This only works for menu items. for Menus, use toQtControl or toQtObject

        .. note:: Requires PySide
        """
        import maya.OpenMayaUI as mui
        import shiboken
        import PySide.QtCore as qtcore
        import PySide.QtGui as qtgui
        ptr = mui.MQtUtil.findMenuItem(mayaName)
        if ptr is not None:
            return pysideWrapInstance(long(ptr), qtgui.QAction)
    # Assign functions to PyQt versions if PyQt is available, otherwise set to PySide versions
    try:
        import sip
        import PyQt4
        pyQtAvailable = True
    except ImportError:
        pyQtAvailable = False

    try:
        import shiboken
        import PySide
        pySideAvailable = True
    except ImportError:
        pySideAvailable = False

    # pick a binding: if exactly one is installed use it, otherwise defer to
    # the user's configured preference
    if pyQtAvailable and not pySideAvailable:
        qtBinding = 'pyqt'
    elif pySideAvailable and not pyQtAvailable:
        qtBinding = 'pyside'
    else:
        qtBinding = _startup.pymel_options['preferred_python_qt_binding']

    # public toQt* aliases dispatch to the chosen binding's implementations
    if qtBinding == 'pyqt':
        toQtObject = toPyQtObject
        toQtControl = toPyQtControl
        toQtLayout = toPyQtLayout
        toQtWindow = toPyQtWindow
        toQtMenuItem = toPyQtMenuItem
    elif qtBinding == 'pyside':
        toQtObject = toPySideObject
        toQtControl = toPySideControl
        toQtLayout = toPySideLayout
        toQtWindow = toPySideWindow
        toQtMenuItem = toPySideMenuItem
    else:
        raise ValueError('preferred_python_qt_binding must be set to either'
                         ' pyside or pyqt')
# really, this should be in core.windows; but, due to that fact that this module
# is "higher" in the import hierarchy than core.windows, and we need this function
# here, we're just defining it here
@_factories.addMelDocs('objectTypeUI')
def objectTypeUI(name, **kwargs):
    """Wrapper around ``cmds.objectTypeUI`` with fallbacks for ui elements
    whose type cannot be queried directly (see comments below)."""
    try:
        return cmds.objectTypeUI(name, **kwargs)
    except RuntimeError, topError:
        try:
            # some ui types (radioCollections) can only be identified with their shortname
            return cmds.objectTypeUI(name.split('|')[-1], **kwargs)
        except RuntimeError:
            # we cannot query the type of rowGroupLayout children: check common types for these
            uiType = None
            # NOTE(review): 'floatField' appears twice in this list; harmless
            # but probably unintended
            typesToCheck = 'checkBox floatField button floatSlider intSlider ' \
                'floatField textField intField optionMenu radioButton'.split()
            if _versions.current() >= _versions.v2012_SP2:
                # 2012 SP2 introduced a bug where doing:
                # win = cmds.window(menuBar=True)
                # cmds.objectTypeUI(win)
                # would error...
                typesToCheck.append('window')
            for cmdName in typesToCheck:
                # probe each command's 'exists' query until one claims the name
                if getattr(cmds, cmdName)(name, ex=1, q=1):
                    uiType = cmdName
                    break
            if uiType:
                return uiType
            # nothing matched: re-raise the original error
            raise topError
class PyUI(unicode):

    """Base class for all pymel ui wrappers.  Instances are the (long) name
    string of the underlying Maya ui element."""

    def __new__(cls, name=None, create=False, **kwargs):
        """
        Provides the ability to create the PyUI Element when creating a class::

            import pymel.core as pm
            n = pm.Window("myWindow",create=True)
            n.__repr__()
            # Result: Window('myWindow')
        """
        if cls is PyUI:
            # instantiated through the generic base: detect the concrete
            # subclass from the existing ui element's type
            try:
                uiType = objectTypeUI(name)
            except RuntimeError:
                uiType = 'PyUI'
            uiType = _uiTypesToCommands.get(uiType, uiType)

            try:
                newcls = getattr(dynModule, _util.capitalize(uiType))
            except AttributeError:
                newcls = PyUI
                # objectTypeUI for panels seems to return weird results -
                # ie, TmodelPane ... check for them this way.
                # Other types should be detected correctly by objectTypeUI,
                # but this just provides a failsafe...
                for testType in 'panel scriptedPanel window control layout menu'.split():
                    if getattr(cmds, testType)(name, ex=1, q=1):
                        newcls = getattr(dynModule, _util.capitalize(testType),
                                         PyUI)
                        if newcls != PyUI:
                            break
        else:
            newcls = cls

        if not newcls is PyUI:
            if cls._isBeingCreated(name, create, kwargs):
                # create the actual ui element via the wrapped mel command
                name = newcls.__melcmd__(name, **kwargs)
                _logger.debug("PyUI: created... %s" % name)
            else:
                # find the long name
                if '|' not in name and not issubclass(newcls,
                                                      (Window,
                                                       Panel,
                                                       dynModule.ScriptedPanel,
                                                       dynModule.RadioCollection,
                                                       dynModule.ToolCollection)):
                    import windows
                    try:
                        if issubclass(newcls, Layout):
                            parent = windows.layout(name, q=1, p=1)
                        elif issubclass(newcls, OptionMenu):
                            parent = windows.optionMenu(name, q=1, p=1)
                        elif issubclass(newcls, Menu):
                            parent = windows.menu(name, q=1, p=1)
                        else:
                            parent = windows.control(name, q=1, p=1)
                        if parent:
                            name = parent + '|' + name
                    except RuntimeError:
                        # editors don't have a long name, so we keep the short name
                        if name not in cmds.lsUI(long=True, editors=True):
                            raise

        # correct for optionMenu
        if newcls == PopupMenu and cmds.optionMenu(name, ex=1):
            newcls = OptionMenu
        return unicode.__new__(newcls, name)

    @staticmethod
    def _isBeingCreated(name, create, kwargs):
        """
        create a new node when any of these conditions occur:
           name is None
           create is True
           parent flag is set
        """
        return not name or create or ('q' not in kwargs and kwargs.get('parent', kwargs.get('p', None)))

    def __repr__(self):
        return u"ui.%s('%s')" % (self.__class__.__name__, self)

    def parent(self):
        """Return the parent ui element as a `PyUI`, or None for a top-level
        element."""
        buf = unicode(self).split('|')[:-1]
        if len(buf) == 2 and buf[0] == buf[1] and _versions.current() < _versions.v2011:
            # pre-2011, windows with menus can have a strange name:
            # ex. window1|window1|menu1
            buf = buf[:1]
        if not buf:
            return None
        return PyUI('|'.join(buf))
    getParent = parent

    def shortName(self):
        """Return the short (leaf) name of this ui element."""
        return unicode(self).split('|')[-1]

    def name(self):
        """Return the full name of this ui element as a plain unicode string."""
        return unicode(self)

    def window(self):
        """Return the `Window` this ui element belongs to."""
        return Window(self.name().split('|')[0])

    delete = _factories.functionFactory('deleteUI', rename='delete')
    rename = _factories.functionFactory('renameUI', rename='rename')
    # note: intentionally shadows the builtin `type` on instances
    type = objectTypeUI

    @classmethod
    def exists(cls, name):
        """Return True if a ui element of this type named `name` exists."""
        return cls.__melcmd__(name, exists=True)

    if _versions.current() >= _versions.v2011:
        asQtObject = toQtControl
class Panel(PyUI):

    """pymel panel class"""
    __metaclass__ = _factories.MetaMayaUIWrapper
    # note that we're not actually customizing anything, but
    # we're declaring it here because other classes will have this
    # as their base class, so we need to make sure it exists first

# module-level stacks of default parents, maintained by the Layout and Menu
# context managers (``with`` statement support)
_withParentStack = []
_withParentMenuStack = []
class Layout(PyUI):

    """Base class for all layout ui types.  Supports use as a context manager
    (``with`` statement), making the layout the default parent inside the
    block."""

    def __enter__(self):
        global _withParentStack
        _withParentStack.append(self)
        self.makeDefault()
        return self

    def __exit__(self, type, value, traceback):
        global _withParentStack
        _withParentStack.pop()
        if _withParentStack:
            parent = _withParentStack[-1]
        else:
            parent = self.pop()
            # rowGroupLayouts cannot act as default parents; keep popping
            while parent and objectTypeUI(parent) == u'rowGroupLayout':
                parent = parent.pop()
        cmds.setParent(parent)

    def children(self):
        """Return this layout's immediate children as `PyUI` instances."""
        # return [ PyUI( self.name() + '|' + x) for x in self.__melcmd__(self, q=1, childArray=1) ]
        kids = cmds.layout(self, q=1, childArray=1)
        if kids:
            return [PyUI(self.name() + '|' + x) for x in kids]
        return []

    getChildren = children

    # TODO: add depth firt and breadth first options
    def walkChildren(self):
        """
        recursively yield all children of this layout
        """
        for child in self.children():
            yield child
            if hasattr(child, 'walkChildren'):
                for subChild in child.walkChildren():
                    yield subChild

    def findChild(self, shortName, recurse=False):
        """Return the first child whose short name equals `shortName`, or
        None if no match is found."""
        if recurse:
            for child in self.walkChildren():
                if child.shortName() == shortName:
                    return child
        else:
            for child in self.children():
                if child.shortName() == shortName:
                    return child

    def addChild(self, uiType, name=None, **kwargs):
        """Create a child ui element of type `uiType` parented to this
        layout and return it as a `PyUI`."""
        if isinstance(uiType, basestring):
            uiType = getattr(dynModule, uiType)
        assert hasattr(uiType, '__call__'), 'argument uiType must be the name of a known ui type, a UI subclass, or a callable object'
        args = []
        if name:
            args.append(name)
        if kwargs:
            if 'parent' in kwargs or 'p' in kwargs:
                _logger.warn('parent flag is set by addChild automatically. passed value will be ignored')
                kwargs.pop('parent', None)
                kwargs.pop('p', None)
        kwargs['parent'] = self
        res = uiType(*args, **kwargs)
        if not isinstance(res, PyUI):
            res = PyUI(res)
        return res

    def makeDefault(self):
        """
        set this layout as the default parent
        """
        cmds.setParent(self)

    def pop(self):
        """
        set the default parent to the parent of this layout
        """
        p = self.parent()
        cmds.setParent(p)
        return p

    def clear(self):
        """Delete all of this layout's children."""
        # NOTE(review): getChildArray is queried twice; the second call may
        # already reflect deletions -- confirm this is intentional
        children = self.getChildArray()
        if children:
            for child in self.getChildArray():
                cmds.deleteUI(child)

    if _versions.current() >= _versions.v2011:
        asQtObject = toQtLayout
# customized ui classes
class Window(Layout):

    """pymel window class"""
    __metaclass__ = _factories.MetaMayaUIWrapper

#    if _versions.current() < _versions.v2011:
#        # don't set
#        def __enter__(self):
#            return self

    def __exit__(self, type, value, traceback):
        # pop the default-parent stack, then display the window
        super(Window, self).__exit__(type, value, traceback)
        self.show()

    def show(self):
        """Display this window."""
        cmds.showWindow(self)

    def delete(self):
        """Delete this window and all of its contents."""
        cmds.deleteUI(self, window=True)

    def layout(self):
        """Return the window's top-level layout as a `PyUI`, or None."""
        name = self.name()
        for layout in sorted(cmds.lsUI(long=True, controlLayouts=True)):
            # since we are sorted, shorter will be first, and the first layout we come across will be the base layout
            if layout.startswith(name):
                return PyUI(layout)

#        # create a child and then delete it to get the layout
#        res = self.addChild(cmds.columnLayout)
#        layout = res.parent()
#        res.delete()
#        return layout

    def children(self):
        """A window's only child is its top-level layout."""
        res = self.layout()
        return [res] if res else []

    getChildren = children

    def window(self):
        """A window is its own window."""
        return self

    def parent(self):
        """Windows are top-level: always returns None."""
        return None
    getParent = parent

    if _versions.current() >= _versions.v2011:
        asQtObject = toQtWindow
class FormLayout(Layout):

    """Wrapper for ``formLayout`` adding automatic redistribution of children
    along a horizontal or vertical axis."""

    __metaclass__ = _factories.MetaMayaUIWrapper

    def __new__(cls, name=None, **kwargs):
        # strip the pymel-only kwargs before they reach the mel command
        if kwargs:
            [kwargs.pop(k, None) for k in ['orientation', 'ratios', 'reversed', 'spacing']]
        self = Layout.__new__(cls, name, **kwargs)
        return self

    def __init__(self, name=None, orientation='vertical', spacing=2, reversed=False, ratios=None, **kwargs):
        """
        spacing - absolute space between controls
        orientation - the orientation of the layout [ AutoLayout.HORIZONTAL | AutoLayout.VERTICAL ]
        """
        # note: `reversed` intentionally shadows the builtin here (public API)
        Layout.__init__(self, **kwargs)
        self._spacing = spacing
        self._orientation = self.Orientation.getIndex(orientation)
        self._reversed = reversed
        self._ratios = ratios and list(ratios) or []

    def attachForm(self, *args):
        """Attach a child's edge to the form (args: control, edge, offset)."""
        kwargs = {'edit': True}
        kwargs['attachForm'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachControl(self, *args):
        """Attach a child's edge to another control."""
        kwargs = {'edit': True}
        kwargs['attachControl'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachNone(self, *args):
        """Leave a child's edge unattached."""
        kwargs = {'edit': True}
        kwargs['attachNone'] = [args]
        cmds.formLayout(self, **kwargs)

    def attachPosition(self, *args):
        """Attach a child's edge to a percentage position in the form."""
        kwargs = {'edit': True}
        kwargs['attachPosition'] = [args]
        cmds.formLayout(self, **kwargs)

    HORIZONTAL = 0
    VERTICAL = 1
    Orientation = _util.enum.Enum('Orientation', ['horizontal', 'vertical'])

    def flip(self):
        """Flip the orientation of the layout """
        self._orientation = 1 - self._orientation
        self.redistribute(*self._ratios)

    def reverse(self):
        """Reverse the children order """
        self._reversed = not self._reversed
        self._ratios.reverse()
        self.redistribute(*self._ratios)

    def reset(self):
        """Reset ratios and ordering, then redistribute evenly."""
        self._ratios = []
        self._reversed = False
        self.redistribute()

    def redistribute(self, *ratios):
        """
        Redistribute the child controls based on the ratios.
        If not ratios are given (or not enough), 1 will be used
        """
        # sides[orientation] are the edges attached flush to the form;
        # sides[1 - orientation] are the edges distributed along the axis
        sides = [["top", "bottom"], ["left", "right"]]

        children = self.getChildArray()
        if not children:
            return
        if self._reversed:
            children.reverse()

        # pad missing ratios with 1 so every child has one
        ratios = list(ratios) or self._ratios or []
        ratios += [1] * (len(children) - len(ratios))
        self._ratios = ratios
        total = sum(ratios)

        for i, child in enumerate(children):
            for side in sides[self._orientation]:
                self.attachForm(child, side, self._spacing)

            if i == 0:
                self.attachForm(child,
                                sides[1 - self._orientation][0],
                                self._spacing)
            else:
                self.attachControl(child,
                                   sides[1 - self._orientation][0],
                                   self._spacing,
                                   children[i - 1])

            if ratios[i]:
                # attach at the cumulative ratio percentage
                self.attachPosition(children[i],
                                    sides[1 - self._orientation][1],
                                    self._spacing,
                                    float(sum(ratios[:i + 1])) / float(total) * 100)
            else:
                self.attachNone(children[i],
                                sides[1 - self._orientation][1])

    def vDistribute(self, *ratios):
        """Distribute children vertically using `ratios`."""
        self._orientation = int(self.Orientation.vertical)
        self.redistribute(*ratios)

    def hDistribute(self, *ratios):
        """Distribute children horizontally using `ratios`."""
        self._orientation = int(self.Orientation.horizontal)
        self.redistribute(*ratios)
class AutoLayout(FormLayout):

    """
    AutoLayout behaves exactly like `FormLayout`, but will call redistribute automatically
    at the end of a 'with' statement block
    """

    def __exit__(self, type, value, traceback):
        # lay out the children before handing back the default parent
        self.redistribute()
        super(AutoLayout, self).__exit__(type, value, traceback)
class RowLayout(Layout):

    """Wrapper for the ``rowLayout`` command."""
    __metaclass__ = _factories.MetaMayaUIWrapper
class TextScrollList(PyUI):

    """Wrapper for the ``textScrollList`` control, with list-style batch
    helpers for appending, selecting and removing items."""

    __metaclass__ = _factories.MetaMayaUIWrapper

    def extend(self, appendList):
        """Append each string in *appendList* to the list."""
        for entry in appendList:
            self.append(entry)

    def selectIndexedItems(self, selectList):
        """Select each (1-based) index in *selectList*."""
        for index in selectList:
            self.setSelectIndexedItem(index)

    def removeIndexedItems(self, removeList):
        """Remove each (1-based) index in *removeList*."""
        for index in removeList:
            self.removeIndexedItem(index)

    def selectAll(self):
        """Select every item in the list."""
        count = self.getNumberOfItems()
        self.selectIndexedItems(range(1, count + 1))
class Menu(PyUI):

    """Wrapper for the ``menu`` command.  Supports use as a context manager,
    making the menu the default menu parent inside a ``with`` block."""

    __metaclass__ = _factories.MetaMayaUIWrapper

    def __enter__(self):
        global _withParentMenuStack
        _withParentMenuStack.append(self)
        self.makeDefault()
        return self

    def __exit__(self, type, value, traceback):
        global _withParentMenuStack
        _withParentMenuStack.pop()
        if _withParentMenuStack:
            cmds.setParent(_withParentMenuStack[-1], menu=True)
        else:
            # walk up the hierarchy until setParent accepts an ancestor
            parent = self
            while True:
                parent = parent.parent()
                # Maya 2012 Service Pack 2 (or SAP1, SP1) introduces a bug where
                # '' is returned, instead of None; problem being that doing
                # cmds.setParent(None, menu=True) is valid, but
                # cmds.setParent('', menu=True) is not
                if parent == '':
                    parent = None
                try:
                    cmds.setParent(parent, menu=True)
                except RuntimeError:
                    continue
                break

    def getItemArray(self):
        """Return this menu's items as `MenuItem` instances.

        Modified to return pymel instances.
        """
        # fix: previously the itemArray query was issued twice; reuse the
        # first result instead
        children = cmds.menu(self, query=True, itemArray=True)
        if children:
            return [MenuItem(item) for item in children]
        else:
            return []

    def makeDefault(self):
        """
        set this layout as the default parent
        """
        cmds.setParent(self, menu=True)
class PopupMenu(Menu):

    """Wrapper for the ``popupMenu`` command."""
    __metaclass__ = _factories.MetaMayaUIWrapper
class OptionMenu(PopupMenu):

    """Wrapper for the ``optionMenu`` command, with helpers for bulk
    adding and clearing of menu items."""

    __metaclass__ = _factories.MetaMayaUIWrapper

    def addMenuItems(self, items, title=None):
        """Add each label in *items* as a menu item, optionally preceded by a
        disabled *title* item."""
        if title:
            cmds.menuItem(l=title, en=0, parent=self)
        for label in items:
            cmds.menuItem(l=label, parent=self)

    def clear(self):
        """Delete every menu item currently attached to this OptionMenu."""
        existing = self.getItemListLong() or []
        for item in existing:
            cmds.deleteUI(item)

    addItems = addMenuItems
class OptionMenuGrp(RowLayout):

    """Wrapper for ``optionMenuGrp``: a row layout containing an OptionMenu."""
    __metaclass__ = _factories.MetaMayaUIWrapper

    def menu(self):
        """Return the child `OptionMenu` of this group, or None."""
        for child in self.children():
            if isinstance(child, OptionMenu):
                return child

    # Want to set both the menu to the child |OptionMenu item, and the normal
    # parent to this...
    def __enter__(self):
        self.menu().__enter__()
        return super(OptionMenuGrp, self).__enter__()

    def __exit__(self, type, value, traceback):
        self.menu().__exit__(type, value, traceback)
        return super(OptionMenuGrp, self).__exit__(type, value, traceback)
class SubMenuItem(Menu):

    """A menu item that is itself a sub-menu (queried via ``menuItem``)."""

    def getBoldFont(self):
        """Return whether this item's label uses a bold font."""
        return cmds.menuItem(self, query=True, boldFont=True)

    def getItalicized(self):
        """Return whether this item's label is italicized."""
        return cmds.menuItem(self, query=True, italicized=True)

    if _versions.current() >= _versions.v2011:
        asQtObject = toQtMenuItem
class CommandMenuItem(PyUI):

    """A plain (command-executing) menu item."""
    __metaclass__ = _factories.MetaMayaUIWrapper
    # mel command used by the metaclass wrapper for this class
    __melui__ = 'menuItem'

    def __enter__(self):
        # delegate context-manager behavior to the sub-menu wrapper
        SubMenuItem(self).__enter__()
        return self

    def __exit__(self, type, value, traceback):
        return SubMenuItem(self).__exit__(type, value, traceback)
def MenuItem(name=None, create=False, **kwargs):
    """Factory returning a `CommandMenuItem` or `SubMenuItem` for `name`.

    Newly-created items are always `CommandMenuItem`; existing items are
    wrapped as `SubMenuItem` when their ui type is ``subMenuItem`` (or when
    the type cannot be queried), otherwise as `CommandMenuItem`.
    """
    if PyUI._isBeingCreated(name, create, kwargs):
        return CommandMenuItem(name, create, **kwargs)
    try:
        isSubMenu = objectTypeUI(name) == 'subMenuItem'
    except RuntimeError:
        isSubMenu = True
    cls = SubMenuItem if isSubMenu else CommandMenuItem
    return cls(name, create, **kwargs)
class UITemplate(object):

    """
    from pymel.core import *

    # force deletes the template if it already exists
    template = ui.UITemplate( 'ExampleTemplate', force=True )

    template.define( button, width=100, height=40, align='left' )
    template.define( frameLayout, borderVisible=True, labelVisible=False )

    #    Create a window and apply the template.
    #
    with window():
        with template:
            with columnLayout( rowSpacing=5 ):
                with frameLayout():
                    with columnLayout():
                        button( label='One' )
                        button( label='Two' )
                        button( label='Three' )

                with frameLayout():
                    with columnLayout():
                        button( label='Red' )
                        button( label='Green' )
                        button( label='Blue' )
    """

    def __init__(self, name=None, force=False):
        """Create (or reuse) a uiTemplate named `name`.

        :param name: template name; if None, Maya generates one
        :param force: if True, delete any existing template of the same name
        """
        if name and cmds.uiTemplate(name, exists=True):
            if force:
                cmds.deleteUI(name, uiTemplate=True)
            else:
                # reuse the existing template
                self._name = name
                return
        args = [name] if name else []
        self._name = cmds.uiTemplate(*args)

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self._name)

    def __enter__(self):
        self.push()
        return self

    def __exit__(self, type, value, traceback):
        self.pop()

    def name(self):
        """Return the template's name."""
        return self._name

    def push(self):
        """Make this the active uiTemplate."""
        cmds.setUITemplate(self._name, pushTemplate=True)

    def pop(self):
        """Restore the previously active uiTemplate."""
        cmds.setUITemplate(popTemplate=True)

    def define(self, uiType, **kwargs):
        """
        uiType can be:
            - a ui function or class
            - the name of a ui function or class
            - a list or tuple of the above
        """
        if isinstance(uiType, (list, tuple)):
            funcs = [_resolveUIFunc(x) for x in uiType]
        else:
            funcs = [_resolveUIFunc(uiType)]
        kwargs['defineTemplate'] = self._name
        for func in funcs:
            func(**kwargs)

    @staticmethod
    def exists(name):
        """Return True if a uiTemplate named `name` exists."""
        return cmds.uiTemplate(name, exists=True)
class AELoader(type):

    """
    Metaclass used by `AETemplate` class to create wrapping and loading mechanisms when an AETemplate instance is created
    """
    # names of the mel template procedures registered so far
    _loaded = []

    def __new__(cls, classname, bases, classdict):
        newcls = super(AELoader, cls).__new__(cls, classname, bases, classdict)
        try:
            nodeType = newcls.nodeType()
        except ValueError:
            _logger.debug("could not determine node type for " + classname)
        else:
            modname = classdict['__module__']
            if modname == '__builtin__':
                # since the module is __builtin__ our AE was probably included in the body of a scripted
                # plugin, which is called by maya in a strange way ( execfile? ).
                # give it a real home so we can load it later.
                mod = sys.modules['__builtin__']
                setattr(mod, classname, newcls)

            template = 'AE' + nodeType + 'Template'
            cls.makeAEProc(modname, classname, template)
            if template not in cls._loaded:
                cls._loaded.append(template)
        return newcls

    @staticmethod
    def makeAEProc(modname, classname, procname):
        """Register a global mel proc named `procname` that defers to
        `AELoader.load` with the given module and class names."""
        _logger.debug("making AE loader procedure: %s" % procname)
        contents = '''global proc %(procname)s( string $nodeName ){
        python("import %(__name__)s;%(__name__)s.AELoader.load('%(modname)s','%(classname)s','" + $nodeName + "')");}'''
        d = locals().copy()
        d['__name__'] = __name__
        import maya.mel as mm
        mm.eval(contents % d)

    @staticmethod
    def load(modname, classname, nodename):
        """Import `modname`, look up `classname` and instantiate it for
        `nodename`; called back from the generated mel proc."""
        mod = __import__(modname, globals(), locals(), [classname], -1)
        try:
            cls = getattr(mod, classname)
            cls(nodename)
        except Exception:
            print "failed to load python attribute editor template '%s.%s'" % (modname, classname)
            import traceback
            traceback.print_exc()

    @classmethod
    def loadedTemplates(cls):
        "Return the names of the loaded templates"
        return cls._loaded
class AETemplate(object):
"""
To create an Attribute Editor template using python, do the following:
1. create a subclass of `uitypes.AETemplate`
2. set its ``_nodeType`` class attribute to the name of the desired node type, or name the class using the
convention ``AE<nodeType>Template``
3. import the module
AETemplates which do not meet one of the two requirements listed in step 2 will be ignored. To ensure that your
Template's node type is being detected correctly, use the ``AETemplate.nodeType()`` class method::
import AETemplates
AETemplates.AEmib_amb_occlusionTemplate.nodeType()
As a convenience, when pymel is imported it will automatically import the module ``AETemplates``, if it exists,
thereby causing any AETemplates within it or its sub-modules to be registered. Be sure to import pymel
or modules containing your ``AETemplate`` classes before opening the Atrribute Editor for the node types in question.
To check which python templates are loaded::
from pymel.core.uitypes import AELoader
print AELoader.loadedTemplates()
"""
__metaclass__ = AELoader
_nodeType = None
def __init__(self, nodeName):
self._nodeName = nodeName
@property
def nodeName(self):
return self._nodeName
@classmethod
def nodeType(cls):
if cls._nodeType:
return cls._nodeType
else:
m = re.match('AE(.+)Template$', cls.__name__)
if m:
return m.groups()[0]
else:
raise ValueError("You must either name your AETemplate subclass of the form 'AE<nodeType>Template' or set the '_nodeType' class attribute")
@classmethod
def controlValue(cls, nodeName, control):
return cmds.editorTemplate(queryControl=(nodeName, control))
@classmethod
def controlLabel(cls, nodeName, control):
return cmds.editorTemplate(queryLabel=(nodeName, control))
@classmethod
def reload(cls):
"Reload the template. Beware, this reloads the module in which the template exists!"
nodeType = cls.nodeType()
form = "AttrEd" + nodeType + "FormLayout"
exists = cmds.control(form, exists=1) and cmds.formLayout(form, q=1, ca=1)
if exists:
sel = cmds.ls(sl=1)
cmds.select(cl=True)
cmds.deleteUI(form)
if sel:
cmds.select(sel)
reload(sys.modules[cls.__module__])
def addControl(self, control, label=None, changeCommand=None, annotation=None, preventOverride=False, dynamic=False):
args = [control]
kwargs = {'preventOverride': preventOverride}
if dynamic:
kwargs['addDynamicControl'] = True
else:
kwargs['addControl'] = True
if changeCommand:
if hasattr(changeCommand, '__call__'):
import pymel.tools.py2mel
name = self.__class__.__name__ + '_callCustom_changeCommand_' + control
changeCommand = pymel.tools.py2mel.py2melProc(changeCommand, procName=name, argTypes=['string'])
args.append(changeCommand)
if label:
kwargs['label'] = label
if annotation:
kwargs['annotation'] = annotation
cmds.editorTemplate(*args, **kwargs)
def callCustom(self, newFunc, replaceFunc, *attrs):
    """Add a custom UI section driven by *newFunc*/*replaceFunc* for *attrs*.

    Python callables are converted to generated MEL procs, since
    ``editorTemplate -callCustom`` only accepts MEL procedure names.
    """
    #cmds.editorTemplate(callCustom=( (newFunc, replaceFunc) + attrs))
    import pymel.tools.py2mel
    if hasattr(newFunc, '__call__'):
        name = self.__class__.__name__ + '_callCustom_newFunc_' + '_'.join(attrs)
        newFunc = pymel.tools.py2mel.py2melProc(newFunc, procName=name, argTypes=['string'] * len(attrs))
    if hasattr(replaceFunc, '__call__'):
        name = self.__class__.__name__ + '_callCustom_replaceFunc_' + '_'.join(attrs)
        replaceFunc = pymel.tools.py2mel.py2melProc(replaceFunc, procName=name, argTypes=['string'] * len(attrs))
    args = (newFunc, replaceFunc) + attrs
    cmds.editorTemplate(callCustom=1, *args)
def suppress(self, control):
    """Hide the named attribute from the Attribute Editor."""
    cmds.editorTemplate(suppress=control)
def dimControl(self, nodeName, control, state):
    """Grey out (state=True) or re-enable *control* for *nodeName*."""
    #nodeName = nodeName if nodeName else self.nodeName
    # print "dim", nodeName
    cmds.editorTemplate(dimControl=(nodeName, control, state))
def beginLayout(self, name, collapse=True):
    """Open a (collapsible) frame layout section named *name*."""
    cmds.editorTemplate(beginLayout=name, collapse=collapse)
def endLayout(self):
    """Close the layout section opened by :meth:`beginLayout`."""
    cmds.editorTemplate(endLayout=True)
def beginScrollLayout(self):
    """Open a scrollable layout region."""
    cmds.editorTemplate(beginScrollLayout=True)
def endScrollLayout(self):
    """Close the region opened by :meth:`beginScrollLayout`."""
    cmds.editorTemplate(endScrollLayout=True)
def beginNoOptimize(self):
    """Start a section whose controls are always rebuilt (no optimization)."""
    cmds.editorTemplate(beginNoOptimize=True)
def endNoOptimize(self):
    """End the section started by :meth:`beginNoOptimize`."""
    cmds.editorTemplate(endNoOptimize=True)
def interruptOptimize(self):
    """Force the editor to rebuild from this point on the next refresh."""
    cmds.editorTemplate(interruptOptimize=True)
def addSeparator(self):
    """Insert a horizontal separator line."""
    cmds.editorTemplate(addSeparator=True)
def addComponents(self):
    """Add the standard per-component attribute controls."""
    cmds.editorTemplate(addComponents=True)
def addExtraControls(self, label=None):
    """Add controls for every attribute not explicitly handled elsewhere.

    *label* optionally overrides the section heading for those controls.
    """
    kwargs = {}
    if label:
        kwargs['extraControlsLabel'] = label
    cmds.editorTemplate(addExtraControls=True, **kwargs)

# TODO: listExtraAttributes
dynModule = _util.LazyLoadModule(__name__, globals())
def _createUIClasses():
    """Register a lazy wrapper class for every known maya.cmds UI command."""
    for funcName in _factories.uiClassList:
        # Create Class
        classname = _util.capitalize(funcName)
        try:
            cls = dynModule[classname]
        except KeyError:
            # Pick the base class from the command-name suffix convention.
            if classname.endswith(('Layout', 'Grp')):
                bases = (Layout,)
            elif classname.endswith('Panel'):
                bases = (Panel,)
            else:
                bases = (PyUI,)
            # Stored as (metaclass, args) so LazyLoadModule builds it on demand.
            dynModule[classname] = (_factories.MetaMayaUIWrapper, (classname, bases, {}))

_createUIClasses()
class MainProgressBar(dynModule.ProgressBar):
    '''Context manager for the main progress bar.

    If an exception occurs after beginProgress() but before endProgress() the
    Maya GUI becomes unresponsive. Use this class to escape this behavior.

    :Parameters:
        minValue : int
            Minimum or starting value of the progress indicator. If the
            progress value is less than the minValue, the progress value
            will be set to the minimum. Default value is 0.
        maxValue : int
            The maximum or ending value of the progress indicator. If the
            progress value is greater than the maxValue, the progress value
            will be set to the maximum. Default value is 100.
        interruptable : bool
            Set to True if the isCancelled flag should respond to attempts to
            cancel the operation. Setting this to True will make the help
            line display a message to the user indicating that they can
            cancel the operation.

    Here's an example::

        with MainProgressBar(0,20,True) as bar:
            bar.setStatus('Calculating...')
            for i in range(0,20):
                bar.setProgress(i)
                if bar.getIsCancelled():
                    break
    '''
    def __new__(cls, minValue=0, maxValue=100, interruptable=True):
        from language import melGlobals
        # Wrap Maya's global main progress bar rather than creating a new one.
        bar = dynModule.ProgressBar.__new__(
            cls, melGlobals['gMainProgressBar'], create=False)
        bar.setMinValue(minValue)
        bar.setMaxValue(maxValue)
        bar.setIsInterruptable(interruptable)
        return bar

    def __enter__(self):
        self.beginProgress()
        return self

    def __exit__(self, *args):
        # Always end the progress display, even when the body raised.
        self.endProgress()
class VectorFieldGrp(dynModule.FloatFieldGrp):
    """A FloatFieldGrp specialized for editing a 3-component vector."""

    def __new__(cls, name=None, create=False, *args, **kwargs):
        if create:
            # Always build exactly three float fields (x, y, z); ignore any
            # caller-supplied field count.
            kwargs.pop('nf', None)
            kwargs['numberOfFields'] = 3
            name = cmds.floatFieldGrp(name, *args, **kwargs)
        return dynModule.FloatFieldGrp.__new__(cls, name, create=False, *args, **kwargs)

    def getVector(self):
        """Return the three field values as a ``datatypes.Vector``."""
        import datatypes
        components = [cmds.floatFieldGrp(self, q=1, **{flag: True})
                      for flag in ('v1', 'v2', 'v3')]
        return datatypes.Vector(components)

    def setVector(self, vec):
        """Push the three components of *vec* into the fields."""
        cmds.floatFieldGrp(self, e=1, v1=vec[0], v2=vec[1], v3=vec[2])
class PathButtonGrp(dynModule.TextFieldButtonGrp):
    """Text field + 'Browse' button group for picking a filesystem path."""

    # Name of the pymel.core.windows function used to prompt for a path;
    # subclasses override this (see FolderButtonGrp).
    PROMPT_FUNCTION = 'promptForPath'

    def __new__(cls, name=None, create=False, *args, **kwargs):
        if create:
            import windows
            # Force the 'Browse' button; drop any caller-supplied button
            # label/command so the prompt wiring below always wins.
            kwargs.pop('bl', None)
            kwargs['buttonLabel'] = 'Browse'
            kwargs.pop('bc', None)
            kwargs.pop('buttonCommand', None)
            name = cmds.textFieldButtonGrp(name, *args, **kwargs)
            promptFunction = getattr(windows, cls.PROMPT_FUNCTION)

            def setPathCB(name):
                # Only update the field when the user actually chose a path.
                f = promptFunction()
                if f:
                    cmds.textFieldButtonGrp(name, e=1, text=f, forceChangeCommand=True)

            import windows
            cb = windows.Callback(setPathCB, name)
            cmds.textFieldButtonGrp(name, e=1, buttonCommand=cb)
        return super(PathButtonGrp, cls).__new__(cls, name, create=False, *args, **kwargs)

    def setPath(self, path, **kwargs):
        # forceChangeCommand defaults to True so listeners fire on set.
        kwargs['forceChangeCommand'] = kwargs.pop('fcc', kwargs.pop('forceChangeCommand', True))
        self.setText(path, **kwargs)

    def getPath(self):
        """Return the field contents wrapped as a ``system.Path``."""
        import system
        return system.Path(self.getText())
class FolderButtonGrp(PathButtonGrp):
    """PathButtonGrp variant whose Browse button prompts for a folder."""
    PROMPT_FUNCTION = 'promptForFolder'
# most of the keys here are names that are only used in certain circumstances
# Maps the UI type reported by Maya to the cmds command that edits/queries it.
_uiTypesToCommands = {
    'radioCluster': 'radioCollection',
    'rowGroupLayout': 'rowLayout',
    'TcolorIndexSlider': 'rowLayout',
    'TcolorSlider': 'rowLayout',
    'floatingWindow': 'window'
}

# Finalize the lazy module now that all wrapper classes are registered.
dynModule._lazyModule_update()
| bsd-3-clause | -8,284,236,821,677,527,000 | 33.36859 | 155 | 0.584468 | false |
jcfr/girder | plugins/gravatar/server/__init__.py | 1 | 3656 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import hashlib
from girder import events
from girder.api import access
from girder.api.describe import Description
from girder.api.rest import loadmodel
from girder.models.model_base import AccessType
from girder.utility.model_importer import ModelImporter
# Process-level cache of the configured default-image setting; None means
# "not loaded yet" (populated in computeBaseUrl, cleared in validateSettings).
_cachedDefaultImage = None


class PluginSettings(object):
    # Settings-collection key holding the gravatar default image style.
    DEFAULT_IMAGE = 'gravatar.default_image'
def computeBaseUrl(user):
    """
    Compute the base gravatar URL for a user and save the value in the user
    document. For the moment, the current default image is cached in this URL.

    :param user: the user document; its ``email`` field is hashed and its
        ``gravatar_baseUrl`` field is written back and persisted.
    :returns: the computed base URL string.
    """
    global _cachedDefaultImage
    if _cachedDefaultImage is None:
        _cachedDefaultImage = ModelImporter.model('setting').get(
            PluginSettings.DEFAULT_IMAGE, default='identicon')

    # BUG FIX: the gravatar spec requires hashing the *trimmed, lowercased*
    # email; hashing the raw string produced wrong avatars for users who
    # registered with mixed-case addresses. Encoding also makes this work on
    # python 3, where md5 requires bytes.
    md5 = hashlib.md5(user['email'].strip().lower().encode('utf8')).hexdigest()
    url = 'https://www.gravatar.com/avatar/%s?d=%s' % (md5, _cachedDefaultImage)

    user['gravatar_baseUrl'] = url
    ModelImporter.model('user').save(user)
    return url
@access.public
@loadmodel(model='user', level=AccessType.READ)
def getGravatar(user, params):
    # Redirect the client to the user's gravatar image, computing (and
    # persisting) the base URL lazily on first use.
    size = int(params.get('size', 64))
    if user.get('gravatar_baseUrl'):
        baseUrl = user['gravatar_baseUrl']
    else:
        baseUrl = computeBaseUrl(user)
    # HTTPRedirect is cherrypy's control-flow exception for 3xx responses.
    raise cherrypy.HTTPRedirect(baseUrl + '&s=%d' % size)
getGravatar.description = (
    Description('Redirects to the gravatar image for a user.')
    .param('id', 'The ID of the user.', paramType='path')
    .param('size', 'Size in pixels for the image (default=64).', required=False,
           dataType='int')
    # BUG FIX: the notes string was truncated mid-sentence; completed so the
    # generated API docs read as a full sentence.
    .notes('This should only be used if the gravatar_baseUrl property of '
           'the user document has not been computed yet; clients that '
           'already have that URL should request the image directly.'))
def validateSettings(event):
    """Event handler validating this plugin's settings on save."""
    if event.info['key'] == PluginSettings.DEFAULT_IMAGE:
        # Accept the value as-is and stop other validators from rejecting it.
        event.preventDefault().stopPropagation()

        # TODO should we update user collection to remove gravatar_baseUrl vals?
        # Invalidate cached default image since setting changed
        global _cachedDefaultImage
        _cachedDefaultImage = None
def userUpdate(event):
    """
    Called when the user document is being changed. If the email field changes,
    we wipe the cached gravatar URL so it will be recomputed on next request.
    """
    if 'email' in event.info['params']:
        # force=True: load without access checks; this is a trusted hook.
        user = ModelImporter.model('user').load(event.info['id'], force=True)
        if (user['email'] != event.info['params']['email'] and
                user.get('gravatar_baseUrl')):
            del user['gravatar_baseUrl']
            ModelImporter.model('user').save(user)
def load(info):
    """Plugin entry point: register the route, exposed field and event hooks."""
    info['apiRoot'].user.route('GET', (':id', 'gravatar'), getGravatar)
    # Allow the computed base URL to be serialized with user documents.
    ModelImporter.model('user').exposeFields(
        level=AccessType.READ, fields='gravatar_baseUrl')
    events.bind('model.setting.validate', 'gravatar', validateSettings)
    events.bind('rest.put.user/:id.before', 'gravatar', userUpdate)
| apache-2.0 | -9,183,986,138,841,433,000 | 33.490566 | 80 | 0.663567 | false |
JukeboxPipeline/jukebox-core | src/jukeboxcore/gui/widgets/guerilla/prjadder_ui.py | 1 | 1373 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'h:\projects\jukebox-core\src\jukeboxcore\gui\widgets\guerilla\prjadder.ui'
#
# Created: Tue Jan 13 18:54:57 2015
# by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_prjadder_dialog(object):
    """Auto-generated (pyside-uic) layout builder for the 'Add Projects'
    dialog. Do not edit by hand; regenerate from ``prjadder.ui`` instead."""

    def setupUi(self, prjadder_dialog):
        prjadder_dialog.setObjectName("prjadder_dialog")
        prjadder_dialog.resize(987, 631)
        self.verticalLayout = QtGui.QVBoxLayout(prjadder_dialog)
        self.verticalLayout.setObjectName("verticalLayout")
        # Table listing the projects available to add.
        self.prj_tablev = QtGui.QTableView(prjadder_dialog)
        self.prj_tablev.setObjectName("prj_tablev")
        self.verticalLayout.addWidget(self.prj_tablev)
        self.add_pb = QtGui.QPushButton(prjadder_dialog)
        self.add_pb.setObjectName("add_pb")
        self.verticalLayout.addWidget(self.add_pb)

        self.retranslateUi(prjadder_dialog)
        QtCore.QMetaObject.connectSlotsByName(prjadder_dialog)

    def retranslateUi(self, prjadder_dialog):
        prjadder_dialog.setWindowTitle(QtGui.QApplication.translate("prjadder_dialog", "Add Projects", None, QtGui.QApplication.UnicodeUTF8))
        self.add_pb.setText(QtGui.QApplication.translate("prjadder_dialog", "Add", None, QtGui.QApplication.UnicodeUTF8))
machinalis/django-srd20 | srd20/migrations/0026_auto__chg_field_monster_damage_reduction_condition.py | 1 | 14452 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    # Changing field 'Monster.damage_reduction_condition'
    # Widen the column from 64 to 128 characters.
    db.alter_column('srd20_monster', 'damage_reduction_condition', self.gf('django.db.models.fields.CharField')(max_length=128))
def backwards(self, orm):
    # Changing field 'Monster.damage_reduction_condition'
    # Revert the column back to 64 characters (data longer than 64 may be
    # truncated by the database on downgrade).
    db.alter_column('srd20_monster', 'damage_reduction_condition', self.gf('django.db.models.fields.CharField')(max_length=64))
models = {
'srd20.characterclass': {
'Meta': {'ordering': "('name',)", 'object_name': 'CharacterClass', 'db_table': "'class'"},
'alignment': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'class_skills': ('django.db.models.fields.TextField', [], {}),
'epic_feat_base_level': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'epic_feat_interval': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'epic_feat_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'epic_full_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'full_text': ('django.db.models.fields.TextField', [], {}),
'hit_die': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'proficiencies': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'req_base_attack_bonus': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'req_epic_feat': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_feat': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_languages': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'req_psionics': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_race': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_skill': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_special': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'req_spells': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_weapon_proficiency': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'skill_points': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'skill_points_ability': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'spell_list_1': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spell_list_2': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_3': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_4': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_5': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_stat': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'spell_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'srd20.feat': {
'Meta': {'ordering': "('name',)", 'object_name': 'Feat', 'db_table': "'feat'"},
'altname': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'benefit': ('django.db.models.fields.TextField', [], {}),
'choice': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'normal': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'prerequisite': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'special': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'stack': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'srd20.monster': {
'Meta': {'ordering': "['name']", 'object_name': 'Monster'},
'abilities': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'alignment': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'altname': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'armor_class': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'aura': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'base_attack_bonus': ('django.db.models.fields.IntegerField', [], {}),
'charisma': ('django.db.models.fields.IntegerField', [], {}),
'class_level': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'combat_maneuver_bonus': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'combat_maneuver_defense': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'constitution': ('django.db.models.fields.IntegerField', [], {}),
'cr': ('django.db.models.fields.IntegerField', [], {}),
'damage_reduction_amount': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'damage_reduction_condition': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'defensive_abilities': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'dexterity': ('django.db.models.fields.IntegerField', [], {}),
'environment': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'feats': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'flavor_text': ('django.db.models.fields.TextField', [], {}),
'fortitude_save': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'gear': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'hit_points': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immunities': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'initiative': ('django.db.models.fields.IntegerField', [], {}),
'intelligence': ('django.db.models.fields.IntegerField', [], {}),
'languages': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'melee': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'opposition_schools': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'organization': ('django.db.models.fields.TextField', [], {}),
'other_type': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'racial_modifiers': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'ranged': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'reach': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'reflex_save': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'resistance': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'senses': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'skills': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sorcerer_spells_known': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'space': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
'special_attacks': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'special_qualities': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'speed': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'spell_like_abilities': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spell_resistance': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'spells_known': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spells_prepared': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'strength': ('django.db.models.fields.IntegerField', [], {}),
'subtypes': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'treasure': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'weaknesses': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'will_save': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'wisdom': ('django.db.models.fields.IntegerField', [], {}),
'xp': ('django.db.models.fields.IntegerField', [], {})
},
'srd20.monsterability': {
'Meta': {'object_name': 'MonsterAbility'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'monster': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['srd20.Monster']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'srd20.spell': {
'Meta': {'ordering': "('name',)", 'object_name': 'Spell', 'db_table': "'spell'"},
'altname': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'arcane_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'arcane_material_components': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'area': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'casting_time': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'cleric_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'components': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'descriptor': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'druid_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'duration': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'effect': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'focus': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'material_components': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'range': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'saving_throw': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'spell_resistance': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'spellcraft_dc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'subschool': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'to_develop': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'verbal_components': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'xp_cost': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['srd20'] | bsd-3-clause | 3,939,490,585,488,178,700 | 81.119318 | 132 | 0.547398 | false |
Abdoctor/behave | behave/formatter/plain.py | 1 | 4689 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from behave.formatter.base import Formatter
from behave.model_describe import ModelPrinter
from behave.textutil import make_indentation
# -----------------------------------------------------------------------------
# CLASS: PlainFormatter
# -----------------------------------------------------------------------------
class PlainFormatter(Formatter):
    """
    Provides a simple plain formatter without coloring/formatting.
    The formatter displays now also:

      * multi-line text (doc-strings)
      * table
      * tags (maybe)
    """
    name = "plain"
    description = "Very basic formatter with maximum compatibility"

    # -- Behavior toggles; subclasses (e.g. Plain0Formatter) override these.
    SHOW_MULTI_LINE = True
    SHOW_TAGS = False
    SHOW_ALIGNED_KEYWORDS = False
    DEFAULT_INDENT_SIZE = 2

    def __init__(self, stream_opener, config, **kwargs):
        super(PlainFormatter, self).__init__(stream_opener, config)
        # Steps are buffered here and written once their result arrives.
        self.steps = []
        self.show_timings = config.show_timings
        self.show_multiline = config.show_multiline and self.SHOW_MULTI_LINE
        self.show_aligned_keywords = self.SHOW_ALIGNED_KEYWORDS
        self.show_tags = self.SHOW_TAGS
        self.indent_size = self.DEFAULT_INDENT_SIZE
        # -- ENSURE: Output stream is open.
        self.stream = self.open()
        self.printer = ModelPrinter(self.stream)
        # -- LAZY-EVALUATE:
        self._multiline_indentation = None

    @property
    def multiline_indentation(self):
        # Indentation used for doc-strings/tables; computed once on demand.
        if self._multiline_indentation is None:
            offset = 0
            if self.show_aligned_keywords:
                offset = 2
            indentation = make_indentation(3 * self.indent_size + offset)
            self._multiline_indentation = indentation
        return self._multiline_indentation

    def reset_steps(self):
        self.steps = []

    def write_tags(self, tags, indent=None):
        # Only emitted when SHOW_TAGS is enabled (off by default).
        if tags and self.show_tags:
            indent = indent or ""
            text = " @".join(tags)
            self.stream.write(u"%s@%s\n" % (indent, text))

    # -- IMPLEMENT-INTERFACE FOR: Formatter
    def feature(self, feature):
        self.reset_steps()
        self.write_tags(feature.tags)
        self.stream.write(u"%s: %s\n" % (feature.keyword, feature.name))

    def background(self, background):
        self.reset_steps()
        indent = make_indentation(self.indent_size)
        text = u"%s%s: %s\n" % (indent, background.keyword, background.name)
        self.stream.write(text)

    def scenario(self, scenario):
        self.reset_steps()
        self.stream.write(u"\n")
        indent = make_indentation(self.indent_size)
        text = u"%s%s: %s\n" % (indent, scenario.keyword, scenario.name)
        self.write_tags(scenario.tags, indent)
        self.stream.write(text)

    def step(self, step):
        self.steps.append(step)

    def result(self, result):
        """
        Process the result of a step (after step execution).

        :param result: step result object (status, duration, error message).
        """
        # Steps arrive in order, so the oldest buffered step matches result.
        step = self.steps.pop(0)
        indent = make_indentation(2 * self.indent_size)
        if self.show_aligned_keywords:
            # -- RIGHT-ALIGN KEYWORDS (max. keyword width: 6):
            text = u"%s%6s %s ... " % (indent, step.keyword, step.name)
        else:
            text = u"%s%s %s ... " % (indent, step.keyword, step.name)
        self.stream.write(text)

        status_text = result.status.name
        if self.show_timings:
            status_text += " in %0.3fs" % step.duration

        if result.error_message:
            self.stream.write(u"%s\n%s\n" % (status_text, result.error_message))
        else:
            self.stream.write(u"%s\n" % status_text)

        if self.show_multiline:
            if step.text:
                self.doc_string(step.text)
            if step.table:
                self.table(step.table)

    def eof(self):
        self.stream.write("\n")

    # -- MORE: Formatter helpers
    def doc_string(self, doc_string):
        self.printer.print_docstring(doc_string, self.multiline_indentation)

    def table(self, table):
        self.printer.print_table(table, self.multiline_indentation)
# -----------------------------------------------------------------------------
# CLASS: Plain0Formatter
# -----------------------------------------------------------------------------
class Plain0Formatter(PlainFormatter):
    """
    Similar to old plain formatter without support for:

      * multi-line text
      * tables
      * tags
    """
    name = "plain0"
    description = "Very basic formatter with maximum compatibility"
    # Disable all optional output features inherited from PlainFormatter.
    SHOW_MULTI_LINE = False
    SHOW_TAGS = False
    SHOW_ALIGNED_KEYWORDS = False
| bsd-2-clause | -8,901,676,309,910,815,000 | 32.255319 | 80 | 0.565152 | false |
TesseractCat/TesseractBot | utilities.py | 1 | 9081 | import discord
from discord.ext import commands
import pickle
import asyncio
from urllib.request import urlopen
from bot import nonAsyncRun, printToDiscord, checkOp, safeEval, getShelfSlot, doUrlopen, getToken
import atexit
import random
import markovify
import wolframalpha
import threading
import xmltodict
import urllib.request
import urllib
import brainfuck
import urbandict
from translate import translate
import html
import re
import time
class CustomUtilities():
def __init__(self, client):
    # The discord client this cog sends messages through.
    self.client = client
    #Wolfram Alpha Client
    # The API app id is read from the bot's token store via getToken().
    self.waClient = wolframalpha.Client(getToken("wolframalpha"))
# Quotes
@commands.group(pass_context = True, aliases = ["qt"])
async def quote(self, ctx):
    """Manage quotes, run this command with no subcommands to get a random quote"""
    quotes = getShelfSlot(ctx.message.server.id, "Quotes")
    if "quotes" not in quotes:
        quotes.update({"quotes":[]})
    if ctx.invoked_subcommand == None:
        # Manual argument parsing: "!quote 3" shows quote #3.
        if len(ctx.message.content.split(" "))>1:
            # NOTE(review): int() raises ValueError on a non-numeric argument;
            # no handler here, so the command errors out -- confirm intended.
            quote = int(ctx.message.content.split(" ")[1])
        else:
            quote = None
        if quote == None:
            # assumes the list is non-empty; random.choice raises on [] --
            # TODO confirm a fresh server can reach this path.
            quoteRand = random.choice(quotes["quotes"])
            # NOTE(review): this early return skips quotes.close() below,
            # leaking the shelf handle.
            await self.client.say("**Quote #{}**\n{}".format(quotes["quotes"].index(quoteRand)+1,quoteRand))
            return
        try:
            await self.client.say(quotes["quotes"][quote-1])
        except:
            await self.client.say("That's not a quote!")
    quotes.close()
@quote.command(pass_context = True)
async def add(self, ctx, *, quote : str = None):
    """Add a quote"""
    quotes = getShelfSlot(ctx.message.server.id, "Quotes")
    # Reassign (rather than append in place) so the shelve slot persists the
    # change even without writeback=True.
    quotes["quotes"] = quotes["quotes"] + ["{} - **{}** in **{}** at **{}**".format(quote,ctx.message.author.name,ctx.message.channel.name,time.strftime("%d/%m/%Y"))]
    await self.client.say("Quote added as #{}!".format(len(quotes["quotes"])))
    #quotes.close()
@quote.command(pass_context = True, aliases = ["del"])
async def delete(self, ctx, num : int):
    """Delete a quote by its 1-based number (replaced with a placeholder)."""
    quotes = getShelfSlot(ctx.message.server.id, "Quotes")
    # BUG FIX: validate the 1-based index. Previously a negative index
    # silently blanked the wrong quote and an oversized one raised an
    # unhandled IndexError.
    if num < 1 or num > len(quotes["quotes"]):
        await self.client.say("That's not a quote!")
        return
    # Mutate a copy and reassign: shelve only persists on key assignment
    # unless opened with writeback=True (same pattern as ``add``) --
    # the old in-place mutation was likely never saved.
    quoteList = quotes["quotes"]
    quoteList[num - 1] = "Deleted!"
    quotes["quotes"] = quoteList
    quotes.sync()
    await self.client.say("Quote deleted!")
# Quotes done
# Reminders
@commands.command(pass_context = True, aliases = ["rem"])
async def remind(self, ctx, time : int, unit : str, *, text : str):
    """Remind the caller of *text* after the given delay (e.g. ``5 minutes``)."""
    # BUG FIX: ``doRemind`` is an instance method, so it must be resolved via
    # ``self`` -- the bare name raised NameError as soon as the command ran.
    # The sleep happens on a worker thread so the event loop stays responsive.
    thread = threading.Thread(
        target=self.doRemind,
        args=(ctx, time, unit.lower(), text, asyncio.get_event_loop()))
    thread.start()
def doRemind(self, ctx, timeToSleep, unit, text, loop):
    """Worker-thread body for ``remind``: sleep, then DM the reminder.

    Discord coroutines are scheduled back onto the bot's event loop via
    ``call_soon_threadsafe`` because this method runs on a plain thread.
    """
    if "second" in unit:
        sleepTime = (timeToSleep)
    elif "minute" in unit:
        sleepTime = (timeToSleep*60)
    elif "hour" in unit:
        sleepTime = (timeToSleep*60*60)
    elif "day" in unit:
        sleepTime = (timeToSleep*60*60*24)
    else:
        # BUG FIX: ``asyncio.async`` is unusable on Python 3.7+ (``async``
        # became a reserved keyword); ``asyncio.ensure_future`` is the
        # drop-in replacement available since Python 3.4.4.
        loop.call_soon_threadsafe(asyncio.ensure_future, self.client.send_message(ctx.message.channel,"That is not a valid time unit, the available units are: seconds, minutes, hours, days"))
        return
    loop.call_soon_threadsafe(asyncio.ensure_future, self.client.send_message(ctx.message.channel,"Ok! I will remind you in `{}` {}".format(timeToSleep, unit)))
    time.sleep(sleepTime)
    loop.call_soon_threadsafe(asyncio.ensure_future, self.client.send_message(ctx.message.author, "Hello! `{}` {} ago you asked me to remind you of:\n\n{}".format(timeToSleep, unit, text)))
# Reminders done
@commands.command(pass_context = True, aliases = ["mal"])
async def myanimelist(self, ctx, *, searchQuery : str):
    """Search myanimelist"""
    # NOTE(review): only spaces are escaped; other reserved URL characters in
    # the query will break the request (urllib.parse.quote would be safer).
    url = "http://myanimelist.net/api/anime/search.xml?q={}".format(searchQuery.replace(" ","%20"))
    # SECURITY NOTE(review): MAL API credentials are hard-coded here; they
    # should live in the token store like the wolframalpha key.
    password_mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    password_mgr.add_password(None, url, "discorddoggobot", "discordbotmal")
    handler = urllib.request.HTTPBasicAuthHandler(password_mgr)
    opener = urllib.request.build_opener(handler)
    response = opener.open(url)
    response = response.read().decode("utf-8")
    doc = xmltodict.parse(response)
    # assumes at least one <entry> exists -- an empty result raises here;
    # the synopsis is stripped of HTML tags and unescaped before display.
    result = doc["anime"]["entry"][0]
    await self.client.say("**--- Result for search: {} ---**\nTitle: **{}**\nEpisodes: **{}**\nScore: **{}**\nType: **{}**\nStatus: **{}**\n\nSynopsis: *{}*\n\nImage: {}".format(searchQuery, result["title"],result["episodes"],result["score"],result["type"],result["status"],html.unescape(re.sub(re.compile('<.*?>'), '', result["synopsis"])),result["image"]))
@commands.command(pass_context = True, aliases = ["wa"])
async def wolframalpha(self, ctx, *, search : str):
    """Gets Wolfram Alpha result for [search] put a 'true' at the beginning of your search to enable images."""
    watext = "**--- Wolfram Alpha result for: " + search + " ---**\n"
    await self.client.send_typing(ctx.message.channel)
    # Leading "true" enables image output and is stripped from the query.
    if search.split(" ")[0].lower() == "true":
        waresult = self.waClient.query(search.split(" ",1)[1])
    else:
        waresult = self.waClient.query(search)
    for pod in waresult.pods:
        watext+="**"+pod.title+"**\n"
        if pod.text == None and search.split(" ")[0].lower() == "true":
            watext+=pod.img + "\n"
            # BUG FIX: was ``client.say`` (undefined name -> NameError the
            # first time an image-only pod was posted); must go through self.
            await self.client.say(watext)
            watext = ""
        elif pod.text != None:
            watext+=pod.text.replace("\\:","\\u") + "\n"
    # NOTE(review): assumes ``pods`` supports len() after iteration -- verify
    # against the wolframalpha client version in use.
    if len(waresult.pods) < 1:
        watext += "*No results, please rephrase your query.*"
    await self.client.say(watext)
@commands.command(aliases = ["bf"])
async def brainfuck(self, bfsrc : str, bfinput : str = ""):
"""Executes brainfuck code"""
bftext = ""
bftext += "**--- Brainfuck result ---**\n"
bftext += "```" + brainfuck.bf(bfsrc, 0, len(bfsrc) - 1, bfinput, 0, 1000000)
await self.client.say(bftext[:1500] + " ```")
@commands.command(aliases = ["df"])
async def define(self, word : str, defNum : int = 1):
"""Defines a word"""
await self.client.say(urbandict.define(word)[defNum-1]['def'])
@commands.command(pass_context = True, aliases = ["ev"])
async def evaluate(self, ctx, *, code : str):
"""Evaluates a python statement"""
await self.client.say(safeEval("return " + code, {"message": ctx.message, "urlopen": doUrlopen, "client": {"servers":self.client.servers, "user":self.client.user}, "list": lambda x: [obj for obj in x]})[:1000])
@commands.command(pass_context = True, aliases = ["rep"])
async def reputation(self, ctx, user : discord.User = None):
repDict = getShelfSlot(ctx.message.server.id, "Rep")
if (user == ctx.message.author):
await self.client.say("You can't give yourself rep!")
return
if (user == None):
try:
await self.client.say("You have `{}` rep!".format(repDict[ctx.message.author.id]))
except:
await self.client.say("You have no rep!")
else:
try:
repDict[user.id] += 1
except:
repDict[user.id] = 1
await self.client.say("1 rep given to {}, they currently have `{}` rep.".format(user.mention, repDict[user.id]))
repDict.close()
@commands.command(pass_context = True, aliases = ["mkv"])
async def markov(self, ctx, channel : discord.Channel, messages : int = 500, stateSize : int = 1):
"""Make a markov chain of a channel"""
text = ""
async for message in self.client.logs_from(channel, limit=messages):
text += message.content.replace("<@","@") + "\n"
text_model = markovify.Text(text, state_size=stateSize)
await self.client.say(text_model.make_sentence(max_overlap_ratio = 0.9,max_overlap_total=30,tries=1000))
@commands.command(pass_context = True, aliases = ["trans"])
async def translate(self, ctx, *, text : str):
"""Translate text to english (this function is very finnicky)"""
#if len(ctx.message.content.rsplit(" ",1)[1]) == 2:
# await self.client.say(translate(ctx.message.content.rsplit(" ",1)[0].split(" ",1)[1],ctx.message.content.rsplit(" ",1)[1]))
#else:
await self.client.say("**Translated text:** " + translate(text,"en"))
def setup(client):
client.add_cog(CustomUtilities(client))
| mit | 4,706,039,357,852,995,000 | 41.869565 | 362 | 0.565907 | false |
dpeinado/nike | config/urls.py | 1 | 1226 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"),
# Django Admin
url(r'^admin/', include(admin.site.urls)),
# User management
url(r'^users/', include("nike.users.urls", namespace="users")),
url(r'^accounts/', include('allauth.urls')),
# Your stuff: custom urls includes go here
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', 'django.views.defaults.bad_request'),
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
]
| bsd-3-clause | -7,648,930,935,618,000,000 | 35.058824 | 91 | 0.680261 | false |
mr555ru/orbotor | orbotor/game_constants.py | 1 | 1246 | # Orbotor - arcade with orbit mechanics
# Copyright (C) 2014 mr555ru
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
from pygame import Color
config = ConfigParser.RawConfigParser()
config.read('profile.cfg')
G = 1
MAXRESPAWNTIME = 3000
BULLET_LIFE = 7000
BULLET_MASS = 0.3
BULLET_VEL = 6
FUEL_MASS = 0.2
MAX_FUEL = 16
MAX_AMMO = 16
GAME_SPEED = 40
SYS_FONT = "font.ttf"
STATS_FONT = "font_stats.ttf"
MAX_OBJECTS = config.getint("Misc", "max_objects")
DEBRIS_R = 6
DEBRIS_SCALING = 3.0
DEBRIS_COLORS = (Color("#555555"), Color("#888888"))
HELLDEBRIS_COLORS = (Color("#303030"), Color("#777777"))
| gpl-3.0 | 1,457,648,420,157,971,700 | 29.390244 | 74 | 0.715088 | false |
Code-ReaQtor/IPToCC | setup.py | 1 | 1942 | #!/usr/bin/env python
import sys
from setuptools import setup
VERSION = '2.1.1'
REQUIREMENTS = ['pandas==0.23.4']
if sys.version_info[:2] < (3, 2):
REQUIREMENTS += ['backports.functools-lru-cache==1.5']
if sys.version_info[:2] < (3, 3):
REQUIREMENTS += ['ipaddress==1.0.22']
setup(
name='IPToCC',
version=VERSION,
packages=['iptocc'],
url='https://github.com/roniemartinez/IPToCC',
download_url='https://github.com/roniemartinez/IPToCC/tarball/{}'.format(VERSION),
license='MIT',
author='Ronie Martinez',
author_email='[email protected]',
description='Get country code of IPv4/IPv6 address. Address lookup is done offline.',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
keywords=[],
install_requires=REQUIREMENTS,
classifiers=['Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Scientific/Engineering :: Mathematics'],
package_data={'iptocc': ['delegated-afrinic-extended-latest',
'delegated-arin-extended-latest',
'delegated-apnic-extended-latest',
'delegated-lacnic-extended-latest',
'delegated-ripencc-extended-latest',
'iso3166.csv'
]
}
)
| mit | -2,779,000,412,450,829,000 | 40.319149 | 89 | 0.561277 | false |
hengshuangliu/Practice-of-JSRG | RobotNet.py | 1 | 22125 | #! usr/bin/env python
"""
Created on Wed May 11 16:56:23 2016
Builds the RobotNet.
@author: shuang
"""
from __future__ import absolute_import
from __future__ import print_function
import tensorflow as tf
import time
#import numpy as np
import os.path
import Robot_data
import Nets
#---------------------------------------Configure---------------------------------------------------------
# parameters for debug model
# if you are not a programmer, please set debug=False.
debug=False
# global parameters for what you want to run.
# ==== 1: for run_trainning
# ==== 2: for run_testing
RUN_FUNCTION=2
# the RobotNet function.
ROBOT_NET=Nets.RobotNet_v3
# Basic parameters.
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_integer('num_epochs', 1, 'Number of epochs to run trainer.')
flags.DEFINE_string('train_dir', '/home/wangdong/PythonCode/AI/converted_data/0522','Directory with the training data.')
flags.DEFINE_string('model', 'RobotNet_v3','name of model for saver.')
flags.DEFINE_string('saver_dir', '/home/wangdong/PythonCode/AI/saver/multi_pic/17-4','directory for checkpoint file.')
flags.DEFINE_string('summary_dir', '/home/wangdong/PythonCode/AI/summary/multi_pic_manul/16_new','directory for summary writer.')
# Basic model parameters for train mode.
flags.DEFINE_float('learning_rate', 0.001, 'Initial learning rate.')
flags.DEFINE_integer('batch_size', 8, 'Batch size.')
flags.DEFINE_integer('max_steps', 10000, 'steps for trainnig')
TRAIN_FILE_LIST=['sponge1.tfrecords','sponge2.tfrecords','sponge3.tfrecords','sponge4.tfrecords',
'sponge6.tfrecords','sponge7.tfrecords','sponge8.tfrecords',
'sponge16.tfrecords','sponge18.tfrecords',
'sponge11.tfrecords','sponge12.tfrecords','sponge13.tfrecords','sponge14.tfrecords',
'sponge19.tfrecords','sponge10.tfrecords','sponge20.tfrecords'] # .tfrecords files for trainning, string list.
TRAIN_PROB=0.5
# basic parameters for test mode.
TEST_FILE_LIST=['validation0522.tfrecords'] # .tfrecords files for testing, string list.
flags.DEFINE_integer('test_batch_size', 10, 'Batch size.')
flags.DEFINE_integer('test_numbers', 2000, 'Numbers of testing.')
TEST_PROB=1.0
# The ROBOT images are always 240*320*3 pixels.
IMAGE_HEIGTH=240
IMAGE_WIDTH=320
IMAGE_CHANNAL=3
IMAGE_PIXELS =IMAGE_HEIGTH*IMAGE_WIDTH*IMAGE_CHANNAL
CLASSES=2
# do pre_process for inputed image.
CROP_HEIGTH=240
CROP_WIDTH=240
CROP_CHANNAL=3
IF_RANDOM_CROP=False
IF_FLIP=False
IF_CONTRAST=False
IF_BRIGHT=False
IF_WHITEN=False
#-------------------------------------------------Functions--------------------------------------------------
def loss(net_out, labels):
"""Calculates the loss from the net_out and the labels.
Args:
net_out: tensor, float - [batch_size, NUM_CLASSES].
labels: tensor, int32 - [batch_size].
Returns:
loss: Loss tensor of type float.
"""
labels = tf.cast(labels, tf.float32)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(net_out, labels, name="softmax")
loss = tf.reduce_mean(cross_entropy, name='reduce_mean')
return loss
def test_loss():
with tf.Graph().as_default():
net_out_i = tf.constant([[1,0],[0.5,0.5],[1,0.5]])
net_out = tf.cast(net_out_i, tf.float32)
labels_i = tf.constant([[1,0],[1,0],[0,1]])
labels = tf.cast(labels_i, tf.float32)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(net_out, labels, name="softmax")
loss = tf.reduce_mean(cross_entropy, name='reduce_mean')
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init)
loss_r, cross_entropy_r = sess.run([loss,cross_entropy])
print('loss_r:',loss_r)
print('entre_r:',cross_entropy_r)
return True
def train_op(loss, learning_rate):
"""
Sets up the training Ops.
Creates a summarizer to track the loss over time in TensorBoard.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train.
Args:
loss: Loss tensor, from loss().
learning_rate: The learning rate to use for gradient descent.
Returns:
train_op: The Op for training.
"""
tf.scalar_summary(loss.op.name, loss)
# Create the gradient descent optimizer with the given learning rate.
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
#optimizer = tf.train.AdamOptimizer(learning_rate)
#optimizer = tf.train.MomentumOptimizer(learning_rate, 0.9)
# Create a variable to track the global step.
global_step = tf.Variable(0, name='global_step', trainable=False)
# Use the optimizer to apply the gradients that minimize the loss
# (and also increment the global step counter) as a single training step.
train_op = optimizer.minimize(loss, global_step=global_step)
return train_op
def evaluation(net_out, labels):
"""Evaluate the quality of the net_out at predicting the label.
Args:
net_out: net_out tensor, float - [batch_size, NUM_CLASSES].
labels: Labels tensor, int32 - [batch_size], with values in the
range [0, NUM_CLASSES).
Returns:
accuracy in a batch with a float32.
"""
correct_pred = tf.equal(tf.argmax(net_out,1), tf.argmax(labels,1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
return accuracy
def test_evaluation():
with tf.Graph().as_default():
net_out_i = tf.constant([[1,0],[0,1],[1,0]])
net_out = tf.cast(net_out_i, tf.float32)
labels_i = tf.constant([[1,0],[1,0],[0,1]])
labels = tf.cast(labels_i, tf.float32)
correct_pred = tf.equal(tf.argmax(net_out,1), tf.argmax(labels,1))
accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init)
c_r,accur_r = sess.run([correct_pred,accuracy])
print('c_r:',c_r)
print('accur_r:',accur_r)
return True
def _check_dir(chk_dir):
"""
check if chk_dir is already existed. if not, create it.
Args:
chk_dir: string, directory to be checking.
"""
if os.path.exists(chk_dir):
if os.path.isabs(chk_dir):
print("%s is an absolute path"%(chk_dir))
else:
print("%s is a relative path"%(chk_dir))
else:
print(chk_dir+" is not existed.")
os.mkdir(chk_dir)
print(chk_dir+" is created.")
return True
def run_training():
"""
Run the train for RobotNet.
"""
with tf.Graph().as_default():
R_data=Robot_data.Robot_data(data_dir=FLAGS.train_dir,filename_list=TRAIN_FILE_LIST,batch_size=FLAGS.batch_size,
imshape=[IMAGE_HEIGTH,IMAGE_WIDTH,IMAGE_CHANNAL],crop_shape=[CROP_HEIGTH,CROP_WIDTH,CROP_CHANNAL],
if_random_crop=IF_RANDOM_CROP,if_flip=IF_FLIP,if_bright=IF_BRIGHT, if_contrast=IF_CONTRAST,
if_whiten=IF_WHITEN, num_classes=CLASSES,num_epochs=FLAGS.num_epochs)
n_images,n_labels=R_data.one_hot_input()
#x_images = tf.placeholder(tf.float32, [None, IMAGE_PIXELS])
#y_labels = tf.placeholder(tf.float32, [None, CLASSES])
x_images = n_images
y_labels = n_labels
tf.image_summary('images', x_images,max_images=FLAGS.batch_size)
#keep_prob = tf.placeholder(tf.float32)
keep_prob = tf.constant(TRAIN_PROB)
#net_out,saver= RobotNet(x_images, keep_prob)
net_out,saver= ROBOT_NET(x_images, keep_prob)
loss_out= loss(net_out, y_labels)
train_op_out = train_op(loss_out, FLAGS.learning_rate)
eval_correct = evaluation(net_out, y_labels)
summary_op = tf.merge_all_summaries()
#saver = tf.train.Saver()
init = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init)
_check_dir(FLAGS.saver_dir)
checkpoint = tf.train.get_checkpoint_state(FLAGS.saver_dir)
if checkpoint and checkpoint.model_checkpoint_path:
saver.restore(sess, checkpoint.model_checkpoint_path)
print("Successfully loaded:", checkpoint.model_checkpoint_path)
else:
print("Could not find old network weights")
print("train from step one")
summary_writer = tf.train.SummaryWriter(FLAGS.summary_dir, graph=sess.graph)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
try:
for step in xrange(FLAGS.max_steps):
start_time = time.time()
#feed_dict={x_images:n_images, y_labels:n_labels, keep_prob:TRAIN_PROB}
#_, loss_value = sess.run([train_op_out, loss_out],feed_dict=feed_dict)
_, loss_value = sess.run([train_op_out, loss_out])
duration = time.time() - start_time
if not coord.should_stop():
if step % 100 == 0:
# Print status to stdout.
print('Step %d: loss = %.4f (%.3f sec)' % (step, loss_value, duration))
# Update the events file.
#summary_str = sess.run(summary_op, feed_dict=feed_dict)
summary_str = sess.run(summary_op)
summary_writer.add_summary(summary_str, step)
summary_writer.flush()
if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
saver.save(sess, FLAGS.saver_dir+'/'+FLAGS.model, global_step=step)
# Evaluate against the training set.
print('Training Data Eval:')
#accuracy = sess.run(eval_correct,feed_dict={x_images:n_images, y_labels:n_labels, keep_prob:TRAIN_PROB})
accuracy = sess.run(eval_correct)
print("step:%d time:%.3f"%(step,duration))
print("accuracy:%.6f"%(accuracy))
print("loss:%.3f"%(loss_value))
# Evaluate against the test set.
print('Test Data Eval:')
else:
break
except tf.errors.OutOfRangeError:
print('Done training for %d epochs, %d steps.' % (FLAGS.num_epochs, step))
finally:
coord.request_stop()
coord.join(threads)
print("run_training ok")
return True
def test_one_train():
with tf.Graph().as_default():
R_data=Robot_data.Robot_data(data_dir=FLAGS.train_dir,filename_list=TRAIN_FILE_LIST,batch_size=FLAGS.batch_size,
imshape=[IMAGE_HEIGTH,IMAGE_WIDTH,IMAGE_CHANNAL],crop_shape=[CROP_HEIGTH,CROP_WIDTH,CROP_CHANNAL],
if_random_crop=IF_RANDOM_CROP, if_flip=IF_FLIP,if_bright=IF_BRIGHT, if_contrast=IF_CONTRAST,
if_whiten=IF_WHITEN, num_classes=CLASSES,num_epochs=FLAGS.num_epochs)
x_images,y_labels=R_data.one_hot_input()
keep_prob = tf.constant(TRAIN_PROB)
# fetch all the tensor in the RobotNet for testing.
# ......by liuhengshuang.
dropout = keep_prob
images = x_images
#_X = tf.reshape(images, shape=[-1, IMAGE_HEIGTH, IMAGE_WIDTH, IMAGE_CHANNAL])
X = tf.cast(images, tf.float32)
weights1=tf.Variable(tf.random_normal([11, 11, 3, 96],stddev=0.01))
biases1=tf.Variable(tf.zeros([96]))
conv1 = Nets.conv2d('conv1', X, weights1, biases1,stride=[4,4],padding='SAME')
norm1 = Nets.norm('norm1', conv1, lsize=2)
pool1= Nets.max_pool('pool1', norm1, 3, 2)
weights2=tf.Variable(tf.random_normal([5, 5, 96, 256],stddev=0.01))
biases2=tf.Variable(tf.constant(0.1,shape=[256]))
conv2 = Nets.conv2d('conv2', pool1, weights2, biases2,stride=[1,1],padding='SAME')
norm2 = Nets.norm('norm2', conv2, lsize=2)
pool2= Nets.max_pool('pool2', norm2, 3, 2)
weights3=tf.Variable(tf.random_normal([3, 3, 256, 384],stddev=0.01))
biases3=tf.Variable(tf.zeros([384]))
conv3 = Nets.conv2d('conv3', pool2, weights3, biases3,stride=[1,1],padding='SAME')
weights4=tf.Variable(tf.random_normal([3, 3, 384, 384],stddev=0.01))
biases4=tf.Variable(tf.constant(0.1,shape=[384]))
conv4 = Nets.conv2d('conv4', conv3, weights4, biases4,stride=[1,1],padding='SAME')
weights5=tf.Variable(tf.random_normal([3, 3, 384, 256],stddev=0.01))
biases5=tf.Variable(tf.constant(0.1,shape=[256]))
conv5 = Nets.conv2d('conv5', conv4, weights5, biases5,stride=[1,1],padding='SAME')
pool5= Nets.max_pool('pool5', conv5, 3, 2)
p_h=pool5.get_shape().as_list()[1]
p_w=pool5.get_shape().as_list()[2]
print('p_h:',p_h)
print('p_w:',p_w)
weights6=tf.Variable(tf.random_normal([p_h*p_w*256, 4096],stddev=0.005))
biases6=tf.Variable(tf.constant(0.1,shape=[4096]))
dense1 = tf.reshape(pool5, [-1, weights6.get_shape().as_list()[0]])
fc6= tf.nn.relu(tf.matmul(dense1, weights6) + biases6, name='fc6')
drop6=tf.nn.dropout(fc6, dropout)
weights7=tf.Variable(tf.random_normal([4096, 4096],stddev=0.005))
biases7=tf.Variable(tf.constant(0.1,shape=[4096]))
fc7= tf.nn.relu(tf.matmul(drop6, weights7) + biases7, name='fc7')
drop7=tf.nn.dropout(fc7, dropout)
weights8=tf.Variable(tf.random_normal([4096, 2],stddev=0.01))
biases8=tf.Variable(tf.zeros([2]))
net_out= tf.matmul(drop7, weights8) + biases8
# all above is code for testing RobotNet variables.
# if you are not in testing mode, you can comment these and uncomment
# line 364: net_out= RobotNet(x_images, keep_prob)
# ......by liuhengshuang.
with tf.Session() as sess:
#net_out= RobotNet(x_images, keep_prob)
loss_out= loss(net_out, y_labels)
eval_correct = evaluation(net_out, y_labels)
init = tf.initialize_all_variables()
sess.run(init)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
for step in xrange(1):
x_images_r,y_labels_r,net_out_r,loss_out_r,eval_correct_r=sess.run([x_images,y_labels,net_out,loss_out,eval_correct])
print('x_images_r:',x_images_r)
print('y_labels_r:',y_labels_r)
# output for testing RobotNet. by liuhengshuang.
X_r, weights1_r, biases1_r, conv1_r=sess.run([X,weights1,biases1,conv1])
print('x_r:',X_r)
#print('weights1_r:',weights1_r)
#print('biases1_r:',biases1_r)
print('conv1_r:',conv1_r)
norm1_r,pool1_r,weights2_r,biases2_r,conv2_r=sess.run([norm1,pool1,weights2,biases2,conv2])
print('norm1_r:',norm1_r)
print('pool1_r:',pool1_r)
#print('weights2_r:',weights2_r)
#print('biases2_r:',biases2_r)
print('conv2_r:',conv2_r)
norm2_r,pool2_r,weights3_r,biases3_r,conv3_r=sess.run([norm2,pool2,weights3,biases3,conv3])
print('norm2_r:',norm2_r)
print('pool2_r:',pool2_r)
#print('weights3_r:',weights3_r)
#print('biases3_r:',biases3_r)
print('conv3_r:',conv3_r)
weights4_r,biases4_r,conv4_r=sess.run([weights4,biases4,conv4])
#print('weights4_r:',weights4_r)
#print('biases4_r:',biases4_r)
print('conv4_r:',conv4_r)
weights5_r,biases5_r,conv5_r=sess.run([weights5,biases5,conv5])
#print('weights5_r:',weights5_r)
#print('biases5_r:',biases5_r)
print('conv5_r:',conv5_r)
pool5_r,weights6_r,biases6_r,dense1_r,fc6_r=sess.run([pool5,weights6,biases6,dense1,fc6])
print('pool5_r:',pool5_r)
#print('weights6_r:',weights6_r)
#print('biases6_r:',biases6_r)
print('dense1_r:',dense1_r)
print('fc6_r:',fc6_r)
drop6_r,weights7_r,biases7_r,fc7_r=sess.run([drop6,weights7,biases7,fc7])
print('drop6_r:',drop6_r)
#print('weights7_r:',weights7_r)
#print('biases7_r:',biases7_r)
print('fc7_r:',fc7_r)
drop7_r,weights8_r,biases8_r=sess.run([drop7,weights8,biases8])
print('drop7_r:',drop7_r)
#print('weights8_r:',weights8_r)
#print('biases8_r:',biases8_r)
# output for testing RobotNet. by liuhengshuang.
print('net_out_r:',net_out_r)
print('loss_out_r:',loss_out_r)
print('eval_correct_r:',eval_correct_r)
coord.request_stop()
coord.join(threads)
print("run_training ok")
return True
def test_RobotNet():
with tf.Graph().as_default():
rd=Robot_data.Robot_data(data_dir=FLAGS.train_dir,filename_list=TRAIN_FILE_LIST,batch_size=FLAGS.batch_size,
imshape=[IMAGE_HEIGTH,IMAGE_WIDTH,IMAGE_CHANNAL],crop_shape=[CROP_HEIGTH,CROP_WIDTH,CROP_CHANNAL],
if_random_crop=IF_RANDOM_CROP, if_flip=IF_FLIP,if_bright=IF_BRIGHT, if_contrast=IF_CONTRAST,
if_whiten=IF_WHITEN, num_classes=CLASSES,num_epochs=FLAGS.num_epochs)
images, labels =rd.one_hot_input()
dropout=tf.constant(1.0)
net_out,_=ROBOT_NET(images,dropout)
init_op = tf.initialize_all_variables()
with tf.Session() as sess:
sess.run(init_op)
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
images_r,dropout_r,net_out_r,labels_r=sess.run([images,dropout,net_out,labels])
print("images_r:",images_r,images_r.shape)
print("dropout_r",dropout_r,dropout_r.shape)
print("net_out_r:",net_out_r,net_out_r.shape)
print("labels_r:",labels_r,labels_r.shape)
coord.request_stop()
coord.join(threads)
print("great work")
def run_testing():
"""run testing for trained RobotNet.
"""
with tf.Graph().as_default():
R_data=Robot_data.Robot_data(data_dir=FLAGS.train_dir,filename_list=TEST_FILE_LIST,batch_size=FLAGS.test_batch_size,
imshape=[IMAGE_HEIGTH,IMAGE_WIDTH,IMAGE_CHANNAL],crop_shape=[CROP_HEIGTH,CROP_WIDTH,CROP_CHANNAL],
if_random_crop=IF_RANDOM_CROP, if_flip=IF_FLIP,if_bright=IF_BRIGHT, if_contrast=IF_CONTRAST,
if_whiten=IF_WHITEN,num_classes=CLASSES,num_epochs=FLAGS.num_epochs)
n_images,n_labels=R_data.one_hot_input()
x_images = n_images
y_labels = n_labels
keep_prob = tf.constant(TEST_PROB)
#net_out,saver= RobotNet(x_images, keep_prob)
net_out,saver= ROBOT_NET(x_images, keep_prob)
loss_out= loss(net_out, y_labels)
eval_correct = evaluation(net_out, y_labels)
#saver = tf.train.Saver()
with tf.Session() as sess:
init = tf.initialize_all_variables()
sess.run(init)
checkpoint = tf.train.get_checkpoint_state(FLAGS.saver_dir)
if checkpoint and checkpoint.model_checkpoint_path:
saver.restore(sess, checkpoint.model_checkpoint_path)
print("Successfully loaded:", checkpoint.model_checkpoint_path)
else:
print("Could not find old network weights")
return False
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord)
try:
total_accuracy=0.0
for step in xrange(FLAGS.test_numbers):
start_time = time.time()
if not coord.should_stop():
print('-----Testing accuracy----:')
accuracy_r,loss_value = sess.run([eval_correct, loss_out])
total_accuracy+=accuracy_r
duration = time.time() - start_time
print("step:%d time:%.3f"%(step,duration))
print("accuracy:%.6f"%(accuracy_r))
print("loss:%.6f"%(loss_value))
else:
break
except tf.errors.OutOfRangeError:
print('Done training for %d epochs, %d steps.' % (FLAGS.num_epochs, step))
finally:
coord.request_stop()
print("-----Total accuracy-----:")
print(total_accuracy/float(step))
coord.join(threads)
print('success')
return True
def main():
if debug:
print("debug mode")
#test_RobotNet()
#test_loss()
#test_evaluation()
test_one_train()
else:
if RUN_FUNCTION==1:
run_training()
elif RUN_FUNCTION==2:
run_testing()
else:
print("RUN_FUNCTION set error: 1 for run_training, 2 for run_testing")
if __name__ == '__main__':
main()
| apache-2.0 | 8,659,570,485,466,039,000 | 44.431211 | 135 | 0.569266 | false |
eirannejad/pyRevit | pyrevitlib/pyrevit/interop/adc.py | 1 | 5494 | """Wrapping Autodesk Desktop Connector API"""
#pylint: disable=bare-except,broad-except
import os.path as op
from pyrevit import PyRevitException
from pyrevit.framework import clr, Process
from pyrevit.coreutils import logger
mlogger = logger.get_logger(__name__)
ADC_NAME = "Autodesk Desktop Connector"
ADC_SHORTNAME = "ADC"
ADC_DRIVE_SCHEMA = '{drive_name}://'
ADC_DEFAULT_INSTALL_PATH = \
r'C:\Program Files\Autodesk\Desktop Connector'
ADC_API_DLL = 'Autodesk.DesktopConnector.API.dll'
ADC_API_DLL_PATH = op.join(ADC_DEFAULT_INSTALL_PATH, ADC_API_DLL)
API = None
if op.exists(ADC_API_DLL_PATH):
try:
clr.AddReferenceToFileAndPath(ADC_API_DLL_PATH)
import Autodesk.DesktopConnector.API as API #pylint: disable=import-error
except:
pass
def _ensure_api():
if not API:
raise PyRevitException("{} is not loaded".format(ADC_NAME))
return API
def _get_all_processids():
return [x.Id for x in Process.GetProcesses()]
def _get_adc():
api = _ensure_api()
return api.DesktopConnectorService()
def _get_drives_info(adc):
return list(adc.GetDrives())
def _get_drive_properties(adc, drive):
return adc.GetPropertyDefinitions(drive.Id)
def _get_drive_from_path(adc, path):
for drv_info in _get_drives_info(adc):
drive_schema = ADC_DRIVE_SCHEMA.format(drive_name=drv_info.Name)
if path.lower().startswith(drive_schema.lower()):
return drv_info
def _get_drive_from_local_path(adc, local_path):
for drv_info in _get_drives_info(adc):
drv_localpath = op.normpath(drv_info.WorkspaceLocation)
if op.normpath(local_path).startswith(drv_localpath):
return drv_info
def _drive_path_to_local_path(drv_info, path):
drive_schema = ADC_DRIVE_SCHEMA.format(drive_name=drv_info.Name)
return op.normpath(
op.join(
drv_info.WorkspaceLocation,
path.replace(drive_schema, '')
)
)
def _ensure_local_path(adc, path):
drv_info = _get_drive_from_path(adc, path)
if drv_info:
return _drive_path_to_local_path(drv_info, path)
elif not _get_drive_from_local_path(adc, path):
raise PyRevitException("Path is not inside any ADC drive")
return path
def _get_item(adc, path):
path = _ensure_local_path(adc, path)
if not op.isfile(path):
raise PyRevitException("Path does not point to a file")
res = adc.GetItemsByWorkspacePaths([path])
if not res:
raise PyRevitException("Can not find item in any ADC drive")
# grab the first item (we only except one since path is to a file)
return res[0].Item
def _get_item_drive(adc, item):
for drv_info in _get_drives_info(adc):
if drv_info.Id == item.DriveId:
return drv_info
def _get_item_lockstatus(adc, item):
res = adc.GetLockStatus([item.Id])
if res and res.Status:
return res.Status[0]
def _get_item_property_value(adc, drive, item, prop_name):
for prop_def in _get_drive_properties(adc, drive):
if prop_def.DisplayName == prop_name:
res = adc.GetProperties([item.Id], [prop_def.Id])
if res:
return res.Values[0]
def _get_item_property_id_value(adc, drive, item, prop_id):
for prop_def in _get_drive_properties(adc, drive):
if prop_def.Id == prop_id:
res = adc.GetProperties([item.Id], [prop_def.Id])
if res:
return res.Values[0]
def is_available():
"""Check if ADC service is available"""
try:
_get_adc().Discover()
return True
except Exception:
return False
def get_drive_paths():
"""Get dict of local paths for ADC drives"""
adc = _get_adc()
return {x.Name: x.WorkspaceLocation for x in _get_drives_info(adc)}
def get_local_path(path):
"""Convert ADC BIM360 drive path to local path"""
adc = _get_adc()
drv_info = _get_drive_from_path(adc, path)
if drv_info:
return _drive_path_to_local_path(drv_info, path)
def lock_file(path):
"""Lock given file"""
adc = _get_adc()
item = _get_item(adc, path)
adc.LockFile(item.Id)
def is_locked(path):
"""Check if file is locked"""
adc = _get_adc()
item = _get_item(adc, path)
lock_status = _get_item_lockstatus(adc, item)
return lock_status.State == API.LockState.LockedByOther, \
lock_status.LockOwner
def unlock_file(path):
"""Unlock given file"""
adc = _get_adc()
item = _get_item(adc, path)
adc.UnlockFile(item.Id)
def is_synced(path):
"""Check if file is synchronized"""
adc = _get_adc()
item = _get_item(adc, path)
drive = _get_item_drive(adc, item)
# ADC uses translated property names so
# check status property by its type "LocalState"
# see https://github.com/eirannejad/pyRevit/issues/1152
prop_val = _get_item_property_id_value(adc, drive, item, 'LocalState')
# possible values, 'Cached', 'Stale', 'Modified'
# .Value is not translated
return prop_val.Value == 'Cached'or prop_val.Value == 'Synced'
def sync_file(path, force=False):
"""Force ADC to sync given file to latest version"""
if not force and is_synced(path):
return
adc = _get_adc()
item = _get_item(adc, path)
# make sure path is local
local_path = _ensure_local_path(adc, path)
for proc_id in _get_all_processids():
adc.FileClosedWithinRunningProcess(proc_id, local_path)
adc.SyncFiles([item.Id])
| gpl-3.0 | 5,065,847,212,209,732,000 | 27.174359 | 81 | 0.644703 | false |
plumdog/django_migration_testcase | tests/test_app/tests.py | 1 | 8515 | import unittest
import django
from django.db import IntegrityError
from django.conf import settings
from django_migration_testcase import MigrationTest
from django_migration_testcase.base import InvalidModelStateError, idempotent_transaction
class ExampleMigrationTest(MigrationTest):
before = '0001_initial'
after = '0002_mymodel_number'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('MyModel')
for i in range(10):
mymodel = MyModel()
mymodel.name = 'example name {}'.format(i)
mymodel.save()
self.assertEqual(MyModel.objects.count(), 10)
self.run_migration()
MyModel = self.get_model_after('MyModel')
self.assertEqual(MyModel.objects.count(), 10)
def test_run_reverse_migration(self):
MyModel = self.get_model_before('MyModel')
for i in range(10):
mymodel = MyModel()
mymodel.name = 'example name {}'.format(i)
mymodel.save()
self.assertEqual(MyModel.objects.count(), 10)
self.run_migration()
MyModel = self.get_model_after('MyModel')
self.assertEqual(MyModel.objects.count(), 10)
self.run_reverse_migration()
self.assertEqual(MyModel.objects.count(), 10)
def test_invalid_field(self):
MyModel = self.get_model_before('MyModel')
mymodel = MyModel()
mymodel.number = 10
mymodel.save()
mymodel = MyModel.objects.get()
with self.assertRaises(AttributeError):
mymodel.number
self.run_migration()
MyModel = self.get_model_after('MyModel')
mymodel = MyModel.objects.get()
self.assertEqual(mymodel.number, None)
mymodel.number = 10
mymodel.save()
mymodel = MyModel.objects.get()
self.assertEqual(mymodel.number, 10)
def field_names(model_class):
try:
return model_class._meta.get_all_field_names()
except AttributeError:
return [f.name for f in model_class._meta.get_fields()]
class AddDoubleNumberTest(MigrationTest):
before = '0002_mymodel_number'
after = '0003_mymodel_double_number'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('MyModel')
self.assertNotIn('double_number', field_names(MyModel))
self.run_migration()
MyModel = self.get_model_after('MyModel')
self.assertIn('double_number', field_names(MyModel))
class MigrationsByNumberOnlyTest(MigrationTest):
before = '0002'
after = '0003'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('MyModel')
self.assertNotIn('double_number', field_names(MyModel))
self.run_migration()
MyModel = self.get_model_after('MyModel')
self.assertIn('double_number', field_names(MyModel))
class PopulateDoubleNumberTest(MigrationTest):
before = '0003_mymodel_double_number'
after = '0004_populate_mymodel_double_number'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('MyModel')
for i in range(10):
mymodel = MyModel()
mymodel.name = 'example name {}'.format(i)
mymodel.number = i
mymodel.save()
self.run_migration()
MyModel = self.get_model_after('MyModel')
for mymodel in MyModel.objects.all():
self.assertEqual(mymodel.number * 2, mymodel.double_number)
class GetModelMigrationTest(MigrationTest):
before = '0001_initial'
after = '0002_mymodel_number'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('test_app.MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
self.run_migration()
MyModel = self.get_model_after('test_app.MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
class ForeignKeyTest(MigrationTest):
before = '0004_populate_mymodel_double_number'
after = '0005_foreignmodel'
app_name = 'test_app'
def test_migration(self):
MyModel = self.get_model_before('test_app.MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
self.run_migration()
ForeignModel = self.get_model_after('test_app.ForeignModel')
self.assertEqual(ForeignModel.__name__, 'ForeignModel')
MyModel = self.get_model_after('test_app.MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
my = MyModel(name='test_my', number=1, double_number=3.14)
my.save()
ForeignModel(name='test_foreign', my=my)
def test_migration2(self):
"""Same test as test_migration, but this one passes."""
MyModel = self.get_model_before('test_app.MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
self.run_migration()
ForeignModel = self.get_model_after('test_app.ForeignModel')
self.assertEqual(ForeignModel.__name__, 'ForeignModel')
# get_model_before/get_model_after seems to not get the same model as
# this crazy thing.
if django.VERSION >= (2, 0):
MyModel = ForeignModel.my.field.related_model
else:
MyModel = ForeignModel.my.field.rel.to
self.assertEqual(MyModel.__name__, 'MyModel')
my = MyModel(name='test_my', number=1, double_number=3.14)
my.save()
ForeignModel(name='test_foreign', my=my)
def test_migration_clearly(self):
"""A clear illustration of the problem."""
self.run_migration()
ForeignModel = self.get_model_after('test_app.ForeignModel')
# get_model_before/get_model_after seems to not get the same model as
# this crazy thing.
if django.VERSION >= (2, 0):
MyModel = ForeignModel.my.field.related_model
else:
MyModel = ForeignModel.my.field.rel.to
MyModel2 = self.get_model_after('test_app.MyModel')
self.assertEqual(MyModel, MyModel2)
class UtilsMigrationTest(MigrationTest):
before = '0001_initial'
after = '0002_mymodel_number'
app_name = 'test_app'
def test_migration_not_run_exception(self):
with self.assertRaises(InvalidModelStateError):
self.get_model_after('MyModel')
with self.assertRaises(InvalidModelStateError):
self.run_reverse_migration()
def test_migration_already_run_exception(self):
self.run_migration()
with self.assertRaises(InvalidModelStateError):
self.get_model_before('MyModel')
class MigrateFromZero(MigrationTest):
before = 'zero'
after = '0001_initial'
app_name = 'test_app'
def test_model_exists(self):
with self.assertRaises(LookupError):
self.get_model_before('MyModel')
self.run_migration()
MyModel = self.get_model_after('MyModel')
self.assertEqual(MyModel.__name__, 'MyModel')
class TeardownCanFail(MigrationTest):
before = '0006'
after = '0007'
app_name = 'test_app'
def test_second_model_name_is_unique(self):
model_before = self.get_model_before('MySecondModel')
model_before.objects.create(name='foo')
model_before.objects.create(name='foo')
with self.assertRaises(IntegrityError):
self.run_migration()
def tearDown(self):
self.assertTrue(self.get_model_before('MySecondModel').objects.all().exists())
with self.assertRaises(IntegrityError):
# tearDown fails since migrations runs again with the data
super(TeardownCanFail, self).tearDown()
self.get_model_before('MySecondModel').objects.all().delete()
super(TeardownCanFail, self).tearDown()
@unittest.skipIf(django.VERSION < (1, 7), 'Not supported by older django versions')
@unittest.skipIf(django.VERSION >= (2, 0) and settings.DATABASES['default']['ENGINE'] == 'django.db.backends.sqlite3',
'Not supported with django2 with sqlite3')
class TeardownFailCanBeAvoidedWithIdempotentTransaction(MigrationTest):
before = '0006'
after = '0007'
app_name = 'test_app'
@idempotent_transaction
def test_second_model_name_is_unique(self):
model_before = self.get_model_before('MySecondModel')
model_before.objects.create(name='foo')
model_before.objects.create(name='foo')
with self.assertRaises(IntegrityError):
self.run_migration()
| mit | -2,998,300,733,460,082,000 | 29.629496 | 118 | 0.639342 | false |
vitalyisaev2/autojsoncxx | autojsoncxx.py | 1 | 19692 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Siyuan Ren ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import unicode_literals
from __future__ import print_function
import re
import argparse
import os
import hashlib
import sys
import io
from numbers import Number
# Python 2/3 compatibility layer
is_python2 = sys.version_info.major == 2
if is_python2:
str = unicode
# simplejson has the same interface as the standard json module, but with better error messages
try:
import simplejson as json
except ImportError:
import json
# parsimonious is required for parsing C++ type name
try:
import parsimonious
except ImportError:
parsimonious = None
# base class for all custom exceptions in this unit
class InvalidDefinitionError(Exception):
pass
class InvalidIdentifier(InvalidDefinitionError):
def __init__(self, identifier):
self.identifier = identifier
def __str__(self):
return "Invalid string for C++ identifier: " + repr(self.identifier)
class InvalidNamespace(InvalidDefinitionError):
def __init__(self, namespace):
self.namespace = namespace
def __str__(self):
return "Invalid namespace: " + repr(self.namespace)
class UnrecognizedOption(InvalidDefinitionError):
def __init__(self, option):
self.option = option
def __str__(self):
return "Unrecognized option: " + repr(self.option)
class UnsupportedTypeError(InvalidDefinitionError):
def __init__(self, type_name):
self.type_name = type_name
def __str__(self):
return "Unsupported C++ type: " + repr(self.type_name)
# convert arbitrary byte sequence into a C++ string literal by escaping every character
if is_python2:
def cstring_literal(byte_string):
return '"' + ''.join('\\x{:02x}'.format(ord(char)) for char in byte_string) + '"'
else:
def cstring_literal(byte_string):
return '"' + ''.join('\\x{:02x}'.format(char) for char in byte_string) + '"'
def check_identifier(identifier):
if not re.match(r'^[A-Za-z_]\w*$', identifier):
raise InvalidIdentifier(identifier)
class ClassInfo(object):
accept_options = {"name", "namespace", "parse_mode", "members", "constructor_code", "comment", "no_duplicates"}
def __init__(self, record):
self._name = record['name']
self._members = [MemberInfo(r) for r in record['members']]
self._strict = record.get('parse_mode', '') == 'strict'
self._namespace = record.get("namespace", None)
self._constructor_code = record.get("constructor_code", "")
self._no_duplicates = record.get("no_duplicates", False)
check_identifier(self._name)
if self._namespace is not None and not re.match(r'^(?:::)?[A-Za-z_]\w*(?:::[A-Za-z_]\w*)*$', self._namespace):
raise InvalidNamespace(self._namespace)
for op in record:
if op not in ClassInfo.accept_options:
raise UnrecognizedOption(op)
@property
def name(self):
return self._name
@property
def qualified_name(self):
if self.namespace is None:
return '::' + self.name
if self.namespace.startswith('::'):
return self.namespace + '::' + self.name
return '::' + self.namespace + '::' + self.name
@property
def members(self):
return self._members
@property
def strict_parsing(self):
return self._strict
@property
def namespace(self):
return self._namespace
@property
def constructor_code(self):
return self._constructor_code
@property
def no_duplicates(self):
return self._no_duplicates
class ClassDefinitionCodeGenerator(object):
def __init__(self, class_info):
self._class_info = class_info
@property
def class_info(self):
return self._class_info
def member_declarations(self):
return '\n'.join(m.type_name + ' ' + m.variable_name + ';' for m in self.class_info.members)
def initializer_list(self):
return ', '.join('{0}({1})'.format(m.variable_name, m.constructor_args) for m in self.class_info.members)
def constructor(self):
return 'explicit {name}():{init} {{ {code} }}\n'.format(name=self.class_info.name,
init=self.initializer_list(),
code=self.class_info.constructor_code)
def class_definition(self):
class_def = 'struct {name} {{\n {declarations}\n\n{constructor}\n\n \n}};' \
.format(name=self.class_info.name, declarations=self.member_declarations(),
constructor=self.constructor())
if self.class_info.namespace is not None:
for space in reversed(self.class_info.namespace.split('::')):
if space:
class_def = 'namespace {} {{ {} }}\n'.format(space, class_def)
return class_def
class MemberInfo(object):
accept_options = {'default', 'required', 'json_key', 'comment', 'minLength', 'maxLength', 'pattern', 'format', 'enum', 'oneOf','containerTypeProperty', 'minimum', 'maximum', 'minItems', 'maxItems', 'uniqueItems'}
def __init__(self, record):
self._record = record
if '*' in self.type_name or '&' in self.type_name:
raise UnsupportedTypeError(self.type_name)
check_identifier(self.variable_name)
if len(record) > 3:
raise UnrecognizedOption(record[3:])
@property
def type_name(self):
return self._record[0]
@property
def variable_name(self):
return self._record[1]
@property
def json_key(self):
try:
return self._record[2]['json_key'].encode('utf-8')
except (IndexError, KeyError):
return self.variable_name.encode('utf-8')
@property
def is_required(self):
return self._record[2]['required']
@property
def default(self):
try:
return self._record[2]['default']
except (IndexError, KeyError):
return None
@property
def constructor_args(self):
return MemberInfo.cpp_repr(self.default)
@staticmethod
def cpp_repr(args):
if args is None:
return ''
elif args is True:
return 'true'
elif args is False:
return 'false'
elif isinstance(args, str):
return cstring_literal(args.encode('utf-8'))
elif isinstance(args, Number):
return str(args)
elif isinstance(args, bytes):
return cstring_literal(args)
else:
raise UnrecognizedOption("default=" + repr(args))
class HelperClassCodeGenerator(object):
def __init__(self, class_info):
self._class_info = class_info
@property
def class_info(self):
return self._class_info
@property
def members_info(self):
return self._class_info.members
def handler_declarations(self):
return '\n'.join('SAXEventHandler< {} > handler_{};'.format(m.type_name, i)
for i, m in enumerate(self.members_info))
def handler_initializers(self):
return '\n'.join(', handler_{}(&obj->{})'.format(i, m.variable_name)
for i, m in enumerate(self.members_info))
def flags_declaration(self):
return '\n'.join('bool has_{};'.format(m.variable_name) for m in self.members_info
if self.class_info.no_duplicates or m.is_required)
def flags_reset(self):
return '\n'.join(self.flag_statement(m, "false") for m in self.members_info)
def post_validation(self):
return '\n'.join('if (!has_{0}) set_missing_required("{0}");'
.format(m.variable_name) for m in self.members_info if m.is_required)
def key_event_handling(self):
return '\n'.join('else if (utility::string_equal(str, length, {key}, {key_length}))\n\
{{ state={state}; {dup_check} {set_flag} }}'
.format(key=cstring_literal(m.json_key), key_length=len(m.json_key),
state=i, dup_check=self.check_for_duplicate_key(m),
set_flag=self.flag_statement(m, "true"))
for i, m in enumerate(self.members_info))
def event_forwarding(self, call_text):
return '\n\n'.join('case {i}:\n return checked_event_forwarding(handler_{i}.{call});'
.format(i=i, call=call_text) for i in range(len(self.members_info)))
def error_reaping(self):
return '\n'.join('case {0}:\n handler_{0}.ReapError(errs); break;'.format(i)
for i in range(len(self.members_info)))
def writer_type_name(self):
return "Writer" + hashlib.sha256(self.class_info.qualified_name.encode()).hexdigest()
def data_serialization(self):
return '\n'.join('w.Key({}, {}, false); Serializer< {}, {} >()(w, value.{});'
.format(cstring_literal(m.json_key), len(m.json_key),
self.writer_type_name(), m.type_name, m.variable_name)
for m in self.members_info)
def current_member_name(self):
return '\n'.join('case {}:\n return "{}";'.format(i, m.variable_name)
for i, m in enumerate(self.members_info))
def unknown_key_handling(self):
if self.class_info.strict_parsing:
return 'the_error.reset(new error::UnknownFieldError(str, length)); return false;'
else:
return 'return true;'
def count_of_members(self):
return str(len(self.members_info))
def flag_statement(self, member_info, flag):
if self.class_info.no_duplicates or member_info.is_required:
return 'has_{} = {};'.format(member_info.variable_name, flag)
else:
return ''
def check_for_duplicate_key(self, member_info):
if self.class_info.no_duplicates:
return 'if (has_{}) the_error.reset(new error::DuplicateKeyError(current_member_name()));\n'.\
format(member_info.variable_name)
else:
return ''
def prepare_for_reuse(self):
return ''.join('handler_{}.PrepareForReuse();\n'.format(i) for i in range(len(self.members_info)))
class CPPTypeNameChecker(object):
# PEG grammar for parsing the C++ type name we support
# Note that raw pointer, reference, array, void, enum, function and pointer-to-member types are not supported
PEG_GRAMMAR = r'''
type = (space cv_type space "<" space type_list space ">" space) / ( space cv_type space )
type_list = (type space "," space type_list) / type / space
cv_type = c_and_v_type / c_or_v_type / simple_type
c_and_v_type = ("const" space "volatile" space simple_type) / ("volatile" space "const" space simple_type)
c_or_v_type = ("const" space simple_type) / ("volatile" space simple_type)
simple_type = spaced_type / ("::"? identifier ("::" identifier)*)
spaced_type = sign_type / long_type
sign_type = ("unsigned" / "signed") space ( ("long" space "long"? space "int"?) / "int" / "char")
long_type = ("long" space "long" space "int") / ("long" space "long") / ("long" space "int")
identifier = ~"[A-Za-z_][A-Za-z_0-9]*"
space = ~"[ \t]*"
'''
KNOWN_BASIC_TYPE_NAMES = frozenset(['bool', 'char', 'int', 'unsigned int', 'unsigned', 'long long', 'long long int',
'unsigned long long', 'unsigned long long int', 'std::uint32_t', 'std::int32_t',
'std::uint64_t', 'std::int64_t', 'uint32_t', 'int32_t', 'uint64_t', 'int64_t',
'std::nullptr_t',
'std::size_t', 'size_t', 'std::ptrdiff_t', 'ptrdiff_t',
'double', 'std::string', 'std::vector', 'std::deque', 'std::array',
'boost::container::vector', 'boost::container::deque', 'boost::array',
'std::shared_ptr', 'std::unique_ptr', 'boost::shared_ptr', 'boost::optional',
'std::map', 'std::unordered_map', 'std::multimap', 'std::unordered_multimap',
'boost::unordered_map', 'boost::unordered_multimap', 'std::tuple'])
ParseError = parsimonious.ParseError if parsimonious else None
def __init__(self):
self._grammar = parsimonious.Grammar(CPPTypeNameChecker.PEG_GRAMMAR)
self._known_names = set(CPPTypeNameChecker.KNOWN_BASIC_TYPE_NAMES)
@staticmethod
def __extract_simple_type(node):
if node.expr_name == 'simple_type':
yield node.text.lstrip(':')
for sub_node in node.children:
for value in CPPTypeNameChecker.__extract_simple_type(sub_node):
yield value
def check_for_unknown_basic_types(self, name):
"""
:param name: the full name of the type to check
:return: a list of unknown basic types
"""
node = self.grammar.parse(name)
simple_types = set(self.__extract_simple_type(node))
return simple_types - self.known_names
@property
def grammar(self):
return self._grammar
@property
def known_names(self):
return self._known_names
def build_class(template, class_info):
gen = HelperClassCodeGenerator(class_info)
replacement = {
"class definition": ClassDefinitionCodeGenerator(class_info).class_definition(),
"list of declarations": gen.handler_declarations() + gen.flags_declaration(),
"init": gen.handler_initializers(),
"serialize all members": gen.data_serialization(),
"change state": gen.key_event_handling(),
"reap error": gen.error_reaping(),
"get member name": gen.current_member_name(),
"validation": gen.post_validation(),
"reset flags": gen.flags_reset(),
"handle unknown key": gen.unknown_key_handling(),
"TypeName": class_info.qualified_name,
"count of members": gen.count_of_members(),
"Writer": gen.writer_type_name(),
"call PrepareForReuse": gen.prepare_for_reuse()
}
def evaluate(match):
try:
return replacement[match.group(1)]
except KeyError:
match = re.match(r'forward (.*?) to members', match.group(1))
if match:
return gen.event_forwarding(match.group(1))
else:
raise
return re.sub(r'/\*\s*(.*?)\s*\*/', evaluate, template)
def warn_if_name_unknown(checker, class_info):
checker.known_names.add(class_info.qualified_name.lstrip(':'))
for m in class_info.members:
try:
unknowns = checker.check_for_unknown_basic_types(m.type_name)
for u in unknowns:
print("Warning:", "The type", repr(u), "may not be recognized", file=sys.stderr)
print("\tReferenced from variable", repr(m.variable_name),
"in class", repr(class_info.qualified_name), "\n", file=sys.stderr)
except CPPTypeNameChecker.ParseError:
print("Warning:", "The type", repr(m.type_name), "is not valid", file=sys.stderr)
print("\tReferenced from variable", repr(m.variable_name),
"in class", repr(class_info.qualified_name), "\n", file=sys.stderr)
def main():
parser = argparse.ArgumentParser(description='`autojsoncxx` code generator '
'(visit https://github.com/netheril96/autojsoncxx for details)')
parser.add_argument('-c', '--check', help='check the type names specified; requires `parsimonious` to be installed',
action='store_true', default=False)
parser.add_argument('-i', '--input', help='input name for the definition file for classes', required=True)
parser.add_argument('-o', '--output', help='output name for the header file', default=None)
parser.add_argument('--template', help='location of the template file', default=None)
args = parser.parse_args()
if args.check and not parsimonious:
print("Unable to import module `parsimonious`", "Type checks disabled", "", sep='\n', file=sys.stderr)
args.check = False
if args.template is None:
# On Windows code_template located in the same folder as the executable
if getattr(sys, 'frozen', False):
template_dir = os.path.dirname(sys.executable)
args.template = os.path.join(template_dir, 'code_template')
# On UNIX It's worth checking system directories while looking for a code_template
else:
possible_template_dirs = [
"/usr/local/share/autojsoncxx",
"/usr/share/autojsoncxx",
os.path.dirname(os.path.abspath(__file__)),
]
custom_template_dir = os.environ.get("AUTOJSONCXX_TEMPLATE_DIR")
if custom_template_dir and os.path.isdir(custom_template_dir):
possible_template_dirs.insert(0, custom_template_dir)
possible_template_pathes = (os.path.join(d, 'code_template') for d in possible_template_dirs)
args.template = next(p for p in possible_template_pathes if os.path.isfile(p))
if args.output is None:
args.output = os.path.basename(args.input)
args.output = os.path.splitext(args.output)[0] + '.hpp'
if args.check:
checker = CPPTypeNameChecker()
else:
checker = None
with io.open(args.template, encoding='utf-8') as f:
template = f.read()
with io.open(args.input, encoding='utf-8') as f:
raw_record = json.load(f)
with io.open(args.output, 'w', encoding='utf-8') as output:
output.write('#pragma once\n\n')
def output_class(class_record):
class_info = ClassInfo(class_record)
if args.check:
warn_if_name_unknown(checker, class_info)
output.write(build_class(template, class_info))
if isinstance(raw_record, list):
for r in raw_record:
print("Processing:", r)
output_class(r)
else:
output_class(raw_record)
if __name__ == '__main__':
main()
| mit | 2,564,326,828,184,291,300 | 36.869231 | 216 | 0.595826 | false |
facebook/redex | tools/bytecode_debugger/inject-debug.py | 1 | 1222 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import os
from pyredex.utils import with_temp_cleanup
from tools.bytecode_debugger.inject_debug_lib import run_debug_injector
def arg_parser():
description = "Injects bytecode-level debug information into an APK"
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter, description=description
)
parser.add_argument("bin_path", help="Path to program binary")
parser.add_argument("input_apk", help="Input APK file")
parser.add_argument(
"-o",
"--output_apk",
nargs="?",
type=os.path.realpath,
default="out.apk",
help="Output APK file name (defaults to out.apk)",
)
parser.add_argument("-s", "--keystore", nargs="?", default=None)
parser.add_argument("-a", "--keyalias", nargs="?", default=None)
parser.add_argument("-p", "--keypass", nargs="?", default=None)
return parser
if __name__ == "__main__":
args = arg_parser().parse_args()
with_temp_cleanup(lambda: run_debug_injector(args))
| mit | -4,664,302,263,808,888,000 | 32.944444 | 85 | 0.670213 | false |
crankycyclops/epubtools | drivers/input/driver.py | 1 | 2618 | # -*- coding: utf-8 -*-
import os
from abc import ABCMeta, abstractmethod
from exception import InputException
from ..domnode import EbookNode
# Input driver base class
class Driver:
__metaclass__ = ABCMeta
##########################################################################
# Constructor
def __init__(self):
# We use a DOM-like structure to represent the contents of an ebook.
# Parts and chapters are all children of this node.
self.DOMRoot = EbookNode('ebook')
# Represents our current location in the ebook's "DOM" while parsing.
self._curDOMNode = self.DOMRoot
##########################################################################
# Opens the input source for reading and throws an exception if opening
# the document failed. If a driver needs to do more than what this method
# does, then it should override this function and call super().open().
def open(self, filename):
self._inputPath = filename
if not os.path.exists(self._inputPath):
raise InputException('Could not open ' + self._inputPath + ' for reading.')
##########################################################################
# Parse the input document into a DOM-like representation and return it,
# along with the contents of self.chapterLog, so that the output driver can
# work its black voodoo magic.
# TODO: detail the DOM structure in more detail
@abstractmethod
def parse(self):
pass
##########################################################################
# Cleans up after parsing is complete. If there's no cleanup to do for a
# particular driver, just implement an empty function.
@abstractmethod
def cleanup(self):
pass
###########################################################################
# A debug method that allows me to see what the parsed DOM looks like.
def printTree(self, curNode = None, indent = ''):
if curNode is None:
curNode = self.DOMRoot
nodeAttributes = '{'
for key in curNode.attributes.keys():
nodeAttributes += "'" + key + "': " + str(curNode.attributes[key]) + ", "
if len(nodeAttributes) > 1:
nodeAttributes = nodeAttributes[0:len(nodeAttributes) - 2]
nodeAttributes += '}'
if isinstance(curNode.value, (bytes, bytearray)):
nodeValue = '<Binary Data>'
else:
nodeValue = curNode.value
print('')
print(indent + 'nodeType: ' + curNode.nodeType)
print(indent + 'attributes: ' + nodeAttributes)
print(indent + 'value: ' + nodeValue)
print(indent + 'children: ' + str(curNode.childCount()))
if curNode.children:
for child in curNode.children:
self.printTree(child, indent + '\t')
| gpl-3.0 | 658,955,758,611,459,200 | 29.44186 | 78 | 0.60275 | false |
yandex/mastermind | src/cocaine-app/jobs/tasks/task.py | 1 | 2211 | import uuid
class Task(object):
STATUS_QUEUED = 'queued'
STATUS_EXECUTING = 'executing'
STATUS_FAILED = 'failed'
STATUS_SKIPPED = 'skipped'
STATUS_COMPLETED = 'completed'
def __init__(self, job):
self.status = self.STATUS_QUEUED
self.id = uuid.uuid4().hex
self.type = None
self.start_ts = None
self.finish_ts = None
self.attempts = 0
self.error_msg = []
self.parent_job = job
def on_exec_start(self, processor):
"""
Called every time task changes status from 'queued' to 'executing'
"""
pass
def on_exec_stop(self, processor):
"""
Called every time task changes status from 'executing' to anything else
"""
pass
@classmethod
def new(cls, job, **kwargs):
task = cls(job)
for param in cls.PARAMS:
setattr(task, param, kwargs.get(param))
return task
@classmethod
def from_data(cls, data, job):
task = cls(job)
task.load(data)
return task
def load(self, data):
# TODO: remove 'or' part
self.id = data['id'] or uuid.uuid4().hex
self.status = data['status']
self.type = data['type']
self.start_ts = data['start_ts']
self.finish_ts = data['finish_ts']
self.error_msg = data['error_msg']
self.attempts = data.get('attempts', 0)
for param in self.PARAMS:
val = data.get(param)
if isinstance(val, unicode):
val = val.encode('utf-8')
setattr(self, param, val)
def dump(self):
res = {'status': self.status,
'id': self.id,
'type': self.type,
'start_ts': self.start_ts,
'finish_ts': self.finish_ts,
'error_msg': self.error_msg,
'attempts': self.attempts}
res.update({
k: getattr(self, k)
for k in self.PARAMS
})
return res
def human_dump(self):
return self.dump()
def __str__(self):
raise RuntimeError('__str__ method should be implemented in '
'derived class')
| gpl-2.0 | 5,557,278,604,231,714,000 | 25.963415 | 79 | 0.526006 | false |
nmercier/linux-cross-gcc | linux/lib/python2.7/dist-packages/ccm/Widgets.py | 1 | 58989 | # -*- coding: UTF-8 -*-
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Authors: Quinn Storm ([email protected])
# Patrick Niklaus ([email protected])
# Guillaume Seguin ([email protected])
# Christopher Williams ([email protected])
# Copyright (C) 2007 Quinn Storm
import pygtk
import gtk
import gtk.gdk
import gobject
import cairo, pangocairo
from math import pi, sqrt
import time
import re
import mimetypes
mimetypes.init()
from ccm.Utils import *
from ccm.Constants import *
from ccm.Conflicts import *
import locale
import gettext
locale.setlocale(locale.LC_ALL, "")
gettext.bindtextdomain("ccsm", DataDir + "/locale")
gettext.textdomain("ccsm")
_ = gettext.gettext
#
# Try to use gtk like coding style for consistency
#
class ClearEntry(gtk.Entry):
    """Text entry with a trailing "clear" icon that empties the field."""

    def __init__(self):
        gtk.Entry.__init__(self)
        icon_pos = gtk.ENTRY_ICON_SECONDARY
        self.set_icon_from_stock(icon_pos, gtk.STOCK_CLEAR)
        self.set_icon_tooltip_text(icon_pos, _("Clear"))
        self.connect('icon-press', self._clear_pressed)

    def _clear_pressed(self, widget, pos, event):
        # Only the secondary (clear) icon wipes the text.
        if pos != gtk.ENTRY_ICON_SECONDARY:
            return
        self.set_text("")
# Cell Renderer for MultiList
class CellRendererColor(gtk.GenericCellRenderer):
    """Custom cell renderer that paints a color swatch in a tree view cell.

    The color is supplied through the "text" property as a 16-bit-per-channel
    hex string of the form "#rrrrggggbbbbaaaa".  Because the color may be
    translucent, it is painted on top of a cached checkerboard pattern so the
    alpha component stays visible.
    """

    __gproperties__ = {
        'text': (gobject.TYPE_STRING,
                'color markup text',
                'The color as markup like this: #rrrrggggbbbbaaaa',
                '#0000000000000000',
                gobject.PARAM_READWRITE)
    }

    # Current color string and its parsed [r, g, b, a] floats in 0.0-1.0.
    _text = '#0000000000000000'
    _color = [0, 0, 0, 0]
    # Cached checkerboard background surface and its (width, height).
    _surface = None
    _surface_size = (-1, -1)

    def __init__(self):
        gtk.GenericCellRenderer.__init__(self)

    def _parse_color(self):
        """Split self._text into a gdk color part plus a 16-bit alpha tail."""
        # Everything but the last 4 hex digits ("#rrrrggggbbbb") is a
        # gdk-parsable color; the trailing 4 digits are the alpha channel.
        color = gtk.gdk.color_parse(self._text[:-4])
        alpha = int("0x%s" % self._text[-4:], base=16)
        self._color = [color.red/65535.0, color.green/65535.0, color.blue/65535.0, alpha/65535.0]

    def do_set_property(self, property, value):
        if property.name == 'text':
            self._text = value
            self._parse_color()

    def do_get_property(self, property):
        if property.name == 'text':
            return self._text

    def on_get_size(self, widget, cell_area):
        return (0, 0, 0, 0) # FIXME

    def redraw(self, width, height):
        """Rebuild the cached checkerboard surface at (at least) the given size."""
        # found in gtk-color-button.c
        CHECK_SIZE = 4
        CHECK_DARK = 21845 # 65535 / 3
        CHECK_LIGHT = 43690

        # Overdraw a little so partial checks at the edges are covered.
        width += 10
        height += 10

        self._surface_size = (width, height)
        self._surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
        cr = cairo.Context(self._surface)

        x = 0
        y = 0
        colors = [CHECK_DARK, CHECK_LIGHT]
        state = 0
        begin_state = 0
        # Fill row by row, flipping the starting shade on each new row to
        # produce the alternating checkerboard pattern.
        while y < height:
            while x < width:
                cr.rectangle(x, y, CHECK_SIZE, CHECK_SIZE)
                c = colors[state] / 65535.0
                cr.set_source_rgb(c, c, c)
                cr.fill()
                x += CHECK_SIZE
                state = not state
            state = not begin_state
            begin_state = state
            x = 0
            y += CHECK_SIZE

    def on_render(self, window, widget, background_area, cell_area, expose_area, flags):
        cr = window.cairo_create()

        height, width = (cell_area.height, cell_area.width)
        sheight, swidth = self._surface_size
        # Regenerate the cached checkerboard if the cell outgrew it.
        if height > sheight or width > swidth:
            self.redraw(width, height)

        cr.rectangle(cell_area.x, cell_area.y, width, height)
        cr.clip()

        # Checkerboard first, then the (possibly translucent) color on top.
        cr.set_source_surface(self._surface, cell_area.x, cell_area.y)
        cr.paint()

        r, g, b, a = self._color
        cr.set_source_rgba(r, g, b, a)
        cr.paint()
class PluginView(gtk.TreeView):
    """Tree view listing plugins as icon plus short description.

    Each row stores (short description, icon pixbuf, enabled flag, plugin
    object) behind a filter model so the list can be narrowed via Filter().
    Assign a callable to SelectionHandler to be notified of selection
    changes; it receives the selected plugin object or None.
    """

    def __init__(self, plugins):
        store = gtk.ListStore(str, gtk.gdk.Pixbuf, bool, object)
        self.model = store.filter_new()
        gtk.TreeView.__init__(self, self.model)

        self.SelectionHandler = None
        self.Plugins = set(plugins)

        # One row per plugin, in display order.
        for plugin in sorted(plugins.values(), key=PluginKeyFunc):
            pixbuf = Image(plugin.Name, type=ImagePlugin).props.pixbuf
            store.append([plugin.ShortDesc, pixbuf, plugin.Enabled, plugin])

        col = self.insert_column_with_attributes(0, _('Plugin'), gtk.CellRendererPixbuf(), pixbuf=1, sensitive=2)
        text_cell = gtk.CellRendererText()
        text_cell.props.wrap_width = 200
        col.pack_start(text_cell)
        col.set_attributes(text_cell, text=0)

        self.model.set_visible_func(self.VisibleFunc)
        self.get_selection().connect('changed', self.SelectionChanged)

    def VisibleFunc(self, model, iter):
        # A row is shown iff its plugin name is in the current filter set.
        return model[iter][3].Name in self.Plugins

    def Filter(self, plugins):
        """Restrict the visible rows to the given plugin names."""
        self.Plugins = set(plugins)
        self.model.refilter()

    def SelectionChanged(self, selection):
        model, iter = selection.get_selected()
        selected = None
        if iter is not None:
            selected = model[iter][3]
        return self.SelectionHandler(selected)
class GroupView(gtk.TreeView):
    """Single-column tree view used to pick a settings group.

    The model stores (display label, raw group name) pairs; assign a
    callable to SelectionHandler to receive the raw name of the selected
    row.  The widget hides itself whenever Update() finds no named groups.
    """

    def __init__(self, name):
        self.model = gtk.ListStore(str, str)
        gtk.TreeView.__init__(self, self.model)

        self.SelectionHandler = None
        self.Visible = set()

        renderer = gtk.CellRendererText()
        renderer.props.ypad = 5
        renderer.props.wrap_width = 200
        self.append_column(gtk.TreeViewColumn(name, renderer, text=0))

        self.get_selection().connect('changed', self.SelectionChanged)

        # Start hidden; Update() decides whether we should be visible.
        self.hide_all()
        self.props.no_show_all = True

    def Update(self, items):
        """Refill the list from items and toggle the widget's visibility."""
        self.model.clear()
        self.model.append([_('All'), 'All'])

        named = 0
        for item in items:
            self.model.append([item or _("General"), item])
            if item: # the unnamed "General" entry is not counted
                named += 1

        if named:
            self.show_all()
            self.props.no_show_all = False
        else:
            self.hide_all()
            self.props.no_show_all = True

    def SelectionChanged(self, selection):
        model, iter = selection.get_selected()
        if iter is None:
            return None
        return self.SelectionHandler(model[iter][1])
# Selector Buttons
#
class SelectorButtons(gtk.HBox):
    """Breadcrumb-style navigation row: flat buttons separated by arrows.

    The first button has no leading arrow; every later button is preceded
    by one, so ``arrows[i-1]`` belongs to ``buttons[i]``.
    """
    def __init__(self):
        gtk.HBox.__init__(self)
        self.set_border_width(10)
        self.set_spacing(5)
        self.buttons = []
        self.arrows = []
    def clear_buttons(self):
        """Remove every crumb (buttons and their separating arrows)."""
        for child in self.arrows + self.buttons:
            child.destroy()
        self.arrows = []
        self.buttons = []
    def add_button(self, label, callback):
        """Append a crumb labelled *label*; clicking it invokes *callback*."""
        sep = gtk.Arrow(gtk.ARROW_RIGHT, gtk.SHADOW_NONE)
        btn = gtk.Button(label)
        btn.set_relief(gtk.RELIEF_NONE)
        btn.connect('clicked', self.on_button_clicked, callback)
        if self.get_children():
            # Not the first crumb: separate it from the previous one.
            self.pack_start(sep, False, False)
            self.arrows.append(sep)
        self.pack_start(btn, False, False)
        self.buttons.append(btn)
        self.show_all()
    def remove_button(self, pos):
        """Drop the crumb at index *pos* together with its leading arrow."""
        if pos >= len(self.buttons):
            return
        btn = self.buttons.pop(pos)
        btn.destroy()
        if pos > 0:
            sep = self.arrows.pop(pos - 1)
            sep.destroy()
    def on_button_clicked(self, widget, callback):
        """Invoke the crumb's callback, flagging that it came from here."""
        callback(selector=True)
# Selector Box
#
class SelectorBox(gtk.ScrolledWindow):
    """Scrollable vertical list of flat buttons used by the selector columns.

    Each entry is a button with an optional icon and secondary info line;
    clicking an entry invokes the supplied callback with the item value.
    """
    def __init__(self, backgroundColor):
        gtk.ScrolledWindow.__init__(self)
        self.viewport = gtk.Viewport()
        self.viewport.modify_bg(gtk.STATE_NORMAL, gtk.gdk.color_parse(backgroundColor))
        self.props.hscrollbar_policy = gtk.POLICY_NEVER
        self.props.vscrollbar_policy = gtk.POLICY_AUTOMATIC
        self.box = gtk.VBox()
        self.box.set_spacing(5)
        self.viewport.add(self.box)
        self.add(self.viewport)
    def close(self):
        """Destroy this widget and everything it contains."""
        self.destroy()
        self.viewport.destroy()
        for button in self.box.get_children():
            button.destroy()
        self.box.destroy()
    def add_item(self, item, callback, markup="%s", image=None, info=None):
        """Append one entry.

        item     -- value passed back to *callback* on click; an empty
                    value is displayed as "General".
        callback -- invoked as callback(button, item) when clicked.
        markup   -- pango markup template applied to the escaped item text.
        image    -- optional widget packed left of the label.
        info     -- optional small secondary text line below the label.
        """
        button = gtk.Button()
        label = Label(wrap=170)
        text = protect_pango_markup(item)
        label.set_markup(markup % text or _("General"))
        labelBox = gtk.VBox()
        labelBox.set_spacing(5)
        labelBox.pack_start(label)
        if info:
            infoLabel = Label()
            infoLabel.set_markup("<span size='small'>%s</span>" % info)
            labelBox.pack_start(infoLabel)
        box = gtk.HBox()
        box.set_spacing(5)
        if image:
            box.pack_start(image, False, False)
        box.pack_start(labelBox)
        button.add(box)
        button.connect("clicked", callback, item)
        button.set_relief(gtk.RELIEF_NONE)
        self.box.pack_start(button, False, False)
    def clear_list(self):
        """Remove every entry."""
        for button in self.box.get_children():
            button.destroy()
    def set_item_list(self, list, callback):
        """Replace all entries with *list*, wiring each one to *callback*.

        Bug fix: *callback* was previously dropped when forwarding to
        add_item, which raised a TypeError (missing required argument)
        on every call.
        """
        self.clear_list()
        for item in list:
            self.add_item(item, callback)
        self.box.show_all()
# Scrolled List
#
class ScrolledList(gtk.ScrolledWindow):
    """Scrollable single-column, single-selection string list with helpers
    to read, edit and reorder its rows (used for list-type settings)."""
    def __init__(self, name):
        gtk.ScrolledWindow.__init__(self)
        self.props.hscrollbar_policy = gtk.POLICY_NEVER
        self.props.vscrollbar_policy = gtk.POLICY_AUTOMATIC
        self.store = gtk.ListStore(gobject.TYPE_STRING)
        self.view = gtk.TreeView(self.store)
        self.view.set_headers_visible(True)
        self.view.insert_column_with_attributes(-1, name, gtk.CellRendererText(), text=0)
        self.set_size_request(300, 300)
        self.add(self.view)
        self.select = self.view.get_selection()
        self.select.set_mode(gtk.SELECTION_SINGLE)
    def get_list(self):
        """Return all non-empty row values, top to bottom."""
        values = []
        iter = self.store.get_iter_first()
        while iter:
            value = self.store.get(iter, 0)[0]
            if value != "":
                values.append(value)
            iter = self.store.iter_next(iter)
        return values
    def clear(self):
        """Remove every row."""
        self.store.clear()
    def append(self, value):
        """Append *value* as a new row at the bottom."""
        iter = self.store.append()
        self.store.set(iter, 0, value)
    def set(self, pos, value):
        """Overwrite the row at path *pos* with *value*."""
        iter = self.store.get_iter(pos)
        self.store.set(iter, 0, value)
    def delete(self, b):
        """Remove the selected row (selection mode is SINGLE)."""
        selected_rows = self.select.get_selected_rows()[1]
        for path in selected_rows:
            iter = self.store.get_iter(path)
            self.store.remove(iter)
    def move_up(self, b):
        """Swap the selected row with the one above it, if any.

        Fixed: the previous row is now located directly from the selected
        path instead of walking the whole store from the top (which was
        O(n) and could call get_path(None) if the walk overran the end).
        """
        selected_rows = self.select.get_selected_rows()[1]
        if len(selected_rows) == 1:
            path = selected_rows[0]
            if path[0] > 0:
                iter = self.store.get_iter(path)
                prev = self.store.get_iter((path[0] - 1,))
                self.store.swap(iter, prev)
    def move_down(self, b):
        """Swap the selected row with the one below it, if any."""
        selected_rows = self.select.get_selected_rows()[1]
        if len(selected_rows) == 1:
            iter = self.store.get_iter(selected_rows[0])
            next = self.store.iter_next(iter)
            if next is not None:
                self.store.swap(iter, next)
# Button modifier selection widget
#
class ModifierSelector (gtk.DrawingArea):
    """2x2 grid of clickable cells toggling the Shift/Control/Super/Alt
    key modifiers.

    The current selection is a list of modifier names, exposed as a
    "|"-separated string through the ``current`` property.  Emits
    "added"/"removed" with the modifier name when a cell is toggled.
    """
    __gsignals__ = {"added" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE, [gobject.TYPE_STRING]),
                    "removed" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE, [gobject.TYPE_STRING])}
    # Class-level defaults; _current is rebound per instance in __init__.
    _current = []
    # Cached cairo surfaces: the cell background image and the composited result.
    _base_surface = None
    _surface = None
    # Cell grid origin and cell size in pixels.
    _x0 = 0
    _y0 = 12
    _width = 100
    _height = 50
    _font = "Sans 12 Bold"
    def __init__ (self, mods):
        '''Prepare widget.  *mods* is the initial "|"-separated modifier string.'''
        super (ModifierSelector, self).__init__ ()
        self._current = mods.split ("|")
        modifier = "%s/modifier.png" % PixmapDir
        self._base_surface = cairo.ImageSurface.create_from_png (modifier)
        self.add_events (gtk.gdk.BUTTON_PRESS_MASK)
        self.connect ("expose_event", self.expose)
        self.connect ("button_press_event", self.button_press)
        self.set_size_request (200, 120)
        x0, y0, width, height = self._x0, self._y0, self._width, self._height
        # Top-left corner of each modifier cell.
        self._modifiers = {
            "Shift" : (x0, y0),
            "Control" : (x0, y0 + height),
            "Super" : (x0 + width, y0),
            "Alt" : (x0 + width, y0 + height)
            }
        # Display names differing from the internal modifier name.
        self._names = {
            "Control" : "Ctrl"
            }
    def set_current (self, value):
        self._current = value.split ("|")
        self.redraw (queue = True)
    def get_current (self):
        # Drop empty fragments so "" round-trips cleanly.
        return "|".join ([s for s in self._current if len (s) > 0])
    current = property (get_current, set_current)
    def draw (self, cr, width, height):
        '''The actual drawing function: paint each cell and its label,
        highlighting active modifiers in green.'''
        for mod in self._modifiers:
            x, y = self._modifiers[mod]
            if mod in self._names: text = self._names[mod]
            else: text = mod
            cr.set_source_surface (self._base_surface, x, y)
            cr.rectangle (x, y, self._width, self._height)
            cr.fill_preserve ()
            if mod in self._current:
                # Active: dark shadow text plus green highlight on top.
                cr.set_source_rgb (0.3, 0.3, 0.3)
                self.write (cr, x + 23, y + 15, text)
                cr.set_source_rgb (0.5, 1, 0)
            else:
                cr.set_source_rgb (0, 0, 0)
            self.write (cr, x + 22, y + 14, text)
    def write (self, cr, x, y, text):
        '''Render *text* at (x, y) with the widget font via pango.'''
        cr.move_to (x, y)
        markup = '''<span font_desc="%s">%s</span>''' % (self._font, text)
        pcr = pangocairo.CairoContext (cr)
        layout = pcr.create_layout ()
        layout.set_markup (markup)
        pcr.show_layout (layout)
    def redraw (self, queue = False):
        '''Redraw internal surface'''
        alloc = self.get_allocation ()
        # Prepare drawing surface
        width, height = alloc.width, alloc.height
        self._surface = cairo.ImageSurface (cairo.FORMAT_ARGB32, width, height)
        cr = cairo.Context (self._surface)
        # Clear
        cr.set_operator (cairo.OPERATOR_CLEAR)
        cr.paint ()
        cr.set_operator (cairo.OPERATOR_OVER)
        # Draw
        self.draw (cr, alloc.width, alloc.height)
        # Queue expose event if required
        if queue:
            self.queue_draw ()
    def expose (self, widget, event):
        '''Expose event handler: blit the cached surface, clipped to the
        damaged area.'''
        cr = self.window.cairo_create ()
        if not self._surface:
            self.redraw ()
        cr.set_source_surface (self._surface)
        cr.rectangle (event.area.x, event.area.y,
                      event.area.width, event.area.height)
        cr.clip ()
        cr.paint ()
        return False
    def in_rect (self, x, y, x0, y0, x1, y1):
        '''Return True if (x, y) lies in the rectangle (x0, y0)-(x1, y1).'''
        return x >= x0 and y >= y0 and x <= x1 and y <= y1
    def button_press (self, widget, event):
        '''Toggle the modifier cell under the click, if any, and notify.'''
        x, y = event.x, event.y
        mod = ""
        for modifier in self._modifiers:
            x0, y0 = self._modifiers[modifier]
            if self.in_rect (x, y, x0, y0,
                             x0 + self._width, y0 + self._height):
                mod = modifier
                break
        if not len (mod):
            return
        if mod in self._current:
            self._current.remove (mod)
            self.emit ("removed", mod)
        else:
            self._current.append (mod)
            self.emit ("added", mod)
        self.redraw (queue = True)
# Edge selection widget
#
class EdgeSelector (gtk.DrawingArea):
    """Clickable screen-edge picker drawn over a monitor picture.

    Draws eight hit areas — four corner quarter-circles and four rounded
    edge strips — and emits "clicked" with the edge name ("TopLeft",
    "Top", "Right", ...) and the originating gtk event.  Subclasses
    override set_fill_color / set_stroke_color to visualise per-edge
    state.
    """
    __gsignals__ = {"clicked" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE, (gobject.TYPE_STRING, gobject.TYPE_PYOBJECT,))}
    # Cached cairo surfaces: the display image and the composited result.
    _base_surface = None
    _surface = None
    # _radius: rounding of the edge strips; _cradius: corner circle radius.
    _radius = 13
    _cradius = 20
    _coords = []
    def __init__ (self):
        '''Load the display image and precompute the hit-area geometry.'''
        super (EdgeSelector, self).__init__ ()
        background = "%s/display.png" % PixmapDir
        self._base_surface = cairo.ImageSurface.create_from_png (background)
        self.add_events (gtk.gdk.BUTTON_PRESS_MASK)
        self.connect ("expose_event", self.expose)
        self.connect ("button_press_event", self.button_press)
        self.set_size_request (196, 196)
        # Useful vars
        # (x0, y0)-(x1, y1): screen rectangle inside the display picture;
        # (x2, y2)-(x3, y3): bounds of the straight edge strips between
        # the corner areas.
        x0 = 16
        y0 = 24
        x1 = 181
        y1 = 133
        x2 = x0 + 39
        y2 = y0 + 26
        x3 = x1 - 39
        y3 = y1 - 26
        self._coords = (x0, y0, x1, y1, x2, y2, x3, y3)
    def draw (self, cr, width, height):
        '''The actual drawing function: trace each corner quarter-circle
        and rounded edge strip, filling and stroking it with the colors
        chosen by set_fill_color / set_stroke_color.'''
        # Useful vars
        x0, y0, x1, y1, x2, y2, x3, y3 = self._coords
        cradius = self._cradius
        radius = self._radius
        cr.set_line_width(1.0)
        # Top left edge
        cr.new_path ()
        cr.move_to (x0, y0 + cradius)
        cr.line_to (x0, y0)
        cr.line_to (x0 + cradius, y0)
        cr.arc (x0, y0, cradius, 0, pi / 2)
        cr.close_path ()
        self.set_fill_color (cr, "TopLeft")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "TopLeft")
        cr.stroke ()
        # Top right edge
        cr.new_path ()
        cr.move_to (x1, y0 + cradius)
        cr.line_to (x1, y0)
        cr.line_to (x1 - cradius, y0)
        cr.arc_negative (x1, y0, cradius, pi, pi/2)
        cr.close_path ()
        self.set_fill_color (cr, "TopRight")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "TopRight")
        cr.stroke ()
        # Bottom left edge
        cr.new_path ()
        cr.move_to (x0, y1 - cradius)
        cr.line_to (x0, y1)
        cr.line_to (x0 + cradius, y1)
        cr.arc_negative (x0, y1, cradius, 2 * pi, 3 * pi / 2)
        cr.close_path ()
        self.set_fill_color (cr, "BottomLeft")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "BottomLeft")
        cr.stroke ()
        # Bottom right edge
        cr.new_path ()
        cr.move_to (x1, y1 - cradius)
        cr.line_to (x1, y1)
        cr.line_to (x1 - cradius, y1)
        cr.arc (x1, y1, cradius, pi, 3 * pi / 2)
        cr.close_path ()
        self.set_fill_color (cr, "BottomRight")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "BottomRight")
        cr.stroke ()
        # Top edge
        cr.new_path ()
        cr.move_to (x2 + radius, y0)
        cr.line_to (x3 - radius, y0)
        cr.arc (x3 - radius, y0, radius, 0, pi / 2)
        cr.line_to (x2 + radius, y0 + radius)
        cr.arc (x2 + radius, y0, radius, pi / 2, pi)
        cr.close_path ()
        self.set_fill_color (cr, "Top")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "Top")
        cr.stroke ()
        # Bottom edge
        cr.new_path ()
        cr.move_to (x2 + radius, y1)
        cr.line_to (x3 - radius, y1)
        cr.arc_negative (x3 - radius, y1, radius, 0, - pi / 2)
        cr.line_to (x2 + radius, y1 - radius)
        cr.arc_negative (x2 + radius, y1, radius, - pi / 2, pi)
        cr.close_path ()
        self.set_fill_color (cr, "Bottom")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "Bottom")
        cr.stroke ()
        # Left edge
        cr.new_path ()
        cr.move_to (x0, y2 + radius)
        cr.line_to (x0, y3 - radius)
        cr.arc_negative (x0, y3 - radius, radius, pi / 2, 0)
        cr.line_to (x0 + radius, y2 + radius)
        cr.arc_negative (x0, y2 + radius, radius, 0, 3 * pi / 2)
        cr.close_path ()
        self.set_fill_color (cr, "Left")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "Left")
        cr.stroke ()
        # Right edge
        cr.new_path ()
        cr.move_to (x1, y2 + radius)
        cr.line_to (x1, y3 - radius)
        cr.arc (x1, y3 - radius, radius, pi / 2, pi)
        cr.line_to (x1 - radius, y2 + radius)
        cr.arc (x1, y2 + radius, radius, pi, 3 * pi / 2)
        cr.close_path ()
        self.set_fill_color (cr, "Right")
        cr.fill_preserve ()
        self.set_stroke_color (cr, "Right")
        cr.stroke ()
    def set_fill_color (self, cr, edge):
        '''Set painting color for edge.  Base class: neutral grey;
        subclasses override to reflect binding state.'''
        cr.set_source_rgb (0.9, 0.9, 0.9)
    def set_stroke_color (self, cr, edge):
        '''Set stroke color for edge.  Base class: neutral grey.'''
        cr.set_source_rgb (0.45, 0.45, 0.45)
    def redraw (self, queue = False):
        '''Redraw internal surface'''
        alloc = self.get_allocation ()
        # Prepare drawing surface
        width, height = alloc.width, alloc.height
        self._surface = cairo.ImageSurface (cairo.FORMAT_ARGB32, width, height)
        cr = cairo.Context (self._surface)
        # Draw background
        cr.set_source_surface (self._base_surface)
        cr.paint ()
        # Draw
        self.draw (cr, alloc.width, alloc.height)
        # Queue expose event if required
        if queue:
            self.queue_draw ()
    def expose (self, widget, event):
        '''Expose event handler: blit the cached surface, clipped to the
        damaged area.'''
        cr = self.window.cairo_create ()
        if not self._surface:
            self.redraw ()
        cr.set_source_surface (self._surface)
        cr.rectangle (event.area.x, event.area.y,
                      event.area.width, event.area.height)
        cr.clip ()
        cr.paint ()
        return False
    def in_circle_quarter (self, x, y, x0, y0, x1, y1, x2, y2, radius):
        '''Args:
            x, y = point coordinates
            x0, y0 = center coordinates
            x1, y1 = circle square top left coordinates
            x2, y2 = circle square bottom right coordinates
            radius = circle radius'''
        # Restrict to the bounding quarter-square first, then test distance.
        if not self.in_rect (x, y, x1, y1, x2, y2):
            return False
        return self.dist (x, y, x0, y0) <= radius
    def dist (self, x1, y1, x2, y2):
        '''Euclidean distance between (x1, y1) and (x2, y2).'''
        return sqrt ((x2 - x1) ** 2 + (y2 - y1) ** 2)
    def in_rect (self, x, y, x0, y0, x1, y1):
        '''Return True if (x, y) lies in the rectangle (x0, y0)-(x1, y1).'''
        return x >= x0 and y >= y0 and x <= x1 and y <= y1
    def button_press (self, widget, event):
        '''Hit-test the click against the corner circles first, then the
        rounded edge strips (strip body plus its two end caps), and emit
        "clicked" with the matched edge name.'''
        x, y = event.x, event.y
        edge = ""
        # Useful vars
        x0, y0, x1, y1, x2, y2, x3, y3 = self._coords
        cradius = self._cradius
        radius = self._radius
        if self.in_circle_quarter (x, y, x0, y0, x0, y0,
                                   x0 + cradius, y0 + cradius,
                                   cradius):
            edge = "TopLeft"
        elif self.in_circle_quarter (x, y, x1, y0, x1 - cradius, y0,
                                     x1, y0 + cradius, cradius):
            edge = "TopRight"
        elif self.in_circle_quarter (x, y, x0, y1, x0, y1 - cradius,
                                     x0 + cradius, y1, cradius):
            edge = "BottomLeft"
        elif self.in_circle_quarter (x, y, x1, y1, x1 - cradius, y1 - cradius,
                                     x1, y1, cradius):
            edge = "BottomRight"
        elif self.in_rect (x, y, x2 + radius, y0, x3 - radius, y0 + radius) \
             or self.in_circle_quarter (x, y, x2 + radius, y0, x2, y0,
                                        x2 + radius, y0 + radius, radius) \
             or self.in_circle_quarter (x, y, x3 - radius, y0, x3 - radius, y0,
                                        x3, y0 + radius, radius):
            edge = "Top"
        elif self.in_rect (x, y, x2 + radius, y1 - radius, x3 - radius, y1) \
             or self.in_circle_quarter (x, y, x2 + radius, y1, x2, y1 - radius,
                                        x2 + radius, y1, radius) \
             or self.in_circle_quarter (x, y, x3 - radius, y1,
                                        x3 - radius, y1 - radius,
                                        x3, y1, radius):
            edge = "Bottom"
        elif self.in_rect (x, y, x0, y2 + radius, x0 + radius, y3 - radius) \
             or self.in_circle_quarter (x, y, x0, y2 + radius, x0, y2,
                                        x0 + radius, y2 + radius, radius) \
             or self.in_circle_quarter (x, y, x0, y3 - radius,
                                        x0, y3 - radius,
                                        x0 + radius, y3, radius):
            edge = "Left"
        elif self.in_rect (x, y, x1 - radius, y2 + radius, x1, y3 - radius) \
             or self.in_circle_quarter (x, y, x1, y2 + radius, x1 - radius, y2,
                                        x1, y2 + radius, radius) \
             or self.in_circle_quarter (x, y, x1, y3 - radius,
                                        x1 - radius, y3 - radius,
                                        x1, y3, radius):
            edge = "Right"
        if edge:
            self.emit ("clicked", edge, event)
# Edge selection widget
#
class SingleEdgeSelector (EdgeSelector):
    """EdgeSelector bound to one setting's "|"-separated edge string.

    Clicking an edge toggles it locally; bound edges are drawn green,
    unbound ones red.  The value is exposed through ``current``.
    """
    _current = []
    def __init__ (self, edge):
        '''Prepare widget; *edge* is the initial "|"-separated edge string.'''
        EdgeSelector.__init__ (self)
        self._current = edge.split ("|")
        self.connect ('clicked', self.edge_clicked)
    def set_current (self, value):
        self._current = value.split ("|")
        self.redraw (queue = True)
    def get_current (self):
        # Drop empty fragments so "" round-trips cleanly.
        return "|".join (filter (None, self._current))
    current = property (get_current, set_current)
    def set_fill_color (self, cr, edge):
        '''Fill color: green for a bound edge, red otherwise.'''
        if edge in self._current:
            rgb = (0.64, 1.0, 0.09)
        else:
            rgb = (0.80, 0.00, 0.00)
        cr.set_source_rgb (*rgb)
    def set_stroke_color (self, cr, edge):
        '''Stroke color: dark green for a bound edge, dark red otherwise.'''
        if edge in self._current:
            rgb = (0.31, 0.60, 0.02)
        else:
            rgb = (0.64, 0.00, 0.00)
        cr.set_source_rgb (*rgb)
    def edge_clicked (self, widget, edge, event):
        '''Toggle *edge* in the current selection and repaint.'''
        if not edge:
            return
        if edge in self._current:
            self._current.remove (edge)
        else:
            self._current.append (edge)
        self.redraw (queue = True)
# Global Edge Selector
#
class GlobalEdgeSelector(EdgeSelector):
    """EdgeSelector displaying every edge binding of the whole context.

    Clicking an edge pops up a combo box listing every 'Edge'-typed
    setting; choosing one (re)binds the edge, resolving binding conflicts
    and writing the context back.
    """
    # Class-level defaults only.  Real state is (re)created per instance
    # in __init__: previously the class-level _edges dict was mutated in
    # place, so all selector instances silently shared one edge map.
    _settings = []
    _edges = {}
    _text = {}
    _context = None
    def __init__ (self, context, settings=None):
        """*context* is the ccs context; *settings* optionally supplies a
        pre-built list of edge settings (default: collect them here).

        The former mutable default argument (settings=[]) is replaced by
        None; passing an empty list behaves exactly as before.
        """
        EdgeSelector.__init__ (self)
        self._context = context
        self._settings = settings if settings is not None else []
        self._edges = {}
        self._text = {}
        self.connect ("clicked", self.show_popup)
        if len (self._settings) <= 0:
            self.generate_setting_list ()
    def set_fill_color (self, cr, edge):
        '''Fill color: green for a bound edge, red otherwise.'''
        if edge in self._edges:
            cr.set_source_rgb (0.64, 1.0, 0.09)
        else:
            cr.set_source_rgb (0.80, 0.00, 0.00)
    def set_stroke_color (self, cr, edge):
        '''Stroke color: dark green for a bound edge, dark red otherwise.'''
        if edge in self._edges:
            cr.set_source_rgb (0.31, 0.60, 0.02)
        else:
            cr.set_source_rgb (0.64, 0.00, 0.00)
    def set_settings (self, value):
        self._settings = value
    def get_settings (self):
        return self._settings
    settings = property (get_settings, set_settings)
    def generate_setting_list (self):
        """Collect all 'Edge' settings of enabled plugins and rebuild the
        edge -> setting index from scratch."""
        self._settings = []
        self._edges = {}  # clear stale entries on regeneration
        def filter_settings(plugin):
            # Only enabled plugins contribute; keep just their Edge settings.
            if plugin.Enabled:
                settings = sorted (GetSettings(plugin), key=SettingKeyFunc)
                settings = [s for s in settings if s.Type == 'Edge']
                return settings
            return []
        for plugin in self._context.Plugins.values ():
            self._settings += filter_settings (plugin)
        for setting in self._settings:
            edges = setting.Value.split ("|")
            for edge in edges:
                self._edges[edge] = setting
    def set_edge_setting (self, setting, edge):
        """Bind *edge* to *setting*, or unbind it when *setting* is None.

        Unbinding strips the edge from every setting that carries it;
        binding appends the edge after auto-resolving conflicts with the
        other edge settings, then writes the context and repaints.
        """
        if not setting:
            if edge in self._edges:
                self._edges.pop(edge)
            for setting in self._settings:
                value = setting.Value.split ("|")
                if edge in value:
                    value.remove(edge)
                value = "|".join ([s for s in value if len (s) > 0])
                setting.Value = value
        else:
            value = setting.Value.split ("|")
            if not edge in value:
                value.append (edge)
                value = "|".join ([s for s in value if len (s) > 0])
                conflict = EdgeConflict (setting, value, settings = self._settings, autoResolve = True)
                if conflict.Resolve (GlobalUpdater):
                    setting.Value = value
                    self._edges[edge] = setting
        self._context.Write()
        self.redraw (queue = True)
    def show_popup (self, widget, edge, event):
        """Open a combo box listing every edge setting (plus "None") with
        the current binding of *edge* preselected."""
        self._text = {}
        comboBox = gtk.combo_box_new_text ()
        comboBox.append_text (_("None"))
        comboBox.set_active (0)
        i = 1
        for setting in self._settings:
            text = "%s: %s" % (setting.Plugin.ShortDesc, setting.ShortDesc)
            comboBox.append_text (text)
            # Remember which setting each row text stands for.
            self._text[text] = setting
            if edge in setting.Value.split ("|"):
                comboBox.set_active (i)
            i += 1
        comboBox.set_size_request (200, -1)
        comboBox.connect ('changed', self.combo_changed, edge)
        popup = Popup (self, child=comboBox, decorated=False, mouse=True, modal=False)
        popup.show_all()
        popup.connect ('focus-out-event', self.focus_out)
    def focus_out (self, widget, event):
        """Dismiss the popup when it loses focus (unless the combo's own
        dropdown is what grabbed it)."""
        combo = widget.get_child ()
        if combo.props.popup_shown:
            return
        gtk_process_events ()
        widget.destroy ()
    def combo_changed (self, widget, edge):
        """Apply the combo selection to *edge* and close the popup."""
        text = widget.get_active_text ()
        setting = None
        if text != _("None"):
            setting = self._text[text]
        self.set_edge_setting (setting, edge)
        popup = widget.get_parent ()
        popup.destroy ()
# Popup
#
class Popup (gtk.Window):
    """Small utility toplevel used for transient prompts (key grabbing,
    edge-binding combos): shows either padded text or a supplied child."""
    def __init__ (self, parent=None, text=None, child=None, decorated=True, mouse=False, modal=True):
        gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
        self.set_type_hint (gtk.gdk.WINDOW_TYPE_HINT_UTILITY)
        # Place at the pointer when requested, otherwise always centered.
        self.set_position (mouse and gtk.WIN_POS_MOUSE or gtk.WIN_POS_CENTER_ALWAYS)
        if parent:
            self.set_transient_for (parent.get_toplevel ())
        self.set_modal (modal)
        self.set_decorated (decorated)
        self.set_property("skip-taskbar-hint", True)
        # *text* takes precedence over *child* when both are given.
        if text:
            label = gtk.Label (text)
            align = gtk.Alignment ()
            align.set_padding (20, 20, 20, 20)
            align.add (label)
            self.add (align)
        elif child:
            self.add (child)
        # Flush pending GTK events so the window can be shown immediately.
        gtk_process_events ()
    def destroy (self):
        gtk.Window.destroy (self)
        gtk_process_events ()
# Key Grabber
#
class KeyGrabber (gtk.Button):
    """Button that grabs the keyboard on click and records the next key
    combination.

    Emits "changed" (keyval, modifier mask) when a grab ends, and
    "current-changed" while in fixed-label mode.  Escape cancels and
    re-emits the previous binding; Return cancels without emitting.
    """
    __gsignals__ = {"changed" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    [gobject.TYPE_INT, gobject.TYPE_INT]),
                    "current-changed" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    [gobject.TYPE_INT, gobject.TYPE_INT])}
    # Last accepted binding (keyval + modifier mask).
    key = 0
    mods = 0
    # Signal handler id of the popup's key-press hook, for disconnecting.
    handler = None
    popup = None
    # When set, the button always shows this text instead of the binding.
    label = None
    def __init__ (self, key = 0, mods = 0, label = None):
        '''Prepare widget'''
        super (KeyGrabber, self).__init__ ()
        self.key = key
        self.mods = mods
        self.label = label
        self.connect ("clicked", self.begin_key_grab)
        self.set_label ()
    def begin_key_grab (self, widget):
        '''Open the prompt popup and grab the keyboard, retrying until
        the grab succeeds.'''
        self.add_events (gtk.gdk.KEY_PRESS_MASK)
        self.popup = Popup (self, _("Please press the new key combination"))
        self.popup.show_all()
        self.handler = self.popup.connect ("key-press-event",
                                           self.on_key_press_event)
        while gtk.gdk.keyboard_grab (self.popup.window) != gtk.gdk.GRAB_SUCCESS:
            time.sleep (0.1)
    def end_key_grab (self):
        '''Release the keyboard grab and tear down the prompt popup.'''
        gtk.gdk.keyboard_ungrab (gtk.get_current_event_time ())
        self.popup.disconnect (self.handler)
        self.popup.destroy ()
    def on_key_press_event (self, widget, event):
        '''Accept a valid accelerator, or cancel on bare Escape/Return.

        Escape re-emits "changed" with the old binding so listeners can
        reset; Return just aborts.  Tab is only accepted with modifiers
        (ISO_Left_Tab is normalized to Tab first).'''
        mods = event.state & gtk.accelerator_get_default_mod_mask ()
        if event.keyval in (gtk.keysyms.Escape, gtk.keysyms.Return) \
           and not mods:
            if event.keyval == gtk.keysyms.Escape:
                self.emit ("changed", self.key, self.mods)
            self.end_key_grab ()
            self.set_label ()
            return
        key = gtk.gdk.keyval_to_lower (event.keyval)
        if (key == gtk.keysyms.ISO_Left_Tab):
            key = gtk.keysyms.Tab
        if gtk.accelerator_valid (key, mods) \
           or (key == gtk.keysyms.Tab and mods):
            self.set_label (key, mods)
            self.end_key_grab ()
            self.key = key
            self.mods = mods
            self.emit ("changed", self.key, self.mods)
            return
        # Not a valid accelerator yet: preview it and keep grabbing.
        self.set_label (key, mods)
    def set_label (self, key = None, mods = None):
        '''Show the accelerator name for (key, mods) — or the stored
        binding when called without arguments.  In fixed-label mode the
        static label is shown and "current-changed" is emitted instead.'''
        if self.label:
            if key != None and mods != None:
                self.emit ("current-changed", key, mods)
            gtk.Button.set_label (self, self.label)
            return
        if key == None and mods == None:
            key = self.key
            mods = self.mods
        label = GetAcceleratorName (key, mods)
        if not len (label):
            label = _("Disabled")
        gtk.Button.set_label (self, label)
# Match Button
#
class MatchButton(gtk.Button):
    """"+" button next to a match entry that opens an editor dialog for
    building compiz window-match expressions (e.g. ``class=Firefox``).

    Emits "changed" with the new match string (via the entry's activate).
    """
    __gsignals__ = {"changed" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    [gobject.TYPE_STRING])}
    # Human-readable property name -> compiz match prefix.
    prefix = {
            _("Window Title"): 'title',
            _("Window Role"): 'role',
            _("Window Name"): 'name',
            _("Window Class"): 'class',
            _("Window Type"): 'type',
            _("Window ID"): 'xid',
        }
    # Human-readable relation -> compiz match operator.
    symbols = {
            _("And"): '&',
            _("Or"): '|'
        }
    match = None
    def __init__ (self, entry = None):
        '''Prepare widget; *entry* is the gtk.Entry holding the match text.'''
        super (MatchButton, self).__init__ ()
        self.entry = entry
        self.match = entry.get_text()
        self.add (Image (name = gtk.STOCK_ADD, type = ImageStock,
                         size = gtk.ICON_SIZE_BUTTON))
        self.connect ("clicked", self.run_edit_dialog)
    def set_match (self, value):
        """Write *value* into the entry and activate it so the setting
        picks the new match up."""
        self.match = value
        self.entry.set_text(value)
        self.entry.activate()
    def get_xprop (self, regexp, proc = "xprop"):
        """Run *proc* (interactive xprop/xwininfo: the user clicks a
        window) and return the last regex group of the first matching
        output line, or "" if nothing matched."""
        pipe = os.popen (proc)
        try:
            output = pipe.readlines ()
        finally:
            # The pipe was previously leaked; closing also reaps the child.
            pipe.close ()
        rex = re.compile (regexp)
        value = ""
        for line in output:
            m = rex.match (line)
            if m:
                value = m.groups () [-1]
                break
        return value
    # Regular Expressions taken from beryl-settings
    def grab_value (self, widget, value_widget, type_widget):
        """Fill *value_widget* by querying the X property matching the
        currently chosen match type (user picks a window via xprop)."""
        value = ""
        prefix = self.prefix[type_widget.get_active_text()]
        if prefix == "type":
            value = self.get_xprop(r"^_NET_WM_WINDOW_TYPE\(ATOM\) = _NET_WM_WINDOW_TYPE_(\w+)")
            value = value.lower().capitalize()
        elif prefix == "role":
            value = self.get_xprop(r"^WM_WINDOW_ROLE\(STRING\) = \"([^\"]+)\"")
        elif prefix == "name":
            value = self.get_xprop(r"^WM_CLASS\(STRING\) = \"([^\"]+)\"")
        elif prefix == "class":
            value = self.get_xprop(r"^WM_CLASS\(STRING\) = \"([^\"]+)\", \"([^\"]+)\"")
        elif prefix == "title":
            value = self.get_xprop(r"^_NET_WM_NAME\(UTF8_STRING\) = ([^\n]+)")
            if value:
                # xprop reports _NET_WM_NAME as comma-separated hex bytes.
                codes = value.split(", ")
                value = ""
                for code in codes:
                    value += "%c" % int(code, 16)
            else:
                value = self.get_xprop(r"^WM_NAME\(STRING\) = \"([^\"]+)\"")
        elif prefix == "xid":
            # Fixed: the prefix map stores 'xid', so the former comparison
            # against "id" left this branch unreachable and Window ID
            # grabbing never worked.
            value = self.get_xprop(r"^xwininfo: Window id: ([^\s]+)", "xwininfo")
        value_widget.set_text(value)
    def generate_match (self, t, value, relation, invert):
        """Append "<prefix>=<value>" to the existing match with the given
        relation ('And'/'Or'), optionally negated, and store the result."""
        match = ""
        text = self.match
        prefix = self.prefix[t]
        symbol = self.symbols[relation]
        # check if the current match needs some brackets
        if len(text) > 0 and text[-1] != ')' and text[0] != '(':
            match = "(%s)" % text
        else:
            match = text
        if invert:
            match = "%s %s !(%s=%s)" % (match, symbol, prefix, value)
        elif len(match) > 0:
            match = "%s %s %s=%s" % (match, symbol, prefix, value)
        else:
            match = "%s=%s" % (prefix, value)
        self.set_match (match)
    def _check_entry_value (self, entry, dialog):
        """Enable the dialog's Add button only while the value is non-empty."""
        is_valid = False
        value = entry.get_text()
        if value != "":
            is_valid = True
        dialog.set_response_sensitive(gtk.RESPONSE_OK, is_valid)
    def run_edit_dialog (self, widget):
        '''Run dialog to generate a match'''
        self.match = self.entry.get_text ()
        dlg = gtk.Dialog (_("Edit match"))
        dlg.set_position (gtk.WIN_POS_CENTER_ON_PARENT)
        dlg.set_transient_for (self.get_parent ().get_toplevel ())
        dlg.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
        dlg.add_button (gtk.STOCK_ADD, gtk.RESPONSE_OK).grab_default ()
        dlg.set_response_sensitive(gtk.RESPONSE_OK, False)
        dlg.set_default_response (gtk.RESPONSE_OK)
        table = gtk.Table ()
        rows = []
        # Type
        label = Label (_("Type"))
        type_chooser = gtk.combo_box_new_text ()
        for t in self.prefix:
            type_chooser.append_text (t)
        type_chooser.set_active (0)
        rows.append ((label, type_chooser))
        # Value
        label = Label (_("Value"))
        box = gtk.HBox ()
        box.set_spacing (5)
        entry = gtk.Entry ()
        entry.connect ('changed', self._check_entry_value, dlg)
        button = gtk.Button (_("Grab"))
        button.connect ('clicked', self.grab_value, entry, type_chooser)
        box.pack_start (entry, True, True)
        box.pack_start (button, False, False)
        rows.append ((label, box))
        # Relation
        label = Label (_("Relation"))
        relation_chooser = gtk.combo_box_new_text ()
        for relation in self.symbols:
            relation_chooser.append_text (relation)
        relation_chooser.set_active (0)
        rows.append ((label, relation_chooser))
        # Invert
        label = Label (_("Invert"))
        check = gtk.CheckButton ()
        rows.append ((label, check))
        row = 0
        for label, widget in rows:
            table.attach(label, 0, 1, row, row+1, yoptions=0, xpadding=TableX, ypadding=TableY)
            table.attach(widget, 1, 2, row, row+1, yoptions=0, xpadding=TableX, ypadding=TableY)
            row += 1
        dlg.vbox.pack_start (table)
        dlg.vbox.set_spacing (5)
        dlg.show_all ()
        response = dlg.run ()
        dlg.hide ()
        if response == gtk.RESPONSE_OK:
            t = type_chooser.get_active_text ()
            value = entry.get_text ()
            relation = relation_chooser.get_active_text ()
            invert = check.get_active ()
            self.generate_match (t, value, relation, invert)
        dlg.destroy ()
class FileButton (gtk.Button):
    """Browse button paired with an entry: opens a file/directory chooser
    and writes the selection back into the entry.

    Emits "changed" with the chosen path (via the entry's activate).
    """
    __gsignals__ = {"changed" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    [gobject.TYPE_STRING])}
    # True -> choose a directory instead of a file.
    _directory = False
    _context = None
    # True -> restrict to images and show a preview pane.
    _image = False
    # Last chosen (or initial) path.
    _path = ""
    def __init__ (self, context, entry, directory=False, image=False, path=""):
        gtk.Button.__init__ (self)
        self._entry = entry
        self._directory = directory
        self._context = context
        self._image = image
        self._path = path
        self.set_tooltip_text(_("Browse..."))
        self.set_image(gtk.image_new_from_stock(
            gtk.STOCK_OPEN, gtk.ICON_SIZE_BUTTON))
        self.connect('clicked', self.open_dialog)
    def set_path (self, value):
        """Store *value* and push it into the entry (activating it so the
        underlying setting is updated)."""
        self._path = value
        self._entry.set_text (value)
        self._entry.activate ()
    def create_filter(self):
        """Build the chooser's file filter: common image types in image
        mode, everything otherwise."""
        filter = gtk.FileFilter ()
        if self._image:
            filter.set_name (_("Images"))
            filter.add_pattern ("*.png")
            filter.add_pattern ("*.jpg")
            filter.add_pattern ("*.jpeg")
            filter.add_pattern ("*.svg")
        else:
            filter.add_pattern ("*")
            filter.set_name (_("File"))
        return filter
    def check_type (self, filename):
        """In image mode, verify compiz can load the file's mime type
        (resolving the 'imagemime:' feature requirement); accept files
        with no/unknown extension."""
        if filename.find (".") == -1:
            return True
        ext = filename.split (".") [-1]
        try:
            mime = mimetypes.types_map [".%s" %ext]
        except KeyError:
            return True
        if self._image:
            require = FeatureRequirement (self._context, 'imagemime:' + mime)
            return require.Resolve ()
        return True
    def update_preview (self, widget):
        """Chooser callback: show a thumbnail of the highlighted image,
        or clear the preview for directories/unreadable files."""
        path = widget.get_preview_filename ()
        if path is None or os.path.isdir (path):
            widget.get_preview_widget ().set_from_file (None)
            return
        try:
            pixbuf = gtk.gdk.pixbuf_new_from_file_at_size (path, 128, 128)
        except gobject.GError:
            return
        widget.get_preview_widget ().set_from_pixbuf (pixbuf)
    def open_dialog (self, widget):
        """Run the file/directory chooser and store an accepted selection."""
        b = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OPEN, gtk.RESPONSE_OK)
        if self._directory:
            title = _("Open directory...")
        else:
            title = _("Open file...")
        chooser = gtk.FileChooserDialog (title = title, buttons = b)
        if self._directory:
            chooser.set_action (gtk.FILE_CHOOSER_ACTION_SELECT_FOLDER)
        else:
            chooser.set_filter (self.create_filter ())
        # Start from the previous path when it still exists, else $HOME.
        if self._path and os.path.exists (self._path):
            chooser.set_filename (self._path)
        else:
            chooser.set_current_folder (os.environ.get("HOME"))
        if self._image:
            chooser.set_use_preview_label (False)
            chooser.set_preview_widget (gtk.Image ())
            chooser.connect ("selection-changed", self.update_preview)
        ret = chooser.run ()
        filename = chooser.get_filename ()
        chooser.destroy ()
        if ret == gtk.RESPONSE_OK:
            if self._directory or self.check_type (filename):
                self.set_path (filename)
# About Dialog
#
class AboutDialog (gtk.AboutDialog):
    """Standard About dialog for CCSM (version, credits, website, logo)."""
    def __init__ (self, parent):
        gtk.AboutDialog.__init__ (self)
        self.set_transient_for (parent)
        self.set_name (_("CompizConfig Settings Manager"))
        self.set_version (Version)
        self.set_comments (_("This is a settings manager for the CompizConfig configuration system."))
        self.set_copyright ("Copyright \xC2\xA9 2007-2008 Patrick Niklaus/Christopher Williams/Guillaume Seguin/Quinn Storm")
        self.set_translator_credits (_("translator-credits"))
        self.set_authors (["Patrick Niklaus <[email protected]>",
                           "Christopher Williams <[email protected]>",
                           "Guillaume Seguin <[email protected]>",
                           "Quinn Storm <[email protected]>"])
        self.set_artists (["Andrew Wedderburn <[email protected]>",
                           "Patrick Niklaus <[email protected]>",
                           "Gnome Icon Theme Team"])
        # Only set the logo when the icon theme actually provides it.
        if IconTheme.lookup_icon("ccsm", 64, 0):
            icon = IconTheme.load_icon("ccsm", 64, 0)
            self.set_logo (icon)
        self.set_website ("https://launchpad.net/compiz")
# Error dialog
#
class ErrorDialog (gtk.MessageDialog):
    '''Display an error dialog (modal, self-destroying on response).'''
    def __init__ (self, parent, message):
        gtk.MessageDialog.__init__ (self, parent,
                                    gtk.DIALOG_DESTROY_WITH_PARENT,
                                    gtk.MESSAGE_ERROR,
                                    gtk.BUTTONS_CLOSE)
        self.set_position (gtk.WIN_POS_CENTER)
        self.set_markup (message)
        # NOTE(review): "occured" is a typo, but the string is a gettext
        # msgid — fixing it would break existing translation catalogs.
        self.set_title (_("An error has occured"))
        self.set_transient_for (parent)
        self.set_modal (True)
        self.show_all ()
        self.connect ("response", lambda *args: self.destroy ())
# Warning dialog
#
class WarningDialog (gtk.MessageDialog):
    '''Display a Yes/No warning dialog; destroys itself after the caller's
    response handler has run (connect_after).'''
    def __init__ (self, parent, message):
        gtk.MessageDialog.__init__ (self, parent,
                                    gtk.DIALOG_DESTROY_WITH_PARENT,
                                    gtk.MESSAGE_WARNING,
                                    gtk.BUTTONS_YES_NO)
        self.set_position (gtk.WIN_POS_CENTER)
        self.set_markup (message)
        self.set_title (_("Warning"))
        self.set_transient_for (parent)
        self.connect_after ("response", lambda *args: self.destroy ())
# First run dialog providing a user warning.
#
class FirstRun (gtk.MessageDialog):
    '''First run dialog providing a user warning.

    A "show next time?" checkbox toggles a marker file: the existence of
    ConfFile suppresses the warning on later runs.
    '''
    def __init__(self, parent):
        gtk.MessageDialog.__init__ (self, parent,
                                    gtk.DIALOG_DESTROY_WITH_PARENT,
                                    gtk.MESSAGE_WARNING,
                                    gtk.BUTTONS_OK)
        self.set_position (gtk.WIN_POS_CENTER)
        title = _("CCSM is an advanced tool. Use with caution.")
        self.set_markup("<b>%s</b>" % title)
        message = _("This tool allows you to deeply configure Compiz's settings. Some options may be incompatible with each other. Unless used with care, it is possible to be left with an unusable desktop.")
        self.format_secondary_markup(message)
        check_button = gtk.CheckButton(label=_("Show this warning next time?"))
        check_button.set_active(True)
        self.vbox.pack_start(check_button, True, True, 2)
        check_button.show()
        check_button.connect("toggled", self.callback, "check button 1")
        self.set_transient_for(parent)
        self.set_modal(True)
        self.show_all()
        self.connect("response", lambda *args: self.destroy ())
    def callback(self, widget, data=None):
        """Checkbox handler: active -> delete the suppression marker so
        the warning shows again; inactive -> create it."""
        if widget.get_active() == True:
            if os.path.isfile(ConfFile):
                os.remove(ConfFile)
        else:
            if not os.path.exists(ConfDir):
                os.mkdir(ConfDir)
            if os.path.isdir(ConfDir):
                # An empty file is enough — only its existence matters.
                f = open(ConfFile, "w")
                f.close()
# Plugin Button
#
class PluginButton (gtk.HBox):
    """List entry for one plugin: an enable checkbox plus a flat button
    that opens the plugin's settings page.

    Emits "clicked" when the settings button is pressed and "activated"
    after the enabled state changed.
    """
    __gsignals__ = {"clicked" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    []),
                    "activated" : (gobject.SIGNAL_RUN_FIRST,
                    gobject.TYPE_NONE,
                    [])}
    _plugin = None
    def __init__ (self, plugin, useMissingImage = False):
        gtk.HBox.__init__(self)
        self._plugin = plugin
        image = Image (plugin.Name, ImagePlugin, 32, useMissingImage)
        label = Label (plugin.ShortDesc, 120)
        label.connect ('style-set', self.style_set)
        box = gtk.HBox ()
        box.set_spacing (5)
        box.pack_start (image, False, False)
        box.pack_start (label)
        button = PrettyButton ()
        button.connect ('clicked', self.show_plugin_page)
        button.set_tooltip_text (plugin.LongDesc)
        button.add (box)
        # Plugins the user must never toggle directly get no checkbox.
        blacklist_plugins = ['core']
        if os.getenv('XDG_CURRENT_DESKTOP') == 'Unity':
            blacklist_plugins.append('unityshell')
        if plugin.Name not in blacklist_plugins:
            enable = gtk.CheckButton ()
            enable.set_tooltip_text(_("Enable %s") % plugin.ShortDesc)
            enable.set_active (plugin.Enabled)
            enable.set_sensitive (plugin.Context.AutoSort)
            # Keep the handler id so enable_plugin can revert the checkbox
            # without re-triggering itself.
            self._toggled_handler = enable.connect ("toggled", self.enable_plugin)
            PluginSetting (plugin, enable, self._toggled_handler)
            self.pack_start (enable, False, False)
        self.pack_start (button, False, False)
        self.set_size_request (220, -1)
    # Re-entrancy guard: style_set itself modifies the style.
    StyleBlock = 0
    def style_set (self, widget, previous):
        if self.StyleBlock > 0:
            return
        self.StyleBlock += 1
        widget.modify_fg(gtk.STATE_NORMAL, widget.style.text[gtk.STATE_NORMAL])
        self.StyleBlock -= 1
    def enable_plugin (self, widget):
        """Toggle the plugin, resolving enable/disable conflicts; on
        failure the checkbox is reverted with its handler blocked."""
        plugin = self._plugin
        conflicts = plugin.Enabled and plugin.DisableConflicts or plugin.EnableConflicts
        conflict = PluginConflict (plugin, conflicts)
        if conflict.Resolve ():
            plugin.Enabled = widget.get_active ()
        else:
            widget.handler_block(self._toggled_handler)
            widget.set_active (plugin.Enabled)
            widget.handler_unblock(self._toggled_handler)
        plugin.Context.Write ()
        GlobalUpdater.UpdatePlugins()
        plugin.Context.UpdateExtensiblePlugins ()
        self.emit ('activated')
    def show_plugin_page (self, widget):
        self.emit ('clicked')
    def filter (self, text, level=FilterAll):
        """Return True if *text* matches this plugin at the given filter
        level(s) (name/short desc, long desc, category).

        # assumes *text* is already lower-cased by the caller — the
        # comparisons here only lower-case the plugin side; verify at
        # the call sites.
        """
        found = False
        if level & FilterName:
            if (text in self._plugin.Name.lower ()
            or text in self._plugin.ShortDesc.lower ()):
                found = True
        if not found and level & FilterLongDesc:
            if text in self._plugin.LongDesc.lower():
                found = True
        if not found and level & FilterCategory:
            # None matches any category; "" matches only uncategorized.
            if text == None \
            or (text == "" and self._plugin.Category.lower() == "") \
            or (text != "" and text in self._plugin.Category.lower()):
                found = True
        return found
    def get_plugin (self):
        return self._plugin
# Category Box
#
class CategoryBox(gtk.VBox):
    """A titled section of the plugin window: a header (icon + category
    name) above a table of PluginButton rows, with an optional trailing
    separator.

    ``filter_buttons`` narrows the visible plugins and ``rebuild_table``
    re-lays them out for a given column count.
    """
    _plugins = None
    _unfiltered_plugins = None
    _buttons = None
    _context = None
    _name = ""
    # Fixed typo: this class default was previously spelled ``_tabel`` and
    # therefore never backed the ``_table`` attribute the methods use.
    _table = None
    _alignment = None
    _current_cols = 0
    _current_plugins = 0
    def __init__ (self, context, name, plugins=None, categoryIndex=0):
        gtk.VBox.__init__ (self)
        self.set_spacing (5)
        self._context = context
        if plugins is not None:
            self._plugins = plugins
        else:
            self._plugins = []
        if not plugins:
            # No explicit plugin list: collect every plugin in this category.
            for plugin in context.Plugins.values ():
                if plugin.Category == name:
                    self._plugins.append (plugin)
        self._plugins.sort(key=PluginKeyFunc)
        self._name = name
        text = name or 'Uncategorized'
        # Keep unfiltered list of plugins for correct background icon loading
        self._unfiltered_plugins = self._plugins
        header = gtk.HBox ()
        header.set_border_width (5)
        header.set_spacing (10)
        label = Label ('', -1)
        label.set_markup ("<span color='#aaa' size='x-large' weight='800'>%s</span>" % _(text))
        icon = text.lower ().replace (" ", "_")
        image = Image (icon, ImageCategory)
        header.pack_start (image, False, False)
        header.pack_start (label, False, False)
        self._table = gtk.Table ()
        self._table.set_border_width (10)
        # load icons now only for the first 3 categories
        dontLoadIcons = (categoryIndex >= 3)
        self._buttons = []
        for plugin in self._plugins:
            button = PluginButton(plugin, dontLoadIcons)
            self._buttons.append(button)
        self._alignment = gtk.Alignment (0, 0, 1, 1)
        self._alignment.set_padding (0, 20, 0, 0)
        self._alignment.add (gtk.HSeparator ())
        self.pack_start (header, False, False)
        self.pack_start (self._table, False, False)
        self.pack_start (self._alignment)
    def show_separator (self, show):
        """Attach or detach the trailing separator below this category."""
        children = self.get_children ()
        if show:
            if self._alignment not in children:
                self.pack_start (self._alignment)
        else:
            if self._alignment in children:
                self.remove(self._alignment)
    def filter_buttons (self, text, level=FilterAll):
        """Keep only plugins matching *text*; return True if any matched."""
        self._plugins = []
        for button in self._buttons:
            if button.filter (text, level=level):
                self._plugins.append (button.get_plugin())
        return bool(self._plugins)
    def rebuild_table (self, ncols, force = False):
        """Re-pack the visible plugin buttons into *ncols* columns.

        Skipped when neither the column count nor the set of visible
        plugins changed, unless *force* is True.
        """
        if (not force and ncols == self._current_cols
            and len (self._plugins) == self._current_plugins):
            return
        self._current_cols = ncols
        self._current_plugins = len (self._plugins)
        children = self._table.get_children ()
        if children:
            for child in children:
                self._table.remove(child)
        row = 0
        col = 0
        for button in self._buttons:
            if button.get_plugin () in self._plugins:
                self._table.attach (button, col, col+1, row, row+1, 0, xpadding=TableX, ypadding=TableY)
                col += 1
                if col == ncols:
                    col = 0
                    row += 1
        self.show_all ()
    def get_buttons (self):
        return self._buttons
    def get_plugins (self):
        return self._plugins
    def get_unfiltered_plugins (self):
        return self._unfiltered_plugins
# Plugin Window
#
class PluginWindow(gtk.ScrolledWindow):
    """Scrollable container holding one CategoryBox per plugin category.

    Re-flows its category tables on resize and swaps in a NotFoundBox when
    a filter matches nothing. Emits 'show-plugin' with the plugin object
    when a plugin's settings button is clicked.
    """
    __gsignals__ = {"show-plugin" : (gobject.SIGNAL_RUN_FIRST,
                gobject.TYPE_NONE,
                [gobject.TYPE_PYOBJECT])}
    _not_found_box = None
    # Re-entrancy guard for set_viewport_style.
    _style_block = 0
    _context = None
    # Maps category name -> list of plugins in that category.
    _categories = None
    _viewport = None
    _boxes = None
    _box = None
    def __init__ (self, context, categories=[], plugins=[]):
        gtk.ScrolledWindow.__init__ (self)
        self._categories = {}
        self._boxes = []
        self._context = context
        pool = plugins or list(self._context.Plugins.values())
        if len (categories):
            # Only keep plugins belonging to the requested categories.
            for plugin in pool:
                category = plugin.Category
                if category in categories:
                    if not category in self._categories:
                        self._categories[category] = []
                    self._categories[category].append(plugin)
        else:
            # No restriction: group every plugin by its category.
            for plugin in pool:
                category = plugin.Category
                if not category in self._categories:
                    self._categories[category] = []
                self._categories[category].append(plugin)
        self.props.hscrollbar_policy = gtk.POLICY_NEVER
        self.props.vscrollbar_policy = gtk.POLICY_AUTOMATIC
        self.connect ('size-allocate', self.rebuild_boxes)
        self._box = gtk.VBox ()
        self._box.set_spacing (5)
        self._not_found_box = NotFoundBox ()
        categories = sorted(self._categories, key=CategoryKeyFunc)
        for (i, category) in enumerate(categories):
            plugins = self._categories[category]
            category_box = CategoryBox(context, category, plugins, i)
            self.connect_buttons (category_box)
            self._boxes.append (category_box)
            self._box.pack_start (category_box, False, False)
        viewport = gtk.Viewport ()
        viewport.connect("style-set", self.set_viewport_style)
        viewport.set_focus_vadjustment (self.get_vadjustment ())
        viewport.add (self._box)
        self.add (viewport)
    def connect_buttons (self, category_box):
        """Forward every plugin button's 'clicked' to show_plugin_page."""
        buttons = category_box.get_buttons ()
        for button in buttons:
            button.connect('clicked', self.show_plugin_page)
    def set_viewport_style (self, widget, previous):
        if self._style_block > 0:
            return
        self._style_block += 1
        widget.modify_bg(gtk.STATE_NORMAL, widget.style.base[gtk.STATE_NORMAL])
        self._style_block -= 1
    def filter_boxes (self, text, level=FilterAll):
        """Apply a text filter to every category; show NotFoundBox if empty."""
        found = False
        for box in self._boxes:
            found |= box.filter_buttons (text, level)
        viewport = self.get_child ()
        child = viewport.get_child ()
        if not found:
            if child is not self._not_found_box:
                viewport.remove (self._box)
                viewport.add (self._not_found_box)
            self._not_found_box.update (text)
        else:
            if child is self._not_found_box:
                viewport.remove (self._not_found_box)
                viewport.add (self._box)
        self.queue_resize()
        self.show_all()
    def rebuild_boxes (self, widget, request):
        """size-allocate handler: re-flow categories for the new width."""
        # NOTE(review): integer division under Python 2 (each button column is
        # 220px wide); would need // if ever ported to Python 3 -- confirm.
        ncols = request.width / 220
        width = ncols * (220 + 2 * TableX) + 40
        if width > request.width:
            ncols -= 1
        pos = 0
        last_box = None
        children = self._box.get_children ()
        for box in self._boxes:
            plugins = box.get_plugins ()
            if len (plugins) == 0:
                # Category filtered empty: take it out of the layout.
                if box in children:
                    self._box.remove(box)
            else:
                if last_box:
                    last_box.show_separator (True)
                if box not in children:
                    self._box.pack_start (box, False, False)
                self._box.reorder_child (box, pos)
                box.rebuild_table (ncols)
                # Only boxes followed by another visible box keep a separator.
                box.show_separator (False)
                pos += 1
                last_box = box
    def get_categories (self):
        return list(self._categories)
    def show_plugin_page (self, widget):
        plugin = widget.get_plugin ()
        self.emit ('show-plugin', plugin)
| bsd-3-clause | 2,181,059,991,415,421,700 | 32.727273 | 207 | 0.537778 | false |
jsilhan/dnf | tests/test_cli.py | 1 | 10295 | # Copyright (C) 2012-2016 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# the GNU General Public License v.2, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY expressed or implied, including the implied warranties of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details. You should have received a copy of the
# GNU General Public License along with this program; if not, write to the
# Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA. Any Red Hat trademarks that are incorporated in the
# source code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission of
# Red Hat, Inc.
#
from __future__ import absolute_import
from __future__ import unicode_literals
from argparse import Namespace
from tests import support
from tests.support import TestCase
from tests.support import mock
import dnf.cli.cli
import dnf.conf
import dnf.goal
import dnf.repo
import dnf.repodict
import os
import re
# Expected stdout of dnf.cli.cli.print_versions() for the mocked pepper/tour
# packages (compared verbatim by VersionStringTest below); the mock data has
# no real timestamps, so build/install times render as the epoch.
VERSIONS_OUTPUT = """\
Installed: pepper-0:20-0.x86_64 at 1970-01-01 00:00
Built : at 1970-01-01 00:00
Installed: tour-0:5-0.noarch at 1970-01-01 00:00
Built : at 1970-01-01 00:00
"""
class VersionStringTest(TestCase):
    """Check that ``print_versions`` renders the mocked packages verbatim."""
    def test_print_versions(self):
        mock_base = support.MockBase()
        mock_output = support.MockOutput()
        stdout_patch = mock.patch('sys.stdout')
        sack_patch = mock.patch('dnf.sack._rpmdb_sack',
                                return_value=mock_base.sack)
        with stdout_patch as stdout, sack_patch:
            dnf.cli.cli.print_versions(['pepper', 'tour'], mock_base,
                                       mock_output)
        # Reassemble everything print_versions wrote to the mocked stdout.
        chunks = [call[1][0] for call in stdout.method_calls
                  if call[0] == 'write']
        self.assertEqual(''.join(chunks), VERSIONS_OUTPUT)
@mock.patch('dnf.cli.cli.logger', new_callable=support.mock_logger)
class BaseCliTest(support.ResultTestCase):
    """Tests for BaseCli.downgradePkgs with the cli logger mocked out.

    The class decorator injects the mocked logger as the ``logger``
    argument of every test method.
    """
    def setUp(self):
        self._base = dnf.cli.cli.BaseCli()
        self._base._sack = support.mock_sack('main', 'updates')
        self._base._goal = dnf.goal.Goal(self._base.sack)
        self._base.output.term = support.MockTerminal()
        # Wrap downgrade_to so its real behavior runs but calls are recorded.
        self._base.downgrade_to = mock.Mock(wraps=self._base.downgrade_to)
    def test_downgradePkgs(self, logger):
        """A downgradable package is forwarded without any log output."""
        self._base.downgradePkgs(('tour',))
        self.assertEqual(self._base.downgrade_to.mock_calls, [mock.call('tour', strict=False)])
        self.assertEqual(logger.mock_calls, [])
    def test_downgradePkgs_notfound(self, logger):
        """An unknown package name raises and logs 'No package ... available.'"""
        with self.assertRaises(dnf.exceptions.Error) as ctx:
            self._base.downgradePkgs(('non-existent',))
        self.assertEqual(str(ctx.exception), 'Nothing to do.')
        self.assertEqual(self._base.downgrade_to.mock_calls,
                         [mock.call('non-existent', strict=False)])
        self.assertEqual(logger.mock_calls,
                         [mock.call.info('No package %s available.',
                                         'non-existent')])
    @mock.patch('dnf.cli.cli._', dnf.pycomp.NullTranslations().ugettext)
    def test_downgradePkgs_notinstalled(self, logger):
        """An available-but-not-installed package still results in an error."""
        # (An unused ObjectMatcher for the 'lotus' package was removed here.)
        with self.assertRaises(dnf.exceptions.Error) as ctx:
            self._base.downgradePkgs(('lotus',))
        self.assertEqual(str(ctx.exception), 'Nothing to do.')
        self.assertEqual(self._base.downgrade_to.mock_calls, [mock.call('lotus', strict=False)])
@mock.patch('dnf.cli.cli.Cli._read_conf_file')
class CliTest(TestCase):
    """Tests for Cli argument handling and repo configuration.

    The class decorator stubs out config-file reading; the stub is passed
    to each test as the (ignored) ``_`` argument.
    """
    def setUp(self):
        self.base = support.MockBase("main")
        self.base.output = support.MockOutput()
        self.cli = dnf.cli.cli.Cli(self.base)
    def test_knows_upgrade(self, _):
        """'upgrade' and 'update' map to the very same command object."""
        upgrade = self.cli.cli_commands['upgrade']
        update = self.cli.cli_commands['update']
        self.assertIs(upgrade, update)
    def test_simple(self, _):
        """-y flips conf.assumeyes on."""
        self.assertFalse(self.base.conf.assumeyes)
        self.cli.configure(['update', '-y'])
        self.assertTrue(self.base.conf.assumeyes)
    def test_glob_options_cmds(self, _):
        """Global options are accepted anywhere on the command line."""
        params = [
            ['install', '-y', 'pkg1', 'pkg2'],
            ['install', 'pkg1', '-y', 'pkg2'],
            ['install', 'pkg1', 'pkg2', '-y'],
            ['-y', 'install', 'pkg1', 'pkg2']
        ]
        for param in params:
            self.cli.configure(args=param)
            self.assertTrue(self.base.conf.assumeyes)
            self.assertEqual(self.cli.command.opts.command, ["install"])
            self.assertEqual(self.cli.command.opts.pkg_specs, ["pkg1", "pkg2"])
    def test_configure_repos(self, _):
        """Repo enable/disable edits, --nogpgcheck and --cacheonly are applied."""
        opts = Namespace()
        opts.repo = []
        opts.repos_ed = [('*', 'disable'), ('comb', 'enable')]
        opts.cacheonly = True
        opts.nogpgcheck = True
        opts.repofrompath = {}
        self.base._repos = dnf.repodict.RepoDict()
        self.base._repos.add(support.MockRepo('one', self.base.conf))
        self.base._repos.add(support.MockRepo('two', self.base.conf))
        self.base._repos.add(support.MockRepo('comb', self.base.conf))
        self.cli._configure_repos(opts)
        # '*' disabled everything, then 'comb' was re-enabled.
        self.assertFalse(self.base.repos['one'].enabled)
        self.assertFalse(self.base.repos['two'].enabled)
        self.assertTrue(self.base.repos['comb'].enabled)
        self.assertFalse(self.base.repos["comb"].gpgcheck)
        self.assertFalse(self.base.repos["comb"].repo_gpgcheck)
        self.assertEqual(self.base.repos["comb"]._sync_strategy,
                         dnf.repo.SYNC_ONLY_CACHE)
    def test_configure_repos_expired(self, _):
        """Ensure that --cacheonly beats the expired status."""
        opts = Namespace()
        opts.repo = []
        opts.repos_ed = []
        opts.cacheonly = True
        opts.repofrompath = {}
        pers = self.base._repo_persistor
        pers.get_expired_repos = mock.Mock(return_value=('one',))
        self.base._repos = dnf.repodict.RepoDict()
        self.base._repos.add(support.MockRepo('one', self.base.conf))
        self.cli._configure_repos(opts)
        # _process_demands() should respect --cacheonly in spite of modified demands
        self.cli.demands.fresh_metadata = False
        self.cli.demands.cacheonly = True
        self.cli._process_demands()
        self.assertEqual(self.base.repos['one']._sync_strategy,
                         dnf.repo.SYNC_ONLY_CACHE)
@mock.patch('dnf.logging.Logging._setup', new=mock.MagicMock)
class ConfigureTest(TestCase):
    """Tests for Cli.configure: cachedir selection, verbosity, installroot
    handling and config-file discovery. Logging setup is neutered for the
    whole class.
    """
    def setUp(self):
        self.base = support.MockBase("main")
        self.base._conf = dnf.conf.Conf()
        self.base.output = support.MockOutput()
        self.base._plugins = mock.Mock()
        self.cli = dnf.cli.cli.Cli(self.base)
        self.cli.command = mock.Mock()
        self.conffile = os.path.join(support.dnf_toplevel(), "etc/dnf/dnf.conf")
    @mock.patch('dnf.util.am_i_root', lambda: False)
    def test_configure_user(self):
        """ Test Cli.configure as user."""
        self.base._conf = dnf.conf.Conf()
        with mock.patch('dnf.rpm.detect_releasever', return_value=69):
            self.cli.configure(['update', '-c', self.conffile])
        # Non-root runs get a per-user cachedir under /var/tmp.
        reg = re.compile('^/var/tmp/dnf-[a-zA-Z0-9_-]+$')
        self.assertIsNotNone(reg.match(self.base.conf.cachedir))
        self.assertEqual(self.cli.cmdstring, "dnf update -c %s " % self.conffile)
    @mock.patch('dnf.util.am_i_root', lambda: True)
    def test_configure_root(self):
        """ Test Cli.configure as root."""
        self.base._conf = dnf.conf.Conf()
        with mock.patch('dnf.rpm.detect_releasever', return_value=69):
            self.cli.configure(['update', '--nogpgcheck', '-c', self.conffile])
        # Root runs use the system-wide cache directory.
        reg = re.compile('^/var/cache/dnf$')
        self.assertIsNotNone(reg.match(self.base.conf.system_cachedir))
        self.assertEqual(self.cli.cmdstring,
                         "dnf update --nogpgcheck -c %s " % self.conffile)
    def test_configure_verbose(self):
        """-v raises both debuglevel and errorlevel to 6."""
        with mock.patch('dnf.rpm.detect_releasever', return_value=69):
            self.cli.configure(['-v', 'update', '-c', self.conffile])
        self.assertEqual(self.cli.cmdstring, "dnf -v update -c %s " %
                         self.conffile)
        self.assertEqual(self.base.conf.debuglevel, 6)
        self.assertEqual(self.base.conf.errorlevel, 6)
    @mock.patch('dnf.cli.cli.Cli._parse_commands', new=mock.MagicMock)
    @mock.patch('os.path.exists', return_value=True)
    def test_conf_exists_in_installroot(self, ospathexists):
        """When the installroot holds a dnf.conf, that file is used."""
        with mock.patch('logging.Logger.warning') as warn, \
             mock.patch('dnf.rpm.detect_releasever', return_value=69):
            self.cli.configure(['--installroot', '/roots/dnf', 'update'])
        self.assertEqual(self.base.conf.config_file_path, '/roots/dnf/etc/dnf/dnf.conf')
        self.assertEqual(self.base.conf.installroot, '/roots/dnf')
    @mock.patch('dnf.cli.cli.Cli._parse_commands', new=mock.MagicMock)
    @mock.patch('os.path.exists', return_value=False)
    def test_conf_notexists_in_installroot(self, ospathexists):
        """Without a dnf.conf in the installroot, the host config is used."""
        with mock.patch('dnf.rpm.detect_releasever', return_value=69):
            self.cli.configure(['--installroot', '/roots/dnf', 'update'])
        self.assertEqual(self.base.conf.config_file_path, '/etc/dnf/dnf.conf')
        self.assertEqual(self.base.conf.installroot, '/roots/dnf')
    @mock.patch('dnf.cli.cli.Cli._parse_commands', new=mock.MagicMock)
    def test_installroot_with_etc(self):
        """Test that conffile is detected in a new installroot."""
        self.cli.base.extcmds = []
        tlv = support.dnf_toplevel()
        self.cli.configure(['--installroot', tlv, 'update'])
        self.assertEqual(self.base.conf.config_file_path, '%s/etc/dnf/dnf.conf' % tlv)
    def test_installroot_configurable(self):
        """Test that conffile is detected in a new installroot."""
        conf = os.path.join(support.dnf_toplevel(), "tests/etc/installroot.conf")
        self.cli.configure(['-c', conf, '--nogpgcheck', '--releasever', '17', 'update'])
        self.assertEqual(self.base.conf.installroot, '/roots/dnf')
| gpl-2.0 | 6,798,114,908,982,375,000 | 43.184549 | 96 | 0.636814 | false |
huangkuan/hack | lib/langdetect/detector.py | 1 | 8605 | import random
import re
import six
from six.moves import zip, xrange
from .lang_detect_exception import ErrorCode, LangDetectException
from .language import Language
from .utils.ngram import NGram
from .utils.unicode_block import unicode_block
class Detector(object):
    '''
    Detector class is to detect language from specified text.
    Its instance is able to be constructed via the factory class DetectorFactory.
    After appending a target text to the Detector instance with .append(string),
    the detector provides the language detection results for target text via .detect() or .get_probabilities().
    .detect() method returns a single language name which has the highest probability.
    .get_probabilities() methods returns a list of multiple languages and their probabilities.
    The detector has some parameters for language detection.
    See set_alpha(double), .set_max_text_length(int) .set_prior_map(dict).
    Example:
        from langdetect.detector_factory import DetectorFactory
        factory = DetectorFactory()
        factory.load_profile('/path/to/profile/directory')
        def detect(text):
            detector = factory.create()
            detector.append(text)
            return detector.detect()
        def detect_langs(text):
            detector = factory.create()
            detector.append(text)
            return detector.get_probabilities()
    '''
    ALPHA_DEFAULT = 0.5        # default additive smoothing parameter
    ALPHA_WIDTH = 0.05         # per-trial gaussian jitter applied to alpha
    ITERATION_LIMIT = 1000     # hard cap on naive-Bayes update iterations
    PROB_THRESHOLD = 0.1       # languages below this are dropped from results
    CONV_THRESHOLD = 0.99999   # convergence: stop once one language dominates
    BASE_FREQ = 10000
    UNKNOWN_LANG = 'unknown'
    # URLs and mail addresses are stripped before detection.
    URL_RE = re.compile(r'https?://[-_.?&~;+=/#0-9A-Za-z]{1,2076}')
    MAIL_RE = re.compile(r'[-_.0-9A-Za-z]{1,64}@[-_0-9A-Za-z]{1,255}[-_.0-9A-Za-z]{1,255}')
    def __init__(self, factory):
        self.word_lang_prob_map = factory.word_lang_prob_map
        self.langlist = factory.langlist
        self.seed = factory.seed
        self.text = ''
        self.langprob = None
        self.alpha = self.ALPHA_DEFAULT
        self.n_trial = 7
        self.max_text_length = 10000
        self.prior_map = None
        self.verbose = False
    def set_verbose(self):
        self.verbose = True
    def set_alpha(self, alpha):
        self.alpha = alpha
    def set_prior_map(self, prior_map):
        '''Set prior information about language probabilities.'''
        self.prior_map = [0.0] * len(self.langlist)
        sump = 0.0
        for i in xrange(len(self.prior_map)):
            lang = self.langlist[i]
            if lang in prior_map:
                p = prior_map[lang]
                if p < 0:
                    raise LangDetectException(ErrorCode.InitParamError, 'Prior probability must be non-negative.')
                self.prior_map[i] = p
                sump += p
        if sump <= 0.0:
            # Fixed garbled error message ("More one of ..." previously).
            raise LangDetectException(ErrorCode.InitParamError, 'At least one prior probability must be non-zero.')
        # Normalize the priors so they sum to 1.
        for i in xrange(len(self.prior_map)):
            self.prior_map[i] /= sump
    def set_max_text_length(self, max_text_length):
        '''Specify max size of target text to use for language detection.
        The default value is 10000(10KB).
        '''
        self.max_text_length = max_text_length
    def append(self, text):
        '''Append the target text for language detection.
        If the total size of target text exceeds the limit size specified by
        Detector.set_max_text_length(int), the rest is cut down.
        '''
        text = self.URL_RE.sub(' ', text)
        text = self.MAIL_RE.sub(' ', text)
        text = NGram.normalize_vi(text)
        pre = 0
        # Copy characters over, collapsing runs of spaces to a single space.
        for i in xrange(min(len(text), self.max_text_length)):
            ch = text[i]
            if ch != ' ' or pre != ' ':
                self.text += ch
            pre = ch
    def cleaning_text(self):
        '''Cleaning text to detect
        (eliminate URL, e-mail address and Latin sentence if it is not written in Latin alphabet).
        '''
        latin_count, non_latin_count = 0, 0
        for ch in self.text:
            if 'A' <= ch <= 'z':
                latin_count += 1
            elif ch >= six.u('\u0300') and unicode_block(ch) != 'Latin Extended Additional':
                non_latin_count += 1
        # Mostly non-Latin text: drop the stray Latin characters entirely.
        if latin_count * 2 < non_latin_count:
            text_without_latin = ''
            for ch in self.text:
                if ch < 'A' or 'z' < ch:
                    text_without_latin += ch
            self.text = text_without_latin
    def detect(self):
        '''Detect language of the target text and return the language name
        which has the highest probability.
        '''
        probabilities = self.get_probabilities()
        if probabilities:
            return probabilities[0].lang
        return self.UNKNOWN_LANG
    def get_probabilities(self):
        if self.langprob is None:
            self._detect_block()
        return self._sort_probability(self.langprob)
    def _detect_block(self):
        self.cleaning_text()
        ngrams = self._extract_ngrams()
        if not ngrams:
            raise LangDetectException(ErrorCode.CantDetectError, 'No features in text.')
        self.langprob = [0.0] * len(self.langlist)
        random.seed(self.seed)
        # Average n_trial naive-Bayes runs, each with a jittered alpha.
        for t in xrange(self.n_trial):
            prob = self._init_probability()
            alpha = self.alpha + random.gauss(0.0, 1.0) * self.ALPHA_WIDTH
            i = 0
            while True:
                self._update_lang_prob(prob, random.choice(ngrams), alpha)
                if i % 5 == 0:
                    # Renormalize periodically; stop on convergence or cap.
                    if self._normalize_prob(prob) > self.CONV_THRESHOLD or i >= self.ITERATION_LIMIT:
                        break
                    if self.verbose:
                        six.print_('>', self._sort_probability(prob))
                i += 1
            for j in xrange(len(self.langprob)):
                self.langprob[j] += prob[j] / self.n_trial
            if self.verbose:
                six.print_('==>', self._sort_probability(prob))
    def _init_probability(self):
        '''Initialize the map of language probabilities.
        If there is the specified prior map, use it as initial map.
        '''
        if self.prior_map is not None:
            return list(self.prior_map)
        else:
            return [1.0 / len(self.langlist)] * len(self.langlist)
    def _extract_ngrams(self):
        '''Extract n-grams from target text.'''
        RANGE = list(xrange(1, NGram.N_GRAM + 1))
        result = []
        ngram = NGram()
        for ch in self.text:
            ngram.add_char(ch)
            if ngram.capitalword:
                continue
            for n in RANGE:
                # optimized w = ngram.get(n)
                if len(ngram.grams) < n:
                    break
                w = ngram.grams[-n:]
                if w and w != ' ' and w in self.word_lang_prob_map:
                    result.append(w)
        return result
    def _update_lang_prob(self, prob, word, alpha):
        '''Update language probabilities with N-gram string(N=1,2,3).'''
        if word is None or word not in self.word_lang_prob_map:
            return False
        lang_prob_map = self.word_lang_prob_map[word]
        if self.verbose:
            six.print_('%s(%s): %s' % (word, self._unicode_encode(word), self._word_prob_to_string(lang_prob_map)))
        weight = alpha / self.BASE_FREQ
        for i in xrange(len(prob)):
            prob[i] *= weight + lang_prob_map[i]
        return True
    def _word_prob_to_string(self, prob):
        result = ''
        for j in xrange(len(prob)):
            p = prob[j]
            if p >= 0.00001:
                result += ' %s:%.5f' % (self.langlist[j], p)
        return result
    def _normalize_prob(self, prob):
        '''Normalize probabilities and check convergence by the maximum probability.
        '''
        maxp, sump = 0.0, sum(prob)
        for i in xrange(len(prob)):
            p = prob[i] / sump
            if maxp < p:
                maxp = p
            prob[i] = p
        return maxp
    def _sort_probability(self, prob):
        result = [Language(lang, p) for (lang, p) in zip(self.langlist, prob) if p > self.PROB_THRESHOLD]
        result.sort(reverse=True)
        return result
    def _unicode_encode(self, word):
        buf = ''
        for ch in word:
            if ch >= six.u('\u0080'):
                st = hex(0x10000 + ord(ch))[2:]
                while len(st) < 4:
                    st = '0' + st
                buf += r'\u' + st[1:5]
            else:
                buf += ch
        return buf
| apache-2.0 | -1,899,257,612,203,328,000 | 33.697581 | 115 | 0.555491 | false |
HewlettPackard/python-proliant-sdk | examples/Rest/ex50_get_csr.py | 1 | 2123 | # Copyright 2016 Hewlett Packard Enterprise Development, LP.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from _restobject import RestObject
def ex50_get_csr(restobj, filename):
    """Fetch the iLO certificate signing request and save it to *filename*.

    Iterates over every HpHttpsCert resource found by *restobj*. When a
    resource carries a "CertificateSigningRequest" value it is written to
    *filename* and the response is passed to restobj.error_handler();
    otherwise the user is told to retry later (CSR generation on iLO is
    asynchronous, so the key may not exist yet).
    """
    sys.stdout.write("\nEXAMPLE 50: Get CSR\n")
    instances = restobj.search_for_type("HpHttpsCert.")
    for instance in instances:
        response = restobj.rest_get(instance["href"])
        # Only the dict lookup can raise KeyError; keep the try body minimal.
        try:
            csr_response = response.dict["CertificateSigningRequest"]
        except KeyError:
            sys.stdout.write("\tCSR cannot be accessed right now, "
                             "please try again later")
            continue
        # 'with' closes the file on exit; the explicit close() in the
        # original body was redundant.
        with open(filename, 'wb') as csroutput:
            csroutput.write(csr_response)
        sys.stdout.write("\tCSR Data saved successfully as " + filename + "\n")
        restobj.error_handler(response)
if __name__ == "__main__":
    # When running on the server locally use the following commented values
    # iLO_https_url = "blobstore://."
    # iLO_account = "None"
    # iLO_password = "None"
    # When running remotely connect using the iLO secured (https://) address,
    # iLO account name, and password to send https requests
    # iLO_https_url acceptable examples:
    # "https://10.0.0.100"
    # "https://f250asha.americas.hpqcorp.net"
    # NOTE(review): placeholder example credentials -- replace before running.
    iLO_https_url = "https://10.0.0.100"
    iLO_account = "admin"
    iLO_password = "password"
    #Create a REST object
    REST_OBJ = RestObject(iLO_https_url, iLO_account, iLO_password)
    # Save the CSR into csr.txt in the current working directory.
    ex50_get_csr(REST_OBJ, "csr.txt")
beefoo/babel-lab | scripts/praat/pitch_to_svg.py | 1 | 1861 | # -*- coding: utf-8 -*-
# Description: generates an svg file based on Praat pitch data
# Example usage:
# python pitch_to_svg.py data/open_audio_weekend.Pitch output/open_audio_weekend_pitch.svg 1200 240 80 240 0.1 0.1 6
from pprint import pprint
from praat import fileToPitchData
import svgwrite
import sys
import time
# Input
if len(sys.argv) < 8:
print "Usage: %s <inputfile.Pitch> <outputfile.svg> <width> <height> <min freq> <max freq> <min intensity> <min strength> <max radius>" % sys.argv[0]
sys.exit(1)
INPUT_FILE = sys.argv[1]
OUTPUT_FILE = sys.argv[2]
TARGET_WIDTH = int(sys.argv[3])
TARGET_HEIGHT = int(sys.argv[4])
MIN_FREQ = float(sys.argv[5])
MAX_FREQ = float(sys.argv[6])
MIN_INTENSITY = float(sys.argv[7])
MIN_STRENGTH = float(sys.argv[8])
MAX_RADIUS = int(sys.argv[9])
def px(value):
    """Format *value* as a CSS/SVG pixel length, e.g. 12 -> "12px"."""
    return str(value) + "px"
# Retrieve pitch data from Praat file
frames = fileToPitchData(INPUT_FILE)
print "%s frames read from file %s" % (len(frames), INPUT_FILE)
# Total time
# NOTE(review): assumes frames are ordered by time, so the last frame's
# "start" is the total duration -- confirm against praat.fileToPitchData.
total_seconds = frames[-1]["start"]
print "A total of %s seconds (%s)" % (total_seconds, time.strftime('%M:%S', time.gmtime(total_seconds)))
# Init SVG
dwg = svgwrite.Drawing(filename=OUTPUT_FILE, size=(px(TARGET_WIDTH),px(TARGET_HEIGHT)))
circles = dwg.add(dwg.g(id='circles'))
# Loop through frames
# Each qualifying pitch candidate becomes a black circle: x is time,
# y is frequency (inverted, so high pitch is near the top), and the
# radius scales with the candidate's strength.
for frame in frames:
    topCandidate = frame["candidates"][0]
    if frame["intensity"] > MIN_INTENSITY and topCandidate["strength"] > MIN_STRENGTH and MIN_FREQ <= topCandidate["frequency"] <= MAX_FREQ:
        x = (frame["start"]/total_seconds) * TARGET_WIDTH - MAX_RADIUS
        y = (1.0-(topCandidate["frequency"]-MIN_FREQ)/(MAX_FREQ-MIN_FREQ)) * TARGET_HEIGHT
        radius = topCandidate["strength"] * MAX_RADIUS
        circles.add(dwg.circle(center=(px(x), px(y)), r=px(radius), fill='black'))
# Save
dwg.save()
print "Wrote %s to file" % OUTPUT_FILE
jangsutsr/tower-cli | lib/tower_cli/resources/organization.py | 1 | 3417 | # Copyright 2015, Ansible, Inc.
# Luke Sneeringer <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
from tower_cli import models, resources
from tower_cli.utils import types
class Resource(models.Resource):
    """tower-cli resource for Ansible Tower organizations.

    Provides CRUD via the models.Resource base plus explicit commands to
    (dis)associate users, admins, and projects. The project variants are
    listed in ``deprecated_methods``.
    """
    cli_help = 'Manage organizations within Ansible Tower.'
    endpoint = '/organizations/'
    deprecated_methods = ['associate_project', 'disassociate_project']
    name = models.Field(unique=True)
    description = models.Field(required=False, display=False)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--user', type=types.Related('user'), required=True)
    def associate(self, organization, user):
        """Associate a user with this organization."""
        return self._assoc('users', organization, user)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--user', type=types.Related('user'), required=True)
    def associate_admin(self, organization, user):
        """Associate an admin with this organization."""
        return self._assoc('admins', organization, user)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--user', type=types.Related('user'), required=True)
    def disassociate(self, organization, user):
        """Disassociate a user from this organization."""
        return self._disassoc('users', organization, user)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--user', type=types.Related('user'), required=True)
    def disassociate_admin(self, organization, user):
        """Disassociate an admin from this organization."""
        return self._disassoc('admins', organization, user)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--project', type=types.Related('project'), required=True)
    def associate_project(self, organization, project):
        """Associate a project with this organization."""
        return self._assoc('projects', organization, project)
    @resources.command(use_fields_as_options=False)
    @click.option('--organization', type=types.Related('organization'),
                  required=True)
    @click.option('--project', type=types.Related('project'), required=True)
    def disassociate_project(self, organization, project):
        """Disassociate a project from this organization."""
        return self._disassoc('projects', organization, project)
| apache-2.0 | -8,779,574,059,538,204,000 | 43.960526 | 76 | 0.690957 | false |
COSMOGRAIL/PyCS | pycs/play/fit/bspl.py | 1 | 13642 | # """
# B splines
#
#
# somewhere we should reuse coeffs here... so that a next fit checks if it has the same number of knots, and if yes, uses the previous fit as starting value.
#
# """
#
# import sys
#
# from pycs.gen import *
#
# import numpy as np
# import math
# import matplotlib.pyplot as plt
# import scipy.optimize as spopt
# import scipy.interpolate as spip
#
#
#
# def fitcubbspline(x, y, yerr, t, cini=None, verbose=True):
# """
# This is "my own" cubic B-spline fitting method, using leastsq from scipy.
# I know, this looks like a very naive idea from somebody who has not a clue what a spline is...
# But... recall that we want to
# 1) control the positions of the knots (probably on a fixed grid etc)
# 2) have an irregular sampling of points (and perhaps also of knots)
# 3) IMPORTANT : and that it may well be that we have several "points" to fit for one single JD
# (this last point kills sp.interpolate.splrep as well as probably all direct methods, inverse filtering etc) !
#
# x y : the data
# t : the x-positions of the knots, WITH "prefix" and "suffix" knots !
# cini : initial coeffs for the knots t. If None, we start from zero.
#
#
# We use the notation "t, c, k" from scipy.interpolate.splev etc : t the knots, c the corresponding coeffs, k the degree
# """
#
#
# k = 3 # cubic spline
# if cini == None:
# if verbose :
# print "Spline fit : starting from 0.0"
# cini = np.zeros(len(t)) # initial coeffs = 0.0
# else:
# # we check that they are compatible :
# if len(cini) != len(t):
# raise RuntimeError, "Spline fit : cini has the wrong length"
#
# leastsqres = spopt.leastsq(splinefiterrfunc, cini, args=(t, k, x, y, yerr), full_output=1)
# # this should be faster without the full output...
#
# if verbose:
# print "Spline fit : %i function calls" % leastsqres[2]['nfev']
#
# fittedc = leastsqres[0]
# tck = (t, fittedc, k)
#
# return tck
#
#
# def splinefiterrfunc(c, t, k, x, y, yerr):
# """
# Auxiliary function for the fit.
# Give me a spline (c,t,k) and some points (x, y, yerr) and I return the vector of differences.
# """
# tck = (t, c, k)
# interpy = spip.splev(x, tck, der=0)
# return (y - interpy)/yerr
#
#
#
# def knots(x, sheme = "test1"):
# """
# Give me the x coords of some point, and I give you some knots according to a given sheme...
#
# stupid : for testing purposes
#
# test1 : I want to get the knots selected from a "fixed" absolute grid, JD = 0, n, 2n, 3n ...
#
# 	We add the extra coefficients at both sides of t. See
# http://mathworld.wolfram.com/B-Spline.html
#
# """
# # In this first step, we add the "interior" knots (i.e. containing the extremal knots, but not repeating them).
#
# if sheme == "stupid" :
# t = np.linspace(x[0], x[-1], 10)
#
# if sheme == "test1" :
# n = 10 # n = 6 -> so the grid is 0, 6, 12, ... ie equally spaced by 6
# first = x[0] - (x[0] % n) # this is nearly an int, but no need to convert to int().
# t = np.arange(first, x[-1] + n, n)
#
# # We add some extra coefficients at both sides of t
# prefix = np.ones(3)*t[0]
# suffix = np.ones(3)*t[-1]
# fullt = np.concatenate([prefix, t, suffix])
#
# return fullt
#
#
# def cbsp(lcs, splitgap=60, oldtcks=None, verbose=True, plot=True):
# """
# First try to get a cubic B-spline fit working, simultaneously for n lightcurves, and return a chi2 like something.
# Give me a list of lightcurves and I return you a value for chi2, using a specified spline fit etc
#
# oldtcks : if not "None" but a list of tcks, we will try to start the fit of the spline coeffs using these ...
# This typically works if the number of knots has not changed, i.e. when we optimize microlensing...
#
# """
#
# jdslist = []
# magslist = []
# magerrslist = []
# for thislc in lcs:
# jdslist.append(thislc.getjds()[thislc.mask])
# magslist.append(thislc.getmags()[thislc.mask])
# magerrslist.append(thislc.magerrs[thislc.mask])
#
# mergedjds = np.concatenate(jdslist)
# mergedmags = np.concatenate(magslist)
# mergedmagerrs = np.concatenate(magerrslist)
#
# # Now the sorting ...
# sortedindices = np.argsort(mergedjds)
# sortedjds = mergedjds[sortedindices]
# sortedmags = mergedmags[sortedindices]
# sortedmagerrs = mergedmagerrs[sortedindices]
#
# # We need to find the overlapping regions ?
# # For a first try, let's split this as usual :
# first = sortedjds[:-1]
# second = sortedjds[1:]
# gapindices = np.where(second - first > splitgap)[0] + 1
# # make one big vector of all the indices :
# indices = np.arange(len(sortedjds))
# # split it according to the gaps :
# indexlist = np.hsplit(indices, gapindices)
# if verbose:
# print "We have %i splines." % len(indexlist)
#
# if oldtcks == None:
# # Then we do not have previous splines to start from
# oldtcks = [None] * len(indexlist)
#
# tcks = [] # we will append here the splines from the individual spline fits, not only for plotting, also to pass them to the next call !
# chi2s = [] # the resulting chi2s
# ns = [] # the number of points for that spline
#
# for indexes, oldtck in zip(indexlist, oldtcks): # i.e. for each "season" aka "group" of points
# jds = sortedjds[indexes]
# mags = sortedmags[indexes]
# magerrs = sortedmagerrs[indexes]
#
# t = knots(jds, sheme="test1")
# if (oldtck != None) and (len(t) == len(oldtck[0])) : # Then we should be able to reuse this...
# tck = fitcubbspline(jds, mags, magerrs, t, cini=oldtck[1], verbose=False)
# else:
# tck = fitcubbspline(jds, mags, magerrs, t, verbose=False)
#
# #if verbose:
# # for (t, c) in zip(tck[0], tck[1]):
# # print "t = %8.3f -> c = %8.3f" % (t, c)
#
# tcks.append(tck)
#
# diffs = (mags - spip.splev(jds, tck, der=0))/magerrs
# chi2 = np.sum(diffs * diffs)
#
# if verbose:
# print "chi2 : %8.3f for %i points" % (chi2, len(jds))
#
# chi2s.append(chi2)
# ns.append(len(jds))
#
# totchi2 = np.sum(np.array(chi2s))
# totn = np.sum(np.array(ns))
# chi2n = totchi2/float(totn)
# if verbose:
# print "tot : %8.3f for %i points" % (totchi2, totn)
# print "chi2n: %8.3f" % (chi2n)
#
#
#
# if plot:
# plt.figure(figsize=(12,8)) # sets figure size
# axes = plt.gca()
#
# # The points
# plt.errorbar(sortedjds, sortedmags, sortedmagerrs, fmt=".", color="red", ecolor="#BBBBBB")
#
# # The groups
# for groupindexes in indexlist:
# plt.axvline(sortedjds[groupindexes[0]], color = "#AAAAAA", dashes = (5,5))
# plt.axvline(sortedjds[groupindexes[-1]], color = "#AAAAAA", dashes = (5,5))
#
# # The spline
# for (tck, indexes) in zip(tcks, indexlist):
# xnew = np.linspace(sortedjds[indexes][0], sortedjds[indexes][-1], 1000)
# ynew = spip.splev(xnew,tck,der=0)
#
# plt.plot(xnew, ynew, color="blue")
#
# for knot in tck[0]:
# plt.axvline(knot, color = "#0000AA", dashes = (2,2))
#
#
# # Splines may get huge, so we want to limit the axis ranges :
# axes.set_ylim((min(sortedmags) - 0.1, max(sortedmags) + 0.1))
#
#
# # Something for astronomers only : we invert the y axis direction !
# axes.set_ylim(axes.get_ylim()[::-1])
#
# # And we make a title for that combination of lightcurves :
# #plt.title("Lightcurves", fontsize=20)
# plt.xlabel("Days", fontsize=16)
# plt.ylabel("Magnitude", fontsize=16)
# plt.title("Spline", fontsize=16)
#
# plt.xlim([2340, 5000])
#
# plt.show()
#
#
# return {'chi2':totchi2, 'n':totn, 'chi2n':chi2n, 'tcks':tcks}
#
#
# #def test(lcs, splitgap=60, usemask=True, verbose=True, plot=True):
# # """
# # First try to get a cubic B-spline fit working, simultaneously for n lightcurves, and return a chi2 like something.
# # Give me a list of lightcurves and I return you a value for chi2, using a specified spline fit etc
# #
# #
# # """
# #
# # jdslist = []
# # magslist = []
# # magerrslist = []
# # for thislc in lcs:
# # jdslist.append(thislc.getjds()[thislc.mask])
# # magslist.append(thislc.getmags()[thislc.mask])
# # magerrslist.append(thislc.magerrs[thislc.mask])
# #
# # mergedjds = np.concatenate(jdslist)
# # mergedmags = np.concatenate(magslist)
# # mergedmagerrs = np.concatenate(magerrslist)
# #
# # # Now the sorting ...
# # sortedindices = np.argsort(mergedjds)
# # sortedjds = mergedjds[sortedindices]
# # sortedmags = mergedmags[sortedindices]
# # sortedmagerrs = mergedmagerrs[sortedindices]
# #
# # # We need to find the overlapping regions ?
# # # For a first try, let's split this as usual :
# # first = sortedjds[:-1]
# # second = sortedjds[1:]
# # gapindices = np.where(second - first > splitgap)[0] + 1
# # # make one big vector of all the indices :
# # indices = np.arange(len(sortedjds))
# # # split it according to the gaps :
# # indexlist = np.hsplit(indices, gapindices)
# # if verbose:
# # print "We have %i splines." % len(indexlist)
# #
# #
# # tcks = [] # we will append here the splines from the individual spline fits (for plotting ...)
# # chi2s = [] # the resulting chi2s
# # ns = [] # the number of points for that spline
# #
# # for indexes in indexlist: # i.e. for each "season" aka "group" of points
# # jds = sortedjds[indexes]
# # mags = sortedmags[indexes]
# # magerrs = sortedmagerrs[indexes]
# #
# # #t = knots(jds, sheme="test1")
# # #tck = fitcubbspline(jds, mags, magerrs, t, verbose=False)
# #
# # tck = spip.splrep(jds, mags, w=(1.0/magerrs))
# # print tck
# # #maspline = spip.UnivariateSpline(jds, mags, w=magerrs, k=3)
# #
# # #tck = [maspline.get_knots(), maspline.get_coeffs(), 3]
# # #print len(maspline.get_knots())
# #
# # #if verbose:
# # # for (t, c) in zip(tck[0], tck[1]):
# # # print "t = %8.3f -> c = %8.3f" % (t, c)
# #
# # tcks.append(tck)
# #
# # diffs = (mags - spip.splev(jds, tck, der=0))/magerrs
# # chi2 = np.sum(diffs * diffs)
# #
# # if verbose:
# # print "chi2 : %8.3f for %i points" % (chi2, len(jds))
# #
# # chi2s.append(chi2)
# # ns.append(len(jds))
# #
# # totchi2 = np.sum(np.array(chi2s))
# # totn = np.sum(np.array(ns))
# # chi2n = totchi2/float(totn)
# # if verbose:
# # print "tot : %8.3f for %i points" % (totchi2, totn)
# # print "chi2n: %8.3f" % (chi2n)
# #
# #
# #
# # if plot:
# # plt.figure(figsize=(12,8)) # sets figure size
# # axes = plt.gca()
# #
# # # The points
# # plt.errorbar(sortedjds, sortedmags, sortedmagerrs, fmt=".", color="red", ecolor="#BBBBBB")
# #
# # # The groups
# # for groupindexes in indexlist:
# # plt.axvline(sortedjds[groupindexes[0]], color = "#AAAAAA", dashes = (5,5))
# # plt.axvline(sortedjds[groupindexes[-1]], color = "#AAAAAA", dashes = (5,5))
# #
# # # The spline
# # for (tck, indexes) in zip(tcks, indexlist):
# # xnew = np.linspace(sortedjds[indexes][0], sortedjds[indexes][-1], 1000)
# # ynew = spip.splev(xnew,tck,der=0)
# #
# # plt.plot(xnew, ynew, color="blue")
# #
# # for knot in tck[0]:
# # plt.axvline(knot, color = "#0000AA", dashes = (2,2))
# #
# #
# # # Splines may get huge, so we want to limit the axis ranges :
# # axes.set_ylim((min(sortedmags) - 0.1, max(sortedmags) + 0.1))
# #
# #
# # # Something for astronomers only : we invert the y axis direction !
# # axes.set_ylim(axes.get_ylim()[::-1])
# #
# # # And we make a title for that combination of lightcurves :
# # #plt.title("Lightcurves", fontsize=20)
# # plt.xlabel("Days", fontsize=16)
# # plt.ylabel("Magnitude", fontsize=16)
# # plt.title("Spline", fontsize=16)
# #
# # plt.xlim([2340, 5000])
# #
# # plt.show()
# #
# #
# # #return {'chi2':totchi2, 'n':totn, 'chi2n':chi2n}
# #
# #
# #def one(lcs, verbose=True, plot=True):
# # """
# # Trying to build one big spline over all the groups...
# #
# # """
# #
# #
# # jdslist = []
# # magslist = []
# # magerrslist = []
# # for thislc in lcs:
# # jdslist.append(thislc.getjds()[thislc.mask])
# # magslist.append(thislc.getmags()[thislc.mask])
# # magerrslist.append(thislc.magerrs[thislc.mask])
# #
# # mergedjds = np.concatenate(jdslist)
# # mergedmags = np.concatenate(magslist)
# # mergedmagerrs = np.concatenate(magerrslist)
# #
# # # Now the sorting ...
# # sortedindices = np.argsort(mergedjds)
# # sortedjds = mergedjds[sortedindices]
# # sortedmags = mergedmags[sortedindices]
# # sortedmagerrs = mergedmagerrs[sortedindices]
# #
# # jds = sortedjds
# # mags = sortedmags
# # magerrs = sortedmagerrs
# #
# # #t = np.linspace(jds[0], jds[-1], 10) # the knots
# # t = np.arange(int(math.floor(jds[0])), int(math.ceil(jds[-1])), 30)
# #
# # tck = fitcubbspline(jds, mags, magerrs, t, verbose=True)
# #
# # if plot:
# # plt.figure(figsize=(12,8)) # sets figure size
# # axes = plt.gca()
# #
# # # The points
# # plt.errorbar(sortedjds, sortedmags, sortedmagerrs, fmt=".", color="red", ecolor="#BBBBBB")
# #
# # # The spline
# #
# # xnew = np.linspace(sortedjds[0], sortedjds[-1], 1000)
# # ynew = spip.splev(xnew,tck,der=0)
# #
# # plt.plot(xnew, ynew, color="blue")
# #
# # for knot in tck[0]:
# # plt.axvline(knot, color = "#0000AA", dashes = (2,2))
# #
# # # Something for astronomers only : we invert the y axis direction !
# # axes.set_ylim(axes.get_ylim()[::-1])
# #
# # # And we make a title for that combination of lightcurves :
# # #plt.title("Lightcurves", fontsize=20)
# # plt.xlabel("Days", fontsize=16)
# # plt.ylabel("Magnitude", fontsize=16)
# # plt.title("Spline", fontsize=16)
# #
# # plt.xlim([2340, 5000])
# #
# # plt.show()
# #
# #
# # diffs = (mags - spip.splev(jds, tck, der=0))/magerrs
# # chi2 = np.sum(diffs * diffs)
# #
# # if verbose:
# # print "Chi2 : %8.3f" % chi2
# #
# #
# # return chi2
# #
# #
| gpl-3.0 | -1,284,258,648,724,685,600 | 30.725581 | 156 | 0.62073 | false |
bally12345/enigma2 | lib/python/Screens/Standby.py | 1 | 7770 | from Screen import Screen
from Components.ActionMap import ActionMap
from Components.config import config
from Components.AVSwitch import AVSwitch
from Components.SystemInfo import SystemInfo
from GlobalActions import globalActionMap
from enigma import eDVBVolumecontrol
from os import path
import Screens.InfoBar
# Module-level handle to the active standby screen; None while the box
# is not in standby (set/cleared by Standby2).
inStandby = None
class Standby2(Screen):
	"""Standby screen.

	Entering standby stops (or pauses) the running service, mutes the
	audio and switches the A/V input away from the encoder; leaving it
	reverses all of that in __onClose.
	"""
	def Power(self):
		"""Leave standby: restore input and audio, then close the screen."""
		print "leave standby"
		#set input to encoder
		self.avswitch.setInput("ENCODER")
		#restart last played service
		#unmute adc
		self.leaveMute()
		#kill me
		self.close(True)
	def setMute(self):
		"""Mute audio, remembering whether it was already muted."""
		# wasMuted lets leaveMute() avoid unmuting a deliberately muted box.
		if (eDVBVolumecontrol.getInstance().isMuted()):
			self.wasMuted = 1
			print "mute already active"
		else:
			self.wasMuted = 0
			eDVBVolumecontrol.getInstance().volumeToggleMute()
	def leaveMute(self):
		"""Undo setMute(), but only if setMute() did the muting."""
		if self.wasMuted == 0:
			eDVBVolumecontrol.getInstance().volumeToggleMute()
	def __init__(self, session):
		Screen.__init__(self, session)
		self.skinName = "Standby"
		self.avswitch = AVSwitch()
		print "enter standby"
		# Both the power key and the discrete power-on event wake the box.
		self["actions"] = ActionMap( [ "StandbyActions" ],
		{
			"power": self.Power,
			"discrete_on": self.Power
		}, -1)
		# Disable all global key handling while in standby.
		globalActionMap.setEnabled(False)
		#mute adc
		self.setMute()
		self.paused_service = None
		self.prev_running_service = None
		if self.session.current_dialog:
			if self.session.current_dialog.ALLOW_SUSPEND == Screen.SUSPEND_STOPS:
				#get currently playing service reference
				self.prev_running_service = self.session.nav.getCurrentlyPlayingServiceReference()
				#stop actual played dvb-service
				self.session.nav.stopService()
			elif self.session.current_dialog.ALLOW_SUSPEND == Screen.SUSPEND_PAUSES:
				self.paused_service = self.session.current_dialog
				self.paused_service.pauseService()
		#set input to vcr scart
		if SystemInfo["ScartSwitch"]:
			self.avswitch.setInput("SCART")
		else:
			self.avswitch.setInput("AUX")
		self.onFirstExecBegin.append(self.__onFirstExecBegin)
		self.onClose.append(self.__onClose)
	def __onClose(self):
		"""Restore playback, screen state and key handling on leave."""
		global inStandby
		inStandby = None
		if self.prev_running_service:
			self.session.nav.playService(self.prev_running_service)
		elif self.paused_service:
			self.paused_service.unPauseService()
		self.session.screen["Standby"].boolean = False
		globalActionMap.setEnabled(True)
	def __onFirstExecBegin(self):
		"""Publish standby state and count this standby cycle."""
		global inStandby
		inStandby = self
		self.session.screen["Standby"].boolean = True
		config.misc.standbyCounter.value += 1
	def createSummary(self):
		# Companion screen for the front display while in standby.
		return StandbySummary
class Standby(Standby2):
	"""Standby entry point.

	If a timeshift save is pending (per InfoBar), first show the
	timeshift-save dialog via an invisible screen; otherwise enter
	standby directly through Standby2.
	"""
	def __init__(self, session):
		if Screens.InfoBar.InfoBar and Screens.InfoBar.InfoBar.instance and Screens.InfoBar.InfoBar.ptsGetSaveTimeshiftStatus(Screens.InfoBar.InfoBar.instance):
			# Zero-size skin: this screen only exists to drive the
			# timeshift-save dialog; hiding it closes it again.
			self.skin = """<screen position="0,0" size="0,0"/>"""
			Screen.__init__(self, session)
			self.onFirstExecBegin.append(self.showMessageBox)
			self.onHide.append(self.close)
		else:
			Standby2.__init__(self, session)
			self.skinName = "Standby"
	def showMessageBox(self):
		"""Trigger the timeshift-save dialog with standby as post-action."""
		if Screens.InfoBar.InfoBar and Screens.InfoBar.InfoBar.instance:
			Screens.InfoBar.InfoBar.saveTimeshiftActions(Screens.InfoBar.InfoBar.instance, postaction="standby")
class StandbySummary(Screen):
	"""Front-display summary during standby: a clock whose blinking is
	driven by session.RecordState and the
	config.usage.blinking_display_clock_during_recording setting."""
	skin = """
	<screen position="0,0" size="132,64">
		<widget source="global.CurrentTime" render="Label" position="0,0" size="132,64" font="Regular;40" halign="center">
			<convert type="ClockToText" />
		</widget>
		<widget source="session.RecordState" render="FixedLabel" text=" " position="0,0" size="132,64" zPosition="1" >
			<convert type="ConfigEntryTest">config.usage.blinking_display_clock_during_recording,True,CheckSourceBoolean</convert>
			<convert type="ConditionalShowHide">Blink</convert>
		</widget>
	</screen>"""
from enigma import quitMainloop, iRecordableService
from Screens.MessageBox import MessageBox
from time import time
from Components.Task import job_manager
class QuitMainloopScreen(Screen):
	"""Borderless notice shown while the mainloop is quitting.

	`retvalue` selects the message: 1=shutdown, 2=reboot, 3=GUI restart,
	4=frontprocessor upgrade, 5=GUI restart after error, 42=unattended
	upgrade.  An unmapped value yields text=None.
	"""
	def __init__(self, session, retvalue=1):
		self.skin = """<screen name="QuitMainloopScreen" position="fill" flags="wfNoBorder">
			<ePixmap pixmap="skin_default/icons/input_info.png" position="c-27,c-60" size="53,53" alphatest="on" />
			<widget name="text" position="center,c+5" size="720,100" font="Regular;22" halign="center" />
			</screen>"""
		Screen.__init__(self, session)
		# Local import keeps module import time free of this dependency.
		from Components.Label import Label
		text = { 1: _("Your STB_BOX is shutting down"),
			2: _("Your STB_BOX is rebooting"),
			3: _("The User Interface of your STB_BOX is restarting"),
			4: _("Your frontprocessor will be upgraded\nPlease wait until your STB_BOX reboots\nThis may take a few minutes"),
			5: _("The User Interface of your STB_BOX is restarting\ndue to an error in mytest.py"),
			42: _("Unattended upgrade in progress\nPlease wait until your STB_BOX reboots\nThis may take a few minutes") }.get(retvalue)
		self["text"] = Label(text)
# True while a TryQuitMainloop confirmation dialog is visible
# (toggled by its __onShow/__onHide handlers).
inTryQuitMainloop = False
class TryQuitMainloop(MessageBox):
	"""Guarded shutdown/reboot/restart dialog.

	If recordings are running (or start within 6 minutes) or background
	jobs are pending, the user is asked for confirmation; in standby the
	action is instead deferred until recording activity ends.  With no
	blocker, the action proceeds immediately.  `retvalue` is passed to
	quitMainloop (1=shutdown, 2=reboot, 3=restart, 4=FP upgrade,
	42=unattended upgrade).
	"""
	def __init__(self, session, retvalue=1, timeout=-1, default_yes = False):
		self.retval = retvalue
		self.ptsmainloopvalue = retvalue
		recordings = session.nav.getRecordings()
		jobs = len(job_manager.getPendingJobs())
		# True once getRecordEvent has been hooked via the MessageBox path;
		# close() uses it to unhook exactly once.
		self.connected = False
		reason = ""
		next_rec_time = -1
		if not recordings:
			next_rec_time = session.nav.RecordTimer.getNextRecordingTime()
		if jobs:
			reason = _("Job task(s) are in progress!") + '\n'
			if jobs == 1:
				job = job_manager.getPendingJobs()[0]
				reason += "%s: %s (%d%%)\n" % (job.getStatustext(), job.name, int(100*job.progress/float(job.end)))
			else:
				reason += (_("%d jobs are running in the background!") % jobs) + '\n'
		# A recording in progress (or starting within 360s) overrides the
		# job message.
		if recordings or (next_rec_time > 0 and (next_rec_time - time()) < 360):
			reason = _("Recording(s) are in progress or coming up in few seconds!") + '\n'
		if reason and inStandby:
			# In standby: no dialog; wait for recordings to end, then quit.
			session.nav.record_event.append(self.getRecordEvent)
			self.skinName = ""
		elif reason and not inStandby:
			text = { 1: _("Really shutdown now?"),
				2: _("Really reboot now?"),
				3: _("Really restart now?"),
				4: _("Really upgrade the frontprocessor and reboot now?"),
				42: _("Really upgrade your STB_BOX and reboot now?") }.get(retvalue)
			if text:
				MessageBox.__init__(self, session, reason+text, type = MessageBox.TYPE_YESNO, timeout = timeout, default = default_yes)
				self.skinName = "MessageBox"
				session.nav.record_event.append(self.getRecordEvent)
				self.connected = True
				self.onShow.append(self.__onShow)
				self.onHide.append(self.__onHide)
				return
		# No blocker (or no text for this retvalue): act immediately via
		# an invisible screen.
		self.skin = """<screen position="1310,0" size="0,0"/>"""
		Screen.__init__(self, session)
		self.close(True)
	def getRecordEvent(self, recservice, event):
		"""Record-event hook: quit once the last recording has ended.

		Timeshift recordings are ignored.  If another recording starts
		within 360s, wait for it via the MessageBox timeout timer.
		"""
		if event == iRecordableService.evEnd and config.timeshift.isRecording.value:
			return
		else:
			if event == iRecordableService.evEnd:
				recordings = self.session.nav.getRecordings()
				if not recordings: # no more recordings exist
					rec_time = self.session.nav.RecordTimer.getNextRecordingTime()
					if rec_time > 0 and (rec_time - time()) < 360:
						self.initTimeout(360) # wait for next starting timer
						self.startTimer()
					else:
						self.close(True) # immediate shutdown
			elif event == iRecordableService.evStart:
				self.stopTimer()
	def close(self, value):
		"""Unhook the record event and either quit the mainloop (value
		is true) or just close the confirmation dialog."""
		if self.connected:
			# BUGFIX: was `self.conntected=False`, which created a new
			# attribute and left self.connected stuck at True.
			self.connected = False
			self.session.nav.record_event.remove(self.getRecordEvent)
		if value:
			self.hide()
			if self.retval == 1:
				config.misc.DeepStandby.value = True
			self.session.nav.stopService()
			self.quitScreen = self.session.instantiateDialog(QuitMainloopScreen,retvalue=self.retval)
			self.quitScreen.show()
			quitMainloop(self.retval)
		else:
			MessageBox.close(self, True)
	def __onShow(self):
		global inTryQuitMainloop
		inTryQuitMainloop = True
	def __onHide(self):
		global inTryQuitMainloop
		inTryQuitMainloop = False
| gpl-2.0 | 2,002,061,532,838,308,000 | 33.843049 | 154 | 0.716474 | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.